Merge branch 'main' into functions

This commit is contained in:
Ray Lyon
2022-07-01 17:52:39 -04:00
committed by GitHub
7 changed files with 605 additions and 16 deletions

1
.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
.vscode

150
ansible/playbook-snmp.yml Normal file
View File

@@ -0,0 +1,150 @@
---
# Installs and configures snmpd (net-snmp) plus LibreNMS "extend" scripts.
# Expects snmpd.conf and snmpd_arm.conf in same directory.
- name: configure snmp
  hosts: active
  remote_user: raylyon
  become: true
  vars:
    # Pi-hole API tokens; supply via --extra-vars or vault, don't commit them.
    pihole01_key: ""
    pihole02_key: ""
  tasks:
    - name: get service facts
      ansible.builtin.service_facts:

    - name: get package facts
      ansible.builtin.package_facts:
        manager: auto

    - name: check for pihole
      ansible.builtin.stat:
        path: "/usr/local/bin/pihole"
      register: pihole

    - name: install latest snmpd - debian
      ansible.builtin.package:
        name: snmpd
        state: latest
      when: ansible_os_family == "Debian"

    - name: install latest snmpd - centos
      ansible.builtin.package:
        name: net-snmp
        state: latest
      when: ansible_os_family == "RedHat"

    - name: install latest jq
      ansible.builtin.package:
        name: jq
        state: latest

    - name: copy snmpd config x86
      ansible.builtin.copy:
        src: snmpd.conf
        dest: "/etc/snmp/snmpd.conf"
      when: ansible_architecture == "x86_64"

    - name: copy snmpd config arm
      ansible.builtin.copy:
        src: snmpd_arm.conf
        dest: "/etc/snmp/snmpd.conf"
      when: ansible_architecture == "armv6l"

    # snmpd's "extend serial" script must be able to read the DMI serial.
    - name: fix extend serial permissions
      ansible.builtin.file:
        path: "/sys/devices/virtual/dmi/id/product_serial"
        mode: "0444"  # leading zero so Ansible parses it as octal
      when: ansible_architecture == "x86_64"

    # sysfs permissions reset on boot, so reapply via cron.
    - name: cron job for extend serial permissions
      ansible.builtin.lineinfile:
        path: /etc/crontab
        line: "@reboot chmod 444 /sys/devices/virtual/dmi/id/product_serial"
      when: ansible_architecture == "x86_64"

    - name: download script for extend distro
      ansible.builtin.get_url:
        url: "https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/distro"
        dest: "/usr/bin/distro"
        mode: "0755"

    - name: download script for extend osupdates
      ansible.builtin.get_url:
        url: "https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/osupdate"
        dest: "/etc/snmp/osupdate"
        mode: "0755"

    - name: download script for extend zfs
      ansible.builtin.get_url:
        url: "https://github.com/librenms/librenms-agent/raw/master/snmp/zfs-linux"
        dest: "/etc/snmp/zfs-linux"
        mode: "0755"
      when: "'zfs-zed' in ansible_facts.packages"

    # NOTE(review): service_facts keys normally include the unit suffix
    # (e.g. "docker.service"); confirm the bare name matches on these hosts.
    - name: download script for extend docker
      ansible.builtin.get_url:
        url: "https://github.com/librenms/librenms-agent/raw/master/snmp/docker-stats.sh"
        dest: "/etc/snmp/docker-stats.sh"
        mode: "0755"
      when: "'docker' in ansible_facts.services"

    - name: download script for extend pihole
      ansible.builtin.get_url:
        url: "https://github.com/librenms/librenms-agent/raw/master/snmp/pi-hole"
        dest: "/etc/snmp/pi-hole"
        mode: "0755"
      when: pihole.stat.exists

    - name: add api key to pihole script for pihole01
      ansible.builtin.lineinfile:
        path: "/etc/snmp/pi-hole"
        regexp: '^API_AUTH_KEY='
        line: 'API_AUTH_KEY="{{ pihole01_key }}"'
        backrefs: true
      when: ansible_hostname == "pihole01"

    - name: add api key to pihole script for pihole02
      ansible.builtin.lineinfile:
        path: "/etc/snmp/pi-hole"
        regexp: '^API_AUTH_KEY='
        line: 'API_AUTH_KEY="{{ pihole02_key }}"'
        backrefs: true
      when: ansible_hostname == "pihole02"

    - name: enable extend nfs-server
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend nfs-server /bin/cat /proc/net/rpc/nfsd"
      when: "'nfs-kernel-server' in ansible_facts.services"

    - name: enable extend zfs
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend zfs '/usr/bin/sudo /etc/snmp/zfs-linux'"
      when: "'zfs-zed' in ansible_facts.packages"

    # validate prevents committing a syntactically broken sudoers file.
    - name: update sudoers file for extend zfs
      ansible.builtin.lineinfile:
        path: "/etc/sudoers"
        line: "Debian-snmp ALL=(ALL) NOPASSWD: /etc/snmp/zfs-linux"
        validate: "/usr/sbin/visudo -cf %s"
      when: "'zfs-zed' in ansible_facts.packages"

    - name: enable extend docker
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend docker /usr/bin/sudo /etc/snmp/docker-stats.sh"
      when: "'docker' in ansible_facts.services"

    - name: enable extend pihole
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend pi-hole /etc/snmp/pi-hole"
      when: pihole.stat.exists

    - name: update sudoers file for extend docker
      ansible.builtin.lineinfile:
        path: "/etc/sudoers"
        line: "Debian-snmp ALL=(ALL) NOPASSWD: /etc/snmp/docker-stats.sh"
        validate: "/usr/sbin/visudo -cf %s"
      when: "'docker' in ansible_facts.services"

    - name: enable extend osupdates
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend osupdate /etc/snmp/osupdate"

    - name: set ExecStart options in service file - ubuntu
      ansible.builtin.lineinfile:
        path: "/lib/systemd/system/snmpd.service"
        regexp: '^ExecStart='
        line: "ExecStart=/usr/sbin/snmpd -LS4d -Lf /dev/null -u Debian-snmp -g Debian-snmp -I -smux,mteTrigger,mteTriggerConf -f"
        backrefs: true
      when: ansible_os_family == "Debian"

    - name: reload systemd configs - ubuntu
      ansible.builtin.systemd:
        daemon_reload: true
      when: ansible_os_family == "Debian"

    - name: set snmpdopts - centos
      ansible.builtin.lineinfile:
        path: "/etc/sysconfig/snmpd"
        regexp: '^# OPTIONS=|^OPTIONS='
        line: 'OPTIONS="-LS4-6d"'
      when: ansible_os_family == "RedHat"

    - name: enable and restart snmpd.service
      ansible.builtin.systemd:
        state: restarted
        enabled: true
        name: snmpd

    - name: verify the snmpd service is running
      ansible.builtin.systemd:
        state: started
        name: snmpd

27
ansible/snmpd.conf Normal file
View File

@@ -0,0 +1,27 @@
# Change RANDOMSTRINGGOESHERE to your preferred SNMP community string
com2sec readonly default RANDOMSTRINGGOESHERE
group MyROGroup v2c readonly
view all included .1 80
access MyROGroup "" any noauth exact all none none
syslocation Home
syscontact Ray Lyon <ray@raylyon.net>
agentAddress udp:161,udp6:[::1]:161
rocommunity RANDOMSTRINGGOESHERE
#OS Distribution Detection
extend distro /usr/bin/distro
#Hardware Detection
# (uncomment for x86 platforms)
extend manufacturer '/bin/cat /sys/devices/virtual/dmi/id/sys_vendor'
extend hardware '/bin/cat /sys/devices/virtual/dmi/id/product_name'
extend serial '/bin/cat /sys/devices/virtual/dmi/id/product_serial'
# (uncomment for ARM platforms)
#extend hardware '/bin/cat /sys/firmware/devicetree/base/model'
#extend serial '/bin/cat /sys/firmware/devicetree/base/serial-number'
#
#

View File

@@ -94,23 +94,7 @@ else
error_exit "$LINENO: Not a valid source" 1>&2 error_exit "$LINENO: Not a valid source" 1>&2
fi fi
<<<<<<< HEAD
echo "File(s) encoded successfully!" echo "File(s) encoded successfully!"
exit 0 exit 0
=======
# Encode each file in the directory with different CRF setting based on resolution
for FILE in "$DIRECTORY"/*.*; do
RES=$(ffprobe -v error -select_streams v:0 -show_entries stream=width -of default=noprint_wrappers=1:nokey=1 "$FILE")
FILENAME=$(basename "$FILE")
if [[ $RES -gt 1920 ]]; then
ffmpeg -i "$FILE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_4K" -c:a copy "$DIRECTORY"/output/"$FILENAME"
elif [[ $RES -le 1920 ]]; then
ffmpeg -i "$FILE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_HD" -c:a copy "$DIRECTORY"/output/"$FILENAME"
else
echo "$FILENAME is not a valid filetype"
fi
done
>>>>>>> main

View File

@@ -0,0 +1,150 @@
#!/usr/local/bin/python3
"""
This script pulls RADIUS logs from your Foxpass instance and allows you to parse them by date, user, IP, or connection outcome.
Logs can be printed (in pretty colors) or exported in CSV format.
Required packages:
pip install requests
To run:
python foxpass_radius_logs.py
By default the script will print color-coded RADIUS logs from the last (5) days. You can use the optional arguments below:
--hours - How far back to show the logs in Hours.
--user - Filter by user.
--location - Filter by RADIUS Client, based on the items defined in the OFFICE_IPS dict.
--outcome - Filter by outcome of the connection, specify True or False.
--csv - Output the logs to a CSV file, specify the filename and path.
"""
from datetime import datetime, timedelta, timezone
import requests
import argparse
import csv
##### EDIT THESE #####
# Foxpass API token; leave empty here and fill in before running.
FOXPASS_API_TOKEN = ""
# RADIUS clients, can be called with the --location argument.
OFFICE_IPS = {
"office1":"",
"office2":"",
"office3":"",
}
# "YYYY-MM-DDTHH:MMZ". STARTDATE Default 5 days ago, ENDDATE is current day/time in UTC. Can be changed with the --hours argument.
STARTDATE = (datetime.now(timezone.utc) - timedelta(days=5)).strftime('%Y-%m-%dT%H:%MZ')
ENDDATE = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%MZ')
FOXPASS_URL = "https://api.foxpass.com/v1/logs/radius/"
HEADERS = {'Authorization': 'Token ' + FOXPASS_API_TOKEN}
# NOTE(review): this hits the Foxpass API at import time to learn the page
# count; importing this module therefore requires network access and a token.
PAGEREQUEST = requests.post(FOXPASS_URL, json={"from": STARTDATE, "to": ENDDATE}, headers=HEADERS).json()
PAGES = PAGEREQUEST["numPages"]
class bcolors:
    """ANSI escape sequences used to color-code printed log fields."""

    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset back to the terminal default
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def get_args():
    """Parse the optional CLI filters for the log pull.

    Returns the argparse namespace with user, outcome, hours, location
    and csv attributes (all default to None).
    """
    parser = argparse.ArgumentParser(
        description='Pull and parse RADIUS logs from your Foxpass environment')
    parser.add_argument('--user', help='Filter logs by username')
    parser.add_argument('--outcome', help='Filter logs by connection outcome: True or False')
    parser.add_argument('--hours', type=int, help='How far back to check the logs in hours')
    # fix: the old help text listed locations (tlv, nyc, ...) that do not
    # exist in OFFICE_IPS; the module docstring says keys come from that dict.
    parser.add_argument('--location', help='Filter logs by RADIUS client, as defined in the OFFICE_IPS dict')
    parser.add_argument('--csv', help='Export a CSV of the log data to the specified filename and path')
    return parser.parse_args()
# Builds an if statement to filter the logs based on user arguments
def build_query(username=None, outcome=None, location=None):
    """Build the boolean expression (as a string) used to filter logs.

    The returned string is eval()'d per log entry by lookup_filter(),
    with `log` bound to one log dict. Returns "" when no filters were
    supplied.

    fix: the original stripped a trailing " and " by peeking at
    query_string[-2] and slicing off 5 characters — fragile and hard to
    read. Joining the clauses produces the same strings directly.
    """
    clauses = []
    if username is not None:
        clauses.append(f"log['username']=='{username}'")
    if location is not None:
        clauses.append(f"log['ipAddress']=='{location}'")
    if outcome is not None:
        clauses.append(f"log['success']=={outcome}")
    return " and ".join(clauses)
# Pulls logs from Foxpass and stores them
def get_logs():
    """Download every page of RADIUS logs from Foxpass.

    Returns a list with one entry per page; each entry is that page's
    list of log dicts (the API response's "data" field). Page count and
    time window come from the module-level PAGES/STARTDATE/ENDDATE.
    """
    pages = []
    for page_num in range(1, PAGES + 1):
        payload = {"from": STARTDATE, "to": ENDDATE, "page": page_num, "ascending": True}
        response = requests.post(FOXPASS_URL, json=payload, headers=HEADERS).json()
        pages.append(response["data"])
    return pages
# Prints or exports all logs for the specified time period
def lookup_all(logs, csv_arg=None, csv_writer=None):
    """Print every log entry, or export each to CSV when csv_arg is set.

    fix: the original indexed `logs` with a counter bounded by the
    global PAGES; iterating the list directly removes the hidden global
    dependency and stays correct for any number of pages.
    """
    for page in logs:
        for log in page:
            if csv_arg is None:
                print_logs(log)
            else:
                csv_export(log, csv_writer)
# Prints or exports logs based on user-provided filter arguments for the specified time period
def lookup_filter(logs, if_statement, csvarg=None, csv_writer=None):
    """Print or CSV-export only the log entries matching `if_statement`.

    `if_statement` is a Python expression string built by build_query().
    fix: iterate the pages directly instead of indexing by the global
    PAGES count, and evaluate the filter once per entry (the original
    could eval() it twice).
    """
    for page in logs:
        for log in page:
            # NOTE(review): eval() on a constructed string — fine for the
            # strings build_query() emits, but never feed it untrusted input.
            if eval(if_statement):
                if csvarg is None:
                    print_logs(log)
                else:
                    csv_export(log, csv_writer)
def csv_export(log, csv_writer):
    """Append one log entry to the CSV: timestamp, user, IP, message, success."""
    row = [log["timestamp"], log["username"], log["ipAddress"],
           log["message"], log["success"]]
    csv_writer.writerow(row)
# Determines start time based on the --hours argument
def start_time(hours):
    """Return the UTC timestamp `hours` ago, formatted for the Foxpass API
    ("YYYY-MM-DDTHH:MMZ")."""
    cutoff = datetime.now(timezone.utc) - timedelta(hours=hours)
    return cutoff.strftime('%Y-%m-%dT%H:%MZ')
def print_logs(sourcedict):
    """Print one log entry with each field color-coded via bcolors.

    fix: the original never emitted bcolors.ENDC, so the last color
    (OKBLUE) bled into all subsequent terminal output.
    """
    print(bcolors.OKCYAN + sourcedict["timestamp"],
          bcolors.OKGREEN + sourcedict["username"],
          bcolors.WARNING + sourcedict["ipAddress"],
          bcolors.FAIL + sourcedict["message"],
          bcolors.OKBLUE + "Success:",
          sourcedict["success"],
          bcolors.ENDC)
def main():
    """Entry point: apply CLI filters, pull the logs, then print or export.

    fix: the CSV file handle was never closed; a try/finally guarantees
    the export is flushed to disk even if a lookup raises.
    """
    global STARTDATE
    args = get_args()
    csv_open = None
    csv_writer = None
    try:
        if args.csv is not None:
            csv_open = open(args.csv, 'w', newline='')
            csv_writer = csv.writer(csv_open)
            csv_writer.writerow(["TIMESTAMP (UTC)","USERNAME","IP ADDRESS","MESSAGE","SUCCESS"])
        if args.hours:
            # STARTDATE is module-global because get_logs() reads it.
            STARTDATE = start_time(args.hours)
        if args.location is not None:
            location_ip = OFFICE_IPS[args.location]
        else:
            location_ip = None
        if_statement = build_query(args.user, args.outcome, location_ip)
        logs = get_logs()
        if if_statement == "":
            lookup_all(logs, args.csv, csv_writer)
        else:
            lookup_filter(logs, if_statement, args.csv, csv_writer)
    finally:
        if csv_open is not None:
            csv_open.close()


if __name__ == '__main__':
    main()

146
movie-check/movie_check.py Normal file
View File

@@ -0,0 +1,146 @@
#!/usr/bin/python3
import requests
from datetime import datetime
import os
import argparse
# API credentials come from the environment; both are None if unset,
# which will surface as auth failures at request time.
tmdb_api_token = os.environ.get("TMDB_API_TOKEN")
sa_api_token = os.environ.get("SA_API_TOKEN")
# The Movie Database (TMDB) v3 API base URL and auth headers.
tmdb_url = "https://api.themoviedb.org/3"
tmdb_headers = {
'Authorization': f'Bearer {tmdb_api_token}',
'Content-Type': 'application/json;charset=utf-8',
'Accept': 'application/json;charset=utf-8'
}
# Streaming Availability API (via RapidAPI) endpoint and auth headers.
sa_url = "https://streaming-availability.p.rapidapi.com/get/basic"
sa_headers = {
'x-rapidapi-host': "streaming-availability.p.rapidapi.com",
'x-rapidapi-key': sa_api_token
}
def get_args():
    """Parse CLI options; currently only --year (optional release year)."""
    arg_parser = argparse.ArgumentParser(
        description='Search movie streaming availability.')
    arg_parser.add_argument('--year', type=int,
                            help='Specify movie release year')
    return arg_parser.parse_args()
def tmdb_lookup(tmdb_url, tmdb_headers, movie, args):
    """Search TMDB for `movie` and return (id, title, year, rating)
    for the top search hit.

    Exits the program when the search returns no results. `year` is the
    string "???" when TMDB has no release date for the hit.
    """
    search_params = {
        "language": "en-US",
        "query": movie,
        "page": 1,
        "include_adult": False
    }
    if args.year:
        search_params["primary_release_year"] = args.year
    results = requests.get(f"{tmdb_url}/search/movie", params=search_params,
                           headers=tmdb_headers).json()["results"]
    if not results:
        print("I'm having trouble finding that movie. " +
              "Check your spelling and try again.")
        exit()
    top_hit = results[0]
    release_date = top_hit['release_date']
    if release_date:
        movie_year = datetime.strptime(release_date, "%Y-%m-%d").year
    else:
        movie_year = "???"
    return top_hit['id'], top_hit['title'], movie_year, top_hit['vote_average']
def sa_lookup(sa_url, sa_headers, movie_id):
    """Query streaming availability for a TMDB movie id.

    Returns (full API response dict, its "streamingInfo" sub-dict).
    Exits the program when the API answers 404.
    """
    query = {
        "country": "us",
        "tmdb_id": f"movie/{movie_id}",
        "output_language": "en"
    }
    sa_request = requests.request("GET", sa_url, headers=sa_headers,
                                  params=query)
    if sa_request.status_code == 404:
        print("I'm having trouble finding that movie on streaming. " +
              "Check your spelling and try again.")
        exit()
    sa_response = sa_request.json()
    return sa_response, sa_response["streamingInfo"]
def services_speller(service):
    """Map a streaming-service API key to its proper display name.

    Unknown keys are returned unchanged.

    fix: replaced a nine-branch if/elif chain with a dict lookup — same
    mapping, far easier to extend.
    """
    display_names = {
        "hbo": "HBO Max",
        "hulu": "Hulu",
        "prime": "Amazon Prime",
        "netflix": "Netflix",
        "disney": "Disney+",
        "apple": "Apple TV+",
        "paramount": "Paramount+",
        "starz": "STARZ",
        "showtime": "Showtime",
    }
    return display_names.get(service, service)
def main():
    """Prompt for a movie, print its TMDB info, then list where it streams."""
    args = get_args()
    movie = input("Enter a movie: ")
    movie_id, movie_title, movie_release, movie_rating = tmdb_lookup(
        tmdb_url, tmdb_headers, movie, args)
    print(f"\n{movie_title} ({movie_release})")
    print(f"https://themoviedb.org/movie/{movie_id}")
    print(f"Rating: {movie_rating}\n")
    sa_response, services = sa_lookup(sa_url, sa_headers, movie_id)
    if not services:
        print("Streaming not available :(")
    for service in services:
        us_info = sa_response["streamingInfo"][service]["us"]
        leaving_epoch = us_info["leaving"]
        leaving_date = datetime.fromtimestamp(
            int(leaving_epoch)).strftime('%Y-%m-%d')
        print(f"Available on {services_speller(service)}")
        # A zero epoch means "not leaving"; only show a date otherwise.
        if leaving_epoch != 0:
            print(f"Will be leaving on {leaving_date}")
        print(f"Watch here: {us_info['link']}\n")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,131 @@
#!/usr/bin/python3
"""
This script gets uploaded files from a Netlify Forms submission, renames them, and uploads them to a Nextcloud folder.
I originally used it to download vaccine cards that my wedding guests submitted and move them to a shared folder.
Required Packages:
pip install requests
pip install webdavclient3
"""
import requests
from webdav3.client import Client
import urllib.request
import os
import shutil
### USER VARIABLES ###
# Netlify
# NOTE(review): secrets are expected inline here — prefer environment
# variables so they never land in version control.
USERNAME=""
OAUTH_TOKEN=""
SITE_ID=""
FORM_ID=""
# Nextcloud
NEXTCLOUD_DIR = ""
NEXTCLOUD_USER = ""
NEXTCLOUD_PASS = ""
NEXTCLOUD_URL = ""
#### DON'T EDIT BELOW THIS LINE ####
# Netlify API calls
# NOTE(review): both the Netlify fetch and the WebDAV listing below run at
# import time, so importing this module requires network access.
headers = {'Authorization': 'Bearer ' + OAUTH_TOKEN , 'User-Agent': 'MyApp (' + USERNAME + ')'}
form_submissions = requests.get(f"https://api.netlify.com/api/v1/sites/{SITE_ID}/forms/{FORM_ID}/submissions", headers=headers).json()
# name -> uploaded card URL; filled by build_dict().
vaccine_cards = {}
webdav_options = {
'webdav_hostname': NEXTCLOUD_URL,
'webdav_login': NEXTCLOUD_USER,
'webdav_password': NEXTCLOUD_PASS
}
client = Client(webdav_options)
nc = client.list(NEXTCLOUD_DIR)
# First listing entry is the directory itself; skip it.
existing_cards = nc[1:]
new_cards = []
all_cards = []
#### FUNCTIONS ####
#### FUNCTIONS ####
def build_dict():
    """Populate the global vaccine_cards map: submitter name -> card URL."""
    for submission in form_submissions:
        data = submission["data"]
        vaccine_cards[data["name"]] = data["vaccine_card"]["url"]
def card_sizes_netlify():
    """Return {output_filename: Content-Length} for every card on Netlify.

    Filenames are normalized the same way download_cards() names them:
    stripped, spaces -> underscores, extension from the content subtype.
    """
    netlify_cards = {}
    for name, card_url in vaccine_cards.items():
        header_info = urllib.request.urlopen(card_url).headers
        extension = "." + str(header_info.get_content_subtype())
        output_file = name.strip().replace(' ', '_') + extension
        netlify_cards[output_file] = header_info['Content-Length']
    return netlify_cards
def card_sizes_nextcloud():
    """Return {filename: size} for every card already in the Nextcloud dir."""
    nextcloud_cards = {}
    for card in existing_cards:
        nextcloud_cards[card] = client.info(NEXTCLOUD_DIR + card)['size']
    return nextcloud_cards
def download_cards():
    """Download cards from Netlify that aren't already in Nextcloud.

    Appends every normalized filename to the global all_cards, records
    genuinely new ones in new_cards, and writes those files under tmp/.

    fixes: close the HEAD-style urlopen response (was leaked); replace
    the exists()/makedirs() race with makedirs(exist_ok=True); drop the
    redundant `else: continue`.
    """
    print("Downloading cards from Netlify...")
    for name, card in vaccine_cards.items():
        with urllib.request.urlopen(card) as response:
            info = response.headers
        extension = "." + str(info.get_content_subtype())
        output_file = name.strip().replace(' ', '_') + extension
        all_cards.append(output_file)
        if output_file not in existing_cards:
            new_cards.append(output_file)
            file_download = requests.get(card, stream=True)
            os.makedirs('tmp/', exist_ok=True)
            print(output_file)
            with open(f'tmp/{output_file}', 'wb') as f:
                for chunk in file_download.iter_content(2000):
                    f.write(chunk)
def upload_cards():
    """Upload every newly downloaded card from tmp/ to the Nextcloud dir."""
    total = len(new_cards)
    uploaded = 0
    print("")
    print("Uploading cards to Nextcloud...")
    for card in os.listdir("tmp"):
        # tmp/ may hold leftovers from earlier runs; only push new cards.
        if card not in new_cards:
            continue
        uploaded += 1
        print(f"Uploading card {uploaded} of {total}")
        client.upload_sync(remote_path=f'{NEXTCLOUD_DIR}/{card}',
                           local_path=f"tmp/{card}")
    print("Done!")
def main():
    """Sync vaccine cards: Netlify form uploads -> Nextcloud folder.

    fix: replaced the unidiomatic `== True` comparison with plain
    truthiness.
    """
    build_dict()
    print("Checking for new vaccine cards...")
    # Size mismatch between the two sides means something new or changed.
    if card_sizes_netlify() != card_sizes_nextcloud():
        download_cards()
    else:
        print("Nothing new to download!")
    if new_cards:
        upload_cards()
    if os.path.exists('tmp/'):
        print("")
        print("Cleaning up...")
        shutil.rmtree('tmp/')


if __name__ == '__main__':
    main()