mirror of https://github.com/skoobasteeve/useful-scripts.git
synced 2026-03-20 15:38:56 +00:00

Compare commits: errors-and ... 9df2412231 (15 commits)
| SHA1 |
|---|
| 9df2412231 |
| 7d8d312d6d |
| 5788dd5a5f |
| 34fa70a9d8 |
| 0822221190 |
| fb99dafe67 |
| 50cfd3a115 |
| 11a2f3ce40 |
| 82bcf4079a |
| 331ef142e2 |
| 4a8d849c0a |
| 5bae92bf42 |
| 2a39877706 |
| e6f4936468 |
| a2c20bb3d6 |
README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# Ray's Useful Scripts
A collection of scripts that have helped me at home and in the office.
ansible/playbook-snmp.yml (new file, 150 lines)
@@ -0,0 +1,150 @@
---
# Expects snmpd.conf and snmpd_arm.conf in same directory
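# Example invocation (hypothetical inventory path; the Pi-hole API keys are
# passed as extra vars so they are never stored in the playbook):
#   ansible-playbook playbook-snmp.yml -i inventory.ini \
#     -e "pihole01_key=XXXX pihole02_key=XXXX"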

- name: configure snmp
  hosts: active
  remote_user: raylyon
  become: yes
  vars:
    pihole01_key: ""
    pihole02_key: ""

  tasks:
    - name: get service facts
      service_facts:

    - name: get package facts
      ansible.builtin.package_facts:
        manager: auto

    - name: check for pihole
      ansible.builtin.stat:
        path: "/usr/local/bin/pihole"
      register: pihole

    - name: install latest snmpd - debian
      package: name=snmpd state=latest
      when: ansible_os_family == "Debian"

    - name: install latest snmpd - centos
      package: name=net-snmp state=latest
      when: ansible_os_family == "RedHat"

    - name: install latest jq
      package: name=jq state=latest

    - name: copy snmpd config x86
      copy:
        src: snmpd.conf
        dest: "/etc/snmp/snmpd.conf"
      when: ansible_architecture == "x86_64"

    - name: copy snmpd config arm
      copy:
        src: snmpd_arm.conf
        dest: "/etc/snmp/snmpd.conf"
      when: ansible_architecture == "armv6l"

    - name: fix extend serial permissions
      ansible.builtin.file:
        path: "/sys/devices/virtual/dmi/id/product_serial"
        mode: '444'
      when: ansible_architecture == "x86_64"

    - name: cron job for extend serial permissions
      ansible.builtin.lineinfile:
        path: /etc/crontab
        line: "@reboot chmod 444 /sys/devices/virtual/dmi/id/product_serial"
      when: ansible_architecture == "x86_64"

    - name: download script for extend distro
      ansible.builtin.get_url:
        url: "https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/distro"
        dest: "/usr/bin/distro"
        mode: '755'

    - name: download script for extend osupdates
      ansible.builtin.get_url:
        url: "https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/osupdate"
        dest: "/etc/snmp/osupdate"
        mode: '755'

    - name: download script for extend zfs
      ansible.builtin.get_url:
        url: "https://github.com/librenms/librenms-agent/raw/master/snmp/zfs-linux"
        dest: "/etc/snmp/zfs-linux"
        mode: '755'
      when: "'zfs-zed' in ansible_facts.packages"

    - name: download script for extend docker
      ansible.builtin.get_url:
        url: "https://github.com/librenms/librenms-agent/raw/master/snmp/docker-stats.sh"
        dest: "/etc/snmp/docker-stats.sh"
        mode: '755'
      when: "'docker' in services"

    - name: download script for extend pihole
      ansible.builtin.get_url:
        url: "https://github.com/librenms/librenms-agent/raw/master/snmp/pi-hole"
        dest: "/etc/snmp/pi-hole"
        mode: '755'
      when: pihole.stat.exists

    - name: add api key to pihole script for pihole01
      ansible.builtin.lineinfile:
        path: "/etc/snmp/pi-hole"
        regexp: '^API_AUTH_KEY='
        line: 'API_AUTH_KEY="{{ pihole01_key }}"'
        backrefs: yes
      when: ansible_hostname == "pihole01"

    - name: add api key to pihole script for pihole02
      ansible.builtin.lineinfile:
        path: "/etc/snmp/pi-hole"
        regexp: '^API_AUTH_KEY='
        line: 'API_AUTH_KEY="{{ pihole02_key }}"'
        backrefs: yes
      when: ansible_hostname == "pihole02"

    - name: enable extend nfs-server
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend nfs-server /bin/cat /proc/net/rpc/nfsd"
      when: "'nfs-kernel-server' in ansible_facts.services"

    - name: enable extend zfs
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend zfs '/usr/bin/sudo /etc/snmp/zfs-linux'"
      when: "'zfs-zed' in ansible_facts.packages"

    - name: update sudoers file for extend zfs
      ansible.builtin.lineinfile:
        path: "/etc/sudoers"
        line: "Debian-snmp ALL=(ALL) NOPASSWD: /etc/snmp/zfs-linux"
      when: "'zfs-zed' in ansible_facts.packages"

    - name: enable extend docker
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend docker /usr/bin/sudo /etc/snmp/docker-stats.sh"
      when: "'docker' in services"

    - name: enable extend pihole
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend pi-hole /etc/snmp/pi-hole"
      when: pihole.stat.exists

    - name: update sudoers file for extend docker
      ansible.builtin.lineinfile:
        path: "/etc/sudoers"
        line: "Debian-snmp ALL=(ALL) NOPASSWD: /etc/snmp/docker-stats.sh"
      when: "'docker' in services"

    - name: enable extend osupdates
      ansible.builtin.lineinfile:
        path: "/etc/snmp/snmpd.conf"
        line: "extend osupdate /etc/snmp/osupdate"
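
    # In the ExecStart line below, "-I -smux,mteTrigger,mteTriggerConf" keeps snmpd
    # from loading those modules, and "-LS4d -Lf /dev/null" routes logging to syslog
    # instead of a log file (an assumption worth checking against snmpd(8) for your
    # net-snmp version).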
    - name: set ExecStart options in service file - ubuntu
      ansible.builtin.lineinfile:
        path: "/lib/systemd/system/snmpd.service"
        regexp: '^ExecStart='
        line: "ExecStart=/usr/sbin/snmpd -LS4d -Lf /dev/null -u Debian-snmp -g Debian-snmp -I -smux,mteTrigger,mteTriggerConf -f"
        backrefs: yes
      when: ansible_os_family == "Debian"

    - name: reload systemd configs - ubuntu
      ansible.builtin.systemd:
        daemon_reload: yes
      when: ansible_os_family == "Debian"

    - name: set snmpdopts - centos
      ansible.builtin.lineinfile:
        path: "/etc/sysconfig/snmpd"
        regexp: '^# OPTIONS=|^OPTIONS='
        line: 'OPTIONS="-LS4-6d"'
      when: ansible_os_family == "RedHat"

    - name: enable and restart snmpd.service
      ansible.builtin.systemd:
        state: restarted
        enabled: yes
        name: snmpd

    - name: verify the snmpd service is running
      ansible.builtin.systemd:
        state: started
        name: snmpd

ansible/snmpd.conf (new file, 27 lines)
@@ -0,0 +1,27 @@
# Change RANDOMSTRINGGOESHERE to your preferred SNMP community string
com2sec readonly default RANDOMSTRINGGOESHERE

group MyROGroup v2c readonly
view all included .1 80
access MyROGroup "" any noauth exact all none none

syslocation Home
syscontact Ray Lyon <ray@raylyon.net>

agentAddress udp:161,udp6:[::1]:161
rocommunity RANDOMSTRINGGOESHERE

# OS Distribution Detection
extend distro /usr/bin/distro

# Hardware Detection
# (uncomment for x86 platforms)
extend manufacturer '/bin/cat /sys/devices/virtual/dmi/id/sys_vendor'
extend hardware '/bin/cat /sys/devices/virtual/dmi/id/product_name'
extend serial '/bin/cat /sys/devices/virtual/dmi/id/product_serial'

# (uncomment for ARM platforms)
#extend hardware '/bin/cat /sys/firmware/devicetree/base/model'
#extend serial '/bin/cat /sys/firmware/devicetree/base/serial-number'
#
#
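
Once the playbook restarts snmpd with this config, the extend scripts can be checked end to end over SNMP; a minimal sketch, assuming the net-snmp MIBs are installed on the querying host and using placeholder community and address values:

```sh
# Walk the extend output table (NET-SNMP-EXTEND-MIB); placeholder community and IP
snmpwalk -v2c -c RANDOMSTRINGGOESHERE 192.0.2.10 nsExtendOutput1Table

# Or query a single extend, e.g. the distro script
snmpget -v2c -c RANDOMSTRINGGOESHERE 192.0.2.10 'NET-SNMP-EXTEND-MIB::nsExtendOutputFull."distro"'
```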

@@ -1,10 +1,20 @@
 #!/bin/bash
 
-### Help function ###
+### VARIABLES ###
+
+PROGNAME="$(basename "$0")"
+INPUT_SOURCE=$1
+QUALITY_HD=23
+QUALITY_4K=22
+FILEDIR=$(dirname "$INPUT_SOURCE")
+
+
+### FUNCTIONS ###
+
+### Function: Help
 
 Help()
 {
-# Display Help
 echo "This script uses sensible ffmpeg options to batch encode MKV files in a directory to compressed H264 MKVs."
 echo "You can change the CRF parameters in the script, defaults are 23 for HD and 22 for 4K."
 echo
@@ -14,56 +24,77 @@ Help()
 echo "Learn more about FFmpeg's quality settings: https://trac.ffmpeg.org/wiki/Encode/H.264"
 }
 
-### Error function ###
+### Function: Error
 
-PROGNAME="$(basename $0)"
-
 error_exit()
 {
-
-# ----------------------------------------------------------------
-# Function for exit due to fatal program error
-# Accepts 1 argument:
-# string containing descriptive error message
-# ----------------------------------------------------------------
-
 echo "${PROGNAME}: ${1:-"Unknown Error"}" 1>&2
 exit 1
 }
 
-# Example call of the error_exit function. Note the inclusion
-# of the LINENO environment variable. It contains the current
-# line number.
-
-### Script ###
+# Function: encode each file in the directory with a different CRF setting based on resolution
 
-DIRECTORY=$1
-QUALITY_HD=23
-QUALITY_4K=22
+folder_encode () {
+    if [ ! -d "$INPUT_SOURCE/output" ]; then
+        mkdir "$INPUT_SOURCE/output"
+    fi
+
+    for FILE in "$INPUT_SOURCE"/*.*; do
+        RES=$(ffprobe -v error -select_streams v:0 -show_entries stream=width -of default=noprint_wrappers=1:nokey=1 "$FILE")
+        FILENAME=$(basename "$FILE")
+        if [[ $RES -gt 1920 ]]; then
+            echo "File is 4K or higher, encoding using CRF $QUALITY_4K"
+            ffmpeg -i "$FILE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_4K" -maxrate 25M -bufsize 25M -c:a copy "$INPUT_SOURCE"/output/"$FILENAME" || echo "ERROR Line $LINENO: File not encoded, unknown error occurred." 1>&2
+        elif [[ $RES -le 1920 ]] && [[ -n $RES ]]; then
+            echo "File is HD or lower, encoding using CRF $QUALITY_HD"
+            ffmpeg -i "$FILE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_HD" -maxrate 15M -bufsize 15M -c:a copy "$INPUT_SOURCE"/output/"$FILENAME" || echo "ERROR Line $LINENO: File not encoded, unknown error occurred." 1>&2
+        else
+            echo "ERROR Line $LINENO: Source file $FILE is not a valid video file" 1>&2
+            echo "Skipping..."
+        fi
+    done
+}
+
+# Function: encode a single file with a different CRF setting based on resolution
+
+file_encode () {
+    if [ ! -d "$FILEDIR/output" ]; then
+        mkdir "$FILEDIR/output"
+    fi
+
+    FILENAME=$(basename "$INPUT_SOURCE")
+    RES=$(ffprobe -v error -select_streams v:0 -show_entries stream=width -of default=noprint_wrappers=1:nokey=1 "$INPUT_SOURCE")
+    if [[ $RES -gt 1920 ]]; then
+        echo "File is 4K or higher, encoding using CRF $QUALITY_4K"
+        ffmpeg -i "$INPUT_SOURCE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_4K" -maxrate 25M -bufsize 25M -c:a copy "$FILEDIR"/output/"$FILENAME" || echo "ERROR Line $LINENO: File not encoded, unknown error occurred." 1>&2
+    elif [[ $RES -le 1920 ]] && [[ -n $RES ]]; then
+        echo "File is HD or lower, encoding using CRF $QUALITY_HD"
+        ffmpeg -i "$INPUT_SOURCE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_HD" -maxrate 15M -bufsize 15M -c:a copy "$FILEDIR"/output/"$FILENAME" || echo "ERROR Line $LINENO: File not encoded, unknown error occurred." 1>&2
+    else
+        echo "ERROR Line $LINENO: Source file $INPUT_SOURCE is not a valid video file" 1>&2
+    fi
+}
+
+### SCRIPT ###
 
-# Check if source directory is provided
-if [ -z "$1" ] || [ ! -d "$1" ]; then
+# Check if source input is provided
+if [ -z "$1" ]; then
 printf "ERROR: You must specify a source directory\n\n" 1>&2
 Help
 exit 1
 fi
 
-# Create output folder within source directory
-if [ ! -d "$DIRECTORY/output" ]; then
-mkdir "$DIRECTORY/output"
-fi
-
-# Encode each file in the directory with different CRF setting based on resolution
-for FILE in "$DIRECTORY"/*.*; do
-RES=$(ffprobe -v error -select_streams v:0 -show_entries stream=width -of default=noprint_wrappers=1:nokey=1 "$FILE")
-FILENAME=$(basename "$FILE")
-if [[ $RES -gt 1920 ]]; then
-ffmpeg -i "$FILE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_4K" -c:a copy "$DIRECTORY"/output/"$FILENAME"
-elif [[ $RES -le 1920 ]]; then
-ffmpeg -i "$FILE" -c:v libx264 -preset slow -tune film -crf "$QUALITY_HD" -c:a copy "$DIRECTORY"/output/"$FILENAME"
-fi
-done || error_exit "$LINENO: An error has occurred."
+# Run function based on file or folder input
+if [ -f "$1" ]; then
+    file_encode || error_exit "$LINENO: An error has occurred." 1>&2
+elif [ -d "$1" ]; then
+    folder_encode || error_exit "$LINENO: An error has occurred." 1>&2
+else
+    error_exit "$LINENO: Not a valid source" 1>&2
+fi
+
+echo "File(s) encoded successfully!"
 
 exit 0
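
Usage follows from the argument check above; a quick sketch, using a hypothetical file name since this view does not show one:

```sh
# Encode every video in a directory; output lands in <dir>/output/
./encode.sh /mnt/media/rips

# Encode a single file; output lands in an output/ folder beside it
./encode.sh /mnt/media/rips/movie.mkv
```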

foxpass/foxpass_radius_logs.py (new file, 150 lines)
@@ -0,0 +1,150 @@
#!/usr/local/bin/python3

"""
This script pulls RADIUS logs from your Foxpass instance and allows you to parse them by date, user, IP, or connection outcome.
Logs can be printed (in pretty colors) or exported in CSV format.

Required packages:
    pip install requests

To run:
    python foxpass_radius_logs.py

By default the script will print color-coded RADIUS logs from the last five days. You can use the optional arguments below:
    --hours    - How far back to show the logs, in hours.
    --user     - Filter by user.
    --location - Filter by RADIUS client, based on the items defined in the OFFICE_IPS dict.
    --outcome  - Filter by outcome of the connection; specify True or False.
    --csv      - Output the logs to a CSV file; specify the filename and path.
"""
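# Example invocation (hypothetical filter values):
#   python3 foxpass_radius_logs.py --hours 12 --user jsmith --outcome False --csv failed_auths.csv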

from datetime import datetime, timedelta, timezone
import requests
import argparse
import csv

##### EDIT THESE #####
FOXPASS_API_TOKEN = ""

# RADIUS clients, can be called with the --location argument.
OFFICE_IPS = {
    "office1": "",
    "office2": "",
    "office3": "",
}

# "YYYY-MM-DDTHH:MMZ". STARTDATE defaults to 5 days ago and can be changed with the --hours argument; ENDDATE is the current day/time in UTC.
STARTDATE = (datetime.now(timezone.utc) - timedelta(days=5)).strftime('%Y-%m-%dT%H:%MZ')
ENDDATE = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%MZ')

FOXPASS_URL = "https://api.foxpass.com/v1/logs/radius/"
HEADERS = {'Authorization': 'Token ' + FOXPASS_API_TOKEN}
PAGEREQUEST = requests.post(FOXPASS_URL, json={"from": STARTDATE, "to": ENDDATE}, headers=HEADERS).json()
PAGES = PAGEREQUEST["numPages"]
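# Note: PAGES is computed once at import time from the default five-day window,
# so a narrower --hours range set later in main() still walks the same page count.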

class bcolors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


def get_args():
    parser = argparse.ArgumentParser(description='Pull and parse RADIUS logs from your Foxpass environment')
    parser.add_argument('--user', help='Filter logs by username')
    parser.add_argument('--outcome', help='Filter logs by connection outcome: True or False')
    parser.add_argument('--hours', type=int, help='How far back to check the logs in hours')
    parser.add_argument('--location', help='Filter logs by location, as defined in the OFFICE_IPS dict')
    parser.add_argument('--csv', help='Export a CSV of the log data to the specified filename and path')
    return parser.parse_args()


# Builds an if statement to filter the logs based on user arguments
def build_query(username=None, outcome=None, location=None):
    query_string = ""
    if username != None:
        query_string += f"log['username']=='{username}' and "
    if location != None:
        query_string += f"log['ipAddress']=='{location}' and "
    if outcome != None:
        query_string += f"log['success']=={outcome}"
    if query_string != "":
        # If the string ends with "and", remove it before returning.
        if query_string[-2] == 'd':
            query_string = query_string[:-5]
    return query_string
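# Illustrative example (hypothetical values):
#   build_query("jsmith", "False", "203.0.113.7") returns
#   "log['username']=='jsmith' and log['ipAddress']=='203.0.113.7' and log['success']==False"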

# Pulls logs from Foxpass and stores them
def get_logs():
    p = 0
    logs_full = []
    while p < PAGES:
        p += 1
        request = requests.post(FOXPASS_URL, json={"from": STARTDATE, "to": ENDDATE, "page": p, "ascending": True}, headers=HEADERS).json()
        request_clean = request["data"]
        logs_full.append(request_clean)
    return logs_full


# Prints or exports all logs for the specified time period
def lookup_all(logs, csv_arg=None, csv_writer=None):
    p = 0
    while p < PAGES:
        for log in logs[p]:
            if csv_arg == None:
                print_logs(log)
            elif csv_arg != None:
                csv_export(log, csv_writer)
        p += 1
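
# Note: lookup_filter() below eval()s the expression built by build_query(); the
# expression refers to a variable literally named "log", so it only resolves
# inside a loop whose loop variable is also named "log".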

# Prints or exports logs based on user-provided filter arguments for the specified time period
def lookup_filter(logs, if_statement, csvarg=None, csv_writer=None):
    p = 0
    while p < PAGES:
        for log in logs[p]:
            if eval(if_statement) and csvarg == None:
                print_logs(log)
            elif eval(if_statement) and csvarg != None:
                csv_export(log, csv_writer)
        p += 1


def csv_export(log, csv_writer):
    csv_writer.writerow([log["timestamp"], log["username"], log["ipAddress"], log["message"], log["success"]])


# Determines start time based on the --hours argument
def start_time(hours):
    d = datetime.now(timezone.utc) - timedelta(hours=hours)
    return d.strftime('%Y-%m-%dT%H:%MZ')


def print_logs(sourcedict):
    print(bcolors.OKCYAN + sourcedict["timestamp"], bcolors.OKGREEN + sourcedict["username"], bcolors.WARNING + sourcedict["ipAddress"], bcolors.FAIL + sourcedict["message"], bcolors.OKBLUE + "Success:", sourcedict["success"], bcolors.ENDC)


def main():
    global STARTDATE
    args = get_args()

    if args.csv != None:
        csv_open = open(args.csv, 'w', newline='')
        csv_writer = csv.writer(csv_open)
        csv_writer.writerow(["TIMESTAMP (UTC)", "USERNAME", "IP ADDRESS", "MESSAGE", "SUCCESS"])
    else:
        csv_writer = None

    if args.hours:
        STARTDATE = start_time(args.hours)

    if args.location != None:
        location_ip = OFFICE_IPS[args.location]
    else:
        location_ip = None

    if_statement = build_query(args.user, args.outcome, location_ip)

    logs = get_logs()

    if if_statement == "":
        lookup_all(logs, args.csv, csv_writer)
    else:
        lookup_filter(logs, if_statement, args.csv, csv_writer)


if __name__ == '__main__':
    main()

netlify/netlify-form-downloader.py (new file, 131 lines)
@@ -0,0 +1,131 @@
#!/usr/bin/python3

"""
This script gets uploaded files from a Netlify Forms submission, renames them, and uploads them to a Nextcloud folder.
I originally used it to download vaccine cards that my wedding guests submitted and move them to a shared folder.

Required packages:
    pip install requests
    pip install webdavclient3
"""
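# Example: run on a schedule to keep the Nextcloud folder in sync
# (hypothetical cron entry and script path):
#   */30 * * * * /usr/bin/python3 /opt/scripts/netlify-form-downloader.py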

import requests
from webdav3.client import Client
import urllib.request
import os
import shutil

### USER VARIABLES ###

# Netlify
USERNAME = ""
OAUTH_TOKEN = ""
SITE_ID = ""
FORM_ID = ""

# Nextcloud
NEXTCLOUD_DIR = ""
NEXTCLOUD_USER = ""
NEXTCLOUD_PASS = ""
NEXTCLOUD_URL = ""

#### DON'T EDIT BELOW THIS LINE ####

# Netlify API calls
headers = {'Authorization': 'Bearer ' + OAUTH_TOKEN, 'User-Agent': 'MyApp (' + USERNAME + ')'}
form_submissions = requests.get(f"https://api.netlify.com/api/v1/sites/{SITE_ID}/forms/{FORM_ID}/submissions", headers=headers).json()
vaccine_cards = {}
webdav_options = {
    'webdav_hostname': NEXTCLOUD_URL,
    'webdav_login': NEXTCLOUD_USER,
    'webdav_password': NEXTCLOUD_PASS
}
client = Client(webdav_options)
nc = client.list(NEXTCLOUD_DIR)
existing_cards = nc[1:]
new_cards = []
all_cards = []


#### FUNCTIONS ####

def build_dict():
    for entry in form_submissions:
        name = entry["data"]["name"]
        card_img = entry["data"]["vaccine_card"]["url"]
        vaccine_cards[name] = card_img


def card_sizes_netlify():
    netlify_cards = {}
    for name, card in vaccine_cards.items():
        response = urllib.request.urlopen(card)
        info = response.headers
        filesize = info['Content-Length']
        extension = "." + str(info.get_content_subtype())
        name_clean = name.strip()
        output_file = name_clean.replace(' ', '_') + extension
        netlify_cards[output_file] = filesize
    return netlify_cards


def card_sizes_nextcloud():
    nextcloud_cards = {}
    for card in existing_cards:
        card_info = client.info(NEXTCLOUD_DIR + card)
        filesize = card_info['size']
        nextcloud_cards[card] = filesize
    return nextcloud_cards
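
# Note: main() compares the filename -> size dicts returned by card_sizes_netlify()
# and card_sizes_nextcloud(); any mismatch triggers a fresh download pass, and only
# files absent from Nextcloud are then uploaded.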

def download_cards():
    print("Downloading cards from Netlify...")
    for name, card in vaccine_cards.items():
        response = urllib.request.urlopen(card)
        info = response.headers
        extension = "." + str(info.get_content_subtype())
        name_clean = name.strip()
        output_file = name_clean.replace(' ', '_') + extension
        all_cards.append(output_file)
        if output_file not in existing_cards:
            new_cards.append(output_file)
            file_download = requests.get(card, stream=True)
            if os.path.exists('tmp/') == False:
                os.makedirs('tmp/')
            print(output_file)
            with open(f'tmp/{output_file}', 'wb') as f:
                for chunk in file_download.iter_content(2000):
                    f.write(chunk)
        else:
            continue


def upload_cards():
    num_cards = len(new_cards)
    current_card = 0
    print("")
    print("Uploading cards to Nextcloud...")
    for card in os.listdir("tmp"):
        if card in new_cards:
            current_card += 1
            print(f"Uploading card {current_card} of {num_cards}")
            client.upload_sync(remote_path=f'{NEXTCLOUD_DIR}/{card}', local_path=f"tmp/{card}")
        else:
            continue
    print("Done!")


def main():
    build_dict()
    print("Checking for new vaccine cards...")
    if card_sizes_netlify() != card_sizes_nextcloud():
        download_cards()
    else:
        print("Nothing new to download!")
    if new_cards:
        upload_cards()
    if os.path.exists('tmp/') == True:
        print("")
        print("Cleaning up...")
        shutil.rmtree('tmp/')


if __name__ == '__main__':
    main()