initial commit

This commit is contained in:
2021-03-12 13:24:59 -05:00
parent accffd56ae
commit b9b98f9e2f
7 changed files with 373 additions and 0 deletions

View File

@@ -0,0 +1,135 @@
#!/bin/bash
#### README ####
#
# This script is intended to run as a cron job on an always-on macOS machine.
# It works as part of an overall git-focused workflow where the master/main branch is the source-of-truth for your AutoPKG recipe overrides.
#
# It does the following:
# 1. Git pull from the master/main branch of recipe overrides repository.
# 2. Updates your AutoPkgr recipe list for scheduled runs.
# 3. Sends notifications to Slack for new, deleted, and modified recipes.
#
#### REQUIREMENTS ####
#
# * Always-on macOS machine
# * Git
# * AutoPkgr and all dependencies
#
#### USER VARIABLES ####
# Github repo where your AutoPkg recipe overrides are stored
# (local path to the clone — `git -C "$github_repo" pull` is run against it)
github_repo=""
# Folder containing the .recipe override files
# NOTE(review): presumably a path inside $github_repo — confirm against your layout
overrides_folder=""
# AutoPkgr recipe list file, default is "$HOME/Library/Application Support/AutoPkgr/recipe_list.txt"
recipe_list_file=""
# Generate from your Slack workspace for notifications
slack_webhook_url=""
#### DON'T EDIT BELOW THIS LINE ####
# Scratch copy of the previous recipe list, used to diff old vs. new runs
recipe_list_file_old="/private/tmp/recipe_list.old.txt"
# Newline-separated list of override files; evaluated once at script load,
# so $overrides_folder must be filled in above or find will error out here
recipes="$(find "$overrides_folder" -type f -name "*.recipe")"
# Script name, used as a prefix in error messages
progname="$(basename "$0")"
# Split unquoted expansions on newlines only (recipe paths may contain spaces)
IFS=$'\n'
#### FUNCTIONS ####
# Error handling: log the message to stderr, notify Slack, and abort.
# $1 - error message (defaults to "Unknown Error")
error_exit() {
    local msg="${1:-Unknown Error}"
    echo "${progname}: ${msg}" 1>&2
    # Fix: the original payload was missing the opening quote of the "text"
    # value, producing invalid JSON that Slack rejects.
    # NOTE(review): messages containing double quotes would still break this
    # hand-built JSON — keep error strings quote-free.
    curl -X POST "$slack_webhook_url" -H "Content-type: application/json" --data \
        '{"type": "mrkdwn", "text": "'"${progname}: ${msg}"'"}'
    exit 1
}
# Rebuild the AutoPkgr recipe list from the override files in the repo clone.
# Appends one recipe identifier per line to $recipe_list_file; the caller is
# responsible for removing any stale list first.
update_recipe_list() {
    local override_file identifier
    # $recipes is newline-separated; the global IFS=$'\n' makes this unquoted
    # expansion split on lines only, so paths with spaces survive.
    for override_file in $recipes; do
        # Pull the "Identifier" value out of the override plist.
        identifier=$(xmllint --xpath 'string(//key[.="Identifier"]/following-sibling::string[1])' "$override_file")
        printf '%s\n' "$identifier" >> "$recipe_list_file"
    done
}
# Gets any new recipes added to the list and sends them to #autopkg-alerts in Slack.
# Sets the global $new_recipes (also read later by slack_modified_recipes and
# the final summary printf).
slack_new_recipes() {
    # Lines present in the new list but not in the old one show up as "< ..." in diff.
    new_recipes="$(diff "$recipe_list_file" "$recipe_list_file_old" | grep "< local." | cut -c 3-)"
    # Fix: the original `[[ "$new_recipes" > /dev/null ]]` was a lexicographic
    # string comparison against the literal "/dev/null", not an emptiness test.
    if [[ -n "$new_recipes" ]]; then
        curl -X POST "$slack_webhook_url" -H "Content-type: application/json" --data \
            '{"type": "mrkdwn", "text": ":man_dancing: *New recipes were added to AutoPkgr Prod* :man_dancing:\n\n'"$new_recipes"'"}'
    fi
}
# Gets any recipes that were removed from the list and sends them to
# #autopkg-alerts in Slack. Sets the global $removed_recipes (read by the
# final summary printf).
slack_removed_recipes() {
    # Lines present only in the old list show up as "> ..." in diff.
    removed_recipes="$(diff "$recipe_list_file" "$recipe_list_file_old" | grep "> local." | cut -c 3-)"
    # Fix: `[[ "$x" > /dev/null ]]` was a string comparison, not an emptiness test.
    if [[ -n "$removed_recipes" ]]; then
        curl -X POST "$slack_webhook_url" -H "Content-type: application/json" --data \
            '{"type": "mrkdwn", "text": ":bomb: *Removed recipes from AutoPkgr Prod* :bomb:\n\n'"$removed_recipes"'"}'
    fi
}
# Gets any existing recipes that were modified (but not newly added) and sends
# them to #autopkg-alerts in Slack. Reads the global $new_recipes set by
# slack_new_recipes; sets the global $modified_only (read by the summary printf).
slack_modified_recipes() {
    local modified_recipes modified_recipe modified_recipe_name
    # Fix: use the configured $overrides_folder instead of the hard-coded
    # "$HOME/Github/it-autopkg/RecipeOverrides" path.
    # NOTE(review): "-mtime -10s" is BSD/macOS find unit-suffix syntax — confirm
    # this window matches the cron interval this script runs on.
    modified_recipes="$(find "$overrides_folder" -type f -name "*.recipe" -mtime -10s)"
    modified_recipe_list=()
    for modified_recipe in $modified_recipes; do
        modified_recipe_name=$(xmllint --xpath 'string(//key[.="Identifier"]/following-sibling::string[1])' "$modified_recipe")
        modified_recipe_list+=("$modified_recipe_name")
    done
    # Recently-touched recipes that are NOT in the new-recipe set; "${arr[*]}"
    # joins with the first IFS char (newline), so diff compares line-by-line.
    modified_only=$(diff <(echo "${modified_recipe_list[*]}") <(echo "$new_recipes") | grep "< local." | cut -c 3-)
    # Fix: `[[ "$x" > /dev/null ]]` was a string comparison, not an emptiness test.
    if [[ -n "$modified_only" ]]; then
        curl -X POST "$slack_webhook_url" -H "Content-type: application/json" --data \
            '{"type": "mrkdwn", "text": ":lower_left_ballpoint_pen: *Modified recipes on AutoPkgr Prod* :lower_left_ballpoint_pen:\n\n'"$modified_only"'"}'
    fi
}
### SCRIPT ####
# Pull the latest version of the main branch of the overrides repo
git -C "$github_repo" pull || error_exit "$LINENO: An error has occurred during git pull"
# Snapshot the previous recipe list (if any), rebuild it, then diff and notify.
if [ -f "$recipe_list_file" ]; then
    cp "$recipe_list_file" "$recipe_list_file_old"
    rm "$recipe_list_file"
    update_recipe_list
    slack_new_recipes
    slack_removed_recipes
    slack_modified_recipes
else
    # First run: no prior list exists, so build one and announce its contents.
    # (The original `elif [ ! -f ... ]` plus an unreachable `else` collapsed to
    # a plain else — the two conditions are complementary.)
    update_recipe_list
    recipe_list="$(cat "$recipe_list_file")"
    curl -X POST "$slack_webhook_url" -H "Content-type: application/json" --data \
        '{"type": "mrkdwn", "text": "*A new recipe list was created on AutoPkgr Prod with the following recipes:*\n\n'"$recipe_list"'"}'
fi
# Print results (empty sections are expected on a first run)
printf "\nNew Recipes:\n%s\n\nRemoved Recipes:\n%s\n\nModified Recipes:\n%s\n\n" "$new_recipes" "$removed_recipes" "$modified_only"
# Cleanup
if [ -f "$recipe_list_file_old" ]; then
    rm "$recipe_list_file_old"
fi
exit 0

View File

@@ -0,0 +1,9 @@
#!/bin/sh
# Intended to be used with the onboarding script (https://github.com/skoobasteeve/jamfops/scripts/onboarding.sh)
# The onboarding script drops a single marker file named after the chosen
# team into /tmp/.Onboarding; report that file name as the EA result.
Team=""
for file in /tmp/.Onboarding/*; do
    # Guard against an unmatched glob (no marker files / missing directory),
    # which would otherwise leave the literal "*" in $file and report
    # "<result>*</result>". POSIX sh has no nullglob, so test explicitly.
    [ -e "$file" ] || continue
    Team="${file##*/}"
done
echo "<result>$Team</result>"

View File

@@ -0,0 +1,11 @@
#!/bin/sh
# Locates VM images on a user's machine with the below file extensions and
# lists them out with their size.
# Stream find's output straight to stdout instead of staging it in a
# predictable, world-readable /tmp/vminfo file (symlink-attack prone and
# never cleaned up in the original).
echo "<result>"
find /Users/ -type f \( -name "*.hds" -o -name "*.vmdk" -o -name "*.vdi" -o -name "*.vhd" \) -exec du -sh {} \;
echo "</result>"
exit 0

View File

@@ -0,0 +1,74 @@
#### README ####
#
# This action "tests" your AutoPKG JSS recipes by running them on a macOS machine and uploading them to your JAMF instance via JSSImporter
# I recommend using a sandbox/dev instance for this, which your JAMF rep will happily provide for you on request.
#
#### REQUIREMENTS ####
#
# The below action assumes that your repository contains a RecipeOverrides folder at its root that contains your overrides
# It also assumes you have a file called repo_list.txt in the root of your repository which lists the parent repositories used by your recipes.
#
# This action also references (3) Github repository secrets:
# - JSS_USERNAME
# - JSS_PASSWORD
# - JSS_URL
#
# I HIGHLY RECOMMEND USING A JAMF SANDBOX/DEV ENVIRONMENT
#
####
name: AutoPkg Recipe Test
on:
  pull_request:
    branches:
      - main
jobs:
  AutoPkg:
    runs-on: macos-latest
    timeout-minutes: 15
    steps:
      - name: Checkout it-autopkg
        uses: actions/checkout@v2
        with:
          # Full history so `git diff origin/main` can see the changed recipes
          fetch-depth: 0
      - name: Set env variables
        # Multi-line values cannot be set with `echo "NAME=value"`; GITHUB_ENV
        # requires the heredoc delimiter syntax for them. `|| true` keeps the
        # step green when the PR touches no .recipe files.
        run: |
          {
            echo "NEW_RECIPES<<EOF"
            git diff --name-only origin/main | grep ".*\.recipe$" | sort -u || true
            echo "EOF"
          } >> "$GITHUB_ENV"
      - name: Install AutoPkg
        run: |
          curl -L https://github.com/autopkg/autopkg/releases/download/v2.1/autopkg-2.1.pkg --output /tmp/autopkg.pkg
          sudo installer -pkg /tmp/autopkg.pkg -target /
      - name: Install JSSImporter
        run: |
          curl -L https://github.com/jssimporter/JSSImporter/releases/download/v1.1.2/jssimporter-1.1.2.pkg --output /tmp/jssimporter.pkg
          sudo installer -pkg /tmp/jssimporter.pkg -target /
      - name: Configure AutoPkg
        env:
          JSS_USERNAME: ${{ secrets.JSS_USERNAME }}
          JSS_PASSWORD: ${{ secrets.JSS_PASSWORD }}
          JSS_URL: ${{ secrets.JSS_URL }}
        run: |
          defaults write com.github.autopkg RECIPE_OVERRIDE_DIRS "$(pwd)/RecipeOverrides/"
          defaults write com.github.autopkg RECIPE_REPO_DIR "$(pwd)/repos/"
          defaults write com.github.autopkg FAIL_RECIPES_WITHOUT_TRUST_INFO -bool YES
          defaults write com.github.autopkg JSS_URL "$JSS_URL"
          defaults write com.github.autopkg API_USERNAME "$JSS_USERNAME"
          defaults write com.github.autopkg API_PASSWORD "$JSS_PASSWORD"
      - name: Clone AutoPkg parent repos
        run: |
          while IFS= read -r repo; do
            autopkg repo-add "$repo" && autopkg repo-update "$repo"
          done < repo_list.txt
      - name: Verify trust info
        # NEW_RECIPES is newline-separated; iterate unquoted so it word-splits.
        # (The original `for recipe in "$NEW_RECIPES"` ran exactly once with the
        # entire list passed as a single argument.)
        run: |
          for recipe in $NEW_RECIPES; do autopkg verify-trust-info -vv "$recipe"; done
      - name: Run recipes
        run: |
          for recipe in $NEW_RECIPES; do autopkg run -vv "$recipe" --key STOP_IF_NO_JSS_UPLOAD=False; done

59
scripts/jamf-app-usage.py Normal file
View File

@@ -0,0 +1,59 @@
#!/usr/local/bin/python3
#### README ####
#
# Uses the JAMF API to pull application usage for all computers in your environment and export it in a CSV
# Can take a long time depending on your environment and selected date range.
#
#### REQUIREMENTS ####
#
# * Python3 'requests' module (pip3 install requests)
# * JAMF user credentials with read access to computer application usage
#
#### USER VARIABLES ####
# Base URL of your JAMF instance — no trailing / please :)
jamf_url = ''
# JAMF account with read access to computer application usage
api_user = ''
api_password = ''
# Inclusive date range, e.g. date_range = '2021-03-09_2021-03-10'
date_range = ''
import requests
from requests.auth import HTTPBasicAuth
import csv

# Build a (name, id) pair for every computer in the JAMF inventory, then fetch
# per-computer application usage for the configured date range and stream every
# (computer, date, app, open-minutes, foreground-minutes) row into a CSV.
id_computer_list = []
get_computers = requests.get(
    "%s/JSSResource/computers" % jamf_url,
    auth=HTTPBasicAuth(api_user, api_password),
    headers={'Accept': 'application/json'},
).json()
for c in get_computers['computers']:
    id_computer_list.append([c['name'], c['id']])

# Use a context manager so the CSV file is always flushed and closed.
# (The original ended with `CSVExport.close` — missing parentheses, so the
# close was never actually called.)
with open('JamfAppUsage.csv', 'w', newline='') as csv_export:
    writer = csv.writer(csv_export)
    for comp in id_computer_list:
        get_usage = requests.get(
            "%s/JSSResource/computerapplicationusage/id/%s/%s" % (jamf_url, comp[1], date_range),
            auth=HTTPBasicAuth(api_user, api_password),
            headers={'Accept': 'application/json'},
        ).json()
        try:
            for u in get_usage['computer_application_usage']:
                for a in u['apps']:
                    writer.writerow([comp[0], u['date'], a['name'], a['open'], a['foreground']])
        except Exception as x:
            # Best-effort: a malformed or missing usage payload for one
            # computer shouldn't abort the whole export.
            print(x)

View File

@@ -0,0 +1,35 @@
#!/bin/sh
#### README ####
#
# The below script moves computer groups from one JAMF instance to another. Useful when setting up a sandbox environment that needs to match prod.
#
#### REQUIREMENTS ####
#
# * JAMF user credentials with read/write access to computer groups
#
#### USER VARIABLES ####
# No trailing / please :)
# Source (e.g. prod) instance the groups are read from
SOURCE_JAMF_URL=""
# Destination (e.g. sandbox) instance the groups are written to
DEST_JAMF_URL=""
# Credentials with read access to computer groups on the source
SOURCE_API_USER=""
SOURCE_API_PASS=""
# Credentials with write access to computer groups on the destination
DEST_API_USER=""
DEST_API_PASS=""
# Pulls group ID numbers from the JAMF source and sorts them numerically.
# Fix: widened `[0-9]{1,4}` to `[0-9]+` so IDs with five or more digits are
# not split into fragments.
PROD_GROUPS=$(curl -X GET "$SOURCE_JAMF_URL/JSSResource/computergroups" -H "accept: application/xml" -u "$SOURCE_API_USER":"$SOURCE_API_PASS" | xml ed -d '//computers' | grep "<id>" | grep -Eo '[0-9]+' | sort -n)
# Fix: the export directory was never created, so every redirect below failed
# on a clean machine.
mkdir -p /tmp/jamf
# Pulls groups from JAMF source and outputs an XML file per group, stripping
# computer membership so only the group definition moves across.
for id in $PROD_GROUPS; do
    curl -X GET "$SOURCE_JAMF_URL/JSSResource/computergroups/id/$id" -H "accept: application/xml" -u "$SOURCE_API_USER":"$SOURCE_API_PASS" | xml ed -d '//computers' > /tmp/jamf/"$id".xml;
done
# Pushes groups to JAMF destination from the previously created XML files.
# POSTing to id/0 lets the destination JAMF assign new IDs.
for group in /tmp/jamf/*; do
    curl -X POST "$DEST_JAMF_URL/JSSResource/computergroups/id/0" -ku "$DEST_API_USER":"$DEST_API_PASS" -T "$group";
done
exit 0

50
scripts/jamf-onboarding.sh Executable file
View File

@@ -0,0 +1,50 @@
#!/bin/bash
#### README ####
#
# This script is intended to be used by IT staff who are manually configuring computers for new hires.
# It must be run on the user's computer and can be invoked automatically during enrollment or manually via Self Service.
#
# The goal is to assign the computer to a user and then install packages and configurations specific to their department/team.
# It does this by giving computers a temporary Extension Attribute that adds them to a Smart Computer Group in JAMF.
#
#### REQUIREMENTS ####
#
# * Corresponding Extension Attribute that locates the temp file (https://github.com/skoobasteeve/jamfops/ext-attributes/onboarding-group-name.sh)
# * Smart Computer Groups in JAMF that add computers with the corresponding extension attribute.
#
# DON'T FORGET TO EDIT THE GROUP LIST IN THE "#### EDIT THIS LIST ####" SECTION BELOW
# Get user email via a prompt; abort if the technician hits Cancel (osascript
# exits non-zero on a cancelled `display dialog`).
results=$( /usr/bin/osascript -e "display dialog \"Assign computer to user:\" default answer \"Email address...\" buttons {\"Cancel\",\"OK\"} default button {\"OK\"}" ) || exit 1
username=$( echo "$results" | /usr/bin/awk -F "text returned:" '{print $2}' )
# Create temporary directory and prompt user to choose onboarding group
tempdir="/tmp/.Onboarding"
if [ -d "$tempdir" ]; then
    rm -rf "$tempdir"
fi
mkdir "$tempdir"
#### EDIT THIS LIST ####
# This is where you would add the individual groups at your org. Use whatever
# makes sense for you. Note the formatting (¬ is the AppleScript line
# continuation) and don't break it.
groupchoice=$( osascript -e 'return choose from list {¬
"DEPARTMENT - TEAM",¬
"DEPARTMENT 2 - TEAM 2",¬
"LAST DEPARTMENT - TEAM 3"}' )
# Fix: a cancelled `choose from list` returns the literal string "false"; the
# original happily created a marker file named "false" and enrolled the machine
# into no group.
if [ -z "$groupchoice" ] || [ "$groupchoice" = "false" ]; then
    echo "No onboarding group selected; exiting." >&2
    exit 1
fi
touch "$tempdir/$groupchoice"
# Clean up any stale marker files so only the chosen group remains
find "$tempdir" -type f -not -name "$groupchoice" -delete
# Run recon and assign email to user
jamf recon -endUsername "$username"
exit 0