#! /bin/bash
#
# desktop.sh
# Backup script for desktops. Meant to be sourced by our main backup script
# Copyright (C) 2020 Vintage Salt
#
# Distributed under terms of the MIT license.
#
set -e

export OUTDIR="$BACKUPSDIR/{{ inventory_hostname }}"
retention=7 # Keep the 7 most recent backups

# Sanity checks
if [ -z "$BACKUPSDIR" ]; then
    log "BACKUPSDIR was undefined. Run the main backup script instead of this one."
    return 1
fi
if ! [ -d "$OUTDIR" ]; then
    if ! mkdir "$OUTDIR"; then
        log "Unable to find or create output directory: $OUTDIR"
        return 2
    fi
fi

# Purge the oldest backup if we need to
currentbackupcount="$(find "$OUTDIR" -name '*.tar.gz' 2>/dev/null | wc -l)"
if (( currentbackupcount >= retention )); then
    # Backup names embed an ISO-8601 timestamp, so a lexical sort puts the oldest first
    lastbackup="$(find "$OUTDIR" -name '*.tar.gz' 2>/dev/null | sort | head -n 1)"
    if [ -f "$lastbackup" ]; then
        log "Removing old backup: $lastbackup"
        rm "$lastbackup"
    fi
fi

# WE MAKE BACKUP NOW SERGEI
for dir in /home/*; do
    username="$(basename -- "$dir")"
    forcefile="$dir/.backup/force"
    # Skip users who have not opted in, or whose .backup directory is empty
    [ -d "$dir/.backup" ] || continue
    files=("$dir/.backup/"*)
    [ -e "${files[0]}" ] || continue

    tar czhf "$OUTDIR/desktop-$username-{{ inventory_hostname }}-$(date -Iseconds).tar.gz" "$dir/.backup/"*

    # On the first of the month, or when the user drops a force file, do a full home dump to S3.
    # Compare as strings to avoid bash treating day numbers like "08" as invalid octal.
    if [ "$(date +%d)" = "01" ] || [ -f "$forcefile" ]; then
        log "Detected conditions for monthly dump"
        if command -v aws > /dev/null 2>&1 && aws s3 ls "s3://$s3bucket" > /dev/null 2>&1; then
            # Time for huge backups piped straight to S3. Stream an uncompressed tar through
            # gzip and upload from stdin; the object key mirrors the local archive naming.
            tar \
                --exclude "$dir/.ansible" \
                --exclude "$dir/.backup" \
                --exclude "$dir/.cache" \
                --exclude "$dir/.steam" \
                --exclude "$dir/Downloads" \
                --exclude "$dir/Dropbox" \
                --exclude "$dir/Nextcloud" \
                --exclude "$dir/snap" \
                -cf - "$dir/"* \
                | gzip -c \
                | aws s3 cp - "s3://$s3bucket/desktop-$username-{{ inventory_hostname }}-$(date -Iseconds).tar.gz" \
                    --only-show-errors \
                    --storage-class STANDARD
        else
            log "Could not satisfy requirements for AWS CLI"
        fi
        [ -f "$forcefile" ] && rm "$forcefile"
    fi
done