ansible/roles/backups/templates/backup.sh
#! /bin/bash
#
# backup.sh
# General-purpose backup script that accepts subtasks
# Copyright (C) 2020 Vintage Salt <rehashedsalt@cock.li>
#
# Distributed under terms of the MIT license.
#
set -e
export BACKUPSDIR="/backups"
export OUTDIR="$BACKUPSDIR/out"
export MODULESDIR="/opt/backups/modules"
export DATE="$(date -Iseconds)"
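# These variables are exported so the modules sourced below can use them; a
# module would presumably write its output under "$OUTDIR" and can use
# "$DATE" to timestamp its archives.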
# Helper functions
log() {
	[ -z "$1" ] && return 1
	printf '%s: %s\n' "$(date -Iseconds)" "$1"
}
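# Usage: log "message" -- prints the message prefixed with an ISO 8601
# timestamp from date -Iseconds.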
# Sanity checks
if ! [ -d "$MODULESDIR" ]; then
	log "Unable to find modules directory: $MODULESDIR"
	exit 1
fi
# Source an RC, if we have it
if [ -r "$MODULESDIR/backuprc" ]; then
	source "$MODULESDIR/backuprc"
fi
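# Note: because backuprc is sourced in this shell, anything it sets is also
# visible to the modules sourced in the loop below.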
# More sanity checks
if ! [ -d "$BACKUPSDIR" ]; then
	log "Unable to find backups directory: $BACKUPSDIR"
	exit 2
fi
# Do the do
log "Beginning backups"
for file in "$MODULESDIR"/*; do
	# Just keep going if we don't have any tasks to do
	[ -f "$file" ] || continue
	# Execute the module and alert if it fails
	log "Executing module: $file"
	(
		# Define a log function for our module to use
		log() {
			[ -z "$1" ] && return 1
			printf '%s: %s\n' "$(date -Iseconds)" "$1"
		}
		source "$file"
	) || {
		log "Error executing module: $file"
	}
done
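# A module is simply a regular file in "$MODULESDIR" that gets sourced by the
# loop above; it can call log() and use the exported variables. A hypothetical
# module (not shipped with this role, shown only as a sketch) might look like:
#
#   log "Backing up /etc"
#   tar -czf "$OUTDIR/etc-$DATE.tar.gz" /etc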
# If we have a fancy schmancy bucket, use it
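# aws_backup_bucket is filled in by Ansible when this Jinja2 template is rendered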
s3bucket="{{ aws_backup_bucket }}"
if command -v aws > /dev/null 2>&1 && aws s3 ls "s3://$s3bucket" > /dev/null 2>&1; then
	log "Moving files to S3 bucket $s3bucket"
	nice -n 10 aws s3 mv "$BACKUPSDIR" "s3://$s3bucket" \
		--recursive \
		--only-show-errors \
		--exclude "*.log" \
		--storage-class STANDARD
fi
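# Note: "aws s3 mv" deletes the local copies once they have been uploaded;
# anything matching "*.log" is excluded and left in "$BACKUPSDIR".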