ansible/roles/backups/templates/backup.sh


#! /bin/bash
#
# backup.sh
# General-purpose backup script that accepts subtasks
# Copyright (C) 2020 Vintage Salt <rehashedsalt@cock.li>
#
# Distributed under terms of the MIT license.
#
set -e
export BACKUPSDIR="/backups"
export OUTDIR="$BACKUPSDIR/out"
export MODULESDIR="/opt/backups/modules"
export DATE="$(date -Iseconds)"
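# OUTDIR and DATE are not referenced again in this script; they are exported
# presumably for the modules sourced below (and any commands they run) to use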
# Helper functions
log() {
    [ -z "$1" ] && return 1
    # Pass the message as an argument so "%" in messages is not treated as a format specifier
    printf '%s: %s\n' "$(date -Iseconds)" "$1"
}
# Sanity checks
if ! [ -d "$MODULESDIR" ]; then
log "Unable to find modules directory: $MODULESDIR"
    exit 1
fi
# Source an RC, if we have it
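# (The RC file is optional; presumably it overrides the exported defaults
# above or defines shared helpers for the modules.)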
if [ -r "$MODULESDIR/backuprc" ]; then
source "$MODULESDIR/backuprc"
fi
# More sanity checks
if ! [ -d "$BACKUPSDIR" ]; then
log "Unable to find backups directory: $BACKUPSDIR"
    exit 2
fi
# Do the do
log "Beginning backups"
for file in "$MODULESDIR"/*; do
    # Just keep going if we don't have any tasks to do
    [ -f "$file" ] || continue
    # Execute the module in a subshell and alert if it fails; the subshell
    # keeps a failing module from aborting the whole run under set -e
    log "Executing module: $file"
    (
        # Define a log function for our module to use
        log() {
            [ -z "$1" ] && return 1
            printf '%s: %s\n' "$(date -Iseconds)" "$1"
        }
        source "$file"
    ) || {
        log "Error executing module: $file"
    }
done
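# For reference, a module is just a shell fragment that the loop above sources.
# The sketch below is a hypothetical example (not shipped with this role); it
# assumes an application whose data lives in /var/lib/exampleapp and relies on
# the exported OUTDIR/DATE variables and the log() helper defined above:
#
#   log "Backing up exampleapp"
#   mkdir -p "$OUTDIR"
#   tar -czf "$OUTDIR/exampleapp-$DATE.tar.gz" -C /var/lib exampleapp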
# If we have a fancy schmancy bucket, use it
s3bucket="{{ aws_backup_bucket }}"
if command -v aws > /dev/null 2>&1 && aws s3 ls "s3://$s3bucket" > /dev/null 2>&1; then
log "Moving files to S3 bucket $s3bucket"
    aws s3 mv "$BACKUPSDIR" "s3://$s3bucket" \
        --recursive \
        --only-show-errors \
        --exclude "*.log" \
        --storage-class STANDARD_IA
fi