#!/bin/bash
#
# s3backup.sh
# General-purpose, Ansible-managed backup script to push directories, DBs, and
# more up to an S3 bucket
#
# NOTICE: THIS FILE CONTAINS SECRETS
# This file may contain the following secrets depending on configuration:
# * An AWS access key
# * An AWS session token
# These are NOT things you want arbitrary readers to access! Ansible will
# attempt to ensure this file has 0700 permissions, but that won't stop you
# from changing that yourself.
# DO NOT ALLOW THIS FILE TO BE READ BY NON-ROOT USERS

# NOTICE: DO NOT MODIFY THIS FILE
# Any changes made will be clobbered by Ansible.
# Please make any configuration changes in the main repo.

set -e

# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3_aws_secret_access_key }}"
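# Restic's S3 backend reads these credentials from the environment; we assume
# /opt/restic-wrapper passes them through to restic unchanged.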

# Directories to back up
# Ansible will determine the entries here

# We use a bash array because it affords us some level of sanitization, enough
# to let us back up items whose paths contain spaces
declare -a DIRS
{% for item in backup_s3backup_list + backup_s3backup_list_extra %}
DIRS+=("{{ item }}")
{% endfor %}
# End directory configuration
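# For illustration, a rendered copy of the loop above might contain entries
# like these (hypothetical values, not real configuration):
#   DIRS+=("/etc")
#   DIRS+=("/data/my app")  # spaces survive thanks to the array quoting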

# Helper functions
backup() {
  # Takes one or more files or directories to back up and backs them up
  [ -z "$*" ] && return 1

  for dir in "$@"; do
    echo "- $dir"
  done
  # First, we remove stale locks. This command will only remove locks that
  # have not been updated in the last half hour. By default, restic refreshes
  # locks every 5 minutes during an ongoing operation, so this should be
  # perfectly fine to do.
  # What I'm not sure of (but should be fine, because we auto-restart if need
  # be) is whether two processes doing this concurrently will cause issues.
  # I'd hope not, but you never know.
  # restic-unlock(1)
  /opt/restic-wrapper \
    --verbose \
    unlock
  # Back up everything in the $DIRS array (which was passed to us as args).
  # This results in some level of pollution with regard to what paths are
  # backed up (especially on ostree systems, where we do the etc diff), but
  # that's syntactic and we can script around it.
  /opt/restic-wrapper \
    --verbose \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
    --exclude="{{ item }}" \
{% endfor %}
    --exclude="/data/**/backup" \
    --exclude="/data/**/backups" \
    --exclude="*.bak" \
    --exclude="*.tmp" \
    --exclude="*.swp" \
    backup \
    "$@"
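  # (The ** in the /data exclude patterns is restic glob syntax: it matches
  # any number of intermediate path components, so backup directories are
  # skipped at any depth under /data.)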
  # In addition, we should also prune our backups
  # https://restic.readthedocs.io/en/stable/060_forget.html
  # --keep-daily n      Keeps daily backups for the last n days
  # --keep-weekly n     Keeps weekly backups for the last n weeks
  # --keep-monthly n    Keeps monthly backups for the last n months
  # --keep-tag foo      Keeps all snapshots tagged with "foo"
  # --host "$HOSTNAME"  Only act on *our* snapshots. We assume other machines
  #                     are taking care of their own houses.
  /opt/restic-wrapper \
    --verbose \
    forget \
    --keep-daily 7 \
    --keep-weekly 4 \
    --keep-monthly 6 \
    --keep-tag noremove \
    --host "$HOSTNAME"
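  # Note that forget only drops snapshot records; we assume space is actually
  # reclaimed elsewhere, e.g. by a periodic "restic prune" (or by passing
  # --prune to forget).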
}

# Dump Postgres DBs, if possible
if command -v psql > /dev/null 2>&1; then
  # Put down a place for us to store backups, if we don't have it already
  backupdir="/opt/postgres-backups"
  mkdir -p "$backupdir"
  # Populate a list of databases, one bare name per line
  # (-t: tuples only, -A: unaligned output)
  declare -a DATABASES
  while read -r line; do
    DATABASES+=("$line")
  done < <(sudo -u postgres psql -t -A -c "SELECT datname FROM pg_database WHERE datname NOT IN ('template0', 'template1', 'postgres');" 2>/dev/null)
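  # (This assumes the postgres system user can connect without a password,
  # i.e. peer authentication, which is the usual packaged default.)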

  # pg_dump all DBs, compress them, and add the dumps to the backup list
  echo "Commencing backup on the following databases:"
  for db in "${DATABASES[@]}"; do
    echo "- $db"
  done
  echo "Will upload resultant backups to {{ backup_s3_bucket }}"
  for db in "${DATABASES[@]}"; do
    echo "Backing up $db"
    path="$backupdir/$db.pgsql.gz"
    sudo -u postgres pg_dump "$db" \
      | gzip -v9 \
      > "$path"
    DIRS+=("$path")
  done
fi
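# To restore one of these dumps later (illustrative; "mydb" is a hypothetical
# database name, and the target database must exist first):
#   gunzip -c /opt/postgres-backups/mydb.pgsql.gz | sudo -u postgres psql mydb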

# Back up all items in the backup list (the DIRS array), recursively, via the
# restic wrapper
if [ -n "${DIRS[*]}" ]; then
  echo "Commencing backup on the following items:"
  for dir in "${DIRS[@]}"; do
    echo "- $dir"
  done
  echo "Will ignore the following items:"
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
  echo "- {{ item }}"
{% endfor %}
  echo "Will upload resultant backups to {{ backup_s3_bucket }}"
  backup "${DIRS[@]}"
fi