ansible/roles/backup/templates/s3backup.sh

111 lines
3.3 KiB
Bash
Raw Normal View History

#!/bin/bash
#
# s3backup.sh
# General-purpose, Ansible-managed backup script to push directories, DBs, and
# more up to an S3 bucket
#
# NOTICE: THIS FILE CONTAINS SECRETS
# This file may contain the following secrets depending on configuration:
# * An AWS access key
# * An AWS session token
# These are NOT things you want arbitrary readers to access! Ansible will
# attempt to ensure this file has 0700 permissions, but that won't stop you
# from changing that yourself
# DO NOT ALLOW THIS FILE TO BE READ BY NON-ROOT USERS
# NOTICE: DO NOT MODIFY THIS FILE
# Any changes made will be clobbered by Ansible
# Please make any configuration changes in the main repo
set -e
# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3_aws_secret_access_key }}"
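# NOTE: the secrets notice above also mentions a session token. If your
# configuration uses temporary credentials, AWS_SESSION_TOKEN must be exported
# here as well; the line below is an illustrative sketch with a placeholder
# value, not something this role necessarily templates in:
# export AWS_SESSION_TOKEN="<session-token-here>"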
# Directories to back up
# Ansible will determine the entries here
# We use a bash array because it affords us some level of sanitization, enough
# to let us back up items whose paths contain spaces
declare -a DIRS
{% for item in backup_s3backup_list + backup_s3backup_list_extra %}
DIRS+=("{{ item }}")
{% endfor %}
# End directory manual configuration
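# For illustration, with backup_s3backup_list set to ['/etc', '/var/lib/my app']
# (example values, not role defaults), the loop above would render as:
#   DIRS+=("/etc")
#   DIRS+=("/var/lib/my app")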
# If we have ostree, add diff'd configs to the list, too
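# `ostree admin config-diff` emits one entry per line, status letter first,
# then the path relative to /etc, e.g. (illustrative):
#   M    fstab
#   A    systemd/system/example.service
# We want the added (A) and modified (M) entries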
if command -v ostree > /dev/null 2>&1; then
    while read -r file; do
        DIRS+=("/etc/$file")
    done < <(
        ostree admin config-diff 2>/dev/null \
            | grep -e '^[AM]' \
            | awk '{print $2}'
    )
fi
# Helper functions
backup() {
    # Takes a file or directory and backs it up
    [ -z "$1" ] && return 1
    local dir="$1"
    echo "- $dir"
    nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
        --exclude "{{ item }}" \
{% endfor %}
        "$dir" \
        | aws s3 cp --expected-size 274877906944 - \
        "s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/$dir/$(date "+{{ backup_dateformat }}").tar.gz"
}
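# On the pipeline above: `aws s3 cp -` streams the tarball from stdin without
# a temp file, and --expected-size (274877906944 bytes = 256 GiB) hints the
# stream's upper bound so the CLI can size its multipart chunks; streamed
# uploads larger than 50 GB can fail without it. The resulting object keys
# look like (illustrative):
#   s3://<bucket>/<inventory_hostname>/<dir>/<timestamp>.tar.gz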
# Tar up all items in the backup list, recursively, and pipe them straight
# up to S3
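# Note the special case below: /data is not archived whole; each of its
# children is tarred and uploaded as a separate object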
if [ -n "${DIRS[*]}" ]; then
    echo "Commencing backup on the following items:"
    for dir in "${DIRS[@]}"; do
        echo "- $dir"
    done
    echo "Will ignore the following items:"
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
    echo "- {{ item }}"
{% endfor %}
    echo "Will upload resultant backups to {{ backup_s3_bucket }}"
    for dir in "${DIRS[@]}"; do
        if [ "$dir" == "/data" ]; then
            for datadir in "$dir"/*; do
                [ -e "$datadir" ] && backup "$datadir"
            done
        else
            backup "$dir"
        fi
    done
fi
# Dump Postgres DBs, if possible
if command -v psql > /dev/null 2>&1; then
    # Populate a list of databases
    declare -a DATABASES
    while read -r line; do
        DATABASES+=("$line")
    done < <(sudo -u postgres psql -t -A -c "SELECT datname FROM pg_database WHERE datname NOT IN ('template0', 'template1', 'postgres');" 2>/dev/null)
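    # (-t suppresses psql's headers and footers, and -A disables column
    # alignment, so the query yields exactly one bare database name per line)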
    # pg_dump all DBs, compress them, and pipe straight up to S3
    echo "Commencing backup on the following databases:"
    for db in "${DATABASES[@]}"; do
        echo "- $db"
    done
    echo "Will upload resultant backups to {{ backup_s3_bucket }}"
    for db in "${DATABASES[@]}"; do
        echo "Backing up $db"
        sudo -u postgres pg_dump "$db" \
            | gzip -v9 \
            | aws s3 cp - \
            "s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/pgdump/$db/$(date "+{{ backup_dateformat }}").pgsql.gz"
    done
fi
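# Restoring one of these dumps is roughly the inverse (a sketch; bucket, host,
# db, and timestamp are placeholders, and the target database must already
# exist):
#   aws s3 cp "s3://<bucket>/<hostname>/pgdump/<db>/<timestamp>.pgsql.gz" - \
#       | gunzip \
#       | sudo -u postgres psql "<db>"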