#!/bin/bash
#
# s3backup.sh
# General-purpose, Ansible-managed backup script to push directories, DBs, and
# more up to an S3 bucket
#
# NOTICE: THIS FILE CONTAINS SECRETS
# This file may contain the following secrets depending on configuration:
# * An AWS access key ID
# * An AWS secret access key
# These are NOT things you want arbitrary readers to access! Ansible will
# attempt to ensure this file has 0700 permissions, but that won't stop you
# from changing that yourself
# DO NOT ALLOW THIS FILE TO BE READ BY NON-ROOT USERS

# NOTICE: DO NOT MODIFY THIS FILE
# Any changes made will be clobbered by Ansible
# Please make any configuration changes in the main repo

set -e
# Fail a pipeline if any stage fails (e.g. tar or pg_dump), not just the
# last command in it
set -o pipefail

# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3_aws_secret_access_key }}"
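# The aws CLI reads these credentials directly from the environment, so the
# upload commands below need no credentials file or prior `aws configure` run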

# Directories to backup
# Ansible will determine the entries here

# We use a bash array because it affords us some level of sanitization, enough
# to let us back up items whose paths contain spaces
declare -a DIRS
{% for item in backup_s3backup_list + backup_s3backup_list_extra %}
DIRS+=("{{ item }}")
{% endfor %}
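# For illustration only: with a hypothetical backup_s3backup_list of
# ['/etc', '/srv/data'] and an empty backup_s3backup_list_extra, the loop
# above would render to:
#   DIRS+=("/etc")
#   DIRS+=("/srv/data")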
# End manual directory configuration

# If we have ostree, add diff'd configs to the list, too
if command -v ostree > /dev/null 2>&1; then
    for file in $(
        ostree admin config-diff 2>/dev/null | \
            grep -oP '^[AM]\s*\K.*'
    ); do
        DIRS+=("/etc/$file")
    done
fi
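# For reference: `ostree admin config-diff` lists /etc entries one per line as
# a status letter plus a path relative to /etc, e.g. "M    fstab" (modified)
# or "A    hostname" (added); the grep above keeps only the paths so they can
# be prefixed with /etc/ and backed up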

# Helper functions
backup() {
    # Takes a file or directory to backup and backs it up
    [ -z "$1" ] && return 1

    dir="$1"
    echo "- $dir"

    # Stream the tarball straight to S3 rather than staging it on disk.
    # --expected-size (256 GiB here) lets the CLI pick sensible multipart part
    # sizes, since it cannot know the size of a streamed upload in advance
    nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
        --exclude "{{ item }}" \
{% endfor %}
        "$dir" \
        | aws s3 cp --expected-size 274877906944 - \
{% if backup_s3_aws_endpoint_url is defined %}
        --endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
{% endif %}
        "s3://{{ backup_s3_bucket }}/$HOSTNAME/$dir/$(date "+{{ backup_dateformat }}").tar.gz"
}
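# A minimal restore sketch (not part of this script), assuming gzip-compressed
# archives as the .tar.gz suffix suggests; substitute a real key for the
# placeholders:
#   aws s3 cp "s3://{{ backup_s3_bucket }}/<host>/<path>/<date>.tar.gz" - \
#       | tar -xzf - -C /tmp/restore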

# Tar up all items in the backup list, recursively, and pipe them straight
# up to S3
if [ -n "${DIRS[*]}" ]; then
    echo "Commencing backup on the following items:"
    for dir in "${DIRS[@]}"; do
        echo "- $dir"
    done
    echo "Will ignore the following items:"
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
    echo "- {{ item }}"
{% endfor %}
    echo "Will upload resultant backups to {{ backup_s3_bucket }}"
    for dir in "${DIRS[@]}"; do
        # Back up the children of /data individually so each subdirectory
        # gets its own archive
        if [ "$dir" == "/data" ]; then
            for datadir in "$dir"/*; do
                [ -e "$datadir" ] && backup "$datadir"
            done
        else
            backup "$dir"
        fi
    done
fi
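# For illustration only: with backup_dateformat set to e.g. %Y-%m-%d, backing
# up /etc on host "web1" would yield a key like
#   s3://{{ backup_s3_bucket }}/web1//etc/2024-01-01.tar.gz
# (the doubled slash comes from $dir's leading /)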

# Dump Postgres DBs, if possible
if command -v psql > /dev/null 2>&1; then
    # Populate a list of databases, skipping the templates and the default
    # postgres maintenance DB; psql -t -A prints bare names, one per line
    declare -a DATABASES
    while IFS= read -r line; do
        DATABASES+=("$line")
    done < <(sudo -u postgres psql -t -A -c "SELECT datname FROM pg_database WHERE datname NOT IN ('template0', 'template1', 'postgres');" 2>/dev/null)

    # pg_dump all DBs, compress them, and pipe straight up to S3
    echo "Commencing backup on the following databases:"
    for db in "${DATABASES[@]}"; do
        echo "- $db"
    done
    echo "Will upload resultant backups to {{ backup_s3_bucket }}"
    for db in "${DATABASES[@]}"; do
        echo "Backing up $db"
        sudo -u postgres pg_dump "$db" \
            | gzip -v9 \
            | aws s3 cp - \
{% if backup_s3_aws_endpoint_url is defined %}
            --endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
{% endif %}
            "s3://{{ backup_s3_bucket }}/$HOSTNAME/pgdump/$db/$(date "+{{ backup_dateformat }}").pgsql.gz"
    done
fi
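# A minimal restore sketch (not part of this script): pull a dump back down,
# decompress it, and feed it to psql; "mydb" and <date> are placeholders:
#   aws s3 cp "s3://{{ backup_s3_bucket }}/$HOSTNAME/pgdump/mydb/<date>.pgsql.gz" - \
#       | gunzip | sudo -u postgres psql mydb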