Update backup script to do it all, allowing for directory AND DB backups

This commit is contained in:
Salt 2021-05-04 13:38:39 -05:00
parent 26c45ff080
commit 7484dce678
3 changed files with 46 additions and 34 deletions

View File

@ -5,8 +5,6 @@
- hosts: psql1.desu.ltd
roles:
- role: backup
vars:
backup_script: s3pgdump
tags: [ backup ]
- role: motd
vars:
@ -47,8 +45,6 @@
- hosts: psql1.9iron.club
roles:
- role: backup
vars:
backup_script: s3pgdump
tags: [ backup ]
- role: motd
vars:

View File

@ -23,9 +23,6 @@
- { path: /srv/nfs, src: nfs }
tags: [ pis, storage ]
roles:
# - role: backup
# vars:
# backup_script: s3pgdump
- role: backup
vars:
backup_s3backup_list_extra:
@ -34,7 +31,6 @@
- /srv/nfs/k8s/default/web-jackett-config-pvc
- /srv/nfs/k8s/default/web-netbox-pvc
- /srv/nfs/k8s/default/web-transmission-config-pvc
- /var/lib/postgresql
backup_time: "Mon *-*-* 02:00:00"
tags: [ backup ]
- role: motd

View File

@ -1,8 +1,8 @@
#! /bin/bash
#
# s3backup.sh
# General-purpose, Ansible-managed backup script to push directories to
# an S3 bucket
# General-purpose, Ansible-managed backup script to push directories, DBs, and
# more up to an S3 bucket
#
# NOTICE: THIS FILE CONTAINS SECRETS
# This file may contain the following secrets depending on configuration:
@ -19,6 +19,11 @@
set -e
# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3_aws_secret_access_key }}"
# Directories to backup
# Ansible will determine the entries here
@ -30,31 +35,46 @@ DIRS+=("{{ item }}")
{% endfor %}
# End directories
# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3_aws_secret_access_key }}"
# Tar up all items in the backup list, recursively, and pipe them straight
# up to S3
if [ -z "${DIRS[*]}" ]; then
echo "No directories configured to back up!"
exit 0
fi
echo "Commencing backup on the following items:"
for dir in "${DIRS[@]}"; do
if [ -n "${DIRS[*]}" ]; then
echo "Commencing backup on the following items:"
for dir in "${DIRS[@]}"; do
echo "- $dir"
done
echo "Will ignore the following items:"
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
echo "- {{ item }}"
{% endfor %}
echo "Will upload resultant backup to {{ backup_s3_bucket }}"
nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
done
echo "Will ignore the following items:"
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
echo "- {{ item }}"
{% endfor %}
echo "Will upload resultant backup to {{ backup_s3_bucket }}"
nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
--exclude "{{ item }}" \
{% endfor %}
{% endfor %}
"${DIRS[@]}" \
| aws s3 cp - \
"s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/$(date "+{{ backup_dateformat }}").tar.gz"
fi
# Dump Postgres DBs, if possible (only when a Postgres client is present)
if command -v psql > /dev/null 2>&1; then
  # Populate a list of databases, excluding templates and the default
  # maintenance DB. psql errors are deliberately suppressed: if the server
  # is unreachable this yields an empty list instead of aborting the whole
  # backup run under `set -e`.
  declare -a DATABASES
  # IFS= and -r preserve whitespace and backslashes in database names
  while IFS= read -r line; do
    DATABASES+=("$line")
  done < <(sudo -u postgres psql -t -A -c "SELECT datname FROM pg_database where datname not in ('template0', 'template1', 'postgres');" 2>/dev/null)
  # pgdump all DBs, compress them, and pipe straight up to S3
  echo "Commencing backup on the following databases:"
  for db in "${DATABASES[@]}"; do
    echo "- $db"
  done
  echo "Will upload resultant backups to {{ backup_s3_bucket }}"
  for db in "${DATABASES[@]}"; do
    echo "Backing up $db"
    # Stream dump -> gzip -> S3 so no local disk space is required
    sudo -u postgres pg_dump "$db" \
      | gzip -v9 \
      | aws s3 cp - \
      "s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/$db-$(date "+{{ backup_dateformat }}").pgsql.gz"
  done
fi