#!/bin/bash
#
# s3backup.sh
# General-purpose, Ansible-managed backup script to push directories to
# an S3 bucket
#
# NOTICE: THIS FILE CONTAINS SECRETS
# This file may contain the following secrets depending on configuration:
# * An AWS access key
# * An AWS session token
# These are NOT things you want arbitrary readers to access! Ansible will
# attempt to ensure this file has 0700 permissions, but that won't stop you
# from changing that yourself.
# DO NOT ALLOW THIS FILE TO BE READ BY NON-ROOT USERS
#
# NOTICE: DO NOT MODIFY THIS FILE
# Any changes made will be clobbered by Ansible.
# Please make any configuration changes in the main repo.

set -e
# Fail the whole pipeline if tar fails, not just if the aws upload fails
set -o pipefail

# Directories to back up
# Ansible will determine the entries here
# We use a bash array because it affords us some level of sanitization, enough
# to let us back up items whose paths contain spaces
declare -a DIRS
{% for item in backup_s3backup_list + backup_s3backup_list_extra %}
DIRS+=("{{ item }}")
{% endfor %}
# End directories

# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3_aws_secret_access_key }}"

# Tar up all items in the backup list, recursively, and pipe them straight
# up to S3
if [ "${#DIRS[@]}" -eq 0 ]; then
  echo "No directories configured to back up!"
  exit 0
fi

echo "Commencing backup on the following items:"
for dir in "${DIRS[@]}"; do
  echo "- $dir"
done
echo "Will ignore the following items:"
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
echo "- {{ item }}"
{% endfor %}
echo "Will upload resultant backup to {{ backup_s3_bucket }}"

nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
  --exclude "{{ item }}" \
{% endfor %}
  "${DIRS[@]}" \
  | aws s3 cp - \
  "s3://{{ backup_s3_bucket }}/{{ inventory_hostname }}/$(date "+{{ backup_dateformat }}").tar.gz"
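
# For reference, a sketch of what Ansible might render the templated pieces
# above into. Every value below (directories, excludes, bucket, hostname,
# tar arguments, date format) is an illustrative assumption, not a default
# shipped with this role:
#
#   DIRS+=("/etc")
#   DIRS+=("/var/lib/example app")
#
#   nice -n 10 tar czf - \
#     --exclude "/var/lib/example app/cache" \
#     "${DIRS[@]}" \
#     | aws s3 cp - \
#     "s3://example-backups/host.example.com/$(date "+%Y-%m-%d_%H-%M-%S").tar.gz"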