Work on a basic implementation of backups

Still need to set up good defaults and do host-based configuration, though
Salt 2020-12-24 09:06:20 -06:00
parent 4cbc53a687
commit 00fb2bb32e
9 changed files with 145 additions and 1 deletion

View File

@@ -20,7 +20,7 @@ This branch is kinda-sorta a port of master, so it still needs to reach some for
* Monitoring (Doesn't necessarily have to be Grafana)
* Forge server deployment? Terraria? What do I do about all these gameservers? Fork 'em into their own roles? I imagine Paper's already got something set up, too.
* Find a good role for Terraria servers
## Initialization

View File

@@ -5,6 +5,31 @@ ansible_pull_repo: "https://git.9iron.club/salt/ansible"
ansible_pull_commit: rewrite
common_ansible_pubkey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDfXVgMHeD2wtCAIVoDYQ+R19vKfhmR2FgUTkHhAzE2156fB/+IMB+6Qc4X3aFRIcUp+Ls8Vm8JQ3d0jvbcGQkgbAjRExQa71XGBmhxJCxzlCLBoQzBmTSnryL09LExoMynzVgrso8TQP92vZBGJFI/lLGAaop2l9pu+3cgM3sRaK+A11lcRCrS25C3hqPQhKC44zjzOt7sIoaG6RqG3CQ8jhE35bthQdBySOZVDgDKfjDyPuDzVxiKjsuNm4Ojzm0QW5gq6GkLOg2B8OSQ1TGQgBHQu4b8zsKBOUOdbZb0JLM8NdpH1cMntC0QBofy3DzqR/CFaSaBzUx+dnkBH0/pjBOrhHzzqZGOJayfC1igYki67HqzFV5IjhAVa+c4S9L/zbFk0+YZYdgMoKNlMU2LgzrSEastuXHD7NUy3fMP4BZbqg37SjQzFRXoUp5+ctVs9tCoy/qvvjT3UVGcn312eJrRRfWrYagU2nWKGyqbTOpsuOJ5OLlhopy6eP9+yRM= ansible"
# For backups
backup_s3backup_bucket: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  61393939633736616361336162633564356434363963303737366236373332653265366132393439
  3333643463306561616261636466303631373866353962310a356561633833633533353937323265
  64656235616637366363323330346134656366663733393462346333613535633838333938653434
  6133326433613239650a386333626339363263323134313830353963326265666336306130656534
  6534
backup_s3backup_aws_access_key_id: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  61353734383466366564333832643738313238666235336332303539383639626263633231396261
  6165393062393266343661643466633163383164383032340a333833656566336331323565386162
  35646665353539616538353339616531346564636466643639326366353165313861373761396537
  3731653463643838330a383065313135343763636534656133343666363237356462326236643631
  34366564373661396434663633346635663331393538363362376265653334623538
backup_s3backup_aws_secret_access_key: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  64316231613337333231383837333930336561633164393762343838646136393165626361346637
  3364643830346533623137643530323438366665393632320a633032336664616261353734343661
  36646565383532616133353530343331663731663965656662363830363063303361373861663762
  3032613362626233350a613464333230363830383334363032303730646134306331383733363036
  34346334306633306664323337643433356336366633396239306539613539633535386238346662
  6232313138393062626631386135383234376361643362353966
# For zerotier
zerotier_network_id: !vault |
  $ANSIBLE_VAULT;1.1;AES256

View File

@@ -0,0 +1,24 @@
# Which backup script to use. Configuration is somewhat unique to each script
backup_script: s3backup
# When to kick off backups using the systemd timer
backup_time: "*-*-* 02:00:00"
# What format should the datestamps in the filenames of any backups be in?
# Defaults to YYYY-MM-DD-hhmm
# So January 5th, 2021 at 3:41PM would be 2021-01-05-1541
backup_dateformat: "%Y-%m-%d-%H%M"
# List of files/directories to back up
# Note that tar is NOT instructed to recurse through symlinks
# If you want it to do that, end the path with a slash!
backup_s3backup_list:
- /root
backup_s3backup_list_extra: []
# Arguments to pass to tar
# Note that passing f here is probably a bad idea
backup_s3backup_tar_args: cz
backup_s3backup_tar_args_extra: ""
# Which bucket to upload the backup to
backup_s3backup_bucket: replaceme
# Credentials for the bucket
backup_s3backup_aws_access_key_id: REPLACEME
backup_s3backup_aws_secret_access_key: REPLACEME
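
The list/list_extra split is presumably where the host-based configuration mentioned in the commit message will land: backup_s3backup_list holds the role-wide paths while each host appends its own. A minimal sketch of what that could look like in host_vars, with the hostname and paths invented purely for illustration:

# host_vars/web01.yml (hypothetical host)
# Host-specific paths to back up on top of the role-wide backup_s3backup_list
backup_s3backup_list_extra:
  - /etc/nginx
  - /srv/www/   # trailing slash so tar follows the symlink, per the note above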

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env ansible-playbook
# vim:ft=ansible:
---
- name: restart backup timer
  systemd: name=backup.timer state=restarted daemon_reload=yes
  become: yes

View File

@@ -0,0 +1,13 @@
#!/usr/bin/env ansible-playbook
# vim:ft=ansible:
---
- name: template out backup script
  template: src={{ backup_script }}.sh dest=/opt/backup.sh mode=0700 owner=root group=root
- name: assure systemd unit and timer
  template: src=backup.{{ item }} dest=/etc/systemd/system/backup.{{ item }}
  loop:
    - service
    - timer
  notify: restart backup timer
- name: enable systemd timer
  systemd: name=backup.timer state=started enabled=yes daemon_reload=yes
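
One gap in the task list: the templated script pipes into the aws CLI (see s3backup.sh further down), but nothing here installs it. A hedged sketch of a task that could cover that, assuming the distribution packages it as awscli; this is not part of the commit:

# Hypothetical addition: the backup script calls aws s3 cp, so make sure the
# CLI exists before templating the script that depends on it
# (package name varies by distro; awscli is an assumption)
- name: assure aws cli is present
  package: name=awscli state=present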

View File

@@ -0,0 +1,10 @@
# vim:ft=systemd
[Unit]
Description=Nightly backup service

[Service]
MemoryMax=256M
ExecStart=/opt/backup.sh

[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,10 @@
# vim:ft=systemd
[Unit]
Description=Nightly backup timer

[Timer]
Persistent=true
OnCalendar={{ backup_time }}

[Install]
WantedBy=timers.target
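
Because OnCalendar is fed directly from backup_time, staggering backup windows across hosts is just another per-host override; a small sketch, with the hostname and time purely illustrative:

# host_vars/db01.yml (hypothetical host)
# Kick this host's backup off an hour later so its upload doesn't overlap
# with the rest of the fleet
backup_time: "*-*-* 03:00:00"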

View File

@@ -0,0 +1,53 @@
#! /bin/bash
#
# s3backup.sh
# General-purpose, Ansible-managed backup script to push directories to
# an S3 bucket
#
# NOTICE: THIS FILE CONTAINS SECRETS
# This file may contain the following secrets depending on configuration:
# * An AWS access key ID
# * An AWS secret access key
# These are NOT things you want arbitrary readers to access! Ansible will
# attempt to ensure this file has 0700 permissions, but that won't stop you
# from changing that yourself
# DO NOT ALLOW THIS FILE TO BE READ BY NON-ROOT USERS
# NOTICE: DO NOT MODIFY THIS FILE
# Any changes made will be clobbered by Ansible
# Please make any configuration changes in the main repo
set -e
# Directories to backup
# Ansible will determine the entries here
# We use a bash array because it affords us some level of sanitization, enough
# to let us back up items whose paths contain spaces
declare -a DIRS
{% for item in backup_s3backup_list %}
DIRS+=("{{ item }}")
{% endfor %}
# Extra, probably host-specific directories
{% for item in backup_s3backup_list_extra %}
DIRS+=("{{ item }}")
{% endfor %}
# End directories
# AWS S3 configuration
# NOTE: THIS IS SECRET INFORMATION
export AWS_ACCESS_KEY_ID="{{ backup_s3backup_aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ backup_s3backup_aws_secret_access_key }}"
# Tar up all items in the backup list, recursively, and pipe them straight
# up to S3
echo "Commencing backup on the following items:"
for dir in "${DIRS[@]}"; do
echo "- $dir"
done
echo "Will upload resultant backup to {{ backup_s3backup_bucket }}"
nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} "${DIRS[@]}" \
| aws s3 cp - \
"s3://{{ backup_s3backup_bucket }}/{{ inventory_hostname_short }}/$(date "+{{ backup_dateformat }}").tar.gz"

View File

@@ -7,6 +7,9 @@
  - role: common
    become: yes
    tags: [ common ]
  - role: backup
    become: yes
    tags: [ backup, common ]
  - role: ansible-pull
    become: yes
    tags: [ ansible, common ]