Only use restic for backups from now on

Salt 2025-01-25 01:28:34 -06:00
parent 7f75bdb5cd
commit e6e8427227

@@ -40,67 +40,52 @@ backup() {
# Takes a file or directory to backup and backs it up
[ -z "$*" ] && return 1
if command -v restic > /dev/null 2>&1; then
for dir in "$@"; do
echo "- $dir"
done
# First, we remove stale locks. This command will only remove locks that have not been
# updated in the last half hour. By default, restic updates them during an ongoing
# operation every 5 minutes, so this should be perfectly fine to do.
# What I'm not sure of (though it should be fine, since we auto-restart if need be) is whether
# two processes doing this concurrently will cause issues. I'd hope not, but you never know.
# restic-unlock(1)
/opt/restic-wrapper \
--verbose \
unlock
# Back up everything in the $DIRS array (which was passed as args)
# This results in some level of pollution with regard to what paths are backed up
# (especially on ostree systems where we do the etc diff) but that's syntactic and
# we can script around it.
/opt/restic-wrapper \
--verbose \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
--exclude="{{ item }}" \
{% endfor %}
--exclude="/data/**/backup" \
--exclude="/data/**/backups" \
--exclude="*.bak" \
--exclude="*.tmp" \
--exclude="*.swp" \
backup \
"$@"
# In addition, we should also prune our backups
# https://restic.readthedocs.io/en/stable/060_forget.html
# --keep-daily n Keeps daily backups for the last n days
# --keep-weekly n Keeps weekly backups for the last n weeks
# --keep-monthly n Keeps monthly backups for the last n months
# --keep-tag foo Keeps all snapshots tagged with "foo"
# --host "$HOSTNAME" Only act on *our* snapshots. We assume other machines are taking
# care of their own houses.
# --prune Remove orphaned blobs when we remove snapshots
/opt/restic-wrapper \
--verbose \
forget \
--keep-daily 7 \
--keep-weekly 4 \
--keep-monthly 6 \
--keep-tag noremove \
--host "$HOSTNAME" \
--prune
else
dir="$@"
for dir in "$@"; do
echo "- $dir"
nice -n 10 tar {{ backup_s3backup_tar_args }}{{ backup_s3backup_tar_args_extra }} \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
--exclude "{{ item }}" \
{% endfor %}
"$dir" \
| aws s3 cp --expected-size 274877906944 - \
{% if backup_s3_aws_endpoint_url is defined %}
--endpoint-url="{{ backup_s3_aws_endpoint_url }}" \
{% endif %}
"s3://{{ backup_s3_bucket }}/$HOSTNAME/$dir/$(date "+{{ backup_dateformat }}").tar.gz"
fi
done
# First, we remove stale locks. This command will only remove locks that have not been
# updated in the last half hour. By default, restic updates them during an ongoing
# operation every 5 minutes, so this should be perfectly fine to do.
# What I'm not sure of (though it should be fine, since we auto-restart if need be) is whether
# two processes doing this concurrently will cause issues. I'd hope not, but you never know.
# restic-unlock(1)
/opt/restic-wrapper \
--verbose \
unlock
# Back up everything in the $DIRS array (which was passed as args)
# This results in some level of pollution with regard to what paths are backed up
# (especially on ostree systems where we do the etc diff) but that's syntactic and
# we can script around it.
/opt/restic-wrapper \
--verbose \
{% for item in backup_s3backup_exclude_list + backup_s3backup_exclude_list_extra %}
--exclude="{{ item }}" \
{% endfor %}
--exclude="/data/**/backup" \
--exclude="/data/**/backups" \
--exclude="*.bak" \
--exclude="*.tmp" \
--exclude="*.swp" \
backup \
"$@"
# In addition, we should also prune our backups
# https://restic.readthedocs.io/en/stable/060_forget.html
# --keep-daily n Keeps daily backups for the last n days
# --keep-weekly n Keeps weekly backups for the last n weeks
# --keep-monthly n Keeps monthly backups for the last n months
# --keep-tag foo Keeps all snapshots tagged with "foo"
# --host "$HOSTNAME" Only act on *our* snapshots. We assume other machines are taking
# care of their own houses.
# --prune Remove orphaned blobs when we remove snapshots
/opt/restic-wrapper \
--verbose \
forget \
--keep-daily 7 \
--keep-weekly 4 \
--keep-monthly 6 \
--keep-tag noremove \
--host "$HOSTNAME" \
--prune
}
# Dump Postgres DBs, if possible
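
For reference, a rough sketch of what the templated backup call above renders to once Ansible fills in the exclude lists. The first two --exclude values are hypothetical placeholders standing in for whatever backup_s3backup_exclude_list and backup_s3backup_exclude_list_extra contain, and /opt/restic-wrapper is assumed to pass its arguments straight through to restic:

# Hypothetical rendered form; the first two --exclude values are placeholders,
# not taken from the role defaults.
/opt/restic-wrapper \
    --verbose \
    --exclude="/var/cache" \
    --exclude="/home/*/.cache" \
    --exclude="/data/**/backup" \
    --exclude="/data/**/backups" \
    --exclude="*.bak" \
    --exclude="*.tmp" \
    --exclude="*.swp" \
    backup \
    "$@"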
@@ -142,19 +127,5 @@ if [ -n "${DIRS[*]}" ]; then
echo "- {{ item }}"
{% endfor %}
echo "Will upload resultant backups to {{ backup_s3_bucket }}"
if command -v restic > /dev/null 2>&1; then
echo "Using restic for backups"
backup ${DIRS[*]}
else
echo "Using rudimentary tar and S3 for backups"
for dir in "${DIRS[@]}"; do
if [ "$dir" == "/data" ]; then
for datadir in "$dir"/*; do
[ -e "$datadir" ] && backup "$datadir"
done
else
backup "$dir"
fi
done
fi
backup ${DIRS[*]}
fi
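
Assuming /opt/restic-wrapper simply forwards its arguments to restic with the repository and credentials preconfigured, the retention step is roughly equivalent to the plain restic invocation below (see restic-forget(1)): keep the last 7 daily, 4 weekly, and 6 monthly snapshots taken by this host, keep anything tagged noremove indefinitely, and prune data no longer referenced by any remaining snapshot.

# Rough plain-restic equivalent of the wrapper's forget step; assumes the
# wrapper only injects repository and credential settings.
restic forget \
    --keep-daily 7 \
    --keep-weekly 4 \
    --keep-monthly 6 \
    --keep-tag noremove \
    --host "$HOSTNAME" \
    --prune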