Add new 'borg' backup directory

With the start of some scripts I'll want for backups
master
Nathan Steel 9 months ago
parent ea8c3b7f15
commit 35fe368dad

@ -0,0 +1,41 @@
#!/bin/bash
# The cronjob itself needs to be on one line; it is spread over several lines here for readability, and in case the cronjob just calls a wrapper script containing the below
# e.g. 0 0 * * * /home/nathan/backupScripts/docker_backup.sh \
# -d /home/samba/share/Docker_prod \
# -b /home/nathan/testBack \
# -r pi2 \
# -R ~/backups/pi1/docker
while getopts d:b:r:R: flag
do
case "${flag}" in
d) DIR=${OPTARG};; # Directory to backup
b) BACKUP_DIR=${OPTARG};; # Where the backup is on local host
r) REMOTE=${OPTARG};; # user@remote or an alias from ~/.ssh/config; prefer the alias, as it can deal with port differences
R) REMOTE_DIR=${OPTARG};; # Location of remote backup i.e. /backup/borg/HOSTNAME/docker (then /npm, /vaultwarden, etc.)
esac
done
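# A host alias like 'pi2' lives in ~/.ssh/config; a minimal sketch (HostName, Port and key are placeholders, not the real values):
# Host pi2
#     HostName 192.168.1.2
#     Port 22
#     User nathan
#     IdentityFile ~/.ssh/id_ed25519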
# /var/spool/cron/crontabs # Cron backup
# LOCAL
export BORG_REPO=$BACKUP_DIR
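# The repo has to exist before 'borg create' will work; a one-off init, sketched the same way the docker script does it (no encryption assumed):
if [ ! -d "$BACKUP_DIR" ]; then
borg init --encryption=none $BACKUP_DIR
fi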
borg create ::{hostname}-{now} $DIR
#rdiff-backup $DIR $BACKUP_DIR # If a directory doesn't exist, it gets created too
# Keep the last 24 hourly, 7 daily, and 4 weekly backups
borg prune \
--glob-archives '{hostname}-*' \
--keep-hourly 24 \
--keep-daily 7 \
--keep-weekly 4
# Create the remote directory for backup if it doesn't exist
ssh $REMOTE mkdir -p $REMOTE_DIR
rsync -azh -e ssh \
--delete \
$BACKUP_DIR \
$REMOTE:$REMOTE_DIR

@ -0,0 +1,90 @@
#!/bin/bash
# The cronjob itself needs to be on one line; it is spread over several lines here for readability, and in case the cronjob just calls a wrapper script containing the below
# e.g. 0 0 * * * /home/nathan/backupScripts/docker_backup.sh \
# -c "duckdns homer" \
# -d /home/samba/share/Docker_prod \
# -b /home/nathan/testBack \
# -r pi2 \
# -R ~/backups/pi1/docker \
# -N 0
# Flags
while getopts c:d:b:r:R:N: flag
do
case "${flag}" in
c) CONTAINER_DIRS=${OPTARG};; # Space-separated container names inside one quoted string, i.e. "homer npm vaultwarden"
d) DIR=${OPTARG};; # /home/nathan/docker typically
b) BACKUP_DIR=${OPTARG};; # Where the backup is on the local host
r) REMOTE=${OPTARG};; # user@remote or an alias from ~/.ssh/config; prefer the alias, as it can deal with port differences
R) REMOTE_DIR=${OPTARG};; # Location of remote backup i.e. /backup/borg/HOSTNAME/docker (then /npm, /vaultwarden, etc.)
N) NOW=${OPTARG};; # 1/0 for yes/no: do the offsite backup now regardless of the time
esac
done
# Borg assistance: https://borgbackup.readthedocs.io/en/stable/quickstart.html
# export BORG_PASSPHRASE='' # If using encryption in borg, ignoring for now, to just have it work
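# If encryption gets turned on later, the passphrase can also come from a command instead of being hardcoded, e.g. (sketch, path assumed): export BORG_PASSCOMMAND='cat /root/.borg-passphrase'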
# Script
DIRS=($CONTAINER_DIRS) # Put the CONTAINER_DIRS passed into an array that can be looped # DIRS=(homer npm) to hardcode
#DIRS=( "$DOCKER"/*/ )
for i in "${DIRS[@]}"
do
# Stop docker containers before backup in case of any moving parts
echo $i
# If local directory doesn't exist for backup
if [ ! -d "$BACKUP_DIR/$i" ]; then
# Create new repo # --encryption=none # --encryption=repokey for encryption with key on server/in repo
borg init --encryption=none $BACKUP_DIR/$i # Creates the repo if it doesn't exist; borg errors with 'A repository already exists...' otherwise
# borg init $REMOTE:$REMOTE_DIR/$i # Will create repo if it doesn't exist EXAMPLE for future, will need to do a different check too ig
# --encryption=repokey after init if you want encryption
# TODO: If using encryption, back up the repo key somewhere outside the repo
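# e.g. a sketch, with an assumed destination: borg key export $BACKUP_DIR/$i /root/borg-keys/$i.key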
fi
cd $DIR/$i # docker compose needs to run from the container's directory to find its compose file
docker compose stop
# LOCAL
export BORG_REPO=$BACKUP_DIR/$i
borg create ::{hostname}-{now} $DIR/$i
#rdiff-backup $DIR/$i $BACKUP_DIR/$i # If a directory doesn't exist, it gets created too
# Retention: keep the last 24 hourly, 7 daily, 4 weekly, 6 monthly, and 1 yearly backup
# Not 100% sure on this policy, but it will do for now
borg prune \
--glob-archives '{hostname}-*' \
--keep-hourly 24 \
--keep-daily 7 \
--keep-weekly 4 \
--keep-monthly 6 \
--keep-yearly 1
#rdiff-backup --force --remove-older-than 1M $BACKUP_DIR/$i # Keep 1 month worth of backups
# OFFSITE
# TODO: This will be better as an actual unique off-site borg backup
docker compose start
done
# Nightly backup offsite (TEMP FOR NOW!!)
# If time is 00:00/midnight, rsync the entire directory of borg backups
# Inefficient for borg, but it'll work for now; will need a separate borg repo on the remote in future
# So in future, will just run a borg update straight to the server
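# A sketch of that future direct-to-remote setup (remote repo path assumed; it needs a one-off 'borg init' on the remote first), replacing the local BORG_REPO export inside the loop:
# export BORG_REPO=ssh://$REMOTE/backup/borg/$(hostname)/docker/$i
# borg create ::{hostname}-{now} $DIR/$i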
if [ "$TIME" = 0000 ] || [ "$NOW" = 1 ]
then
# Create the remote directory for backup if it doesn't exist
ssh $REMOTE mkdir -p $REMOTE_DIR
# Copy the entire backup directory across
# -e ssh makes it secure
rsync -azh -e ssh \
--delete \
$BACKUP_DIR/ \
$REMOTE:$REMOTE_DIR
# Trailing slash on $BACKUP_DIR above, as we only want to copy the contents of the directory, not the directory itself
fi

@ -0,0 +1,2 @@
minecraft
factorio

@ -0,0 +1,9 @@
#!/bin/bash
source /home/nathan/backupScripts/docker_backup.sh \
-c "duckdns homer" \
-d /home/samba/share/Docker_prod \
-b /home/nathan/testBack \
-r pi2 \
-R ~/backups/pi1/docker \
-N 0

@ -0,0 +1,61 @@
#!/bin/bash
HOME=/home/nathan
DOCKER=$HOME/docker
BACKUP=$HOME/backup/
DIRS=('anetwork.uk' 'aney.co.uk' 'mariadb' 'npm' 'vaultwarden')
#DIRS=('nginx' 'npm' 'umami' 'uptimekuma' 'vaultwarden')
#DIRS=( "$DOCKER"/*/ )
REMOTE=alphavps
REMOTEBACKUP=/home/nathan/backups/docker/rn2
# Run the mariadb/mysql backup script for the docker container
/bin/bash /home/nathan/docker/mariadb/scripts/backup.txt
# Backup the docker containers
# requires rdiff-backup rsync docker-compose
# also set up ~/.ssh/config for alphavps, and add this script to a cronjob (as root)
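# e.g. on Debian-ish systems (sketch): apt install rdiff-backup rsync
# and a root crontab entry along the lines of: 0 2 * * * /path/to/this/script (path and time are placeholders)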
for i in "${DIRS[@]}"
do
#echo $i
# Stop docker containers before backup in case of any moving parts
cd $DOCKER/$i
docker compose stop
rdiff-backup $DOCKER/$i $BACKUP/$i
rdiff-backup --force --remove-older-than 2M $BACKUP/$i
docker compose start
done
# Create the remote directory for backup if it doesn't exist
ssh $REMOTE mkdir -p $REMOTEBACKUP
# Copy the backup across
# -e ssh makes it secure
rsync -azh -e ssh \
--delete \
$BACKUP \
$REMOTE:$REMOTEBACKUP
## Now back up the websites (one-offs)
REMOTEBACKUP=/home/nathan/backups/websites/rn2
# Create the remote directory for backup if it doesn't exist
ssh $REMOTE mkdir -p $REMOTEBACKUP
rsync -azh -e ssh \
--delete \
$HOME/websites \
$REMOTE:$REMOTEBACKUP
## Crontab (has the backup scripts called)
REMOTEBACKUP=/home/nathan/backups/cron/rn2
# Create the remote directory for backup if it doesn't exist
ssh $REMOTE mkdir -p $REMOTEBACKUP
rsync -azh -e ssh \
--delete \
/var/spool/cron/crontabs \
$REMOTE:$REMOTEBACKUP

@ -0,0 +1,41 @@
#!/bin/bash
# TODO: Use a password file instead of passing password to script
# Flags, for running on different devices # Cronjob needs to be on one line
# e.g. 0 0 * * * /home/nathan/scripts/backup/backup_git.sh -d /srv/git -b /srv/dev-disk-by-uuid-d9f9e8fd-c473-450e-919e-c43200a6ac4a/gitBackup/ -r nathan@alphavps.aney.co.uk -R ~/backups/git/lilman/
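# One option for the password TODO above (a sketch, filename assumed): keep the credentials in a chmod-600 client options file and pass it with --defaults-extra-file (must be the first option):
# /home/nathan/.my.backup.cnf:
#   [client]
#   user=admin
#   password=secret
# then: mysql --defaults-extra-file=/home/nathan/.my.backup.cnf -Bse 'show databases'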
while getopts s:d:b:r:R: flag
do
case "${flag}" in
d) DATA=${OPTARG};;
b) BACKUPDIR=${OPTARG};;
r) REMOTE=${OPTARG};;
R) REMOTEBACKUP=${OPTARG};;
esac
done
SERVER=${SERVER:-rn1} # Default if -s wasn't given
BACKUPDIR=${BACKUPDIR:-/home/nathan/backups/$SERVER} # Default if -b wasn't given
DATE=$(date +%Y%m%d)
PASSWORD=password
DBS="$(mysql -u admin -p$(echo $PASSWORD) -Bse 'show databases' | egrep -v '^Database$|hold$' | grep -v 'performance_schema\|information_schema\|mysql')"
REMOTE=${REMOTE:-alphavps} # Default if -r wasn't given
REMOTEBACKUP=${REMOTEBACKUP:-~/backups/sql/$SERVER} # Default if -R wasn't given
for DB in ${DBS[@]};
do
mkdir -p $BACKUPDIR/$DB
# Take the backup
mysqldump -u admin -p$(echo $PASSWORD) $DB > $BACKUPDIR/$DB/${DB}_${DATE}
# cd into the backup dir first so the tarball doesn't embed the full absolute path
cd $BACKUPDIR
# tarball the backup
tar -zcf $DB/${DB}_${DATE}.gz $DB/${DB}_${DATE}
# remove untarballed
rm $BACKUPDIR/$DB/${DB}_${DATE}
ssh $REMOTE mkdir -p $REMOTEBACKUP/$DB
scp $BACKUPDIR/$DB/${DB}_${DATE}.gz $REMOTE:$REMOTEBACKUP/$DB
done
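# Restore sketch (db name and date are placeholders; the .gz is a tarball holding the raw dump):
# cd $BACKUPDIR && tar -xzf somedb/somedb_20240101.gz
# mysql -u admin -p somedb < somedb/somedb_20240101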

@ -0,0 +1,34 @@
#!/bin/bash
# TODO: add a flag for docker/standalone mysql (a rough sketch of one option is below, after the variables)
SERVER=rn2
BACKUPDIR=/home/nathan/backup/sql
DATE=$(date +%Y%m%d)
USER=nathan
PASSWORD=password
REMOTE=alphavps
REMOTEBACKUP=~/backups/sql/$SERVER
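# One possible shape for the docker/standalone flag from the TODO at the top (a sketch, not wired into the commands below):
# while getopts m: flag; do case "${flag}" in m) MODE=${OPTARG};; esac; done # -m docker|standalone
# if [ "$MODE" = "docker" ]; then MYSQL="sudo docker exec mariadb mysql"; else MYSQL="mysql"; fi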
# For a non-docker mysql instance, just drop the 'sudo docker exec mariadb' prefix from the commands below
DBS="$(sudo docker exec mariadb mysql -u$(echo $USER) -p$(echo $PASSWORD) -Bse 'show databases' | egrep -v '^Database$|hold$' | grep -v 'performance_schema\|information_schema\|mysql\|sys')"
for DB in ${DBS[@]};
do
mkdir -p $BACKUPDIR/$DB
# Take the backup
sudo docker exec mariadb mysqldump -u$(echo $USER) -p$(echo $PASSWORD) $DB > $BACKUPDIR/$DB/${DB}_${DATE} # No -t/-i: a TTY would mangle the dump output and fail under cron
# cd into the backup dir first so the tarball doesn't embed the full absolute path
cd $BACKUPDIR
# tarball the backup
tar -zcf $DB/${DB}_${DATE}.gz $DB/${DB}_${DATE}
# remove untarballed
rm $BACKUPDIR/$DB/${DB}_${DATE}
#ssh $REMOTE mkdir -p $REMOTEBACKUP/$DB
#scp $BACKUPDIR/$DB/${DB}_${DATE}.gz $REMOTE:$REMOTEBACKUP/$DB
done