This script, run from cron, keeps seven days of backups in S3, letting each day of the week overwrite last week's copy. Your cron entry will look like this:
22 18 * * * /path/to/the/script/backup.sh
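Note that cron runs jobs with a minimal environment, so the aws command must be on the crontab's PATH and the crontab user needs AWS credentials configured (for example in ~/.aws/credentials, or via an instance role). One way to handle the PATH side, assuming aws lives in /usr/local/bin, is a PATH line at the top of the crontab:

PATH=/usr/local/bin:/usr/bin:/bin
22 18 * * * /path/to/the/script/backup.sh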
#!/bin/bash
# run from the script's own directory so the skipdbs file below is found
cd `dirname $0`

DIR_ROOT=/var/www
WEB_DIRS=`ls -1 $DIR_ROOT`
# day-of-week names (1-7, Mon-Sun) mean each day's backup overwrites
# last week's copy, keeping a rolling seven days
DAY=`date +%a`
DAY_NUM=`date +%u`
BUCKET=mybackupbucket
REGION=ap-northeast-1

for i in $WEB_DIRS
do
    echo Checking $DIR_ROOT/${i}
    # create a file named backup in each directory that you want to back up;
    # the script checks for it, backing up only the ones you want
    if [ -f $DIR_ROOT/${i}/backup -a -d $DIR_ROOT/${i} ]
    then
        echo Backing up $DIR_ROOT/${i}
        # the awscli tools can copy from stdin to a bucket, so no need to create files locally
        tar --exclude $DIR_ROOT/${i}/tmp -cz $DIR_ROOT/${i} | aws s3 cp - s3://$BUCKET/$DAY_NUM-${i}-$DAY.tar.gz --region $REGION
    fi
done

# create a readonly mysql backup user
MYSQL_USER=myBackupUser
MYSQL_PASS=mySecretBackupUserPassword2017
DB_HOST=yourdbhost.example.com

# skipdbs is a text file with databases to skip, e.g.
# mysql
# information_schema
# don't put a newline after the last db in skipdbs, as grep will filter everything
# and no databases will be backed up
DBS=`mysql --host=$DB_HOST -p$MYSQL_PASS -u $MYSQL_USER \
    --skip-column-names \
    -e "show databases;" | \
    awk '{print $1}' | grep -vf skipdbs`

for DB in $DBS
do
    echo BACKUP $DB
    mysqldump --host=$DB_HOST -p$MYSQL_PASS -u $MYSQL_USER \
        $DB | gzip | aws s3 cp - s3://$BUCKET/$DAY_NUM-$DB-$DAY-db.sql.gz --region $REGION
done

# copy your config and anything else you want to backup
sudo tar -cz /etc | aws s3 cp - s3://$BUCKET/$DAY_NUM-etc-$DAY.tar.gz --region $REGION
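Setting up the two input files the script expects is quick. A sketch, where example.com and performance_schema are placeholder names:

# mark a site for backup by dropping the marker file the script checks for
touch /var/www/example.com/backup

# write skipdbs (in the script's directory) with printf, which unlike echo
# adds no trailing newline, so grep never sees an empty pattern
printf 'mysql\ninformation_schema\nperformance_schema' > skipdbs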
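The script only references the read-only backup user; you create it once on the database server. A minimal sketch of the grants mysqldump typically needs (SHOW VIEW, EVENT and TRIGGER only matter if you dump those object types), using the same placeholder credentials as the script; in practice restrict the '%' host to the machine running the backup:

mysql --host=yourdbhost.example.com -u root -p <<'SQL'
CREATE USER 'myBackupUser'@'%' IDENTIFIED BY 'mySecretBackupUserPassword2017';
GRANT SELECT, LOCK TABLES, SHOW VIEW, EVENT, TRIGGER ON *.* TO 'myBackupUser'@'%';
SQL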
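Because everything is streamed to S3, restores stream back the same way: aws s3 cp with - as the destination writes the object to stdout. A sketch, with mysite and mydb as hypothetical names and Monday's backup (DAY_NUM 1, DAY Mon):

# unpack a site archive in place; GNU tar strips the leading / when it
# creates the archive, so extract relative to /
aws s3 cp s3://mybackupbucket/1-mysite-Mon.tar.gz - --region ap-northeast-1 | tar -xz -C /

# pull a database dump and load it back
aws s3 cp s3://mybackupbucket/1-mydb-Mon-db.sql.gz - --region ap-northeast-1 | \
    gunzip | mysql --host=yourdbhost.example.com -u root -p mydb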