itbm
4 years ago
committed by
GitHub
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with
51 additions and
0 deletions
-
mysql-backup-s3/Dockerfile
-
mysql-backup-s3/README.md
-
mysql-backup-s3/backup.sh
-
postgres-backup-s3/Dockerfile
-
postgres-backup-s3/README.md
-
postgres-backup-s3/backup.sh
|
|
@ -20,6 +20,7 @@ ENV S3_PREFIX 'backup' |
|
|
|
ENV S3_FILENAME **None** |
|
|
|
ENV MULTI_FILES no |
|
|
|
ENV SCHEDULE **None** |
|
|
|
ENV DELETE_OLDER_THAN **None** |
|
|
|
|
|
|
|
ADD run.sh run.sh |
|
|
|
ADD backup.sh backup.sh |
|
|
|
|
|
@ -26,9 +26,16 @@ $ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET |
|
|
|
- `S3_S3V4` set to `yes` to enable AWS Signature Version 4, required for [minio](https://minio.io) servers (default: no) |
|
|
|
- `MULTI_FILES` set to `yes` to create one backup file per database (default: no) |
|
|
|
- `SCHEDULE` backup schedule time, see explanations below |
|
|
|
- `DELETE_OLDER_THAN` delete old backups, see explanation and warning below |
|
|
|
|
|
|
|
### Automatic Periodic Backups |
|
|
|
|
|
|
|
You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="@daily"` to run the backup automatically. |
|
|
|
|
|
|
|
More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). |
|
|
|
|
|
|
|
### Delete Old Backups |
|
|
|
|
|
|
|
You can additionally set the `DELETE_OLDER_THAN` environment variable like `-e DELETE_OLDER_THAN="30 days ago"` to delete old backups. |
|
|
|
|
|
|
|
WARNING: this will delete all files in the S3_PREFIX path, not just those created by this script. |
|
|
@ -107,4 +107,22 @@ else |
|
|
|
fi |
|
|
|
fi |
|
|
|
|
|
|
|
# Prune old backups from the bucket when DELETE_OLDER_THAN is configured.
# WARNING: this removes every object under $S3_PREFIX older than the cutoff,
# not only files created by this script.

# is_older_than CREATED CUTOFF_SPEC
# Returns 0 if CREATED (a "YYYY-MM-DD HH:MM:SS" timestamp from `s3 ls`) is
# strictly older than CUTOFF_SPEC (any `date -d` expression, e.g. "30 days ago").
# Returns 2 if either date cannot be parsed.
is_older_than() {
  created_epoch=$(date -d "$1" +%s) || return 2
  cutoff_epoch=$(date -d "$2" +%s) || return 2
  [ "$created_epoch" -lt "$cutoff_epoch" ]
}

# Fix: the original entered this branch when DELETE_OLDER_THAN was unset or
# empty ("" != "**None**" is true), making `date -d ""` resolve to today's
# midnight and deleting nearly everything. Treat unset/empty like "**None**".
if [ -n "${DELETE_OLDER_THAN:-}" ] && [ "${DELETE_OLDER_THAN}" != "**None**" ]; then
  # `s3 ls` lines look like: "2021-01-02 03:04:05  12345 name.sql.gz"; skip
  # "PRE" (directory) entries, then parse the timestamp and the object name.
  # $AWS_ARGS is deliberately left unquoted: it may carry several CLI options.
  aws $AWS_ARGS s3 ls "s3://${S3_BUCKET}/${S3_PREFIX}/" | grep -v " PRE " | while read -r line; do
    created=$(echo "$line" | awk '{print $1 " " $2}')
    fileName=$(echo "$line" | awk '{print $4}')
    # Fix: the original `[ $fileName != "" ]` is a runtime syntax error
    # ("unary operator expected") whenever $fileName expands to nothing.
    if [ -n "$fileName" ] && is_older_than "$created" "$DELETE_OLDER_THAN"; then
      printf 'Deleting "%s"\n' "$fileName"
      aws $AWS_ARGS s3 rm "s3://${S3_BUCKET}/${S3_PREFIX}/${fileName}"
    fi
  done
fi

echo "SQL backup finished"
|
|
@ -18,6 +18,7 @@ ENV S3_PATH 'backup' |
|
|
|
ENV S3_ENDPOINT **None** |
|
|
|
ENV S3_S3V4 no |
|
|
|
ENV SCHEDULE **None** |
|
|
|
ENV DELETE_OLDER_THAN **None** |
|
|
|
|
|
|
|
ADD run.sh run.sh |
|
|
|
ADD backup.sh backup.sh |
|
|
|
|
|
@ -41,3 +41,9 @@ You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE=" |
|
|
|
|
|
|
|
More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). |
|
|
|
|
|
|
|
### Delete Old Backups |
|
|
|
|
|
|
|
You can additionally set the `DELETE_OLDER_THAN` environment variable like `-e DELETE_OLDER_THAN="30 days ago"` to delete old backups. |
|
|
|
|
|
|
|
WARNING: this will delete all files in the S3_PREFIX path, not just those created by this script. |
|
|
|
|
|
|
@ -65,4 +65,22 @@ echo "Uploading dump to $S3_BUCKET" |
|
|
|
|
|
|
|
cat dump.sql.gz | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz || exit 2 |
|
|
|
|
|
|
|
# Prune old backups from the bucket when DELETE_OLDER_THAN is configured.
# WARNING: this removes every object under $S3_PREFIX older than the cutoff,
# not only files created by this script.

# is_older_than CREATED CUTOFF_SPEC
# Returns 0 if CREATED (a "YYYY-MM-DD HH:MM:SS" timestamp from `s3 ls`) is
# strictly older than CUTOFF_SPEC (any `date -d` expression, e.g. "30 days ago").
# Returns 2 if either date cannot be parsed.
is_older_than() {
  created_epoch=$(date -d "$1" +%s) || return 2
  cutoff_epoch=$(date -d "$2" +%s) || return 2
  [ "$created_epoch" -lt "$cutoff_epoch" ]
}

# Fix: the original entered this branch when DELETE_OLDER_THAN was unset or
# empty ("" != "**None**" is true), making `date -d ""` resolve to today's
# midnight and deleting nearly everything. Treat unset/empty like "**None**".
if [ -n "${DELETE_OLDER_THAN:-}" ] && [ "${DELETE_OLDER_THAN}" != "**None**" ]; then
  # `s3 ls` lines look like: "2021-01-02 03:04:05  12345 name.sql.gz"; skip
  # "PRE" (directory) entries, then parse the timestamp and the object name.
  # $AWS_ARGS is deliberately left unquoted: it may carry several CLI options.
  aws $AWS_ARGS s3 ls "s3://${S3_BUCKET}/${S3_PREFIX}/" | grep -v " PRE " | while read -r line; do
    created=$(echo "$line" | awk '{print $1 " " $2}')
    fileName=$(echo "$line" | awk '{print $4}')
    # Fix: the original `[ $fileName != "" ]` is a runtime syntax error
    # ("unary operator expected") whenever $fileName expands to nothing.
    if [ -n "$fileName" ] && is_older_than "$created" "$DELETE_OLDER_THAN"; then
      printf 'Deleting "%s"\n' "$fileName"
      aws $AWS_ARGS s3 rm "s3://${S3_BUCKET}/${S3_PREFIX}/${fileName}"
    fi
  done
fi

echo "SQL backup uploaded successfully"