diff --git a/postgres-backup-s3/Dockerfile b/postgres-backup-s3/Dockerfile index af5740d..3fd968c 100644 --- a/postgres-backup-s3/Dockerfile +++ b/postgres-backup-s3/Dockerfile @@ -1,10 +1,11 @@ -FROM alpine:3.9 +FROM alpine:3.11 LABEL maintainer="Johannes Schickling " ADD install.sh install.sh RUN sh install.sh && rm install.sh ENV POSTGRES_DATABASE **None** +ENV POSTGRES_BACKUP_ALL **None** ENV POSTGRES_HOST **None** ENV POSTGRES_PORT 5432 ENV POSTGRES_USER **None** diff --git a/postgres-backup-s3/README.md b/postgres-backup-s3/README.md index e963929..b6aad85 100644 --- a/postgres-backup-s3/README.md +++ b/postgres-backup-s3/README.md @@ -19,6 +19,8 @@ postgres: pgbackups3: image: schickling/postgres-backup-s3 + depends_on: + - postgres links: - postgres environment: @@ -28,6 +30,7 @@ pgbackups3: S3_SECRET_ACCESS_KEY: secret S3_BUCKET: my-bucket S3_PREFIX: backup + POSTGRES_BACKUP_ALL: "false" POSTGRES_HOST: host POSTGRES_DATABASE: dbname POSTGRES_USER: user @@ -41,3 +44,16 @@ You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE=" More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules). +### Backup All Databases + +You can back up all available databases by setting `POSTGRES_BACKUP_ALL="true"`. + +A single archive with the name `all_<timestamp>.sql.gz` will be uploaded to S3. + +### Endpoints for S3 + +An endpoint is the URL of the entry point for an AWS web service or an S3-compatible storage provider.
+ +You can specify an alternate endpoint by setting the `S3_ENDPOINT` environment variable in the form `protocol://endpoint` + +**Note:** an S3-compatible storage provider requires the `S3_ENDPOINT` environment variable to be set diff --git a/postgres-backup-s3/backup.sh b/postgres-backup-s3/backup.sh index 3b43360..7ab2573 100644 --- a/postgres-backup-s3/backup.sh +++ b/postgres-backup-s3/backup.sh @@ -18,7 +18,7 @@ if [ "${S3_BUCKET}" = "**None**" ]; then exit 1 fi -if [ "${POSTGRES_DATABASE}" = "**None**" ]; then +if [ "${POSTGRES_DATABASE}" = "**None**" -a "${POSTGRES_BACKUP_ALL}" != "true" ]; then echo "You need to set the POSTGRES_DATABASE environment variable." exit 1 fi @@ -57,12 +57,43 @@ export AWS_DEFAULT_REGION=$S3_REGION export PGPASSWORD=$POSTGRES_PASSWORD POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTGRES_EXTRA_OPTS" -echo "Creating dump of ${POSTGRES_DATABASE} database from ${POSTGRES_HOST}..." +if [ -z ${S3_PREFIX+x} ]; then + S3_PREFIX="/" +else + S3_PREFIX="/${S3_PREFIX}/" +fi + + +if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then + echo "Creating dump of all databases from ${POSTGRES_HOST}..." + + pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > dump.sql.gz + + echo "Uploading dump to $S3_BUCKET" + + cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2 -pg_dump $POSTGRES_HOST_OPTS $POSTGRES_DATABASE | gzip > dump.sql.gz + echo "SQL backup uploaded successfully" -echo "Uploading dump to $S3_BUCKET" + rm -rf dump.sql.gz +else + OIFS="$IFS" + IFS=',' + for DB in $POSTGRES_DATABASE + do + IFS="$OIFS" + + echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
+ + pg_dump $POSTGRES_HOST_OPTS $DB | gzip > dump.sql.gz + + echo "Uploading dump to $S3_BUCKET" -cat dump.sql.gz | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz || exit 2 + cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2 + + echo "SQL backup uploaded successfully" + + rm -rf dump.sql.gz + done +fi -echo "SQL backup uploaded successfully" diff --git a/postgres-backup-s3/install.sh b/postgres-backup-s3/install.sh index bca5a1b..8701728 100644 --- a/postgres-backup-s3/install.sh +++ b/postgres-backup-s3/install.sh @@ -7,7 +7,7 @@ set -eo pipefail apk update # install pg_dump -apk add postgresql +apk add postgresql-client # install s3 tools apk add python py2-pip