Browse Source

Update postgres-backup-s3 (#127)

- Bump alpine from 3.9 to 3.11
- Add Postgres v12 support (Fixes #115)
- Add multiple or all databases backup (Fixes #10)
- S3_PREFIX can be empty (Fixes #59)
- Add missing Documentation (Fixes #87)
pull/140/head
Alex Kulikovskikh 3 years ago
committed by GitHub
parent
commit
35e560069a
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
  1. 3
      postgres-backup-s3/Dockerfile
  2. 16
      postgres-backup-s3/README.md
  3. 43
      postgres-backup-s3/backup.sh
  4. 2
      postgres-backup-s3/install.sh

3
postgres-backup-s3/Dockerfile

@ -1,10 +1,11 @@
FROM alpine:3.9
FROM alpine:3.11
LABEL maintainer="Johannes Schickling <schickling.j@gmail.com>"
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV POSTGRES_DATABASE **None**
ENV POSTGRES_BACKUP_ALL **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**

16
postgres-backup-s3/README.md

@ -19,6 +19,8 @@ postgres:
pgbackups3:
image: schickling/postgres-backup-s3
depends_on:
- postgres
links:
- postgres
environment:
@ -28,6 +30,7 @@ pgbackups3:
S3_SECRET_ACCESS_KEY: secret
S3_BUCKET: my-bucket
S3_PREFIX: backup
POSTGRES_BACKUP_ALL: "false"
POSTGRES_HOST: host
POSTGRES_DATABASE: dbname
POSTGRES_USER: user
@ -41,3 +44,16 @@ You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="
More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules).
### Backup All Databases
You can backup all available databases by setting `POSTGRES_BACKUP_ALL="true"`.
A single archive named `all_<timestamp>.sql.gz` will be uploaded to S3.
### Endpoints for S3
An endpoint is the URL of the entry point for an AWS web service or an S3-compatible storage provider.
You can specify an alternate endpoint by setting the `S3_ENDPOINT` environment variable, e.g. `protocol://endpoint`.
**Note:** an S3-compatible storage provider requires the `S3_ENDPOINT` environment variable to be set.

43
postgres-backup-s3/backup.sh

@ -18,7 +18,7 @@ if [ "${S3_BUCKET}" = "**None**" ]; then
exit 1
fi
if [ "${POSTGRES_DATABASE}" = "**None**" ]; then
if [ "${POSTGRES_DATABASE}" = "**None**" -a "${POSTGRES_BACKUP_ALL}" != "true" ]; then
echo "You need to set the POSTGRES_DATABASE environment variable."
exit 1
fi
@ -57,12 +57,43 @@ export AWS_DEFAULT_REGION=$S3_REGION
export PGPASSWORD=$POSTGRES_PASSWORD
POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTGRES_EXTRA_OPTS"
echo "Creating dump of ${POSTGRES_DATABASE} database from ${POSTGRES_HOST}..."
if [ -z ${S3_PREFIX+x} ]; then
S3_PREFIX="/"
else
S3_PREFIX="/${S3_PREFIX}/"
fi
if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then
echo "Creating dump of all databases from ${POSTGRES_HOST}..."
pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > dump.sql.gz
echo "Uploading dump to $S3_BUCKET"
cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
pg_dump $POSTGRES_HOST_OPTS $POSTGRES_DATABASE | gzip > dump.sql.gz
echo "SQL backup uploaded successfully"
echo "Uploading dump to $S3_BUCKET"
rm -rf dump.sql.gz
else
OIFS="$IFS"
IFS=','
for DB in $POSTGRES_DATABASE
do
IFS="$OIFS"
echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
pg_dump $POSTGRES_HOST_OPTS $DB | gzip > dump.sql.gz
echo "Uploading dump to $S3_BUCKET"
cat dump.sql.gz | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz || exit 2
cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
echo "SQL backup uploaded successfully"
rm -rf dump.sql.gz
done
fi
echo "SQL backup uploaded successfully"

2
postgres-backup-s3/install.sh

@ -7,7 +7,7 @@ set -eo pipefail
apk update
# install pg_dump
apk add postgresql
apk add postgresql-client
# install s3 tools
apk add python py2-pip

Loading…
Cancel
Save