
Merge remote-tracking branch 'upstream/master' into pg14

pull/138/head
Georg Ledermann, 2 years ago
commit 4b386308f0
3 changed files:
  1. postgres-backup-s3/Dockerfile (1 line changed)
  2. postgres-backup-s3/README.md (16 lines changed)
  3. postgres-backup-s3/backup.sh (43 lines changed)

postgres-backup-s3/Dockerfile

@@ -5,6 +5,7 @@ ADD install.sh install.sh
 RUN sh install.sh && rm install.sh
 ENV POSTGRES_DATABASE **None**
+ENV POSTGRES_BACKUP_ALL **None**
 ENV POSTGRES_HOST **None**
 ENV POSTGRES_PORT 5432
 ENV POSTGRES_USER **None**

postgres-backup-s3/README.md

@@ -19,6 +19,8 @@ postgres:
 pgbackups3:
   image: schickling/postgres-backup-s3
+  depends_on:
+    - postgres
   links:
     - postgres
   environment:
@@ -28,6 +30,7 @@ pgbackups3:
     S3_SECRET_ACCESS_KEY: secret
     S3_BUCKET: my-bucket
     S3_PREFIX: backup
+    POSTGRES_BACKUP_ALL: "false"
     POSTGRES_HOST: host
     POSTGRES_DATABASE: dbname
     POSTGRES_USER: user
@@ -41,3 +44,16 @@ You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="
 More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules).
+
+### Backup All Databases
+You can back up all available databases by setting `POSTGRES_BACKUP_ALL="true"`.
+A single archive named `all_<timestamp>.sql.gz` will be uploaded to S3.
+
+### Endpoints for S3
+An endpoint is the URL of the entry point for an AWS web service or an S3-compatible storage provider.
+You can specify an alternate endpoint by setting the `S3_ENDPOINT` environment variable like `protocol://endpoint`.
+**Note:** An S3-compatible storage provider requires the `S3_ENDPOINT` environment variable.
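
As a rough sketch of how the new options could be combined on one container (not part of this commit): the hostname, credentials, endpoint URL, and the `S3_ACCESS_KEY_ID` variable name below are all placeholders or assumptions, while `SCHEDULE`, `POSTGRES_BACKUP_ALL`, and `S3_ENDPOINT` are the variables documented in the README changes above.

```sh
# Hypothetical one-off run; all concrete values here are made-up placeholders,
# and S3_ACCESS_KEY_ID is assumed from the image's existing configuration.
docker run -d \
  -e SCHEDULE="@daily" \
  -e POSTGRES_BACKUP_ALL="true" \
  -e POSTGRES_HOST=postgres \
  -e POSTGRES_USER=user \
  -e POSTGRES_PASSWORD=password \
  -e S3_ENDPOINT="https://minio.example.com" \
  -e S3_ACCESS_KEY_ID=key \
  -e S3_SECRET_ACCESS_KEY=secret \
  -e S3_BUCKET=my-bucket \
  -e S3_PREFIX=backup \
  schickling/postgres-backup-s3
```

With `POSTGRES_BACKUP_ALL="true"`, `POSTGRES_DATABASE` can be left unset, which is what the relaxed check in backup.sh below allows.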

postgres-backup-s3/backup.sh

@@ -18,7 +18,7 @@ if [ "${S3_BUCKET}" = "**None**" ]; then
   exit 1
 fi

-if [ "${POSTGRES_DATABASE}" = "**None**" ]; then
+if [ "${POSTGRES_DATABASE}" = "**None**" -a "${POSTGRES_BACKUP_ALL}" != "true" ]; then
   echo "You need to set the POSTGRES_DATABASE environment variable."
   exit 1
 fi
@@ -57,12 +57,43 @@ export AWS_DEFAULT_REGION=$S3_REGION
 export PGPASSWORD=$POSTGRES_PASSWORD
 POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTGRES_EXTRA_OPTS"
-echo "Creating dump of ${POSTGRES_DATABASE} database from ${POSTGRES_HOST}..."
+if [ -z ${S3_PREFIX+x} ]; then
+  S3_PREFIX="/"
+else
+  S3_PREFIX="/${S3_PREFIX}/"
+fi
+if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then
+  echo "Creating dump of all databases from ${POSTGRES_HOST}..."
+  pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > dump.sql.gz
+  echo "Uploading dump to $S3_BUCKET"
+  cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
-pg_dump $POSTGRES_HOST_OPTS $POSTGRES_DATABASE | gzip > dump.sql.gz
+  echo "SQL backup uploaded successfully"
-echo "Uploading dump to $S3_BUCKET"
+  rm -rf dump.sql.gz
+else
+  OIFS="$IFS"
+  IFS=','
+  for DB in $POSTGRES_DATABASE
+  do
+    IFS="$OIFS"
+    echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
+    pg_dump $POSTGRES_HOST_OPTS $DB | gzip > dump.sql.gz
+    echo "Uploading dump to $S3_BUCKET"
-cat dump.sql.gz | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/${POSTGRES_DATABASE}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz || exit 2
+    cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
+    echo "SQL backup uploaded successfully"
+    rm -rf dump.sql.gz
+  done
+fi
-echo "SQL backup uploaded successfully"