Merge pull request #19 from kandoio/postgres-restore-s3
Johannes Schickling
9 years ago
4 changed files with 152 additions and 0 deletions
+21 postgres-restore-s3/Dockerfile
+41 postgres-restore-s3/README.md
+17 postgres-restore-s3/install.sh
+73 postgres-restore-s3/restore.sh
postgres-restore-s3/Dockerfile
@@ -0,0 +1,21 @@
FROM alpine:edge
MAINTAINER Johannes Schickling "schickling.j@gmail.com"

ADD install.sh install.sh
RUN sh install.sh && rm install.sh

ENV POSTGRES_DATABASE **None**
ENV POSTGRES_HOST **None**
ENV POSTGRES_PORT 5432
ENV POSTGRES_USER **None**
ENV POSTGRES_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_PREFIX 'backup'
ENV DROP_PUBLIC 'no'

ADD restore.sh restore.sh

CMD ["sh", "restore.sh"]
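For reference, a minimal sketch of building this image from the directory added in the PR and running it with the `**None**` placeholders above overridden; the image tag and the `restore.env` file are illustrative, not part of this change:

```sh
# build the image from the new directory (tag name is illustrative)
docker build -t postgres-restore-s3 postgres-restore-s3/

# restore.env is a hypothetical file holding the variables declared above,
# e.g. S3_BUCKET=my-bucket, POSTGRES_HOST=db.example.com, ...
docker run --rm --env-file restore.env postgres-restore-s3
```
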
postgres-restore-s3/README.md
@@ -0,0 +1,41 @@
# postgres-restore-s3

Restore the latest PostgreSQL backup from S3.

## Usage

Docker:
```sh
$ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e POSTGRES_DATABASE=dbname -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password -e POSTGRES_HOST=localhost schickling/postgres-restore-s3
```

Docker Compose:
```yaml
postgres:
  image: postgres
  environment:
    POSTGRES_USER: user
    POSTGRES_PASSWORD: password

pgrestores3:
  image: schickling/postgres-restore-s3
  links:
    - postgres
  environment:
    S3_REGION: region
    S3_ACCESS_KEY_ID: key
    S3_SECRET_ACCESS_KEY: secret
    S3_BUCKET: my-bucket
    S3_PREFIX: backup
    POSTGRES_DATABASE: dbname
    POSTGRES_USER: user
    POSTGRES_PASSWORD: password
```

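Since the restore container performs a one-shot task and exits, with Docker Compose it would typically be triggered on demand rather than kept running. A minimal sketch, assuming the service name from the example above:

```sh
# run a one-off restore against the linked postgres service, then remove the container
docker-compose run --rm pgrestores3
```
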
### Recreating the Public Schema

You can additionally set the `DROP_PUBLIC` environment variable to `yes` (for example `-e DROP_PUBLIC=yes`) to drop and recreate the `public` schema before the dump is restored, so the restore starts from a clean schema.

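For instance, a destructive restore of the most recent backup could look like the following sketch; every value shown is a placeholder:

```sh
# DROP_PUBLIC=yes wipes the public schema before the dump is applied
docker run --rm \
  -e DROP_PUBLIC=yes \
  -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret \
  -e S3_BUCKET=my-bucket -e S3_PREFIX=backup \
  -e POSTGRES_HOST=localhost -e POSTGRES_DATABASE=dbname \
  -e POSTGRES_USER=user -e POSTGRES_PASSWORD=password \
  schickling/postgres-restore-s3
```
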
postgres-restore-s3/install.sh
@@ -0,0 +1,17 @@
#! /bin/sh

# exit if a command fails
set -e

apk update

# install the postgres client (for psql)
apk add 'postgresql>9.5.0'

# install s3 tools
apk add python py-pip
pip install awscli
apk del py-pip

# cleanup
rm -rf /var/cache/apk/*
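As a quick sanity check that install.sh leaves both clients in place, something like the following could be run against the built image; the image tag is illustrative and the command simply overrides the default CMD:

```sh
# print the versions of the two tools restore.sh relies on
docker run --rm postgres-restore-s3 sh -c 'psql --version && aws --version'
```
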
postgres-restore-s3/restore.sh
@@ -0,0 +1,73 @@
#! /bin/sh

set -e
set -o pipefail

if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  echo "You need to set the S3_ACCESS_KEY_ID environment variable."
  exit 1
fi

if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ]; then
  echo "You need to set the S3_SECRET_ACCESS_KEY environment variable."
  exit 1
fi

if [ "${S3_BUCKET}" = "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${POSTGRES_DATABASE}" = "**None**" ]; then
  echo "You need to set the POSTGRES_DATABASE environment variable."
  exit 1
fi

if [ "${POSTGRES_HOST}" = "**None**" ]; then
  if [ -n "${POSTGRES_PORT_5432_TCP_ADDR}" ]; then
    POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
    POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
  else
    echo "You need to set the POSTGRES_HOST environment variable."
    exit 1
  fi
fi

if [ "${POSTGRES_USER}" = "**None**" ]; then
  echo "You need to set the POSTGRES_USER environment variable."
  exit 1
fi

if [ "${POSTGRES_PASSWORD}" = "**None**" ]; then
  echo "You need to set the POSTGRES_PASSWORD environment variable or link to a container named POSTGRES."
  exit 1
fi

# env vars needed for aws tools
export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$S3_REGION

export PGPASSWORD=$POSTGRES_PASSWORD
POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER"

echo "Finding latest backup"

# sort the listing (aws s3 ls prints the modification date first) and take the newest object's key
LATEST_BACKUP=$(aws s3 ls s3://$S3_BUCKET/$S3_PREFIX/ | sort | tail -n 1 | awk '{ print $4 }')

echo "Fetching ${LATEST_BACKUP} from S3"

aws s3 cp s3://$S3_BUCKET/$S3_PREFIX/${LATEST_BACKUP} dump.sql.gz
gzip -d dump.sql.gz

if [ "${DROP_PUBLIC}" = "yes" ]; then
  echo "Recreating the public schema"
  psql $POSTGRES_HOST_OPTS -d $POSTGRES_DATABASE -c "drop schema public cascade; create schema public;"
fi

echo "Restoring ${LATEST_BACKUP}"

psql $POSTGRES_HOST_OPTS -d $POSTGRES_DATABASE < dump.sql

echo "Restore complete"
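restore.sh expects the objects under `s3://$S3_BUCKET/$S3_PREFIX/` to be gzip-compressed SQL dumps, as produced by the companion postgres-backup-s3 image. A rough sketch of creating a compatible object by hand, where host, user, database, bucket, and prefix are all placeholders:

```sh
# dump one database, compress it, and upload it under the prefix restore.sh lists
pg_dump -h localhost -U user dbname | gzip > dump.sql.gz
aws s3 cp dump.sql.gz "s3://my-bucket/backup/$(date +%Y-%m-%dT%H:%M:%SZ).sql.gz"
```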