Browse Source

Adding encryption option to postgres s3 backup

pull/153/head
Mike Bianco 2 years ago
parent
commit
6506a9c0b1
  1. 4
      postgres-backup-s3/Dockerfile
  2. 4
      postgres-backup-s3/README.md
  3. 47
      postgres-backup-s3/backup.sh
  4. 15
      postgres-backup-s3/install.sh
  5. 3
      postgres-backup-s3/run.sh

4
postgres-backup-s3/Dockerfile

@ -1,4 +1,4 @@
# Base image switched from bare alpine:3.15 to webdevops/go-crond, which
# ships a cron daemon (go-crond) so the hand-installed go-cron binary is
# no longer needed (see install.sh / run.sh changes in this commit).
FROM webdevops/go-crond:master-alpine

LABEL maintainer="Johannes Schickling <schickling.j@gmail.com>"

ADD install.sh install.sh
@ -19,8 +19,10 @@ ENV S3_PATH 'backup'
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV SCHEDULE **None**
# Optional: when set (not **None**), backup.sh AES-256-CBC encrypts the
# dump with this passphrase before uploading.
ENV ENCRYPTION_PASSWORD **None**

ADD run.sh run.sh
ADD backup.sh backup.sh

# Clear the base image's go-crond entrypoint so CMD runs our wrapper;
# run.sh decides whether to exec go-crond itself (scheduled mode).
ENTRYPOINT []
CMD ["sh", "run.sh"]

4
postgres-backup-s3/README.md

@ -57,3 +57,7 @@ An Endpoint is the URL of the entry point for an AWS web service or S3 Compitabl
You can specify an alternate endpoint by setting the `S3_ENDPOINT` environment variable like `protocol://endpoint`.

**Note:** an S3-compatible storage provider requires the `S3_ENDPOINT` environment variable.
### Encryption
You can additionally set the `ENCRYPTION_PASSWORD` environment variable like `-e ENCRYPTION_PASSWORD="superstrongpassword"` to encrypt the backup. It can be decrypted using `openssl aes-256-cbc -d -in backup.sql.gz.enc -out backup.sql.gz -k "superstrongpassword"` (without `-k`, openssl prompts for the passphrase).

47
postgres-backup-s3/backup.sh

@ -1,7 +1,6 @@
#! /bin/sh
# Dump Postgres database(s), optionally encrypt them, and upload to S3.
# -e: abort on any command failure; -o pipefail: a pipeline fails if any
# stage fails (the separate duplicate `set -o pipefail` line was removed).
set -eo pipefail

if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  echo "You need to set the S3_ACCESS_KEY_ID environment variable."
@ -63,19 +62,29 @@ else
S3_PREFIX="/${S3_PREFIX}/" S3_PREFIX="/${S3_PREFIX}/"
fi fi
if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then
SRC_FILE=dump.sql.gz
DEST_FILE=all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz
echo "Creating dump of all databases from ${POSTGRES_HOST}..." echo "Creating dump of all databases from ${POSTGRES_HOST}..."
pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > $SRC_FILE
pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > dump.sql.gz
if [ "${ENCRYPTION_PASSWORD}" != "**None**" ]; then
echo "Encrypting ${SRC_FILE}"
openssl enc -aes-256-cbc -in $SRC_FILE -out ${SRC_FILE}.enc -k $ENCRYPTION_PASSWORD
if [ $? != 0 ]; then
>&2 echo "Error encrypting ${SRC_FILE}"
fi
rm $SRC_FILE
SRC_FILE="${SRC_FILE}.enc"
DEST_FILE="${DEST_FILE}.enc"
fi
echo "Uploading dump to $S3_BUCKET" echo "Uploading dump to $S3_BUCKET"
cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
cat $SRC_FILE | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DEST_FILE}" || exit 2
echo "SQL backup uploaded successfully" echo "SQL backup uploaded successfully"
rm -rf dump.sql.gz
rm -rf $SRC_FILE
else else
OIFS="$IFS" OIFS="$IFS"
IFS=',' IFS=','
@ -83,17 +92,27 @@ else
do do
IFS="$OIFS" IFS="$OIFS"
echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
SRC_FILE=dump.sql.gz
DEST_FILE${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz
pg_dump $POSTGRES_HOST_OPTS $DB | gzip > dump.sql.gz
if [ "${ENCRYPTION_PASSWORD}" != "**None**" ]; then
echo "Encrypting ${SRC_FILE}"
openssl enc -aes-256-cbc -in $SRC_FILE -out ${SRC_FILE}.enc -k $ENCRYPTION_PASSWORD
if [ $? != 0 ]; then
>&2 echo "Error encrypting ${SRC_FILE}"
fi
rm $SRC_FILE
SRC_FILE="${SRC_FILE}.enc"
DEST_FILE="${DEST_FILE}.enc"
fi
echo "Uploading dump to $S3_BUCKET"
echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
pg_dump $POSTGRES_HOST_OPTS $DB | gzip > $SRC_FILE
cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
echo "Uploading dump to $S3_BUCKET"
cat $SRC_FILE | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DEST_FILE}" || exit 2
echo "SQL backup uploaded successfully" echo "SQL backup uploaded successfully"
rm -rf dump.sql.gz
rm -rf $SRC_FILE
done done
fi fi

15
postgres-backup-s3/install.sh

@ -3,21 +3,8 @@
# exit if a command fails
set -eo pipefail

apk update

# postgresql-client: pg_dump/pg_dumpall; openssl: backup encryption;
# aws-cli: S3 uploads. The old curl-downloaded go-cron binary is gone —
# the webdevops/go-crond base image already ships a cron daemon.
apk add postgresql-client openssl aws-cli

# cleanup
rm -rf /var/cache/apk/*

3
postgres-backup-s3/run.sh

@ -9,5 +9,6 @@ fi
if [ "${SCHEDULE}" = "**None**" ]; then
  # No schedule configured: run a single backup and exit.
  sh backup.sh
else
  # Write a root crontab and hand off (exec, PID 1) to the base image's
  # go-crond daemon. printf replaces `echo -e`, whose -e flag is not
  # portable under /bin/sh.
  # NOTE(review): the crontab invokes /backup.sh at the filesystem root
  # while the one-shot branch runs ./backup.sh relative to the workdir —
  # confirm the script's actual location inside the image.
  printf 'SHELL=/bin/sh\n%s /bin/sh /backup.sh\n' "$SCHEDULE" > /etc/crontabs/root
  exec go-crond /etc/crontabs/root
fi
Loading…
Cancel
Save