From 6506a9c0b16f727bbd13b3fc4a7244c999ee5448 Mon Sep 17 00:00:00 2001
From: Mike Bianco
Date: Tue, 23 Aug 2022 13:54:35 -0600
Subject: [PATCH] Adding encryption option to postgres s3 backup

Backups can now be encrypted by setting the ENCRYPTION_PASSWORD
environment variable; the dump is piped through
`openssl enc -aes-256-cbc` after pg_dump/pg_dumpall and uploaded with
an `.enc` suffix. The base image moves to webdevops/go-crond so the
hand-installed go-cron binary is no longer needed.

---
 postgres-backup-s3/Dockerfile |  4 ++-
 postgres-backup-s3/README.md  |  4 +++
 postgres-backup-s3/backup.sh  | 51 ++++++++++++++++++++++++-----------
 postgres-backup-s3/install.sh | 15 +---------
 postgres-backup-s3/run.sh     |  3 ++-
 5 files changed, 45 insertions(+), 32 deletions(-)

diff --git a/postgres-backup-s3/Dockerfile b/postgres-backup-s3/Dockerfile
index b8ca628..25acf90 100644
--- a/postgres-backup-s3/Dockerfile
+++ b/postgres-backup-s3/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine:3.15
+FROM webdevops/go-crond:master-alpine
 LABEL maintainer="Johannes Schickling "
 
 ADD install.sh install.sh
@@ -19,8 +19,10 @@ ENV S3_PATH 'backup'
 ENV S3_ENDPOINT **None**
 ENV S3_S3V4 no
 ENV SCHEDULE **None**
+ENV ENCRYPTION_PASSWORD **None**
 
 ADD run.sh run.sh
 ADD backup.sh backup.sh
 
+ENTRYPOINT []
 CMD ["sh", "run.sh"]
diff --git a/postgres-backup-s3/README.md b/postgres-backup-s3/README.md
index b6aad85..25e6aa0 100644
--- a/postgres-backup-s3/README.md
+++ b/postgres-backup-s3/README.md
@@ -57,3 +57,7 @@ An Endpoint is the URL of the entry point for an AWS web service or S3 Compitabl
 You can specify an alternate endpoint by setting `S3_ENDPOINT` environment variable like `protocol://endpoint`
 
 **Note:** S3 Compitable Storage Provider requires `S3_ENDPOINT` environment variable
+
+### Encryption
+
+You can additionally set the `ENCRYPTION_PASSWORD` environment variable like `-e ENCRYPTION_PASSWORD="superstrongpassword"` to encrypt the backup. It can be decrypted using `openssl aes-256-cbc -d -in backup.sql.gz.enc -out backup.sql.gz`.
\ No newline at end of file
diff --git a/postgres-backup-s3/backup.sh b/postgres-backup-s3/backup.sh
index 7ab2573..191dc40 100644
--- a/postgres-backup-s3/backup.sh
+++ b/postgres-backup-s3/backup.sh
@@ -1,7 +1,6 @@
 #! /bin/sh
 
 set -eo pipefail
-set -o pipefail
 
 if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
   echo "You need to set the S3_ACCESS_KEY_ID environment variable."
@@ -60,22 +59,32 @@ POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTG
 if [ -z ${S3_PREFIX+x} ]; then
   S3_PREFIX="/"
 else
-	S3_PREFIX="/${S3_PREFIX}/"
+  S3_PREFIX="/${S3_PREFIX}/"
 fi
 
-
 if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then
-  echo "Creating dump of all databases from ${POSTGRES_HOST}..."
+  SRC_FILE=dump.sql.gz
+  DEST_FILE=all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz
 
-  pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > dump.sql.gz
+  echo "Creating dump of all databases from ${POSTGRES_HOST}..."
+  pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > $SRC_FILE
+
+  if [ "${ENCRYPTION_PASSWORD}" != "**None**" ]; then
+    echo "Encrypting ${SRC_FILE}"
+    openssl enc -aes-256-cbc -in $SRC_FILE -out ${SRC_FILE}.enc -k $ENCRYPTION_PASSWORD
+    if [ $? != 0 ]; then
+      >&2 echo "Error encrypting ${SRC_FILE}"
+    fi
+    rm $SRC_FILE
+    SRC_FILE="${SRC_FILE}.enc"
+    DEST_FILE="${DEST_FILE}.enc"
+  fi
 
   echo "Uploading dump to $S3_BUCKET"
-
-  cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
+  cat $SRC_FILE | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DEST_FILE}" || exit 2
 
   echo "SQL backup uploaded successfully"
-
-  rm -rf dump.sql.gz
+  rm -rf $SRC_FILE
 else
   OIFS="$IFS"
   IFS=','
@@ -83,17 +92,27 @@ else
   do
     IFS="$OIFS"
 
-    echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
+    SRC_FILE=dump.sql.gz
+    DEST_FILE=${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz
 
-    pg_dump $POSTGRES_HOST_OPTS $DB | gzip > dump.sql.gz
+    echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
+    pg_dump $POSTGRES_HOST_OPTS $DB | gzip > $SRC_FILE
 
-    echo "Uploading dump to $S3_BUCKET"
+    if [ "${ENCRYPTION_PASSWORD}" != "**None**" ]; then
+      echo "Encrypting ${SRC_FILE}"
+      openssl enc -aes-256-cbc -in $SRC_FILE -out ${SRC_FILE}.enc -k $ENCRYPTION_PASSWORD
+      if [ $? != 0 ]; then
+        >&2 echo "Error encrypting ${SRC_FILE}"
+      fi
+      rm $SRC_FILE
+      SRC_FILE="${SRC_FILE}.enc"
+      DEST_FILE="${DEST_FILE}.enc"
+    fi
 
-    cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
+    echo "Uploading dump to $S3_BUCKET"
+    cat $SRC_FILE | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DEST_FILE}" || exit 2
 
     echo "SQL backup uploaded successfully"
-
-    rm -rf dump.sql.gz
+    rm -rf $SRC_FILE
   done
 fi
-
diff --git a/postgres-backup-s3/install.sh b/postgres-backup-s3/install.sh
index 3f1c676..15ca915 100644
--- a/postgres-backup-s3/install.sh
+++ b/postgres-backup-s3/install.sh
@@ -3,21 +3,8 @@
 # exit if a command fails
 set -eo pipefail
 
-apk update
-
-# install pg_dump
-apk add postgresql-client
-
-# install s3 tools
-apk add aws-cli
-
-# install go-cron
-apk add curl
-curl -L https://github.com/odise/go-cron/releases/download/v0.0.6/go-cron-linux.gz | zcat > /usr/local/bin/go-cron
-chmod u+x /usr/local/bin/go-cron
-apk del curl
-
+apk add postgresql-client openssl aws-cli
 
 # cleanup
 rm -rf /var/cache/apk/*
diff --git a/postgres-backup-s3/run.sh b/postgres-backup-s3/run.sh
index c03f55d..ef80106 100644
--- a/postgres-backup-s3/run.sh
+++ b/postgres-backup-s3/run.sh
@@ -9,5 +9,6 @@ fi
 if [ "${SCHEDULE}" = "**None**" ]; then
   sh backup.sh
 else
-  exec go-cron "$SCHEDULE" /bin/sh backup.sh
+  echo -e "SHELL=/bin/sh\n${SCHEDULE} /bin/sh /backup.sh" > /etc/crontabs/root
+  exec go-crond /etc/crontabs/root
 fi