
Merge pull request #51 from adamgoose/master

mysql-backup-s3: Adding support for custom S3 endpoint
Johannes Schickling authored 8 years ago (committed by GitHub)
commit 8e10b6f5dd
Changed files:
1. mysql-backup-s3/Dockerfile (1 change)
2. mysql-backup-s3/README.md (1 change)
3. mysql-backup-s3/backup.sh (8 changes)

mysql-backup-s3/Dockerfile (1 change)

@@ -14,6 +14,7 @@ ENV S3_ACCESS_KEY_ID **None**
 ENV S3_SECRET_ACCESS_KEY **None**
 ENV S3_BUCKET **None**
 ENV S3_REGION us-west-1
+ENV S3_ENDPOINT **None**
 ENV S3_PREFIX 'backup'
 ENV MULTI_FILES no
 ENV SCHEDULE **None**

mysql-backup-s3/README.md (1 change)

@@ -21,6 +21,7 @@ $ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET
 - `S3_BUCKET` your AWS S3 bucket path *required*
 - `S3_PREFIX` path prefix in your bucket (default: 'backup')
 - `S3_REGION` the AWS S3 bucket region (default: us-west-1)
+- `S3_ENDPOINT` the AWS Endpoint URL, for S3 Compliant APIs such as [minio](https://minio.io) (default: none)
 - `MULTI_FILES` Allow to have one file per database if set `yes` default: no)
 - `SCHEDULE` backup schedule time, see explainatons below
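Not part of the diff, but for orientation: a hedged sketch of how the new variable slots into the `docker run` invocation the README hunk truncates. The image name, credentials, bucket, and MinIO URL below are placeholders, not values taken from this PR.

```sh
# Hypothetical invocation against a MinIO (S3-compatible) endpoint.
# Every value here is a placeholder; S3_ENDPOINT is the variable this PR adds.
# Database connection variables are omitted for brevity.
docker run \
  -e S3_ACCESS_KEY_ID=key \
  -e S3_SECRET_ACCESS_KEY=secret \
  -e S3_BUCKET=my-backups \
  -e S3_ENDPOINT=http://minio:9000 \
  schickling/mysql-backup-s3
```

Leaving `S3_ENDPOINT` unset keeps the previous behaviour: the default `**None**` is detected in backup.sh and the upload goes straight to AWS S3.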

mysql-backup-s3/backup.sh (8 changes)

@@ -44,9 +44,15 @@ copy_s3 () {
   SRC_FILE=$1
   DEST_FILE=$2
 
+  if [ "${S3_ENDPOINT}" == "**None**" ]; then
+    AWS_ARGS=""
+  else
+    AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
+  fi
+
   echo "Uploading ${DEST_FILE} on S3..."
 
-  cat $SRC_FILE | aws s3 cp - s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE
+  cat $SRC_FILE | aws $AWS_ARGS s3 cp - s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE
 
   if [ $? != 0 ]; then
     >&2 echo "Error uploading ${DEST_FILE} on S3"
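The conditional above only changes the arguments passed to the AWS CLI. As a sanity check, here is a minimal standalone sketch (bash, echoing instead of uploading) of the two command lines `copy_s3` now produces; the bucket, prefix, file name, and endpoint values are placeholders:

```sh
# Minimal sketch of the endpoint handling added to copy_s3 (not the PR's code verbatim).
# S3_ENDPOINT, S3_BUCKET, S3_PREFIX and DEST_FILE are placeholder values.
S3_ENDPOINT="${S3_ENDPOINT:-**None**}"
S3_BUCKET="my-backups"
S3_PREFIX="backup"
DEST_FILE="dump.sql.gz"

if [ "${S3_ENDPOINT}" == "**None**" ]; then
  AWS_ARGS=""                                # default: talk to AWS S3 directly
else
  AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"   # e.g. a MinIO server
fi

# Echo the resulting command instead of running it:
echo aws ${AWS_ARGS} s3 cp - "s3://${S3_BUCKET}/${S3_PREFIX}/${DEST_FILE}"
```

Run as-is it prints the unchanged `aws s3 cp` form; run with `S3_ENDPOINT=http://minio:9000` it prints the same command with `--endpoint-url http://minio:9000` inserted, which is the only behavioural difference this PR introduces.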
