
Added folder-backup-s3

pull/97/head
Zhong Huiwen 7 years ago
commit fb38f788d2
  1. folder-backup-s3/Dockerfile (+20)
  2. folder-backup-s3/README.md (+22)
  3. folder-backup-s3/backup.sh (+102)
  4. folder-backup-s3/install.sh (+24)
  5. folder-backup-s3/run.sh (+13)

20
folder-backup-s3/Dockerfile

@@ -0,0 +1,20 @@
FROM alpine:latest
LABEL maintainer="Zhong Huiwen <zhonghuiwen@gmail.com>"
ADD install.sh install.sh
RUN sh install.sh && rm install.sh
ENV DATA_PATH **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV S3_PREFIX 'folder-backup'
ENV SCHEDULE **None**
ADD run.sh run.sh
ADD backup.sh backup.sh
CMD ["sh", "run.sh"]
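With the defaults above, a container runs a single backup and exits unless `SCHEDULE` is set. A minimal sketch of building the image and running a one-off backup (the tag, host path, and credentials are placeholders; `DATA_PATH` must point at the mounted folder):

```sh
# build from the folder-backup-s3 directory (the tag name is arbitrary)
$ docker build -t folder-backup-s3 folder-backup-s3/

# back up a mounted host folder once, then exit
$ docker run --rm \
    -v /srv/app-data:/data \
    -e DATA_PATH=/data \
    -e S3_ACCESS_KEY_ID=key \
    -e S3_SECRET_ACCESS_KEY=secret \
    -e S3_BUCKET=my-bucket \
    folder-backup-s3
```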

22
folder-backup-s3/README.md

@@ -0,0 +1,22 @@
# folder-backup-s3
Backup a folder in a docker container to S3
## Basic usage
```sh
$ docker run -v /your/folder:/data -e DATA_PATH=/data -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup gameboy1990/folder-backup-s3
```
## Environment variables
- `DATA_PATH` the folder to back up *required*
- `S3_ACCESS_KEY_ID` your AWS access key *required*
- `S3_SECRET_ACCESS_KEY` your AWS secret key *required*
- `S3_BUCKET` your AWS S3 bucket path *required*
- `S3_PREFIX` path prefix in your bucket (default: 'folder-backup')
- `S3_REGION` the AWS S3 bucket region (default: us-west-1)
- `S3_ENDPOINT` the AWS endpoint URL, for S3-compliant APIs such as [minio](https://minio.io) (default: none)
- `S3_S3V4` set to `yes` to enable AWS Signature Version 4, required for [minio](https://minio.io) servers (default: no)
- `SCHEDULE` backup schedule as a cron expression, executed by [go-cron](https://github.com/odise/go-cron); if unset, the backup runs once and the container exits (see the example below)
- `DELETE_OLDER_THAN` delete backups older than the given age, e.g. `"30 days"`; accepts any relative date string understood by [date](https://ss64.com/bash/date.html) `-d` (default: keep everything)
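
One possible setup for a daily scheduled backup with 30-day retention (host path, bucket, and credentials are placeholders; the six-field, seconds-first cron expression is an assumption based on the cron library used by go-cron v0.0.6):

```sh
$ docker run -d \
    -v /srv/app-data:/data \
    -e DATA_PATH=/data \
    -e S3_ACCESS_KEY_ID=key \
    -e S3_SECRET_ACCESS_KEY=secret \
    -e S3_BUCKET=my-bucket \
    -e SCHEDULE="0 0 0 * * *" \
    -e DELETE_OLDER_THAN="30 days" \
    gameboy1990/folder-backup-s3
```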

102
folder-backup-s3/backup.sh

@@ -0,0 +1,102 @@
#!/bin/sh
set -e

# check configuration; DATA_PATH and S3_BUCKET are hard requirements
if [ "${DATA_PATH}" = "**None**" ]; then
  echo "You need to set the DATA_PATH environment variable."
  exit 1
fi
if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  echo "Warning: You did not set the S3_ACCESS_KEY_ID environment variable."
fi
if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ]; then
  echo "Warning: You did not set the S3_SECRET_ACCESS_KEY environment variable."
fi
if [ "${S3_BUCKET}" = "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${S3_IAMROLE}" != "true" ]; then
  # env vars needed for the aws CLI - only if an IAM role is not used
  export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
  export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
  export AWS_DEFAULT_REGION=$S3_REGION
fi
DUMP_START_TIME=$(date +"%Y-%m-%dT%H%M%SZ")

copy_s3 () {
  SRC_FILE=$1
  DEST_FILE=$2
  if [ "${S3_ENDPOINT}" = "**None**" ]; then
    AWS_ARGS=""
  else
    AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
  fi
  echo "Uploading ${DEST_FILE} to S3..."
  # stream the archive to S3; the "!" guard keeps a failed upload from
  # aborting the whole script under "set -e"
  if ! cat "$SRC_FILE" | aws $AWS_ARGS s3 cp - "s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE"; then
    >&2 echo "Error uploading ${DEST_FILE} to S3"
  fi
  rm "$SRC_FILE"
}
deleteOld_s3 () {
  if [ "${S3_ENDPOINT}" = "**None**" ]; then
    AWS_ARGS=""
  else
    AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
  fi
  # "aws s3 ls" prints one "date time size filename" line per object
  aws $AWS_ARGS s3 ls "s3://$S3_BUCKET/$S3_PREFIX/" | while read -r line; do
    createDate=$(echo "$line" | awk '{print $1" "$2}')
    createDate=$(date -d "$createDate" +%s)
    olderThan=$(date -d "-${DELETE_OLDER_THAN}" +%s)
    if [ "$createDate" -lt "$olderThan" ]; then
      fileName=$(echo "$line" | awk '{print $4}')
      if [ -n "$fileName" ]; then
        aws $AWS_ARGS s3 rm "s3://$S3_BUCKET/$S3_PREFIX/$fileName"
        echo "Deleted ${fileName} from S3"
      fi
    fi
  done
}
shouldDeleteOld_s3 () {
  # prune only when a retention period was actually configured;
  # DELETE_OLDER_THAN has no default, so it may be empty as well as "**None**"
  if [ -n "${DELETE_OLDER_THAN}" ] && [ "${DELETE_OLDER_THAN}" != "**None**" ]; then
    if ! deleteOld_s3; then
      >&2 echo "Error deleting S3 files older than ${DELETE_OLDER_THAN}"
    fi
  fi
}
echo "Creating folder backup for ${DATA_PATH}..."
DUMP_FILE=/tmp/${DUMP_START_TIME}.backup.tar.gz
rm -rf /tmp/*.backup.tar.gz
tar -zcf $DUMP_FILE -C $DATA_PATH .
if [ $? == 0 ]; then
S3_FILE="${DUMP_START_TIME}.backup.tar.gz"
copy_s3 $DUMP_FILE $S3_FILE
else
>&2 echo "Error creating dump of folder ${DATA_PATH}"
fi
shouldDeleteOld_s3
echo "Folder backup finished"

24
folder-backup-s3/install.sh

@@ -0,0 +1,24 @@
#!/bin/sh
# exit if a command fails
set -e

apk update

# install s3 tools
apk add python py-pip
pip install awscli
apk del py-pip

# install go-cron
apk add curl
curl -L --insecure https://github.com/odise/go-cron/releases/download/v0.0.6/go-cron-linux.gz | zcat > /usr/local/bin/go-cron
chmod u+x /usr/local/bin/go-cron
apk del curl

# install coreutils: backup.sh needs GNU date for relative "-d" parsing
apk add --update coreutils

# cleanup
rm -rf /var/cache/apk/*

13
folder-backup-s3/run.sh

@@ -0,0 +1,13 @@
#!/bin/sh
set -e

if [ "${S3_S3V4}" = "yes" ]; then
  aws configure set default.s3.signature_version s3v4
fi

if [ "${SCHEDULE}" = "**None**" ]; then
  sh backup.sh
else
  exec go-cron "$SCHEDULE" /bin/sh backup.sh
fi
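
When `SCHEDULE` is set, go-cron stays in the foreground and runs `backup.sh` on the given cron expression. A sketch of the equivalent invocation inside the container (the seconds-first expression is an assumption based on go-cron v0.0.6's cron library):

```sh
# run backup.sh every day at 02:30 (fields: sec min hour dom month dow)
$ go-cron "0 30 2 * * *" /bin/sh backup.sh
```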