
Fixes old integration tests and adds one for mysql

pull/119/head
Rob Harrison 7 years ago
commit 4ccd2653ac
  1. mysql-backup-s3/#backup.sh# (111)
  2. mysql-backup-s3/.#backup.sh (1)
  3. mysql-backup-s3/Dockerfile (1)
  4. mysql-backup-s3/Dockerfile~ (27)
  5. mysql-backup-s3/README.md~ (34)
  6. mysql-backup-s3/backup.sh (3)
  7. mysql-backup-s3/backup.sh~ (110)
  8. mysql-backup-s3/integration-tests.sh (21)
  9. mysql-backup-s3/integration-tests.sh~ (4)
  10. mysql-backup-s3/integration-tests.test.yml (6)
  11. mysql-backup-s3/run.sh (4)
  12. mysql-backup-s3/run.sh~ (13)
  13. postgres-backup-s3/integration-tests.test.yml (2)

mysql-backup-s3/#backup.sh# (111)

@@ -0,0 +1,111 @@
#! /bin/bash

set -e
set -o pipefail

if [ "${S3_ACCESS_KEY_ID}" == "**None**" ]; then
  echo "Warning: You did not set the S3_ACCESS_KEY_ID environment variable."
fi

if [ "${S3_SECRET_ACCESS_KEY}" == "**None**" ]; then
  echo "Warning: You did not set the S3_SECRET_ACCESS_KEY environment variable."
fi

if [ "${S3_BUCKET}" == "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${MYSQL_HOST}" == "**None**" ]; then
  echo "You need to set the MYSQL_HOST environment variable."
  exit 1
fi

if [ "${MYSQL_USER}" == "**None**" ]; then
  echo "You need to set the MYSQL_USER environment variable."
  exit 1
fi

if [ "${MYSQL_PASSWORD}" == "**None**" ]; then
  echo "You need to set the MYSQL_PASSWORD environment variable or link to a container named MYSQL."
  exit 1
fi

if [ "${S3_IAMROLE}" != "true" ]; then
  # env vars needed for aws tools - only if an IAM role is not used
  export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
  export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
  export AWS_DEFAULT_REGION=$S3_REGION
fi

MYSQL_HOST_OPTS="-h $MYSQL_HOST -P $MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD"
DUMP_START_TIME=$(date +"%Y-%m-%dT%H%M%SZ")

copy_s3 () {
  SRC_FILE=$1
  DEST_FILE=$2

  if [ "${S3_ENDPOINT}" == "**None**" ]; then
    AWS_ARGS=""
  else
    AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
  fi

  echo "Uploading ${DEST_FILE} on S3..."

  cat $SRC_FILE | aws $AWS_ARGS s3 cp --storage-class STANDARD_IA - s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE

  if [ $? != 0 ]; then
    >&2 echo "Error uploading ${DEST_FILE} on S3"
  fi

  rm $SRC_FILE
}

# Multi file: yes
if [ ! -z "$(echo $MULTI_FILES | grep -i -E "(yes|true|1)")" ]; then
  if [ "${MYSQLDUMP_DATABASE}" == "--all-databases" ]; then
    DATABASES=`mysql $MYSQL_HOST_OPTS -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema|mysql|sys|innodb)"`
  else
    DATABASES=$MYSQLDUMP_DATABASE
  fi

  for DB in $DATABASES; do
    echo "Creating individual dump of ${DB} from ${MYSQL_HOST}..."

    DUMP_FILE="/tmp/${DB}.sql.gz"

    mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS --databases $DB | gzip > $DUMP_FILE

    if [ $? == 0 ]; then
      if [ "${S3_FILENAME}" == "**None**" ]; then
        S3_FILE="${DUMP_START_TIME}.${DB}.sql.gz"
      else
        S3_FILE="${S3_FILENAME}.${DB}.sql.gz"
      fi

      copy_s3 $DUMP_FILE $S3_FILE
    else
      >&2 echo "Error creating dump of ${DB}"
    fi
  done

# Multi file: no
else
  echo "Creating dump for ${MYSQLDUMP_DATABASE} from ${MYSQL_HOST}..."

  DUMP_FILE="/tmp/dump.sql.gz"

  mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS $MYSQLDUMP_DATABASE | gzip > $DUMP_FILE

  if [ $? == 0 ]; then
    if [ "${S3_FILENAME}" == "**None**" ]; then
      S3_FILE="${DUMP_START_TIME}.dump.sql.gz"
    else
      S3_FILE="${S3_FILENAME}.sql.gz"
    fi

    copy_s3 $DUMP_FILE $S3_FILE
  else
    >&2 echo "Error creating dump of all databases"
  fi
fi

echo "SQL backup finished"

mysql-backup-s3/.#backup.sh (1)

@@ -0,0 +1 @@
robwithhair@Robs-MacBook.local.98085

mysql-backup-s3/Dockerfile (1)

@@ -23,5 +23,6 @@ ENV SCHEDULE **None**
 ADD run.sh run.sh
 ADD backup.sh backup.sh
+ADD integration-tests.sh integration-tests.sh
 CMD ["sh", "run.sh"]

mysql-backup-s3/Dockerfile~ (27)

@@ -0,0 +1,27 @@
FROM alpine:latest
LABEL maintainer="Johannes Schickling <schickling.j@gmail.com>"

ADD install.sh install.sh
RUN sh install.sh && rm install.sh

ENV MYSQLDUMP_OPTIONS --quote-names --quick --add-drop-table --add-locks --allow-keywords --disable-keys --extended-insert --single-transaction --create-options --comments --net_buffer_length=16384
ENV MYSQLDUMP_DATABASE --all-databases
ENV MYSQL_HOST **None**
ENV MYSQL_PORT 3306
ENV MYSQL_USER **None**
ENV MYSQL_PASSWORD **None**
ENV S3_ACCESS_KEY_ID **None**
ENV S3_SECRET_ACCESS_KEY **None**
ENV S3_BUCKET **None**
ENV S3_REGION us-west-1
ENV S3_ENDPOINT **None**
ENV S3_S3V4 no
ENV S3_PREFIX 'backup'
ENV S3_FILENAME **None**
ENV MULTI_FILES no
ENV SCHEDULE **None**

ADD run.sh run.sh
ADD backup.sh backup.sh

CMD ["sh", "run.sh"]

mysql-backup-s3/README.md~ (34)

@@ -0,0 +1,34 @@
# mysql-backup-s3

Backup MySQL to S3 (supports periodic backups & multi files)

## Basic usage

```sh
$ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e MYSQL_USER=user -e MYSQL_PASSWORD=password -e MYSQL_HOST=localhost schickling/mysql-backup-s3
```

## Environment variables

- `MYSQLDUMP_OPTIONS` mysqldump options (default: --quote-names --quick --add-drop-table --add-locks --allow-keywords --disable-keys --extended-insert --single-transaction --create-options --comments --net_buffer_length=16384)
- `MYSQLDUMP_DATABASE` list of databases you want to back up (default: --all-databases)
- `MYSQL_HOST` the MySQL host *required*
- `MYSQL_PORT` the MySQL port (default: 3306)
- `MYSQL_USER` the MySQL user *required*
- `MYSQL_PASSWORD` the MySQL password *required*
- `S3_ACCESS_KEY_ID` your AWS access key *required*
- `S3_SECRET_ACCESS_KEY` your AWS secret key *required*
- `S3_BUCKET` your AWS S3 bucket path *required*
- `S3_PREFIX` path prefix in your bucket (default: 'backup')
- `S3_FILENAME` a consistent filename to overwrite with your backup; if not set, a timestamp is used
- `S3_REGION` the AWS S3 bucket region (default: us-west-1)
- `S3_ENDPOINT` the AWS endpoint URL, for S3-compliant APIs such as [minio](https://minio.io) (default: none)
- `S3_S3V4` set to `yes` to enable AWS Signature Version 4, required for [minio](https://minio.io) servers (default: no)
- `MULTI_FILES` set to `yes` to create one file per database (default: no)
- `SCHEDULE` backup schedule time, see explanation below

### Automatic Periodic Backups

You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="@daily"` to run the backup automatically.

More information about the scheduling can be found [here](http://godoc.org/github.com/robfig/cron#hdr-Predefined_schedules).
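
For illustration, a container that takes a daily backup might be started like this (the credentials, bucket, and host below are placeholders):

```sh
$ docker run -d \
    -e S3_ACCESS_KEY_ID=key \
    -e S3_SECRET_ACCESS_KEY=secret \
    -e S3_BUCKET=my-bucket \
    -e S3_PREFIX=backup \
    -e MYSQL_USER=user \
    -e MYSQL_PASSWORD=password \
    -e MYSQL_HOST=mysql-host \
    -e SCHEDULE="@daily" \
    schickling/mysql-backup-s3
```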

mysql-backup-s3/backup.sh (3)

@@ -1,6 +1,7 @@
-#! /bin/sh
+#! /bin/bash
 set -e
+set -o pipefail
 if [ "${S3_ACCESS_KEY_ID}" == "**None**" ]; then
   echo "Warning: You did not set the S3_ACCESS_KEY_ID environment variable."

mysql-backup-s3/backup.sh~ (110)

@@ -0,0 +1,110 @@
#! /bin/sh

set -e

if [ "${S3_ACCESS_KEY_ID}" == "**None**" ]; then
  echo "Warning: You did not set the S3_ACCESS_KEY_ID environment variable."
fi

if [ "${S3_SECRET_ACCESS_KEY}" == "**None**" ]; then
  echo "Warning: You did not set the S3_SECRET_ACCESS_KEY environment variable."
fi

if [ "${S3_BUCKET}" == "**None**" ]; then
  echo "You need to set the S3_BUCKET environment variable."
  exit 1
fi

if [ "${MYSQL_HOST}" == "**None**" ]; then
  echo "You need to set the MYSQL_HOST environment variable."
  exit 1
fi

if [ "${MYSQL_USER}" == "**None**" ]; then
  echo "You need to set the MYSQL_USER environment variable."
  exit 1
fi

if [ "${MYSQL_PASSWORD}" == "**None**" ]; then
  echo "You need to set the MYSQL_PASSWORD environment variable or link to a container named MYSQL."
  exit 1
fi

if [ "${S3_IAMROLE}" != "true" ]; then
  # env vars needed for aws tools - only if an IAM role is not used
  export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
  export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
  export AWS_DEFAULT_REGION=$S3_REGION
fi

MYSQL_HOST_OPTS="-h $MYSQL_HOST -P $MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD"
DUMP_START_TIME=$(date +"%Y-%m-%dT%H%M%SZ")

copy_s3 () {
  SRC_FILE=$1
  DEST_FILE=$2

  if [ "${S3_ENDPOINT}" == "**None**" ]; then
    AWS_ARGS=""
  else
    AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
  fi

  echo "Uploading ${DEST_FILE} on S3..."

  cat $SRC_FILE | aws $AWS_ARGS s3 cp --storage-class STANDARD_IA - s3://$S3_BUCKET/$S3_PREFIX/$DEST_FILE

  if [ $? != 0 ]; then
    >&2 echo "Error uploading ${DEST_FILE} on S3"
  fi

  rm $SRC_FILE
}

# Multi file: yes
if [ ! -z "$(echo $MULTI_FILES | grep -i -E "(yes|true|1)")" ]; then
  if [ "${MYSQLDUMP_DATABASE}" == "--all-databases" ]; then
    DATABASES=`mysql $MYSQL_HOST_OPTS -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema|mysql|sys|innodb)"`
  else
    DATABASES=$MYSQLDUMP_DATABASE
  fi

  for DB in $DATABASES; do
    echo "Creating individual dump of ${DB} from ${MYSQL_HOST}..."

    DUMP_FILE="/tmp/${DB}.sql.gz"

    mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS --databases $DB | gzip > $DUMP_FILE

    if [ $? == 0 ]; then
      if [ "${S3_FILENAME}" == "**None**" ]; then
        S3_FILE="${DUMP_START_TIME}.${DB}.sql.gz"
      else
        S3_FILE="${S3_FILENAME}.${DB}.sql.gz"
      fi

      copy_s3 $DUMP_FILE $S3_FILE
    else
      >&2 echo "Error creating dump of ${DB}"
    fi
  done

# Multi file: no
else
  echo "Creating dump for ${MYSQLDUMP_DATABASE} from ${MYSQL_HOST}..."

  DUMP_FILE="/tmp/dump.sql.gz"

  mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS $MYSQLDUMP_DATABASE | gzip > $DUMP_FILE

  if [ $? == 0 ]; then
    if [ "${S3_FILENAME}" == "**None**" ]; then
      S3_FILE="${DUMP_START_TIME}.dump.sql.gz"
    else
      S3_FILE="${S3_FILENAME}.sql.gz"
    fi

    copy_s3 $DUMP_FILE $S3_FILE
  else
    >&2 echo "Error creating dump of all databases"
  fi
fi

echo "SQL backup finished"

mysql-backup-s3/integration-tests.sh (21)

@@ -0,0 +1,21 @@
#! /bin/sh

export S3_ACCESS_KEY_ID=123
export S3_SECRET_ACCESS_KEY=123
export S3_BUCKET=123
export MYSQL_HOST=123
export MYSQL_USER=123
export MYSQL_PASSWORD=123

OUT=$(/bin/bash backup.sh)

if [ "$?" == "0" ]; then
  echo "Backup exit code was 0"
  exit 1
fi

if [ "$OUT" == "" ]; then
  echo "OUTPUT was '$OUT'"
  exit 1
fi
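
The test exports dummy credentials and expects `backup.sh` to fail while still producing output; it exits 1 if the backup succeeds or prints nothing. A sketch of exercising it against a locally built image (the image tag here is illustrative):

```sh
$ docker build -t mysql-backup-s3 mysql-backup-s3/
$ docker run --rm mysql-backup-s3 sh integration-tests.sh
```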

mysql-backup-s3/integration-tests.sh~ (4)

@@ -0,0 +1,4 @@
#! /bin/sh
set -e

mysql-backup-s3/integration-tests.test.yml (6)

@@ -0,0 +1,6 @@
version: '3.6'
services:
  sut:
    build:
      context: .
    command: 'sh integration-tests.sh'
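
This compose file matches Docker Hub's automated-test convention, where a `sut` service in a `*.test.yml` file must exit 0 for the build to pass. A sketch of running the same check locally, assuming docker-compose is available:

```sh
$ docker-compose -f integration-tests.test.yml up --build --exit-code-from sut
```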

mysql-backup-s3/run.sh (4)

@@ -7,7 +7,7 @@ if [ "${S3_S3V4}" = "yes" ]; then
 fi
 if [ "${SCHEDULE}" = "**None**" ]; then
-  sh backup.sh
+  /bin/bash backup.sh
 else
-  exec go-cron "$SCHEDULE" /bin/sh backup.sh
+  exec go-cron "$SCHEDULE" /bin/bash backup.sh
 fi

mysql-backup-s3/run.sh~ (13)

@@ -0,0 +1,13 @@
#! /bin/sh

set -e

if [ "${S3_S3V4}" = "yes" ]; then
  aws configure set default.s3.signature_version s3v4
fi

if [ "${SCHEDULE}" = "**None**" ]; then
  sh backup.sh
else
  exec go-cron "$SCHEDULE" /bin/sh backup.sh
fi

postgres-backup-s3/integration-tests.test.yml (2)

@@ -9,7 +9,7 @@ services:
   sut:
     build:
       context: .
-    command: 'sh'
+    command: 'sh integration-tests.sh'
     depends_on:
       - postgres
     environment:
