
Add MULTI_FILE env var

Kept full backward compatibility.
Branch: pull/22/head
Olivier Cuypers committed 8 years ago, commit 68bd0dc4b5

Changed files:
  1. mysql-backup-s3/Dockerfile (5 lines changed)
  2. mysql-backup-s3/README.md (20 lines changed)
  3. mysql-backup-s3/backup.sh (45 lines changed)

mysql-backup-s3/Dockerfile

@@ -5,7 +5,7 @@ ADD install.sh install.sh
 RUN sh install.sh && rm install.sh
 ENV MYSQLDUMP_OPTIONS --quote-names --quick --add-drop-table --add-locks --allow-keywords --disable-keys --extended-insert --single-transaction --create-options --comments --net_buffer_length=16384
-ENV MYSQLDUMP_DATABASES *ALL_IN_ONE*
+ENV MYSQLDUMP_DATABASE --all-databases
 ENV MYSQL_HOST **None**
 ENV MYSQL_PORT 3306
 ENV MYSQL_USER **None**
@@ -14,7 +14,8 @@ ENV S3_ACCESS_KEY_ID **None**
 ENV S3_SECRET_ACCESS_KEY **None**
 ENV S3_BUCKET **None**
 ENV S3_REGION us-west-1
-ENV S3_PATH 'backup'
+ENV S3_PREFIX 'backup'
+ENV MULTI_FILES no
 ENV SCHEDULE **None**
 ADD run.sh run.sh
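
A minimal sketch of how a derived image could pin the new defaults introduced above; the `FROM` tag reuses the image name from the README below, and the two database names are hypothetical:

```Dockerfile
# Hypothetical child image that bakes in the new MULTI_FILES behaviour.
FROM schickling/mysql-backup-s3

# One gzipped dump per database (the script accepts yes/true/1, case-insensitively).
ENV MULTI_FILES yes

# Back up only these two (hypothetical) databases; with MULTI_FILES=yes the
# script loops over this list instead of passing it to mysqldump verbatim.
ENV MYSQLDUMP_DATABASE app_db analytics_db
```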

mysql-backup-s3/README.md

@@ -1,13 +1,29 @@
 # mysql-backup-s3
-Backup MySQL to S3 (supports periodic backups)
+Backup MySQL to S3 (supports periodic backups & multiple files)
-## Usage
+## Basic usage
 ```sh
 $ docker run -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret -e S3_BUCKET=my-bucket -e S3_PREFIX=backup -e MYSQL_USER=user -e MYSQL_PASSWORD=password -e MYSQL_HOST=localhost schickling/mysql-backup-s3
 ```
+## Environment variables
+- `MYSQLDUMP_OPTIONS` mysqldump options (default: --quote-names --quick --add-drop-table --add-locks --allow-keywords --disable-keys --extended-insert --single-transaction --create-options --comments --net_buffer_length=16384)
+- `MYSQLDUMP_DATABASE` list of databases you want to back up (default: --all-databases)
+- `MYSQL_HOST` the MySQL host *required*
+- `MYSQL_PORT` the MySQL port (default: 3306)
+- `MYSQL_USER` the MySQL user *required*
+- `MYSQL_PASSWORD` the MySQL password *required*
+- `S3_ACCESS_KEY_ID` your AWS access key *required*
+- `S3_SECRET_ACCESS_KEY` your AWS secret key *required*
+- `S3_BUCKET` your AWS S3 bucket path *required*
+- `S3_PREFIX` path prefix in your bucket (default: 'backup')
+- `S3_REGION` the AWS S3 bucket region (default: us-west-1)
+- `MULTI_FILES` set to `yes` to write one dump file per database (default: no); see the usage sketch after this diff
+- `SCHEDULE` backup schedule time, see explanation below
 ### Automatic Periodic Backups
 You can additionally set the `SCHEDULE` environment variable like `-e SCHEDULE="@daily"` to run the backup automatically.
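
A usage sketch for the new variable, reusing the placeholder credentials from the basic-usage example above; with `MULTI_FILES=yes` each database is uploaded to the bucket as `<timestamp>.<db>.sql.gz`:

```sh
$ docker run \
    -e S3_ACCESS_KEY_ID=key -e S3_SECRET_ACCESS_KEY=secret \
    -e S3_BUCKET=my-bucket -e S3_PREFIX=backup \
    -e MYSQL_USER=user -e MYSQL_PASSWORD=password -e MYSQL_HOST=localhost \
    -e MULTI_FILES=yes \
    schickling/mysql-backup-s3
```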

mysql-backup-s3/backup.sh

@@ -54,26 +54,16 @@ copy_s3 () {
   rm $SRC_FILE
 }
-if [ "${MYSQLDUMP_DATABASES}" == "*ALL_IN_ONE*" ]; then
-  echo "Creating dump of all databases from ${MYSQL_HOST}..."
-  DUMP_FILE="/tmp/dump.sql.gz"
-  mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS --all-databases | gzip > $DUMP_FILE
-  if [ $? == 0 ]; then
-    S3_FILE="${DUMP_START_TIME}.${DB}.sql.gz"
-    copy_s3 $DUMP_FILE $S3_FILE
-  else
-    >&2 echo "Error creating dump of all databases"
-  fi
-elif [ "${MYSQLDUMP_DATABASES}" == "*ALL*" ]; then
-  echo "Mysql options ${MYSQL_HOST_OPTS}";
-  DATABASES=`mysql $MYSQL_HOST_OPTS -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema|mysql)"`
-  for DB in $DATABASES; do
-    echo "Creating dump of ${DB} database from ${MYSQL_HOST}..."
+# Multi file: yes
+if [ ! -z "$(echo $MULTI_FILES | grep -i -E "(yes|true|1)")" ]; then
+  if [ "${MYSQLDUMP_DATABASE}" == "--all-databases" ]; then
+    DATABASES=`mysql $MYSQL_HOST_OPTS -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema|mysql)"`
+  else
+    DATABASES=$MYSQLDUMP_DATABASE
+  fi
+  for DB in $DATABASES; do
+    echo "Creating individual dump of ${DB} from ${MYSQL_HOST}..."
     DUMP_FILE="/tmp/${DB}.sql.gz"
@@ -87,23 +77,20 @@ elif [ "${MYSQLDUMP_DATABASES}" == "*ALL*" ]; then
       >&2 echo "Error creating dump of ${DB}"
     fi
   done
+# Multi file: no
 else
-  for DB in $MYSQLDUMP_DATABASES; do
-    echo "Creating dump of ${DB} database from ${MYSQL_HOST}..."
-    DUMP_FILE="/tmp/${DB}.sql.gz"
-    mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS --databases $DB | gzip > $DUMP_FILE
-    if [ $? == 0 ]; then
-      S3_FILE="${DUMP_START_TIME}.${DB}.sql.gz"
-      copy_s3 $DUMP_FILE $S3_FILE
-    else
-      >&2 echo "Error creating dump of database ${DB}"
-    fi
-  done
+  echo "Creating dump for ${MYSQLDUMP_DATABASE} from ${MYSQL_HOST}..."
+  DUMP_FILE="/tmp/dump.sql.gz"
+  mysqldump $MYSQL_HOST_OPTS $MYSQLDUMP_OPTIONS $MYSQLDUMP_DATABASE | gzip > $DUMP_FILE
+  if [ $? == 0 ]; then
+    S3_FILE="${DUMP_START_TIME}.dump.sql.gz"
+    copy_s3 $DUMP_FILE $S3_FILE
+  else
+    >&2 echo "Error creating dump of all databases"
+  fi
 fi
 echo "SQL backup finished"