#! /bin/sh

set -e
set -o pipefail
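# Fail fast if any required configuration is missing.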
  4. if [ "${S3_ACCESS_KEY_ID}" = "**None**" ]; then
  5. echo "You need to set the S3_ACCESS_KEY_ID environment variable."
  6. exit 1
  7. fi
  8. if [ "${S3_SECRET_ACCESS_KEY}" = "**None**" ]; then
  9. echo "You need to set the S3_SECRET_ACCESS_KEY environment variable."
  10. exit 1
  11. fi
  12. if [ "${S3_BUCKET}" = "**None**" ]; then
  13. echo "You need to set the S3_BUCKET environment variable."
  14. exit 1
  15. fi
  16. if [ "${POSTGRES_DATABASE}" = "**None**" -a "${POSTGRES_BACKUP_ALL}" != "true" ]; then
  17. echo "You need to set the POSTGRES_DATABASE environment variable."
  18. exit 1
  19. fi
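# If POSTGRES_HOST is not set explicitly, fall back to the environment
# variables created by a Docker container link.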
  20. if [ "${POSTGRES_HOST}" = "**None**" ]; then
  21. if [ -n "${POSTGRES_PORT_5432_TCP_ADDR}" ]; then
  22. POSTGRES_HOST=$POSTGRES_PORT_5432_TCP_ADDR
  23. POSTGRES_PORT=$POSTGRES_PORT_5432_TCP_PORT
  24. else
  25. echo "You need to set the POSTGRES_HOST environment variable."
  26. exit 1
  27. fi
  28. fi
  29. if [ "${POSTGRES_USER}" = "**None**" ]; then
  30. echo "You need to set the POSTGRES_USER environment variable."
  31. exit 1
  32. fi
  33. if [ "${POSTGRES_PASSWORD}" = "**None**" ]; then
  34. echo "You need to set the POSTGRES_PASSWORD environment variable or link to a container named POSTGRES."
  35. exit 1
  36. fi
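# Only pass --endpoint-url to the AWS CLI when a custom S3-compatible endpoint is configured.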
  37. if [ "${S3_ENDPOINT}" == "**None**" ]; then
  38. AWS_ARGS=""
  39. else
  40. AWS_ARGS="--endpoint-url ${S3_ENDPOINT}"
  41. fi
# env vars needed for aws tools
export AWS_ACCESS_KEY_ID=$S3_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$S3_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$S3_REGION
export PGPASSWORD=$POSTGRES_PASSWORD

POSTGRES_HOST_OPTS="-h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER $POSTGRES_EXTRA_OPTS"
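# Normalize the S3 key prefix so object keys always start and end with a slash.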
if [ -z ${S3_PREFIX+x} ]; then
  S3_PREFIX="/"
else
  S3_PREFIX="/${S3_PREFIX}/"
fi
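# Either dump every database into a single archive, or dump each listed database separately.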
  53. if [ "${POSTGRES_BACKUP_ALL}" == "true" ]; then
  54. echo "Creating dump of all databases from ${POSTGRES_HOST}..."
  55. pg_dumpall -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER | gzip > dump.sql.gz
  56. echo "Uploading dump to $S3_BUCKET"
  57. cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}all_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
  58. echo "SQL backup uploaded successfully"
  59. rm -rf dump.sql.gz
  60. else
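  # POSTGRES_DATABASE may be a comma-separated list; dump and upload each database in turn.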
  61. OIFS="$IFS"
  62. IFS=','
  63. for DB in $POSTGRES_DATABASE
  64. do
  65. IFS="$OIFS"
  66. echo "Creating dump of ${DB} database from ${POSTGRES_HOST}..."
  67. pg_dump $POSTGRES_HOST_OPTS $DB | gzip > dump.sql.gz
  68. echo "Uploading dump to $S3_BUCKET"
  69. cat dump.sql.gz | aws $AWS_ARGS s3 cp - "s3://${S3_BUCKET}${S3_PREFIX}${DB}_$(date +"%Y-%m-%dT%H:%M:%SZ").sql.gz" || exit 2
  70. echo "SQL backup uploaded successfully"
  71. rm -rf dump.sql.gz
  72. done
  73. fi