This repository was archived by the owner on Aug 7, 2023. It is now read-only.

Commit 5b7d2eb

Author: Mohit Garg (committed)
Commit message: Extracted from core production repo, added readme.

1 parent 4d74658 commit 5b7d2eb

File tree

8 files changed: +140 −0 lines changed


Dockerfile

Lines changed: 19 additions & 0 deletions

FROM alpine:latest
LABEL maintainer="[email protected]"

RUN apk update && \
    apk add --no-cache python3 py3-pip && \
    pip3 install --upgrade pip && \
    pip3 install awscli && \
    apk add mysql-client bash openssl coreutils && \
    mkdir -p /opt/backup
ARG HOUR_OF_DAY
#ENV CRON_HOUR=${HOUR_OF_DAY:-23}

WORKDIR /opt/backup
COPY crontab.txt crontab.txt
COPY mysql-backup.sh mysql-backup.sh
COPY backup.sh backup.sh
COPY clean.sh clean.sh
COPY entry.sh entry.sh
COPY script.sh script.sh
RUN chmod 750 mysql-backup.sh backup.sh clean.sh script.sh entry.sh
RUN if [[ -n "$HOUR_OF_DAY" ]] ; then echo $HOUR_OF_DAY && sed -i "s/23/$HOUR_OF_DAY/g" crontab.txt && cat crontab.txt ; else echo "Defaulting to cron hour 23" ; fi
RUN /usr/bin/crontab crontab.txt

CMD ["/opt/backup/entry.sh"]
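
The HOUR_OF_DAY build argument is consumed at build time (the Dockerfile rewrites crontab.txt with sed before the image is baked), so the schedule is changed by rebuilding with --build-arg. A minimal sketch, using the image tag from the README and an example hour of 2:

# default schedule (23:00)
docker build -t backups .

# override the cron hour at build time (example value)
docker build --build-arg HOUR_OF_DAY=2 -t backups .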

README.md

Lines changed: 51 additions & 0 deletions

How to run:
==========

1. Build the image

   docker build -t backups .

2. Run the image

   docker run -idt --env MYSQL_USERNAME=<> --env MYSQL_PASSWORD=<> --env SERVER=<> --env DB_NAME=<> --env BUCKET_NAME=<abc.bucket.com> --env AWS_ACCESS_KEY_ID=<> --env AWS_SECRET_ACCESS_KEY=<> --env HOUR_OF_DAY=23 backups

3. Connect to the network of your DB container (only needed when the DB runs in another container)

   docker network connect <network_name> <backup_container>

4. Verify backups manually

   docker exec -it <backup_container> bash

   Inside the container: sh script.sh


How to run (docker-compose):
==========================

Add this to your docker-compose file:

backups:
  build:
    context: ./backups
    args:
      HOUR_OF_DAY: 23
  depends_on:
    - <db>
  networks:
    - <db>
  environment:
    - BUCKET_NAME=<abc.bucket.com>
    - MYSQL_USERNAME=
    - MYSQL_PASSWORD=
    - SERVER=
    - DB_NAME=
    # Specify AWS credentials, or skip them if using AWS IAM roles
    - AWS_ACCESS_KEY_ID=
    - AWS_SECRET_ACCESS_KEY=
    # The path of the DB dump inside the container, not on the host machine
    - FILE_PATH=/opt/backup/
  restart: always

Replace build: with image: if pulling the image from Docker Hub instead of building from a local folder.
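
For the compose route, a quick sketch of bringing the service up and following its output, assuming the service name backups from the snippet above (use docker-compose on older installations):

docker compose up -d --build backups
docker compose logs -f backups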

backup.sh

Lines changed: 26 additions & 0 deletions

#!/bin/bash

# Upload <FILE>.tar.gz to S3, skipping the upload when the SQL dump has not
# changed since the previous run (compared via an MD5 stored as object metadata).
FILE=${1:-'backup'}
BUCKET=${2}
FILE_PATH=$(dirname "${FILE_PATH}${FILE}")
FILE_PATH="${FILE_PATH}/"

echo "${FILE_PATH}${FILE}"
# MD5 of the tarball (needed for --content-md5) and of the raw SQL dump
# (stored as metadata so the next run can compare against it).
TAR_MD5_SUM=$(openssl md5 -binary "${FILE_PATH}${FILE}.tar.gz" | base64)
SQL_MD5_SUM=$(openssl md5 -binary "${FILE_PATH}${FILE}.sql" | base64)
REMOTE_SQL_MD5_SUM=$(aws s3api head-object --bucket "$BUCKET" --key "${FILE}.tar.gz" --query 'Metadata.sqlmd5checksum' --output text)

echo "$SQL_MD5_SUM"
echo "$REMOTE_SQL_MD5_SUM"

if [[ "$SQL_MD5_SUM" == "$REMOTE_SQL_MD5_SUM" ]]
then
    echo "No changes since last upload. Quitting."
    exit 0
fi

echo "Changes found since last upload. Uploading now."

aws s3api put-object --bucket "$BUCKET" --key "${FILE}.tar.gz" --body "${FILE_PATH}${FILE}.tar.gz" --content-md5 "$TAR_MD5_SUM" --metadata "sqlmd5checksum=$SQL_MD5_SUM"

echo "Backup complete"

clean.sh

Lines changed: 12 additions & 0 deletions

#!/bin/bash

FILENAME=${1:-'backup'}
TMP_PATH=${FILE_PATH:-~/}

echo "Removing the cache files..."
# remove the database dump and its tarball
echo "${TMP_PATH}${FILENAME}"
rm "${TMP_PATH}${FILENAME}.sql"
rm "${TMP_PATH}${FILENAME}.tar.gz"
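
A manual invocation sketch, assuming the dump was written to the default /opt/backup/ path used in the compose example:

FILE_PATH=/opt/backup/ ./clean.sh backup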

crontab.txt

Lines changed: 1 addition & 0 deletions

0 23 * * 1-6 /opt/backup/script.sh /opt/backup > /dev/stdout
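
For reference, building with HOUR_OF_DAY=2 (an example value) would leave this entry rewritten by the Dockerfile's sed as:

0 2 * * 1-6 /opt/backup/script.sh /opt/backup > /dev/stdout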

entry.sh

Lines changed: 2 additions & 0 deletions

#!/bin/bash
# Keep crond in the foreground (-f) at log level 8 (-l 8) so it serves as the container's main process
/usr/sbin/crond -f -l 8
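
Once the container is running, the schedule installed by the Dockerfile can be confirmed from the host; <backup_container> is the placeholder name used in the README:

docker exec <backup_container> crontab -l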

mysql-backup.sh

Lines changed: 16 additions & 0 deletions

#!/bin/bash

# Positional arguments: user, password, host, database, output filename, output directory
MYSQL_USERNAME=${1}
MYSQL_PASSWORD=${2}
SERVER=${3:-'db'}
DATABASE=${4}
FILENAME=${5:-'backup'}
FILE_PATH=${6:-/opt/backup/}

mysqldump -h ${SERVER} --skip-dump-date --quick --user=${MYSQL_USERNAME} --password=${MYSQL_PASSWORD} ${DATABASE} > ${FILE_PATH}${FILENAME}.sql
echo "Done backing up the database to a file."
echo "Starting compression..."
tar czf ${FILE_PATH}${FILENAME}.tar.gz ${FILE_PATH}${FILENAME}.sql
echo "Done compressing the backup file."
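
A manual run outside of cron, with placeholder credentials and the default output path (all values are examples):

./mysql-backup.sh <user> <password> db <database> backup /opt/backup/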

script.sh

Lines changed: 13 additions & 0 deletions

#!/bin/bash

SERVER=${SERVER:-"db"}
FILE_NAME=${FILE_NAME:-"backup"}
CURRENT_DIR=$(dirname "$0")

echo "Step 1. Mysqldump"
$CURRENT_DIR/mysql-backup.sh $MYSQL_USERNAME $MYSQL_PASSWORD $SERVER $DB_NAME $FILE_NAME $FILE_PATH
echo "Step 2. Saving to S3"
$CURRENT_DIR/backup.sh $FILE_NAME $BUCKET_NAME
echo "Step 3. Cleaning it up"
$CURRENT_DIR/clean.sh $FILE_NAME
echo "Done"
