
Commit 4a929cb

A cronjob to create backups of Docker volumes and publish it to S3
1 parent 95a2f88 commit 4a929cb

4 files changed: +135 -0 lines changed


Docker Volume S3 Backup/README.md

+19
@@ -0,0 +1,19 @@
# Docker Volume S3 Backup

## Introduction
This is a script that gathers the data from a Docker volume, compresses it, and uploads it to a defined S3 bucket.

## Usage
This is meant to run repeatedly as a cronjob.

To run this:
* Git clone this repository.
* Set up the secrets.conf file.
* Add the Python file to your root user's crontab (an example entry is sketched below).
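
A minimal crontab sketch, assuming the repository was cloned to `/opt/docker-volume-s3-backup` (a hypothetical path), the dependencies are installed for the root user, and a nightly run at 02:00 is wanted:

```
# Example only: path, interpreter, and schedule are assumptions -- adjust to your setup.
0 2 * * * /usr/bin/python3 /opt/docker-volume-s3-backup/backup.py >> /var/log/docker-volume-backup.log 2>&1
```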

## Helpful notes
If you would like to save your secrets in your repository, you can GPG encrypt/decrypt your secrets :)

To encrypt: `gpg --symmetric --cipher-algo AES256 filename.ext`

To decrypt: `gpg --decrypt filename.ext.gpg > filename.ext`

Docker Volume S3 Backup/backup.py

+93
@@ -0,0 +1,93 @@
import subprocess, os
import boto3
from datetime import datetime
from botocore.exceptions import ClientError
from configparser import ConfigParser, NoOptionError, NoSectionError
from boto3_type_annotations.s3 import Client


class ConfigError(Exception):
    def __init__(self, message):
        self.message = message


class BackupCreationError(Exception):
    def __init__(self, message):
        self.message = message


def current_path() -> str:
    return os.path.dirname(os.path.realpath(__file__))


class Config:
    def __init__(self, filename: str = "secrets.conf"):
        conf = ConfigParser()
        conf.read(os.path.join(current_path(), filename))
        self.conf = conf

    def get_conf(self) -> ConfigParser:
        return self.conf

    def get(self, section: str, option: str):
        if section is None or option is None:
            raise ConfigError("Please provide both SECTION and OPTION")
        try:
            return self.conf.get(section, option)
        except NoOptionError as e:
            raise ConfigError(f"There is no option {option} in section {section}")
        except NoSectionError as e:
            raise ConfigError(
                f"There is no section {section} in the configuration provided"
            )
        except Exception as e:
            raise ConfigError(f"An unknown error occurred. {e}")


def create_backup(config: Config) -> str:
    # Tar up the volume's contents from a throwaway container and write the
    # archive next to this script, named <filename>-<YYYY-MM-DD>.tar.gz.
    today = datetime.now()
    timestamp = today.strftime("%Y-%m-%d")
    filename_base = config.get("backup_data", "filename")
    filename = f"{filename_base}-{timestamp}.tar.gz"
    volume_name = config.get("backup_data", "volume_name")
    volume_directory = config.get("backup_data", "volume_directory")
    path_name = current_path()
    script = f"docker run --rm --volume {volume_name}:{volume_directory} --volume {path_name}:/backup ubuntu tar czf /backup/{filename} {volume_directory}"
    process = subprocess.run(
        script.split(), stderr=subprocess.PIPE, stdout=subprocess.PIPE
    )
    if process.returncode != 0:
        raise BackupCreationError(
            f"Backup could not be created.\n{process.stderr.decode('utf-8')}"
        )
    return filename


def upload_file(file_name: str, bucket_name: str, s3_client: Client) -> bool:
    # Upload the archive to S3, using the file name as the object key.
    try:
        s3_client.upload_file(
            os.path.join(current_path(), file_name), bucket_name, file_name
        )
    except ClientError as e:
        print(e)
        return False
    return True


def get_s3_client(conf: Config) -> Client:
    return boto3.client(
        "s3",
        aws_access_key_id=conf.get("aws_s3_credentials", "access_key_id"),
        aws_secret_access_key=conf.get("aws_s3_credentials", "secret_key"),
    )


def run():
    conf = Config()
    backup_filename = create_backup(conf)
    upload_success = upload_file(
        file_name=backup_filename,
        bucket_name=conf.get("aws_s3_bucket", "name"),
        s3_client=get_s3_client(conf),
    )
    if upload_success:
        # Remove the local archive once it has landed in S3.
        subprocess.run(f"rm {os.path.join(current_path(), backup_filename)}".split())


if __name__ == "__main__":
    run()
    print("Done!")
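
As a quick sanity check after a run, one could list the newest objects in the bucket by reusing the `Config` and `get_s3_client` helpers above. A minimal sketch, assuming backup.py is importable from the working directory and the same secrets.conf is present:

```python
# Sketch: list the five most recent objects in the configured backup bucket.
from backup import Config, get_s3_client

conf = Config()
client = get_s3_client(conf)
bucket = conf.get("aws_s3_bucket", "name")

resp = client.list_objects_v2(Bucket=bucket)
newest = sorted(resp.get("Contents", []), key=lambda o: o["LastModified"], reverse=True)[:5]
for obj in newest:
    print(obj["Key"], obj["Size"], obj["LastModified"])
```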

Docker Volume S3 Backup/requirements.txt

+8
@@ -0,0 +1,8 @@
boto3==1.15.1
boto3-type-annotations==0.3.1
botocore==1.18.1
jmespath==0.10.0
python-dateutil==2.8.1
s3transfer==0.3.3
six==1.15.0
urllib3==1.25.10

Docker Volume S3 Backup/secrets.conf

+15
@@ -0,0 +1,15 @@
[general]

[aws_s3_credentials]
access_key_id=
secret_key=
username=

[aws_s3_bucket]
name=

[backup_data]
filename=
volume_name=
volume_directory=
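
For reference, a hypothetical filled-in secrets.conf might look like the sketch below; every value is a made-up placeholder, not a real credential or path:

```ini
[general]

[aws_s3_credentials]
access_key_id=AKIAEXAMPLEKEYID
secret_key=example-secret-access-key
username=backup-bot

[aws_s3_bucket]
name=my-volume-backups

[backup_data]
filename=myvolume-backup
volume_name=myvolume
volume_directory=/var/lib/myapp/data
```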
