
Commit

more configuration
s4ke committed Oct 13, 2024
1 parent 00eb388 commit d2baa29
Showing 5 changed files with 46 additions and 45 deletions.
5 changes: 5 additions & 0 deletions devenv/docker-compose.yml
@@ -20,6 +20,7 @@ services:
       dockerfile: Dockerfile
     environment:
       SWARM_NODE_ID: 'umfh3tmcs0v9qyo7bkwsvlhxt'
+      SCRAPE_INTERVAL: "10"
     env_file:
       - .env
     command: ["python", "-u", "exporter.py"]
@@ -33,6 +34,8 @@ services:
     env_file:
       - .env
     command: ["python", "-u", "nodes.py"]
+    environment:
+      - SCRAPE_INTERVAL=60
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
 
@@ -42,4 +45,6 @@ services:
       dockerfile: Dockerfile
     env_file:
       - .env
+    environment:
+      - SCRAPE_INTERVAL=10
     command: ["python", "-u", "merger.py"]
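
Note: this compose change only injects SCRAPE_INTERVAL into each service; how the value is parsed is not visible in the commit (exporter.py below just calls exit_event.wait(SCRAPE_INTERVAL)). A minimal sketch of how a service might read it, assuming a plain integer number of seconds and a fallback default that is not taken from this repository:

import os

# Sketch only: read the interval injected via devenv/docker-compose.yml.
# The 60-second fallback is an assumption for illustration.
SCRAPE_INTERVAL = int(os.environ.get('SCRAPE_INTERVAL', '60'))
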
18 changes: 9 additions & 9 deletions dns.py
@@ -39,7 +39,7 @@ def print_debug(msg):
 def print_timed(msg):
     to_print = '{} [{}]: {}'.format(
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-        'docker_events',
+        'dns',
         msg)
     print(to_print)
 
@@ -114,22 +114,22 @@ def get_networks_from_container_id(container_id):
 
 def load_network_data_from_dns_s3(bucket, object_name):
     """
-    Load network data from a JSON file stored in Hetzner S3.
+    Load network data from a JSON file stored in S3.
     :param bucket: The S3 bucket name
     :param object_name: The name of the object (file) in the S3 bucket
     :return: The loaded network data as a dictionary, or an empty dictionary if the file is not found
     """
-    # Hetzner S3 configuration
+    # S3 configuration
     dns_s3_endpoint = os.environ['DNS_S3_ENDPOINT']
-    hetzner_access_key = os.environ['HETZNER_ACCESS_KEY']
-    hetzner_secret_key = os.environ['HETZNER_SECRET_KEY']
+    dns_s3_access_key = os.environ['DNS_S3_ACCESS_KEY']
+    dns_s3_secret_key = os.environ['DNS_S3_SECRET_KEY']
 
-    # Create the S3 client with custom endpoint for Hetzner
+    # Create the S3 client with custom endpoint
     s3_client = boto3.client('s3',
                              endpoint_url=dns_s3_endpoint,
-                             aws_access_key_id=hetzner_access_key,
-                             aws_secret_access_key=hetzner_secret_key)
+                             aws_access_key_id=dns_s3_access_key,
+                             aws_secret_access_key=dns_s3_secret_key)
 
     try:
         # Fetch the object from S3
@@ -146,7 +146,7 @@ def load_network_data_from_dns_s3(bucket, object_name):
         print_timed(f"File {object_name} not found in bucket {bucket}. Returning empty dictionary.")
         return {}
     except Exception as e:
-        print_timed(f"Error fetching {object_name} from Hetzner S3: {e}")
+        print_timed(f"Error fetching {object_name} from S3: {e}")
         return {}
 
 def load_network_data():
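
For context, a hypothetical call to the renamed helper. The bucket variable and the node-data/<node-id>.json key pattern are taken from exporter.py later in this commit; the example node id is the value from devenv/docker-compose.yml, and the real call site inside load_network_data is collapsed in this view:

import os

# Hypothetical usage sketch, not code from this commit.
bucket = os.environ['DNS_S3_BUCKET_NAME']
node_id = 'umfh3tmcs0v9qyo7bkwsvlhxt'
network_data = load_network_data_from_dns_s3(bucket, f"node-data/{node_id}.json")
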
32 changes: 14 additions & 18 deletions exporter.py
@@ -42,7 +42,7 @@ def handle_shutdown(signal: Any, frame: Any) -> None:
 def print_timed(msg):
     to_print = '{} [{}]: {}'.format(
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-        'docker_events',
+        'exporter',
         msg)
     print(to_print)
 
@@ -173,32 +173,30 @@ def save_network_data_to_json(network_data, filename):
     print_timed(f"Network data saved to {filename}")
 
 
-def upload_to_dns_s3(file_name, bucket, object_name=None):
-    """Upload a file to Hetzner S3 bucket.
+def upload_to_dns_s3(data, bucket, object_name=None):
+    """Upload a file to S3 bucket.
-    :param file_name: File to upload
+    :param data: File to upload
     :param bucket: Bucket to upload to
     :param object_name: S3 object name. If not specified then file_name is used
     :return: True if file was uploaded, else False
     """
-    # Hetzner S3 configuration
+    # S3 configuration
     dns_s3_endpoint = os.environ['DNS_S3_ENDPOINT']
-    hetzner_access_key = os.environ['HETZNER_ACCESS_KEY']
-    hetzner_secret_key = os.environ['HETZNER_SECRET_KEY']
+    dns_s3_access_key = os.environ['DNS_S3_ACCESS_KEY']
+    dns_s3_secret_key = os.environ['DNS_S3_SECRET_KEY']
 
-    # Create the S3 client with custom endpoint for Hetzner
+    # Create the S3 client with custom endpoint
     s3_client = boto3.client('s3',
                              endpoint_url=dns_s3_endpoint,
-                             aws_access_key_id=hetzner_access_key,
-                             aws_secret_access_key=hetzner_secret_key)
+                             aws_access_key_id=dns_s3_access_key,
+                             aws_secret_access_key=dns_s3_secret_key)
 
     try:
-        if object_name is None:
-            object_name = file_name
-        s3_client.upload_file(file_name, bucket, object_name)
-        print_timed(f"File {file_name} uploaded to {bucket}/{object_name} on Hetzner S3")
+        s3_client.put_object(Bucket=bucket, Key=object_name, Body=json.dumps(data, indent=4))
+        print_timed(f"File {object_name} uploaded to {bucket}/{object_name} on S3")
     except Exception as e:
-        print_timed(f"Error uploading {file_name} to Hetzner S3: {e}")
+        print_timed(f"Error uploading {object_name} to S3: {e}")
         return False
     return True
 
@@ -215,9 +213,7 @@ def upload_to_dns_s3(file_name, bucket, object_name=None):
         network_data = fetch_containers_and_aliases(network_data)
 
         filename = os.environ['SWARM_NODE_ID']
-        save_network_data_to_json(network_data, filename)
-
         bucket_name = os.environ['DNS_S3_BUCKET_NAME']
-        upload_to_dns_s3(filename, bucket_name, object_name=f"node-data/{filename}.json")
+        upload_to_dns_s3(network_data, bucket_name, object_name=f"node-data/{filename}.json")
 
         exit_event.wait(SCRAPE_INTERVAL)
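
The exporter now serializes network_data in memory and writes it with put_object instead of saving a local JSON file and calling upload_file. A standalone sketch of that upload path, using the same DNS_S3_* variables; the bucket, key, and payload values here are placeholders:

import json
import os

import boto3

# Sketch of the put_object-based upload introduced above; values are illustrative.
s3_client = boto3.client('s3',
                         endpoint_url=os.environ['DNS_S3_ENDPOINT'],
                         aws_access_key_id=os.environ['DNS_S3_ACCESS_KEY'],
                         aws_secret_access_key=os.environ['DNS_S3_SECRET_KEY'])
s3_client.put_object(Bucket=os.environ['DNS_S3_BUCKET_NAME'],
                     Key='node-data/example-node.json',
                     Body=json.dumps({'hostname': 'example'}, indent=4))
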
16 changes: 8 additions & 8 deletions merger.py
@@ -23,7 +23,7 @@ def print_timed(msg):
     """Print a message with a timestamp for better debugging."""
     to_print = '{} [{}]: {}'.format(
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-        'docker_events',
+        'merger',
         msg)
     print(to_print)
 
@@ -32,8 +32,8 @@ def load_json_from_s3(bucket, key):
     print_timed(f"Attempting to load JSON from S3: {key}")
     s3_client = boto3.client('s3',
                              endpoint_url=os.getenv('DNS_S3_ENDPOINT'),
-                             aws_access_key_id=os.getenv('HETZNER_ACCESS_KEY'),
-                             aws_secret_access_key=os.getenv('HETZNER_SECRET_KEY'))
+                             aws_access_key_id=os.getenv('DNS_S3_ACCESS_KEY'),
+                             aws_secret_access_key=os.getenv('DNS_S3_SECRET_KEY'))
     try:
         response = s3_client.get_object(Bucket=bucket, Key=key)
         content = response['Body'].read().decode('utf-8')
@@ -48,8 +48,8 @@ def save_json_to_s3(data, bucket, key):
     print_timed(f"Attempting to upload JSON to S3: {key}")
     s3_client = boto3.client('s3',
                              endpoint_url=os.getenv('DNS_S3_ENDPOINT'),
-                             aws_access_key_id=os.getenv('HETZNER_ACCESS_KEY'),
-                             aws_secret_access_key=os.getenv('HETZNER_SECRET_KEY'))
+                             aws_access_key_id=os.getenv('DNS_S3_ACCESS_KEY'),
+                             aws_secret_access_key=os.getenv('DNS_S3_SECRET_KEY'))
     try:
         s3_client.put_object(Bucket=bucket, Key=key, Body=json.dumps(data, indent=4))
         print_timed(f"Successfully uploaded {key} to S3")
@@ -96,10 +96,10 @@ def clean_up_old_files(bucket, valid_files):
     print_timed(f"Starting cleanup of old files in node-data folder.")
     s3_client = boto3.client('s3',
                              endpoint_url=os.getenv('DNS_S3_ENDPOINT'),
-                             aws_access_key_id=os.getenv('HETZNER_ACCESS_KEY'),
-                             aws_secret_access_key=os.getenv('HETZNER_SECRET_KEY'))
+                             aws_access_key_id=os.getenv('DNS_S3_ACCESS_KEY'),
+                             aws_secret_access_key=os.getenv('DNS_S3_SECRET_KEY'))
     try:
-        response = s3_client.list_objects_v2(Bucket=bucket, Prefix="node-data/")
+        response = s3_client.list_objects(Bucket=bucket, Prefix="node-data/")
         if "Contents" in response:
             for item in response["Contents"]:
                 file_key = item["Key"]
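
One behavioural note on the listing change: both list_objects and list_objects_v2 return at most 1000 keys per response. If the node-data/ prefix could ever grow beyond that, a paginated variant (not part of this commit, sketched here against the same environment variables) would look like:

import os

import boto3

# Sketch only: iterate over all keys under node-data/ regardless of count.
s3_client = boto3.client('s3',
                         endpoint_url=os.getenv('DNS_S3_ENDPOINT'),
                         aws_access_key_id=os.getenv('DNS_S3_ACCESS_KEY'),
                         aws_secret_access_key=os.getenv('DNS_S3_SECRET_KEY'))
paginator = s3_client.get_paginator('list_objects')
for page in paginator.paginate(Bucket=os.getenv('DNS_S3_BUCKET_NAME'), Prefix="node-data/"):
    for item in page.get("Contents", []):
        print(item["Key"])
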
20 changes: 10 additions & 10 deletions nodes.py
@@ -50,7 +50,7 @@ def print_debug(msg):
 def print_timed(msg):
     to_print = '{} [{}]: {}'.format(
         datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
-        'docker_events',
+        'nodes',
         msg)
     print(to_print)
 
@@ -84,31 +84,31 @@ def save_json(data, filename):
 
 
 def upload_to_dns_s3(file_name, bucket, object_name=None):
-    """Upload a file to Hetzner S3 bucket.
+    """Upload a file to S3 bucket.
     :param file_name: File to upload
     :param bucket: Bucket to upload to
     :param object_name: S3 object name. If not specified then file_name is used
     :return: True if file was uploaded, else False
     """
-    # Hetzner S3 configuration
+    # S3 configuration
     dns_s3_endpoint = os.environ['DNS_S3_ENDPOINT']
-    hetzner_access_key = os.environ['HETZNER_ACCESS_KEY']
-    hetzner_secret_key = os.environ['HETZNER_SECRET_KEY']
+    dns_s3_access_key = os.environ['DNS_S3_ACCESS_KEY']
+    dns_s3_secret_key = os.environ['DNS_S3_SECRET_KEY']
 
-    # Create the S3 client with custom endpoint for Hetzner
+    # Create the S3 client with custom endpoint
     s3_client = boto3.client('s3',
                              endpoint_url=dns_s3_endpoint,
-                             aws_access_key_id=hetzner_access_key,
-                             aws_secret_access_key=hetzner_secret_key)
+                             aws_access_key_id=dns_s3_access_key,
+                             aws_secret_access_key=dns_s3_secret_key)
 
     try:
         if object_name is None:
             object_name = file_name
         s3_client.upload_file(file_name, bucket, object_name)
-        print_timed(f"File {file_name} uploaded to {bucket}/{object_name} on Hetzner S3")
+        print_timed(f"File {file_name} uploaded to {bucket}/{object_name} on S3")
     except Exception as e:
-        print_timed(f"Error uploading {file_name} to Hetzner S3: {e}")
+        print_timed(f"Error uploading {file_name} to S3: {e}")
         return False
     return True
 
