Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file added s3-tooling/README.md
Empty file.
104 changes: 104 additions & 0 deletions s3-tooling/s3_analysis.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import boto3
import json
import argparse


def _collect_size_and_prefixes(s3_resource, bucket_name):
    """Return (total size in bytes, set of key prefixes) for *bucket_name*.

    A "prefix" is the folder-like portion of an object key (everything
    before the last '/'). Objects stored at the bucket root contribute no
    prefix — previously they injected an empty string '' into the result.
    """
    total_size = 0
    prefixes = set()
    for obj in s3_resource.Bucket(bucket_name).objects.all():
        total_size += obj.size
        if '/' in obj.key:
            prefixes.add(obj.key.rsplit('/', 1)[0])
    return total_size, prefixes


def analyze_bucket(bucket_name, size_unit='bytes', calculate_cost=False):
    """Print an analysis report for a single S3 bucket.

    Collects total size, "subfolder" (key-prefix) names, versioning status,
    bucket policy, static-website-hosting configuration, encryption, and
    tags, then prints them as a formatted report. Each probe is wrapped in
    its own try/except so one failing API call does not abort the report.

    Args:
        bucket_name: Name of the bucket to analyze.
        size_unit: 'bytes' (default), 'mb', or 'gb' for the size figure.
        calculate_cost: When True, also estimate monthly storage cost at
            the S3 Standard rate of $0.023 per GB-month.
    """
    s3_client = boto3.client('s3')
    s3_resource = boto3.resource('s3')

    try:
        print(f"Analyzing bucket: {bucket_name}")
        bucket_detail = {"Bucket Name": bucket_name}

        # Size, subfolders, and (optionally) estimated cost.
        try:
            bucket_size, subfolders = _collect_size_and_prefixes(s3_resource, bucket_name)

            # Report the size in the requested unit.
            if size_unit == 'mb':
                bucket_detail['Size (MB)'] = round(bucket_size / (1024 ** 2), 2)
            elif size_unit == 'gb':
                bucket_detail['Size (GB)'] = round(bucket_size / (1024 ** 3), 2)
            else:  # default: raw byte count
                bucket_detail['Size (Bytes)'] = bucket_size

            # Sorted for deterministic, readable output.
            bucket_detail['Subfolders'] = sorted(subfolders)

            if calculate_cost:
                # Cost is always derived from the GB figure, regardless of
                # the display unit chosen above.
                bucket_size_gb = bucket_size / (1024 ** 3)
                estimated_cost = bucket_size_gb * 0.023  # S3 Standard $/GB-month
                bucket_detail['Estimated Cost (USD)'] = round(estimated_cost, 2)
        except Exception as e:
            bucket_detail['Error (Size/Cost)'] = str(e)

        # Versioning status ('Enabled'/'Suspended'; absent means disabled).
        try:
            versioning = s3_client.get_bucket_versioning(Bucket=bucket_name)
            bucket_detail['Versioning'] = versioning.get('Status', 'Disabled')
        except Exception as e:
            bucket_detail['Error (Versioning)'] = str(e)

        # Bucket policy (the API raises when no policy is attached).
        try:
            policy = s3_client.get_bucket_policy(Bucket=bucket_name)
            bucket_detail['Policy'] = json.loads(policy['Policy'])
        except Exception:
            bucket_detail['Policy'] = "No Policy Attached"

        # Static website hosting (the API raises when not configured).
        try:
            website = s3_client.get_bucket_website(Bucket=bucket_name)
            bucket_detail['Static Hosting'] = "Enabled"
            bucket_detail['Static Hosting Config'] = website
        except Exception:
            bucket_detail['Static Hosting'] = "Disabled"

        # Server-side encryption (the API raises when not configured).
        try:
            encryption = s3_client.get_bucket_encryption(Bucket=bucket_name)
            bucket_detail['Encryption'] = encryption['ServerSideEncryptionConfiguration']
        except Exception:
            bucket_detail['Encryption'] = "Not Configured"

        # Tags (the API raises when the bucket has no tag set).
        try:
            tags = s3_client.get_bucket_tagging(Bucket=bucket_name)
            bucket_detail['Tags'] = tags['TagSet']
        except Exception:
            bucket_detail['Tags'] = "No Tags"

        # Print the collected details; pretty-print nested structures.
        print("\nBucket Analysis Report:")
        for key, value in bucket_detail.items():
            if isinstance(value, (dict, list)):
                print(f"{key}:")
                print(json.dumps(value, indent=4))
            else:
                print(f"{key}: {value}")

    except Exception as e:
        print(f"Error analyzing bucket: {str(e)}")


def main():
    """Parse command-line arguments and run the bucket analysis."""
    arg_parser = argparse.ArgumentParser(
        description="Analyze an S3 bucket and optionally calculate cost."
    )
    arg_parser.add_argument(
        "bucket_name",
        help="The name of the S3 bucket to analyze.",
    )
    arg_parser.add_argument(
        "--size",
        choices=['bytes', 'mb', 'gb'],
        default='bytes',
        help="Specify the size unit for bucket size (bytes, mb, gb). Default is bytes.",
    )
    arg_parser.add_argument(
        "--cost",
        action="store_true",
        help="Calculate the estimated cost of the bucket.",
    )
    opts = arg_parser.parse_args()

    analyze_bucket(opts.bucket_name, size_unit=opts.size, calculate_cost=opts.cost)


if __name__ == "__main__":
    main()
14 changes: 14 additions & 0 deletions s3-tooling/s3_list_all.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import boto3

def list_s3_buckets():
    """Print the name of every S3 bucket in the current AWS account."""
    client = boto3.client('s3')
    try:
        # Fetch the bucket list up front so a failed call never prints
        # a partial report.
        bucket_entries = client.list_buckets()['Buckets']
        print("S3 Buckets:")
        for entry in bucket_entries:
            print(f" - {entry['Name']}")
    except Exception as err:
        print(f"Error: {err}")


if __name__ == "__main__":
    list_s3_buckets()
47 changes: 47 additions & 0 deletions s3-tooling/s3_list_basic_details.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import boto3
import argparse

def calculate_size_and_cost(bucket_name, size_unit, s3_client=None):
    """Return (size, cost) for every object stored in *bucket_name*.

    Pages through the full object listing with the list_objects_v2
    paginator and sums object sizes.

    Args:
        bucket_name: Name of the bucket to measure.
        size_unit: 'bytes', 'mb', or 'gb' — unit of the returned size.
            Any other value falls back to raw bytes (matches the CLI's
            default behavior).
        s3_client: Optional pre-built boto3 S3 client. When omitted a new
            client is created; callers iterating many buckets can pass one
            client in to avoid rebuilding it per call.

    Returns:
        Tuple of (size in the requested unit, estimated monthly cost in
        USD at the S3 Standard rate of $0.023 per GB-month).
    """
    s3 = s3_client if s3_client is not None else boto3.client('s3')
    paginator = s3.get_paginator('list_objects_v2')

    # Sum object sizes across every page; empty pages have no 'Contents'.
    total_size = 0
    for page in paginator.paginate(Bucket=bucket_name):
        total_size += sum(obj['Size'] for obj in page.get('Contents', []))

    # Convert for display; raw bytes are returned unchanged (still int).
    divisor = {'mb': 1024 ** 2, 'gb': 1024 ** 3}.get(size_unit)
    size = total_size / divisor if divisor else total_size

    # Cost is always derived from the GB figure, regardless of size_unit.
    cost_per_gb = 0.023  # S3 Standard storage, USD per GB-month
    cost = (total_size / (1024 ** 3)) * cost_per_gb

    return size, cost

def list_buckets(size_unit):
    """Print name, size, and estimated cost for every bucket in the account."""
    client = boto3.client('s3')
    response = client.list_buckets()

    for entry in response['Buckets']:
        name = entry['Name']
        try:
            size, cost = calculate_size_and_cost(name, size_unit)
            # Bytes are shown whole; other units get two decimal places.
            if size_unit == 'bytes':
                size_text = f"{size:.0f} Bytes"
            else:
                size_text = f"{size:.2f} {size_unit.upper()}"
            print(f"Bucket: {name}, Size: {size_text}, Cost: ${cost:.2f}")
        except Exception as err:
            print(f"Could not calculate size or cost for bucket {name}: {err}")

if __name__ == '__main__':
    # CLI entry point: choose a display unit and list every bucket.
    cli = argparse.ArgumentParser(
        description="List S3 buckets with their size and cost."
    )
    cli.add_argument(
        '--size',
        choices=['bytes', 'mb', 'gb'],
        default='bytes',
        help="Unit for displaying sizes (bytes, mb, or gb). Default is bytes.",
    )
    list_buckets(cli.parse_args().size)