Hey everyone! I mentioned in an earlier post that I’m using Object Storage as the perfect excuse to learn a bit of Python (specifically Python 3).
Over the weekend, I built out a little script that tests performance around bucket creation, write/read operations with a 1 MB object, and bucket deletion, all run from your local terminal. It’s missing proper error handling and repeat runs (contributions gratefully accepted; a couple of rough sketches are included below), but it’s a quick way to grab a single snapshot of performance.
Give it a try, and share your results and where you ran the test from!
Also, if you have a better option for testing (especially with better sampling), share it here!
import boto3
import time
import string
import random
#########
# Setup #
#########
# Configuration -- Keys
AWS_ACCESS_KEY_ID = "S3KEY"
AWS_SECRET_ACCESS_KEY = "S3SECRET"
R2_ACCESS_KEY_ID = "R2KEY"
R2_SECRET_ACCESS_KEY = "R2SECRET"
FASTLY_ACCESS_KEY_ID = "FASTLYKEY"
FASTLY_SECRET_ACCESS_KEY = "FASTLYSECRET"
# Configuration -- Endpoints
S3_ENDPOINT = "https://s3.amazonaws.com" # Amazon S3
R2_ENDPOINT = "<your-r2-endpoint>" # Cloudflare R2, e.g. https://<account-id>.r2.cloudflarestorage.com
FASTLY_ENDPOINT = "https://us-west.object.fastlystorage.app" # Fastly Object Storage
# Configuration -- Regions
S3_REGION = "us-west-2" # See full list at https://docs.aws.amazon.com/general/latest/gr/s3.html
R2_REGION = "wnam" # Must be one of: wnam, enam, weur, eeur, apac, auto
FASTLY_REGION = "us-west" # Must be one of: us-west, us-east, eu-central
# Configuration -- Test Data
def generate_random_string(length):
    """Generate a random alphanumeric string of the specified length."""
    letters = string.ascii_letters + string.digits
    return ''.join(random.choice(letters) for _ in range(length))
BUCKET_NAME = "benchmark-bucket"  # S3 bucket names are globally unique -- change this to avoid collisions
TEST_OBJECT_KEY = "test-object"
# Generate a 1 MiB (1,048,576-character) test payload
TEST_OBJECT_DATA = generate_random_string(1048576)
########
# Test #
########
# Initialize clients
s3_client = boto3.client(
    "s3",
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=S3_REGION,
)
r2_client = boto3.client(
    "s3",
    aws_access_key_id=R2_ACCESS_KEY_ID,
    aws_secret_access_key=R2_SECRET_ACCESS_KEY,
    endpoint_url=R2_ENDPOINT,
    region_name=R2_REGION,
)
fsly_client = boto3.client(
    "s3",
    aws_access_key_id=FASTLY_ACCESS_KEY_ID,
    aws_secret_access_key=FASTLY_SECRET_ACCESS_KEY,
    endpoint_url=FASTLY_ENDPOINT,
    region_name=FASTLY_REGION,
)
##########################
# Define the measurement #
##########################
#
# Takes a callable (e.g. a boto3 client method) plus any positional/keyword arguments,
# records a start timestamp, invokes the callable, and records the end time.
# Returns the elapsed time in seconds along with whatever the callable returned.
#
def measure_time(func, *args, **kwargs):
    start_time = time.perf_counter()  # monotonic clock, better suited to interval timing than time.time()
    result = func(*args, **kwargs)
    elapsed_time = time.perf_counter() - start_time
    return elapsed_time, result
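# --- Optional: repeated runs with basic error handling (rough sketch) --------
# As mentioned in the intro, the test below only takes a single snapshot and
# has no error handling. This helper is a sketch of how an operation could be
# sampled several times and summarized; it is NOT wired into the single-shot
# test below, and operations like create_bucket/delete_bucket would need their
# own setup/teardown between runs.
def measure_time_repeated(func, *args, runs=5, **kwargs):
    """Run func `runs` times; return (min, mean, samples), skipping failed runs."""
    samples = []
    for _ in range(runs):
        try:
            elapsed, _ = measure_time(func, *args, **kwargs)
            samples.append(elapsed)
        except Exception as exc:  # crude catch-all; refine as needed
            print(f"Run failed: {exc}")
    if not samples:
        return None, None, samples
    return min(samples), sum(samples) / len(samples), samples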
# Test bucket creation
print("Testing bucket creation...")
# Note: S3 requires CreateBucketConfiguration to be omitted when the region is us-east-1
s3_create_time, _ = measure_time(
    s3_client.create_bucket, Bucket=BUCKET_NAME, CreateBucketConfiguration={'LocationConstraint': S3_REGION}
)
r2_create_time, _ = measure_time(
    r2_client.create_bucket, Bucket=BUCKET_NAME, CreateBucketConfiguration={'LocationConstraint': R2_REGION}
)
fsly_create_time, _ = measure_time(
    fsly_client.create_bucket, Bucket=BUCKET_NAME, CreateBucketConfiguration={'LocationConstraint': FASTLY_REGION}
)
print(f"S3 bucket creation time: {s3_create_time:.2f} seconds")
print(f"R2 bucket creation time: {r2_create_time:.2f} seconds")
print(f"Fastly bucket creation time: {fsly_create_time:.2f} seconds")
# Test object upload
print("Testing object upload...")
s3_upload_time, _ = measure_time(
    s3_client.put_object, Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY, Body=TEST_OBJECT_DATA
)
r2_upload_time, _ = measure_time(
    r2_client.put_object, Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY, Body=TEST_OBJECT_DATA
)
fsly_upload_time, _ = measure_time(
    fsly_client.put_object, Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY, Body=TEST_OBJECT_DATA
)
print(f"S3 object upload time: {s3_upload_time:.2f} seconds")
print(f"R2 object upload time: {r2_upload_time:.2f} seconds")
print(f"Fastly object upload time: {fsly_upload_time:.2f} seconds")
# Test object download
print("Testing object download...")
# get_object returns a streaming body, so read it inside the timed call;
# otherwise we would only time the response headers, not the full 1 MiB transfer.
s3_download_time, _ = measure_time(
    lambda: s3_client.get_object(Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY)["Body"].read()
)
r2_download_time, _ = measure_time(
    lambda: r2_client.get_object(Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY)["Body"].read()
)
fsly_download_time, _ = measure_time(
    lambda: fsly_client.get_object(Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY)["Body"].read()
)
print(f"S3 object download time: {s3_download_time:.2f} seconds")
print(f"R2 object download time: {r2_download_time:.2f} seconds")
print(f"Fastly object download time: {fsly_download_time:.2f} seconds")
# Deleting objects
print("Cleaning up...")
print("Testing object deletion")
s3_obj_deletion_time, _ = measure_time(
    s3_client.delete_object, Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY
)
r2_obj_deletion_time, _ = measure_time(
    r2_client.delete_object, Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY
)
fsly_obj_deletion_time, _ = measure_time(
    fsly_client.delete_object, Bucket=BUCKET_NAME, Key=TEST_OBJECT_KEY
)
print(f"S3 object deletion time: {s3_obj_deletion_time:.2f} seconds")
print(f"R2 object deletion time: {r2_obj_deletion_time:.2f} seconds")
print(f"Fastly object deletion time: {fsly_obj_deletion_time:.2f} seconds")
# Deleting buckets
print("Testing bucket deletion")
s3_bu_deletion_time, _ = measure_time(
    s3_client.delete_bucket, Bucket=BUCKET_NAME
)
r2_bu_deletion_time, _ = measure_time(
    r2_client.delete_bucket, Bucket=BUCKET_NAME
)
fsly_bu_deletion_time, _ = measure_time(
    fsly_client.delete_bucket, Bucket=BUCKET_NAME
)
print(f"S3 bucket deletion time: {s3_bu_deletion_time:.2f} seconds")
print(f"R2 bucket deletion time: {r2_bu_deletion_time:.2f} seconds")
print(f"Fastly bucket deletion time: {fsly_bu_deletion_time:.2f} seconds")
print("Test complete.")