Getting started with Object Storage using AWS CLI and a Node.js app

I’m kicking the tires on Fastly Object Storage. Thought I’d share this post for anyone who is getting started like me. Hope you find some of this interesting. Have fun!

Prerequisites

This post assumes that you have

  • Purchased Fastly Object storage
    • Your account is entitled to the product
    • Your account has the product enabled on it
  • A Fastly API Token set locally as $FASTLY_API_TOKEN
  • AWS CLI installed

Set the Fastly API Token in the shell environment

export FASTLY_API_TOKEN=xxxxx

Configure the AWS CLI and create a new Fastly profile by editing the config file (e.g. nano ~/.aws/config):

[default]
region = us-west

[profile fastly]
region = us-west
endpoint_url = https://us-west.object.fastlystorage.app

Let’s Go

I’m interacting with Fastly Object Storage in two ways: through the AWS CLI and through a Node.js app.

# Create an access key using the Fastly API token.
# The response contains an access key (AWS_ACCESS_KEY) and a secret key (AWS_SECRET_KEY).
curl \
-H "Fastly-Key: $FASTLY_API_TOKEN" \
-H "Content-Type: application/json" \
-X POST https://api.fastly.com/resources/object-storage/access-keys \
-d '{
  "description": "Demo access key",
  "permission": "read-write-admin",
  "buckets": ["my-bucket"]
}'

# Helpers
## List keys
curl \
-H "Fastly-Key: $FASTLY_API_TOKEN" \
-X GET https://api.fastly.com/resources/object-storage/access-keys
## Delete a key
curl -i -X DELETE "https://api.fastly.com/resources/object-storage/access-keys/[YOUR-ACCESS-KEY]" \
-H "Fastly-Key: $FASTLY_API_TOKEN"

## AWS CLI ##
# Configure the AWS CLI
aws configure --profile fastly

# Create a bucket
aws s3 mb s3://my-bucket --profile fastly

# List buckets
aws s3 ls --profile fastly

## Node.js app ##
# Set the environment variables in the `.env` file

# Start local server
node --watch --watch-preserve-output app.js

# Get a list of buckets from the local server
curl -X GET http://localhost:3000/buckets

# Upload a file to a bucket through the local server
curl -X POST -F "file=@./sample.txt" http://localhost:3000/upload

# Upload a folder to a bucket using AWS CLI
aws s3 cp /Users/jrandhawa/Documents/Demo/object-storage-demo/sample-folder s3://my-bucket/ --recursive --profile fastly


# List contents of a bucket using the AWS CLI
aws s3 ls my-bucket --profile fastly

# Copy the file we uploaded through the local server back out of the bucket, using the AWS CLI.
aws s3 cp s3://my-bucket/sample.txt ./sample2.txt --profile fastly

# You get the idea. This concludes the demo!

Node.js app

This is a simple Node.js app that does not use the AWS SDK (to make it challenging, I guess), and relies on the Axios library to handle requests and responses.

const express = require("express");
const axios = require("axios");
const crypto = require("crypto");
const moment = require("moment");
const multer = require("multer");

require("dotenv").config();

// Express app that proxies local HTTP requests to Fastly Object Storage
// (an S3-compatible API) using hand-rolled AWS Signature V4 auth.
const app = express();
const port = 3000; // Local listen port for the demo server

// S3 credentials and config
// Credentials and bucket name come from the .env file loaded by dotenv above.
const AWS_ACCESS_KEY = process.env.AWS_ACCESS_KEY;
const AWS_SECRET_KEY = process.env.AWS_SECRET_KEY;
const BUCKET_NAME = process.env.BUCKET_NAME;
const REGION = "us-west"; // Also the subdomain of the storage endpoint below
const AWS_S3_HOST = "object.fastlystorage.app";

// Set up multer for file uploads
const storage = multer.memoryStorage(); // Use memory storage for easy file manipulation
const upload = multer({ storage: storage });

// List Buckets Route (GET Request)
// List Buckets Route (GET Request)
// Proxies a signed GET to the storage root, which returns every bucket the key can access.
app.get("/buckets", async (req, res) => {
  // Derive both date strings from a single instant so the signed x-amz-date and
  // the credential-scope date can never disagree (e.g. across UTC midnight).
  const now = moment.utc();
  const date = now.format("YYYYMMDDTHHmmss") + "Z"; // ISO 8601 basic format (e.g., 20231121T120000Z)
  const dateStamp = now.format("YYYYMMDD"); // Credential-scope date (e.g., 20231121)

  const method = "GET";
  const s3Path = "/"; // Root path lists all buckets
  const body = ""; // No body for listing buckets
  const headers = {
    "x-amz-date": date,
    "x-amz-content-sha256": crypto.createHash("sha256").update("").digest("hex"), // Hash of the empty body
  };

  // Sign the request and attach the Authorization header.
  headers["Authorization"] = createSignature(method, s3Path, headers, body, REGION, "s3", dateStamp);

  const url = `https://${REGION}.${AWS_S3_HOST}${s3Path}`;

  // Forward the signed GET to the storage endpoint; any failure maps to a 500.
  try {
    const response = await axios.get(url, { headers });
    res.status(200).json({ message: "Buckets listed successfully", data: response.data });
  } catch (error) {
    console.error("Error listing buckets:", error);
    res.status(500).json({ message: "Error listing buckets", error: error.message });
  }
});

// List Objects Route (GET Request)
// List Objects Route (GET Request)
// Proxies a signed GET to /<bucket>, which returns the bucket's object listing.
app.get("/list-objects", async (req, res) => {
  // Single instant for both date formats so the signature's credential scope
  // and x-amz-date cannot straddle a UTC day boundary.
  const now = moment.utc();
  const date = now.format("YYYYMMDDTHHmmss") + "Z"; // ISO 8601 basic format (e.g., 20231121T120000Z)
  const dateStamp = now.format("YYYYMMDD"); // Credential-scope date (e.g., 20231121)

  const method = "GET";
  const s3Path = `/${BUCKET_NAME}`; // List objects in the configured bucket
  const body = ""; // No body for listing objects
  const headers = {
    "x-amz-date": date,
    "x-amz-content-sha256": crypto.createHash("sha256").update("").digest("hex"), // Hash of the empty body
  };

  // Sign the request and attach the Authorization header.
  headers["Authorization"] = createSignature(method, s3Path, headers, body, REGION, "s3", dateStamp);

  const url = `https://${REGION}.${AWS_S3_HOST}${s3Path}`;

  // Forward the signed GET; the response body contains the object listing.
  try {
    const response = await axios.get(url, { headers });
    res.status(200).json({
      message: "Objects listed successfully",
      data: response.data, // Response will contain the list of objects
    });
  } catch (error) {
    console.error("Error listing objects:", error);
    res.status(500).json({ message: "Error listing objects", error: error.message });
  }
});


// Upload File Route (POST Request)
// Upload File Route (POST Request)
// Accepts a multipart form upload ("file" field, held in memory by multer) and
// PUTs it into the configured bucket under its original filename.
app.post("/upload", upload.single("file"), async (req, res) => {
  console.log(req.file);
  if (!req.file) {
    return res.status(400).json({ message: "No file uploaded" });
  }

  const { originalname: fileName, buffer: fileBuffer, mimetype } = req.file;

  // Single instant for both date formats so x-amz-date and the credential-scope
  // date always agree, even at a UTC midnight rollover.
  const now = moment.utc();
  const date = now.format("YYYYMMDDTHHmmss") + "Z";
  const dateStamp = now.format("YYYYMMDD");

  const method = "PUT";
  const s3Path = `/${BUCKET_NAME}/${fileName}`;
  const headers = {
    "x-amz-date": date,
    "x-amz-content-sha256": crypto.createHash("sha256").update(fileBuffer).digest("hex"),
    "Content-Type": mimetype,
    "Content-Length": fileBuffer.length,
  };

  // Sign the PUT (the file content is the payload) and attach the header.
  headers["Authorization"] = createSignature(method, s3Path, headers, fileBuffer, REGION, "s3", dateStamp);

  const url = `https://${REGION}.${AWS_S3_HOST}${s3Path}`;

  // Forward the signed PUT with the raw file bytes as the request body.
  try {
    const response = await axios.put(url, fileBuffer, { headers });
    res.status(200).json({ message: "File uploaded successfully", data: response.data });
  } catch (error) {
    console.error("Error uploading file:", error);
    res.status(500).json({ message: "Error uploading file", error: error.message });
  }
});

// Start the demo HTTP server on the configured port.
app.listen(port, () =>
  console.log(`Server is running on http://localhost:${port}`)
);

// Helper function to create AWS Signature V4
function createSignature(method, path, headers, body, region, service, date) {
  const signedHeaders = "host;x-amz-content-sha256;x-amz-date";
  const canonicalHeaders = `host:${REGION}.${AWS_S3_HOST}\nx-amz-content-sha256:${headers["x-amz-content-sha256"]}\nx-amz-date:${headers["x-amz-date"]}\n`;

  const canonicalRequest = `${method}\n${path}\n\n${canonicalHeaders}\n${signedHeaders}\n${crypto.createHash("sha256").update(body).digest("hex")}`;
  const algorithm = "AWS4-HMAC-SHA256";

  const dateKey = crypto.createHmac("sha256", `AWS4${AWS_SECRET_KEY}`).update(date.substring(0, 8)).digest();
  const regionKey = crypto.createHmac("sha256", dateKey).update(region).digest();
  const serviceKey = crypto.createHmac("sha256", regionKey).update(service).digest();
  const signingKey = crypto.createHmac("sha256", serviceKey).update("aws4_request").digest();

  const stringToSign = `${algorithm}\n${headers["x-amz-date"]}\n${date.substring(0, 8)}/${region}/${service}/aws4_request\n${crypto
    .createHash("sha256")
    .update(canonicalRequest)
    .digest("hex")}`;
  const signature = crypto.createHmac("sha256", signingKey).update(stringToSign).digest("hex");

  return `${algorithm} Credential=${AWS_ACCESS_KEY}/${date.substring(
    0,
    8
  )}/${region}/${service}/aws4_request, SignedHeaders=${signedHeaders}, Signature=${signature}`;
}

.env file contents look like this:

AWS_ACCESS_KEY=******
AWS_SECRET_KEY=******
BUCKET_NAME=******

Clean up

# Empty the bucket
aws s3 rm s3://my-bucket --recursive --profile fastly

# Delete the bucket
aws s3 rb s3://my-bucket --force --profile fastly

# Delete the access key
curl -i -X DELETE "https://api.fastly.com/resources/object-storage/access-keys/[YOUR-ACCESS-KEY]" \
-H "Fastly-Key: $FASTLY_API_TOKEN"
2 Likes