# SLS Offline with DynamoDB, SQS and S3
Serverless Framework with offline services:
- DynamoDB
- SQS
- S3
## Bundle

```json
"devDependencies": {
"@aws-sdk/client-dynamodb": "^3.186.0",
"serverless": "^3.23.0",
"serverless-dynamodb-local": "^0.2.40",
"serverless-offline": "^8.8.1",
"serverless-offline-sqs": "^6.0.0",
"serverless-s3-local": "^0.6.22"
}
```
I didn’t look into it in depth, but if you install `serverless-offline` > 8, you will get a bunch of issues…
## DynamoDB
The `serverless-dynamodb-local` plugin seems to be unmaintained… I had to search for a few hours to find good solutions.
I don’t like this approach at all, since it isn’t dynamic… but it works.
`setup-dynamodb.js`:

```js
const fs = require("fs");
const {
DynamoDBClient,
ListTablesCommand,
CreateTableCommand,
} = require("@aws-sdk/client-dynamodb");
const yaml = require("js-yaml");
const cloudformationSchema = require("@serverless/utils/cloudformation-schema");
const SERVERLESS_CONFIG = __dirname + "/serverless.yml";
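// The raw serverless.yml still contains unresolved variables such as ${self:custom.dynamodb.svc.name},
// so map them to the concrete table names to create locally. Adapt this mapping to your service/stage.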
const mapping = {
"${self:custom.dynamodb.svc.name}": "service-name-dev-svc-table",
"${self:custom.dynamodb.svc1.name}": "service-name-dev-svc1-table",
};
const ddb = new DynamoDBClient({
credentials: {
accessKeyId: "fake-key",
secretAccessKey: "fake-secret",
},
endpoint: "http://localhost:9999",
region: "local",
});
async function getDynamoDBTableResources() {
const tables = Object.entries(
yaml.loadAll(fs.readFileSync(SERVERLESS_CONFIG), {
schema: cloudformationSchema,
})[0].resources.Resources
).filter(([, resource]) => resource.Type === "AWS::DynamoDB::Table");
return tables;
}
(async function main() {
console.info("Setting up local DynamoDB tables");
const tables = await getDynamoDBTableResources();
const existingTables = (await ddb.send(new ListTablesCommand({}))).TableNames;
  for (const [logicalId, definition] of tables) {
const {
Properties: {
BillingMode,
TableName,
AttributeDefinitions,
KeySchema,
GlobalSecondaryIndexes,
LocalSecondaryIndexes,
ProvisionedThroughput,
},
} = definition;
    if (existingTables.find((table) => table === mapping[TableName])) {
console.info(
`${logicalId}: DynamoDB Local - Table already exists: ${TableName.replace(
TableName,
mapping[TableName]
)}. Skipping..`
);
continue;
}
console.info(
`${logicalId}: DynamoDB Local - Create Table: ${TableName.replace(
TableName,
mapping[TableName]
)}.`
);
const result = await ddb.send(
new CreateTableCommand({
AttributeDefinitions,
BillingMode,
KeySchema,
LocalSecondaryIndexes,
GlobalSecondaryIndexes,
ProvisionedThroughput,
TableName: TableName.replace(TableName, mapping[TableName]),
})
);
console.info(
`${logicalId}: DynamoDB Local - Created table: ${TableName.replace(
TableName,
mapping[TableName]
)}`
);
}
})();
```
Script Source: https://medium.com/geekculture/using-dynamodb-locally-in-a-serverless-framework-project-92c996fcffdf
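Run it with `node setup-dynamodb.js` once DynamoDB Local is listening on port 9999 (the run script at the end does exactly that).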
How I use this approach:

```js
const { DynamoDBClient, PutItemCommand } = require('@aws-sdk/client-dynamodb');
const AWSXRay = require('aws-xray-sdk');
const client = AWSXRay.captureAWSv3Client(
new DynamoDBClient({
region: process.env.AWS_REGION,
endpoint:
process.env.IS_OFFLINE !== '' && process.env.IS_OFFLINE !== 'false'
? 'http://127.0.0.1:9999'
: null,
credentials: {
accessKeyId:
process.env.IS_OFFLINE !== '' && process.env.IS_OFFLINE !== 'false'
? 'LOCAL'
: null, // This specific key is required when working offline
secretAccessKey:
process.env.IS_OFFLINE !== '' && process.env.IS_OFFLINE !== 'false'
? 'LOCAL'
: null,
},
}),
);
// ...
```
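For reference, here is a minimal sketch of writing an item through that client (this isn’t the actual handler code; the attribute names follow the `foo`/`bar` key schema from `serverless.yml`, `SVC_TABLE_NAME` is the environment variable set for `svc3`, and the values are made up):

```js
// Minimal sketch, not the real handler: put one item into the local svc table.
// `client` and PutItemCommand come from the snippet above;
// foo is the HASH key and bar the RANGE key, as defined in serverless.yml.
async function putExampleItem() {
  await client.send(
    new PutItemCommand({
      TableName: process.env.SVC_TABLE_NAME ?? "service-name-dev-svc-table",
      Item: {
        foo: { S: "example-partition-key" }, // illustrative values
        bar: { S: "example-sort-key" },
      },
    })
  );
}
```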
My Jest test to retrieve the data:

```js
const { DynamoDBClient, ScanCommand } = require("@aws-sdk/client-dynamodb");
const AWSXRay = require("aws-xray-sdk");
const client = AWSXRay.captureAWSv3Client(
new DynamoDBClient({
region: process.env.AWS_REGION,
endpoint:
process.env.IS_OFFLINE !== "" && process.env.IS_OFFLINE !== "false"
? "http://127.0.0.1:9999"
: null,
credentials: {
accessKeyId:
process.env.IS_OFFLINE !== "" && process.env.IS_OFFLINE !== "false"
? "LOCAL"
: null, // This specific key is required when working offline
secretAccessKey:
process.env.IS_OFFLINE !== "" && process.env.IS_OFFLINE !== "false"
? "LOCAL"
: null,
},
})
);
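
// These tests assume the local stack is up (see the run script at the end)
// and that setup-dynamodb.js has already created the tables on port 9999.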
describe("dynamodb getter", () => {
test("svc table", async () => {
const data = await client.send(
new ScanCommand({
TableName: "service-name-dev-svc-table",
})
);
console.log(data);
});
test("svc1 table", async () => {
const data = await client.send(
new ScanCommand({
TableName: "service-name-dev-svc1-table",
})
);
console.log(data);
});
});
```
## S3
Nothing special has been done for this one.

To access the uploaded data, I use `curl` and my browser to download files:

```bash
curl "http://localhost:4569/service-name-data-dev/webuxlab.com/accueil_webux_lab_2022_10_12/results.txt?x-id=PutObject"
```
In the code:

```js
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const AWSXRay = require("aws-xray-sdk");
const client = AWSXRay.captureAWSv3Client(
new S3Client({
region: process.env.AWS_REGION,
credentials: {
accessKeyId:
process.env.IS_OFFLINE !== "" && process.env.IS_OFFLINE !== "false"
? "S3RVER"
: null,
secretAccessKey:
process.env.IS_OFFLINE !== "" && process.env.IS_OFFLINE !== "false"
? "S3RVER"
: null,
},
endpoint:
process.env.IS_OFFLINE !== "" && process.env.IS_OFFLINE !== "false"
? "http://127.0.0.1:4569"
: null,
forcePathStyle: true,
})
);
// ...
```
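Similarly, a minimal sketch of an upload through that client (not the actual handler code; `DATA_BUCKET_NAME` is the environment variable set for `svc3` in `serverless.yml`, and the key and body are made up):

```js
// Minimal sketch, not the real handler: upload a small text object to the local data bucket
// served by serverless-s3-local on port 4569. `client` and PutObjectCommand come from the snippet above.
async function putExampleObject() {
  await client.send(
    new PutObjectCommand({
      Bucket: process.env.DATA_BUCKET_NAME ?? "service-name-data-dev",
      Key: "webuxlab.com/example/results.txt", // illustrative key
      Body: "hello from serverless offline",
      ContentType: "text/plain",
    })
  );
}
```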
## SQS
This one was tricky…

In my code I use it like this:

```js
const { SQSClient, SendMessageBatchCommand } = require("@aws-sdk/client-sqs");
const AWSXRay = require("aws-xray-sdk");
const client = AWSXRay.captureAWSv3Client(
new SQSClient({
region: process.env.AWS_REGION,
endpoint:
process.env.QUEUE_ENDPOINT !== ""
? process.env.QUEUE_ENDPOINT ?? null
: null,
})
);
```
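For reference, a minimal sketch of sending a batch through that client (not the actual handler code; `QUEUE_URL` is the environment variable set in `serverless.yml`, which resolves to the local ElasticMQ URL when `IS_OFFLINE` is `true`, and the payload is made up):

```js
// Minimal sketch, not the real handler: send a one-message batch to the dispatch queue
// (ElasticMQ on port 9324 when running offline). `client` comes from the snippet above.
async function sendExampleBatch() {
  await client.send(
    new SendMessageBatchCommand({
      QueueUrl: process.env.QUEUE_URL,
      Entries: [
        { Id: "1", MessageBody: JSON.stringify({ url: "https://webuxlab.com" }) }, // illustrative payload
      ],
    })
  );
}
```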
## Serverless.yml

I’ll only include the configuration relevant to the offline setup.

```yaml
service: service-name
frameworkVersion: "3"
plugins:
- serverless-offline-sqs
- serverless-s3-local
- serverless-dynamodb-local
- serverless-offline
custom:
sqs:
name: "${self:service}-${sls:stage}-dispatch"
name_dlq: "${self:service}-${sls:stage}-dispatch-dlq"
urls:
true: "http://localhost:9324/000000000000/${self:service}-${sls:stage}-dispatch"
false: !Ref Queue
url: "${self:custom.sqs.urls.${env:IS_OFFLINE, false}}"
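    # ${env:IS_OFFLINE, false} resolves to "true" or "false" and selects the matching entry
    # from the urls map above: the local ElasticMQ URL when offline, !Ref Queue otherwise.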
s3:
host: localhost
directory: /tmp
data_bucket_name: "${self:service}-data-${sls:stage}"
svc1_bucket_name: "${self:service}-svc1-${sls:stage}"
serverless-offline-sqs:
autoCreate: true
apiVersion: "2012-11-05"
endpoint: http://127.0.0.1:9324
region: "ca-central-1"
accessKeyId: root
secretAccessKey: root
skipCacheInvalidation: false
dynamodb:
svc:
name: "${self:service}-${sls:stage}-svc-table"
svc1:
name: "${self:service}-${sls:stage}-svc1-table"
functions:
svc1:
handler: svc1/src/handlers/index.handler
name: ${self:custom.tenant}-svc1-${sls:stage}
environment:
QUEUE_URL: ${self:custom.sqs.url}
QUEUE_ENDPOINT: ${env:QUEUE_ENDPOINT, ""}
package:
patterns:
- svc1/node_modules/**
- svc1/src/**
- svc1/package.json
events:
- http:
method: POST
path: /collect
private: true
cors: true
svc2:
handler: svc2/src/handlers/index.handler
name: ${self:custom.tenant}-svc2-${sls:stage}
environment:
SITEMAP_QUEUE_URL: ${self:custom.sqs.url}
QUEUE_ENDPOINT: ${env:QUEUE_ENDPOINT, ""}
package:
patterns:
- svc2/node_modules/**
- svc2/src/**
- svc2/package.json
events:
- http:
method: POST
path: /processing
private: true
cors: true
svc3:
handler: svc3/src/handlers/index.handler
name: ${self:custom.tenant}-svc3-${sls:stage}
description: Process URLs
environment:
SVC_TABLE_NAME: "${self:custom.dynamodb.svc.name}"
SVC1_TABLE_NAME: "${self:custom.dynamodb.svc1.name}"
DATA_BUCKET_NAME: "${self:custom.s3.data_bucket_name}"
SVC1_BUCKET_NAME: "${self:custom.s3.svc1_bucket_name}"
package:
patterns:
- svc3/node_modules/**
- svc3/src/**
- svc3/package.json
events:
- sqs:
arn:
Fn::GetAtt:
- Queue
- Arn
batchSize: 1
maximumBatchingWindow: 300
resources:
Resources:
Queue:
Type: AWS::SQS::Queue
Properties:
QueueName: ${self:custom.sqs.name}
VisibilityTimeout: 300
ReceiveMessageWaitTimeSeconds: 20
RedrivePolicy:
deadLetterTargetArn:
Fn::GetAtt:
- "DlqQueue"
- "Arn"
maxReceiveCount: 100
DlqQueue:
Type: AWS::SQS::Queue
Properties:
QueueName: "${self:custom.sqs.name_dlq}"
VisibilityTimeout: 300
ReceiveMessageWaitTimeSeconds: 20
DatabaseSvc:
Type: AWS::DynamoDB::Table
Properties:
TableName: ${self:custom.dynamodb.svc.name}
AttributeDefinitions:
- AttributeName: "foo"
AttributeType: "S"
- AttributeName: "bar"
AttributeType: "S"
KeySchema:
- AttributeName: "foo"
KeyType: "HASH"
- AttributeName: "bar"
KeyType: "RANGE"
ProvisionedThroughput:
ReadCapacityUnits: 1
WriteCapacityUnits: 1
DataS3Bucket:
Type: "AWS::S3::Bucket"
DeletionPolicy: Retain
UpdateReplacePolicy: Retain
Properties:
BucketName: "${self:custom.s3.data_bucket_name}"
VersioningConfiguration:
Status: Suspended
Svc1S3Bucket:
Type: "AWS::S3::Bucket"
DeletionPolicy: Retain
UpdateReplacePolicy: Retain
Properties:
BucketName: "${self:custom.s3.svc1_bucket_name}"
VersioningConfiguration:
Status: Suspended
Svc1Table:
Type: AWS::DynamoDB::Table
Properties:
TableName: ${self:custom.dynamodb.svc1.name}
AttributeDefinitions:
- AttributeName: "foo"
AttributeType: "S"
- AttributeName: "bar"
AttributeType: "S"
KeySchema:
- AttributeName: "foo"
KeyType: "HASH"
- AttributeName: "bar"
KeyType: "RANGE"
ProvisionedThroughput:
ReadCapacityUnits: 10
WriteCapacityUnits: 10
GlobalSecondaryIndexes:
- IndexName: "schedule"
KeySchema:
- AttributeName: "foo"
KeyType: "HASH"
- AttributeName: "bar"
KeyType: "RANGE"
Projection:
NonKeyAttributes:
- ...
ProjectionType: "INCLUDE"
ProvisionedThroughput:
ReadCapacityUnits: 1
WriteCapacityUnits: 1
```

## Script to run everything locally

```bash
#!/bin/bash
set -e
trap 'kill $(jobs -p)' EXIT
# docker run --name elasticmq -p 9324:9324 -p 9325:9325 softwaremill/elasticmq-native
java -jar elasticmq-server-1.3.9.jar &
sleep 3
export AWS_ACCESS_KEY_ID=root
export AWS_SECRET_ACCESS_KEY=root
export AWS_SESSION_TOKEN=root
aws sqs create-queue --queue-name service-name-dev-dispatch-dlq --endpoint-url http://127.0.0.1:9324 --region ca-central-1
# aws sqs create-queue --queue-name service-name-dev-dispatch --endpoint-url http://127.0.0.1:9324 --region ca-central-1
unset AWS_ACCESS_KEY_ID
unset AWS_SECRET_ACCESS_KEY
unset AWS_SESSION_TOKEN
if [ ! -d .dynamodb ]; then
npx sls dynamodb install
fi
# npx sls dynamodb install
java -Djava.library.path=./.dynamodb/DynamoDBLocal_lib -jar ./.dynamodb/DynamoDBLocal.jar -inMemory -sharedDb -port 9999 &
sleep 5
# Review this script to adapt the mapping
node setup-dynamodb.js
export AWS_PROFILE="default"
export AWS_REGION="ca-central-1"
# npx sls dynamodb start --migrate --region=ca-central-1
export AWS_XRAY_CONTEXT_MISSING=LOG_ERROR
export AWS_XRAY_LOG_LEVEL=silent
export IS_OFFLINE=true
export QUEUE_ENDPOINT="http://localhost:9324/"
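# IS_OFFLINE and QUEUE_ENDPOINT are what the SDK clients above check to decide whether to use the local endpoints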
export IS_LOCAL=true
sleep 5
NODE_ENV=test SLS_DEBUG=* npx sls offline --stage=dev --region=ca-central-1
```
---
## Conclusion
This is how I've been able to test my whole flow locally.