BRS-449: creating warmup functionality for Lambda (#86)
danieltruong authored Mar 16, 2022
1 parent 048c502 commit 976c9d9
Showing 14 changed files with 415 additions and 7 deletions.
6 changes: 5 additions & 1 deletion lambda/responseUtil.js
@@ -13,5 +13,9 @@ exports.sendResponse = function (code, data, context) {
};

exports.checkWarmup = function (event) {
-  return event?.queryStringParameters?.warmup === true;
+  if (event?.warmup === true) {
+    return true;
+  } else {
+    return false;
+  }
}
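
For reference, a minimal sketch (not part of this commit) of how a handler might use this helper as its warm-up break out; the `sendResponse` arguments here are illustrative:

```
// Hypothetical caller of checkWarmup/sendResponse from lambda/responseUtil.js
const { checkWarmup, sendResponse } = require('../responseUtil');

exports.handler = async (event, context) => {
  // Warm-up invocations carry { "warmup": true } and should do no real work
  if (checkWarmup(event)) {
    return sendResponse(200, {}, context);
  }
  // ...normal request handling would continue here
};
```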
66 changes: 66 additions & 0 deletions lambda/warmUp/index.js
@@ -0,0 +1,66 @@
const { v4: uuidv4 } = require('uuid');

const mainThreadConfigDefaults = {
configArray: [],
log: true,
delay: 1000
};

const configDefaults = {
funcName: null,
funcVersion: null,
concurrency: 'concurrency', // default concurrency field
log: true, // default logging to true
correlationId: uuidv4(), // default the correlationId
};

const { Worker } = require('worker_threads');
exports.handler = async (event) => {
const mainThreadConfig = Object.assign({}, mainThreadConfigDefaults, event);
if (mainThreadConfig.configArray.length > 0) {
const threads = new Set();
for (let i = 0; i < mainThreadConfig.configArray.length; i++) {
const config = Object.assign({}, configDefaults, mainThreadConfig.configArray[i]);
if (config.funcName && config.funcVersion) {
threads.add(new Worker(__dirname + '/worker.js', { workerData: { config: config } }));
logMessage(mainThreadConfigDefaults.log, `Worker added to thread list with config: ${JSON.stringify(config)}`); // stringify so the config fields are readable in the log
}
}

for (let worker of threads) {
worker.on('error', (err) => {
logMessage(mainThreadConfigDefaults.log, `We have an error: ${err}`);
threads.delete(worker);
});
worker.on('exit', () => {
threads.delete(worker);
logMessage(mainThreadConfigDefaults.log, `Thread exiting, ${threads.size} running...`);
if (threads.size === 0) {
logMessage(mainThreadConfigDefaults.log, 'Warm up complete.');
}
});
worker.on('message', (msg) => {
logMessage(mainThreadConfigDefaults.log, msg);
});
}

return new Promise(async (resolve) => {
while (true) {
await new Promise(r => setTimeout(r, mainThreadConfig.delay));
if (threads.size === 0) {
logMessage(mainThreadConfigDefaults.log, 'All threads complete.');
break;
} else {
logMessage(mainThreadConfigDefaults.log, '.');
}
}
resolve();
});
}
}; // end module

function logMessage(logBoolean, message) {
if (logBoolean === true) {
console.log(message);
}
}
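
As a rough illustration (not part of this commit), the handler above can be exercised directly with a small config. This sketch assumes local AWS credentials, since each worker really invokes its target function:

```
// Hypothetical local smoke test for lambda/warmUp/index.js
const { handler } = require('./index');

handler({
  configArray: [
    { funcName: 'readConfig', funcVersion: 'latest', concurrency: '2' }
  ],
  delay: 500,
  log: true
}).then(() => console.log('warm up run finished'));
```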
64 changes: 64 additions & 0 deletions lambda/warmUp/readme.md
@@ -0,0 +1,64 @@
# Lambda Warm Up Functions
This function uses worker threads to generate concurrent calls to a given function.

## Issue
In the case of DUP, we often get high amounts of traffic at 7 AM, because that is when people are allowed to book passes. Since this wave of users arrives all at once, some people reported communication errors with our servers: Lambda takes time to automatically spin up instances to handle the load.

## Solution
One part of the solution was to create a way to warm up the relevant Lambda functions prior to spikes in traffic. This function allows us to do that. The following functions have been equipped with a warm-up break out:

```
readConfig
readPark
readFacility
generateCaptcha
verifyCaptcha
writePass
```

To warm up one of these functions, call it with the following payload:

```
{
"warmup": true
}
```

This causes the function to skip all of its normal work and return a 200 OK. That is all Lambda needs in order to spin up instances of the function, provided there are enough concurrent calls.
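
For illustration only (the deployed function name and alias below are assumptions), a one-off warm-up call can be made the same way `worker.js` does it, using the aws-sdk v2 Lambda client:

```
// Hypothetical manual warm-up of a single function
const AWS = require('aws-sdk');
const lambda = new AWS.Lambda();

lambda.invoke({
  FunctionName: 'readConfig:latest',          // "function:alias", as worker.js builds it
  InvocationType: 'RequestResponse',
  Payload: JSON.stringify({ warmup: true })
}).promise()
  .then((res) => console.log(res.StatusCode, res.Payload)); // Payload carries the 200 OK response described above
```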

## Current AWS Solution
The pipeline for this code is as follows:

```
yarn build > Terraform > Lambda
Terraform > Eventbridge > Invoke warmup
```

At the moment, EventBridge invokes the warm-up function at 6:57 AM, 6:58 AM, and 6:59 AM every day.

## Example payload
```
{
"configArray": [
{
"funcName": "readConfig",
"funcVersion": "latest",
"concurrency": "5",
"log": true
},
{
"funcName": "readPark",
"funcVersion": "latest",
"concurrency": "10"
},
{
"funcName": "readFacility",
"funcVersion": "2",
"concurrency": "100"
}
],
"delay": 1000,
"log": true
}
```
64 changes: 64 additions & 0 deletions lambda/warmUp/worker.js
@@ -0,0 +1,64 @@
const AWS = require('aws-sdk');
const lambda = new AWS.Lambda();
const { parentPort, workerData } = require('worker_threads');

const config = workerData.config;

let concurrency = config.concurrency
&& !isNaN(config.concurrency)
&& config.concurrency > 1
? config.concurrency : 1;

// Log it
if (config.log) {
const lastAccess = Date.now();
// Create log record

const log = {
action: 'warmer',
function: config.funcName + ':' + config.funcVersion,
correlationId: config.correlationId,
count: 1,
concurrency: concurrency,
lastAccessed: lastAccess,
lastAccessedSeconds: lastAccess === null ? null : ((Date.now() - lastAccess) / 1000).toFixed(1)
};
parentPort.postMessage(JSON.stringify({ message: config.funcName, log: log })); // post the structured log record to the main thread
}

// Fan out if concurrency is set higher than 1
if (concurrency > 1) {
// init promise array
let invocations = [];

// loop through concurrency count
for (let i = 2; i <= concurrency; i++) {

// Set the params and wait for the final function to finish
let params = {
FunctionName: config.funcName + ':' + config.funcVersion,
InvocationType: i === concurrency ? 'RequestResponse' : 'Event',
LogType: 'None',
Payload: Buffer.from(JSON.stringify({
'__WARMER_INVOCATION__': i, // send invocation number
'__WARMER_CONCURRENCY__': concurrency, // send total concurrency
'__WARMER_CORRELATIONID__': config.correlationId, // send correlation id
'warmup': true
}))
};

// Add promise to invocations array
invocations.push(lambda.invoke(params).promise());
} // end for

// Invoke the concurrent functions. A try/catch around a promise that is not
// awaited cannot catch its rejection, so report failures via .catch instead.
Promise.all(invocations)
  .then(() => {
    if (config.log) {
      parentPort.postMessage(`{"message":"${config.funcName} has been warmed up successfully."}`);
    }
  })
  .catch((error) => {
    parentPort.postMessage(`{"message":"${config.funcName} warm up failed: ${error}"}`);
  });
}
11 changes: 5 additions & 6 deletions serverless.yml
@@ -50,7 +50,6 @@ functions:
###########
readConfig:
handler: lambda/readConfig/index.handler
memory: 768
events:
- http:
method: GET
@@ -69,7 +68,6 @@
###########
readPark:
handler: lambda/readPark/index.handler
memory: 768
events:
- http:
method: GET
@@ -92,7 +90,6 @@
###########
readFacility:
handler: lambda/readFacility/index.handler
memory: 768
events:
- http:
method: GET
@@ -115,7 +112,6 @@
###########
readPass:
handler: lambda/readPass/index.handler
memory: 768
events:
- http:
method: GET
@@ -141,7 +137,6 @@
###########
exportPass:
handler: lambda/exportPass/index.handler
memory: 2048
events:
- http:
method: GET
@@ -153,7 +148,6 @@
###########
metric:
handler: lambda/metric/index.handler
memory: 1792
events:
- http:
method: GET
@@ -169,6 +163,11 @@
# aws lambda invoke /dev/null --endpoint-url http://localhost:3002 --function-name parks-reso-api-api-checkExpiry
checkExpiry:
handler: lambda/checkExpiry/index.handler
warmUp:
handler: lambda/warmUp/index.handler
package:
patterns:
- lambda/warmUp/worker.js

custom:
dynamodb:
22 changes: 22 additions & 0 deletions terraform/src/captcha.tf
@@ -32,7 +32,18 @@ resource "aws_lambda_alias" "generateCaptchaLambdaLatest" {
function_version = aws_lambda_function.generateCaptchaLambda.version
}

resource "null_resource" "alias_provisioned_concurrency_transition_delay_generate_captcha_lambda" {
depends_on = [aws_lambda_alias.generateCaptchaLambdaLatest]
provisioner "local-exec" {
command = "sleep 240"
}
triggers = {
function_version = "${aws_lambda_function.generateCaptchaLambda.version}"
}
}

resource "aws_lambda_provisioned_concurrency_config" "generateCaptchaLambda" {
depends_on = [null_resource.alias_provisioned_concurrency_transition_delay_generate_captcha_lambda]
function_name = aws_lambda_alias.generateCaptchaLambdaLatest.function_name
provisioned_concurrent_executions = 2
qualifier = aws_lambda_alias.generateCaptchaLambdaLatest.name
@@ -74,7 +85,18 @@ resource "aws_lambda_alias" "verifyCaptchaLambdaLatest" {
function_version = aws_lambda_function.verifyCaptchaLambda.version
}

resource "null_resource" "alias_provisioned_concurrency_transition_delay_verify_captcha_lambda" {
depends_on = [aws_lambda_alias.verifyCaptchaLambdaLatest]
provisioner "local-exec" {
command = "sleep 240"
}
triggers = {
function_version = "${aws_lambda_function.verifyCaptchaLambda.version}"
}
}

resource "aws_lambda_provisioned_concurrency_config" "verifyCaptchaLambda" {
depends_on = [null_resource.alias_provisioned_concurrency_transition_delay_verify_captcha_lambda]
function_name = aws_lambda_alias.verifyCaptchaLambdaLatest.function_name
provisioned_concurrent_executions = 2
qualifier = aws_lambda_alias.verifyCaptchaLambdaLatest.name
2 changes: 2 additions & 0 deletions terraform/src/export.tf
@@ -9,6 +9,8 @@ resource "aws_lambda_function" "exportPassLambda" {
timeout = 30
publish = "true"

memory_size = 2048

environment {
variables = {
TABLE_NAME = data.aws_ssm_parameter.db_name.value,
13 changes: 13 additions & 0 deletions terraform/src/facility.tf
@@ -9,6 +9,8 @@ resource "aws_lambda_function" "readFacilityLambda" {
runtime = "nodejs14.x"
publish = "true"

memory_size = 768

environment {
variables = {
TABLE_NAME = data.aws_ssm_parameter.db_name.value
@@ -24,7 +26,18 @@ resource "aws_lambda_alias" "readFacilityLambdaLatest" {
function_version = aws_lambda_function.readFacilityLambda.version
}

resource "null_resource" "alias_provisioned_concurrency_transition_delay_read_facility_lambda" {
depends_on = [aws_lambda_alias.readFacilityLambdaLatest]
provisioner "local-exec" {
command = "sleep 240"
}
triggers = {
function_version = "${aws_lambda_function.readFacilityLambda.version}"
}
}

resource "aws_lambda_provisioned_concurrency_config" "readFacilityLambda" {
depends_on = [null_resource.alias_provisioned_concurrency_transition_delay_read_facility_lambda]
function_name = aws_lambda_alias.readFacilityLambdaLatest.function_name
provisioned_concurrent_executions = 2
qualifier = aws_lambda_alias.readFacilityLambdaLatest.name