Skip to content

Commit

Permalink
fix exports, update s3 to v3 sdk
Browse files Browse the repository at this point in the history
  • Loading branch information
davidclaveau committed Jul 2, 2024
1 parent ddabf2d commit 09a642b
Show file tree
Hide file tree
Showing 13 changed files with 591 additions and 527 deletions.
8 changes: 5 additions & 3 deletions .github/workflows/deploy_dev.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -68,13 +68,15 @@ jobs:
STACK_NAME: ${{ vars.AR_API_STACK_NAME }}
ACCOUNT_ID: ${{ vars.ACCOUNT_ID }}
AWS_ACCOUNT_LIST: ${{ vars.AWS_ACCOUNT_LIST }}
SSO_ISSUER: ${{ secrets.SSO_ISSUER }}
SSO_JWKSURI: ${{ secrets.SSO_JWKSURI }}
SSO_ISSUER: ${{ vars.SSO_ISSUER }}
SSO_JWKSURI: ${{ vars.SSO_JWKSURI }}
SSO_CLIENT_ID: ${{ secrets.SSO_CLIENT_ID }}
SSO_ORIGIN: ${{ vars.SSO_ORIGIN }}
S3_BUCKET_DATA: ${{ vars.S3_BUCKET_DATA }}
DATA_REGISTER_NAME_ENDPOINT: ${{ secrets.DATA_REGISTER_NAME_ENDPOINT }}
DATA_REGISTER_NAME_API_KEY: ${{ secrets.DATA_REGISTER_NAME_API_KEY }}
run: |
sam deploy --stack-name $STACK_NAME --no-confirm-changeset --no-fail-on-empty-changeset --parameter-overrides "AccountId=$ACCOUNT_ID" "Stage=$STAGE" "SSOIssuerUrl=$SSO_ISSUER" "SSOJWKSUri=$SSO_JWKSURI" "AWSAccountList=$AWS_ACCOUNT_LIST" "S3BucketData=$S3_BUCKET_DATA" "DataRegisterNameEndpoint=$DATA_REGISTER_NAME_ENDPOINT" "DataRegisterNameApiKey=$DATA_REGISTER_NAME_API_KEY"
sam deploy --stack-name $STACK_NAME --no-confirm-changeset --no-fail-on-empty-changeset --parameter-overrides "AccountId=$ACCOUNT_ID" "Stage=$STAGE" "SSOIssuerUrl=$SSO_ISSUER" "SSOJWKSUri=$SSO_JWKSURI" "SSOClientId=$SSO_CLIENT_ID" "SSOOrigin=$SSO_ORIGIN" "AWSAccountList=$AWS_ACCOUNT_LIST" "S3BucketData=$S3_BUCKET_DATA" "DataRegisterNameEndpoint=$DATA_REGISTER_NAME_ENDPOINT" "DataRegisterNameApiKey=$DATA_REGISTER_NAME_API_KEY"
- shell: bash
env:
Expand Down
12 changes: 7 additions & 5 deletions .github/workflows/deploy_prod.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -73,19 +73,21 @@ jobs:
sam build --cached
# Prevent prompts and failure when the stack is unchanged
- name: SAM deploy
env:
- name: SAM deploy
env:
STAGE: ${{ vars.AR_API_STAGE }}
STACK_NAME: ${{ vars.AR_API_STACK_NAME }}
ACCOUNT_ID: ${{ vars.ACCOUNT_ID }}
AWS_ACCOUNT_LIST: ${{ vars.AWS_ACCOUNT_LIST }}
SSO_ISSUER: ${{ secrets.SSO_ISSUER }}
SSO_JWKSURI: ${{ secrets.SSO_JWKSURI }}
SSO_ISSUER: ${{ vars.SSO_ISSUER }}
SSO_JWKSURI: ${{ vars.SSO_JWKSURI }}
SSO_CLIENT_ID: ${{ secrets.SSO_CLIENT_ID }}
SSO_ORIGIN: ${{ vars.SSO_ORIGIN }}
S3_BUCKET_DATA: ${{ vars.S3_BUCKET_DATA }}
DATA_REGISTER_NAME_ENDPOINT: ${{ secrets.DATA_REGISTER_NAME_ENDPOINT }}
DATA_REGISTER_NAME_API_KEY: ${{ secrets.DATA_REGISTER_NAME_API_KEY }}
run: |
sam deploy --stack-name $STACK_NAME --no-confirm-changeset --no-fail-on-empty-changeset --parameter-overrides "Stage=$STAGE" "SSOIssuerUrl=$SSO_ISSUER" "SSOJWKSUri=$SSO_JWKSURI" "AWSAccountList=$AWS_ACCOUNT_LIST" "S3BucketData=$S3_BUCKET_DATA" "DataRegisterNameEndpoint=$DATA_REGISTER_NAME_ENDPOINT" "DataRegisterNameApiKey=$DATA_REGISTER_NAME_API_KEY"
sam deploy --stack-name $STACK_NAME --no-confirm-changeset --no-fail-on-empty-changeset --parameter-overrides "AccountId=$ACCOUNT_ID" "Stage=$STAGE" "SSOIssuerUrl=$SSO_ISSUER" "SSOJWKSUri=$SSO_JWKSURI" "SSOClientId=$SSO_CLIENT_ID" "SSOOrigin=$SSO_ORIGIN" "AWSAccountList=$AWS_ACCOUNT_LIST" "S3BucketData=$S3_BUCKET_DATA" "DataRegisterNameEndpoint=$DATA_REGISTER_NAME_ENDPOINT" "DataRegisterNameApiKey=$DATA_REGISTER_NAME_API_KEY"
- shell: bash
env:
Expand Down
10 changes: 6 additions & 4 deletions .github/workflows/deploy_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -82,14 +82,16 @@ jobs:
STACK_NAME: ${{ vars.AR_API_STACK_NAME }}
ACCOUNT_ID: ${{ vars.ACCOUNT_ID }}
AWS_ACCOUNT_LIST: ${{ vars.AWS_ACCOUNT_LIST }}
SSO_ISSUER: ${{ secrets.SSO_ISSUER }}
SSO_JWKSURI: ${{ secrets.SSO_JWKSURI }}
SSO_ISSUER: ${{ vars.SSO_ISSUER }}
SSO_JWKSURI: ${{ vars.SSO_JWKSURI }}
SSO_CLIENT_ID: ${{ secrets.SSO_CLIENT_ID }}
SSO_ORIGIN: ${{ vars.SSO_ORIGIN }}
S3_BUCKET_DATA: ${{ vars.S3_BUCKET_DATA }}
DATA_REGISTER_NAME_ENDPOINT: ${{ secrets.DATA_REGISTER_NAME_ENDPOINT }}
DATA_REGISTER_NAME_API_KEY: ${{ secrets.DATA_REGISTER_NAME_API_KEY }}
run: |
sam deploy --stack-name $STACK_NAME --no-confirm-changeset --no-fail-on-empty-changeset --parameter-overrides "Stage=$STAGE" "SSOIssuerUrl=$SSO_ISSUER" "SSOJWKSUri=$SSO_JWKSURI" "AWSAccountList=$AWS_ACCOUNT_LIST" "S3BucketData=$S3_BUCKET_DATA" "DataRegisterNameEndpoint=$DATA_REGISTER_NAME_ENDPOINT" "DataRegisterNameApiKey=$DATA_REGISTER_NAME_API_KEY"
sam deploy --stack-name $STACK_NAME --no-confirm-changeset --no-fail-on-empty-changeset --parameter-overrides "AccountId=$ACCOUNT_ID" "Stage=$STAGE" "SSOIssuerUrl=$SSO_ISSUER" "SSOJWKSUri=$SSO_JWKSURI" "SSOClientId=$SSO_CLIENT_ID" "SSOOrigin=$SSO_ORIGIN" "AWSAccountList=$AWS_ACCOUNT_LIST" "S3BucketData=$S3_BUCKET_DATA" "DataRegisterNameEndpoint=$DATA_REGISTER_NAME_ENDPOINT" "DataRegisterNameApiKey=$DATA_REGISTER_NAME_API_KEY"
- shell: bash
env:
WEBHOOK_URL: ${{ secrets.WEBHOOK_URL }}
Expand Down
229 changes: 115 additions & 114 deletions arSam/handlers/export-variance/GET/index.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,11 @@
const { S3 } = require("@aws-sdk/client-s3");
const { Lambda } = require("@aws-sdk/client-lambda");
const { marshall, unmarshall } = require('@aws-sdk/util-dynamodb');
const s3 = new S3();
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3");
const { marshall } = require('@aws-sdk/util-dynamodb');

const defaultRegion = process.env.AWS_REGION || "ca-central-1";
const s3Client = new S3Client({ region: defaultRegion });
const bucket = process.env.S3_BUCKET_DATA || "parks-ar-assets-tools";

const IS_OFFLINE =
process.env.IS_OFFLINE && process.env.IS_OFFLINE === "true" ? true : false;
Expand All @@ -15,7 +19,7 @@ if (IS_OFFLINE) {

const lambda = new Lambda(options);

const { TABLE_NAME, dynamodb, sendResponse, logger } = require("/opt/baseLayer");
const { runQuery, TABLE_NAME, dynamodb, sendResponse, logger } = require("/opt/baseLayer");
const crypto = require('crypto');

const VARIANCE_EXPORT_FUNCTION_NAME =
Expand All @@ -33,107 +37,98 @@ exports.handler = async (event, context) => {
return sendResponse(200, {}, 'Success', null, context);
}

// decode permissions
let permissionObject = event.requestContext.authorizer;
permissionObject.roles = JSON.parse(permissionObject.roles);

if (!permissionObject.isAuthenticated) {
return sendResponse(403, { msg: "Error: Not authenticated" }, context);
}

let params = event?.queryStringParameters || {};
params['roles'] = permissionObject.roles;

// Must provide fiscal year end
if (!params?.fiscalYearEnd) {
return sendResponse(400, { msg: "No fiscal year end provided." }, context);
}

// generate a job id from params+role
let hashParams = {...params};
delete hashParams.getJob;
const decodedHash = JSON.stringify(hashParams) + JSON.stringify(permissionObject.roles);
const hash = crypto.createHash('md5').update(decodedHash).digest('hex');
const pk = "variance-exp-job";

// check for existing job
let existingJobQueryObj = {
TableName: TABLE_NAME,
ExpressionAttributeValues: {
":pk": { S: pk },
":sk": { S: hash }
},
KeyConditionExpression: "pk = :pk and sk = :sk"
}
try {
let permissionObject = event.requestContext.authorizer;
permissionObject.roles = JSON.parse(permissionObject.roles);

let jobObj = {};
if (!permissionObject.isAuthenticated) {
return sendResponse(403, { msg: "Error: Not authenticated" }, context);
}
let params = event?.queryStringParameters || {};
params['roles'] = permissionObject.roles;

try {
const res = await dynamodb.query(existingJobQueryObj);
jobObj = unmarshall(res?.Items?.[0]) || null;
} catch (error) {
logger.error("Error querying for existing job: ", error);
return sendResponse(500, { msg: "Error querying for existing job" }, context);
}
// Must provide fiscal year end
if (!params?.fiscalYearEnd) {
return sendResponse(400, { msg: "No fiscal year end provided." }, context);
}
// generate a job id from params+role
let hashParams = {...params};
delete hashParams.getJob;
const decodedHash = JSON.stringify(hashParams) + JSON.stringify(permissionObject.roles);
const hash = crypto.createHash('md5').update(decodedHash).digest('hex');
const pk = "variance-exp-job";

// check for existing job
let queryObj = {
TableName: TABLE_NAME,
ExpressionAttributeValues: {
":pk": { S: pk },
":sk": { S: hash }
},
KeyConditionExpression: "pk = :pk and sk = :sk"
}

if (params?.getJob) {
// We're trying to download an existing job
if (!jobObj || !jobObj?.sk) {
// Job doesn't exist.
return sendResponse(200, { msg: "Requested job does not exist" }, context);
} else if (
jobObj?.progressState === "complete" ||
jobObj?.progressState === "error"
) {
// Job is not currently running. Return signed URL
try {
let urlKey = jobObj?.key;
const res = (await runQuery(queryObj))[0];

if (params?.getJob) {
// We're trying to download an existing job

if (!res) {
// Job doesn't exist.
return sendResponse(200, { msg: "Requested job does not exist" }, context);
} else if (
res.progressState === "complete" ||
res.progressState === "error"
) {
// Job is not currently running. Return signed URL
let urlKey = res.key;
let message = 'Job completed';
if (jobObj?.progressState === 'error') {
urlKey = jobObj?.lastSuccessfulJob?.key;
if (res.progressState === 'error') {
urlKey = res.lastSuccessfulJob.key || {};
message = 'Job failed. Returning last successful job.';
}
let URL = "";
if (!process.env.IS_OFFLINE) {
if (!IS_OFFLINE) {
logger.debug('S3_BUCKET_DATA:', process.env.S3_BUCKET_DATA);
logger.debug('Url key:', urlKey);
URL = await s3.getSignedUrl("getObject", {
Bucket: process.env.S3_BUCKET_DATA,
Expires: EXPIRY_TIME,
Key: urlKey,
});
let command = new GetObjectCommand({ Bucket: bucket, Key: urlKey });
URL = await getSignedUrl(
s3Client,
command,
{ expiresIn: EXPIRY_TIME });
}
// send back new job object
delete jobObj.pk;
delete jobObj.sk;
delete jobObj.key;
return sendResponse(200, { msg: message, signedURL: URL, jobObj: jobObj }, context);
} catch (error) {
logger.error("Error getting signed URL: ", error);
return sendResponse(500, { msg: "Error getting signed URL" }, context);
delete res.pk;
delete res.sk;
delete res.key;
return sendResponse(
200,
{ msg: message, signedURL: URL, jobObj: res },
context
);
} else {
// Job is currently running. Return latest job object
delete res?.pk;
delete res?.sk;
delete res?.key;
return sendResponse(
200,
{ msg: "Job is currently running", jobObj: res },
context
);
}

} else {
// Job is currently running. Return latest job object
delete jobObj?.pk;
delete jobObj?.sk;
delete jobObj?.key;
return sendResponse(200, { msg: "Job is currently running", jobObj: jobObj }, context);
}
} else {
// We are trying to generate a new report
// If there's already a completed job, we want to save this in case the new job fails
let lastSuccessfulJob = {};
if (jobObj && jobObj?.progressState === "complete" && jobObj?.key) {
lastSuccessfulJob = {
key: jobObj?.key,
dateGenerated: jobObj?.dateGenerated || new Date().toISOString(),
// We are trying to generate a new report
// If there's already a completed job, we want to save this in case the new job fails
let lastSuccessfulJob = {};
if (res?.progressState === "complete" && res?.key) {
lastSuccessfulJob = {
key: res?.key,
dateGenerated: res?.dateGenerated || new Date().toISOString(),
}
} else if (res?.progressState === "error") {
lastSuccessfulJob = res?.lastSuccessfulJob || {};
}
} else if (jobObj?.progressState === "error") {
lastSuccessfulJob = jobObj?.lastSuccessfulJob || {};
}

try {
// create the new job object
const varianceExportPutObj = {
TableName: TABLE_NAME,
Expand All @@ -154,31 +149,37 @@ exports.handler = async (event, context) => {
};

logger.debug('Creating new job:', varianceExportPutObj);
let newJob;
try {
newJob = await dynamodb.putItem(varianceExportPutObj);
// Check if there's already a report being generated.
    // If there is no instance of a job or the job is 100% complete, generate a report.
logger.debug('New job created:', newJob);

// run the export function
const varianceExportParams = {
FunctionName: VARIANCE_EXPORT_FUNCTION_NAME,
InvocationType: "Event",
LogType: "None",
Payload: JSON.stringify({
jobId: hash,
params: params,
lastSuccessfulJob: lastSuccessfulJob
})
}
// Invoke the variance report export lambda
await lambda.invoke(varianceExportParams);

const newJob = await dynamodb.putItem(varianceExportPutObj);
logger.debug('New job created:', newJob);
return sendResponse(200, { msg: "Variance report export job created" }, context);
} catch (error) {
// a job already exists
logger.error("Error creating new job:", error);
return sendResponse(200, { msg: "Variance report export job already running" }, context);

// run the export function
const varianceExportParams = {
FunctionName: VARIANCE_EXPORT_FUNCTION_NAME,
InvocationType: "Event",
LogType: "None",
Payload: JSON.stringify({
jobId: hash,
params: params,
lastSuccessfulJob: lastSuccessfulJob
})
}

// Invoke the variance report export lambda
await lambda.invoke(varianceExportParams);

return sendResponse(200, { msg: "Variance report export job created" }, context);
} catch (error) {
// a job already exists
logger.error("Error creating new job:", error);
return sendResponse(200, { msg: "Variance report export job already running" }, context);

}
} catch (error) {
logger.error(error);
return sendResponse(400, { error: error }, context);
}
}
};
1 change: 1 addition & 0 deletions arSam/handlers/export-variance/GET/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
"@aws-sdk/client-lambda": "^3.568.0",
"@aws-sdk/client-s3": "^3.568.0",
"@aws-sdk/util-dynamodb": "^3.529.1",
"@aws-sdk/s3-request-presigner": "^3.568.0",
"crypto": "1.0.1"
}
}
20 changes: 11 additions & 9 deletions arSam/handlers/export-variance/invokable/index.js
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
const { S3 } = require('@aws-sdk/client-s3');
const s3 = new S3();
const { marshall } = require('@aws-sdk/util-dynamodb');
const fs = require('fs');

const { VARIANCE_CSV_SCHEMA, VARIANCE_STATE_DICTIONARY } = require("/opt/constantsLayer");
const { getParks, TABLE_NAME, dynamodb, runQuery, logger } = require("/opt/baseLayer");
const s3 = S3();

const FILE_PATH = process.env.FILE_PATH || "./";
const FILE_PATH = process.env.FILE_PATH || "/tmp/";
const FILE_NAME = process.env.FILE_NAME || "A&R_Variance_Report";

let LAST_SUCCESSFUL_JOB = {};
Expand Down Expand Up @@ -38,8 +38,13 @@ exports.handler = async (event, context) => {
throw new Error("Missing fiscal year end parameter");
}

await updateJobWithState(VARIANCE_STATE_DICTIONARY.FETCHING);
await updateJobWithState(
VARIANCE_STATE_DICTIONARY.FETCHING,
`Fetching all entries for ${roles}`
);

logger.info(`=== Exporting filtered data ===`);

// collect variance records
const records = await getVarianceRecords(fiscalYearEnd, roles);
await updateJobWithState(VARIANCE_STATE_DICTIONARY.FORMATTING);
Expand Down Expand Up @@ -281,13 +286,10 @@ async function uploadToS3(csvData) {
Bucket: process.env.S3_BUCKET_DATA,
Key: S3_KEY,
Body: buffer,
}

if (!process.env.IS_OFFLINE) {
await s3.putObject(params);
}
logger.debug("Uploaded to S3");
};

await s3.putObject(params);
logger.debug("File successfully uploaded to S3");
}

function convertMonth(monthNumber){
Expand Down
Loading

0 comments on commit 09a642b

Please sign in to comment.