
Commit

BRS-351: Add Null/Missing Data Export (#371)
* add missing data export tab functionality

Signed-off-by: David <daveclaveau@gmail.com>

* revert local dynamo url commit

Signed-off-by: David <daveclaveau@gmail.com>

* resolving PR change suggestions

Signed-off-by: David <daveclaveau@gmail.com>

* fix test

Signed-off-by: David <daveclaveau@gmail.com>

---------

Signed-off-by: David <daveclaveau@gmail.com>
davidclaveau authored Aug 30, 2024
1 parent 6017744 commit 7e9f7fc
Showing 12 changed files with 1,408 additions and 468 deletions.
21 changes: 21 additions & 0 deletions arSam/__tests__/mock_data.json
@@ -250,6 +250,27 @@
"progressPercentage": 100
}
],
"MISSING_JOBSLIST": [
{
"dateGenerated": "2023-01-05T22:12:49.314Z",
"lastSuccessfulJob": {
"dateGenerated": "2023-01-05T22:12:49.314Z",
"key": "176bafa03068c01779ffe93362a56056/A&R_Missing_Report.csv"
},
"progressDescription": "Job Complete.",
"progressState": "complete",
"sk": "176bafa03068c01779ffe93362a56056",
"pk": "missing-exp-job",
"progressPercentage": 100
}
],
"MISSING_MOCKJOB": {
"sk": "MISSING_MOCK_JOB_ID",
"progressPercentage": 0,
"key": "MOCK_S3_KEY",
"progressDescription": "",
"lastSuccessfulJob": {}
},
"VARIANCE_MOCKJOB": {
"sk": "VARIANCE_MOCK_JOB_ID",
"progressPercentage": 0,
4 changes: 1 addition & 3 deletions arSam/__tests__/settings.js
@@ -4,12 +4,10 @@ const TABLE_NAME = process.env.TABLE_NAME || 'ParksAr-tests';
const CONFIG_TABLE_NAME = process.env.CONFIG_TABLE_NAME || 'ConfigAr-tests';
const NAME_CACHE_TABLE_NAME = process.env.NAME_CACHE_TABLE_NAME || 'NameCacheAr-tests';


module.exports = {
REGION,
ENDPOINT,
TABLE_NAME,
CONFIG_TABLE_NAME,
NAME_CACHE_TABLE_NAME
NAME_CACHE_TABLE_NAME,
};

14 changes: 13 additions & 1 deletion arSam/handlers/events/event.json
@@ -5,7 +5,14 @@
"httpMethod": "POST",
"isBase64Encoded": false,
"queryStringParameters": {
"foo": "bar"
"getJob": "false",
"fiscalYearEnd": "2023"
},
"jobId": "5b263c707c578ae7d824844b251c69dc",
"params": {
"fiscalYearEnd": "2023",
"orcs": "0001",
"roles": "[\"sysadmin\"]"
},
"pathParameters": {
"proxy": "/path/to/resource"
@@ -40,6 +47,11 @@
"requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
"requestTime": "09/Apr/2015:12:34:56 +0000",
"requestTimeEpoch": 1428582896000,
"authorizer": {
"roles": "[\"sysadmin\"]",
"isAdmin": "true",
"isAuthenticated": "true"
},
"identity": {
"cognitoIdentityPoolId": null,
"accountId": null,
151 changes: 151 additions & 0 deletions arSam/handlers/export-missing/GET/index.js
@@ -0,0 +1,151 @@
const bucket = process.env.S3_BUCKET_DATA || 'parks-ar-assets-tools';

const IS_OFFLINE = process.env.IS_OFFLINE && process.env.IS_OFFLINE === 'true' ? true : false;

const options = {};
if (IS_OFFLINE) {
options.region = 'local';
// For local we use port 3002 because we're hitting an invokable
options.endpoint = 'http://localhost:3002';
}

const {
dynamoClient,
exportPutObj,
getOne,
GetObjectCommand,
getSignedUrl,
lambda,
logger,
marshall,
PutItemCommand,
runQuery,
s3Client,
sendResponse,
TABLE_NAME
} = require('/opt/baseLayer');
const { createHash } = require('node:crypto');

const MISSING_EXPORT_FUNCTION_NAME =
process.env.MISSING_EXPORT_FUNCTION_NAME || 'ar-api-MissingExportInvokableFunction';

const EXPIRY_TIME = process.env.EXPORT_EXPIRY_TIME ? Number(process.env.EXPORT_EXPIRY_TIME) : 60 * 15; // 15 minutes

exports.handler = async (event, context) => {
logger.info('GET: Export Missing Data - ', event?.queryStringParameters);

// Allow CORS
if (event.httpMethod === 'OPTIONS') {
return sendResponse(200, {}, context);
}

try {
let permissionObject = event.requestContext.authorizer;
permissionObject.roles = JSON.parse(permissionObject?.roles);
permissionObject.isAdmin = JSON.parse(permissionObject?.isAdmin || false);
permissionObject.isAuthenticated = JSON.parse(permissionObject?.isAuthenticated || false);

if (!permissionObject.isAuthenticated) {
return sendResponse(403, { msg: 'Error: Not authenticated' }, context);
}
let params = event?.queryStringParameters || {};
params['roles'] = permissionObject.roles;

// Must provide fiscal year end
if (!params?.fiscalYearEnd) {
return sendResponse(400, { msg: 'No fiscal year end provided.' }, context);
}

// generate a job id from params+role
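// getJob is excluded below so a generate request and a later getJob poll hash to the same job id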
let hashParams = { ...params };

delete hashParams.getJob;
const decodedHash = JSON.stringify(hashParams) + JSON.stringify(permissionObject.roles);
const hash = createHash('md5').update(decodedHash).digest('hex');
const pk = 'missing-exp-job';

const res = await getOne(pk, hash);

if (params?.getJob) {
// We're trying to download an existing job

if (!res || Object.keys(res).length === 0) {
// Job doesn't exist.
return sendResponse(200, { msg: 'Requested job does not exist' }, context);
} else if (res.progressState === 'complete' || res.progressState === 'error') {
// Job is not currently running. Return signed URL
let urlKey = res.key;
let message = 'Job completed';
if (res.progressState === 'error') {
urlKey = res.lastSuccessfulJob.key || {};
message = 'Job failed. Returning last successful job.';
}
let URL = '';
if (!IS_OFFLINE) {
logger.debug('S3_BUCKET_DATA:', bucket);
logger.debug('Url key:', urlKey);
let command = new GetObjectCommand({ Bucket: bucket, Key: urlKey });
URL = await getSignedUrl(s3Client, command, { expiresIn: EXPIRY_TIME });
}
// send back new job object
delete res.pk;
delete res.sk;
delete res.key;
return sendResponse(200, { msg: message, signedURL: URL, jobObj: res }, context);
} else {
// Job is currently running. Return latest job object
delete res?.pk;
delete res?.sk;
delete res?.key;
return sendResponse(200, { msg: 'Job is currently running', jobObj: res }, context);
}
} else {
// We are trying to generate a new report
// If there's already a completed job, we want to save this in case the new job fails
let lastSuccessfulJob = {};
if (res?.progressState === 'complete' && res?.key) {
lastSuccessfulJob = {
key: res?.key,
dateGenerated: res?.dateGenerated || new Date().toISOString()
};
} else if (res?.progressState === 'error') {
lastSuccessfulJob = res?.lastSuccessfulJob || {};
}
// create the new job object
const missingExportPutObj = exportPutObj(pk, hash, params, lastSuccessfulJob);

logger.debug('Creating new job:', missingExportPutObj);
let newJob;
try {
newJob = await dynamoClient.send(new PutItemCommand(missingExportPutObj));
// Check if there's already a report being generated.
// If there is no instance of a job or the job is 100% complete, generate a report.
logger.debug('New job created:', newJob);

// run the export function
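// InvocationType 'Event' invokes the export lambda asynchronously, so this handler returns without waiting for it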
const missingExportParams = {
FunctionName: MISSING_EXPORT_FUNCTION_NAME,
InvocationType: 'Event',
LogType: 'None',
Payload: JSON.stringify({
jobId: hash,
params: params,
lastSuccessfulJob: lastSuccessfulJob
}),
};
// Invoke the missing report export lambda
await lambda.invoke(missingExportParams);

return sendResponse(200, { msg: 'missing report export job created' }, context);
} catch (error) {
// a job already exists
logger.error('Error creating new job:', error);
return sendResponse(200, { msg: 'missing report export job already running' }, context);
}
}
} catch (error) {
logger.error(error);
return sendResponse(400, { error: error }, context);
}
};
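
For reviewers, a minimal sketch of how a client is expected to drive this endpoint (the base URL, route path, and auth header below are illustrative assumptions, not part of this commit): one call without getJob starts the export, and subsequent calls with getJob=true poll until the signed download URL is available.

// Hypothetical client-side usage of the export-missing GET endpoint.
// BASE, the route path, and the auth header are placeholders for illustration only.
const BASE = 'https://api.example.com';
const headers = { Authorization: 'Bearer <token>' };

async function downloadMissingReport(fiscalYearEnd) {
  // 1. Kick off (or reuse) an export job for the given fiscal year end
  await fetch(`${BASE}/export-missing?fiscalYearEnd=${fiscalYearEnd}`, { headers });

  // 2. Poll with getJob=true; the job id hash still matches because getJob is stripped before hashing
  for (;;) {
    const res = await fetch(`${BASE}/export-missing?fiscalYearEnd=${fiscalYearEnd}&getJob=true`, { headers });
    const { signedURL, jobObj } = await res.json();
    if (signedURL) return signedURL; // pre-signed S3 URL, valid for EXPIRY_TIME seconds
    console.log('Still running:', jobObj?.progressPercentage);
    await new Promise((resolve) => setTimeout(resolve, 2000)); // wait before polling again
  }
}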