Skip to content

Commit

Permalink
254: Date Range Added to export report (#386)
Browse files Browse the repository at this point in the history
* 254: initial commit

* fixed async issue with tests

* deleted trivy

* removing console logs

* This is an empty commit
  • Loading branch information
meredithom authored Nov 26, 2024
1 parent 68037fe commit 6940434
Show file tree
Hide file tree
Showing 15 changed files with 78 additions and 96 deletions.
43 changes: 0 additions & 43 deletions .github/workflows/analysis.yaml

This file was deleted.

5 changes: 2 additions & 3 deletions arSam/handlers/activity/__tests__/activity.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,9 @@ describe("Activity Test", () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment

});

test("Handler - 200 GET specific activity entry", async () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@ describe('Export Missing Report', () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,8 @@ describe('Export Variance Report', () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
10 changes: 8 additions & 2 deletions arSam/handlers/export/GET/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,13 @@ exports.handler = async (event, context) => {
if (!permissionObject.isAuthenticated) {
return sendResponse(403, { msg: "Error: UnAuthenticated." }, context);
}


//Check if date range provided
const dateRangeStart = event?.queryStringParameters?.dateRangeStart || null;
const dateRangeEnd = event?.queryStringParameters?.dateRangeEnd || null;

// This will give us the sk
const sk = convertRolesToMD5(permissionObject.roles, "export-");
const sk = convertRolesToMD5(permissionObject.roles, "export-", `${dateRangeStart}-${dateRangeEnd}`);

// Check for existing job
let queryObj = {
Expand Down Expand Up @@ -152,6 +156,8 @@ exports.handler = async (event, context) => {
jobId: sk,
roles: permissionObject.roles,
lastSuccessfulJob: lastSuccessfulJob,
dateRangeStart: dateRangeStart,
dateRangeEnd: dateRangeEnd,
}),
};
// Invoke generate report function
Expand Down
7 changes: 4 additions & 3 deletions arSam/handlers/export/__tests__/export.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@ describe('Export Report', () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down Expand Up @@ -130,7 +130,8 @@ describe('Export Report', () => {
statusCode: 200,
}),
);
expect(body.jobObj[dateField]).toMatch(JOBSLIST[0][dateField]);

expect(body.status).toBe('Job not found');
});

test('Handler - 200 GET, generate report', async () => {
Expand Down
21 changes: 13 additions & 8 deletions arSam/handlers/export/invokable/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -73,19 +73,21 @@ exports.handler = async (event, context) => {

// Get reports - 0-20
let scanResults = [];
let roleFilter = null;

if (!roles.includes("sysadmin")) {
roleFilter = roles;
}

await updateJobWithState(
STATE_DICTIONARY.FETCHING,
`Fetching all entries for ${roles}`
);

let roleFilter = null;

if (!roles.includes("sysadmin")) {
roleFilter = roles;
}

logger.info(`=== Exporting filtered data ===`);
scanResults = await getAllRecords(roleFilter);
scanResults = await getAllRecords(roleFilter, event?.dateRangeStart, event?.dateRangeEnd);
logger.info("Scan Results:", scanResults.length);

await updateJobWithState(
STATE_DICTIONARY.FETCHED,
Expand Down Expand Up @@ -116,6 +118,7 @@ exports.handler = async (event, context) => {
await updateJobWithState(STATE_DICTIONARY.UPLOAD_TO_S3);
await uploadToS3();
}

// success
LAST_SUCCESSFUL_JOB = {
key: S3_KEY,
Expand All @@ -132,7 +135,7 @@ exports.handler = async (event, context) => {
}
};

async function getAllRecords(roles = null) {
async function getAllRecords(roles = null, dateRangeStart = null, dateRangeEnd = null) {
let records = [];
let subareas = [];
try {
Expand All @@ -159,8 +162,10 @@ async function getAllRecords(roles = null) {
}
}
for (const subarea of subareas) {
const subAreaRecords = await getRecords(subarea, subarea.bundle, subarea.section, subarea.region, true, false);
const subAreaRecords = await getRecords(subarea, subarea.bundle, subarea.section, subarea.region, true, false, dateRangeStart, dateRangeEnd);
records = records.concat(subAreaRecords);


}
return records;
} catch (err) {
Expand Down
4 changes: 2 additions & 2 deletions arSam/handlers/fiscalYearEnd/__tests__/fiscalYearEnd.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ describe('Fiscal Year End Test', () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
4 changes: 2 additions & 2 deletions arSam/handlers/nameUpdate/__tests__/name-update.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,8 @@ describe("Name Update Tests", () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
6 changes: 3 additions & 3 deletions arSam/handlers/park/__tests__/park.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -53,16 +53,16 @@ describe("Park Test", () => {
jest.resetModules();
process.env = { ...OLD_ENV }; // Make a copy of environment
hash = getHashedText(expect.getState().currentTestName);
process.env.TABLE_NAME = hash
process.env.TABLE_NAME = hash;
TABLE_NAME = process.env.TABLE_NAME;
NAME_CACHE_TABLE_NAME = TABLE_NAME.concat("-nameCache");
CONFIG_TABLE_NAME = TABLE_NAME.concat("-config");
await createDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
4 changes: 2 additions & 2 deletions arSam/handlers/subArea/__tests__/subArea.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,8 @@ describe("Sub Area Test", () => {
await setupDb(TABLE_NAME);
}, 20000);

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
4 changes: 2 additions & 2 deletions arSam/handlers/variance/__tests__/variance.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,8 @@ describe("Variance Test", () => {
await setupDb(TABLE_NAME);
}, 20000);

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
4 changes: 2 additions & 2 deletions arSam/layers/baseLayer/__tests__/dynamoLayer.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ describe("Pass Succeeds", () => {
await setupDb(TABLE_NAME);
});

afterEach(() => {
deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
afterEach(async () => {
await deleteDB(TABLE_NAME, NAME_CACHE_TABLE_NAME, CONFIG_TABLE_NAME);
process.env = OLD_ENV; // Restore old environment
});

Expand Down
50 changes: 32 additions & 18 deletions arSam/layers/baseLayer/baseLayer.js
Original file line number Diff line number Diff line change
Expand Up @@ -175,23 +175,28 @@ async function runQuery(query, paginated = false) {
let pageData = [];
let page = 0;
const command = new QueryCommand(query);
do {
page++;
if (pageData?.LastEvaluatedKey) {
command.input.ExclusiveStartKey = pageData.LastEvaluatedKey;
}
pageData = await dynamoClient.send(command);
data = data.concat(
pageData.Items.map((item) => {
return unmarshall(item);
}),
);
if (page < 2) {
logger.info(`Page ${page} data:`, data);
} else {
logger.info(`Page ${page} contains ${pageData.Items.length} additional query results...`);
}
} while (pageData?.LastEvaluatedKey && !paginated);
try {
do {
page++;
if (pageData?.LastEvaluatedKey) {
command.input.ExclusiveStartKey = pageData.LastEvaluatedKey;
}
pageData = await dynamoClient.send(command);
data = data.concat(
pageData.Items.map((item) => {
return unmarshall(item);
}),
);
if (page < 2) {
logger.info(`Page ${page} data:`, data);
} else {
logger.info(`Page ${page} contains ${pageData.Items.length} additional query results...`);
}
} while (pageData?.LastEvaluatedKey && !paginated);
} catch (error) {
logger.info('Error running query:', error);
throw error;
}

logger.info(`Query result pages: ${page}, total returned items: ${data.length}`);
if (paginated) {
Expand Down Expand Up @@ -318,7 +323,7 @@ async function getSubAreas(orcs, includeLegacy = true) {
// pass the full subarea object.
// pass filter = false to look for every possible activity
// includeLegacy = false will only return records that are not marked as legacy.
async function getRecords(subArea, bundle, section, region, filter = true, includeLegacy = true) {
async function getRecords(subArea, bundle, section, region, filter = true, includeLegacy = true, skMin = null, skMax = null) {
let records = [];
let filteredActivityList = RECORD_ACTIVITY_LIST;
if (filter && subArea.activities) {
Expand All @@ -332,6 +337,15 @@ async function getRecords(subArea, bundle, section, region, filter = true, inclu
':pk': { S: `${subArea.sk}::${activity}` }
}
};
//if exported with date range
if (skMin && skMax) {
skMin = skMin.replace("-","");
skMin = skMin.replace("-","");

recordQuery.KeyConditionExpression += ` AND sk BETWEEN :skMin AND :skMax`;
recordQuery.ExpressionAttributeValues[':skMin'] = { S: skMin };
recordQuery.ExpressionAttributeValues[':skMax'] = { S: skMax };
};
if (!includeLegacy) {
recordQuery.FilterExpression = 'isLegacy = :legacy OR attribute_not_exists(isLegacy)';
recordQuery.ExpressionAttributeValues[':legacy'] = { BOOL: false };
Expand Down
4 changes: 2 additions & 2 deletions arSam/layers/functionsLayer/functionsLayer.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
const { dynamoClient, PutItemCommand, marshall } = require("/opt/baseLayer");
const { createHash } = require("node:crypto");

/**
 * Derives a deterministic MD5 job key from a caller's roles.
 *
 * The digest doubles as the DynamoDB sort key for export jobs, so two
 * requests with the same roles, prefix, and date range resolve to the
 * same job record. The exact concatenation order (prefix, dash-joined
 * roles, dateRange) is part of the persisted key format — do not change it.
 *
 * @param {string[]} roles - Role names to fold into the key.
 * @param {string} [prefix=""] - Namespace prepended to the key material (e.g. "export-").
 * @param {string} [dateRange="all"] - Date-range tag; "all" when no range was requested.
 * @returns {string} 32-character lowercase hex MD5 digest.
 */
function convertRolesToMD5(roles, prefix = "", dateRange = "all") {
  const keyMaterial = `${prefix}${roles.join("-")}${dateRange}`;
  return createHash("md5").update(keyMaterial).digest("hex");
}
Expand Down

0 comments on commit 6940434

Please sign in to comment.