diff --git a/.circleci/config.yml b/.circleci/config.yml index 08ccf2f39a..9c550f7ac7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -26,6 +26,19 @@ executors: docker: - image: cimg/aws:2024.03 commands: + install_cf_tools: + description: "Install Cloud Foundry CLI" + steps: + - run: + name: Install CF tools + command: | + # Install Cloud Foundry CLI + wget -q -O - https://packages.cloudfoundry.org/debian/cli.cloudfoundry.org.key | sudo apt-key add - + echo "deb https://packages.cloudfoundry.org/debian stable main" | sudo tee /etc/apt/sources.list.d/cloudfoundry-cli.list + sudo apt-get update + sudo apt-get install -y cf8-cli + # Install plugin needed for connect-to-service + cf install-plugin -f https://github.com/cloud-gov/cf-service-connect/releases/download/v1.1.3/cf-service-connect_linux_amd64 sparse_checkout: description: "Checkout sparse directories from a specific branch." parameters: @@ -353,6 +366,14 @@ commands: -p ${<< parameters.cloudgov_password >>} \ -o << pipeline.parameters.cg_org >> \ -s ${<< parameters.cloudgov_space >>} + - run: + name: Acquire Lock + command: | + chmod +x ./automation/ci/scripts/*-lock.sh + ./automation/ci/scripts/acquire-lock.sh \ + "<< parameters.app_name >>" \ + "<< parameters.build_branch >>" \ + "<< pipeline.number >>" - run: name: Push application with deployment vars command: | @@ -386,6 +407,15 @@ commands: --var BUILD_COMMIT=<< parameters.build_commit >> \ --var BUILD_NUMBER=<< pipeline.number >> \ --var BUILD_TIMESTAMP="$(date +"%Y-%m-%d %H:%M:%S")" + - run: + name: Release Lock + command: | + chmod +x ./automation/ci/scripts/*-lock.sh + ./automation/ci/scripts/release-lock.sh \ + "<< parameters.app_name >>" \ + "<< parameters.build_branch >>" \ + "<< pipeline.number >>" + when: always # - run: # name: Push maintenance application # command: | @@ -418,12 +448,28 @@ commands: -p ${<< parameters.cloudgov_password >>} \ -o << pipeline.parameters.cg_org >> \ -s ${<< parameters.cloudgov_space >>} + - run: + name: Acquire Lock + command: | + chmod +x ./automation/ci/scripts/*-lock.sh + ./automation/ci/scripts/acquire-lock.sh \ + "tta-automation" \ + "<< pipeline.git.branch >>" \ + "<< pipeline.number >>" - run: name: Migrate database command: | cf run-task << parameters.app_name >> \ --command "yarn db:migrate:prod" \ --name "migrate" + - run: + name: Release Lock + command: | + chmod +x ./automation/ci/scripts/*-lock.sh + ./automation/ci/scripts/release-lock.sh \ + "tta-automation" \ + "<< pipeline.git.branch >>" \ + "<< pipeline.number >>" cf_automation_task: description: "Login to Cloud Foundry space, run automation task, and send notification" parameters: @@ -508,6 +554,14 @@ commands: -p ${<< parameters.cloudgov_password >>} \ -o << pipeline.parameters.cg_org >> \ -s ${<< parameters.cloudgov_space >>} + - run: + name: Acquire Lock + command: | + chmod +x ./automation/ci/scripts/*-lock.sh + ./automation/ci/scripts/acquire-lock.sh \ + "tta-automation" \ + "<< pipeline.git.branch >>" \ + "<< pipeline.number >>" - run: name: Start Log Monitoring command: | @@ -579,6 +633,14 @@ commands: slack_bot_token: $SLACK_BOT_TOKEN slack_channel: "acf-head-start-eng" message_text_file: "/tmp/message_file" + - run: + name: Release Lock + command: | + chmod +x ./automation/ci/scripts/*-lock.sh + ./automation/ci/scripts/release-lock.sh \ + "tta-automation" \ + "<< pipeline.git.branch >>" \ + "<< pipeline.number >>" - run: name: Logout of service account command: | @@ -696,7 +758,7 @@ parameters: type: string dev_git_branch: # change to feature 
branch to test deployment
     description: "Name of github branch that will deploy to dev"
-    default: "main"
+    default: "TTAHUB-3542/TTAHUB-3544/s3Queue-scanQueue-coverage"
     type: string
   sandbox_git_branch: # change to feature branch to test deployment
     default: "TTAHUB-3678/login"
     type: string
@@ -716,6 +778,21 @@ parameters:
   manual-trigger:
     type: boolean
     default: false
+  env_list:
+    description: "List of environments to manage (start/stop)"
+    type: string
+    default: "DEV,SANDBOX"
+  space_list:
+    description: "List of Cloud Foundry spaces corresponding to each environment"
+    type: string
+    default: ""
+  env_state:
+    description: "State of the environment to change (start, stop, restart, restage)"
+    type: string
+    default: "none"
+  manual-manage-env:
+    type: boolean
+    default: false
   manual-restore:
     type: boolean
     default: false
@@ -1460,6 +1537,46 @@ jobs:
       rds_service_name: ttahub-prod
       s3_service_name: ttahub-db-backups
       backup_prefix: production
+  manage_env_apps:
+    executor: docker-executor
+    parameters:
+      env_list:
+        type: string
+        description: "Comma-separated list of environments to manage (both smarthub and similarity-api)"
+        default: "<< pipeline.parameters.env_list >>"
+      env_state:
+        type: string
+        description: "Action to perform on apps (start, stop, restart, restage)"
+        default: "<< pipeline.parameters.env_state >>"
+      check_activity:
+        type: boolean
+        description: "If true, only stop apps if inactive for more than activity_timeout minutes"
+        default: false
+      activity_timeout:
+        type: string
+        description: "Number of minutes of inactivity before an app is eligible to be stopped"
+        default: "60"
+    steps:
+      - install_cf_tools
+      # Sparse checkout the automation scripts
+      - sparse_checkout:
+          directories: "automation/ci/scripts"
+          branch: "<< pipeline.git.branch >>"
+      # Perform the desired action on environments
+      - run:
+          name: Manage Apps
+          command: |
+            chmod +x ./automation/ci/scripts/*-lock.sh
+            chmod +x ./automation/ci/scripts/manage_apps.sh
+            ./automation/ci/scripts/manage_apps.sh \
+              --env_list "<< parameters.env_list >>" \
+              --env_state "<< parameters.env_state >>" \
+              --check_activity "<< parameters.check_activity >>" \
+              --activity_timeout << parameters.activity_timeout >> \
+              --cg_api "<< pipeline.parameters.cg_api >>" \
+              --cg_org "<< pipeline.parameters.cg_org >>" \
+              --branch "<< pipeline.git.branch >>" \
+              --build "<< pipeline.number >>"
   restore_production_for_processing:
     docker:
       - image: cimg/base:2024.05
@@ -1574,11 +1691,14 @@ workflows:
   build_test_deploy:
     when:
       and:
+        # Run the full build/test/deploy workflow only when every manual trigger
+        # parameter is false, including manual-manage-env (i.e., this run is not
+        # for starting/stopping environments)
         - equal: [false, << pipeline.parameters.manual-trigger >>]
         - equal: [false, << pipeline.parameters.manual-restore >>]
         - equal: [false, << pipeline.parameters.manual-process >>]
         - equal: [false, << pipeline.parameters.manual-backup >>]
         - equal: [false, << pipeline.parameters.manual-full-process >>]
+        - equal: [false, << pipeline.parameters.manual-manage-env >>]
         - equal: [false, << pipeline.parameters.manual-restore-staging >>]
         - equal: [false, << pipeline.parameters.manual-restore-sandbox >>]
         - equal: [false, << pipeline.parameters.manual-restore-dev >>]
@@ -1707,6 +1827,61 @@ workflows:
       equal: [true, << pipeline.parameters.manual-trigger >>]
     jobs:
       - backup_upload_production
+  stop_lower_env_workflow:
+    triggers:
+      - schedule:
+          cron: "0 1 * * 2-6" # Runs M-F at 1 AM UTC (6 PM PDT / 5 PM PST the previous evening)
+          filters:
+            branches:
+              only:
+                - main
+    jobs:
+      - manage_env_apps:
+          env_state: "stop"
+          env_list: "<< pipeline.parameters.env_list >>"
+  start_lower_env_workflow:
+    triggers:
+      - schedule:
+          cron: "0 11 * * 1-5" # Runs M-F at 11 AM UTC (6 AM EST / 7 AM EDT)
+          filters:
+            branches:
+              only:
+                - main
+    jobs:
+      - manage_env_apps:
+          env_state: "start"
+          env_list: "<< pipeline.parameters.env_list >>"
+  manual_manage_env_workflow:
+    when:
+      equal: [true, << pipeline.parameters.manual-manage-env >>]
+    jobs:
+      - manage_env_apps:
+          env_state: "<< pipeline.parameters.env_state >>"
+          env_list: "<< pipeline.parameters.env_list >>"
+  monitor_and_shutdown_envs:
+    triggers:
+      # Every 15 minutes from 11 AM to 11:45 PM UTC (6 AM to 6:45 PM EST, 3 AM to 3:45 PM PST), Monday to Friday
+      - schedule:
+          cron: "0,15,30,45 11-23 * * 1-5"
+          filters:
+            branches:
+              only:
+                - main
+                - TTAHUB-3071/shutdown-unutilized-envs
+
+      # Every 15 minutes from 12 AM to 3:45 AM UTC (7 PM to 10:45 PM EST, 4 PM to 7:45 PM PST), Monday to Friday
+      - schedule:
+          cron: "0,15,30,45 0-3 * * 2-6"
+          filters:
+            branches:
+              only:
+                - main
+                - TTAHUB-3071/shutdown-unutilized-envs
+    jobs:
+      - manage_env_apps:
+          env_state: "stop"
+          env_list: "<< pipeline.parameters.env_list >>"
+          check_activity: true
   manual_restore_production:
     when:
       equal: [true, << pipeline.parameters.manual-restore >>]
@@ -1750,4 +1925,4 @@ workflows:
     when:
       equal: [true, << pipeline.parameters.manual-restore-dev >>]
     jobs:
-      - restore_processed_to_dev
\ No newline at end of file
+      - restore_processed_to_dev
diff --git a/automation/ci/scripts/acquire-lock.sh b/automation/ci/scripts/acquire-lock.sh
new file mode 100644
index 0000000000..b476e3469d
--- /dev/null
+++ b/automation/ci/scripts/acquire-lock.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# Convert environment to app name if necessary
+APP_NAME=$( [ "$1" == "DEV" ] && echo "tta-smarthub-dev" || ([ "$1" == "SANDBOX" ] && echo "tta-smarthub-sandbox") || echo "$1" )
+BRANCH=$2
+BUILD_ID=$3
+
+# Constants
+LOCK_TIMEOUT=7200 # 2 hours in seconds
+
+# Fetch environment variables
+LOCK_DATA=$(cf env "$APP_NAME" | grep -A 10 LOCK_APP | sed ':a;N;$!ba;s/\n/ /g' | grep -oP "[{][^}]+[}]")
+
+# Check if lock exists
+if [ -n "$LOCK_DATA" ]; then
+  LOCK_TIMESTAMP=$(echo "$LOCK_DATA" | jq -r '.timestamp')
+  LOCK_BRANCH=$(echo "$LOCK_DATA" | jq -r '.branch')
+  LOCK_BUILD_ID=$(echo "$LOCK_DATA" | jq -r '.build_id')
+
+  CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+  TIME_DIFF=$(($(date -d "$CURRENT_TIME" +%s) - $(date -d "$LOCK_TIMESTAMP" +%s)))
+
+  if [ $TIME_DIFF -lt $LOCK_TIMEOUT ]; then
+    echo "App $APP_NAME is locked by branch $LOCK_BRANCH with build ID $LOCK_BUILD_ID."
+    exit 1
+  fi
+
+  echo "Lock is stale. Attempting to acquire lock..."
+fi
+
+# Check if app is restaging
+APP_STATE=$(cf apps | grep "$APP_NAME" | awk '{print $2}')
+if [ "$APP_STATE" != "started" ] && [ "$APP_STATE" != "stopped" ]; then
+  echo "App $APP_NAME is currently $APP_STATE. Cannot acquire lock."
+  exit 1
+fi
+
+# Acquire lock
+TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+LOCK_DATA_JSON=$(jq -n \
+  --arg branch "$BRANCH" \
+  --arg build_id "$BUILD_ID" \
+  --arg timestamp "$TIMESTAMP" \
+  '{branch: $branch, build_id: $build_id, timestamp: $timestamp}')
+
+cf set-env "$APP_NAME" LOCK_APP "$LOCK_DATA_JSON"
+
+# Validate the lock
+LOCK_DATA=$(cf env "$APP_NAME" | grep -A 10 LOCK_APP | sed ':a;N;$!ba;s/\n/ /g' | grep -oP "[{][^}]+[}]")
+VALID_BRANCH=$(echo "$LOCK_DATA" | jq -r '.branch')
+VALID_BUILD_ID=$(echo "$LOCK_DATA" | jq -r '.build_id')
+
+if [ "$VALID_BRANCH" == "$BRANCH" ] && [ "$VALID_BUILD_ID" == "$BUILD_ID" ]; then
+  echo "Lock successfully acquired for app $APP_NAME."
+  exit 0
+else
+  echo "Failed to acquire lock for app $APP_NAME."
+  exit 1
+fi
diff --git a/automation/ci/scripts/manage_apps.sh b/automation/ci/scripts/manage_apps.sh
new file mode 100644
index 0000000000..5e26712c97
--- /dev/null
+++ b/automation/ci/scripts/manage_apps.sh
@@ -0,0 +1,233 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# Usage: manage_apps.sh --env_list <csv> --env_state <start|stop|restart|restage> \
+#   --check_activity <true|false> --activity_timeout <minutes> \
+#   --cg_api <api-url> --cg_org <org> --branch <branch> --build <build-number>
+
+# Parse arguments
+while [[ "$#" -gt 0 ]]; do
+  case $1 in
+    --env_list) env_list="$2"; shift ;;
+    --env_state) env_state="$2"; shift ;;
+    --check_activity) check_activity="$2"; shift ;;
+    --activity_timeout) activity_timeout="$2"; shift ;;
+    --cg_api) cg_api="$2"; shift ;;
+    --cg_org) cg_org="$2"; shift ;;
+    --branch) branch="$2"; shift ;;
+    --build) build="$2"; shift ;;
+    *) echo "Unknown parameter: $1"; exit 1 ;;
+  esac
+  shift
+done
+
+if [[ -z "${env_list:-}" || -z "${env_state:-}" || -z "${check_activity:-}" || -z "${cg_api:-}" || -z "${cg_org:-}" || -z "${branch:-}" || -z "${build:-}" ]]; then
+  echo "Error: Missing required arguments."
+  exit 1
+fi
+
+# Define prefixes for environments
+primary_prefix="tta-smarthub"
+secondary_prefixes=("tta-similarity-api")
+
+# Convert the comma-separated list into an array via parameter expansion
+# (the unquoted expansion is intentional so word splitting builds the array)
+apps=(${env_list//,/ })
+
+for env in "${apps[@]}"; do
+  echo "Processing environment group: $env"
+
+  # Check if the environment is PROD
+  if [ "$env" == "PROD" ]; then
+    echo "Error: Cannot process the PROD environment. Exiting."
+    exit 1
+  fi
+
+  # Normalize environment name to lowercase for suffix
+  env_suffix=$(echo "${env}" | tr '[:upper:]' '[:lower:]')
+
+  # Dynamically derive variable names for environment
+  space_var="CLOUDGOV_${env^^}_SPACE"
+  username_var="CLOUDGOV_${env^^}_USERNAME"
+  password_var="CLOUDGOV_${env^^}_PASSWORD"
+
+  # Resolve the actual values of the variables
+  space="${!space_var:-}"
+  username="${!username_var:-}"
+  password="${!password_var:-}"
+
+  if [[ -z "$space" || -z "$username" || -z "$password" ]]; then
+    echo "Error: Missing required environment variable(s) for $env"
+    exit 1
+  fi
+
+  # Log in to Cloud Foundry
+  cf login \
+    -a "$cg_api" \
+    -u "$username" \
+    -p "$password" \
+    -o "$cg_org" \
+    -s "$space"
+
+  ./automation/ci/scripts/acquire-lock.sh \
+    "$env" \
+    "$branch" \
+    "$build"

+  # Perform activity check only for the primary prefix (tta-smarthub)
+  if [[ "$check_activity" == "true" && "$env_state" == "stop" ]]; then
+    app_name="${primary_prefix}-${env_suffix}"
+
+    current_state=$(cf apps | grep "${app_name}" | awk '{print $2}' || echo "unknown")
+    if [ "$current_state" == "stopped" ]; then
+      echo "$app_name is already stopped."
+
+      ./automation/ci/scripts/release-lock.sh \
+        "$env" \
+        "$branch" \
+        "$build"
+      continue
+    fi
+
+    echo "Fetching recent logs for $app_name..."
+
+    # Safely fetch recent logs
+    recent_logs=$(cf logs --recent "$app_name" 2>/dev/null || echo "")
+
+    # Check if logs are empty
+    if [[ -z "$recent_logs" ]]; then
+      echo "No recent logs found for $app_name. Defaulting activity duration to 43200 seconds (12 hours)."
+      activity_duration=43200
+    else
+      # Filter for logs containing '"label":"REQUEST"' and "api"
+      request_logs=$(echo "$recent_logs" | grep '"label":"REQUEST"' | grep "api" || echo "")
+
+      if [[ -z "$request_logs" ]]; then
+        echo "No matching activity logs found for $app_name. Defaulting activity duration to 43200 seconds (12 hours)."
+ activity_duration=43200 + else + # Extract the last activity timestamp + last_activity=$(echo "$request_logs" | awk '{print $1}' | tail -n 1 || echo "") + + if [[ -z "$last_activity" ]]; then + echo "Failed to extract activity timestamp. Defaulting activity duration to 43200 seconds (12 hours)." + activity_duration=43200 + else + # Safely calculate duration in seconds + current_time=$(date +%s) + activity_time=$(date -ud "$last_activity" +%s 2>/dev/null || echo "0") + + if [[ "$activity_time" -eq "0" ]]; then + echo "Invalid timestamp for last activity. Defaulting activity duration to 43200 seconds (12 hours)." + activity_duration=43200 + else + activity_duration=$((current_time - activity_time)) + fi + fi + fi + fi + + echo "Last activity duration for $app_name: $activity_duration seconds" + + echo "Fetching power-on events for $app_name..." + + # Get events output safely + events_output=$(cf events "$app_name" 2>/dev/null || echo "") + + # Check if events_output is empty + if [[ -z "$events_output" ]]; then + echo "No events found for $app_name. Defaulting power-on duration to 43200 seconds (12 hours)." + power_on_duration=43200 + else + # Filter for 'audit.app.start' and check if any matches are found + audit_start_events=$(echo "$events_output" | grep "audit.app.start" || echo "") + + if [[ -z "$audit_start_events" ]]; then + echo "No 'audit.app.start' event found for $app_name. Defaulting power-on duration to 43200 seconds (12 hours)." + power_on_duration=43200 + else + # Extract the last 'audit.app.start' timestamp + last_power_on=$(echo "$audit_start_events" | awk '{print $1, $2}' | tail -n 1 || echo "") + + if [[ -z "$last_power_on" ]]; then + echo "Failed to extract timestamp for 'audit.app.start'. Defaulting power-on duration to 43200 seconds (12 hours)." + power_on_duration=43200 + else + # Safely calculate duration in seconds + current_time=$(date +%s) + power_on_time=$(date -ud "$last_power_on" +%s 2>/dev/null || echo "0") + + if [[ "$power_on_time" -eq "0" ]]; then + echo "Invalid timestamp for last power-on. Defaulting power-on duration to 43200 seconds (12 hours)." + power_on_duration=43200 + else + power_on_duration=$((current_time - power_on_time)) + fi + fi + fi + fi + + echo "Last power-on duration for $app_name: $power_on_duration seconds" + + if [ "$activity_duration" -le $(($activity_timeout * 60)) ] || [ "$power_on_duration" -le $(($activity_timeout * 60)) ]; then + echo "$app_name has been active or powered on within the last $activity_timeout minutes. No action taken." + + ./automation/ci/scripts/release-lock.sh \ + "$env" \ + "$branch" \ + "$build" + continue + fi + fi + + # Perform the desired action on all apps in the environment group + for prefix in "$primary_prefix" "${secondary_prefixes[@]}"; do + app_name="${prefix}-${env_suffix}" + echo "Processing app: $app_name" + + # Get the current state of the app + current_state=$(cf apps | grep "$app_name" | awk '{print $2}' || echo "unknown") + echo "Current state of $app_name: $current_state" + + # Perform the desired action + case "$env_state" in + stop) + if [[ "$current_state" != "stopped" ]]; then + echo "Stopping $app_name..." + cf stop "$app_name" + else + echo "$app_name is already stopped." + fi + ;; + start) + if [[ "$current_state" != "started" ]]; then + echo "Starting $app_name..." + cf start "$app_name" + else + echo "$app_name is already started." + fi + ;; + restart) + echo "Restarting $app_name..." + cf restart "$app_name" + ;; + restage) + echo "Restaging $app_name..." 
+ cf restage "$app_name" + ;; + *) + echo "Unknown env_state: $env_state" + + ./automation/ci/scripts/release-lock.sh \ + "$env" \ + "$branch" \ + "$build" + + exit 1 + ;; + esac + done + + ./automation/ci/scripts/release-lock.sh \ + "$env" \ + "$branch" \ + "$build" +done diff --git a/automation/ci/scripts/release-lock.sh b/automation/ci/scripts/release-lock.sh new file mode 100644 index 0000000000..bc187b9ef7 --- /dev/null +++ b/automation/ci/scripts/release-lock.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +# Convert environment to app name if necessary +APP_NAME=$( [ "$1" == "DEV" ] && echo "tta-smarthub-dev" || ([ "$1" == "SANDBOX" ] && echo "tta-smarthub-sandbox") || echo "$1" ) +BRANCH=$2 +BUILD_ID=$3 + +# Fetch environment variables +LOCK_DATA=$(cf env "$APP_NAME" | grep -A 10 LOCK_APP | sed ':a;N;$!ba;s/\n/ /g' | grep -oP "[{][^}]+[}]") + +# Check if lock exists +if [ -z "$LOCK_DATA" ]; then + echo "App $APP_NAME is not locked." + exit 0 +fi + +# Extract lock metadata +LOCK_BRANCH=$(echo "$LOCK_DATA" | jq -r '.branch') +LOCK_BUILD_ID=$(echo "$LOCK_DATA" | jq -r '.build_id') + +# Validate ownership +if [ "$LOCK_BRANCH" != "$BRANCH" ] || [ "$LOCK_BUILD_ID" != "$BUILD_ID" ]; then + echo "Cannot release lock: the app is locked by branch $LOCK_BRANCH with build ID $LOCK_BUILD_ID." + exit 1 +fi + +# Release lock +cf unset-env "$APP_NAME" LOCK_APP + +# Validate lock release +LOCK_DATA=$(cf env "$APP_NAME" | grep -A 10 LOCK_APP | sed ':a;N;$!ba;s/\n/ /g' | grep -oP "[{][^}]+[}]") + +if [ -z "$LOCK_DATA" ]; then + echo "Lock successfully released for app $APP_NAME." + exit 0 +else + echo "Failed to release lock for app $APP_NAME." + exit 1 +fi diff --git a/bin/latest_backup.sh b/bin/latest_backup.sh index a3d670d82c..b11486677d 100644 --- a/bin/latest_backup.sh +++ b/bin/latest_backup.sh @@ -235,12 +235,14 @@ download_and_verify() { local checksum_sha256=$(curl -s "$sha256_url") local checksum_md5=$(curl -s "$md5_url") - # Download file and generate hashes simultaneously - echo "Downloading file and generating hashes..." - $downloader "$backup_url" |\ - tee >(sha256sum | awk '{print $1}' > "${backup_file_name}.sha256") \ - >(md5sum | awk '{print $1}' > "${backup_file_name}.md5") \ - > "$backup_file_name" + # Download file + echo "Downloading file..." + $downloader "$backup_url" > "$backup_file_name" + + # Calculate hashes + echo "Calculating hashes..." + sha256sum "$backup_file_name" | awk '{print $1}' > "${backup_file_name}.sha256" + md5sum "$backup_file_name" | awk '{print $1}' > "${backup_file_name}.md5" # Verify SHA-256 checksum echo "Verifying SHA-256 checksum..." 
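The LOCK_APP value the new lock scripts read and write is plain JSON built with jq, and staleness is just a two-hour window on its timestamp. A minimal local sketch of that round trip, assuming GNU date and jq (both present on the cimg images used above; the scripts' `date -d` parsing is GNU-specific and would need `date -j -f` on BSD/macOS):

    #!/bin/bash
    # Sketch: build a lock payload the way acquire-lock.sh does,
    # then decide freshness against the same 2-hour timeout.
    LOCK_TIMEOUT=7200  # 2 hours in seconds, matching acquire-lock.sh

    LOCK_DATA_JSON=$(jq -n \
      --arg branch "feature/example" \
      --arg build_id "12345" \
      --arg timestamp "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
      '{branch: $branch, build_id: $build_id, timestamp: $timestamp}')

    LOCK_TIMESTAMP=$(echo "$LOCK_DATA_JSON" | jq -r '.timestamp')
    # GNU date parses the ISO-8601 timestamp directly.
    TIME_DIFF=$(( $(date +%s) - $(date -d "$LOCK_TIMESTAMP" +%s) ))
    if [ "$TIME_DIFF" -lt "$LOCK_TIMEOUT" ]; then
      echo "lock is fresh (${TIME_DIFF}s old); a competing build would back off"
    else
      echo "lock is stale; a competing build may overwrite it"
    fi

Because manual_manage_env_workflow is driven entirely by pipeline parameters, it can also be kicked off through the CircleCI v2 API rather than by editing the config. A sketch, with the project slug and token left as placeholders rather than real values:

    # Trigger the manual manage-env workflow via the CircleCI v2 API.
    # PROJECT_SLUG (e.g. gh/<org>/<repo>) and CIRCLE_TOKEN are placeholders.
    curl -X POST "https://circleci.com/api/v2/project/${PROJECT_SLUG}/pipeline" \
      -H "Circle-Token: ${CIRCLE_TOKEN}" \
      -H "Content-Type: application/json" \
      -d '{
        "branch": "main",
        "parameters": {
          "manual-manage-env": true,
          "env_state": "restart",
          "env_list": "DEV,SANDBOX"
        }
      }'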
diff --git a/docs/logical_data_model.encoded b/docs/logical_data_model.encoded index 93aff94573..0751b6ac46 100644 --- a/docs/logical_data_model.encoded +++ b/docs/logical_data_model.encoded @@ -1 +1 @@ -xLrjSzmsalxENy7IVaZYccnjaijLdMhlQcLPTbndoof9pjOcbL9193I3c0Hc0L2Eaij_lmB05m04IO3ao7Q2JzA0O7VpGQFHwCRBFnW7b5TbaQJfPocu9oXFzvJS5h1awNl4Tod0smBcQKR9UvRUGSYJD6Nl4Du32igqG1ZoXprC2UKxOZpA2i1O-bkIfcdwbD0SqbDI49h-vPjl_lFal_wcbL_lKFOEGyRIhvCq_LsASYhUXYn9Qped6KVtY3zSmWpj4SIS_PQaJrAKfySfmXnk8KsFqxzl8OOKS9Z_Jv9kAS3yfLdXykpvsUNP-UdPLNhqGZndHtz5UP8IleFq37F47qRUUDFMGh1-yKaAMidsX8xnEp4gjkzd50g1JtXwd79KGiP1kLVOYv_fhmJ3z3Z5vFtdNuZwrJxth_zxajZu2UOd-pqbzv1EWxZy-oe6wSXRRncaZuD5ciC2JjLEKaouU8V3NtYICWvunTBZ5EAg74Bpb1G5k2KKSFBdGdG0Z-UeW8-1mnqguCXRKemG77o_V-YxNn1GF4Umo2yXsnFCPe0WI7W38Igt7xWbeBY6vRu074eXBYWfzwFsp_t6OI1BQOr9tRL-_6aCnKl8I4MpHmMGrZn7tcx5EWn4YhXiYiRaA9Z4V_BdpGXKDyRhloKf1gN2NPOdajCT5OKrINzgV_-_BxySQwRXbfnkGKBH5c5GJBISqLPBBSSsWDUwRPmLXC6hYkqXpay95bsX7MpqPvSGC8ARWIU5E7y6vBWac2GfOlBBOF5mbYCEnQVS4b9085wv0mMJTIuv1tlG4X4AqOriikNQP-r0TmzejWFqyw-__VOBJMUQzmOwxPlRdw4Kw9HKT8xXQmbuDsRxHjTIalS8WLgu3pasjbBF6oJNWEbZ6pPR2UTR_JLW-GJjVn-vtY2WctzUY3ySN51oZtAO3um3KrGTWq7_bOcclYPxb7sOTkQ2zSZxIhGFqW7y1qiV8FgVldJO9BEu7dPI1-WR4KCmvFbgzZZRVk6f4lexI64tjWuEawpQ2w2kHIIa8INxMyjJwtOJG-byAnZUe9-WpHisTMUdh_xtj7tybi4yIK6U8uu81iKvuIASb2bT4DDyx7gXjPTks-Xl4YSiuNJh5e3njmh3fmz-q_sGm025MqJmQCEB2e_KYv-a8Xg5P4_inFawqJIyZllLVSpiVrOpjxqEg3ZNf5CEYgmIq-3yoeZtDDhIMGu10g6NfEZwUGuBT0_fmsl8ToJlYPPN573rOJdCK0cApx_PVFM5IrsxgN7Vto-5qiWgFpE2RuLwu6q5jZOMukK2Ra6cLCQKnQdp6bTSbRUGlhcx09IZ3Er5VBHha2JTWoqK2C5Q355YrBGbsLEofQ0KRUPwJ1lZst_mxmeoBfN0ihGYNcr0mwhR4Z6XErRZoGUSGnD93kWMyWko5mqk5Dp1Nhe8pWSSEFHFFBb7b5UWU5OhWqjBMtklnmk5AeLRUfs2jzKtmfIj6I2Zam4ftc5z8LqigIYJQ9HWpwx9o2pF8jZ8VmyAb3yVHUpxkifSgrTFiKsTVIBwmHvi0OTly6j8jz2OhdDQmGC5ipMGoxB2A9Cg_ufiVMUVcVNUwEJQhFeHpJTxedbyEMq6dD4Xiab67IeqB-hbW8FExeT_krz0Ph9JBGgT6A6d9-bU3FQCipVuxRuWm1c-Eco7EBk7c2kxU84Ami-1yXzE8UQGodRoK19eKHbfUfjX2oKsE8zGC4Lyb6oGATkvsxY5Udf2O1UfmRRnCrJwYAXvBM9mzlxmqxUNbsVdrpVNBr-VNLsVlBvmzJ_o7fk8kztf06WSRFiEtvvazeUrY7cRSq-J8qps5VeLcb_p3Zuuchy9IGWP1ZJ4pA86cp0HZW_DiMQM4p0wS0tu2o5PWayjorqW_EufoDwr-cMW7Pp5387gmsKg2Jvf9eINUbH7Fh5A5cSrG_EO8H0UCcxZ1aaVbqsmGaTihvZFqipcL2xJcRqKXLUeH0MWY3-yeAJQ-wKmm_NVOZFngbNxGA4u-4jML3dCNzwT4twIbmWx6ldt4R2cWReBJVJEyyBNnKKPz-y0Uudb318Cv5jx4ImBB3a8dku0tdhOWv3nPErizlonnuwNOVZ-oXqn9qK-yAGD2YaIGzwYV2334_reZ86xQ-3mxD7Dgvv3KxYffRde4AQNjPSYQfCbCv7WlT00L7NOKRrt9pxVI53i4ExOa7qJ36k2WRMkrwKotEV4jSlfDJ0S65rYC_J9Hirket1NjM4LElZFnUEJIE8uWPaQsFNHVEBbTbJlrNCrpNIJ728zqjP31BVPWcPAj0DiftVwMj9pCt2Y5xycERf3gpMYp3VL-O29bXwSkVkvI_ELNGMfBW25c8Uu4I5BmJycJxcMv7i4z7IJn_IklrxiEINbLGaePpi8-JK75mN0-fHdVBaOvUuy-z9bP1BvSvbtUeNmktLNzVitvROJ8tYa3GiyIlakuRu06IphVERYwAoJw4gnsaD6OMsvFPlV-j8fRHW2Xgho_4MGIgct4Dw-M5qJvplUjFj4lT-RGFVH3H_d3ZSXCrImwOdBOiq0umeKpokS8xnzJJ3VzKXcEN8Pw5gDNOywqpfi4gIMGdSuL3wxsiCCWG8nVbO2L4nLSX92M2Q5fJinAP5UsLhpL2VffqJgLNCAfUVdHzm8iOZWbEJEznq3YAR7fDf_a5ZmL9Z62CpyWQB1CWhcjXwEQKoGIGp3z40JDh_0bJ_x6QEdyBqkgji2PSPImnsWCFydaaRBt7xsKYQ7dW4EV1C_AftqV3mJ_xAj50owaU59ZzFFNZuxExz--VpbsRFFZe-Fv2pEalf6lNFaZv9SI83lI9SJCoPD56Q4vgUaKckHwYeisy4Iz1LR-rYFiAN_2WQ5z2JAuQi0o7iiKkKdNCm1zNLPwN7wjpfmb476XYluDzBS1kBxvwDjPA6_Ph_BuHt2I0zNoMGjnE2sW0CB3Exu_WfoZl3MUCVgmaA3ycf624wepMl0thKlxW7lG_4Qe09ISrNAhKYfSlSgC6tajfU5pKrxjsCB52E_d6VOwzkUf3-JMyUImYTYcPs8iIrsrYXaLS7RbOMioeGsQnPHHmnpY07Ee0zidABj5j9s6MrRI--L5issTm8EjuG-Q93FMBOZ8pTwwDL_NBXICDnLXHC9WX4d5E6jt2z_LPKbe0Xw3zN6ZDSrLcipywE_7cdl7Ad9m3s2xr_cRWATS9UoNaEbUUieQ82HSORJeDflVTCxdZlt06EKaF8DtGACVhLNzNwx-GzlVmkMH2sikL8R1tfaZ84DOSPfbOs_GQK5QDQVJdlW4g5dGRFrha6V8pcbylOBxrJzQKsf2JUgqhv
trAeiXDDKNgYAXjgAzuwwKN1aWEv9en9L3RNc_QEm1JMIKfr_QOOHA-RxCUWuON1wUfiHIg7KnsSGvAyGXii3PBK2Ut1lGuxwHjQ5clmTOkAtbo-uN9N3LL39zp2YYOzeY-yCcLgM7uSA-obR3Zca9SB0o94Q_H43AdPkt7UQvaZNhgXyUkEkw1VtTUrGaUtnx_9oIWxHCMPEt3yLJLv50zTMEjj8FfhANN60Sq3pntDAMCtHIsaRyJH5hL_-n9pXHzJDFvnPFAWzZ8PmKTiygjs8qv0u3N7CM5UqSlQC-V0sJaqlwi_nZaCjdjH_Oj4iLUyQbV23Z_HF7u9rewatAVzZ66wjDLRiE9OkF5Q_-UFuElhthEtgzB2Adavmo_XjWLvi45TjLR6ve5vUq3JP7st6PA_Rj29IchgIYcT4Nw2f3lRb76IxqeGfC3MW6j3m4GCqhdz2XqC7ZNdmkfQkDna_lAqB2Jgi6Yv0N2ItpKzTqai-lea0ucQ4rgyNRIkKYqpkBFtLDp2rZJtlU82FRMj_-F8eIx2bwvG7llEGQwkwJu6_y2iEzo7RSd3zpbZJrLK-hUquFJ3wMt48Tst5cNXHRiFfrR6Ru36EjlFMAVsuT_NpUivIZOoKUs5Ud2EVTZpkuozTdNTgunzd2UkC3OeVo-wlQslhfr7-0VP_zJ7rs-tbjqzBtzqrvu3JDJQ6BhW89BGN77hxcBOjUzvXgBDtdzqXolXfRH1zwhx45s9tWLozEbtxdF0hCSdmawF-DY_zzvHPLFKMMLJsbbXKzXLuS4lUq1uypPPq7LdNv9LcOXu7uiqxIlr2hDcGHptwXI_2Z3ZBoEW0UoMUVyINi5oe3JeNa3A2CIoMEkisFUCKQMYUbXFwXRsD8lK2Ti6mTxIMEcrrsUfYU7UdxFra-0CwTpyfm-DIPjc_NG_s5DwXNnndHT-vYflkwTxtLBU5QmVV8NOtptVMkW2MTVIi6w7sUDZHZMIE9_-2W8Pk7XkvE-n3cRQnZOPqzCZZ2YAhJEpe-EpuMFm6cJWxznxG1uItEW1RJzk9nyxdkrIdyvvHQanYXZOjh3kvBTt7B9CzRCzPji-JaT8-9Vb3kno_ERRBMUZSbrXHD8A7VqJ1VxjF2H2UavgZHVkMz2RKiKFgcrfjBo47vhc6JObV0Az49NIgfTlB4EVqU5_v478aySTmcOFl4Sw-t17hzjYQe-m2Y-FjkbT5uHpDR6UX-txb5nt0TymXooZQEtlrofxb5PHkfNRegE_Eof2zWQp7PSpTx5K-i1xt6TPxJH3UTdcbFpictiS2ppaBUCrZQcUVFTjzF2glCaAXF7yLPBjLhPm82trb-R4455_Rn-hMFj2DxQqn8LdlKeNWzaKSdXJGRolg8T8nFgT9F4bBo9Ywww7DpQdJUPkTl4rciDt7sttOZQZQ9ECzpcQOU-2STSZlFQ08I7VGtBlkxGO49eKEEIWEZtrlcqC8L_PhFdpcJx2eDqU_kOUKeXoktKUgZiN4AyqkxKCTy7aFYWhsCmSwQn-DbI_rLJ5W2pleja3V3X-v7TwItxU7EQhvm0UoTfoyWovVZhkc4hxDrnxA6e37D00xKiBB0z-Hj6Fo2XuwOCB4zEqLsQffuFgH6dUSgm3RBccS7gSdbvSd__hnfsPSaU3To5f7ubm0yGad5TyHAYF8ZfC3cezqtKVa30hj28RdpWEWZ1Is9ed8LmDyBm-iG8oZZ3Lho6-4Jt5JYZjNRaXjneGz5-T2tPI0lfCLn1WfJp9Eqh9TV7CZcrm55PBqtnKe47-u0OnLLAmjkSa1szqIGFCcNGWAnKrxm2b0MD1IIx2Ozgn9gEkV8RFdig3HrUO-cgMntANtsY2ct5LtDkeHJupxuUHPFIlq0AMRGXBQYqdySF8S5HnIlonyLMBdj5Ykm1gzpUA7arSdzYLc7b4puSxVQ3LaMLdS0DOhkdte5NL0JWmvekulaarLATMGHVeAY0v3omd7yFqD9Uzlp9a7AN97QJtDks5ewsY9L-OYZZelIYDCep0DB2Q9KFMXig8v4Q8-H6FtVaIyt7Wv8lyHAZTK4Q9HYBGLWjPzRH6j4RIYNVV7AXghCBpiI6K4QmIiYDCeJh7lUS4KuR7W6RagEW9a0sHTfH01KW2DtlmSGNA1507g0HHd9GvOdZrxvG597l1XkVLXuW9sWFCR3hQzsBEeB45iFstmczNMc1kKGxcrfRzyKZrtvk7K2UetpcJD13v1jsn7bN915W6o08e3c0EKaIko8q0DG36wFOLGdAQEW3D-DNJ4QG2dY94B8_Tbq16W1g3nMviZJZPEOuSSHBHijE8uvm5I1CS1JWJ51cNm0j-8JulFy7s_4IGNI14W5q0HbHugmxV1HeHQXABo348JKR5G6RnQDiBJmHEX6OGcmcB14dAzwvyEcP1hNFzqy4pUxHfnCd4Qk9KqmkI4oN7bPKHg12dt_OmHicd8eefDuXMroWSbnqKB4QaXfAAj2i8g6gb9e292WmYvx--BA9ae3LqE675ouHILoGEaED05GXNxtHHE4Q11W0RkROimkM6A45aGnVqVWH15U6nuXcznYm3f04LWB62cxc2BA1aenLMWKmX5P6nat7T9HN04ECwxgo8q4fh1YwiYFXe-KMyiNE9ngiF8RAEXSS4I9mrkO1ptCqLa6cGYA1OezerF4HM9ojGTKX4cDqmJrqn5sDomHOXiY4Rkgue8AOJwrrr5P0GoqVjneUAutYe2qm0pV3jMnCZ2A6BZm6RuJgw8KODH1CK2fNyXLCH5uEA-5ofYex2nRxhwK-EIV9QUC4Oo96qlIv94pjFJew-EqY-__F4ljmN8tcts1K1umtSeUuSXO__zzKzVVYchQ5k0XLpjhusyzZXHkyzyYTcdfwJ7wLOKRTIVxacIZDpzP74Jbyp1t6BtMveUuQEqTw1uUYokDpFBDYaOP9OuhdO9u3zrvr1wRF4MLJQEClErrQLgGyNU0hLl94DN9CmMoBuSfYTJMs4pzA1-yu2oDJqYL3BvtW7b6cx9QTaUeqtLp7E6SEf5Ix9v0ZLvj9uKElEcqYeSZMl-N6oRPwjnCotcs8VsQgycWxkFR5BQFLsDbxA1KzyfQwt6K-iNKuoxggjn7-xfDlEXVXt2krrebnwdR0yp10h4U5kwwUwkryfCCPBUxHKpHtBrxd5z2wpzP2FBdrDIzjvP947lq6dXlLXb6yEx41oMiUoT-2Gz9gjmPQRkbOFfSNrgllN0oTowNYAecHttTqYdpIpVz4gKEMbSXXLzZqp-D-bgX7FkM7GAgrWrYYr-9RqZyWDOUDn88EOWejP6_Hcll4WOHqv761EzbDLIEESeuGFFdPoewCFDChezxqzdOBbuePo6D5-nS6jxA8-1d6tN2l9dFRLRJjoPwHJCNENDXD9nf7QWaNQTN5xRQklHnbN8ag4JcTUT-EqbqKPxkhdT2jmd-BauXP9tIgX0RXvjQfRhCEelrUwnsuvMNqKAtUaQPjuvN4juDZoOIJnPFjrlA6zIGaLC2nH3qtpfs076q1R3t44eM9
WXBshJywNcqUwflI9kAdqeKRlLbR1cAwzefPncQJxSFNCg1fMspGSjO30R_P3_zakONZgBhhHTW49SnwAo7IhoUMWO4xRMdQtxLp2r_W8CCRQ9aZSORjH_r--9eYwBP3T-Jv9U7DIVwT0vQSAB77ZKZwwkmjyyti3WRpuQu6-V4XqqBtAzt_dHrgF6eRQALnUZZMVhPPjUKSTxSVYgZfuxebR1z5i3L0VaLGSBfkA0XAgV-Gj5MVriJX7Pc8zDvCog-hCvahLd0pUJQFMhRg1mYlP63kLZ6KLBQREEvdmvAvp3EuHXtgbdeP91EzH6QU2BtfCatFiUZ5Ti-c6BkVJDDjpeJPdesIf_ijOfm5QU2vbFQnxXz4tQ6uq7Iwp3GKFiiVNcSSefscuXXVdJXqtt8_4uvs_MC62VhsWC1BtpkJRzX-sRZoDYUqgVIr2hIkE-o47BVOhT4e5QJRxh23e0gdLxVVwZRHRUMelbQG-I6D0XzN2ZmtK0APpSfxYkfAAjHh2n7ZEa42i9CTP1_7h6Ie8CjOIJjNucsIfDiEq8-hJ1Olu1_dJfufeuJzHQUshHaTuCJZXMb6JKKhbb6J5KIwMPR29R5lE5rILnYS0sJgcIaQF45aZWj1Nceu7-kgOK0pWu-Hg7EgqrMesQwxRYgbAu1ykcr8BTEcPdzGmIDahzKadpPVAJ0H-GPvTzFhUuDY48gtxIvS_3S-vw-17NPPlslouCwGYHhnXh9tRXPC1zaJo3iRgU7CcQwvCCRbryV7fzqaYaN5lyCKlKwOpynfsSNygl2xrgto_Zh_edwUUWB0reaPROWpEiDPUPsRuwaqwrabv0Pr4igB445xsOaB8STddIndpM245jUh6eH0DwVbfQ1VHgGlsyD9l7PI1DeJ5ofYdcyMeSV9swykPdqNEd0Uf2VM5KKfzJCTj8AQ453TiAiHcqpTHaHratwhFeTlZM11jmGoavXw66AkcFD-4Sj6EADDBxhdG6pSOVgSJVJReZ9LH4acuSNskjDgt_7r-XlPolStdn47F7UkBb4lWmNiPl6DljOZI7bX2OFFhB1-RmSdcD7P--rwZsHIpRKq69MgHaFGcctPtCqXAbRL4mjNO_RzqRwzqUWcAY3lMORIgOUDsq5AxbRIikpMgMzkehjLRdERXYooc3itPIjzzwEmMu-6TgNq0mLqFLQXQQhkdRUGDLpjufBV6JHHkfXlRpH8qOQ-mh4EIR5MLj-KBwzgH3vs5EDfRnaUlOAxuJQueBmFZRVIvsweai5gsTd506BkUrOT7tN4Ef-uL655m-0P5G_gwbL8fRVodL7DhsWfwEjl4adBOhIrmJxoU9GwcFjG9U0VN1oRXYCJ0F3-LYZZQAj3xOXDBhZa8HAzefL4ltMLiQJD0UMpVT23kBbiDeTcGNMqpN0GGjXtlhGTBiEb-uFUM-9jFV8Z6BHVjA4thylgNVX1gQaedji_We_LtZIJXty6p6oCOJO-Pd3En1zIVjy4wPptOjFQBbpStUpS5wY_PNOQElsJyG2o_SrcLfUaaiP6lRCkIM4BGMKDlXaWM4ZVpmggG7Gjx_H7n7gcOvjbdUArnL7YFOgiTDqRgCl9JQkNjlhZvaFbi30tIY3bXA_sLryeEfa7NP686hwnqbrNRNyp4tvkEUlsO-TpjB3i47s8tKdDu5EIg4QvVYN6jLYDb3lUZGcyReRZZMph5bBxyQtZ0bQZgduUwDL7Fx2_FV8pMNnS1ceQL6n0ndh-IcRFNj1_CwcBt4UhwngUFhmKriYwCQjfSYdqiF3sV0byBV137D_EPSGRKCO1ljz-_6AevOHmIbwx9LYGDIBFEog1v1dVjjYaWJx9nBHNrRalm3uhmgY-J_0G00 \ No newline at end of file 
+xLrRSzqsadxdh-0g3vwuZcTpcMpLg3AfKMJ9SQqlAadEr2ILKa4a3WzY4ZW3W7AKRFxx5W1U010a0P97jXC-IGS4kviV6er6Dox_P1nGNXPv5CUFOd17K1hlARdDOS7YTuZlOe2p7APnGiax5cyXv54SixS8xm45HPmZ3Fdz7iQ4yXqn7YKvO2p-BOhZEFwMq1JIKv8J6d_ovPV_S_BtpxJownrAtcB8MFn55CU_Ir5EnTkGHN55voJXw0xn-qkOe3s2cBFVelXnZDBuE4QOmmpI-3Z-EeCOKi1X-aqKPnHW_joiS7bsVdPvzkhqx2e-UYQUiwE_eZIA2Tz1UeOvulS3RxnkQY5OlVWu1eraUqA7-9qO5Plt4mg5m2SyFeuvAY3ZeDWhxCNF-5U2ONmSO_BkBv-A-kK-zQ__UnJP-0dc9_iz9NUGZeCu__jA1kd0MsyOf0-3HPp278xLZc9Ck7Y7mrzu53CEUCdYunZYikX3yvJaERWb573ovqAq1uzpbCE7m66BSZZmbXGZ10VVx-_xxcS4vES8vkabP7k2sGGG10bl84JbvWCp0l9hMEnpmA4KueAIOZze_znl70Qoc5QOL5thnv-6KBo4Z9GqUL00QSLpuEr6hCD0f8ZBfgXDZe99_2T_tWn0TMEy_biKOr0et6Lv93bjgIYiIFfJ-_h_dZmPQgR-bXnlGK9HXZ38HrhEQAkvbYCBuBLi6oSvOV2gP7k8qrE2HLTe1XlzsIK4ZE1wu8aXJfy2EIu99b6CcFmocD-VfOX3yIctXEGGY5Us86UopYNdO0yQ8a8Xsf5Djbpph1dexW1DTe3-vOklllfKqvdwlPvE-_hsPoW5EYKbdMBu6WBUZTcyqRQKv3r0e1BkVOwDZTJJ1iabOFhGXir6mkbMVmbOFaBxtqMkCmZevf_duWy75vJSeHIcFUF065N7OD1zPQIfhyaU9U-ddRbWFV2-4kszT03_mT87YEwdpnqsoIIkXvtK0VfQn10CUJwVFCusNtYgXFuEKjYEhRt392lt0gXh4GcfISd-rl3Kkjt8K3hT2eOtw4Tey_fDtTdfY_yzRHw_OJ2DOf2cY2E2GVwAS15EIfGkY6a-ThtGs4ktRVGtYIEMSBnrSy3u6uNXj_T-q_sGm0256I9uiE55XGVgnOzZ4Or2iYUsudmHQHfUnVsgFcRsFwkPcru7L9nhr2a7ULAAQN1wPSHxQcrfBOS0WT1BahVzF8KvkeFq_YNaEv9sH4jB2hZwi1Gcg03vNxxSVFwfIrtRgN7Nto-4qiWgFpE2RuLwuAm4cOs5U3c76KYKofYoE7LUuuhBuXRIrziBG3bAq5x4Hzi6YU8zoA1025PYY1AZfYt9dfACWaAqcUinhOrlzy5_IiYuL0BBqefujG4DAsyBn8JkM8qd7t0EJIGve1da5sGl6bmek8EzT16S3pXmw9zuSeSeBq7-j586bvOszbwF5mfL2hVqEWLlgs-4AHipG0Od0rAym_f2kbXIKKRHAC2UNPE1MPv5i9R_3kGeFHz4xFkgobIhLqsnJPrzBFh17cI0mzVu9QGPDARhd9RmGC5i3UIYA35AfCg_89dlx3EntXkSN9jLFw3vPaSqZo-dzO0pE4JMoQX3XUP5lHHmqDhzy6_t2yYCTgfPeHCZzFGutel1dd7s6h_PvmGumrU7xH1dTn1pPHSlK4xuAw3yXrE8EQIodJoK15fK1fhUPjX0YGtE0nHC4Hya6sJAzifjt48zFI6mYrHls_WPAdr4rBmMCJXxV_ZfcylBirVNDzVFNvvTNP-yl73rF_8EcuYxtUa0Q1nikmxVdYJsXpM8UPjpJvCZJFOL-XMQN_CMtZkQlmv921bwDCJCeWQRC16E3ysn9fOJC3XmzVWB8Lc2ZotBdIFyzbkGl6tqoq0xE8iP0jNxorGIV59F22_rg8vyOfNCpcg6vZ5383natCODaZukcs25ZjXUCP-vcSsnNAOpUoaAhr21Sa0Hltz6IRdtIs66wx_5PkHLg_Q1Gd7mboGlKvW-txuY_2Gj47Oqyky3O8q3jGrDz4xpmfV5HHdtxm1xZkKC4lJaMpiHB0iiEGYUxW3EFEn1o7YoT3Px-jXZnyimV7VbZfYJFErvqWI558a-Rr4-aE69RZJ6mDqrS7ZswFfLpw4ftDJIL7G8qylQIn6roPAPIF1UQ03AAkoetjkAppSI53k4MpPaNmV3wc1WfUjLgGptEN5jSter30T6bzWCVR9Haziet5MjMOKE_bFn-2YIE4vWnWPsVJGVURaPrJlrN4qptYI7I8yqzH31JRQWdXBj0DlfdRwIz9KPED6BNv8KRT5YZIZpJLK-OAIbUsVkVkxIT6LNGQeBG27c8Mv4o5Bm3ybJhYJv7W4ztUGnlUkk5pkdfBWg0ELi1o6_vk0Y1rYVyWml5wFSdMUVkunC8lzAvfrU8FnssLNz_asPjM84ZxHXWKSftoNSvo21HPNFd1sTTH9TA5Pxo2YiBRTdyrjVEgODVS1GLPu-2r8fzHR2sRF5DN6wuqtRJzJxlGbq5ztmSUvm4p931UkUE2_M330kGNvUuXJXx2N1VDSXcQJ8PQ0hDNOzQarhlqcGMWhTuL3vx6eF4mHonFXP0L0-Lib9266P59RknAX4UcLhpLELf1yJgLFDAPIUdnzm8yGYWbEIEjzt322Q7fDo_rvYmMDX6YCmymUA1iiecDjwEAOnGIOn3DC3JTZy0bV-x2UCdiBtkYXlSvGOImrtWCBqNqWQBNFxsNEP77eAEF1D_AXqqlbm9lvvKoaOT2F34n_7Zvu_FNjr_Vpy-TdJHyV7Ht8MPqbzezuvyaT9BYJ0MzBcn8n94uK9eUafATLQfAeAItQm05t5rZxM8spflu918JsASlWg0F8EYrJv2HTpG7rTLdmSVrO5ZXB8M7y5_qQQsa1yVpxOHXRwI_Qx56uHHdguIYPh8GQtEUnPOB3b-ol8ESAPyOxLXOK6fUM449nGcvU0lMrUt0FUXU8bG3cavggKMv5IvUvLODh8RIyBcvhsRiOMA4P-ES-mrxSzZdycjuubXO_5CZiHOvljh578ofmtAmjPcGXjrYoYZXZc40ESG1_OE4LP1ccpZBQffVTAYsRJEm676Q7t6kGJbkt8o8qUUlKV5wuK3DioTo91CEA48fp9_V9NPL40Yg3tK6tCU5rZjJOpF_hhaVREaPeCt27uxddg9j0ATodhrLJgjuw21XWPPpWDglt6DxtZkNCFC4O9ALxJ363qgxsgzwlDtxpzBLWGjR3cIcqSw90n1ZQ46QTbClm6bHQWNdywxe5BX9u6pTRR1Zr7SahbxHTkLVrnJQa8rwhIl7VKg2o4urIUgOg6sehtZjfHS620xaYZ4bKrjVhzex05DP9IbN-fXn4hvliXw3XXS7nwcn1AeTJ7fn2aBn26ymDabG9xS6z2Zlf6rfcQ_1rYuhUNBpXVbSDLKCbtFw293sYBjuPC3SiFGuNzb2s57D82OM3aI8t-Y04LEpVkMarpf6iNL3wzSHVqYxkwTgZ8zl1tUJabX-WO4oVk7uicBw
A1IokTRIGVJEMkEC0feEd3EIMiukYbjBLucg9MBtzapl03wkOVpYmUr1wkXd1MspoetLYd8Uiqn35ZNT3AsJFcmziuDRseFiSx3ZLxKl-HHRDKlMjKmFS-qdzx7wmRTRrH-H_2S6kjiM37iNJXeVhD7yVNqRzdQLUdWzNgwGItZDyMwDa6ST5M5PiBgkK5TPFzqMPCzhfDAoAbgIifUKBq1PhgO5_EGR8hJ9W2KmEg0GqVCK3ZkYTqE7JGc0SlQ-awa_73sxgGeCEYuGB4HNBR_DGvlUJ3cmWWRaHe_minLi9_F4f8NXOubkU-u2pUKbB41KbX_YZ-EX6dZ1z_adxrjoy_-_itDOKRSL8D-_g_vTDlsbqkVjqoAr4gKiEJEeI-8whHdcdd3NnePmM4hxUo19kxyBddEm-vMAlz5FWBl-9m7xBF1rVl33LTLUNZsj_J0QExjuTmrzwUaHVblfvE7Ri97kAiF6-Tqi-FLp-lfwpzI4Yn5-R5EPHgv-q-_5hOQST0_d7Mn4xOyVYXR2UxjlQI6kNlOKDP5rE_xMD-6x_qxM9JJanTOgVfZ9j5Gdl1eVUZQVkyfvskEdaxsnPHnayhW-XJzpc-4DiLv7BOyTgl2ByIinO_EQflyz9_Jvb5LM-HHMLlYKLbNu5xz_ZrRUhJR4dRaNLDNcXgvt72thIpsI_48X1vTgHVF2JCYBE430wmL-Bv1tohouNQyNC1AICOmtokisxJCuwbXkRnEw5VsDbvKgs85mmxPtMfurldBLU6swsozwFn5EZSJgKCZqkPPVzsFTXJk4M_E0wVthgAw-xftaXLjOLhI-0XjajFTzRiY1Xs8zESC-jpOgme-JS4D7JtsS3bO-_Bb0tjLAI3PoQN45bbOal8e-DZyHiaul3TEq3l4Svq039FMuc73lYxLCVxdb5gJ5hwTZqiEvajtESqapri3cgs3wIHqdvBqljcFNzzxfUpqBcniQHe00__28B_Vg4283mbjqVpvYtfFQXpYzGtDTfUG0xCWn7h4hy8O84Aw5HBjrSX3lhnlZCYv4pZ3-4s2Dype7sv8zPkqZL7rHaMnzjLB_FSENhPpa7x_SeFEe2lc4AMKBHtzkg5FSahADr8xTHItwsP8PiBMOVBa7lPgNnWFUuoh3Qh89orYQa_EoRUvmfFEKnuoNDk9nzUstuyAguIGg4ym1KakqMjd0W3VMxvhGJaNzZdybO-q8tjhJ4XMUTMXk3sHHoUvD1lB-iXqYy-nr4yZWl8cBeieadkgUDzwwt0TMgnsGZSVjYDg5fdypqETvXwOfrrIl0Te0X8Tz3S_Exj1WGcWGuvAFQFVM_gKmmNzclUVwPFiAXsLx--WvIY52xTHygMnSWhzJRlGvqGUWVo4dOp1Jfh7usLBtLLCM0FF6ZtKD_sdzaTtfBVDwT1gld0Ux9sdBo5Bb-EkwOIlitNdEeQWCSu03jImCi3NxAqO_8E7zfZmiJqxGN9r3Hm-SYCEwwLWwnD4iV7ySdbvSc_V_of7bSaU3Uorf5OW24-uQJaEq9bnDXnz43zaQuBZPp1e1t1y9Np12aZPQr98lBJ0D_94oyGuoXXzIBq6-5Jt1JYqfORKfanOS_5mL8t1Q1lPGKnHigZaAEuQ5RltCWcLq65AFvF2NB4xsy0OoKLQqbkKi1sjmJGj4uFWbnnqnvm2X0MjDGQBgOzgnFe-cV8RBaig7Mrlj1cRknsxiKuY2btrPrzUi0ZvtuuULhD2Zr0QQUGX7PY4Z-SdAESG-gNTIyBx9mMooLSWzTX_73ouiI-1SqZQYRS-HkjXYmhyXl0EaMtzvsY5iYH8KVK_KMoAKkb6h98N_7GOcWvuHY-kw7atOqvyo1vwYYjfdat36MEbdXXBjgeyg1qXMaGffx58ojKNQYiQCuAKHUYeOj3LhpSU3aY_ojKRgXMY0OXqbO4MXU-hMWDeBNQTN-iLcCLXfSTgLp16W1hnMaGfxNlUS6qmcF1Ct3Nz0gG3P1r5yCAa09eyEhe5IGNI2j07O3MMOa3bgTFNtX0aWPyM6fzcBZ17U3qXeEi4tiMbIM8pQVrVhFQ2dE3KYYtidINJwhdpZoSEeBznfci6S4dw9hkLQLSaAq0MG2j0vW3b84xjbU0Qe3MwFOOGkiqTG2SuRkbLvnwS5enSc7alarw1QW6QFZQvnKd6wUXGmrMj6oqrd7E0wGL70SuAoWsABqt0A_uDFZqNnYyWkG2QGN81T2AIWUL_kjaLuGQX5RbwOLcHSL2fVTksXMU5fuLcbveLbWiM4BkX_tJ8HDo3-oV3fxHos5Nd4oSUgurNM5omkIuCtD5QW0fzmMFAsJZaAqgDehNgbK-A3bkQbYHwaDgjLL1MAr6QXIe5Q5UXDmN0AuecIZrtIwuukJ2QLKc3v3MWIuGghNVZU6q0jGBKC_5RojCRfXMY2o8rlqVWH2jyDZmzD_QjWAa0xG2Om4pTkVQYfA1qheAT0oX5HaR6VVTKIiu0XpdNLKhD1AQtekgLdmqV5QziN1PZrFjHoPR3OqBbdNIu0R6SJ_JYaoDp0gA1OfzuzEAYaHbx0wf5IOtJ2FNJAsmkM5R8R8X6hYlQaL88TIzxcf5HY2pUZykL1pNMwt0rC3qtuwrOcHXj6BZmAR_JguLengZ5HGBbEw5K2ik0XVtlL0hDWeR-wQhFpKkqQFuX36I86bxMHH5SfuU7tr-53zvykCdjpb8tcxsEK1umtSeUOiXO__uugSdJsGLRGkmaAlvjP7N7aTgzvalCVlqbFWupaJHHlrfdqJHmDrF5ZTnoHnCBNQ_nV4HFeex1uh7n-9wCxDi4uDUPf7ZQfS0_cDt3iN75czHPUCeCrzRNAecLEmjKFD6r7T5mcI1xCjXEZ8r5JPz3wet3oXRtID29PFl7L2k_fOSb-qXsbJDF2S8frwq99icK9r7wqYXCwyhBSBHkERFnRP-jXWtqs8EVMYdzc8okFl89gRjqyLuAHiuzfsorc8yjNunnhYjkXZt_fvkC-_hro6ycuLsyN7CzZ1184UCDwMRzkvwhSmK8-hSNZ5n95Vl7bUtmDfFDhByd9AYyyui2Jg7JWllnIhR6Dw5u32FPUV49-anMOKBCtMl78oFgrFphGTEvjRr4a7DxBY_GpfjPlccKwBCIUqoBEXx9FA_KLSZd7FBebLOnQfMRF4hwXoH7y32uqO2CGSHjJRgpt3bMS8uSZh2cEYbh9B2EKS97tYsSgAY3pVpw7QyFvs1vUA5SkhJVCN2hUsXF7fojcqho9_Lr6u-SMUcKp1pbTSJIiUHse56sdLoUMMhhKSRLo6B-avaNdRYjvT4Q-rfvtOBS1_Xv-8KIjufeGAvVhIfM2x3gBjKkyCk6wo-Y1IwqzNFl7Muvl2iUJ2JU3H_kbzJtiI4YfWMoFkc-TAn0uoX3CFKGIXOc3ulQjFnfQRHxgYz8cugVIXHkzMLi6OhBsYbd6PfEjmTSoe6bQRDUorWC1lzaF_cIvXUEeckj5s0Gbp5ef8TojDfQ1WJjiQThVfNCBNk0WmnjecIrnXkr7_NxuYYBejaD
tv7eauSr9zftpbfmeliUTIDhYx2tppUmE1lFkhXRvyI7JGlShtV-j7MezgXjeeN5wE9P-jbcqvHotjs-AgAdZkYLi7q6mDK1UHL1mkcue24gf_fCqLP_QnF4TcOZqtapAhwiocIDMS3rvDezQjge72AzaOEvMCPHNDfivRcT3bBdC8jmZ1jrJDNoQ0TQg8qy5ddIHJkVVV6ApRTiCLK-cORRlIcZFHibJzPwnHWAy-5pASLZt3w9ksDneD5rk6W8VROUlCuvXHb6QGm_VP-q_w8FCxvcpGCsESh6WD1vtokJVTXy-R3YDYSqkSIrAfIkozoq7AVOZT7e4QJjor11q2LRY_FVx3DmbjhjZpDGH8zEgGURdIu3Y25a_iOruLKjD48DjHZ-XHY8I66EeXVprX8aBwMSBBMpoHRf0cobg1F6ypc7-1F5owkaVD4hQMd5YtfJU-4Oq591YsLMvO1GpLNcj4cicKnNrXS4fS8REEKAXeuIco614vROJxgwQzBIj63WvElSQZJKgFLeBbkAYugXNkmh4ijsArZTXRF866Zr2-LD5qcFndm0NcssUTN5jSw167LZzhyUNZENGy_ugfiq_pNXY4T8V85O-t4BXmDkC-IfpTMrv4ZMd9zai6jZO-FhqyQY9HhYpycYJgUaT-ua_Dp-RMXL-txPVnL_oJz79Iv0ItI4BjtXhMcCZFRLsVIgbRK2UZCYWNLbk2Y4nDIbiCkZvgOpn8Xg4qd5XLfG4zFIwk0NaMecvTcqxZqf2aqHiuqnJHUjOElKrVU_2HwxZH0LQWlZ8ggSoec6oabjA1XMo4sWRQPccmfwqPz4jqMtufW3Mue9USmL5CbtV4cd8DsJ956Uj-MJg1P-oEb-Bk9TwI0YaZKZkDBhTMcrV_3AxJdytKkB-vZxbXFF5y27yRp-FLZcyr6isy9WScZktnW2Y-NrzZowNgDQb_4l3qLbAX5IjQJODhjoImDKTeK1LDhzlDMlR7QxI4eHkfGZyccWedNJKlXMjwsuDAiUccllb9hSPwzAxQSCZXhHrNxBRTJW8j_nlO517CrLArYeUbQCBs6blBkhXN5sWcigEgRX0CLsTv_BLj55csaSjlF8pKEBR5RGP2lrwarvJFfsrCPdOSvXPd7HwwEB_bEh5JE0UDlzgdO_oToOxHodK8Pk5pYcaNN7mwbxXTmKN3vX5b2-RiMQ2bktGLQKsZRouHQsiQJSDgkt7D5l9jw3gK-r0fu0THxf_YAHS_iFfIBEOOfqleG5aglFM15h9EdK6q8O4rfC45xnDwqu_OaMWwZsPATRJ5Tnneq7Ewi1r6zbttcffUxYantuyKa5iqhzS3rVKE_7hlgaf5DkVWe_btZIJXtyAn7tjg_O-PdzEnizIVjy7cPpoOmFKxcpStU8y8wMm4MOQFl13WG2o_SbcLf-buiPCFSFEJc4BIMPTj1c1c4ZQppegHx0kd_Gdn7odgHjrdUALnK7YE4gqTZqRACl9JQkITmgZuaFXiE0tIY2eDA_sLnyhq9bcNP6E6mwnqbPRRh-9WRyt77NpEVkvqbX-0zx0PgZk-j71J2jVTnghNAnUoXMiheJMDmZnrhPkwobr_rRyCIDPsJU_VshAdzXVdll9hAecSBqbmZOWR95t9JCdrJmxGEPY_SdcviggXzS1FRugY6xIJUfzB3_NdrkV3NeL4pQpfLK2slsEGG_BknYavMSKRfkcGg7K7oULuMrUEoKxzD4Qb3g5Efg6-Bedz2d9P57lq_ \ No newline at end of file diff --git a/docs/logical_data_model.puml b/docs/logical_data_model.puml index ef38c648e2..c9f32ec3fc 100644 --- a/docs/logical_data_model.puml +++ b/docs/logical_data_model.puml @@ -2598,6 +2598,7 @@ SessionReportPilots "1" --[#black,dashed,thickness=2]--{ "n" SessionReportPilot SessionReportPilots "1" --[#black,dashed,thickness=2]--{ "n" SessionReportPilotSupportingAttachments : sessionReport, sessionSupportingAttachments Topics "1" --[#black,dashed,thickness=2]--{ "n" ActivityReportObjectiveTopics : topic, activityReportObjectiveTopics Topics "1" --[#black,dashed,thickness=2]--{ "n" RoleTopics : topic, roleTopics +Topics "1" --[#black,dashed,thickness=2]--{ "n" Topics : mapsToTopic, mapsFromTopics UserSettings "1" --[#black,dashed,thickness=2]--{ "n" UserSettingOverrides : setting, userSettingOverrides Users "1" --[#black,dashed,thickness=2]--{ "n" ActivityReportApprovers : user, reportApprovers Users "1" --[#black,dashed,thickness=2]--{ "n" ActivityReportCollaborators : user, reportCollaborators diff --git a/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js b/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js index bd30dc0d3e..8e3846bcf7 100644 --- a/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js +++ b/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js @@ -21,13 +21,33 @@ const goalUrl = join('api', 'activity-reports', 'goals'); const spy = jest.fn(); +const defaultGoals = [{ + id: 1, + name: 'This is a test goal', + isNew: true, + goalIds: [1], + grants: [ + { + value: 1, label: 'Turtle 1', programs: [], id: 1, + }, + ], + objectives: [{ + id: 1, + title: 'title', + ttaProvided: 'tta', + status: 'In Progress', + courses: [], + }], +}]; + const 
RenderGoalsObjectives = ({ - grantIds, activityRecipientType, connectionActive = true, + grantIds, activityRecipientType, connectionActive = true, goalsToUse = defaultGoals, }) => { const activityRecipients = grantIds.map((activityRecipientId) => ({ activityRecipientId, id: activityRecipientId, })); const data = { activityRecipientType, activityRecipients }; + const hookForm = useForm({ mode: 'onChange', defaultValues: { @@ -39,24 +59,7 @@ const RenderGoalsObjectives = ({ ], }, collaborators: [], - goals: [{ - id: 1, - name: 'This is a test goal', - isNew: true, - goalIds: [1], - grants: [ - { - value: 1, label: 'Turtle 1', programs: [], id: 1, - }, - ], - objectives: [{ - id: 1, - title: 'title', - ttaProvided: 'tta', - status: 'In Progress', - courses: [], - }], - }], + goals: [...goalsToUse], objectivesWithoutGoals: [], approvers: [], ...data, @@ -100,6 +103,7 @@ const renderGoals = ( isGoalFormClosed = false, throwFetchError = false, toggleGoalForm = jest.fn(), + goalsToUse = defaultGoals, ) => { const query = grantIds.map((id) => `grantIds=${id}`).join('&'); const fetchResponse = throwFetchError ? 500 : goals; @@ -112,6 +116,7 @@ const renderGoals = ( grantIds={grantIds} activityRecipientType={activityRecipientType} connectionActive={!throwFetchError} + goalsToUse={goalsToUse} /> , @@ -254,6 +259,86 @@ describe('goals objectives', () => { expect(toggleGoalForm).toHaveBeenCalledWith(false); }); + it('can remove a goal while editing another', async () => { + const goalsToUse = [{ + id: 3, + name: 'Sample Goal to Remove', + isNew: true, + goalIds: [1], + grants: [ + { + value: 1, label: 'Turtle 1', programs: [], id: 1, + }, + ], + objectives: [{ + id: 1, + title: 'title', + ttaProvided: 'tta', + status: 'In Progress', + courses: [], + }], + }, + { + id: 4, + name: 'Sample Goal to Edit', + isNew: true, + goalIds: [1], + grants: [ + { + value: 1, label: 'Turtle 1', programs: [], id: 1, + }, + ], + objectives: [{ + id: 1, + title: 'title', + ttaProvided: 'tta', + status: 'In Progress', + courses: [], + }], + }]; + + const sampleGoals = [ + { name: 'Sample Goal to Remove', id: 3, objectives: [] }, + { name: 'Sample Goal to Edit', id: 4, objectives: [] }, + ]; + const isGoalFormClosed = true; + const throwFetchError = false; + const toggleGoalForm = jest.fn(); + fetchMock.restore(); + fetchMock.get('/api/activity-report/1/goals/edit?goalId=1', 200); + + renderGoals([1], 'recipient', sampleGoals, isGoalFormClosed, throwFetchError, toggleGoalForm, goalsToUse); + + // Verify both goals are visible + expect(await screen.findByText('Sample Goal to Remove')).toBeVisible(); + expect(await screen.findByText('Sample Goal to Edit')).toBeVisible(); + + // Edit the first goal + let actions = await screen.findByRole('button', { name: /actions for goal 4/i }); + act(() => userEvent.click(actions)); + const [editButton] = await screen.findAllByRole('button', { name: 'Edit' }); + act(async () => { + userEvent.click(editButton); + await waitFor(async () => { + expect(await screen.findByText('Sample Goal to Remove')).toBeVisible(); + expect(await screen.findByText('Sample Goal to Edit')).toBeVisible(); + }); + }); + + // Remove the first goal + actions = await screen.findByRole('button', { name: /actions for goal 3/i }); + act(() => userEvent.click(actions)); + const [removeButton] = await screen.findAllByRole('button', { name: 'Remove' }); + act(async () => { + userEvent.click(removeButton); + await waitFor(async () => { + // Assert the goal was removed while the goal being edited is visible still. 
+ expect(screen.queryAllByText('Sample Goal to Remove').length).toBe(0); + expect(await screen.findByText('Sample Goal to Edit')).toBeVisible(); + }); + }); + }); + it('does not fetch if there are no grants', async () => { const goals = [{ name: 'This is a test goal', diff --git a/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js b/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js index 722fb8d349..99cf5a2528 100644 --- a/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js +++ b/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js @@ -103,6 +103,7 @@ const GoalsObjectives = ({ const activityRecipients = watch('activityRecipients'); const objectivesWithoutGoals = watch('objectivesWithoutGoals'); const pageState = getValues('pageState'); + const goalForEditing = watch('goalForEditing'); const { isRecipientReport, @@ -211,8 +212,9 @@ const GoalsObjectives = ({ onUpdateGoals(copyOfSelectedGoals); // if we have no goals, open the form up via the - // hander provided by the context - if (copyOfSelectedGoals.length === 0) { + // handler provided by the context + // Unless we are currently editing a goal and removing at the same time. + if (copyOfSelectedGoals.length === 0 && !goalForEditing) { setValue('goalForEditing', ''); setValue('goalName', ''); setValue('goalEndDate', ''); diff --git a/src/lib/importSystem/process.ts b/src/lib/importSystem/process.ts index 6e9b613385..bdee7e9554 100644 --- a/src/lib/importSystem/process.ts +++ b/src/lib/importSystem/process.ts @@ -1,605 +1,11 @@ -import { Model, Op } from 'sequelize'; -import { Readable } from 'stream'; -import { - remap, - collectChangedValues, - lowercaseKeys, - createRanges, -} from '../dataObjectUtils'; -import { filterDataToModel, modelForTable } from '../modelUtils'; -import EncodingConverter from '../stream/encoding'; -import Hasher, { getHash } from '../stream/hasher'; -import S3Client from '../stream/s3'; -import XMLStream, { SchemaNode } from '../stream/xml'; -import ZipStream, { FileInfo as ZipFileInfo } from '../stream/zip'; -import { - getNextFileToProcess, - recordAvailableDataFiles, - setImportFileStatus, - setImportDataFileStatusByPath, - updateAvailableDataFileMetadata, -} from './record'; -import { IMPORT_DATA_STATUSES, IMPORT_STATUSES } from '../../constants'; -import db from '../../models'; -import { auditLogger } from '../../logger'; - -type ProcessDefinition = { - fileName: string, - encoding: string, - tableName: string, - keys: string[], - remapDef: Record; -}; - -/** - * Process records according to the given process definition and XML client. - * @param processDefinition - The process definition object. - * @param xmlClient - The XML client object. - * @param fileDate - the data the file was modified - * @param recordActions - The record actions object containing arrays of promises for - * inserts, updates, and deletes. - * @param schema - the name of each of the columns within the data - * @returns A promise that resolves to the updated recordActions object and schema. 
- */ -const processRecords = async ( - processDefinition: ProcessDefinition, - xmlClient: XMLStream, - fileDate: Date, - recordActions: { - inserts, - updates, - deletes, - errors, - } = { - inserts: [], - updates: [], - deletes: [], - errors: [], - }, -): Promise<{ - inserts, - updates, - deletes, - errors, -}> => { - let record; - try { - record = await xmlClient.getNextObject(true); - } catch (err) { - // record the error into the recordActions and continue on successfully as - // other entries may be process successfully - recordActions.errors.push(err.message); - auditLogger.log('error', ` processRecords getNextObject ${err.message}`, err); - } - - // @ts-ignore - let model; - try { - model = modelForTable(db, processDefinition.tableName); - } catch (err) { - // record the error into the recordActions - recordActions.errors.push(err.message); - auditLogger.log('error', ` processRecords modelForTable ${err.message}`, err); - - // Unable to continue as a model is required to record any information - return Promise.reject(recordActions); - } - - if (record) { - try { - // TODO: column/key alpha sort to retain order - // 1. use the remap method to format data to structure needed - // 2. use the filterDataToModel to match what is expected - // 3. check for existing record - // 4a. if new - // 1. insert - // 2. recordActions.inserts.push(uuid) - // 4b. if found - // 1. use the collectChangedValues to find the values to update - // 2. update - // 2. recordActions.update.push(uuid) - - // Format the record data using the remap method - // This changes the attribute names and structure into what will be saved - const { mapped: data } = remap( - record, - lowercaseKeys(processDefinition.remapDef), - { - keepUnmappedValues: false, - // defines a custom fuction that will replace the resulting structure - // with the result of each function. - targetFunctions: { - // take in an object and generate a hash of that object - 'toHash.*': (toHash) => ({ hash: getHash(toHash) }), - }, - }, - ); - - // Filter the data to match the expected model - const { - matched: filteredData, - unmatched: droppedData, - } = await filterDataToModel(data, model); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const recordKey: Record = {}; - processDefinition.keys.forEach((key) => { - const value = filteredData[key]; - if (value) { - recordKey[key] = value; - } - // TODO: handle case where all/part of the key may have been dropped - }); - if (Object.keys(droppedData).length > 0) { - // TODO: add some kind of note/warning that mapped data was filtered out at the model level - // The message should include the importDataFileId, the recordKey, and the column names. 
- // The column values should be excluded to prevent posable display of PII - } - - // Check if there is an existing record with the same key value - const currentData = await model.findOne({ - where: { - ...recordKey, - }, - }); - - if (!currentData) { - // If the record is new, create it - const insert = model.create( - { - ...filteredData, - sourceCreatedAt: fileDate, - sourceUpdatedAt: fileDate, - }, - { - individualHooks: true, - returning: true, - }, - ); - recordActions.inserts.push(insert); - } else if (fileDate > currentData.sourceUpdatedAt) { - // If the record already exists, find the delta then update it - const delta = collectChangedValues(filteredData, currentData); - const update = model.update( - { - ...delta, - sourceUpdatedAt: fileDate, - ...(currentData.sourceDeletedAt && { sourceDeletedAt: null }), - updatedAt: new Date(), - }, - { - where: { id: currentData.id }, - individualHooks: true, - returning: true, - }, - ); - recordActions.updates.push(update); - } - } catch (err) { - // record the error into the recordActions and continue on successfully as - // other entries may be process successfully - recordActions.errors.push(err.message); - auditLogger.log('error', ` processRecords create/update ${err.message}`, err); - } - } else { - try { - // 1. Find all records not in recordActions.inserts and recordActions.update - // 2. delete - // 3. recordActions.delete.push(promises) - // 4. pass back recordActions - - const [ - affectedDataInserts, - affectedDataUpdates, - ] = await Promise.all([ - Promise.all(recordActions.inserts), - Promise.all(recordActions.updates), - ]); - - // Flatten the affectedDataUpdates array and extract the objects - const flattenedUpdates = affectedDataUpdates.flatMap( - // Assuming the second element of each sub-array is the array of objects - (update) => (Array.isArray(update[1]) ? update[1] : []), - ); - - // Combine the affected data from inserts and flattened updates - const affectedData = [ - ...affectedDataInserts, - ...flattenedUpdates, - ]; - - const affectedDataIds = affectedData?.map(({ id }) => id).filter((id) => id) || []; - const affectedRanges = createRanges(affectedDataIds); - - // mark the source date when the records no longer are present in the processed file - // "Delete" all records that are not in the affectedData array - if (affectedDataIds.length) { - const destroys = model.update( - { - sourceDeletedAt: fileDate, - }, - { - where: { - [Op.and]: affectedRanges.map((range) => ({ - id: { [Op.notBetween]: range }, - })), - sourceDeletedAt: null, - }, - individualHooks: true, - }, - ); - - recordActions.deletes.push(destroys); - } - } catch (err) { - // record the error into the recordActions - recordActions.deletes.push(err.message); - auditLogger.log('error', ` processRecords destroy ${err.message}`, err); - } - - return Promise.resolve(recordActions); - } - - // Recursively call the processRecords function to process the next record - return processRecords( - processDefinition, - xmlClient, - fileDate, - recordActions, - ); -}; - -/** - * Processes a file based on the provided process definition. - * - * @param processDefinition - The process definition object that contains information - * about how to process the file. - * @param fileInfo - Information about the file being processed. - * @param fileStream - The stream of the file being processed. - * @returns A promise that resolves to an object containing arrays of promises for - * inserts, updates, and deletes. 
- * @throws An error if the remapDefs property is not found in the processDefinition. - * @throws An error if the model property is not found in the processDefinition. - * @throws An error if the key property is not found in the processDefinition. - */ -const processFile = async ( - processDefinition: ProcessDefinition, - fileInfo: ZipFileInfo, - fileStream: Readable, -): Promise<{ - hash?: string, - schema?: SchemaNode, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - inserts?: Promise[], - // eslint-disable-next-line @typescript-eslint/no-explicit-any - updates?: Promise[], - // eslint-disable-next-line @typescript-eslint/no-explicit-any - deletes?: Promise[], - // eslint-disable-next-line @typescript-eslint/no-explicit-any - errors: Promise[], -}> => { - let result: { - hash?: string, - schema?: SchemaNode, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - inserts?: Promise[], - // eslint-disable-next-line @typescript-eslint/no-explicit-any - updates?: Promise[], - // eslint-disable-next-line @typescript-eslint/no-explicit-any - deletes?: Promise[], - // eslint-disable-next-line @typescript-eslint/no-explicit-any - errors: Promise[], - } = { - errors: [], - }; - - try { - // Check if remapDefs property exists in processDefinition, if not throw an error - if (!processDefinition?.remapDef) throw new Error('Remapping definitions not found'); - // Check if model property exists in processDefinition, if not throw an error - if (!processDefinition?.tableName) throw new Error('Model not found'); - // Check if key property exists in processDefinition, if not throw an error - if (!processDefinition?.keys) throw new Error('Keys not found'); - // Check if key property exists in processDefinition, if not throw an error - if (!processDefinition?.encoding) throw new Error('Encoding not found'); - - const hashStream = new Hasher('sha256'); - - const encodingConverter = new EncodingConverter('utf8', processDefinition.encoding); - - // Convert the fileStream to a usable stream while also calculation the hash - const usableStream = fileStream.pipe(hashStream).pipe(encodingConverter); - - // Create a new instance of XMLStream using the usableStream - const xmlClient = new XMLStream(usableStream, true); - await xmlClient.initialize(); - - // Check if key property exists in processDefinition, if not throw an error - if (!xmlClient) throw new Error('XMLStream failed'); - - const processedRecords = await processRecords(processDefinition, xmlClient, fileInfo.date); - - // hash needs to be collected after processRecords returns to make sure all the data has - // been processed for all records in the file - const hash = await hashStream.getHash(); - const schema = await xmlClient.getObjectSchema(); - - result = { - hash, - schema, - ...processedRecords, - }; - } catch (err) { - result.errors.push(err.message); - auditLogger.log('error', ` processFile ${err.message}`, err); - } - - return result; -}; - -/** - * Processes the files using the provided ZipStream object and the array of files to process. - * - * @param zipClient - The ZipStream object used to interact with the zip files. - * @param filesToProcess - An array of ZipFileInfo objects representing the files to be processed. - * @param processDefinitions - An array of strings representing the names of the files to - * be processed. - * @returns - A Promise that resolves when all files have been processed. - * @throws - If there is an error while processing a file. 
- */ -const processFilesFromZip = async ( - importFileId, // The ID of the import file - zipClient: ZipStream, // The client for working with ZIP files - filesToProcess: (ZipFileInfo)[], // An array of files to process - processDefinitions: ProcessDefinition[], // An array of process definitions - // eslint-disable-next-line @typescript-eslint/no-explicit-any -): Promise => { - // If there are no more files to process, exit the function - if (processDefinitions.length === 0) return Promise.resolve(); - - // Get the next file to process from the end of the processDefinitions array - const nextToProcess = processDefinitions.pop(); - - try { - const fileInfoToProcess = filesToProcess - // Find the ZipFileInfo object that matches the next file to process - .find(({ name }) => name === nextToProcess.fileName); - - if (fileInfoToProcess) { // If the file to process is found - setImportDataFileStatusByPath( - importFileId, - fileInfoToProcess, - IMPORT_DATA_STATUSES.PROCESSING, - ); - // Get the file stream for the file to process from the zipClient - const fileStream = await zipClient.getFileStream(fileInfoToProcess.name); - - // Throw an error if the file stream is not available - if (!fileStream) throw new Error(`Failed to get stream from ${fileInfoToProcess.name}`); - - const processingData = await processFile( - nextToProcess, // Pass the name of the file to process - fileInfoToProcess, // Pass the ZipFileInfo object of the file to process - fileStream, // Pass the file stream of the file to process - ); - - const { - schema = null, // The schema of the processed file - hash = null, // The hash of the processed file - } = processingData; - - const [ - inserts, // An array of insert operations - updates, // An array of update operations - deletes, // An array of delete operations - errors = [], // An array of errors - ] = await Promise.all([ - Promise.all(processingData?.inserts.map(async (i) => Promise.resolve(i))), - Promise.all(processingData?.updates.map(async (i) => Promise.resolve(i))), - Promise.all(processingData?.deletes.map(async (i) => Promise.resolve(i))), - Promise.all(processingData.errors.map(async (i) => Promise.resolve(i))), - ]); - - const [ - insertCount, // The number of insert operations - updateCount, // The number of update operations - deleteCount, // The number of delete operations - errorCounts, // An object containing the count of each error - ] = [ - inserts?.length || 0, - updates?.length || 0, - deletes?.length || 0, - errors.reduce((acc, error) => { - if (!acc[error]) { - acc[error] = 0; - } - acc[error] += 1; - return acc; - }, {}), - ]; - - // save/log file processing data - await updateAvailableDataFileMetadata( - importFileId, // Pass the import file ID - fileInfoToProcess, // Pass the ZipFileInfo object of the processed file - IMPORT_DATA_STATUSES.PROCESSED, - { - schema, - hash, - recordCounts: { - inserts: insertCount, - updates: updateCount, - deletes: deleteCount, - errors: errorCounts, - }, - }, - ); - } else { - // save/log file not processed - await updateAvailableDataFileMetadata( - importFileId, // Pass the import file ID - fileInfoToProcess, // Pass the ZipFileInfo object of the unprocessed file - IMPORT_DATA_STATUSES.PROCESSING_FAILED, - {}, - ); - } - } catch (err) { - await updateAvailableDataFileMetadata( - importFileId, // Pass the import file ID - { - name: nextToProcess.fileName, // Pass the name of the file that caused the error - }, - IMPORT_DATA_STATUSES.PROCESSING_FAILED, - { - recordCounts: { - errors: { - [err.message]: 1, // Add the 
error message to the error count object - }, - }, - }, - ); - auditLogger.log('error', ` processFilesFromZip ${err.message}`, err); - } - - // Recursively call the processFilesFromZip function to process the remaining files - return processFilesFromZip( - importFileId, // Pass the import file ID - zipClient, // Pass the zip client - filesToProcess, // Pass the array of files to process - processDefinitions, // Pass the array of process definitions - ); -}; - -/** - * Processes a zip file from S3. - * @param importId - The ID of the import. - * @throws {Error} If an error occurs while processing the zip file. - * @returns {Promise} A promise that resolves when the zip file has been processed - * successfully. - */ -const processZipFileFromS3 = async ( - importId: number, -) => { - const startTime = new Date(); // The start time for file collection - // Get the next file to process based on the importId - const importFile = await getNextFileToProcess(importId); - if (!importFile) return Promise.resolve(); - - // Destructure properties from the importFile object - const { - importFileId, - processAttempts = 0, - fileKey: key, - importDefinitions: processDefinitions, - } = importFile; - - // These must be let to properly wrap the population in a try/catch - let s3Client; - let s3FileStream; - - // Set the import file status to PROCESSING and increment the processAttempts count - await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING, null, processAttempts + 1); - - try { - // Create a new S3Client instance and download the file as a stream - s3Client = new S3Client(); - s3FileStream = await s3Client.downloadFileAsStream(key); - } catch (err) { - // If an error occurs, set the import file status to PROCESSING_FAILED - await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING_FAILED); - auditLogger.log('error', ` processZipFileFromS3 downloadFileAsStream ${err.message}`, err); - return { - error: err.message, - duration: new Date().getTime() - startTime.getTime(), - }; - } - - // These must be let to properly wrap the population in a try/catch - let zipClient; - let fileDetails; - - const neededFiles = processDefinitions.map(({ fileName: name, path }) => ({ name, path })); - - try { - // Create a new ZipStream instance using the downloaded file stream - zipClient = new ZipStream( - s3FileStream, - undefined, - neededFiles, - ); - - // Get details of all files in the zip archive - fileDetails = await zipClient.getAllFileDetails(); - // Record the available data files in the importFile - await recordAvailableDataFiles(importFileId, fileDetails); - } catch (err) { - // If an error occurs, set the import file status to PROCESSING_FAILED - await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING_FAILED); - auditLogger.log('error', ` processZipFileFromS3 getAllFileDetails ${err.message}`, err); - return { - error: err.message, - duration: new Date().getTime() - startTime.getTime(), - }; - } - - // Filter out null file details, and to the ones that streams were requested for - // then cast the remaining ones as ZipFileInfo type - const filteredFileDetails = fileDetails - .filter((fileDetail) => fileDetail) - .filter(({ name, path }) => neededFiles.some((neededFile) => ( - neededFile.name === name - && neededFile.path === path - ))) as ZipFileInfo[]; - - await Promise.all(fileDetails - .filter(({ name, path }) => !neededFiles.some((neededFile) => ( - neededFile.name === name - && neededFile.path === path - ))) - .map(async (fileDetail) => setImportDataFileStatusByPath( - 
importFileId,
-    fileDetail,
-    IMPORT_DATA_STATUSES.WILL_NOT_PROCESS,
-  )));
-
-  let results;
-
-  try {
-    // Process files from the zip archive using the importFileId, zipClient, filteredFileDetails,
-    // and processDefinitions
-    results = await processFilesFromZip(
-      importFileId,
-      zipClient,
-      filteredFileDetails,
-      processDefinitions,
-    );
-  } catch (err) {
-    // If an error occurs, set the import file status to PROCESSING_FAILED
-    await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING_FAILED);
-    auditLogger.log('error', `processZipFileFromS3 processFilesFromZip ${err.message}`, err);
-    return {
-      error: err.message,
-      file: {
-        name: fileDetails.name,
-      },
-      duration: new Date().getTime() - startTime.getTime(),
-    };
-  }
-
-  // Set the import file status to PROCESSED
-  await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSED);
-  return {
-    ...results,
-    file: {
-      name: fileDetails.name,
-    },
-    duration: new Date().getTime() - startTime.getTime(),
-  };
-};
+import processFilesFromZip from './processFilesFromZip';
+import processZipFileFromS3 from './processZipFileFromS3';
+import processFile from './processFile';
+import processRecords from './processRecords';
 
 export {
-  processRecords,
-  processFile,
   processFilesFromZip,
+  processFile,
+  processRecords,
   processZipFileFromS3,
 };
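With the split, the index module is reduced to re-exports, so existing import sites keep working unchanged. For orientation, a caller such as a queue worker might drive the pipeline like the sketch below; the runImport wrapper, the import path, and the logging are hypothetical and not part of this change:

    import { processZipFileFromS3 } from './lib/importSystem';

    // Hypothetical worker entry point; importId would come from the job queue.
    const runImport = async (importId: number) => {
      const results = await processZipFileFromS3(importId);
      // results is undefined when there is no file to process; otherwise it
      // includes a duration and either the processed file name or an error.
      if (results && results.error) {
        console.error(`import ${importId} failed after ${results.duration}ms: ${results.error}`);
      }
    };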
diff --git a/src/lib/importSystem/processFile.ts b/src/lib/importSystem/processFile.ts
new file mode 100644
index 0000000000..b73ed322ff
--- /dev/null
+++ b/src/lib/importSystem/processFile.ts
@@ -0,0 +1,95 @@
+import { Readable } from 'stream';
+import processRecords from './processRecords';
+import EncodingConverter from '../stream/encoding';
+import Hasher from '../stream/hasher';
+import XMLStream, { SchemaNode } from '../stream/xml';
+import { FileInfo as ZipFileInfo } from '../stream/zip';
+import { auditLogger } from '../../logger';
+import { ProcessDefinition } from './types';
+
+/**
+ * Processes a file based on the provided process definition.
+ *
+ * @param processDefinition - The process definition object that contains information
+ * about how to process the file.
+ * @param fileInfo - Information about the file being processed.
+ * @param fileStream - The stream of the file being processed.
+ * @returns A promise that resolves to an object containing arrays of promises for
+ * inserts, updates, and deletes.
+ * @throws An error if the remapDef property is not found in the processDefinition.
+ * @throws An error if the tableName property is not found in the processDefinition.
+ * @throws An error if the keys property is not found in the processDefinition.
+ * @throws An error if the encoding property is not found in the processDefinition.
+ */
+const processFile = async (
+  processDefinition: ProcessDefinition,
+  fileInfo: ZipFileInfo,
+  fileStream: Readable,
+): Promise<{
+  hash?: string,
+  schema?: SchemaNode,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  inserts?: Promise<any>[],
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  updates?: Promise<any>[],
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  deletes?: Promise<any>[],
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  errors: Promise<any>[],
+}> => {
+  let result: {
+    hash?: string,
+    schema?: SchemaNode,
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    inserts?: Promise<any>[],
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    updates?: Promise<any>[],
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    deletes?: Promise<any>[],
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    errors: Promise<any>[],
+  } = {
+    errors: [],
+  };
+
+  try {
+    // Check if remapDef property exists in processDefinition, if not throw an error
+    if (!processDefinition?.remapDef) throw new Error('Remapping definitions not found');
+    // Check if tableName property exists in processDefinition, if not throw an error
+    if (!processDefinition?.tableName) throw new Error('Model not found');
+    // Check if keys property exists in processDefinition, if not throw an error
+    if (!processDefinition?.keys) throw new Error('Keys not found');
+    // Check if encoding property exists in processDefinition, if not throw an error
+    if (!processDefinition?.encoding) throw new Error('Encoding not found');
+
+    const hashStream = new Hasher('sha256');
+
+    const encodingConverter = new EncodingConverter('utf8', processDefinition.encoding);
+
+    // Convert the fileStream to a usable stream while also calculating the hash
+    const usableStream = fileStream.pipe(hashStream).pipe(encodingConverter);
+
+    // Create a new instance of XMLStream using the usableStream
+    const xmlClient = new XMLStream(usableStream, true);
+    await xmlClient.initialize();
+
+    const processedRecords = await processRecords(processDefinition, xmlClient, fileInfo.date);
+
+    // hash needs to be collected after processRecords returns to make sure all the data has
+    // been processed for all records in the file
+    const hash = await hashStream.getHash();
+    const schema = await xmlClient.getObjectSchema();
+
+    result = {
+      hash,
+      schema,
+      ...processedRecords,
+    };
+  } catch (err) {
+    result.errors.push(err.message);
+    auditLogger.log('error', ` processFile ${err.message}`, err);
+  }
+
+  return result;
+};
+
+export default processFile;
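The pipe order in processFile matters: Hasher sees the raw bytes before EncodingConverter rewrites them, so the recorded hash identifies the file exactly as it sat in the zip, and getHash() only resolves once the stream has been drained by processRecords. A stripped-down sketch of the same tap-and-hash pattern using only Node built-ins (Hasher and EncodingConverter are this repo's stream wrappers; this standalone version is illustrative, not the project's implementation):

    import { createHash } from 'crypto';
    import { PassThrough, Readable } from 'stream';

    // Minimal stand-in for Hasher: observe the bytes, pass them through untouched.
    const hashTap = (algorithm: string) => {
      const hash = createHash(algorithm);
      const tap = new PassThrough();
      tap.on('data', (chunk) => hash.update(chunk));
      return { tap, digest: () => hash.digest('hex') };
    };

    const { tap, digest } = hashTap('sha256');
    const source = Readable.from(['<root>', '<a/>', '</root>']);
    source.pipe(tap); // a downstream consumer would parse XML from tap here
    tap.on('end', () => console.log(digest())); // digest is only valid after the stream ends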
diff --git a/src/lib/importSystem/processFilesFromZip.ts b/src/lib/importSystem/processFilesFromZip.ts
new file mode 100644
index 0000000000..ac5142c4b9
--- /dev/null
+++ b/src/lib/importSystem/processFilesFromZip.ts
@@ -0,0 +1,144 @@
+import ZipStream, { FileInfo as ZipFileInfo } from '../stream/zip';
+import {
+  setImportDataFileStatusByPath,
+  updateAvailableDataFileMetadata,
+} from './record';
+import { IMPORT_DATA_STATUSES } from '../../constants';
+import { auditLogger } from '../../logger';
+import processFile from './processFile';
+import { ProcessDefinition } from './types';
+
+/**
+ * Processes the files using the provided ZipStream object and the array of files to process.
+ *
+ * @param importFileId - The ID of the import file the processed files belong to.
+ * @param zipClient - The ZipStream object used to interact with the zip files.
+ * @param filesToProcess - An array of ZipFileInfo objects representing the files to be processed.
+ * @param processDefinitions - An array of ProcessDefinition objects describing how each file is to
+ * be processed.
+ * @returns - A Promise that resolves when all files have been processed.
+ * @throws - If there is an error while processing a file.
+ */
+const processFilesFromZip = async (
+  importFileId, // The ID of the import file
+  zipClient: ZipStream, // The client for working with ZIP files
+  filesToProcess: (ZipFileInfo)[], // An array of files to process
+  processDefinitions: ProcessDefinition[], // An array of process definitions
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+): Promise<any> => {
+  // If there are no more files to process, exit the function
+  if (processDefinitions.length === 0) return Promise.resolve();
+
+  // Get the next file to process from the end of the processDefinitions array
+  const nextToProcess = processDefinitions.pop();
+
+  try {
+    const fileInfoToProcess = filesToProcess
+      // Find the ZipFileInfo object that matches the next file to process
+      .find(({ name }) => name === nextToProcess.fileName);
+
+    if (fileInfoToProcess) { // If the file to process is found
+      setImportDataFileStatusByPath(
+        importFileId,
+        fileInfoToProcess,
+        IMPORT_DATA_STATUSES.PROCESSING,
+      );
+      // Get the file stream for the file to process from the zipClient
+      const fileStream = await zipClient.getFileStream(fileInfoToProcess.name);
+
+      // Throw an error if the file stream is not available
+      if (!fileStream) throw new Error(`Failed to get stream from ${fileInfoToProcess.name}`);
+
+      const processingData = await processFile(
+        nextToProcess, // Pass the process definition of the file to process
+        fileInfoToProcess, // Pass the ZipFileInfo object of the file to process
+        fileStream, // Pass the file stream of the file to process
+      );
+
+      const {
+        schema = null, // The schema of the processed file
+        hash = null, // The hash of the processed file
+      } = processingData;
+
+      const [
+        inserts, // An array of insert operations
+        updates, // An array of update operations
+        deletes, // An array of delete operations
+        errors = [], // An array of errors
+      ] = await Promise.all([
+        Promise.all(processingData?.inserts.map(async (i) => Promise.resolve(i))),
+        Promise.all(processingData?.updates.map(async (i) => Promise.resolve(i))),
+        Promise.all(processingData?.deletes.map(async (i) => Promise.resolve(i))),
+        Promise.all(processingData.errors.map(async (i) => Promise.resolve(i))),
+      ]);
+
+      const [
+        insertCount, // The number of insert operations
+        updateCount, // The number of update operations
+        deleteCount, // The number of delete operations
+        errorCounts, // An object containing the count of each error
+      ] = [
+        inserts?.length || 0,
+        updates?.length || 0,
+        deletes?.length || 0,
+        errors.reduce((acc, error) => {
+          if (!acc[error]) {
+            acc[error] = 0;
+          }
+          acc[error] += 1;
+          return acc;
+        }, {}),
+      ];
+
+      // save/log file processing data
+      await updateAvailableDataFileMetadata(
+        importFileId, // Pass the import file ID
+        fileInfoToProcess, // Pass the ZipFileInfo object of the processed file
+        IMPORT_DATA_STATUSES.PROCESSED,
+        {
+          schema,
+          hash,
+          recordCounts: {
+            inserts: insertCount,
+            updates: updateCount,
+            deletes: deleteCount,
+            errors: errorCounts,
+          },
+        },
+      );
+    } else {
+      // save/log file not processed
+      await updateAvailableDataFileMetadata(
+        importFileId, // Pass the import file ID
+        fileInfoToProcess, // Pass the ZipFileInfo object of the unprocessed file
+        IMPORT_DATA_STATUSES.PROCESSING_FAILED,
+        {},
+      );
+    }
+  } catch (err) {
+    await updateAvailableDataFileMetadata(
+      importFileId, // Pass the import file ID
+      {
+        name: nextToProcess.fileName, // Pass the name of the file that caused the error
+      },
+      IMPORT_DATA_STATUSES.PROCESSING_FAILED,
+      {
+        recordCounts: {
+          errors: {
+            [err.message]: 1, // Add the error message to the error count object
+          },
+        },
+      },
+    );
+    auditLogger.log('error', ` processFilesFromZip ${err.message}`, err);
+  }
+
+  // Recursively call the processFilesFromZip function to process the remaining files
+  return processFilesFromZip(
+    importFileId, // Pass the import file ID
+    zipClient, // Pass the zip client
+    filesToProcess, // Pass the array of files to process
+    processDefinitions, // Pass the array of process definitions
+  );
+};
+
+export default processFilesFromZip;
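Because processFilesFromZip pops from the end of processDefinitions and then calls itself, files are handled strictly one at a time, last definition first, and the per-file try/catch keeps a single bad file from aborting the rest. The recursion is equivalent to a plain loop; a minimal sketch (handleOne is a hypothetical stand-in for the body above, not a function in this repo):

    // Sketch only: the real function also resolves zip streams and records metadata.
    const processAll = async <T>(
      definitions: T[],
      handleOne: (definition: T) => Promise<void>,
    ): Promise<void> => {
      while (definitions.length > 0) {
        const next = definitions.pop() as T; // safe: length was just checked
        try {
          await handleOne(next); // mirrors the per-file try/catch above
        } catch {
          // in the real code the failure is logged and recorded, then the loop continues
        }
      }
    };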
diff --git a/src/lib/importSystem/processRecords.ts b/src/lib/importSystem/processRecords.ts
new file mode 100644
index 0000000000..46956eca7b
--- /dev/null
+++ b/src/lib/importSystem/processRecords.ts
@@ -0,0 +1,233 @@
+import { Op } from 'sequelize';
+import {
+  remap,
+  collectChangedValues,
+  lowercaseKeys,
+  createRanges,
+} from '../dataObjectUtils';
+import { filterDataToModel, modelForTable } from '../modelUtils';
+import { getHash } from '../stream/hasher';
+import XMLStream from '../stream/xml';
+import db from '../../models';
+import { auditLogger } from '../../logger';
+import { ProcessDefinition } from './types';
+
+/**
+ * Process records according to the given process definition and XML client.
+ * @param processDefinition - The process definition object.
+ * @param xmlClient - The XML client object.
+ * @param fileDate - the date the file was modified
+ * @param recordActions - The record actions object containing arrays of promises for
+ * inserts, updates, and deletes.
+ * @returns A promise that resolves to the updated recordActions object.
+ */
+const processRecords = async (
+  processDefinition: ProcessDefinition,
+  xmlClient: XMLStream,
+  fileDate: Date,
+  recordActions: {
+    inserts,
+    updates,
+    deletes,
+    errors,
+  } = {
+    inserts: [],
+    updates: [],
+    deletes: [],
+    errors: [],
+  },
+): Promise<{
+  inserts,
+  updates,
+  deletes,
+  errors,
+}> => {
+  let record;
+  try {
+    record = await xmlClient.getNextObject(true);
+  } catch (err) {
+    // record the error into the recordActions and continue on successfully as
+    // other entries may be processed successfully
+    recordActions.errors.push(err.message);
+    auditLogger.log('error', ` processRecords getNextObject ${err.message}`, err);
+  }
+
+  // @ts-ignore
+  let model;
+  try {
+    model = modelForTable(db, processDefinition.tableName);
+  } catch (err) {
+    // record the error into the recordActions
+    recordActions.errors.push(err.message);
+    auditLogger.log('error', ` processRecords modelForTable ${err.message}`, err);
+
+    // Unable to continue as a model is required to record any information
+    return Promise.reject(recordActions);
+  }
+
+  if (record) {
+    try {
+      // TODO: column/key alpha sort to retain order
+      // 1. use the remap method to format data to structure needed
+      // 2. use the filterDataToModel to match what is expected
+      // 3. check for existing record
+      // 4a. if new
+      //   1. insert
+      //   2. recordActions.inserts.push(uuid)
+      // 4b. if found
+      //   1. use the collectChangedValues to find the values to update
+      //   2. update
+      //   3. recordActions.updates.push(uuid)
+
+      // Format the record data using the remap method
+      // This changes the attribute names and structure into what will be saved
+      const { mapped: data } = remap(
+        record,
+        lowercaseKeys(processDefinition.remapDef),
+        {
+          keepUnmappedValues: false,
+          // defines a custom function that will replace the resulting structure
+          // with the result of each function.
+          targetFunctions: {
+            // take in an object and generate a hash of that object
+            'toHash.*': (toHash) => ({ hash: getHash(toHash) }),
+          },
+        },
+      );
+
+      // Filter the data to match the expected model
+      const {
+        matched: filteredData,
+        unmatched: droppedData,
+      } = await filterDataToModel(data, model);
+
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+      const recordKey: Record<string, any> = {};
+      processDefinition.keys.forEach((key) => {
+        const value = filteredData[key];
+        if (value) {
+          recordKey[key] = value;
+        }
+        // TODO: handle case where all/part of the key may have been dropped
+      });
+      if (Object.keys(droppedData).length > 0) {
+        // TODO: add some kind of note/warning that mapped data was filtered out at the model level
+        // The message should include the importDataFileId, the recordKey, and the column names.
+        // The column values should be excluded to prevent possible display of PII
+      }
+
+      // Check if there is an existing record with the same key value
+      const currentData = await model.findOne({
+        where: {
+          ...recordKey,
+        },
+      });
+
+      if (!currentData) {
+        // If the record is new, create it
+        const insert = model.create(
+          {
+            ...filteredData,
+            sourceCreatedAt: fileDate,
+            sourceUpdatedAt: fileDate,
+          },
+          {
+            individualHooks: true,
+            returning: true,
+          },
+        );
+        recordActions.inserts.push(insert);
+      } else if (fileDate > currentData.sourceUpdatedAt) {
+        // If the record already exists, find the delta then update it
+        const delta = collectChangedValues(filteredData, currentData);
+        const update = model.update(
+          {
+            ...delta,
+            sourceUpdatedAt: fileDate,
+            ...(currentData.sourceDeletedAt && { sourceDeletedAt: null }),
+            updatedAt: new Date(),
+          },
+          {
+            where: { id: currentData.id },
+            individualHooks: true,
+            returning: true,
+          },
+        );
+        recordActions.updates.push(update);
+      }
+    } catch (err) {
+      // record the error into the recordActions and continue on successfully as
+      // other entries may be processed successfully
+      recordActions.errors.push(err.message);
+      auditLogger.log('error', ` processRecords create/update ${err.message}`, err);
+    }
+  } else {
+    try {
+      // 1. Find all records not in recordActions.inserts and recordActions.updates
+      // 2. delete
+      // 3. recordActions.deletes.push(promises)
+      // 4. pass back recordActions
+
+      const [
+        affectedDataInserts,
+        affectedDataUpdates,
+      ] = await Promise.all([
+        Promise.all(recordActions.inserts),
+        Promise.all(recordActions.updates),
+      ]);
+
+      // Flatten the affectedDataUpdates array and extract the objects
+      const flattenedUpdates = affectedDataUpdates.flatMap(
+        // Assuming the second element of each sub-array is the array of objects
+        (update) => (Array.isArray(update[1]) ? update[1] : []),
+      );
+
+      // Combine the affected data from inserts and flattened updates
+      const affectedData = [
+        ...affectedDataInserts,
+        ...flattenedUpdates,
+      ];
+
+      const affectedDataIds = affectedData?.map(({ id }) => id).filter((id) => id) || [];
+      const affectedRanges = createRanges(affectedDataIds);
+
+      // mark the source date when the records no longer are present in the processed file
+      // "Delete" all records that are not in the affectedData array
+      if (affectedDataIds.length) {
+        const destroys = model.update(
+          {
+            sourceDeletedAt: fileDate,
+          },
+          {
+            where: {
+              [Op.and]: affectedRanges.map((range) => ({
+                id: { [Op.notBetween]: range },
+              })),
+              sourceDeletedAt: null,
+            },
+            individualHooks: true,
+          },
+        );
+
+        recordActions.deletes.push(destroys);
+      }
+    } catch (err) {
+      // record the error into the recordActions
+      recordActions.errors.push(err.message);
+      auditLogger.log('error', ` processRecords destroy ${err.message}`, err);
+    }
+
+    return Promise.resolve(recordActions);
+  }
+
+  // Recursively call the processRecords function to process the next record
+  return processRecords(
+    processDefinition,
+    xmlClient,
+    fileDate,
+    recordActions,
+  );
+};
+
+export default processRecords;
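The deletes branch runs once getNextObject() returns nothing more: every id touched by this file is collapsed into contiguous [low, high] ranges, and rows falling outside all of them get sourceDeletedAt stamped rather than being hard-deleted. Assuming createRanges behaves as its usage here implies (sorted ids folded into inclusive pairs), a hypothetical re-implementation for illustration only; the authoritative version lives in dataObjectUtils:

    // Hypothetical sketch of createRanges: fold sorted ids into inclusive [low, high] pairs.
    const createRanges = (ids: number[]): [number, number][] => {
      const sorted = [...ids].sort((a, b) => a - b);
      const ranges: [number, number][] = [];
      sorted.forEach((id) => {
        const last = ranges[ranges.length - 1];
        if (last && id === last[1] + 1) last[1] = id; // extend the current run
        else if (!last || id > last[1]) ranges.push([id, id]); // start a new run
      });
      return ranges;
    };

    // createRanges([1, 2, 3, 7, 8, 10]) => [[1, 3], [7, 8], [10, 10]]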
diff --git a/src/lib/importSystem/processZipFileFromS3.ts b/src/lib/importSystem/processZipFileFromS3.ts
new file mode 100644
index 0000000000..00b66a177f
--- /dev/null
+++ b/src/lib/importSystem/processZipFileFromS3.ts
@@ -0,0 +1,140 @@
+import S3Client from '../stream/s3';
+import ZipStream, { FileInfo as ZipFileInfo } from '../stream/zip';
+import {
+  getNextFileToProcess,
+  recordAvailableDataFiles,
+  setImportFileStatus,
+  setImportDataFileStatusByPath,
+} from './record';
+import { IMPORT_DATA_STATUSES, IMPORT_STATUSES } from '../../constants';
+import { auditLogger } from '../../logger';
+import processFilesFromZip from './processFilesFromZip';
+
+/**
+ * Processes a zip file from S3.
+ * @param importId - The ID of the import.
+ * @throws {Error} If an error occurs while processing the zip file.
+ * @returns {Promise} A promise that resolves when the zip file has been processed
+ * successfully.
+ */
+const processZipFileFromS3 = async (
+  importId: number,
+) => {
+  const startTime = new Date(); // The start time for file collection
+  // Get the next file to process based on the importId
+  const importFile = await getNextFileToProcess(importId);
+  if (!importFile) return Promise.resolve();
+
+  // Destructure properties from the importFile object
+  const {
+    importFileId,
+    processAttempts = 0,
+    fileKey: key,
+    importDefinitions: processDefinitions,
+  } = importFile;
+
+  // These must be let to properly wrap the population in a try/catch
+  let s3Client;
+  let s3FileStream;
+
+  // Set the import file status to PROCESSING and increment the processAttempts count
+  await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING, null, processAttempts + 1);
+
+  try {
+    // Create a new S3Client instance and download the file as a stream
+    s3Client = new S3Client();
+    s3FileStream = await s3Client.downloadFileAsStream(key);
+  } catch (err) {
+    // If an error occurs, set the import file status to PROCESSING_FAILED
+    await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING_FAILED);
+    auditLogger.log('error', ` processZipFileFromS3 downloadFileAsStream ${err.message}`, err);
+    return {
+      error: err.message,
+      duration: new Date().getTime() - startTime.getTime(),
+    };
+  }
+
+  // These must be let to properly wrap the population in a try/catch
+  let zipClient;
+  let fileDetails;
+
+  const neededFiles = processDefinitions.map(({ fileName: name, path }) => ({ name, path }));
+
+  try {
+    // Create a new ZipStream instance using the downloaded file stream
+    zipClient = new ZipStream(
+      s3FileStream,
+      undefined,
+      neededFiles,
+    );
+
+    // Get details of all files in the zip archive
+    fileDetails = await zipClient.getAllFileDetails();
+    // Record the available data files in the importFile
+    await recordAvailableDataFiles(importFileId, fileDetails);
+  } catch (err) {
+    // If an error occurs, set the import file status to PROCESSING_FAILED
+    await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING_FAILED);
+    auditLogger.log('error', ` processZipFileFromS3 getAllFileDetails ${err.message}`, err);
+    return {
+      error: err.message,
+      duration: new Date().getTime() - startTime.getTime(),
+    };
+  }
+
+  // Filter out null file details and keep only the ones that streams were requested for,
+  // then cast the remaining ones as ZipFileInfo type
+  const filteredFileDetails = fileDetails
+    .filter((fileDetail) => fileDetail)
+    .filter(({ name, path }) => neededFiles.some((neededFile) => (
+      neededFile.name === name
+      && neededFile.path === path
+    ))) as ZipFileInfo[];
+
+  await Promise.all(fileDetails
+    .filter(({ name, path }) => !neededFiles.some((neededFile) => (
+      neededFile.name === name
+      && neededFile.path === path
+    )))
+    .map(async (fileDetail) => setImportDataFileStatusByPath(
+      importFileId,
+      fileDetail,
+      IMPORT_DATA_STATUSES.WILL_NOT_PROCESS,
+    )));
+
+  let results;
+
+  try {
+    // Process files from the zip archive using the importFileId, zipClient, filteredFileDetails,
+    // and processDefinitions
+    results = await processFilesFromZip(
+      importFileId,
+      zipClient,
+      filteredFileDetails,
+      processDefinitions,
+    );
+  } catch (err) {
+    // If an error occurs, set the import file status to PROCESSING_FAILED
+    await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSING_FAILED);
+    auditLogger.log('error', `processZipFileFromS3 processFilesFromZip ${err.message}`, err);
+    return {
+      error: err.message,
+      file: {
+        name: fileDetails.name,
+      },
+      duration: new Date().getTime() -
startTime.getTime(), + }; + } + + // Set the import file status to PROCESSED + await setImportFileStatus(importFileId, IMPORT_STATUSES.PROCESSED); + return { + ...results, + file: { + name: fileDetails.name, + }, + duration: new Date().getTime() - startTime.getTime(), + }; +}; + +export default processZipFileFromS3; diff --git a/src/lib/importSystem/tests/process.test.js b/src/lib/importSystem/tests/process.test.js index 8acc2276ea..9f87f5e0af 100644 --- a/src/lib/importSystem/tests/process.test.js +++ b/src/lib/importSystem/tests/process.test.js @@ -1,6 +1,5 @@ -// processRecords.test.ts import { DataTypes, Op } from 'sequelize'; -import { processRecords } from '../process'; +import processRecords from '../processRecords'; import XMLStream from '../../stream/xml'; import { modelForTable } from '../../modelUtils'; diff --git a/src/lib/importSystem/tests/processFile.test.js b/src/lib/importSystem/tests/processFile.test.js new file mode 100644 index 0000000000..75e17f05af --- /dev/null +++ b/src/lib/importSystem/tests/processFile.test.js @@ -0,0 +1,86 @@ +import processFile from '../processFile'; +import processRecords from '../processRecords'; +import { auditLogger } from '../../../logger'; +import XMLStream from '../../stream/xml'; +import Hasher from '../../stream/hasher'; + +jest.mock('../../stream/hasher'); +jest.mock('../../stream/encoding'); +jest.mock('../../stream/xml'); +jest.mock('../../../logger'); +jest.mock('../processRecords'); + +describe('processFile', () => { + const processDefinition = { + remapDef: {}, + tableName: 'tableName', + keys: ['key'], + encoding: 'utf8', + }; + const fileInfo = { date: '2022-01-01' }; + const fileStream = { + pipe: jest.fn().mockReturnThis(), + }; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should throw an error if remapDef is not found', async () => { + const invalidProcessDefinition = { ...processDefinition, remapDef: undefined }; + + const result = await processFile(invalidProcessDefinition, fileInfo, fileStream); + + expect(result.errors).toContain('Remapping definitions not found'); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('Remapping definitions not found'), expect.any(Error)); + }); + + it('should throw an error if tableName is not found', async () => { + const invalidProcessDefinition = { ...processDefinition, tableName: undefined }; + + const result = await processFile(invalidProcessDefinition, fileInfo, fileStream); + + expect(result.errors).toContain('Model not found'); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('Model not found'), expect.any(Error)); + }); + + it('should throw an error if keys are not found', async () => { + const invalidProcessDefinition = { ...processDefinition, keys: undefined }; + + const result = await processFile(invalidProcessDefinition, fileInfo, fileStream); + + expect(result.errors).toContain('Keys not found'); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('Keys not found'), expect.any(Error)); + }); + + it('should throw an error if encoding is not found', async () => { + const invalidProcessDefinition = { ...processDefinition, encoding: undefined }; + + const result = await processFile(invalidProcessDefinition, fileInfo, fileStream); + + expect(result.errors).toContain('Encoding not found'); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('Encoding not found'), expect.any(Error)); + }); + + it('should return result', async () => { + 
Hasher.prototype.getHash.mockResolvedValue('hash'); + XMLStream.prototype.initialize.mockResolvedValue(null); + XMLStream.prototype.getObjectSchema.mockResolvedValue({}); + processRecords.mockResolvedValueOnce({ + errors: [], + }); + const result = await processFile(processDefinition, fileInfo, fileStream); + + expect(result).toStrictEqual({ hash: 'hash', schema: {}, errors: [] }); + }); + + it('handles generic error', async () => { + Hasher.prototype.getHash.mockResolvedValue('hash'); + XMLStream.prototype.initialize.mockResolvedValue(null); + XMLStream.prototype.getObjectSchema.mockResolvedValue({}); + processRecords.mockRejectedValueOnce(new Error('Generic error')); + const result = await processFile(processDefinition, fileInfo, fileStream); + + expect(result).toStrictEqual({ errors: ['Generic error'] }); + }); +}); diff --git a/src/lib/importSystem/tests/processFilesFromZip.test.js b/src/lib/importSystem/tests/processFilesFromZip.test.js new file mode 100644 index 0000000000..67fd981f3b --- /dev/null +++ b/src/lib/importSystem/tests/processFilesFromZip.test.js @@ -0,0 +1,310 @@ +/* eslint-disable max-len */ +import processFilesFromZip from '../processFilesFromZip'; +import { setImportDataFileStatusByPath, updateAvailableDataFileMetadata } from '../record'; +import { IMPORT_DATA_STATUSES, IMPORT_STATUSES } from '../../../constants'; +import { auditLogger } from '../../../logger'; +import processFile from '../processFile'; +import ZipStream from '../../stream/zip'; + +jest.mock('../record'); +jest.mock('../../../logger'); +jest.mock('../processFile'); +jest.mock('../../stream/zip'); + +describe('processFilesFromZip', () => { + const importFileId = 1; + let zipClient; + + const filesToProcess = [ + { name: 'file1.xml', path: 'path/to/file1.xml' }, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + ]; + const processDefinitions = [ + { + fileName: 'file1.xml', + remapDef: {}, + tableName: 'table1', + keys: ['key1'], + encoding: 'utf8', + }, + { + fileName: 'file2.xml', + remapDef: {}, + tableName: 'table2', + keys: ['key2'], + encoding: 'utf8', + }, + ]; + + beforeEach(() => { + zipClient = new ZipStream(); + zipClient.getFileStream.mockImplementation(() => ({})); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('exits early if processDefinitions is empty', async () => { + await processFilesFromZip(importFileId, zipClient, filesToProcess, []); + + expect(setImportDataFileStatusByPath).not.toHaveBeenCalled(); + expect(processFile).not.toHaveBeenCalled(); + expect(updateAvailableDataFileMetadata).not.toHaveBeenCalled(); + expect(auditLogger.log).not.toHaveBeenCalled(); + }); + + it('throws an error if there is a failure to get a file stream', async () => { + zipClient.getFileStream.mockImplementation(() => null); + + await processFilesFromZip(importFileId, zipClient, filesToProcess, processDefinitions.map((def) => ({ ...def }))); + + expect(updateAvailableDataFileMetadata).toHaveBeenCalledTimes(2); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith(importFileId, { name: 'file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING_FAILED, { recordCounts: { errors: { 'Failed to get stream from file1.xml': 1 } } }); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith(importFileId, { name: 'file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING_FAILED, { recordCounts: { errors: { 'Failed to get stream from file2.xml': 1 } } }); + }); + + it('should process all files successfully', async () => { + processFile.mockImplementation(() => ({ + errors: [], + schema: {}, + hash: 
'hash', + inserts: [], + updates: [], + deletes: [], + })); + + await processFilesFromZip(importFileId, zipClient, filesToProcess, processDefinitions.map((def) => ({ ...def }))); + + expect(setImportDataFileStatusByPath).toHaveBeenCalledTimes(2); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file1.xml', path: 'path/to/file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file2.xml', path: 'path/to/file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(processFile).toHaveBeenCalledTimes(2); + expect(processFile).toHaveBeenCalledWith( + { + encoding: 'utf8', fileName: 'file2.xml', keys: ['key2'], remapDef: {}, tableName: 'table2', + }, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + {}, + ); + expect(processFile).toHaveBeenCalledWith( + { + encoding: 'utf8', fileName: 'file1.xml', keys: ['key1'], remapDef: {}, tableName: 'table1', + }, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + {}, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledTimes(2); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + IMPORT_STATUSES.PROCESSED, + { + hash: 'hash', + recordCounts: { + deletes: 0, errors: {}, inserts: 0, updates: 0, + }, + schema: {}, + }, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + IMPORT_STATUSES.PROCESSED, + { + hash: 'hash', + recordCounts: { + deletes: 0, errors: {}, inserts: 0, updates: 0, + }, + schema: {}, + }, + ); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file1.xml', path: 'path/to/file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file2.xml', path: 'path/to/file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + }); + + it('handles default values returned from processFile', async () => { + processFile.mockImplementation(() => ({ + inserts: [], + updates: [], + deletes: [], + errors: [], + })); + + await processFilesFromZip(importFileId, zipClient, filesToProcess, processDefinitions.map((def) => ({ ...def }))); + + expect(setImportDataFileStatusByPath).toHaveBeenCalledTimes(2); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file1.xml', path: 'path/to/file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file2.xml', path: 'path/to/file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(processFile).toHaveBeenCalledTimes(2); + expect(processFile).toHaveBeenCalledWith( + { + encoding: 'utf8', fileName: 'file2.xml', keys: ['key2'], remapDef: {}, tableName: 'table2', + }, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + {}, + ); + expect(processFile).toHaveBeenCalledWith( + { + encoding: 'utf8', fileName: 'file1.xml', keys: ['key1'], remapDef: {}, tableName: 'table1', + }, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + {}, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledTimes(2); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + IMPORT_STATUSES.PROCESSED, + { + hash: null, + recordCounts: { + deletes: 0, errors: {}, inserts: 0, updates: 0, + }, + schema: null, + }, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + 
importFileId, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + IMPORT_STATUSES.PROCESSED, + { + hash: null, + recordCounts: { + deletes: 0, errors: {}, inserts: 0, updates: 0, + }, + schema: null, + }, + ); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file1.xml', path: 'path/to/file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file2.xml', path: 'path/to/file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + }); + + it('runs fns returned from processFile', async () => { + const insert = jest.fn(); + const update = jest.fn(); + const del = jest.fn(); + + processFile.mockImplementation(() => ({ + inserts: [insert], + updates: [update], + deletes: [del], + errors: ['Failure', 'Failure', 'Near-failure'], + })); + + const resolve = jest.spyOn(Promise, 'resolve'); + + await processFilesFromZip(importFileId, zipClient, filesToProcess, processDefinitions.map((def) => ({ ...def }))); + + expect(resolve).toHaveBeenCalledWith(insert); + expect(resolve).toHaveBeenCalledWith(update); + expect(resolve).toHaveBeenCalledWith(del); + + expect(updateAvailableDataFileMetadata).toHaveBeenCalledTimes(2); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + IMPORT_STATUSES.PROCESSED, + { + hash: null, + recordCounts: { + deletes: 1, + errors: { + Failure: 2, + 'Near-failure': 1, + }, + inserts: 1, + updates: 1, + }, + schema: null, + }, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + IMPORT_STATUSES.PROCESSED, + { + hash: null, + recordCounts: { + deletes: 1, + errors: { + Failure: 2, + 'Near-failure': 1, + }, + inserts: 1, + updates: 1, + }, + schema: null, + }, + ); + }); + + it('should handle errors when processing a file', async () => { + processFile.mockRejectedValueOnce(new Error('Processing error')).mockResolvedValueOnce({ + errors: [], + schema: {}, + hash: 'hash', + inserts: [], + updates: [], + deletes: [], + }); + + await processFilesFromZip(importFileId, zipClient, filesToProcess, processDefinitions.map((def) => ({ ...def }))); + + expect(setImportDataFileStatusByPath).toHaveBeenCalledTimes(2); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file1.xml', path: 'path/to/file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file2.xml', path: 'path/to/file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(processFile).toHaveBeenCalledTimes(2); + expect(processFile).toHaveBeenCalledWith( + { + encoding: 'utf8', fileName: 'file2.xml', keys: ['key2'], remapDef: {}, tableName: 'table2', + }, + { name: 'file2.xml', path: 'path/to/file2.xml' }, + {}, + ); + expect(processFile).toHaveBeenCalledWith( + { + encoding: 'utf8', fileName: 'file1.xml', keys: ['key1'], remapDef: {}, tableName: 'table1', + }, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + {}, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledTimes(2); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file2.xml' }, + IMPORT_STATUSES.PROCESSING_FAILED, + { + recordCounts: { + errors: { + 'Processing error': 1, + }, + }, + }, + ); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith( + importFileId, + { name: 'file1.xml', path: 'path/to/file1.xml' }, + IMPORT_STATUSES.PROCESSED, + { + 
hash: 'hash', + recordCounts: { + deletes: 0, errors: {}, inserts: 0, updates: 0, + }, + schema: {}, + }, + ); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file1.xml', path: 'path/to/file1.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + expect(setImportDataFileStatusByPath).toHaveBeenCalledWith(importFileId, { name: 'file2.xml', path: 'path/to/file2.xml' }, IMPORT_DATA_STATUSES.PROCESSING); + }); + + it('should handle cases where there are no files to process', async () => { + await processFilesFromZip(importFileId, zipClient, [], processDefinitions.map((def) => ({ ...def }))); + + expect(setImportDataFileStatusByPath).not.toHaveBeenCalled(); + expect(processFile).not.toHaveBeenCalled(); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledTimes(2); + expect(updateAvailableDataFileMetadata).toHaveBeenCalledWith(importFileId, undefined, IMPORT_STATUSES.PROCESSING_FAILED, {}); + expect(auditLogger.log).not.toHaveBeenCalled(); + }); +}); diff --git a/src/lib/importSystem/tests/processZipFileFromS3.test.js b/src/lib/importSystem/tests/processZipFileFromS3.test.js new file mode 100644 index 0000000000..ca3df959ef --- /dev/null +++ b/src/lib/importSystem/tests/processZipFileFromS3.test.js @@ -0,0 +1,151 @@ +/* eslint-disable max-len */ +import { processZipFileFromS3 } from '../process'; +import { + getNextFileToProcess, + setImportFileStatus, + recordAvailableDataFiles, +} from '../record'; +import processFilesFromZip from '../processFilesFromZip'; +import S3Client from '../../stream/s3'; +import ZipStream from '../../stream/zip'; // Adjust the import path +import { auditLogger } from '../../../logger'; // Adjust the import path +import { IMPORT_STATUSES } from '../../../constants'; // Adjust the import path + +jest.mock('../record'); +jest.mock('../../stream/s3'); +jest.mock('../../stream/zip'); +jest.mock('../../../logger'); +jest.mock('../processFilesFromZip'); + +describe('processZipFileFromS3', () => { + afterEach(() => { + jest.clearAllMocks(); + jest.resetAllMocks(); + }); + + it('should resolve if there is no file to process', async () => { + const importId = 1; + getNextFileToProcess.mockResolvedValue(null); + + await expect(processZipFileFromS3(importId)).resolves.toBeUndefined(); + }); + + it('assigns processAttempts to 0 if not set in params', async () => { + const importId = 1; + const importFile = { + importFileId: 1, + fileKey: 'fileKey', + importDefinitions: [{ fileName: 'fileName', path: 'path' }], + }; + getNextFileToProcess.mockResolvedValue(importFile); + S3Client.prototype.downloadFileAsStream.mockRejectedValue(new Error('S3 error')); + + const result = await processZipFileFromS3(importId); + + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING, null, 1); + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING_FAILED); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('S3 error'), expect.any(Error)); + expect(result).toEqual({ + error: 'S3 error', + duration: expect.any(Number), + }); + }); + + it('should handle error when downloading file from S3 fails', async () => { + const importId = 1; + const importFile = { + importFileId: 1, + processAttempts: 0, + fileKey: 'fileKey', + importDefinitions: [{ fileName: 'fileName', path: 'path' }], + }; + getNextFileToProcess.mockResolvedValue(importFile); + S3Client.prototype.downloadFileAsStream.mockRejectedValue(new Error('S3 error')); + + const result = await 
processZipFileFromS3(importId); + + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING, null, importFile.processAttempts + 1); + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING_FAILED); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('S3 error'), expect.any(Error)); + expect(result).toEqual({ + error: 'S3 error', + duration: expect.any(Number), + }); + }); + + it('should handle error when getting file details from zip archive fails', async () => { + const importId = 1; + const importFile = { + importFileId: 1, + processAttempts: 0, + fileKey: 'fileKey', + importDefinitions: [{ fileName: 'fileName', path: 'path' }], + }; + getNextFileToProcess.mockResolvedValue(importFile); + S3Client.prototype.downloadFileAsStream.mockResolvedValue('s3FileStream'); + ZipStream.prototype.getAllFileDetails.mockRejectedValue(new Error('Zip error')); + + const result = await processZipFileFromS3(importId); + + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING, null, importFile.processAttempts + 1); + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING_FAILED); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('Zip error'), expect.any(Error)); + expect(result).toEqual({ + error: 'Zip error', + duration: expect.any(Number), + }); + }); + it('should handle error when processing files from zip archive fails', async () => { + const importId = 1; + const importFile = { + importFileId: 1, + processAttempts: 0, + fileKey: 'fileKey', + importDefinitions: [{ fileName: 'fileName', path: 'path' }], + }; + getNextFileToProcess.mockResolvedValue(importFile); + S3Client.prototype.downloadFileAsStream.mockResolvedValue('s3FileStream'); + ZipStream.prototype.getAllFileDetails.mockResolvedValue([{ name: 'fileName', path: 'path' }]); + processFilesFromZip.mockRejectedValue(new Error('Process error')); + + const result = await processZipFileFromS3(importId); + + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING, null, importFile.processAttempts + 1); + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING_FAILED); + expect(auditLogger.log).toHaveBeenCalledWith('error', expect.stringContaining('Process error'), expect.any(Error)); + expect(result).toEqual({ + error: 'Process error', + file: { + name: undefined, + }, + duration: expect.any(Number), + }); + }); + + it('should process the zip file successfully', async () => { + const importId = 1; + const importFile = { + importFileId: 1, + processAttempts: 0, + fileKey: 'fileKey', + importDefinitions: [{ fileName: 'fileName', path: 'path' }], + }; + getNextFileToProcess.mockResolvedValue(importFile); + S3Client.prototype.downloadFileAsStream.mockResolvedValue('s3FileStream'); + ZipStream.prototype.getAllFileDetails.mockResolvedValue([{ name: 'fileName', path: 'path' }]); + processFilesFromZip.mockResolvedValue({ someResult: 'result' }); + + const result = await processZipFileFromS3(importId); + + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, IMPORT_STATUSES.PROCESSING, null, importFile.processAttempts + 1); + expect(recordAvailableDataFiles).toHaveBeenCalledWith(importFile.importFileId, [{ name: 'fileName', path: 'path' }]); + expect(setImportFileStatus).toHaveBeenCalledWith(importFile.importFileId, 
IMPORT_STATUSES.PROCESSED);
+    expect(result).toEqual({
+      someResult: 'result',
+      file: {
+        name: undefined,
+      },
+      duration: expect.any(Number),
+    });
+  });
+});
diff --git a/src/lib/importSystem/types.ts b/src/lib/importSystem/types.ts
new file mode 100644
index 0000000000..47d21c30d4
--- /dev/null
+++ b/src/lib/importSystem/types.ts
@@ -0,0 +1,7 @@
+export type ProcessDefinition = {
+  fileName: string,
+  encoding: string,
+  tableName: string,
+  keys: string[],
+  remapDef: Record<string, string>;
+};
diff --git a/src/lib/mailer/index.js b/src/lib/mailer/index.js
index 563b3f901a..7ae322abc6 100644
--- a/src/lib/mailer/index.js
+++ b/src/lib/mailer/index.js
@@ -491,7 +491,7 @@ export const sendTrainingReportNotification = async (job, transport = defaultTransport)
 export const trSessionCreated = async (event, sessionId) => {
   if (process.env.CI) return;
   try {
-    if (!event.pocIds && !event.pocIds.length) {
+    if (!event.pocIds || !event.pocIds.length) {
       auditLogger.warn(`MAILER: No POCs found for TR ${event.id}`);
     }
@@ -1286,7 +1286,11 @@ export const processNotificationQueue = () => {
  * @param {string} token
  * @returns Promise
  */
-export const sendEmailVerificationRequestWithToken = (user, token) => {
+export const sendEmailVerificationRequestWithToken = (
+  user,
+  token,
+  transport = defaultTransport,
+) => {
   const toEmails = filterAndDeduplicateEmails([user.email]);
 
   if (toEmails.length === 0) {
@@ -1298,7 +1302,7 @@
       from: process.env.FROM_EMAIL_ADDRESS,
     },
     send,
-    transport: defaultTransport,
+    transport,
     htmlToText: {
       wordwrap: 120,
     },
diff --git a/src/lib/mailer/index.test.js b/src/lib/mailer/index.test.js
index 41d0838fa7..5910ffa2ec 100644
--- a/src/lib/mailer/index.test.js
+++ b/src/lib/mailer/index.test.js
@@ -22,6 +22,12 @@ import {
   trCollaboratorAdded,
   filterAndDeduplicateEmails,
   onCompletedNotification,
+  onFailedNotification,
+  programSpecialistRecipientReportApprovedNotification,
+  trOwnerAdded,
+  trEventComplete,
+  sendEmailVerificationRequestWithToken,
+  recipientApprovedDigest,
 } from '.';
 import {
   EMAIL_ACTIONS,
@@ -143,6 +149,8 @@ const submittedReport = {
 jest.mock('../../services/userSettings', () => ({
   usersWithSetting: jest.fn().mockReturnValue(Promise.resolve([{ id: digestMockCollab.id }])),
+  // eslint-disable-next-line max-len
+  userSettingOverridesById: jest.fn().mockReturnValue(Promise.resolve([{ id: digestMockCollab.id }])),
 }));
 
 jest.mock('../../services/users', () => ({
@@ -257,6 +265,52 @@ describe('mailer tests', () => {
     });
   });
 
+  describe('onFailedNotification', () => {
+    afterEach(() => {
+      logger.info.mockClear();
+    });
+
+    it('if multiple reports fail we log each', () => {
+      onFailedNotification({
+        data: {
+          reports: [mockReport, { ...mockReport, displayId: 'mockReport-2' }],
+        },
+        name: EMAIL_ACTIONS.APPROVED,
+      }, new Error('Error!'));
+
+      expect(auditLogger.error).toHaveBeenCalledWith('job reportApproved failed for report mockReport-1 with error Error: Error!');
+      expect(auditLogger.error).toHaveBeenCalledWith('job reportApproved failed for report mockReport-2 with error Error: Error!');
+    });
+
+    it('if single report fails without a report object we log an error with unknown', () => {
+      onFailedNotification({
+        data: {
+          reports: null,
+          report: {
+            author: {
+              email: 'sampleauthoremail@test.com',
+            },
+          },
+        },
+        name: EMAIL_ACTIONS.APPROVED,
+      }, new Error('Error!'));
+
+      expect(auditLogger.error).toHaveBeenCalledWith('job reportApproved failed for report unknown with error Error: Error!');
+    });
+
+ it('if single report fails we log an error', () => { + onFailedNotification({ + data: { + reports: null, + report: mockReport, + }, + name: EMAIL_ACTIONS.APPROVED, + }, new Error('Error!')); + + expect(auditLogger.error).toHaveBeenCalledWith('job reportApproved failed for report mockReport-1 with error Error: Error!'); + }); + }); + describe('Changes requested by manager', () => { it('Tests that an email is sent', async () => { process.env.SEND_NOTIFICATIONS = 'true'; @@ -406,6 +460,14 @@ describe('mailer tests', () => { }, jsonTransport); expect(email).toBeNull(); }); + + it('Returns null if there are no toEmails', async () => { + process.env.SEND_NOTIFICATIONS = 'true'; + const email = await notifyApproverAssigned({ + data: { report: mockReport, newApprover: { user: { email: null } } }, + }, jsonTransport); + expect(email).toBeNull(); + }); }); describe('Add Collaborators', () => { @@ -430,6 +492,40 @@ describe('mailer tests', () => { }, jsonTransport); expect(email).toBeNull(); }); + + it('Returns null if there are no toEmails', async () => { + process.env.SEND_NOTIFICATIONS = 'true'; + const email = await notifyCollaboratorAssigned({ + data: { report: mockReport, newCollaborator: { user: { email: null } } }, + }, jsonTransport); + expect(email).toBeNull(); + }); + }); + + describe('sendEmailVerificationRequestWithToken', () => { + it('returns null if there are no emails to verify', async () => { + const email = await sendEmailVerificationRequestWithToken({ + email: null, + }, null); + expect(email).toBeNull(); + }); + + it('sends verification when there is a valid email', async () => { + const email = await sendEmailVerificationRequestWithToken( + { + email: 'test@test.gov', + }, + 'test-token-string', + jsonTransport, + ); + + // Expect email.send to have been called. 
+ expect(email.envelope.from).toBe(process.env.FROM_EMAIL_ADDRESS); + expect(email.envelope.to).toStrictEqual(['test@test.gov']); + const message = JSON.parse(email.message); + expect(message.subject).toBe('Please verify your email address'); + expect(message.text).toContain('In order to verify your email address'); + }); }); describe('sendTrainingReportNotification', () => { @@ -516,9 +612,44 @@ describe('mailer tests', () => { }, jsonTransport); expect(email).toBeNull(); }); + + it('logs the info the job.data.report.id when we have no emails and job.data.report.displayId is null', async () => { + process.env.SEND_NOTIFICATIONS = 'true'; + process.env.CI = ''; + const data = { + emailTo: [], + templatePath: 'tr_session_created', + debugMessage: 'Congrats dude', + displayId: null, + reportPath: '/asdf/', + report: { + id: 123, + displayId: null, + }, + }; + const email = await sendTrainingReportNotification({ + data, + }, jsonTransport); + + expect(logger.info).toHaveBeenCalledWith('Did not send undefined notification for 123 preferences are not set or marked as "no-send"'); + }); }); describe('Collaborators digest', () => { + it('returns null if the user has no email', async () => { + process.env.SEND_NOTIFICATIONS = 'true'; + const email = await notifyDigest({ + data: { + user: { email: null }, + reports: [mockReport], + type: EMAIL_ACTIONS.COLLABORATOR_DIGEST, + freq: EMAIL_DIGEST_FREQ.DAILY, + subjectFreq: DAILY, + }, + }, jsonTransport); + expect(email).toBeNull(); + }); + it('tests that an email is sent for a daily setting', async () => { process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ @@ -1247,6 +1378,10 @@ describe('mailer tests', () => { it('"approved" digest which logs on bad date', async () => { await expect(approvedDigest('')).rejects.toThrow(); }); + + it('recipientApprovedDigest throws an error when the date is invalid', async () => { + await expect(recipientApprovedDigest('')).rejects.toThrow(); + }); }); describe('training report notifications', () => { @@ -1259,6 +1394,8 @@ describe('mailer tests', () => { process.env.CI = ''; }); afterEach(() => { + // After each test make sure we remove any mocks we set in that test. + jest.resetAllMocks(); process.env.CI = ''; }); const mockEvent = { @@ -1281,20 +1418,163 @@ describe('mailer tests', () => { ); }); it('trSessionCreated error', async () => { - userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); + userById.mockImplementationOnce(() => Promise.resolve({ email: 'user@user.com' })); await trSessionCreated(); expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); expect(auditLogger.error).toHaveBeenCalledTimes(1); }); it('trSessionCreated early return on CI', async () => { process.env.CI = 'true'; - userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); + userById.mockImplementationOnce(() => Promise.resolve({ email: 'user@user.com' })); await trSessionCreated(mockEvent); expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); expect(auditLogger.error).toHaveBeenCalledTimes(0); }); + + it('logs a warning if even.pocIds is an empty array', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: 'test@testgov.gov' })); + await trSessionCreated({ + ...mockEvent, + id: 123, + pocIds: [], + }, 1); + // Expect auditLogger warning to have been called. 
+ + expect(auditLogger.warn).toHaveBeenCalledTimes(1); + expect(auditLogger.warn).toHaveBeenCalledWith('MAILER: No POCs found for TR 123'); + }); + + it('logs a info if emailTo is an empty array', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: null })); + await trSessionCreated({ + ...mockEvent, + id: 123, + pocIds: [1], + }, 1); + // Expect auditLogger warning to have been called. + + expect(logger.info).toHaveBeenCalledTimes(1); + expect(logger.info).toHaveBeenCalledWith('Did not send tr session created notification for 1234 preferences are not set or marked as "no-send"'); + }); + + it('trOwnerAdded returns if process.env.ci is true', async () => { + process.env.CI = 'true'; + await trOwnerAdded(); + expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); + }); + + it('trOwnerAdded correctly gets added to the notificationQueue', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: 'test.owner@govtest.gov' })); + const data = { + eventId: 'tr-1234', + emailTo: ['test.owner@govtest.gov'], + templatePath: 'tr_owner_added', + debugMessage: 'Congrats dude', + displayId: 'mockReport-1', + reportPath: '/asdf/', + report: { + id: 123, + displayId: 'mockReport-1', + }, + }; + + await trOwnerAdded({ + data, + }, jsonTransport); + expect(notificationQueueMock.add).toHaveBeenCalledWith( + EMAIL_ACTIONS.TRAINING_REPORT_EVENT_IMPORTED, + expect.any(Object), + ); + }); + + it('trOwnerAdded correctly logs exceptions', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: 'test@gov.test' })); + const data = { + emailTo: ['test@gov.test'], + templatePath: 'tr_owner_added', + debugMessage: 'Congrats dude', + displayId: 'mockReport-1', + reportPath: '/asdf/', + report: { + id: 123, + displayId: 'mockReport-1', + }, + }; + + await trOwnerAdded({ + data, + }, jsonTransport); + expect(auditLogger.error).toHaveBeenCalledTimes(1); + }); + + it('trEventComplete returns if process.env.ci is true', async () => { + process.env.CI = 'true'; + await trEventComplete(); + expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); + }); + + it('logs the appropriate message when there are no emails to send to', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: null })); + await trEventComplete({ + collaboratorIds: [], + pocIds: [], + data: { + eventId: 'tr-1234', + }, + }, jsonTransport); + expect(logger.info).toHaveBeenCalledTimes(1); + expect(logger.info).toHaveBeenCalledWith('Did not send tr event complete notification for 1234 preferences are not set or marked as "no-send"'); + }); + + it('trEventComplete correctly gets added to the notificationQueue', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: 'test.complete@test.gov' })); + const data = { + eventId: 'tr-1234', + emailTo: ['test.complete@test.gov'], + templatePath: 'tr_event_complete', + debugMessage: 'Congrats dude', + displayId: 'mockReport-1', + reportPath: '/asdf/', + report: { + id: 123, + displayId: 'mockReport-1', + }, + }; + await trEventComplete({ + ownerId: 1, + collaboratorIds: [2], + pocIds: [3], + data, + }, jsonTransport); + expect(notificationQueueMock.add).toHaveBeenCalledWith( + EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, + expect.any(Object), + ); + }); + + it('trEventComplete correctly logs exceptions', async () => { + userById.mockImplementationOnce(() => Promise.resolve({ email: 'test@gov.test' })); + const data = { + emailTo: ['test@gov.test'], + templatePath: 'tr_event_complete', + 
+        debugMessage: 'Congrats dude',
+        displayId: 'mockReport-1',
+        reportPath: '/asdf/',
+        report: {
+          id: 123,
+          displayId: 'mockReport-1',
+        },
+      };
+
+      await trEventComplete({
+        data,
+      }, jsonTransport);
+
+      expect(auditLogger.error).toHaveBeenCalledTimes(1);
+    });
+
    it('trCollaboratorAdded success', async () => {
-      userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' }));
+      userById.mockImplementationOnce(() => Promise.resolve({ email: 'user@user.com' }));
      await trCollaboratorAdded({
        id: 1, data: { val: JSON.stringify(mockEvent.data) },
      }, 1);
@@ -1311,6 +1591,27 @@ describe('mailer tests', () => {
      expect(notificationQueueMock.add).toHaveBeenCalledTimes(0);
      expect(auditLogger.error).toHaveBeenCalledTimes(1);
    });
+
+    it('logs an error if the collaborator is not found', async () => {
+      userById.mockImplementationOnce(() => Promise.resolve(null));
+      await trCollaboratorAdded({
+        id: 1, data: { val: JSON.stringify(mockEvent.data) },
+      }, 1);
+
+      expect(auditLogger.error).toHaveBeenCalledTimes(1);
+    });
+
+    it('logs info if emailTo is an empty array', async () => {
+      userById.mockImplementationOnce(() => Promise.resolve({ email: null }));
+      await trCollaboratorAdded({
+        id: 1, data: { val: JSON.stringify(mockEvent.data) },
+      }, 1);
+      // Expect logger.info to have been called.
+
+      expect(logger.info).toHaveBeenCalledTimes(1);
+      expect(logger.info).toHaveBeenCalledWith('Did not send tr collaborator added notification for 1234 preferences are not set or marked as "no-send"');
+    });
+
    it('trCollaboratorAdded early return', async () => {
      process.env.CI = 'true';
      userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' }));
@@ -1339,4 +1640,19 @@ describe('mailer tests', () => {
      expect(result).toEqual([]);
    });
  });
+
+  describe('programSpecialistRecipientReportApprovedNotification', () => {
+    afterEach(() => {
+      logger.info.mockClear();
+    });
+
+    it('audit logs when an error is thrown from notificationQueue.add', async () => {
+      notificationQueueMock.add.mockImplementationOnce(() => {
+        throw new Error('Error adding to queue');
+      });
+      await programSpecialistRecipientReportApprovedNotification(mockProgramSpecialist, mockReport);
+      expect(auditLogger.error).toHaveBeenCalledTimes(1);
+      expect(auditLogger.error.mock.calls[0][0].message).toContain('Error adding to queue');
+    });
+  });
});
diff --git a/src/lib/mailer/logNotifications.test.js b/src/lib/mailer/logNotifications.test.js
index 9c28f3ec44..3cc50164f1 100644
--- a/src/lib/mailer/logNotifications.test.js
+++ b/src/lib/mailer/logNotifications.test.js
@@ -13,6 +13,8 @@ describe('Email Notifications', () => {
    id: '3',
    name: EMAIL_ACTIONS.COLLABORATOR_ADDED,
    data: {
+      programSpecialists: [{ email: 'mockSpecialist@test.gov' }],
+      recipients: [{ name: 'Mock Recipient' }],
      report: {
        id: 1235,
        displayId: 'AR-04-1235',
@@ -90,6 +92,28 @@ describe('Email Notifications', () => {
    expect(mailerLog.success).toEqual(false);
    expect(mailerLog.result).toEqual(result);
  });
+
+  it('handles missing new collaborator', async () => {
+    const collab = { ...mockJob.data.newCollaborator };
+    mockJob.data.newCollaborator = null;
+    createMailerLogMock.mockResolvedValueOnce({
+      jobId: mockJob.id,
+      emailTo: [''],
+      action: mockJob.name,
+      subject: 'Activity Report AR-04-1235: Added as collaborator',
+      activityReports: [mockJob.data.report.id],
+      success,
+      result,
+    });
+    const mailerLog = await logEmailNotification(mockJob, success, result);
+    expect(mailerLog).not.toBeNull();
+    expect(mailerLog.emailTo.length).toEqual(1);
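+    // (A single empty-string address is assumed to be the intended fallback when
+    // newCollaborator is null, so the notification is still logged rather than dropped.)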
+ expect(mailerLog.emailTo[0]).toEqual(''); + expect(mailerLog.subject).toEqual('Activity Report AR-04-1235: Added as collaborator'); + expect(mailerLog.success).toEqual(false); + expect(mailerLog.result).toEqual(result); + mockJob.data.newCollaborator = collab; + }); it('create a mailer log entry for a submitted report', async () => { mockJob.name = EMAIL_ACTIONS.SUBMITTED; createMailerLogMock.mockResolvedValueOnce({ @@ -109,6 +133,30 @@ describe('Email Notifications', () => { expect(mailerLog.success).toEqual(false); expect(mailerLog.result).toEqual(result); }); + + it('handles missing newApprover for a submitted report', async () => { + mockJob.name = EMAIL_ACTIONS.SUBMITTED; + const approv = { ...mockJob.data.newApprover }; + mockJob.data.newApprover = null; + createMailerLogMock.mockResolvedValueOnce({ + jobId: mockJob.id, + emailTo: [''], + action: mockJob.name, + subject: 'Activity Report AR-04-1235: Submitted for review', + activityReports: [mockJob.data.report.id], + success, + result, + }); + const mailerLog = await logEmailNotification(mockJob, success, result); + expect(mailerLog).not.toBeNull(); + expect(mailerLog.emailTo.length).toEqual(1); + expect(mailerLog.emailTo[0]).toEqual(''); + expect(mailerLog.subject).toEqual('Activity Report AR-04-1235: Submitted for review'); + expect(mailerLog.success).toEqual(false); + expect(mailerLog.result).toEqual(result); + mockJob.data.newApprover = approv; + }); + it('create a mailer log entry for a needs action report', async () => { mockJob.name = EMAIL_ACTIONS.NEEDS_ACTION; createMailerLogMock.mockResolvedValueOnce({ @@ -130,6 +178,30 @@ describe('Email Notifications', () => { expect(mailerLog.success).toEqual(false); expect(mailerLog.result).toEqual(result); }); + it('handles missing author for a needs action report', async () => { + const auth = mockJob.data.report.author; + mockJob.data.report.author = null; + mockJob.name = EMAIL_ACTIONS.NEEDS_ACTION; + createMailerLogMock.mockResolvedValueOnce({ + jobId: mockJob.id, + emailTo: ['', + mockJob.data.report.activityReportCollaborators[0].user.email], + action: mockJob.name, + subject: 'Activity Report AR-04-1235: Changes requested', + activityReports: [mockJob.data.report.id], + success, + result, + }); + const mailerLog = await logEmailNotification(mockJob, success, result); + expect(mailerLog).not.toBeNull(); + expect(mailerLog.emailTo.length).toEqual(2); + expect(mailerLog.emailTo[0]).toEqual(''); + expect(mailerLog.emailTo[1]).toEqual('mockCollaborator@test.gov'); + expect(mailerLog.subject).toEqual('Activity Report AR-04-1235: Changes requested'); + expect(mailerLog.success).toEqual(false); + expect(mailerLog.result).toEqual(result); + mockJob.data.report.author = auth; + }); it('create a mailer log entry for an approved report', async () => { mockJob.name = EMAIL_ACTIONS.APPROVED; createMailerLogMock.mockResolvedValue({ @@ -151,6 +223,52 @@ describe('Email Notifications', () => { expect(mailerLog.success).toEqual(false); expect(mailerLog.result).toEqual(result); }); + + it('handles missing author for an approved report', async () => { + mockJob.name = EMAIL_ACTIONS.APPROVED; + const auth = mockJob.data.report.author; + mockJob.data.report.author = null; + createMailerLogMock.mockResolvedValue({ + jobId: mockJob.id, + emailTo: ['', + mockJob.data.report.activityReportCollaborators[0].user.email], + action: mockJob.name, + subject: 'Activity Report AR-04-1235: Approved', + activityReports: [mockJob.data.report.id], + success, + result, + }); + const mailerLog = await 
logEmailNotification(mockJob, success, result);
+    expect(mailerLog).not.toBeNull();
+    expect(mailerLog.emailTo.length).toBe(2);
+    expect(mailerLog.emailTo[0]).toEqual('');
+    expect(mailerLog.emailTo[1]).toEqual('mockCollaborator@test.gov');
+    expect(mailerLog.subject).toEqual('Activity Report AR-04-1235: Approved');
+    expect(mailerLog.success).toEqual(false);
+    expect(mailerLog.result).toEqual(result);
+    mockJob.data.report.author = auth;
+  });
+  it('create a mailer log entry for a recipient approved report', async () => {
+    mockJob.name = EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED;
+    createMailerLogMock.mockResolvedValue({
+      jobId: mockJob.id,
+      emailTo: [mockJob.data.report.author.email,
+        mockJob.data.report.activityReportCollaborators[0].user.email],
+      action: mockJob.name,
+      subject: 'Activity Report AR-04-1235: Approved',
+      activityReports: [mockJob.data.report.id],
+      success,
+      result,
+    });
+    const mailerLog = await logEmailNotification(mockJob, success, result);
+    expect(mailerLog).not.toBeNull();
+    expect(mailerLog.emailTo.length).toBe(2);
+    expect(mailerLog.emailTo[0]).toEqual('mockAuthor@test.gov');
+    expect(mailerLog.emailTo[1]).toEqual('mockCollaborator@test.gov');
+    expect(mailerLog.subject).toEqual('Activity Report AR-04-1235: Approved');
+    expect(mailerLog.success).toEqual(false);
+    expect(mailerLog.result).toEqual(result);
+  });
  it('logs on error', async () => {
    createMailerLogMock.mockRejectedValueOnce(new Error('Problem creating mailer log'));
    mockJob.name = EMAIL_ACTIONS.APPROVED;
diff --git a/src/lib/mailer/trainingReportTaskDueNotifications.test.js b/src/lib/mailer/trainingReportTaskDueNotifications.test.js
index 2f4588035c..7f34eb9b72 100644
--- a/src/lib/mailer/trainingReportTaskDueNotifications.test.js
+++ b/src/lib/mailer/trainingReportTaskDueNotifications.test.js
@@ -872,4 +872,58 @@ describe('trainingReportTaskDueNotifications', () => {
      },
    ]);
  });
+
+  it('returns null if the user is not found', async () => {
+    getTrainingReportAlerts.mockResolvedValue([
+      // 20 days past event startDate: should send email
+      {
+        id: 1,
+        eventId: 'RO1-012-1234',
+        eventName: 'Event 1',
+        alertType: 'missingEventInfo',
+        sessionName: '',
+        isSession: false,
+        ownerId: 1,
+        pocIds: [2],
+        collaboratorIds: [3],
+        startDate: moment().subtract(20, 'days').format('MM/DD/YYYY'),
+        endDate: today,
+      },
+    ]);
+
+    userById.mockResolvedValue(null);
+
+    const emails = await trainingReportTaskDueNotifications(EMAIL_DIGEST_FREQ.DAILY);
+
+    expect(emails).toEqual([null, null]);
+    expect(userById).toHaveBeenCalledWith(1);
+    expect(userById).toHaveBeenCalledWith(3);
+  });
+
+  it('returns null if the user email is not found', async () => {
+    getTrainingReportAlerts.mockResolvedValue([
+      // 20 days past event startDate: should send email
+      {
+        id: 1,
+        eventId: 'RO1-012-1234',
+        eventName: 'Event 1',
+        alertType: 'missingEventInfo',
+        sessionName: '',
+        isSession: false,
+        ownerId: 1,
+        pocIds: [2],
+        collaboratorIds: [3],
+        startDate: moment().subtract(20, 'days').format('MM/DD/YYYY'),
+        endDate: today,
+      },
+    ]);
+
+    userById.mockResolvedValue({ id: 1, email: null });
+
+    const emails = await trainingReportTaskDueNotifications(EMAIL_DIGEST_FREQ.DAILY);
+
+    expect(emails).toEqual([null, null]);
+    expect(userById).toHaveBeenCalledWith(1);
+    expect(userById).toHaveBeenCalledWith(3);
+  });
});
diff --git a/src/lib/maintenance/db.nextBlock.test.js b/src/lib/maintenance/db.nextBlock.test.js
index 5696622c20..226883d89f 100644
--- a/src/lib/maintenance/db.nextBlock.test.js
+++
b/src/lib/maintenance/db.nextBlock.test.js @@ -39,4 +39,69 @@ describe('nextBlock', () => { raw: true, }); }); + + test('should use default offset and limit when log data is undefined', async () => { + MaintenanceLog.findOne.mockResolvedValueOnce({ data: {} }); + + const result = await nextBlock('type'); + + expect(result).toEqual({ + offset: 0, + limit: numOfModels, + }); + expect(MaintenanceLog.findOne).toHaveBeenCalledWith({ + where: { + category: MAINTENANCE_CATEGORY.DB, + type: 'type', + isSuccessful: true, + }, + order: [['id', 'DESC']], + raw: true, + }); + }); + + test('should calculate newOffset correctly when offset + limit is less than numOfModels', async () => { + MaintenanceLog.findOne.mockResolvedValueOnce({ + data: { offset: 0, limit: 1 }, + }); + + const result = await nextBlock('type'); + + expect(result).toEqual({ + offset: 1, + limit: 1, + }); + expect(MaintenanceLog.findOne).toHaveBeenCalledWith({ + where: { + category: MAINTENANCE_CATEGORY.DB, + type: 'type', + isSuccessful: true, + }, + order: [['id', 'DESC']], + raw: true, + }); + }); + + test('should calculate newLimit correctly when percent is not null', async () => { + MaintenanceLog.findOne.mockResolvedValueOnce({ + data: { offset: 0, limit: 1 }, + }); + + const percent = 0.5; // 50% of numOfModels + const result = await nextBlock('type', percent); + + expect(result).toEqual({ + offset: 1, + limit: Math.floor(numOfModels * percent), + }); + expect(MaintenanceLog.findOne).toHaveBeenCalledWith({ + where: { + category: MAINTENANCE_CATEGORY.DB, + type: 'type', + isSuccessful: true, + }, + order: [['id', 'DESC']], + raw: true, + }); + }); }); diff --git a/src/lib/maintenance/db.test.js b/src/lib/maintenance/db.test.js index 8352ee9929..079e013d1a 100644 --- a/src/lib/maintenance/db.test.js +++ b/src/lib/maintenance/db.test.js @@ -9,10 +9,16 @@ const { reindexTables, dailyMaintenance, dbMaintenance, + enqueueDBMaintenanceJob, } = require('./db'); const { sequelize, MaintenanceLog } = require('../../models'); const { auditLogger } = require('../../logger'); +jest.mock('./common', () => ({ + ...jest.requireActual('./common'), + enqueueMaintenanceJob: jest.fn(), +})); + describe('maintenance', () => { beforeAll(async () => { jest.resetAllMocks(); @@ -74,9 +80,8 @@ describe('maintenance', () => { const command = 'VACUUM ANALYZE'; const category = MAINTENANCE_CATEGORY.DB; const type = MAINTENANCE_TYPE.VACUUM_ANALYZE; - const model = MaintenanceLog; - await tableMaintenanceCommand(command, category, type, model); + await tableMaintenanceCommand(command, category, type, MaintenanceLog); const log = await MaintenanceLog.findOne({ order: [['id', 'DESC']], raw: true }); auditLogger.error(`tableMaintenanceCommand: ${JSON.stringify(log)}`); @@ -89,11 +94,10 @@ describe('maintenance', () => { describe('vacuumTable', () => { it('should call tableMaintenanceCommand with VACUUM and the given model', async () => { - const model = MaintenanceLog; const type = MAINTENANCE_TYPE.VACUUM_ANALYZE; const command = 'VACUUM ANALYZE'; - await vacuumTable(model); + await vacuumTable(MaintenanceLog); const log = await MaintenanceLog.findOne({ order: [['id', 'DESC']], raw: true }); @@ -106,11 +110,10 @@ describe('maintenance', () => { describe('reindexTable', () => { it('should call tableMaintenanceCommand with REINDEX and the given model', async () => { - const model = MaintenanceLog; const type = MAINTENANCE_TYPE.REINDEX; const command = 'REINDEX TABLE'; - await reindexTable(model); + await reindexTable(MaintenanceLog); const log = await 
MaintenanceLog.findOne({ order: [['id', 'DESC']], raw: true }); @@ -153,6 +156,21 @@ describe('maintenance', () => { && typeof log.data.benchmarks[0] === 'number') || (log.type === MAINTENANCE_TYPE.VACUUM_TABLES))).toBe(true); }); + + it('should use default offset and limit values', async () => { + const preLog = await MaintenanceLog.findOne({ order: [['id', 'DESC']], raw: true }); + + await vacuumTables(); + + const logs = await MaintenanceLog.findAll({ + where: { id: { [Op.gt]: preLog.id } }, + order: [['id', 'DESC']], + raw: true, + }); + + expect(logs.length).toBeGreaterThan(0); + expect(logs.every((log) => log.isSuccessful)).toBe(true); + }); }); describe('reindexTables', () => { @@ -187,6 +205,21 @@ describe('maintenance', () => { && typeof log.data.benchmarks[0] === 'number') || (log.type === MAINTENANCE_TYPE.REINDEX_TABLES))).toBe(true); }); + + it('should use default offset and limit values', async () => { + const preLog = await MaintenanceLog.findOne({ order: [['id', 'DESC']], raw: true }); + + await reindexTables(); + + const logs = await MaintenanceLog.findAll({ + where: { id: { [Op.gt]: preLog.id } }, + order: [['id', 'DESC']], + raw: true, + }); + + expect(logs.length).toBeGreaterThan(0); + expect(logs.every((log) => log.isSuccessful)).toBe(true); + }); }); describe('dailyMaintenance', () => { @@ -229,6 +262,21 @@ describe('maintenance', () => { || log.type === MAINTENANCE_TYPE.VACUUM_TABLES || log.type === MAINTENANCE_TYPE.DAILY_DB_MAINTENANCE))).toBe(true); }); + + it('should use default offset and limit values', async () => { + const preLog = await MaintenanceLog.findOne({ order: [['id', 'DESC']], raw: true }); + + await dailyMaintenance(); + + const logs = await MaintenanceLog.findAll({ + where: { id: { [Op.gt]: preLog.id } }, + order: [['id', 'DESC']], + raw: true, + }); + + expect(logs.length).toBeGreaterThan(0); + expect(logs.every((log) => log.isSuccessful)).toBe(true); + }); }); describe('dbMaintenance', () => { @@ -364,4 +412,25 @@ describe('maintenance', () => { expect(error).toBeDefined(); }); }); + + describe('enqueueDBMaintenanceJob', () => { + it('should enqueue a DB maintenance job with default percent value', async () => { + const type = MAINTENANCE_TYPE.DAILY_DB_MAINTENANCE; + const data = { someKey: 'someValue' }; + + // eslint-disable-next-line global-require + const enqueueSpy = require('./common').enqueueMaintenanceJob; + + await enqueueDBMaintenanceJob(type, data); + + expect(enqueueSpy).toHaveBeenCalledTimes(1); + expect(enqueueSpy).toHaveBeenCalledWith( + MAINTENANCE_CATEGORY.DB, + expect.objectContaining({ + type, + someKey: 'someValue', + }), + ); + }); + }); }); diff --git a/src/lib/maintenance/import.test.js b/src/lib/maintenance/import.test.js index 2ed0fd0648..a275e1b34b 100644 --- a/src/lib/maintenance/import.test.js +++ b/src/lib/maintenance/import.test.js @@ -6,6 +6,7 @@ import { importDownload, importProcess, importMaintenance, + enqueue, } from './import'; import { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } from '../../constants'; import { @@ -54,10 +55,10 @@ describe('import', () => { }); describe('enqueueImportMaintenanceJob', () => { - it('should enqueue a maintenance job with the correct category and type', () => { + it('should enqueue a maintenance job with the correct category and type', async () => { const type = MAINTENANCE_TYPE.IMPORT_SCHEDULE; const id = 123; - enqueueImportMaintenanceJob(type, id); + await enqueueImportMaintenanceJob(type, id); expect(enqueueMaintenanceJob).toHaveBeenCalledWith( MAINTENANCE_CATEGORY.IMPORT, { type, 
id }, @@ -310,6 +311,29 @@ describe('import', () => { ); }); + it('should not enqueue any jobs if downloadMore and processMore are falsy', async () => { + const id = 123; + downloadImport.mockResolvedValue([{}, {}]); + moreToDownload.mockResolvedValue(false); + moreToProcess.mockResolvedValue(false); + + await importDownload(id); + expect(maintenanceCommand).toHaveBeenCalledWith( + expect.any(Function), + MAINTENANCE_CATEGORY.IMPORT, + MAINTENANCE_TYPE.IMPORT_DOWNLOAD, + { id }, + ); + const anonymousFunction = maintenanceCommand.mock.calls[0][0]; + const results = await anonymousFunction(); + + expect(downloadImport).toHaveBeenCalledWith(id); + expect(moreToDownload).toHaveBeenCalledWith(id); + expect(moreToProcess).toHaveBeenCalledWith(id); + expect(enqueueMaintenanceJob).not.toHaveBeenCalled(); + expect(results?.isSuccessful).toBe(true); + }); + it('should return an object with isSuccessful false when download fails', async () => { const id = 123; downloadImport.mockResolvedValue([{}, {}]); @@ -536,4 +560,57 @@ describe('import', () => { expect(result?.isSuccessful).toBe(true); }); }); + + describe('enqueue function', () => { + const originalEnv = process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...originalEnv }; + }); + + afterEach(() => { + process.env = originalEnv; + }); + + it('should enqueue job when not in CI and in production with CF_INSTANCE_INDEX 0', () => { + process.env.CI = undefined; + process.env.NODE_ENV = 'production'; + process.env.CF_INSTANCE_INDEX = '0'; + + // eslint-disable-next-line global-require + const enqueued = require('./import').enqueue(); + expect(enqueued).toBe(true); + }); + + it('should not enqueue job when in CI', () => { + process.env.CI = 'true'; + process.env.NODE_ENV = 'production'; + process.env.CF_INSTANCE_INDEX = '0'; + + // eslint-disable-next-line global-require + const enqueued = require('./import').enqueue(); + expect(enqueued).toBe(false); + }); + + it('should enqueue job when not in production', () => { + process.env.CI = undefined; + process.env.NODE_ENV = 'development'; + process.env.CF_INSTANCE_INDEX = '0'; + + // eslint-disable-next-line global-require + const enqueued = require('./import').enqueue(); + expect(enqueued).toBe(true); + }); + + it('should not enqueue job when in production but CF_INSTANCE_INDEX is not 0', () => { + process.env.CI = undefined; + process.env.NODE_ENV = 'production'; + process.env.CF_INSTANCE_INDEX = '1'; + + // eslint-disable-next-line global-require + const enqueued = require('./import').enqueue(); + expect(enqueued).toBe(false); + }); + }); }); diff --git a/src/lib/maintenance/import.ts b/src/lib/maintenance/import.ts index 0099cd656b..5a414bb6fc 100644 --- a/src/lib/maintenance/import.ts +++ b/src/lib/maintenance/import.ts @@ -299,42 +299,41 @@ const importMaintenance = async (job) => { id, } = job.data; - // Declare a variable to hold the action to be performed - let action; - // Use a switch statement to determine the action based on the job type switch (type) { // If the job type is import schedule, call the importSchedule function case MAINTENANCE_TYPE.IMPORT_SCHEDULE: - action = await importSchedule(); - break; + return importSchedule(); // If the job type is import download, call the importDownload function with the provided id case MAINTENANCE_TYPE.IMPORT_DOWNLOAD: - action = await importDownload(id); - break; + return importDownload(id); // If the job type is import process, call the importProcess function with the provided id case MAINTENANCE_TYPE.IMPORT_PROCESS: - 
action = await importProcess(id); - break; + return importProcess(id); // If the job type does not match any case, throw an error default: throw new Error('Unknown type'); } - - // Return the result of the action performed - return action; }; addQueueProcessor(MAINTENANCE_CATEGORY.IMPORT, importMaintenance); // TODO: commented out to prevent scheduled execution, as there is a concurrency issue that still // needs to be addressed -if (!process.env.CI - && ((process.env.CF_INSTANCE_INDEX === '0' && process.env.NODE_ENV === 'production') - || process.env.NODE_ENV !== 'production')) { - enqueueImportMaintenanceJob(MAINTENANCE_TYPE.IMPORT_SCHEDULE, undefined, 'index', false); -} + +const enqueue = () => { + if (!process.env.CI + && ((process.env.CF_INSTANCE_INDEX === '0' && process.env.NODE_ENV === 'production') + || process.env.NODE_ENV !== 'production')) { + enqueueImportMaintenanceJob(MAINTENANCE_TYPE.IMPORT_SCHEDULE, undefined, 'index', false); + return true; + } + return false; +}; + +enqueue(); export { + enqueue, enqueueImportMaintenanceJob, scheduleImportCrons, importSchedule, diff --git a/src/lib/stream/encoding.ts b/src/lib/stream/encoding.ts index 97b382050c..f153f5c650 100644 --- a/src/lib/stream/encoding.ts +++ b/src/lib/stream/encoding.ts @@ -47,29 +47,23 @@ class EncodingConverter extends Transform { // eslint-disable-next-line no-underscore-dangle _transform( chunk: Buffer, - encoding: string, + _encoding: string, callback: (error?: Error | null, data?: Buffer) => void, ): void { if (this.detectEncoding) { // Continue collecting chunks until we have enough to detect the encoding - this.buffer = Buffer.concat([this.buffer, chunk]); + this.buffer = Buffer.concat([new Uint8Array(this.buffer), new Uint8Array(chunk)]); if (this.buffer.length >= 1024) { // We have enough data to detect the encoding this.detectEncoding = false; // Set flag to false as we've detected the encoding // Default to utf-8 if no encoding is detected - // this.sourceEncoding = chardet.detect(this.buffer) || 'utf-8'; - const detectedEncoding = chardet.detect(this.buffer); + const detectedEncoding = chardet.detect(new Uint8Array(this.buffer)); // Check if the detected encoding is supported - if (detectedEncoding - && EncodingConverter.supportedEncodings - .has(detectedEncoding.toLowerCase() as BufferEncoding)) { - this.sourceEncoding = detectedEncoding.toLowerCase(); - } else { - throw new Error(`Unsupported encoding detected: ${detectedEncoding}`); - } - - this.sourceEncoding = chardet.analyse(this.buffer)?.[0]?.name || 'utf-8'; + // eslint-disable-next-line max-len + this.sourceEncoding = detectedEncoding && EncodingConverter.supportedEncodings.has(detectedEncoding.toLowerCase() as BufferEncoding) + ? detectedEncoding.toLowerCase() + : 'utf-8'; // If the source encoding matches the target encoding, pass through the entire buffer if (this.sourceEncoding === this.targetEncoding) { @@ -118,7 +112,7 @@ class EncodingConverter extends Transform { if (this.detectEncoding && this.buffer.length > 0) { // If flush is called and we're still detecting the encoding, // perform the conversion on the remaining buffer. 
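      // (The Uint8Array wrapping here, as in _transform above, presumably exists to
      // satisfy chardet's TypeScript signature, which accepts a Uint8Array; Node's
      // Buffer is already a Uint8Array subclass, so runtime behavior is unchanged.)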
- this.sourceEncoding = chardet.detect(this.buffer) || 'utf-8'; + this.sourceEncoding = chardet.detect(new Uint8Array(this.buffer)) || 'utf-8'; // If the source encoding matches the target encoding, pass through the remaining buffer if (this.sourceEncoding === this.targetEncoding) { diff --git a/src/lib/stream/tests/buffer.test.js b/src/lib/stream/tests/buffer.test.js index a10173b87f..1178bf648f 100644 --- a/src/lib/stream/tests/buffer.test.js +++ b/src/lib/stream/tests/buffer.test.js @@ -160,4 +160,39 @@ describe('BufferStream', () => { // Verify the data matches what was written expect(data).toBe('test data'); }); + + it('should return a resolved promise with a readable stream if already finished', async () => { + bufferStream.end(); + const readable = await bufferStream.getReadableStream(); + expect(readable).toBeInstanceOf(Readable); + }); + + it('should return a promise that resolves immediately if the stream is finished', async () => { + bufferStream.write('test data'); + bufferStream.end(); + + // eslint-disable-next-line no-promise-executor-return + await new Promise((resolve) => bufferStream.on('finish', resolve)); + + const readable = await bufferStream.getReadableStream(); + const chunks = []; + readable.on('data', (chunk) => chunks.push(chunk)); + // eslint-disable-next-line no-promise-executor-return + await new Promise((resolve) => readable.on('end', resolve)); + expect(Buffer.concat(chunks).toString()).toBe('test data'); + }); + + it('should return the same promise if called multiple times before finishing', async () => { + const promise1 = bufferStream.getReadableStream(); + const promise2 = bufferStream.getReadableStream(); + expect(promise1).toBe(promise2); + bufferStream.write('test data'); + bufferStream.end(); + const readable = await promise1; + const chunks = []; + readable.on('data', (chunk) => chunks.push(chunk)); + // eslint-disable-next-line no-promise-executor-return + await new Promise((resolve) => readable.on('end', resolve)); + expect(Buffer.concat(chunks).toString()).toBe('test data'); + }); }); diff --git a/src/lib/stream/tests/encoding.test.js b/src/lib/stream/tests/encoding.test.js index 18b1da6fa7..e35fc740f4 100644 --- a/src/lib/stream/tests/encoding.test.js +++ b/src/lib/stream/tests/encoding.test.js @@ -144,4 +144,129 @@ describe('EncodingConverter', () => { }); }); }); + + it('should convert the buffer correctly in convertBuffer', () => { + const sourceEncoding = 'utf16le'; + const targetEncoding = 'utf-8'; + const converter = new EncodingConverter(targetEncoding, sourceEncoding); + const buffer = Buffer.from('Hello, world!', sourceEncoding); + const pushSpy = jest.spyOn(converter, 'push'); + + return new Promise((resolve, reject) => { + converter.buffer = buffer; + converter.convertBuffer((error) => { + if (error) { + reject(error); + return; + } + expect(error).toBeUndefined(); + expect(pushSpy).toHaveBeenCalled(); + // eslint-disable-next-line max-len + const result = Buffer.concat(pushSpy.mock.calls.map((call) => call[0])).toString(targetEncoding); + expect(result).toBe('Hello, world!'); + resolve(); + }); + }); + }); + + it('should call the callback with an error in convertBuffer on failure', () => { + const sourceEncoding = 'utf16le'; + const targetEncoding = 'utf-8'; + const converter = new EncodingConverter(targetEncoding, sourceEncoding); + const buffer = Buffer.from('Hello, world!', sourceEncoding); + const pushSpy = jest.spyOn(converter, 'push'); + jest.spyOn(buffer, 'toString').mockImplementation(() => { + throw new Error('Conversion 
error'); + }); + + return new Promise((resolve, reject) => { + converter.buffer = buffer; + converter.convertBuffer((error) => { + expect(error).toBeDefined(); + expect(error.message).toBe('Conversion error'); + expect(pushSpy).not.toHaveBeenCalled(); + resolve(); + }); + }); + }); + + it('should call the callback with an error in convertChunk on failure', () => { + const sourceEncoding = 'utf16le'; + const targetEncoding = 'utf-8'; + const converter = new EncodingConverter(targetEncoding, sourceEncoding); + const chunk = Buffer.from('Hello, world!', sourceEncoding); + const pushSpy = jest.spyOn(converter, 'push'); + jest.spyOn(chunk, 'toString').mockImplementation(() => { + throw new Error('Conversion error'); + }); + + return new Promise((resolve, reject) => { + converter.convertChunk(chunk, (error) => { + expect(error).toBeDefined(); + expect(error.message).toBe('Conversion error'); + expect(pushSpy).not.toHaveBeenCalled(); + resolve(); + }); + }); + }); + + it('should throw an error for unsupported source encoding', () => { + const targetEncoding = 'utf-8'; + const unsupportedSourceEncoding = 'unsupported-encoding'; + + expect(() => new EncodingConverter(targetEncoding, unsupportedSourceEncoding)) + .toThrow(`Unsupported encoding detected: ${unsupportedSourceEncoding}`); + }); + + it('should detect encoding when buffer length is >= 1024', () => { + const sourceEncoding = 'utf16le'; + const targetEncoding = 'utf-8'; + const converter = new EncodingConverter(targetEncoding); + + // Create a readable stream with a buffer length >= 1024 + const readable = new Readable(); + const largeBuffer = Buffer.alloc(1024, 'a', sourceEncoding); + readable.push(largeBuffer); + readable.push(null); // Signal end of stream + + const chunks = []; + + return new Promise((resolve) => { + converter.on('data', (chunk) => chunks.push(chunk)); + converter.on('end', () => { + const result = Buffer.concat(chunks).toString(targetEncoding); + expect(result).toBe(largeBuffer.toString(targetEncoding)); + resolve(); + }); + + readable.pipe(converter); + }); + }); + + it('should fall back to utf-8 when chardet.analyse does not detect encoding', () => { + const targetEncoding = 'utf-8'; + const converter = new EncodingConverter(targetEncoding); + + // Create a readable stream with a buffer length >= 1024 + const readable = new Readable(); + const largeBuffer = Buffer.alloc(1024, 'a', 'utf16le'); + readable.push(largeBuffer); + readable.push(null); // Signal end of stream + + const chunks = []; + + mockDetect.mockReturnValueOnce(undefined); + mockAnalyse.mockReturnValueOnce([]); + + return new Promise((resolve) => { + converter.on('data', (chunk) => chunks.push(chunk)); + converter.on('end', () => { + const result = Buffer.concat(chunks).toString(targetEncoding); + expect(result).toBe(largeBuffer.toString('utf-8')); + resolve(); + }); + + readable.pipe(converter); + }); + }); }); diff --git a/src/lib/stream/tests/zip.test.js b/src/lib/stream/tests/zip.test.js index e04e55075c..f11ea54f84 100644 --- a/src/lib/stream/tests/zip.test.js +++ b/src/lib/stream/tests/zip.test.js @@ -18,6 +18,7 @@ jest.mock('unzipper', () => { vars: { uncompressedSize: 100, lastModifiedDateTime: new Date('2020-01-01'), + crc32: '1234abcd', }, pipe: jest.fn(), autodrain: jest.fn(), @@ -61,6 +62,7 @@ describe('ZipStream', () => { type: 'File', size: 100, date: new Date('2020-01-01'), + crc32: '1234abcd', }); }); @@ -73,6 +75,7 @@ describe('ZipStream', () => { const fileDetails = await zipStream.getAllFileDetails(); expect(fileDetails).toEqual([ { + 
crc32: '1234abcd', name: 'file.txt', path: 'folder', type: 'File', @@ -86,4 +89,22 @@ describe('ZipStream', () => { const fileStream = await zipStream.getFileStream('nonexistent/file.txt'); expect(fileStream).toBeNull(); }); + + test('constructor should handle default filesNeedingStreams', async () => { + const zipStreamDefault = new ZipStream(mockReadable); + const files = await zipStreamDefault.listFiles(); + expect(files).toEqual(['folder/file.txt']); + }); + + test('getFileDetails should include crc32 if present', async () => { + const fileInfo = await zipStream.getFileDetails('folder/file.txt'); + expect(fileInfo).toEqual({ + name: 'file.txt', + path: 'folder', + type: 'File', + size: 100, + date: new Date('2020-01-01'), + crc32: '1234abcd', + }); + }); }); diff --git a/src/middleware/checkIdParamMiddleware.js b/src/middleware/checkIdParamMiddleware.js index b7d077b0b0..8a34832203 100644 --- a/src/middleware/checkIdParamMiddleware.js +++ b/src/middleware/checkIdParamMiddleware.js @@ -21,7 +21,7 @@ export function checkActivityReportIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: activityReportId ${req.params ? (req.params.activityReportId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: activityReportId ${String(req?.params?.activityReportId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -44,7 +44,7 @@ export function checkFileIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: fileId ${req.params ? (req.params.fileId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: fileId ${String(req?.params?.fileId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -63,7 +63,7 @@ export function checkReportIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: reportId ${req.params ? (req.params.reportId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: reportId ${String(req?.params?.reportId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -82,7 +82,7 @@ export function checkObjectiveIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: objectiveId ${req.params ? (req.params.objectiveId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: objectiveId ${String(req?.params?.objectiveId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -101,7 +101,7 @@ export function checkObjectiveTemplateIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: objectiveTemplateId ${req.params ? (req.params.objectiveTemplateId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: objectiveTemplateId ${String(req?.params?.objectiveTemplateId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -120,7 +120,7 @@ export function checkGroupIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: groupId ${req.params ? (req.params.groupId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: groupId ${String(req?.params?.groupId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -139,7 +139,7 @@ export function checkAlertIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: alertId ${req.params ? 
(req.params.alertId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: alertId ${String(req?.params?.alertId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } @@ -158,7 +158,7 @@ export function checkGoalTemplateIdParam(req, res, next) { return next(); } - const msg = `${errorMessage}: goalTemplateId ${req.params ? (req.params.goalTemplateId || 'undefined') : 'undefined'}`; + const msg = `${errorMessage}: goalTemplateId ${String(req?.params?.goalTemplateId)}`; auditLogger.error(msg); return res.status(httpCodes.BAD_REQUEST).send(msg); } diff --git a/src/middleware/checkIdParamMiddleware.test.js b/src/middleware/checkIdParamMiddleware.test.js index d490bd5bba..bf890de715 100644 --- a/src/middleware/checkIdParamMiddleware.test.js +++ b/src/middleware/checkIdParamMiddleware.test.js @@ -12,6 +12,8 @@ import { checkIdIdParam, checkCommunicationLogIdParam, checkGoalGroupIdParam, + checkGoalTemplateIdParam, + checkSessionAttachmentIdParam, } from './checkIdParamMiddleware'; import { auditLogger } from '../logger'; @@ -67,6 +69,15 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: activityReportId undefined`); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if activity report param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkActivityReportIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: activityReportId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkFileIdParam', () => { @@ -110,6 +121,29 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: fileId undefined`); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if fileId param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkFileIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: fileId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); + + it('throw 400 if fileId is undefined', () => { + const mockRequest = { + path: '/api/endpoint', + params: { + fileId: undefined, + }, + }; + + checkFileIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: fileId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkReportIdParam', () => { @@ -154,6 +188,15 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: reportId undefined`); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if reportId param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkReportIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: reportId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkObjectiveIdParam', () => { @@ -170,6 +213,15 @@ describe('checkIdParamMiddleware', () => { expect(mockNext).toHaveBeenCalled(); }); + it('throw 400 if param object is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + 
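+      // With params = {}, the middleware's String(req?.params?.objectiveId)
+      // stringifies undefined to the literal text 'undefined', which the asserted
+      // message below relies on.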
+ checkObjectiveIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: objectiveId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); + it('throw 400 if param is not string or integer', () => { const mockRequest = { path: '/api/endpoint', @@ -195,6 +247,15 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalled(); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if objectiveId param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkObjectiveIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: objectiveId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkAlertIdParam', () => { @@ -252,6 +313,15 @@ describe('checkIdParamMiddleware', () => { expect(mockNext).toHaveBeenCalled(); }); + it('throw 400 if param object is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkObjectiveTemplateIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: objectiveTemplateId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); + it('throw 400 if param is not string or integer', () => { const mockRequest = { path: '/api/endpoint', @@ -277,6 +347,15 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalled(); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if objectiveTemplateId param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkObjectiveTemplateIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: objectiveTemplateId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkGroupIdParam', () => { @@ -318,6 +397,15 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalled(); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if groupId param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkGroupIdParam(mockRequest, mockResponse, mockNext); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: groupId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkIdParam', () => { @@ -359,6 +447,15 @@ describe('checkIdParamMiddleware', () => { expect(auditLogger.error).toHaveBeenCalled(); expect(mockNext).not.toHaveBeenCalled(); }); + + it('throw 400 if arbitraryId param is undefined', () => { + const mockRequest = { path: '/api/endpoint', params: {} }; + + checkIdParam(mockRequest, mockResponse, mockNext, 'arbitraryId'); + expect(mockResponse.status).toHaveBeenCalledWith(400); + expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: arbitraryId undefined`); + expect(mockNext).not.toHaveBeenCalled(); + }); }); describe('checkRecipientIdParam', () => { @@ -483,6 +580,7 @@ describe('checkIdParamMiddleware', () => { expect(mockNext).not.toHaveBeenCalled(); }); }); + describe('checkCommunicationLogIdParam', () => { it('calls next if id is string or integer', () => { const mockRequest = { @@ 
-564,4 +662,48 @@ describe('checkIdParamMiddleware', () => {
      expect(mockNext).not.toHaveBeenCalled();
    });
  });
+
+  describe('checkGoalTemplateIdParam', () => {
+    it('calls next if goalTemplateId is string or integer', () => {
+      const mockRequest = {
+        path: '/api/endpoint',
+        params: {
+          goalTemplateId: '2',
+        },
+      };
+
+      checkGoalTemplateIdParam(mockRequest, mockResponse, mockNext);
+      expect(mockResponse.status).not.toHaveBeenCalled();
+      expect(mockNext).toHaveBeenCalled();
+    });
+
+    it('throw 400 if param is not string or integer', () => {
+      const mockRequest = {
+        path: '/api/endpoint',
+        params: {
+          goalTemplateId: '2D',
+        },
+      };
+
+      checkGoalTemplateIdParam(mockRequest, mockResponse, mockNext);
+      expect(mockResponse.status).toHaveBeenCalledWith(400);
+      expect(auditLogger.error).toHaveBeenCalledWith(`${errorMessage}: goalTemplateId 2D`);
+      expect(mockNext).not.toHaveBeenCalled();
+    });
+  });
+
+  describe('checkSessionAttachmentIdParam', () => {
+    it('calls next if sessionAttachmentId is string or integer', () => {
+      const mockRequest = {
+        path: '/api/endpoint',
+        params: {
+          sessionAttachmentId: '2',
+        },
+      };
+
+      checkSessionAttachmentIdParam(mockRequest, mockResponse, mockNext);
+      expect(mockResponse.status).not.toHaveBeenCalled();
+      expect(mockNext).toHaveBeenCalled();
+    });
+  });
});
diff --git a/src/migrations/20250107000000-clean-orphan-objectives.js b/src/migrations/20250107000000-clean-orphan-objectives.js
new file mode 100644
index 0000000000..5f00acf99c
--- /dev/null
+++ b/src/migrations/20250107000000-clean-orphan-objectives.js
@@ -0,0 +1,60 @@
+const { prepMigration } = require('../lib/migration');
+
+/** @type {import('sequelize-cli').Migration} */
+module.exports = {
+  async up(queryInterface) {
+    await queryInterface.sequelize.transaction(async (transaction) => {
+      const sessionSig = __filename;
+      await prepMigration(queryInterface, transaction, sessionSig);
+      return queryInterface.sequelize.query(`
+        -- This marks as deleted any objectives for which all of the following are true:
+        -- - created within an AR
+        -- - Is currently linked to no AR
+        -- - Is not already marked as deleted
+
+        DROP TABLE IF EXISTS orphan_obj;
+        CREATE TEMP TABLE orphan_obj AS
+        SELECT o.id oid
+        FROM "Objectives" o
+        LEFT JOIN "ActivityReportObjectives" aro
+          ON o.id = aro."objectiveId"
+        WHERE o."createdVia" = 'activityReport'
+          AND o."deletedAt" IS NULL
+          AND aro.id IS NULL
+        ORDER BY 1;
+
+        DROP TABLE IF EXISTS updated_obj;
+        CREATE TEMP TABLE updated_obj
+        AS
+        WITH nowtime AS (SELECT NOW() nowts)
+        , updater AS (
+          UPDATE "Objectives"
+          SET "deletedAt" = nowts
+          FROM orphan_obj
+          CROSS JOIN nowtime
+          WHERE oid = id
+          RETURNING id deleted_oid
+        )
+        SELECT * FROM updater
+        ;
+
+        -- The first two numbers should match and the last should be 0
+        SELECT 1 ord,'orphaned objectives' item, COUNT(*) cnt FROM orphan_obj
+        UNION
+        SELECT 2, 'objectives marked deleted' , COUNT(*) FROM updated_obj
+        UNION
+        SELECT 3, 'remaining orphaned objectives', COUNT(*) FROM (
+          SELECT * FROM orphan_obj
+          EXCEPT
+          SELECT * FROM updated_obj
+        ) a
+        ORDER BY 1
+        ;
+      `);
+    });
+  },
+
+  async down() {
+    // no rollbacks
+  },
+};
diff --git a/src/models/tests/activityReportObjectiveResource.test.js b/src/models/tests/activityReportObjectiveResource.test.js
new file mode 100644
index 0000000000..09b7004880
--- /dev/null
+++ b/src/models/tests/activityReportObjectiveResource.test.js
@@ -0,0 +1,27 @@
+import { ActivityReportObjectiveResource, Resource } from '../index';
+import { SOURCE_FIELD } from '../../constants';
+
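+// For orientation, a minimal sketch of the virtual getters these tests exercise
+// (illustrative only; AUTO_DETECTED_FIELDS is a hypothetical name and the real
+// model definition may differ):
+//
+//   get isAutoDetected() {
+//     return (this.sourceFields || []).some(
+//       (field) => AUTO_DETECTED_FIELDS.includes(field),
+//     );
+//   }
+//
+//   get userProvidedUrl() {
+//     return this.resource ? this.resource.url : '';
+//   }
+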
+describe('ActivityReportObjectiveResource model', () => { + describe('isAutoDetected getter', () => { + it('returns false when sourceFields does not contain auto-detected fields', () => { + const instance = ActivityReportObjectiveResource.build({ + sourceFields: [SOURCE_FIELD.REPORTOBJECTIVE.USER_PROVIDED], + }); + expect(instance.isAutoDetected).toBe(false); + }); + }); + + describe('userProvidedUrl getter', () => { + it('returns an empty string if resource is not present', () => { + const instance = ActivityReportObjectiveResource.build({}); + expect(instance.userProvidedUrl).toBe(''); + }); + + it('returns the resource URL if resource is present', () => { + const resource = Resource.build({ url: 'http://example.com' }); + const instance = ActivityReportObjectiveResource.build({}); + instance.resource = resource; + expect(instance.userProvidedUrl).toBe('http://example.com'); + }); + }); +}); diff --git a/src/models/tests/collaboratorType.test.js b/src/models/tests/collaboratorType.test.js new file mode 100644 index 0000000000..5a6e165d7f --- /dev/null +++ b/src/models/tests/collaboratorType.test.js @@ -0,0 +1,23 @@ +import { CollaboratorType } from '..'; + +describe('CollaboratorType Model', () => { + let instance; + let mapsToInstance; + + beforeAll(async () => { + mapsToInstance = await CollaboratorType.create({ name: 'Mapped Collaborator', validForId: 1 }); + instance = await CollaboratorType.create({ name: 'Original Collaborator', mapsTo: mapsToInstance.id, validForId: 1 }); + }); + + it('should return correct latestName and latestId when mapsTo is not defined', () => { + const newInstance = CollaboratorType.build({ name: 'Standalone Collaborator', validForId: 1 }); + expect(newInstance.latestName).toEqual('Standalone Collaborator'); + expect(newInstance.latestId).toBeNull(); + }); + + it('should return correct latestName and latestId when mapsTo is defined', async () => { + const newInstance = await CollaboratorType.findByPk(instance.id); + expect(newInstance.latestName).toEqual('Mapped Collaborator'); + expect(newInstance.latestId).toEqual(mapsToInstance.id); + }); +}); diff --git a/src/models/tests/goalTemplateFieldPrompt.test.js b/src/models/tests/goalTemplateFieldPrompt.test.js new file mode 100644 index 0000000000..348d076818 --- /dev/null +++ b/src/models/tests/goalTemplateFieldPrompt.test.js @@ -0,0 +1,13 @@ +import { GoalTemplateFieldPrompt } from '..'; + +describe('GoalTemplateFieldPrompt Model', () => { + it('should return true for isRequired when validations.required is true', () => { + const instance = GoalTemplateFieldPrompt.build({ validations: { required: true } }); + expect(instance.isRequired).toBe(true); + }); + + it('should return false for isRequired when validations.required is false', () => { + const instance = GoalTemplateFieldPrompt.build({ validations: { required: false } }); + expect(instance.isRequired).toBe(false); + }); +}); diff --git a/src/models/tests/grant.test.js b/src/models/tests/grant.test.js index c6fa393033..4f87f8b26a 100644 --- a/src/models/tests/grant.test.js +++ b/src/models/tests/grant.test.js @@ -4,7 +4,7 @@ import db, { Program, } from '..'; -describe('Goals', () => { +describe('Grants', () => { let grant; beforeAll(async () => { grant = await Grant.unscoped().findOne({ @@ -36,10 +36,70 @@ describe('Goals', () => { expect(grant.numberWithProgramTypes) .toStrictEqual(`${grant.dataValues.number} ${grant.programTypes?.join(', ')}`); }); + it('numberWithProgramTypes with program types', async () => { + const grantWithPrograms = await 
Grant.unscoped().findOne({ + include: [ + { + model: Program, + as: 'programs', + where: { programType: { [db.Sequelize.Op.ne]: null } }, + }, + ], + order: [['id', 'ASC']], + limit: 1, + }); + expect(grantWithPrograms.numberWithProgramTypes) + .toContain(` - ${grantWithPrograms.programTypes.join(', ')}`); + }); it('recipientInfo', () => { expect(grant.recipientInfo) .toStrictEqual(grant.recipient ? `${grant.recipient.name} - ${grant.dataValues.number} - ${grant.dataValues.recipientId}` : `${grant.dataValues.number} - ${grant.dataValues.recipientId}`); }); + it('recipientNameWithPrograms with program types', async () => { + const grantWithPrograms = await Grant.unscoped().findOne({ + include: [ + { + model: Recipient.unscoped(), + as: 'recipient', + }, + { + model: Program, + as: 'programs', + where: { programType: { [db.Sequelize.Op.ne]: null } }, + }, + ], + order: [['id', 'ASC']], + limit: 1, + }); + const programsList = grantWithPrograms.programTypes.join(', '); + expect(grantWithPrograms.recipientNameWithPrograms) + .toContain(` - ${programsList}`); + }); + describe('grant without recipient', () => { + it('recipientInfo', async () => { + const g = await Grant.unscoped().findOne({ + order: [['id', 'ASC']], + limit: 1, + }); + expect(g.recipientInfo).toBe(`${g.dataValues.number} - ${g.dataValues.recipientId}`); + }); + + it('recipientNameWithPrograms', async () => { + const g = await Grant.unscoped().findOne({ + order: [['id', 'ASC']], + limit: 1, + }); + expect(g.recipientNameWithPrograms).toBe(`${g.dataValues.number} - ${g.dataValues.recipientId}`); + }); + + it('name', async () => { + const g = await Grant.unscoped().findOne({ + order: [['id', 'ASC']], + limit: 1, + }); + expect(g.name).toContain(`${g.numberWithProgramTypes}`); + }); + }); }); diff --git a/src/models/topic.js b/src/models/topic.js index f5d3b91062..9f89359d67 100644 --- a/src/models/topic.js +++ b/src/models/topic.js @@ -25,6 +25,22 @@ export default (sequelize, DataTypes) => { otherKey: 'activityReportObjectiveId', as: 'activityReportObjectives', }); + + models.Topic.belongsTo( + models.Topic, + { + foreignKey: 'mapsTo', + as: 'mapsToTopic', + }, + ); + + models.Topic.hasMany( + models.Topic, + { + foreignKey: 'mapsTo', + as: 'mapsFromTopics', + }, + ); } } Topic.init({ diff --git a/src/models/validFor.js b/src/models/validFor.js index 560e46a290..2809e168ca 100644 --- a/src/models/validFor.js +++ b/src/models/validFor.js @@ -11,12 +11,12 @@ const { ENTITY_TYPE } = require('../constants'); export default (sequelize, DataTypes) => { class ValidFor extends Model { static associate(models) { - ValidFor.belongsTo(models.ValidFor.scope(), { + ValidFor.belongsTo(models.ValidFor, { foreignKey: 'mapsTo', as: 'mapsToValidFor', }); - ValidFor.hasMany(models.ValidFor.scope(), { + ValidFor.hasMany(models.ValidFor, { foreignKey: 'mapsTo', as: 'mapsFromValidFor', }); @@ -80,22 +80,6 @@ export default (sequelize, DataTypes) => { key: 'id', }, }, - latestName: { - type: DataTypes.VIRTUAL(DataTypes.STRING), - get() { - return this.get('mapsTo') - ? this.get('mapsToCollaboratorType').get('name') - : this.get('name'); - }, - }, - latestId: { - type: DataTypes.VIRTUAL(DataTypes.INTEGER), - get() { - return this.get('mapsTo') - ? 
this.get('mapsToValidFor').get('id') - : this.get('id'); - }, - }, }, { sequelize, modelName: 'ValidFor', diff --git a/src/policies/event.test.js b/src/policies/event.test.js index 333c6d2e74..d1618f305e 100644 --- a/src/policies/event.test.js +++ b/src/policies/event.test.js @@ -466,4 +466,55 @@ describe('Event Report policies', () => { expect(policy.canSeeAlerts()).toBe(false); }); }); + + describe('canRead', () => { + it('is true if the user has read permissions in the region', () => { + const eventRegion1 = createEvent({ ownerId: authorRegion1, regionId: 1 }); + const policy = new EventReport(authorRegion1, eventRegion1); + expect(policy.canRead()).toBe(true); + }); + }); + + describe('hasPocInRegion', () => { + it('is true if the user has poc permissions in the region', () => { + const eventRegion1 = createEvent({ ownerId: pocRegion1, regionId: 1 }); + const policy = new EventReport(pocRegion1, eventRegion1); + expect(policy.hasPocInRegion()).toBe(true); + }); + }); + + describe('canWriteInRegion', () => { + it('is true if the user is an admin', () => { + const eventRegion1 = createEvent({ ownerId: admin, regionId: 1 }); + const policy = new EventReport(admin, eventRegion1); + expect(policy.canWriteInRegion()).toBe(true); + }); + + it('is true if the user has write permissions in the specified region', () => { + const eventRegion1 = createEvent({ ownerId: authorRegion1, regionId: 1 }); + const policy = new EventReport(authorRegion1, eventRegion1); + expect(policy.canWriteInRegion(1)).toBe(true); + }); + }); + + describe('readableRegions', () => { + it('returns an array of region IDs for which the user has read permissions', () => { + const policy = new EventReport(authorRegion1, {}); + expect(policy.readableRegions).toContain(1); + }); + }); + + describe('writableRegions', () => { + it('returns an array of region IDs for which the user has write permissions', () => { + const policy = new EventReport(authorRegion1, {}); + expect(policy.writableRegions).toContain(1); + }); + }); + + describe('canGetTrainingReportUsersInRegion', () => { + it('is true if the user has write or poc permissions in the region', () => { + const policy = new EventReport(pocRegion1, {}); + expect(policy.canGetTrainingReportUsersInRegion(1)).toBe(true); + }); + }); }); diff --git a/src/queries/api/dashboards/qa/class.sql b/src/queries/api/dashboards/qa/class.sql index 8fbb7f0f9b..386dc39559 100644 --- a/src/queries/api/dashboards/qa/class.sql +++ b/src/queries/api/dashboards/qa/class.sql @@ -606,12 +606,15 @@ BEGIN ) AS json_values WHERE json_values.value = ( CASE - -- Get the max reportDeliveryDate for the instructionalSupport domain + -- Get the max reportDeliveryDate for the instructionalSupport domain to apply the correct threshold logic + -- These dates are set by OHS policy and were delayed by two years in July 2024: + -- Final Rule to Delay Effective Date for Increasing the CLASS Instructional Support Domain Competitive Threshold + -- https://eclkc.ohs.acf.hhs.gov/policy/pi/acf-ohs-pi-24-07 WHEN (ARRAY_AGG(mcs."instructionalSupport" ORDER BY mcs."reportDeliveryDate" DESC))[1] >= 3 THEN 'Above all thresholds' - WHEN (MAX(mcs."reportDeliveryDate") >= '2025-08-01' + WHEN (MAX(mcs."reportDeliveryDate") >= '2027-08-01' AND (ARRAY_AGG(mcs."instructionalSupport" ORDER BY mcs."reportDeliveryDate" DESC))[1] < 2.5) THEN 'Below competitive' - WHEN (MAX(mcs."reportDeliveryDate") BETWEEN '2020-11-09' AND '2025-07-31' + WHEN (MAX(mcs."reportDeliveryDate") BETWEEN '2020-11-09' AND '2027-07-31' AND 
(ARRAY_AGG(mcs."instructionalSupport" ORDER BY mcs."reportDeliveryDate" DESC))[1] < 2.3) THEN 'Below competitive' ELSE 'Below quality' diff --git a/src/queries/dataRequests/internal/monthly-delivery-report.sql b/src/queries/dataRequests/internal/monthly-delivery-report.sql index faac191a71..7245a271e8 100644 --- a/src/queries/dataRequests/internal/monthly-delivery-report.sql +++ b/src/queries/dataRequests/internal/monthly-delivery-report.sql @@ -198,6 +198,39 @@ WITH JOIN "UserRoles" ur ON u.id = ur."userId" JOIN "Roles" r ON ur."roleId" = r.id ), + filtered_grants AS ( + SELECT DISTINCT + gr.* + FROM "Grants" gr + LEFT JOIN "GrantReplacements" grr + ON gr.id = grr."replacedGrantId" + WHERE gr."deleted" != true + AND (grr."replacementDate" IS NULL + OR grr."replacementDate" > '2020-08-31') + -- Filter for startDate dates between two values if ssdi.startDate is defined + AND (NULLIF(current_setting('ssdi.startDate', true), '') IS NULL + OR gr."inactivationDate" IS NULL + OR gr."inactivationDate"::date >= ( + SELECT MIN(value::timestamp)::date + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.startDate', true), ''),'[]')::json) AS value + )) + AND (NULLIF(current_setting('ssdi.startDate', true), '') IS NULL + OR gr."startDate"::date <= ( + SELECT MAX(value::timestamp)::date + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.startDate', true), ''),'[]')::json) AS value + )) + AND (NULLIF(current_setting('ssdi.startDate', true), '') IS NULL + OR gr."endDate"::date >= ( + SELECT MIN(value::timestamp)::date + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.startDate', true), ''),'[]')::json) AS value + )) + -- Filter for region if ssdi.region is defined + AND (NULLIF(current_setting('ssdi.region', true), '') IS NULL + OR gr."regionId" in ( + SELECT value::integer AS my_array + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.region', true), ''),'[]')::json) AS value + )) + ), grant_counts AS ( SELECT r.name, @@ -214,10 +247,9 @@ WITH ON u.id = a."userId" LEFT JOIN "ActivityRecipients" ar ON a.id = ar."activityReportId" - LEFT JOIN "Grants" gr + LEFT JOIN filtered_grants gr ON gr."id" = ar."grantId" - WHERE (gr.status = 'Active' OR gr.status IS NULL) - AND r.name IS NOT NULL + WHERE r.name IS NOT NULL GROUP BY r.name ), grant_count_users AS ( @@ -235,24 +267,16 @@ WITH ON u.id = a."userId" LEFT JOIN "ActivityRecipients" ar ON a.id = ar."activityReportId" - LEFT JOIN "Grants" gr + LEFT JOIN filtered_grants gr ON gr."id" = ar."grantId" - WHERE (gr.status = 'Active' OR gr.status IS NULL) - AND r.name IS NOT NULL + WHERE r.name IS NOT NULL GROUP BY 1,2 ), total_grants AS ( SELECT COUNT(DISTINCT gr.number) AS grant_count, COUNT(DISTINCT gr."recipientId") AS recipient_count - FROM "Grants" gr - WHERE gr.status = 'Active' - -- Filter for region if ssdi.region is defined - AND (NULLIF(current_setting('ssdi.region', true), '') IS NULL - OR gr."regionId" in ( - SELECT value::integer AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.region', true), ''),'[]')::json) AS value - )) + FROM filtered_grants gr ), recipient_data AS ( SELECT diff --git a/src/routes/courses/handlers.test.js b/src/routes/courses/handlers.test.js index c03f7dfd11..11aec20eb7 100644 --- a/src/routes/courses/handlers.test.js +++ b/src/routes/courses/handlers.test.js @@ -5,6 +5,8 @@ import { getCourseById, updateCourseById, createCourseByName, + courseAuthorization, + deleteCourseById, } from './handlers'; import { getAllCourses, 
@@ -14,10 +16,16 @@ import { import handleErrors from '../../lib/apiErrorHandler'; import { getUserReadRegions } from '../../services/accessValidation'; import { getCourseUrlWidgetData } from '../../services/dashboards/course'; +import { userById } from '../../services/users'; +import SCOPES from '../../middleware/scopeConstants'; jest.mock('../../services/course'); jest.mock('../../lib/apiErrorHandler'); +jest.mock('../../services/users', () => ({ + userById: jest.fn(), +})); + jest.mock('../../services/dashboards/course', () => ({ getCourseUrlWidgetData: jest.fn(), })); @@ -43,6 +51,77 @@ describe('Courses handlers', () => { await db.sequelize.close(); }); afterEach(() => jest.clearAllMocks()); + + describe('courseAuthorization', () => { + it('sends a 403', async () => { + const req = { + session: { userId: 1 }, + params: { id: 2 }, + }; + userById.mockResolvedValue({ id: 2, permissions: [] }); + const { course, isAuthorized } = await courseAuthorization(req, mockResponse); + expect(isAuthorized).toBe(false); + expect(course).toBeNull(); + expect(mockResponse.status).toHaveBeenCalledWith(403); + }); + + it('sends a 404', async () => { + const req = { + session: { userId: 1 }, + params: { id: 2 }, + }; + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); + getById.mockResolvedValue(null); + const { course, isAuthorized } = await courseAuthorization(req, mockResponse); + expect(isAuthorized).toBe(false); + expect(course).toBeNull(); + expect(mockResponse.status).toHaveBeenCalledWith(404); + }); + + it('auths new course', async () => { + const req = { + session: { userId: 1 }, + params: { id: 2 }, + }; + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); + const { course, isAuthorized } = await courseAuthorization(req, mockResponse, true); + expect(isAuthorized).toBe(true); + expect(course).toBeNull(); + expect(mockResponse.status).not.toHaveBeenCalled(); + }); + + it('returns course and authorization', async () => { + const req = { + session: { userId: 1 }, + params: { id: 2 }, + }; + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); + getById.mockResolvedValue({ id: 1 }); + const { course, isAuthorized } = await courseAuthorization(req, mockResponse); + expect(isAuthorized).toBe(true); + expect(course).toStrictEqual({ id: 1 }); + expect(mockResponse.status).not.toHaveBeenCalled(); + }); + }); + it('get all courses', async () => { getAllCourses.mockResolvedValue([]); await allCourses(mockRequest, mockResponse); @@ -65,6 +144,15 @@ describe('Courses handlers', () => { await getCourseById(req, mockResponse); expect(mockResponse.json).toHaveBeenCalledWith(course); }); + + it('handles errors', async () => { + getById.mockRejectedValue(new Error('Test error')); + const req = { + params: { id: 1 }, + }; + await getCourseById(req, mockResponse); + expect(handleErrors).toHaveBeenCalled(); + }); }); describe('updateCourseById', () => { @@ -76,6 +164,13 @@ describe('Courses handlers', () => { destroy: jest.fn(), }; const newCourse = { id: 2, name: 'Test Course 2' }; + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); getById.mockResolvedValue(course); createCourse.mockResolvedValue(newCourse); @@ -91,12 +186,70 @@ describe('Courses handlers', () => { expect(course.update).toHaveBeenCalledWith({ mapsTo: newCourse.id }); expect(course.destroy).toHaveBeenCalled(); }); + + 
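The new courseAuthorization tests above pin down the helper's contract: it resolves the current user, writes the 403 or 404 response itself when the admin check or the course lookup fails, and returns { course, isAuthorized } so call sites can simply bail out. Roughly the calling pattern, with someCourseHandler as a placeholder rather than a handler from this diff:

// The real call sites are the updateCourseById/deleteCourseById refactors
// further down in handlers.ts.
export async function someCourseHandler(req, res) {
  const { isAuthorized, course } = await courseAuthorization(req, res);
  if (!isAuthorized) {
    return; // a 403 or 404 has already been written to `res`
  }
  // Safe to use `course` here: the user is an admin and the course exists.
}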
it('handles unauthorized', async () => { + const course = { + id: 1, + name: 'Test Course 1', + update: jest.fn(), + destroy: jest.fn(), + }; + const newCourse = { id: 2, name: 'Test Course 2' }; + + getById.mockResolvedValue(course); + createCourse.mockResolvedValue(newCourse); + userById.mockResolvedValue({ id: 2, permissions: [] }); + const req = { + session: { userId: 1 }, + params: { id: 1 }, + body: { name: 'Updated Course' }, + }; + + await updateCourseById(req, mockResponse); + + expect(mockResponse.status).toHaveBeenCalledWith(403); + expect(course.update).not.toHaveBeenCalled(); + }); + + it('handles errors', async () => { + const course = { + id: 1, + name: 'Test Course 1', + update: jest.fn(), + destroy: jest.fn(), + }; + + getById.mockResolvedValue(course); + createCourse.mockRejectedValue(new Error('Test error')); + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); + const req = { + session: { userId: 1 }, + params: { id: 1 }, + body: { name: 'Updated Course' }, + }; + + await updateCourseById(req, mockResponse); + expect(handleErrors).toHaveBeenCalled(); + }); }); describe('createCourseByName', () => { it('should create a course by name', async () => { const course = { id: 1, name: 'Test Course' }; createCourse.mockResolvedValue(course); + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); const req = { session: { userId: 1 }, body: { name: 'Test Course' }, @@ -104,6 +257,98 @@ describe('Courses handlers', () => { await createCourseByName(req, mockResponse); expect(mockResponse.json).toHaveBeenCalledWith(course); }); + it('handles unauthorized', async () => { + const course = { id: 1, name: 'Test Course' }; + createCourse.mockResolvedValue(course); + userById.mockResolvedValue({ + id: 2, + permissions: [], + }); + const req = { + session: { userId: 1 }, + body: { name: 'Test Course' }, + }; + await createCourseByName(req, mockResponse); + expect(mockResponse.status).toHaveBeenCalledWith(403); + }); + it('handles errors', async () => { + createCourse.mockRejectedValue(new Error('Test error')); + userById.mockResolvedValue({ + id: 2, + permissions: [ + { + regionId: 14, + scopeId: SCOPES.ADMIN, + }, + ], + }); + const req = { + session: { userId: 1 }, + body: { name: 'Test Course' }, + }; + await createCourseByName(req, mockResponse); + expect(handleErrors).toHaveBeenCalled(); + }); + }); + + describe('deleteCourseById', () => { + it('should delete a course by id', async () => { + const course = { id: 1, name: 'Test Course', destroy: jest.fn() }; + getById.mockResolvedValue(course); + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); + const req = { + session: { userId: 1 }, + params: { id: 1 }, + }; + await deleteCourseById(req, mockResponse); + expect(mockResponse.status).toHaveBeenCalledWith(204); + expect(course.destroy).toHaveBeenCalled(); + }); + it('handles unauthorized', async () => { + const course = { id: 1, name: 'Test Course', destroy: jest.fn() }; + getById.mockResolvedValue(course); + userById.mockResolvedValue({ + id: 2, + permissions: [], + }); + const req = { + session: { userId: 1 }, + params: { id: 1 }, + }; + await deleteCourseById(req, mockResponse); + expect(mockResponse.status).toHaveBeenCalledWith(403); + expect(course.destroy).not.toHaveBeenCalled(); + }); + it('handles errors', async () => { + const course = { + id: 1, + name: 'Test Course', + destroy: jest.fn(() => { + throw 
new Error('Test error'); + }), + }; + getById.mockResolvedValue(course); + userById.mockResolvedValue({ + id: 2, + permissions: [{ + regionId: 14, + scopeId: SCOPES.ADMIN, + }], + }); + const req = { + session: { userId: 1 }, + params: { id: 1 }, + }; + await deleteCourseById(req, mockResponse); + expect(handleErrors).toHaveBeenCalled(); + expect(course.destroy).toHaveBeenCalled(); + }); }); describe('getCourseUrlsWidgetData', () => { diff --git a/src/routes/courses/handlers.ts b/src/routes/courses/handlers.ts index 4ebaccc83f..81f66a0c1c 100644 --- a/src/routes/courses/handlers.ts +++ b/src/routes/courses/handlers.ts @@ -1,4 +1,3 @@ -/* eslint-disable import/prefer-default-export */ import { Request, Response } from 'express'; import filtersToScopes from '../../scopes'; import handleErrors from '../../lib/apiErrorHandler'; @@ -21,6 +20,50 @@ const logContext = { namespace, }; +export async function courseAuthorization(req: Request, res: Response, newCourse = false): Promise<{ + course: { + id: number; + update: (data: { mapsTo: number }) => Promise; + destroy: () => Promise; + } | null; + isAuthorized: boolean; +}> { + const userId = await currentUserId(req, res); + const user = await userById(userId); + const authorization = new UserPolicy(user); + + if (!authorization.isAdmin()) { + res.status(403).send('Forbidden'); + return { + course: null, + isAuthorized: false, + }; + } + + if (newCourse) { + return { + course: null, + isAuthorized: true, + }; + } + + const { id } = req.params; + + const course = await getById(Number(id)); + if (!course) { + res.status(404).send('Course not found'); + return { + course: null, + isAuthorized: false, + }; + } + + return { + course, + isAuthorized: true, + }; +} + export async function allCourses(req: Request, res: Response) { try { // we only verify site access for getting all courses @@ -44,19 +87,8 @@ export async function getCourseById(req: Request, res: Response) { export async function updateCourseById(req: Request, res: Response) { try { - const { id } = req.params; - const userId = await currentUserId(req, res); - const user = await userById(userId); - const authorization = new UserPolicy(user); - - if (!authorization.isAdmin()) { - res.status(403).send('Forbidden'); - return; - } - - const course = await getById(Number(id)); - if (!course) { - res.status(404).send('Course not found'); + const { isAuthorized, course } = await courseAuthorization(req, res); + if (!isAuthorized) { return; } @@ -76,12 +108,8 @@ export async function updateCourseById(req: Request, res: Response) { export async function createCourseByName(req: Request, res: Response) { try { - const userId = await currentUserId(req, res); - const user = await userById(userId); - const authorization = new UserPolicy(user); - - if (!authorization.isAdmin()) { - res.status(403).send('Forbidden'); + const { isAuthorized } = await courseAuthorization(req, res, true); + if (!isAuthorized) { return; } @@ -96,34 +124,20 @@ export async function createCourseByName(req: Request, res: Response) { export async function deleteCourseById(req: Request, res: Response) { try { - const { id } = req.params; - const userId = await currentUserId(req, res); - const user = await userById(userId); - const authorization = new UserPolicy(user); - - if (!authorization.isAdmin()) { - res.status(403).send('Forbidden'); + const { isAuthorized, course } = await courseAuthorization(req, res); + if (!isAuthorized) { return; } - const course = await getById(Number(id)); - if (!course) { - 
res.status(404).send('Course not found'); - return; - } - try { - await course.destroy(); - } catch (error) { - res.status(500).send('Could not destroy course.'); - return; - } + await course.destroy(); + res.status(204).send(); } catch (err) { await handleErrors(req, res, err, logContext); } } -export async function getCourseUrlWidgetDataWithCache(req, res) { +export async function getCourseUrlWidgetDataWithCache(req: Request, res: Response) { const userId = await currentUserId(req, res); const query = await setReadRegions(req.query, userId); const key = `getCourseWidgetUrlData?v=${COURSE_DATA_CACHE_VERSION}&${JSON.stringify(query)}`; diff --git a/src/routes/groups/handlers.test.js b/src/routes/groups/handlers.test.js index fe4b24665c..01512426e6 100644 --- a/src/routes/groups/handlers.test.js +++ b/src/routes/groups/handlers.test.js @@ -78,11 +78,11 @@ describe('Groups Handlers', () => { status: jest.fn(), sendStatus: jest.fn(), }; - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); }); it('should return 200 and the groups', async () => { const groupsResponse = [{ id: 1, name: 'Group 1' }]; - groups.mockReturnValueOnce(groupsResponse); + groups.mockReturnValue(groupsResponse); await getGroups(req, res); expect(res.json).toHaveBeenCalledWith(groupsResponse); }); @@ -108,7 +108,7 @@ describe('Groups Handlers', () => { sendStatus: jest.fn(), status: jest.fn(() => ({ json: jest.fn() })), }; - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); }); it('should return 200 and the group', async () => { const groupResponse = { @@ -120,10 +120,10 @@ describe('Groups Handlers', () => { collaboratorType: { name: GROUP_COLLABORATORS.CREATOR }, }], }; - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canUseGroup: () => true, })); - group.mockReturnValueOnce(groupResponse); + group.mockReturnValue(groupResponse); await getGroup(req, res); expect(res.json).toHaveBeenCalledWith(groupResponse); }); @@ -138,13 +138,16 @@ describe('Groups Handlers', () => { collaboratorType: { name: GROUP_COLLABORATORS.CREATOR }, }], }; - group.mockReturnValueOnce(groupResponse); + GroupPolicy.mockImplementation(() => ({ + canUseGroup: () => false, + })); + group.mockReturnValue(groupResponse); await getGroup(req, res); expect(res.sendStatus).toHaveBeenCalledWith(httpCodes.FORBIDDEN); }); it('should return 500 if there is an error', async () => { - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); group.mockRejectedValueOnce(new Error('Error')); await getGroup(req, res); expect(res.sendStatus).toHaveBeenCalledWith(httpCodes.INTERNAL_SERVER_ERROR); @@ -171,48 +174,93 @@ describe('Groups Handlers', () => { sendStatus: jest.fn(), }; currentUserId - .mockReturnValueOnce(userId); + .mockReturnValue(userId); }); it('should return 200 and the group', async () => { const groupResponse = { id: 1, name: 'Group 1' }; - Grant.findAll.mockReturnValueOnce([{ + Grant.findAll.mockReturnValue([{ id: 1, regionId: 1, recipientId: 1, status: 'Active', }]); - // The below mock is called twice. 
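From here down, the groups handler tests swap mockReturnValueOnce and mockImplementationOnce for their persistent counterparts. A Once mock is consumed by a single call and then falls back to undefined, which made these tests sensitive to exactly how many times a mock was hit (hence the deleted "The below mock is called twice" comments). The persistent variants answer every call; since each test installs its own mocks and jest.clearAllMocks() wipes the recorded calls between tests, nothing leaks across cases. A minimal illustration:

const fetchUser = jest.fn();

fetchUser.mockReturnValueOnce('first');
fetchUser(); // 'first'
fetchUser(); // undefined -- the Once value was consumed

fetchUser.mockReturnValue('always');
fetchUser(); // 'always'
fetchUser(); // 'always', until re-mocked or reset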
- GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canAddToGroup: () => true, })); - GroupPolicy.mockImplementationOnce(() => ({ + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + createNewGroup.mockReturnValue(groupResponse); + await createGroup(req, res); + expect(checkGroupNameAvailable).toHaveBeenCalled(); + expect(res.json).toHaveBeenCalledWith(groupResponse); + }); + + it('uses default value for co-owners', async () => { + const groupResponse = { id: 1, name: 'Group 1' }; + Grant.findAll.mockReturnValue([{ + id: 1, + regionId: 1, + recipientId: 1, + status: 'Active', + }]); + + GroupPolicy.mockImplementation(() => ({ canAddToGroup: () => true, })); - checkGroupNameAvailable.mockReturnValueOnce(Promise.resolve(true)); - potentialRecipientGrants.mockReturnValueOnce([{ grantId: 1 }]); - createNewGroup.mockReturnValueOnce(groupResponse); - await createGroup(req, res); + + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + createNewGroup.mockReturnValue(groupResponse); + await createGroup({ + body: { + name: 'Group 1', + coOwners: [], + }, + }, res); expect(checkGroupNameAvailable).toHaveBeenCalled(); expect(res.json).toHaveBeenCalledWith(groupResponse); }); - it('should return 200 with an error if the group already exists', async () => { - Grant.findAll.mockReturnValueOnce([{ + it('uses individuals from request body', async () => { + const groupResponse = { id: 1, name: 'Group 1' }; + Grant.findAll.mockReturnValue([{ id: 1, regionId: 1, recipientId: 1, status: 'Active', }]); - // The below mock is called twice. - GroupPolicy.mockImplementationOnce(() => ({ + + GroupPolicy.mockImplementation(() => ({ canAddToGroup: () => true, + canUseGroup: () => true, })); - GroupPolicy.mockImplementationOnce(() => ({ + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + createNewGroup.mockReturnValue(groupResponse); + await createGroup({ + body: { + name: 'Group 1', + coOwners: [2], + individuals: [1], + }, + }, res); + expect(checkGroupNameAvailable).toHaveBeenCalled(); + expect(res.json).toHaveBeenCalledWith(groupResponse); + }); + + it('should return 200 with an error if the group already exists', async () => { + Grant.findAll.mockReturnValue([{ + id: 1, + regionId: 1, + recipientId: 1, + status: 'Active', + }]); + GroupPolicy.mockImplementation(() => ({ canAddToGroup: () => true, })); - potentialRecipientGrants.mockReturnValueOnce([{ grantId: 1 }]); - checkGroupNameAvailable.mockReturnValueOnce(Promise.resolve(false)); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + checkGroupNameAvailable.mockReturnValue(Promise.resolve(false)); await createGroup(req, res); expect(statusJson).toHaveBeenCalledWith({ message: 'This group name already exists, please use a different name', @@ -220,22 +268,94 @@ }); }); - it('should return 500 if there is an error', async () => { - Grant.findAll.mockReturnValueOnce([{ + it('should return FORBIDDEN if permissions bad', async () => { + Grant.findAll.mockReturnValue([{ id: 1, regionId: 1, recipientId: 1, status: 'Active', }]); - // The below mock is called twice.
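One pattern worth flagging in the ACCEPTED tests that follow: GroupPolicy.mockImplementation receives the user the policy is constructed for, so a single mock can authorize the creator while rejecting a co-owner or individual, which is what drives the 202 ACCEPTED branches (the group is still created, but the response signals that some listed users could not be attached; that reading is inferred from the tests, not from handler source shown here):

// One mock, per-user behavior: the creator (id 1) passes the check,
// anyone else (for example co-owner id 2) fails it.
GroupPolicy.mockImplementation((userData) => ({
  canAddToGroup: () => userData.id === 1,
}));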
- GroupPolicy.mockImplementationOnce(() => ({ - canAddToGroup: () => true, + GroupPolicy.mockImplementation(() => ({ + canAddToGroup: () => false, + })); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + checkGroupNameAvailable.mockReturnValue(Promise.resolve(false)); + await createGroup(req, res); + expect(res.sendStatus).toHaveBeenCalledWith(httpCodes.FORBIDDEN); + }); + + it('should return ACCEPTED if co-owner permissions bad', async () => { + Grant.findAll.mockReturnValue([{ + id: 1, + regionId: 1, + recipientId: 1, + status: 'Active', + }]); + GroupPolicy.mockImplementation((userData) => ({ + canAddToGroup: () => { + if (userData.id === 1) return true; + return false; + }, + })); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + await createGroup({ + body: { + name: 'Group 1', + coOwners: [2], + }, + session: { + user: { + id: 1, + }, + }, + }, res); + expect(res.status).toHaveBeenCalledWith(httpCodes.ACCEPTED); + }); + + it('should return ACCEPTED if individuals permissions bad', async () => { + Grant.findAll.mockReturnValue([{ + id: 1, + regionId: 1, + recipientId: 1, + status: 'Active', + }]); + GroupPolicy.mockImplementation((userData) => ({ + canUseGroup: () => false, + canAddToGroup: () => { + if (userData.id === 1) return true; + return false; + }, })); - GroupPolicy.mockImplementationOnce(() => ({ + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + await createGroup({ + body: { + name: 'Group 1', + coOwners: [], + individuals: [2], + }, + session: { + user: { + id: 1, + }, + }, + }, res); + expect(res.status).toHaveBeenCalledWith(httpCodes.ACCEPTED); + }); + + it('should return 500 if there is an error', async () => { + Grant.findAll.mockReturnValue([{ + id: 1, + regionId: 1, + recipientId: 1, + status: 'Active', + }]); + GroupPolicy.mockImplementation(() => ({ canAddToGroup: () => true, })); - potentialRecipientGrants.mockReturnValueOnce([{ grantId: 1 }]); - checkGroupNameAvailable.mockReturnValueOnce(Promise.resolve(true)); + potentialRecipientGrants.mockReturnValue([{ grantId: 1 }]); + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); createNewGroup.mockRejectedValue(new Error('Error')); await createGroup(req, res); expect(statusJson).toHaveBeenCalledWith({ message: 'There was an error saving your group' }); @@ -247,6 +367,7 @@ describe('Groups Handlers', () => { let res; const statusJson = jest.fn(); const userId = 1; + afterEach(() => jest.clearAllMocks()); beforeEach(() => { req = { params: { @@ -263,27 +384,147 @@ describe('Groups Handlers', () => { sendStatus: jest.fn(), status: jest.fn(() => ({ json: statusJson })), }; - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); }); it('should return 200 and the group', async () => { - Group.findAll.mockReturnValueOnce([{ id: 1, name: 'Group 1', userId: 1 }]); + Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 1 }]); const groupResponse = { id: 1, name: 'Group 1' }; editGroup.mockReturnValue(groupResponse); - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canUseGroup: () => true, canEditGroup: () => true, canAddToGroup: () => true, })); const groupsResponse = [{ id: 1, name: 'Group 1' }]; - checkGroupNameAvailable.mockReturnValueOnce(Promise.resolve(true)); - groups.mockReturnValueOnce(groupsResponse); + 
checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + groups.mockReturnValue(groupsResponse); await updateGroup(req, res); expect(res.json).toHaveBeenCalledWith(groupResponse); }); + it('should return 200 and the group with default req params', async () => { + Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 1 }]); + const groupResponse = { id: 1, name: 'Group 1' }; + editGroup.mockReturnValue(groupResponse); + + GroupPolicy.mockImplementation(() => ({ + canUseGroup: () => true, + canEditGroup: () => true, + canAddToGroup: () => true, + })); + const groupsResponse = [{ id: 1, name: 'Group 1' }]; + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + groups.mockReturnValue(groupsResponse); + await updateGroup({ + params: { + groupId: 1, + }, + body: { + name: 'Group 1', + }, + }, res); + expect(res.json).toHaveBeenCalledWith(groupResponse); + }); + + it('should return 200 and the group with populated req params', async () => { + Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 1 }]); + const groupResponse = { id: 1, name: 'Group 1' }; + editGroup.mockReturnValue(groupResponse); + + GroupPolicy.mockImplementation(() => ({ + canUseGroup: () => true, + canEditGroup: () => true, + canAddToGroup: () => true, + })); + const groupsResponse = [{ id: 1, name: 'Group 1' }]; + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + groups.mockReturnValue(groupsResponse); + await updateGroup({ + params: { + groupId: 1, + }, + body: { + name: 'Group 1', + coOwners: [2], + individuals: [3], + }, + session: { + user: { + id: 1, + }, + }, + }, res); + expect(res.json).toHaveBeenCalledWith(groupResponse); + }); + + it('should return ACCEPTED if coowners lack permissions', async () => { + Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 1 }]); + const groupResponse = { id: 1, name: 'Group 1' }; + editGroup.mockReturnValue(groupResponse); + + GroupPolicy.mockImplementation((userData) => ({ + canUseGroup: () => true, + canEditGroup: () => true, + canAddToGroup: () => { + if (userData.id === 1) return true; + return false; + }, + })); + const groupsResponse = [{ id: 1, name: 'Group 1' }]; + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + groups.mockReturnValue(groupsResponse); + await updateGroup({ + params: { + groupId: 1, + }, + body: { + name: 'Group 1', + coOwners: [2], + individuals: [3], + }, + session: { + user: { + id: 1, + }, + }, + }, res); + expect(res.status).toHaveBeenCalledWith(httpCodes.ACCEPTED); + }); + + it('should return ACCEPTED if individuals lack permissions', async () => { + Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 1 }]); + const groupResponse = { id: 1, name: 'Group 1' }; + editGroup.mockReturnValue(groupResponse); + + GroupPolicy.mockImplementation((userData) => ({ + canUseGroup: () => false, + canEditGroup: () => true, + canAddToGroup: () => true, + })); + const groupsResponse = [{ id: 1, name: 'Group 1' }]; + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + groups.mockReturnValue(groupsResponse); + await updateGroup({ + params: { + groupId: 1, + }, + body: { + name: 'Group 1', + coOwners: [], + individuals: [3], + }, + session: { + user: { + id: 1, + }, + }, + }, res); + expect(res.status).toHaveBeenCalledWith(httpCodes.ACCEPTED); + }); + it('should return 200 with an error message if the group already exists', async () => { - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canUseGroup: () => true, 
canEditGroup: () => true, canAddToGroup: () => true, @@ -291,14 +532,37 @@ describe('Groups Handlers', () => { Group.findAll.mockReturnValue([{ id: 1, name: 'Group Old', userId: 1 }, { id: 2, name: 'Group 1', userId: 1 }]); const groupResponse = { id: 1, name: 'Group 1' }; editGroup.mockReturnValue(groupResponse); - await updateGroup(req, res); - expect(statusJson).toHaveBeenCalledWith({ + checkGroupNameAvailable.mockReturnValue(Promise.resolve(false)); + const json = jest.fn(); + await updateGroup({ + params: { + groupId: 1, + }, + body: { + name: 'Group 1', + coOwners: [], + individuals: [], + }, + session: { + user: { + id: 1, + }, + }, + }, { + json, + sendStatus: jest.fn(), + status: jest.fn(() => ({ json })), + }); + expect(json).toHaveBeenCalledWith({ message: 'This group name already exists, please use a different name', error: 'new-group-name', }); }); it('should return 403 if the user does not own the group', async () => { + GroupPolicy.mockImplementation(() => ({ + canEditGroup: () => false, + })); const groupResponse = { id: 1, name: 'Group 1', userId: 2 }; Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 2 }]); editGroup.mockReturnValue(groupResponse); @@ -307,16 +571,16 @@ describe('Groups Handlers', () => { }); it('should return 500 if there is an error', async () => { - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canUseGroup: () => true, canEditGroup: () => true, canAddToGroup: () => true, })); const groupsResponse = [{ id: 1, name: 'Group 1' }]; - checkGroupNameAvailable.mockReturnValueOnce(Promise.resolve(true)); - groups.mockReturnValueOnce(groupsResponse); + checkGroupNameAvailable.mockReturnValue(Promise.resolve(true)); + groups.mockReturnValue(groupsResponse); Group.findAll.mockReturnValue([{ id: 1, name: 'Group 1', userId: 1 }]); - Grant.findAll.mockReturnValueOnce([ + Grant.findAll.mockReturnValue([ { regionId: 1 }, ]); editGroup.mockRejectedValue(new Error('Error')); @@ -340,13 +604,13 @@ describe('Groups Handlers', () => { sendStatus: jest.fn(), status: jest.fn(() => ({ json: jest.fn() })), }; - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); }); it('should return 200 and the group', async () => { - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ ownsGroup: () => true, })); - group.mockReturnValueOnce({ + group.mockReturnValue({ id: 1, name: '', grants: [], @@ -358,20 +622,20 @@ describe('Groups Handlers', () => { isPublic: false, }); const groupResponse = 1; - destroyGroup.mockReturnValueOnce(groupResponse); + destroyGroup.mockReturnValue(groupResponse); await deleteGroup(req, res); expect(res.json).toHaveBeenCalledWith(groupResponse); }); it('should return 200 if the group no longer exists', async () => { - group.mockReturnValueOnce(null); + group.mockReturnValue(null); await deleteGroup(req, res); expect(res.status).toHaveBeenCalledWith(httpCodes.OK); }); it('should return 403 if the user does not own the group', async () => { - // Group.findOne.mockReturnValueOnce({ id: 1, name: 'Group 1', userId: 2 }); - group.mockReturnValueOnce({ + // Group.findOne.mockReturnValue({ id: 1, name: 'Group 1', userId: 2 }); + group.mockReturnValue({ id: 1, name: '', grants: [], @@ -382,15 +646,18 @@ describe('Groups Handlers', () => { }], isPublic: false, }); + GroupPolicy.mockImplementation(() => ({ + ownsGroup: () => false, + })); await deleteGroup(req, res); expect(res.sendStatus).toHaveBeenCalledWith(httpCodes.FORBIDDEN); }); it('should return 
500 if there is an error', async () => { - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ ownsGroup: () => true, })); - group.mockReturnValueOnce({ + group.mockReturnValue({ id: 1, name: '', grants: [], @@ -418,7 +685,7 @@ describe('Groups Handlers', () => { sendStatus: jest.fn(), status: jest.fn(() => ({ json: statusJson })), }; - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); }); it('should return 200 and the users', async () => { req = { @@ -431,7 +698,7 @@ describe('Groups Handlers', () => { // Mock return value once for potentialCoOwners(). const potentialCoOwnersResponse = [{ id: 1, name: 'User 1' }]; - potentialGroupUsers.mockReturnValueOnce(potentialCoOwnersResponse); + potentialGroupUsers.mockReturnValue(potentialCoOwnersResponse); // Mock return group. const mockGroup = { @@ -442,11 +709,33 @@ describe('Groups Handlers', () => { }; // Mock group policy. - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ + canEditGroup: () => true, + })); + + group.mockReturnValue(mockGroup); + await getEligibleUsersForGroup(req, res); + expect(res.json).toHaveBeenCalledWith(potentialCoOwnersResponse); + }); + + it('handles newGroup', async () => { + req = { + params: { + groupId: 'new', + }, + body: { + }, + }; + + // Mock return value once for potentialCoOwners(). + const potentialCoOwnersResponse = [{ id: 1, name: 'User 1' }]; + potentialGroupUsers.mockReturnValue(potentialCoOwnersResponse); + + // Mock group policy. + GroupPolicy.mockImplementation(() => ({ canEditGroup: () => true, })); - group.mockReturnValueOnce(mockGroup); await getEligibleUsersForGroup(req, res); expect(res.json).toHaveBeenCalledWith(potentialCoOwnersResponse); }); @@ -462,7 +751,7 @@ describe('Groups Handlers', () => { // Mock return value once for potentialCoOwners(). const potentialCoOwnersResponse = [{ id: 1, name: 'User 1' }]; - potentialGroupUsers.mockReturnValueOnce(potentialCoOwnersResponse); + potentialGroupUsers.mockReturnValue(potentialCoOwnersResponse); // Mock return group. const mockGroup = { @@ -473,11 +762,11 @@ describe('Groups Handlers', () => { }; // Mock group policy. - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canEditGroup: () => false, })); - group.mockReturnValueOnce(mockGroup); + group.mockReturnValue(mockGroup); await getEligibleUsersForGroup(req, res); expect(res.sendStatus).toHaveBeenCalledWith(httpCodes.FORBIDDEN); }); @@ -498,7 +787,7 @@ describe('Groups Handlers', () => { collaboratorType: { name: GROUP_COLLABORATORS.CREATOR }, }], }; - group.mockReturnValueOnce(mockGroup); + group.mockReturnValue(mockGroup); // Mock return error for potentialCoOwners(). potentialGroupUsers.mockRejectedValue(new Error('Error')); @@ -520,7 +809,7 @@ describe('Groups Handlers', () => { sendStatus: jest.fn(), status: jest.fn(() => ({ json: statusJson })), }; - currentUserId.mockReturnValueOnce(userId); + currentUserId.mockReturnValue(userId); }); it('should return 200 and the recipients', async () => { req = { @@ -532,7 +821,7 @@ describe('Groups Handlers', () => { }; const resGrants = [{ id: 1, name: 'Grant 1' }]; - potentialRecipientGrants.mockReturnValueOnce(resGrants); + potentialRecipientGrants.mockReturnValue(resGrants); // Mock return group. const mockGroup = { @@ -543,11 +832,32 @@ describe('Groups Handlers', () => { }; // Mock group policy. 
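Both eligibility endpoints now accept groupId === 'new' (the 'handles newGroup' case above and the new-group recipients case below): with no group to load yet, the handler skips the group() lookup and evaluates the policy on its own. A sketch of the assumed branch; the GroupPolicy constructor arguments are a guess, since the handler source is not part of this diff:

const { groupId } = req.params;
// For a group that does not exist yet there is nothing to fetch.
const existingGroup = groupId === 'new' ? null : await group(Number(groupId));
const policy = new GroupPolicy(user, existingGroup);
if (!policy.canEditGroup()) {
  res.sendStatus(httpCodes.FORBIDDEN);
  return;
}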
- GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ + canEditGroup: () => true, + })); + + group.mockReturnValue(mockGroup); + await getEligibleRecipientGrantsForGroup(req, res); + expect(res.json).toHaveBeenCalledWith(resGrants); + }); + + it('should return 200 and the recipients for a new group', async () => { + req = { + params: { + groupId: 'new', + }, + body: { + }, + }; + + const resGrants = [{ id: 1, name: 'Grant 1' }]; + potentialRecipientGrants.mockReturnValue(resGrants); + + // Mock group policy. + GroupPolicy.mockImplementation(() => ({ canEditGroup: () => true, })); - group.mockReturnValueOnce(mockGroup); await getEligibleRecipientGrantsForGroup(req, res); expect(res.json).toHaveBeenCalledWith(resGrants); }); @@ -563,7 +873,7 @@ describe('Groups Handlers', () => { // Mock return value once for potentialCoOwners(). const resGrants = [{ id: 1, name: 'Grant 1' }]; - potentialGroupUsers.mockReturnValueOnce(resGrants); + potentialGroupUsers.mockReturnValue(resGrants); // Mock return group. const mockGroup = { @@ -574,11 +884,11 @@ describe('Groups Handlers', () => { }; // Mock group policy. - GroupPolicy.mockImplementationOnce(() => ({ + GroupPolicy.mockImplementation(() => ({ canEditGroup: () => false, })); - group.mockReturnValueOnce(mockGroup); + group.mockReturnValue(mockGroup); await getEligibleRecipientGrantsForGroup(req, res); expect(res.sendStatus).toHaveBeenCalledWith(httpCodes.FORBIDDEN); }); @@ -599,7 +909,7 @@ describe('Groups Handlers', () => { collaboratorType: { name: GROUP_COLLABORATORS.CREATOR }, }], }; - group.mockReturnValueOnce(mockGroup); + group.mockReturnValue(mockGroup); // Mock return error for potentialCoOwners(). potentialRecipientGrants.mockRejectedValue(new Error('Error')); diff --git a/src/routes/testingOnly/handlers.test.js b/src/routes/testingOnly/handlers.test.js new file mode 100644 index 0000000000..84860fe559 --- /dev/null +++ b/src/routes/testingOnly/handlers.test.js @@ -0,0 +1,80 @@ +import { reseedDB, queryDB } from './handlers'; +import { reseed, query } from '../../../tests/utils/dbUtils'; +import handleErrors from '../../lib/apiErrorHandler'; + +jest.mock('../../../tests/utils/dbUtils'); +jest.mock('../../lib/apiErrorHandler'); + +describe('handlers', () => { + let req; + let res; + + beforeEach(() => { + req = { body: {} }; + res = { + status: jest.fn().mockReturnThis(), + json: jest.fn(), + }; + }); + + describe('reseedDB', () => { + it('should return 200 if reseed is successful', async () => { + reseed.mockResolvedValue(true); + await reseedDB(req, res); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith(true); + }); + + it('should return 500 if reseed fails', async () => { + reseed.mockResolvedValue(false); + await reseedDB(req, res); + expect(res.status).toHaveBeenCalledWith(500); + expect(res.json).toHaveBeenCalledWith(false); + }); + + it('should handle errors', async () => { + const error = new Error('test error'); + reseed.mockRejectedValue(error); + await reseedDB(req, res); + expect(handleErrors).toHaveBeenCalledWith(req, res, error, 'reseedDB'); + }); + }); + + describe('queryDB', () => { + it('should return 200 if query is successful', async () => { + req.body = { command: 'SELECT * FROM users' }; + query.mockResolvedValue([{}]); + await queryDB(req, res); + expect(res.status).toHaveBeenCalledWith(200); + expect(res.json).toHaveBeenCalledWith([{}]); + }); + + it('should return 501 if query returns empty result', async () => { + req.body = { command: 'SELECT 
* FROM users' }; + query.mockResolvedValue([]); + await queryDB(req, res); + expect(res.status).toHaveBeenCalledWith(501); + expect(res.json).toHaveBeenCalledWith([]); + }); + + it('should throw error if req.body is missing', async () => { + req.body = null; + await queryDB(req, res); + expect(handleErrors).toHaveBeenCalledWith(req, res, new Error('req.body is required'), 'queryDB'); + }); + + it('should throw error if command is missing', async () => { + req.body = { options: {} }; + await queryDB(req, res); + expect(handleErrors).toHaveBeenCalledWith(req, res, new Error('command is required'), 'queryDB'); + }); + + it('should handle errors', async () => { + req.body = { command: 'SELECT * FROM users' }; + const error = new Error('test error'); + query.mockRejectedValue(error); + await queryDB(req, res); + expect(handleErrors).toHaveBeenCalledWith(req, res, error, 'queryDB'); + }); + }); +}); diff --git a/src/scopes/activityReport/deliveryMethod.js b/src/scopes/activityReport/deliveryMethod.js index d0a754ff3c..262d62d77b 100644 --- a/src/scopes/activityReport/deliveryMethod.js +++ b/src/scopes/activityReport/deliveryMethod.js @@ -1,7 +1,7 @@ import { Op } from 'sequelize'; import { sequelize } from '../../models'; -function formatDeliveryMethod(deliveryMethod) { +export function formatDeliveryMethod(deliveryMethod) { const method = deliveryMethod.toLowerCase(); if (method === 'in person') { return 'in-person'; diff --git a/src/scopes/activityReport/index.test.js b/src/scopes/activityReport/index.test.js index d39af3b847..5d72272321 100644 --- a/src/scopes/activityReport/index.test.js +++ b/src/scopes/activityReport/index.test.js @@ -43,6 +43,7 @@ import { } from '../../testUtils'; import { findOrCreateResources, processActivityReportForResourcesById } from '../../services/resource'; import { createActivityReportObjectiveFileMetaData } from '../../services/files'; +import { formatDeliveryMethod } from './deliveryMethod'; const mockUser = { id: faker.datatype.number(), @@ -3306,6 +3307,12 @@ describe('filtersToScopes', () => { }); }); + describe('formatDeliveryMethod', () => { + it('returns in-person for "in person"', () => { + expect(formatDeliveryMethod('in person')).toBe('in-person'); + }); + }); + it('includes delivery method', async () => { const filters = { 'deliveryMethod.in': ['in-person'] }; const { activityReport: scope } = await filtersToScopes(filters); diff --git a/src/services/communicationLog.test.js b/src/services/communicationLog.test.js index 687636bd0a..7720f1e207 100644 --- a/src/services/communicationLog.test.js +++ b/src/services/communicationLog.test.js @@ -100,7 +100,8 @@ describe('communicationLog services', () => { expect(result).toEqual([ [sequelize.literal('author.name asc')], - ['data.communicationDate', 'asc'], + // eslint-disable-next-line @typescript-eslint/quotes + [sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE asc`)], ]); }); @@ -112,7 +113,8 @@ describe('communicationLog services', () => { expect(result).toEqual([ ['data.purpose', 'desc'], - ['data.communicationDate', 'desc'], + // eslint-disable-next-line @typescript-eslint/quotes + [sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE desc`)], ]); }); @@ -124,7 +126,8 @@ describe('communicationLog services', () => { expect(result).toEqual([ ['data.result', 'asc'], - ['data.communicationDate', 'asc'], + // eslint-disable-next-line @typescript-eslint/quotes + [sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE asc`)], ]); }); @@ -134,7 +137,10 @@ 
describe('communicationLog services', () => { const result = orderLogsBy(sortBy, sortDir); - expect(result).toEqual([['data.communicationDate', 'desc']]); + expect(result).toEqual( + // eslint-disable-next-line @typescript-eslint/quotes + [[sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE desc`)]], + ); }); it('should return the correct result when sortBy is not provided', () => { @@ -142,7 +148,10 @@ describe('communicationLog services', () => { const result = orderLogsBy(undefined, sortDir); - expect(result).toEqual([['data.communicationDate', 'asc']]); + expect(result).toEqual( + // eslint-disable-next-line @typescript-eslint/quotes + [[sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE asc`)]], + ); }); }); diff --git a/src/services/communicationLog.ts b/src/services/communicationLog.ts index 363040f61c..44f22d9501 100644 --- a/src/services/communicationLog.ts +++ b/src/services/communicationLog.ts @@ -56,8 +56,7 @@ export const orderLogsBy = (sortBy: string, sortDir: string): string[] => { result = [[ sequelize.literal(`author.name ${sortDir}`), ], [ - 'data.communicationDate', - sortDir, + sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE ${sortDir}`), ]]; break; case 'purpose': @@ -66,8 +65,7 @@ export const orderLogsBy = (sortBy: string, sortDir: string): string[] => { sortDir, ], [ - 'data.communicationDate', - sortDir, + sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE ${sortDir}`), ]]; break; case 'result': @@ -76,15 +74,13 @@ export const orderLogsBy = (sortBy: string, sortDir: string): string[] => { sortDir, ], [ - 'data.communicationDate', - sortDir, + sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE ${sortDir}`), ]]; break; case 'communicationDate': default: result = [[ - 'data.communicationDate', - sortDir, + sequelize.literal(`(NULLIF(data ->> 'communicationDate',''))::DATE ${sortDir}`), ]]; break; } diff --git a/src/services/event.test.js b/src/services/event.test.js index 6d0b72a19d..f795402220 100644 --- a/src/services/event.test.js +++ b/src/services/event.test.js @@ -17,7 +17,15 @@ import { csvImport, validateFields, findEventHelper, + filterEventsByStatus, + findAllEvents, + findEventHelperBlob, + mapLineToData, + checkUserExists, + checkUserExistsByNationalCenter, } from './event'; +import { auditLogger } from '../logger'; +import * as mailer from '../lib/mailer'; describe('event service', () => { afterAll(async () => { @@ -126,6 +134,38 @@ describe('event service', () => { await destroyEvent(updated.id); }); + + it('calls trEventComplete when status is updated to COMPLETE', async () => { + const created = await createAnEvent(98_989); + + const mockEvent = { + toJSON: jest.fn().mockReturnValue({ + id: created.id, + ownerId: created.ownerId, + pocIds: created.pocIds, + collaboratorIds: created.collaboratorIds, + regionId: created.regionId, + data: created.data, + }), + update: jest.fn(), + }; + + jest.spyOn(db.EventReportPilot, 'findByPk').mockResolvedValue(mockEvent); + const trEventCompleteSpy = jest.spyOn(mailer, 'trEventComplete').mockResolvedValue(); + + await updateEvent(created.id, { + ownerId: created.ownerId, + pocIds: created.pocIds, + regionId: created.regionId, + collaboratorIds: created.collaboratorIds, + data: { status: TRS.COMPLETE }, + }); + + expect(trEventCompleteSpy).toHaveBeenCalledWith(mockEvent.toJSON()); + + await destroyEvent(created.id); + jest.restoreAllMocks(); + }); }); describe('finders', () => { @@ -744,6 +784,18 @@ 
${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType}, expect(result.skipped.length).toEqual(1); expect(result.skipped).toEqual(['Value "Invalid Audience" is invalid for column "Audience". Must be of one of Recipients, Regional office/TTA: R01-TR-5725']); }); + + it('defaults to `Creator` heading when `IST/Creator` is not found, but errors when Creator fallback is not found', async () => { + const reportId = 'R01-TR-5725'; + const newHeadings = headings.filter((h) => h !== 'IST/Creator'); + const d = `${newHeadings.join(',')} +${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType},${reasons},${vision},${targetPopulation},Recipients,${poc.name}`; + const b = Buffer.from(d); + const result = await csvImport(b); + expect(result.count).toEqual(0); + expect(result.errors.length).toEqual(1); + expect(result.errors).toEqual(['No creator listed on import for R01-TR-5725']); + }); }); describe('validateFields', () => { @@ -754,7 +806,6 @@ ${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType}, describe('findEventHelper', () => { it('should set owner when ownerUser exists', async () => { - const eventId = 12345; const ownerId = 67890; const mockUser = { @@ -789,5 +840,351 @@ ${email},${reportId},${eventTitle},${typeOfEvent},${ncTwo.name},${trainingType}, await db.EventReportPilot.destroy({ where: { id: createdEvent.id } }); jest.restoreAllMocks(); }); + + it('should return default values when data, sessionReports, and eventReportPilotNationalCenterUsers are undefined', async () => { + const ownerId = 67890; + + // Create an event without data, sessionReports, and eventReportPilotNationalCenterUsers + const createdEvent = await db.EventReportPilot.create({ + ownerId, + pocIds: [ownerId], + collaboratorIds: [ownerId], + regionId: 1, + data: {}, + }); + + const foundEvent = await findEventHelper({ id: createdEvent.id }); + + expect(foundEvent).toHaveProperty('data', {}); + expect(foundEvent).toHaveProperty('sessionReports', []); + expect(foundEvent).toHaveProperty('eventReportPilotNationalCenterUsers', []); + + // Clean up + await db.EventReportPilot.destroy({ where: { id: createdEvent.id } }); + }); + }); + + describe('destroyEvent', () => { + it('logs an error when deleting session reports fails', async () => { + const eventId = 12345; + + jest.spyOn(db.SessionReportPilot, 'destroy').mockRejectedValue(new Error('Session report deletion error')); + const auditLoggerSpy = jest.spyOn(auditLogger, 'error'); + + await destroyEvent(eventId); + + expect(auditLoggerSpy).toHaveBeenCalledWith(`Error deleting session reports for event ${eventId}:`, expect.any(Error)); + + jest.restoreAllMocks(); + }); + + it('logs an error when deleting event report fails', async () => { + const eventId = 12345; + + jest.spyOn(db.EventReportPilot, 'destroy').mockRejectedValue(new Error('Event report deletion error')); + const auditLoggerSpy = jest.spyOn(auditLogger, 'error'); + + await destroyEvent(eventId); + + expect(auditLoggerSpy).toHaveBeenCalledWith(`Error deleting event report for event ${eventId}:`, expect.any(Error)); + + jest.restoreAllMocks(); + }); + }); + + describe('filterEventsByStatus', () => { + const userId = 123; + const event = { + id: 1, + ownerId: userId, + pocIds: [456], + collaboratorIds: [789], + regionId: 1, + data: { status: TRS.NOT_STARTED }, + sessionReports: [], + }; + + it('should return events for POC, owner, or collaborator when status is null', async () => { + const events = [event]; + + const filteredEvents = await 
filterEventsByStatus(events, null, userId); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0]).toEqual(event); + }); + + it('should return events for collaborator when status is null', async () => { + const events = [event]; + + const filteredEvents = await filterEventsByStatus(events, null, 789); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0]).toEqual(event); + }); + + it('should return events for owner when status is null', async () => { + const events = [event]; + + const filteredEvents = await filterEventsByStatus(events, null, userId); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0]).toEqual(event); + }); + + it('should return events for admin without filtering', async () => { + const events = [event]; + + const filteredEvents = await filterEventsByStatus(events, TRS.NOT_STARTED, userId, true); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0]).toEqual(event); + }); + + it('should return events with all sessions for owner, collaborator, or POC when status is IN_PROGRESS', async () => { + const inProgressEvent = { + ...event, + data: { status: TRS.IN_PROGRESS }, + sessionReports: [ + { id: 1, data: { status: TRS.COMPLETE } }, + { id: 2, data: { status: TRS.IN_PROGRESS } }, + ], + }; + const events = [inProgressEvent]; + + const filteredEvents = await filterEventsByStatus(events, TRS.IN_PROGRESS, userId); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0].sessionReports).toHaveLength(2); + }); + + it('should return events with all sessions for collaborator when status is IN_PROGRESS', async () => { + const inProgressEvent = { + ...event, + data: { status: TRS.IN_PROGRESS }, + sessionReports: [ + { id: 1, data: { status: TRS.COMPLETE } }, + { id: 2, data: { status: TRS.IN_PROGRESS } }, + ], + }; + const events = [inProgressEvent]; + + const filteredEvents = await filterEventsByStatus(events, TRS.IN_PROGRESS, 789); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0].sessionReports).toHaveLength(2); + }); + + it('should return events with all sessions for POC when status is IN_PROGRESS', async () => { + const inProgressEvent = { + ...event, + data: { status: TRS.IN_PROGRESS }, + sessionReports: [ + { id: 1, data: { status: TRS.COMPLETE } }, + { id: 2, data: { status: TRS.IN_PROGRESS } }, + ], + }; + const events = [inProgressEvent]; + + const filteredEvents = await filterEventsByStatus(events, TRS.IN_PROGRESS, 456); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0].sessionReports).toHaveLength(2); + }); + + it('should return events with only complete sessions for non-owner, non-collaborator, non-POC when status is IN_PROGRESS', async () => { + const inProgressEvent = { + ...event, + data: { status: TRS.IN_PROGRESS }, + sessionReports: [ + { id: 1, data: { status: TRS.COMPLETE } }, + { id: 2, data: { status: TRS.IN_PROGRESS } }, + ], + }; + const events = [inProgressEvent]; + + const filteredEvents = await filterEventsByStatus(events, TRS.IN_PROGRESS, 999); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0].sessionReports).toHaveLength(1); + expect(filteredEvents[0].sessionReports[0].data.status).toBe(TRS.COMPLETE); + }); + + it('should return events for all users when status is COMPLETE', async () => { + const completeEvent = { + ...event, + data: { status: TRS.COMPLETE }, + sessionReports: [ + { id: 1, data: { status: TRS.COMPLETE } }, + { id: 2, data: { status: TRS.IN_PROGRESS } }, + ], + }; + const events = 
[completeEvent]; + + const filteredEvents = await filterEventsByStatus(events, TRS.COMPLETE, 999); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0].sessionReports).toHaveLength(2); + }); + + it('should return events for all users when status is SUSPENDED', async () => { + const suspendedEvent = { + ...event, + data: { status: TRS.SUSPENDED }, + sessionReports: [ + { id: 1, data: { status: TRS.COMPLETE } }, + { id: 2, data: { status: TRS.IN_PROGRESS } }, + ], + }; + const events = [suspendedEvent]; + + const filteredEvents = await filterEventsByStatus(events, TRS.SUSPENDED, 999); + + expect(filteredEvents).toHaveLength(1); + expect(filteredEvents[0].sessionReports).toHaveLength(2); + }); + + it('should return an empty array for an unknown status', async () => { + const events = [event]; + const filteredEvents = await filterEventsByStatus(events, 'UNKNOWN_STATUS', userId); + expect(filteredEvents).toHaveLength(0); + }); + }); + + describe('findAllEvents', () => { + it('should return all events', async () => { + const event1 = await createAnEvent(1); + const event2 = await createAnEvent(2); + + const events = await findAllEvents(); + + expect(events).toEqual( + expect.arrayContaining([ + expect.objectContaining({ id: event1.id }), + expect.objectContaining({ id: event2.id }), + ]), + ); + + await destroyEvent(event1.id); + await destroyEvent(event2.id); + }); + }); + + describe('findEventsByStatus', () => { + it('should handle default values for fallbackValue, allowNull, and scopes', async () => { + const createdEvent1 = await createAnEventWithStatus(50_500, null); + const foundEvents = await findEventsByStatus(null, [], 50_500); + const eventWithFallback = foundEvents.find((event) => event.id === createdEvent1.id); + expect(eventWithFallback.data.status).toBe(null); + await destroyEvent(createdEvent1.id); + }); + }); + + describe('findEventHelperBlob', () => { + it('should return null if no events are found', async () => { + jest.spyOn(db.EventReportPilot, 'findAll').mockResolvedValue(null); + const result = await findEventHelperBlob({ + key: 'status', + value: TRS.NOT_STARTED, + regions: [], + scopes: [], + }); + expect(result).toBeNull(); + jest.restoreAllMocks(); + }); + }); + + describe('mapLineToData', () => { + it('should map CSV line to data object correctly', () => { + const line = { + Audience: 'Recipients', + 'IST/Creator': 'creator@example.com', + 'Event Title': 'Event Title Example', + 'Event Duration': '2 days', + 'Event Duration/#NC Days of Support': '3 days', + 'Event ID': 'R01-TR-1234', + 'Overall Vision/Goal for the PD Event': 'Overall Vision', + 'Vision/Goal/Outcomes for the PD Event': 'Vision Outcome', + 'Reason for Activity': 'Complaint', + 'Reason(s) for PD': 'Planning/Coordination', + 'Target Population(s)': 'Program Staff\nAffected by Disaster', + 'Event Organizer - Type of Event': 'Regional office/TTA', + 'IST Name:': 'IST Name Example', + 'IST Name': 'IST Name Example 2', + 'Extra Column': 'Extra Value', + }; + + const expectedData = { + eventIntendedAudience: 'Recipients', + creator: 'creator@example.com', + eventName: 'Event Title Example', + trainingType: '3 days', + eventId: 'R01-TR-1234', + vision: 'Vision Outcome', + reasons: ['Planning/Coordination'], + targetPopulations: ['Program Staff', 'Affected by Disaster'], + eventOrganizer: 'Regional office/TTA', + istName: 'IST Name Example 2', + }; + + const result = mapLineToData(line); + expect(result).toEqual(expectedData); + }); + }); + + describe('checkUserExists', () => { + it('should 
return the user if they exist', async () => { + const mockUser = { + id: 1, + name: 'Test User', + email: 'test@example.com', + }; + + jest.spyOn(db.User, 'findOne').mockResolvedValue(mockUser); + + const result = await checkUserExists('email', 'test@example.com'); + expect(result).toEqual(mockUser); + + jest.restoreAllMocks(); + }); + + it('should throw an error if the user does not exist', async () => { + jest.spyOn(db.User, 'findOne').mockResolvedValue(null); + + await expect(checkUserExists('email', 'nonexistent@example.com')).rejects.toThrow('User with email: nonexistent@example.com does not exist'); + + jest.restoreAllMocks(); + }); + + it('should throw an error if the user does not exist by name', async () => { + jest.spyOn(db.User, 'findOne').mockResolvedValue(null); + + await expect(checkUserExists('name', 'Nonexistent User')).rejects.toThrow('User with name: Nonexistent User does not exist'); + + jest.restoreAllMocks(); + }); + }); + + describe('checkUserExistsByNationalCenter', () => { + it('should return the user if they exist', async () => { + const mockUser = { + id: 1, + name: 'Test User', + }; + + jest.spyOn(db.User, 'findOne').mockResolvedValue(mockUser); + + const result = await checkUserExistsByNationalCenter('Test National Center'); + expect(result).toEqual(mockUser); + + jest.restoreAllMocks(); + }); + + it('should throw an error if the user does not exist', async () => { + jest.spyOn(db.User, 'findOne').mockResolvedValue(null); + + await expect(checkUserExistsByNationalCenter('Nonexistent National Center')).rejects.toThrow('User associated with National Center: Nonexistent National Center does not exist'); + + jest.restoreAllMocks(); + }); }); }); diff --git a/src/services/event.ts b/src/services/event.ts index 4139c999c5..d64fb61437 100644 --- a/src/services/event.ts +++ b/src/services/event.ts @@ -100,8 +100,6 @@ export async function destroyEvent(id: number): Promise { } export async function findEventHelper(where, plural = false): Promise { - let event; - const query = { attributes: [ 'id', @@ -136,11 +134,7 @@ export async function findEventHelper(where, plural = false): Promise { @@ -741,7 +734,7 @@ const replacements: Record = { const applyReplacements = (value: string) => replacements[value] || value; -const mapLineToData = (line: Record) => { +export const mapLineToData = (line: Record) => { const data: Record = {}; Object.keys(line).forEach((key) => { @@ -756,7 +749,7 @@ const mapLineToData = (line: Record) => { return data; }; -const checkUserExists = async (key:'email' | 'name', value: string) => { +export const checkUserExists = async (key:'email' | 'name', value: string) => { const user = await db.User.findOne({ where: { [key]: { @@ -781,7 +774,7 @@ const checkUserExists = async (key:'email' | 'name', value: string) => { return user; }; -const checkUserExistsByNationalCenter = async (identifier: string) => { +export const checkUserExistsByNationalCenter = async (identifier: string) => { const user = await db.User.findOne({ attributes: ['id', 'name'], include: [ diff --git a/src/services/s3Queue.test.js b/src/services/s3Queue.test.js index d48c27a77a..212a85902b 100644 --- a/src/services/s3Queue.test.js +++ b/src/services/s3Queue.test.js @@ -1,7 +1,14 @@ import Queue from 'bull'; -import { addDeleteFileToQueue, s3Queue } from './s3Queue'; +import { + addDeleteFileToQueue, + s3Queue, + onFailedS3Queue, + onCompletedS3Queue, + processS3Queue, +} from './s3Queue'; import { FILE_STATUSES, S3_ACTIONS } from '../constants'; import db, { File } from '../models'; 
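A recurring move in this diff, repeated here for the queue modules: previously module-private helpers (mapLineToData, checkUserExists, checkUserExistsByNationalCenter, and now onFailedS3Queue, onCompletedS3Queue, processS3Queue) become named exports purely so the unit tests can call them directly instead of driving them through csvImport or a live Bull queue. Relatedly, the onCompleted handlers below switch to JSON.stringify(result.data), since interpolating a plain object into a template literal logs "[object Object]". For example, the exported mapLineToData is now testable in isolation:

import { mapLineToData } from './event';

// Directly exercisable now that it is exported; before, it was only
// reachable through the csvImport() pipeline.
const data = mapLineToData({
  'Event ID': 'R01-TR-1234',
  Audience: 'Recipients',
});
// data.eventId === 'R01-TR-1234'
// data.eventIntendedAudience === 'Recipients'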
+import { auditLogger, logger } from '../logger'; jest.mock('bull'); @@ -28,7 +35,11 @@ describe('s3 queue manager tests', () => { }); beforeEach(() => { - Queue.mockImplementation(() => s3Queue); + Queue.mockImplementation(() => ({ + add: jest.fn(), + on: jest.fn(), + process: jest.fn(), + })); }); afterEach(() => { @@ -57,4 +68,61 @@ describe('s3 queue manager tests', () => { }, ); }); + + it('onFailedS3Queue logs an error', () => { + const job = { data: { key: 'test-key' } }; + const error = new Error('Test error'); + const auditLoggerSpy = jest.spyOn(auditLogger, 'error'); + onFailedS3Queue(job, error); + expect(auditLoggerSpy).toHaveBeenCalledWith('job test-key failed with error Error: Test error'); + }); + + it('onCompletedS3Queue logs info on success', () => { + const job = { data: { key: 'test-key' } }; + const result = { status: 200, data: { message: 'Success' } }; + const loggerSpy = jest.spyOn(logger, 'info'); + onCompletedS3Queue(job, result); + expect(loggerSpy).toHaveBeenCalledWith('job test-key completed with status 200 and result {"message":"Success"}'); + }); + + it('onCompletedS3Queue logs error on failure', () => { + const job = { data: { key: 'test-key' } }; + const result = { status: 400, data: { message: 'Failure' } }; + const auditLoggerSpy = jest.spyOn(auditLogger, 'error'); + onCompletedS3Queue(job, result); + expect(auditLoggerSpy).toHaveBeenCalledWith('job test-key completed with status 400 and result {"message":"Failure"}'); + }); + + it('s3Queue on failed event triggers onFailedS3Queue', () => { + const job = { data: { key: 'test-key' } }; + const error = new Error('Test error'); + const auditLoggerSpy = jest.spyOn(auditLogger, 'error'); + s3Queue.on.mockImplementation((event, callback) => { + if (event === 'failed') { + callback(job, error); + } + }); + s3Queue.on('failed', onFailedS3Queue); + expect(auditLoggerSpy).toHaveBeenCalledWith('job test-key failed with error Error: Test error'); + }); + + it('s3Queue on completed event triggers onCompletedS3Queue', () => { + const job = { data: { key: 'test-key' } }; + const result = { status: 200, data: { message: 'Success' } }; + const loggerSpy = jest.spyOn(logger, 'info'); + s3Queue.on.mockImplementation((event, callback) => { + if (event === 'completed') { + callback(job, result); + } + }); + s3Queue.on('completed', onCompletedS3Queue); + expect(loggerSpy).toHaveBeenCalledWith('job test-key completed with status 200 and result {"message":"Success"}'); + }); + + it('processS3Queue sets up listeners and processes the queue', () => { + processS3Queue(); + expect(s3Queue.on).toHaveBeenCalledWith('failed', onFailedS3Queue); + expect(s3Queue.on).toHaveBeenCalledWith('completed', onCompletedS3Queue); + expect(s3Queue.process).toHaveBeenCalled(); + }); }); diff --git a/src/services/scanQueue.js b/src/services/scanQueue.js index 6c6a65f26e..b9ade26a18 100644 --- a/src/services/scanQueue.js +++ b/src/services/scanQueue.js @@ -30,9 +30,9 @@ const addToScanQueue = (fileKey) => { const onFailedScanQueue = (job, error) => auditLogger.error(`job ${job.data.key} failed with error ${error}`); const onCompletedScanQueue = (job, result) => { if (result.status === 200) { - logger.info(`job ${job.data.key} completed with status ${result.status} and result ${result.data}`); + logger.info(`job ${job.data.key} completed with status ${result.status} and result ${JSON.stringify(result.data)}`); } else { - auditLogger.error(`job ${job.data.key} completed with status ${result.status} and result ${result.data}`); + auditLogger.error(`job 
diff --git a/src/services/scanQueue.js b/src/services/scanQueue.js
index 6c6a65f26e..b9ade26a18 100644
--- a/src/services/scanQueue.js
+++ b/src/services/scanQueue.js
@@ -30,9 +30,9 @@ const addToScanQueue = (fileKey) => {
 const onFailedScanQueue = (job, error) => auditLogger.error(`job ${job.data.key} failed with error ${error}`);
 const onCompletedScanQueue = (job, result) => {
   if (result.status === 200) {
-    logger.info(`job ${job.data.key} completed with status ${result.status} and result ${result.data}`);
+    logger.info(`job ${job.data.key} completed with status ${result.status} and result ${JSON.stringify(result.data)}`);
   } else {
-    auditLogger.error(`job ${job.data.key} completed with status ${result.status} and result ${result.data}`);
+    auditLogger.error(`job ${job.data.key} completed with status ${result.status} and result ${JSON.stringify(result.data)}`);
   }
 };
 const processScanQueue = () => {
@@ -41,7 +41,12 @@ const processScanQueue = () => {
   scanQueue.on('failed', onFailedScanQueue);
   scanQueue.on('completed', onCompletedScanQueue);
   increaseListeners(scanQueue);
   const processFileFromJob = async (job) => processFile(job.data.key);
-  scanQueue.process(transactionQueueWrapper(processFileFromJob));
+  scanQueue.process(
+    transactionQueueWrapper(
+      processFileFromJob,
+      'scan',
+    ),
+  );
 };
 
 export {
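The JSON.stringify change in both log lines fixes output, not behavior: template-literal interpolation stringifies objects via String(), so a result payload prints as [object Object]. Plain JavaScript semantics, shown standalone:

const result = { status: 200, data: { message: 'Success' } };
console.log(`result ${result.data}`);                 // result [object Object]
console.log(`result ${JSON.stringify(result.data)}`); // result {"message":"Success"}

The new 'scan' argument to transactionQueueWrapper presumably labels the wrapped job; the wrapper's signature is not shown in this diff, so that reading is an inference.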
diff --git a/src/services/scanQueue.test.js b/src/services/scanQueue.test.js
index 6c5d683bb2..685d62d890 100644
--- a/src/services/scanQueue.test.js
+++ b/src/services/scanQueue.test.js
@@ -1,5 +1,11 @@
 import Queue from 'bull';
-import addToScanQueue, { scanQueue } from './scanQueue';
+import addToScanQueue, {
+  scanQueue,
+  onFailedScanQueue,
+  onCompletedScanQueue,
+  processScanQueue,
+} from './scanQueue';
+import { auditLogger, logger } from '../logger';
 
 jest.mock('bull');
 
@@ -44,4 +50,61 @@ describe('addToScanQueue', () => {
       }),
     );
   });
+
+  it('onFailedScanQueue logs an error', () => {
+    const job = { data: { key: 'test-key' } };
+    const error = new Error('Test error');
+    const auditLoggerSpy = jest.spyOn(auditLogger, 'error');
+    onFailedScanQueue(job, error);
+    expect(auditLoggerSpy).toHaveBeenCalledWith('job test-key failed with error Error: Test error');
+  });
+
+  it('onCompletedScanQueue logs info on success', () => {
+    const job = { data: { key: 'test-key' } };
+    const result = { status: 200, data: { message: 'Success' } };
+    const loggerSpy = jest.spyOn(logger, 'info');
+    onCompletedScanQueue(job, result);
+    expect(loggerSpy).toHaveBeenCalledWith('job test-key completed with status 200 and result {"message":"Success"}');
+  });
+
+  it('onCompletedScanQueue logs error on failure', () => {
+    const job = { data: { key: 'test-key' } };
+    const result = { status: 400, data: { message: 'Failure' } };
+    const auditLoggerSpy = jest.spyOn(auditLogger, 'error');
+    onCompletedScanQueue(job, result);
+    expect(auditLoggerSpy).toHaveBeenCalledWith('job test-key completed with status 400 and result {"message":"Failure"}');
+  });
+
+  it('scanQueue on failed event triggers onFailedScanQueue', () => {
+    const job = { data: { key: 'test-key' } };
+    const error = new Error('Test error');
+    const auditLoggerSpy = jest.spyOn(auditLogger, 'error');
+    scanQueue.on.mockImplementation((event, callback) => {
+      if (event === 'failed') {
+        callback(job, error);
+      }
+    });
+    scanQueue.on('failed', onFailedScanQueue);
+    expect(auditLoggerSpy).toHaveBeenCalledWith('job test-key failed with error Error: Test error');
+  });
+
+  it('scanQueue on completed event triggers onCompletedScanQueue', () => {
+    const job = { data: { key: 'test-key' } };
+    const result = { status: 200, data: { message: 'Success' } };
+    const loggerSpy = jest.spyOn(logger, 'info');
+    scanQueue.on.mockImplementation((event, callback) => {
+      if (event === 'completed') {
+        callback(job, result);
+      }
+    });
+    scanQueue.on('completed', onCompletedScanQueue);
+    expect(loggerSpy).toHaveBeenCalledWith('job test-key completed with status 200 and result {"message":"Success"}');
+  });
+
+  it('processScanQueue sets up listeners and processes the queue', () => {
+    processScanQueue();
+    expect(scanQueue.on).toHaveBeenCalledWith('failed', onFailedScanQueue);
+    expect(scanQueue.on).toHaveBeenCalledWith('completed', onCompletedScanQueue);
+    expect(scanQueue.process).toHaveBeenCalled();
+  });
 });
diff --git a/src/services/sessionReports.test.js b/src/services/sessionReports.test.js
index c2c5184535..8ed947ceae 100644
--- a/src/services/sessionReports.test.js
+++ b/src/services/sessionReports.test.js
@@ -2,8 +2,7 @@ import faker from '@faker-js/faker';
 import db, {
   SessionReportPilotFile,
   SessionReportPilotSupportingAttachment,
-  Grant,
-  Recipient,
+  Grant, Recipient, SessionReportPilot,
 } from '../models';
 import { createEvent, destroyEvent } from './event';
@@ -15,6 +14,7 @@ import {
   updateSession,
   getPossibleSessionParticipants,
   findSessionHelper,
+  validateFields,
 } from './sessionReports';
 import sessionReportPilot from '../models/sessionReportPilot';
 import { createGrant, createGoal, destroyGoal } from '../testUtils';
@@ -57,6 +57,12 @@ describe('session reports service', () => {
 
       await destroySession(created.id);
     });
+
+    it('throws an error when the event is not found', async () => {
+      await expect(createSession({ eventId: 999999, data: { card: 'ace' } }))
+        .rejects
+        .toThrow('Event with id 999999 not found');
+    });
   });
 
   describe('updateSession', () => {
@@ -317,5 +323,54 @@ describe('session reports service', () => {
       expect(sessions[1].data.startDate).toBe('04/20/2022');
       expect(sessions[2].data.startDate).toBe('01/01/2023');
     });
+
+    it('should return null if no sessions are found', async () => {
+      jest.spyOn(db.SessionReportPilot, 'findAll').mockResolvedValueOnce(null);
+      const sessions = await findSessionHelper({ eventId: 999999 }, true);
+      expect(sessions).toBeNull();
+    });
+
+    it('should return a single session when plural is false', async () => {
+      const session = await findSessionHelper({ id: sessionIds[0] }, false);
+      expect(session).toHaveProperty('id', sessionIds[0]);
+    });
+
+    it('should return multiple sessions when plural is true', async () => {
+      const sessions = await findSessionHelper({ eventId: createdEvent.id }, true);
+      expect(sessions.length).toBe(3);
+    });
+
+    it('should return default values when data, files, supportingAttachments, and event are undefined', async () => {
+      const createdSession = await SessionReportPilot.create({
+        eventId: createdEvent.id,
+        data: {},
+      });
+
+      const foundSession = await findSessionHelper({ id: createdSession.id });
+
+      expect(foundSession).toHaveProperty('data', {});
+      expect(foundSession).toHaveProperty('files', []);
+      expect(foundSession).toHaveProperty('supportingAttachments', []);
+
+      await SessionReportPilot.destroy({ where: { id: createdSession.id } });
+    });
+
+    it('should return null for the eventId when session.event is null', async () => {
+      jest.spyOn(db.SessionReportPilot, 'findOne').mockResolvedValueOnce({ id: 999 });
+      const foundSession = await findSessionHelper({ id: 'it doesnt matter' });
+      expect(foundSession).toHaveProperty('eventId', null);
+      expect(foundSession).toHaveProperty('id', 999);
+      expect(foundSession).toHaveProperty('data', {});
+      expect(foundSession).toHaveProperty('files', []);
+      expect(foundSession).toHaveProperty('supportingAttachments', []);
+    });
+  });
+
+  describe('validateFields', () => {
+    it('throws an error when there are missingFields', () => {
+      expect(() => {
+        validateFields({ field1: 'value1' }, ['field1', 'field2']);
+      }).toThrow();
+    });
+  });
 });
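Several of the new findSessionHelper tests rely on the one-shot mock variant, which is what lets stubbed branches (a null result set, a session with no event) coexist in the same describe block as tests that hit the real test database. The distinction, sketched against the same db import used above:

import db from '../models';

// One-shot: only the next findOne call is answered by the stub;
// subsequent calls fall through to the real implementation.
jest.spyOn(db.SessionReportPilot, 'findOne').mockResolvedValueOnce({ id: 999 });

// Persistent: every call is stubbed until the spy is restored.
jest.spyOn(db.SessionReportPilot, 'findOne').mockResolvedValue({ id: 999 });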
diff --git a/src/services/sessionReports.ts b/src/services/sessionReports.ts
index 8384c26ed2..33973217f2 100644
--- a/src/services/sessionReports.ts
+++ b/src/services/sessionReports.ts
@@ -10,7 +10,7 @@ const {
   SessionReportPilotSupportingAttachment,
 } = db;
 
-const validateFields = (request, requiredFields) => {
+export const validateFields = (request, requiredFields) => {
   const missingFields = requiredFields.filter((field) => !request[field]);
 
   if (missingFields.length) {
@@ -43,8 +43,6 @@ type WhereOptions = {
 
 // eslint-disable-next-line max-len
 export async function findSessionHelper(where: WhereOptions, plural = false): Promise {
-  let session;
-
   const query = {
     attributes: [
       'id',
@@ -72,11 +70,9 @@ export async function findSessionHelper(where: WhereOptions, plural = false): Pr
     ],
   };
 
-  if (plural) {
-    session = await SessionReportPilot.findAll(query);
-  } else {
-    session = await SessionReportPilot.findOne(query);
-  }
+  const session = plural
+    ? await SessionReportPilot.findAll(query)
+    : await SessionReportPilot.findOne(query);
 
   if (!session) {
     return null;
@@ -104,7 +100,7 @@ export async function findSessionHelper(where: WhereOptions, plural = false): Pr
     files: session?.files ?? [],
     supportingAttachments: session?.supportingAttachments ?? [],
     updatedAt: session?.updatedAt,
-    event: session?.event ?? {},
+    event: session?.event,
   };
 }
diff --git a/src/testUtils.js b/src/testUtils.js
index 1566bfb833..2b36d9a4e8 100644
--- a/src/testUtils.js
+++ b/src/testUtils.js
@@ -68,9 +68,10 @@ export async function createUser(user) {
 }
 
 function defaultRegion() {
-  const number = faker.datatype.number({ min: 50, max: 2000 });
+  // eslint-disable-next-line max-len
+  const number = faker.unique(() => faker.datatype.number({ min: 50, max: 2000 }));
   return {
-    id: faker.unique(() => number, { maxRetries: 20 }),
+    id: number,
     name: `Region ${number}`,
   };
 }
@@ -81,7 +82,7 @@ export async function createRegion(region) {
 
 function defaultGrant() {
   return {
-    id: faker.datatype.number({ min: 10000, max: 30000 }),
+    id: faker.unique(() => faker.datatype.number({ min: 10000, max: 30000 })),
     number: `0${faker.datatype.number({ min: 1, max: 9999 })}${faker.animal.type()}`,
     regionId: 10,
     status: 'Active',
@@ -92,7 +93,7 @@ export async function createRecipient(recipient) {
   return Recipient.create({
-    id: faker.datatype.number({ min: 10000, max: 30000 }),
+    id: faker.unique(() => faker.datatype.number({ min: 10000, max: 30000 })),
     // eslint-disable-next-line max-len
     name: faker.company.companyName() + faker.company.companySuffix() + faker.datatype.number({ min: 1, max: 1000 }),
     uei: 'NNA5N2KHMGN2',
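The defaultRegion change above is a genuine bug fix, not a style tweak: the old code drew one random number up front and handed faker.unique a closure that always returned that same constant, so when the value collided with a previously used one, every retry re-yielded the collision until maxRetries was exhausted. Wrapping the generator itself lets each retry draw a fresh candidate. Side by side (faker being @faker-js/faker, as already used in testUtils.js):

// Before: the closure captures one fixed value, so retries cannot make progress.
const fixed = faker.datatype.number({ min: 50, max: 2000 });
const brokenId = faker.unique(() => fixed, { maxRetries: 20 });

// After: faker.unique re-invokes the generator on every retry.
const workingId = faker.unique(() => faker.datatype.number({ min: 50, max: 2000 }));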
diff --git a/src/tools/processData.js b/src/tools/processData.js
index fbdb06b3a0..a30ffa3e5c 100644
--- a/src/tools/processData.js
+++ b/src/tools/processData.js
@@ -69,7 +69,7 @@ const hsesUsers = [
     name: 'Jon Pyers', hsesUsername: 'test.tta.jon', hsesUserId: '52829', email: 'jon.pyers@adhocteam.us',
   }, {
-    name: 'Heather Smith', hsesUsername: 'test.tta.heather', hsesUserId: '55174', email: 'no-send_smith92@yahoo.com',
+    name: 'Heather Smith', hsesUsername: 'test.tta.heather', hsesUserId: '52456', email: 'no-send_smith92@yahoo.com',
   }, {
     name: 'Tammy Smith', hsesUsername: 'test.tta.tammy', hsesUserId: '53719', email: 'no-send_smith93@yahoo.com',
diff --git a/src/widgets/regionalGoalDashboard/goalsPercentage.test.js b/src/widgets/regionalGoalDashboard/goalsPercentage.test.js
new file mode 100644
index 0000000000..9835443349
--- /dev/null
+++ b/src/widgets/regionalGoalDashboard/goalsPercentage.test.js
@@ -0,0 +1,71 @@
+import goalsPercentage from './goalsPercentage';
+import db from '../../models';
+import {
+  createGoal, destroyGoal, createGrant, createRecipient,
+} from '../../testUtils';
+import { GOAL_STATUS } from './goalsByStatus';
+
+const { Grant, Recipient } = db;
+
+describe('goalsPercentage', () => {
+  const goals = [];
+  let recipient;
+  let grant;
+  let response;
+
+  beforeAll(async () => {
+    try {
+      recipient = await createRecipient();
+      const recipientId = recipient.id;
+      grant = await createGrant({ recipientId });
+      const grantId = grant.id;
+
+      goals.push(await createGoal({
+        status: GOAL_STATUS.IN_PROGRESS,
+        grantId,
+        recipientId,
+        onApprovedAR: true,
+      }));
+      goals.push(await createGoal({
+        status: GOAL_STATUS.IN_PROGRESS,
+        grantId,
+        recipientId,
+        onApprovedAR: true,
+      }));
+
+      response = await goalsPercentage({ goal: { id: goals.map((g) => g.id) } });
+    } catch (error) {
+      // eslint-disable-next-line no-console
+      console.log('goalsPercentageTest: ', error);
+    }
+  });
+
+  afterAll(async () => {
+    const promises = goals.map((goal) => destroyGoal(goal));
+    await Promise.all(promises);
+    await Grant.destroy({
+      where: {
+        id: grant.id,
+      },
+      individualHooks: true,
+    });
+    await Recipient.destroy({
+      where: {
+        id: recipient.id,
+      },
+    });
+    await db.sequelize.close();
+  });
+
+  it('calculates the correct numerator', () => {
+    expect(response.numerator).toBe(goals.length);
+  });
+
+  it('calculates the correct denominator', () => {
+    expect(response.denominator).toBe(goals.length);
+  });
+
+  it('calculates the correct percentage', () => {
+    expect(response.percentage).toBe(100);
+  });
+});
diff --git a/src/widgets/regionalGoalDashboard/topicsByGoalStatus.test.js b/src/widgets/regionalGoalDashboard/topicsByGoalStatus.test.js
new file mode 100644
index 0000000000..6bc00f7801
--- /dev/null
+++ b/src/widgets/regionalGoalDashboard/topicsByGoalStatus.test.js
@@ -0,0 +1,93 @@
+import topicsByGoalStatus from './topicsByGoalStatus';
+import db from '../../models';
+import { GOAL_STATUS } from './goalsByStatus';
+
+jest.mock('../../models', () => ({
+  Goal: {
+    findAll: jest.fn(),
+  },
+}));
+
+describe('topicsByGoalStatus', () => {
+  let response;
+
+  beforeAll(async () => {
+    db.Goal.findAll.mockResolvedValue([
+      {
+        topic: 'Health',
+        total: 2,
+        'Not Started': 0,
+        'In Progress': 2,
+        Closed: 0,
+        Suspended: 0,
+      },
+      {
+        topic: 'Education',
+        total: 1,
+        'Not Started': 0,
+        'In Progress': 1,
+        Closed: 0,
+        Suspended: 0,
+      },
+    ]);
+
+    response = await topicsByGoalStatus({ goal: { id: [1, 2, 3] } });
+  });
+
+  it('calculates the correct number of topics', () => {
+    expect(response.length).toBe(2);
+  });
+
+  it('calculates the correct number of goals per topic', () => {
+    const healthTopic = response.find((t) => t.topic === 'Health');
+    const educationTopic = response.find((t) => t.topic === 'Education');
+    expect(healthTopic.total).toBe(2);
+    expect(healthTopic.statuses['In Progress']).toBe(2);
+    expect(educationTopic.total).toBe(1);
+    expect(educationTopic.statuses['In Progress']).toBe(1);
+  });
+
+  it('handles the case where the topic is not in the accumulator', async () => {
+    db.Goal.findAll.mockResolvedValue([
+      {
+        topic: 'Safety',
+        total: 1,
+        'Not Started': 1,
+        'In Progress': 0,
+        Closed: 0,
+        Suspended: 0,
+      },
+    ]);
+
+    const funcResponse = await topicsByGoalStatus({ goal: { id: [1] } });
+    const safetyTopic = funcResponse.find((t) => t.topic === 'Safety');
+    expect(safetyTopic.total).toBe(1);
+    expect(safetyTopic.statuses['Not Started']).toBe(1);
+  });
+
+  it('handles the case where the topic is already in the accumulator', async () => {
+    db.Goal.findAll.mockResolvedValue([
+      {
+        topic: 'Health',
+        total: 2,
+        'Not Started': 0,
+        'In Progress': 0,
+        Closed: 2,
+        Suspended: 0,
+      },
+    ]);
+
+    const funcResponse = await topicsByGoalStatus({ goal: { id: [1, 2] } });
+    const healthTopic = funcResponse.find((t) => t.topic === 'Health');
+    expect(healthTopic).toStrictEqual({
+      topic: 'Health',
+      statuses: {
+        'Not Started': 0,
+        'In Progress': 0,
+        Suspended: 0,
+        Closed: 2,
+      },
+      total: 2,
+    });
+  });
+});
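Unlike goalsPercentage.test.js above, this suite replaces the entire models module with a factory stub, so topicsByGoalStatus runs against canned rows and needs no database; each test then re-primes findAll to steer a different branch. The skeleton of that pattern, trimmed to essentials:

import topicsByGoalStatus from './topicsByGoalStatus';
import db from '../../models';

// jest.mock is hoisted above the imports, so db resolves to this factory object.
jest.mock('../../models', () => ({
  Goal: { findAll: jest.fn() }, // only the model surface the widget touches
}));

it('reshapes whatever rows the query yields', async () => {
  db.Goal.findAll.mockResolvedValue([
    { topic: 'Health', total: 2, 'Not Started': 0, 'In Progress': 2, Closed: 0, Suspended: 0 },
  ]);
  const rows = await topicsByGoalStatus({ goal: {} });
  expect(rows[0].topic).toBe('Health');
});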
diff --git a/src/widgets/regionalGoalDashboard/topicsByGoalStatus.ts b/src/widgets/regionalGoalDashboard/topicsByGoalStatus.ts
index 9be852b2b5..f13649820f 100644
--- a/src/widgets/regionalGoalDashboard/topicsByGoalStatus.ts
+++ b/src/widgets/regionalGoalDashboard/topicsByGoalStatus.ts
@@ -1,10 +1,15 @@
-import { Op } from 'sequelize';
-import { TOPICS } from '@ttahub/common';
+import { Sequelize, Op } from 'sequelize';
 import { GOAL_STATUS } from '../../constants';
-import {
-  // @ts-ignore
-  Goal, Objective, Topic, ActivityReportObjective, ActivityReportObjectiveTopic,
-} from '../../models';
+import db from '../../models';
+
+const {
+  Goal,
+  Objective,
+  Topic,
+  ActivityReportObjective,
+  ActivityReportObjectiveTopic,
+  ActivityReport,
+} = db;
 
 type Status = keyof typeof GOAL_STATUS;
 
@@ -15,19 +20,19 @@ type TopicResponse = {
 };
 
 export default async function topicsByGoalStatus(scopes): Promise {
-  // Goal -> Objective -> ObjectiveTopic -> Topic
-  // Legacy solution:
-  // Goal -> ARGoal -> AR -> AR.topics (array of enums)
-
-  type QueryResults = {
-    id: number;
-    status: Status;
-    'objectives.activityReportObjectives.activityReportObjectiveTopics.topic.id': number;
-    'objectives.activityReportObjectives.activityReportObjectiveTopics.topic.topic': typeof TOPICS[number];
-  };
-
-  const allTopics = await Goal.findAll({
-    attributes: ['id', 'status'],
+  const queryResults = await Goal.findAll({
+    attributes: [
+      [Sequelize.literal('COALESCE("%2"."name", "%1"."name")'), 'topic'],
+      // eslint-disable-next-line @typescript-eslint/quotes
+      [Sequelize.literal(`COUNT(DISTINCT "Goal"."id") FILTER (WHERE "Goal"."status" = 'Not Started')`), 'Not Started'],
+      // eslint-disable-next-line @typescript-eslint/quotes
+      [Sequelize.literal(`COUNT(DISTINCT "Goal"."id") FILTER (WHERE "Goal"."status" = 'In Progress')`), 'In Progress'],
+      // eslint-disable-next-line @typescript-eslint/quotes
+      [Sequelize.literal(`COUNT(DISTINCT "Goal"."id") FILTER (WHERE "Goal"."status" = 'Closed')`), 'Closed'],
+      // eslint-disable-next-line @typescript-eslint/quotes
+      [Sequelize.literal(`COUNT(DISTINCT "Goal"."id") FILTER (WHERE "Goal"."status" = 'Suspended')`), 'Suspended'],
+      [Sequelize.literal('COUNT(DISTINCT "Goal"."id")'), 'total'],
+    ],
     where: {
       [Op.and]: [
        scopes.goal,
@@ -42,60 +47,64 @@ export default async function topicsByGoalStatus(scopes): Promise {
-    const { status, 'objectives.activityReportObjectives.activityReportObjectiveTopics.topic.topic': topic } = goal;
-    if (topic && !acc[topic]) {
-      acc[topic] = { ...Object.values(GOAL_STATUS).reduce((a, s) => ({ ...a, [s]: 0 }), {}) };
-    }
-
-    if (acc[topic]) {
-      acc[topic][status] += 1;
-    }
-
-    return acc;
-  }, {});
-
-  sanitized = Object.entries(sanitized).reduce((acc, [topic, statuses]) => {
-    acc[topic].total = Object.values(statuses).reduce((a, s) => a + s, 0);
-    return acc;
-  }, sanitized);
+  });
 
-  // Format this so it's more easily digestible by the frontend
-  const response: TopicResponse[] = Object.entries(sanitized)
-    .map(([topic, statuses]) => ({ topic, statuses }))
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    .map((obj: any) => {
-      const { statuses } = obj;
-      const { total } = statuses;
-      delete statuses.total;
-      return { ...obj, total };
-    });
+  // Transform queryResults to TopicResponse[]
+  const response: TopicResponse[] = queryResults.map((result) => {
+    const { topic, total, ...statuses } = result;
+    return {
+      topic,
+      statuses,
+      total,
+    };
+  });
 
   return response;
 }
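The rewrite moves per-status tallying into Postgres: each COUNT(DISTINCT ...) FILTER (WHERE ...) literal is a conditional aggregate, so the query returns one flat row per topic carrying all four status counts plus a total, and the remaining JavaScript is pure reshaping. The rest-spread in the final map does that reshaping; its contract, shown with a literal row:

// One flat row per topic, as produced by the aggregate query above.
const row = {
  topic: 'Health', 'Not Started': 0, 'In Progress': 2, Closed: 0, Suspended: 0, total: 2,
};
// Peel off topic and total; everything left over is a status tally.
const { topic, total, ...statuses } = row;
// topic === 'Health'; total === 2
// statuses === { 'Not Started': 0, 'In Progress': 2, Closed: 0, Suspended: 0 }

One caveat: rest-spread copies only own enumerable properties, so this transform assumes findAll returns plain objects (for example via a raw: true option elided from the hunk above) rather than Sequelize model instances.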