From 25b84c04dd69da85c1763c47475739b4bc7c11de Mon Sep 17 00:00:00 2001 From: svariant Date: Wed, 20 Nov 2024 17:14:16 +0100 Subject: [PATCH 01/14] [PPANTT-170] feat: Performance test init --- .devops/code-review-pipelines.yml | 64 ---- .devops/deploy-pipelines-aks.yml | 223 ------------- .devops/deploy-pipelines-standard.yml | 298 ------------------ .devops/performance-test-pipelines.yml | 39 +++ .gitignore | 5 +- performance-test/src/Dockerfile | 11 + performance-test/src/dev.environment.json | 10 + performance-test/src/gpd_ingestion_test.js | 39 +++ performance-test/src/local.environment.json | 10 + performance-test/src/modules/common.js | 11 + performance-test/src/modules/pg_gpd_client.js | 56 ++++ performance-test/src/modules/redis_client.js | 34 ++ performance-test/src/package.json | 11 + performance-test/src/scripts/review_script.js | 137 ++++++++ performance-test/src/uat.environment.json | 10 + 15 files changed, 372 insertions(+), 586 deletions(-) delete mode 100644 .devops/code-review-pipelines.yml delete mode 100644 .devops/deploy-pipelines-aks.yml delete mode 100644 .devops/deploy-pipelines-standard.yml create mode 100644 .devops/performance-test-pipelines.yml create mode 100644 performance-test/src/Dockerfile create mode 100644 performance-test/src/dev.environment.json create mode 100644 performance-test/src/gpd_ingestion_test.js create mode 100644 performance-test/src/local.environment.json create mode 100644 performance-test/src/modules/common.js create mode 100644 performance-test/src/modules/pg_gpd_client.js create mode 100644 performance-test/src/modules/redis_client.js create mode 100644 performance-test/src/package.json create mode 100644 performance-test/src/scripts/review_script.js create mode 100644 performance-test/src/uat.environment.json diff --git a/.devops/code-review-pipelines.yml b/.devops/code-review-pipelines.yml deleted file mode 100644 index b4dc602..0000000 --- a/.devops/code-review-pipelines.yml +++ /dev/null @@ -1,64 +0,0 @@ -# Maven -# Build your Java project and run tests with Apache Maven. 
-# Add steps that analyze code, save build artifacts, deploy, and more: -# https://docs.microsoft.com/azure/devops/pipelines/languages/java - -# Automatically triggered on PR -# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema%2Cparameter-schema#pr-trigger -trigger: - - main -pr: - - main - -pool: - vmImage: 'ubuntu-latest' - -variables: - MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository - MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)' - -steps: - - task: Cache@2 - inputs: - key: 'maven | "$(Agent.OS)" | pom.xml' - restoreKeys: | - maven | "$(Agent.OS)" - maven - path: $(MAVEN_CACHE_FOLDER) - displayName: Cache Maven local repo - - - task: SonarCloudPrepare@1 - displayName: 'Prepare SonarCloud analysis configuration' - inputs: - SonarCloud: '$(SONARCLOUD_SERVICE_CONN)' - organization: '$(SONARCLOUD_ORG)' - scannerMode: Other - extraProperties: | - sonar.projectKey=$(SONARCLOUD_PROJECT_KEY) - sonar.projectName=$(SONARCLOUD_PROJECT_NAME) - sonar.coverage.exclusions=**/config/*,**/*Mock*,**/model/**,**/entity/* - sonar.cpd.exclusions=**/model/**,**/entity/* - - - - task: Maven@3 - displayName: 'Run Junit Test' - inputs: - mavenPomFile: 'pom.xml' - mavenOptions: '-Xmx3072m $(MAVEN_OPTS)' - mavenVersionOption: 'Default' - mavenAuthenticateFeed: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.11' - publishJUnitResults: true - testResultsFiles: '**/surefire-reports/TEST-*.xml' - goals: 'clean verify' - sonarQubeRunAnalysis: true - codeCoverageToolOption: 'JaCoCo' - effectivePomSkip: false - isJacocoCoverageReportXML: true - sqMavenPluginVersionChoice: 'latest' - - - task: SonarCloudPublish@1 - displayName: 'Publish SonarCloud results on build summary' - inputs: - pollingTimeoutSec: '300' diff --git a/.devops/deploy-pipelines-aks.yml b/.devops/deploy-pipelines-aks.yml deleted file mode 100644 index 33356a6..0000000 --- a/.devops/deploy-pipelines-aks.yml +++ /dev/null @@ -1,223 +0,0 @@ -# Only manual triggers -trigger: none -pr: none - -pool: - vmImage: 'ubuntu-22.04' - -parameters: - - name: ENV - displayName: Target Environment - type: string - default: dev - values: - - dev - - uat - - prod - - name: SEMVER - displayName: "When packing a release, define the version bump to apply. 
Use only buildNumber or skip for manual deployment" - type: string - values: - - major - - minor - - patch - - buildNumber - - skip - default: skip - - name: "FORCE_REPLACE_DOCKER_IMAGE" - displayName: "Force the existing docker image to be replaced" - type: boolean - default: False - values: - - False - - True - - name: TEST - displayName: Run integration/smoke tests - type: boolean - default: true - -variables: - imageRepository: '$(IMAGE_REPOSITORY_NAME)' - nameSpace: '$(K8S_NAMESPACE)' - - ${{ if eq(parameters['ENV'], 'dev') }}: - dockerRegistryServiceConnection: $(DEV_CONTAINER_REGISTRY_SERVICE_CONN) - dockerRegistryFqdn: $(DEV_CONTAINER_NAMESPACE) - kubernetesServiceConnection: '$(DEV_KUBERNETES_SERVICE_CONN)' - poolImage: 'pagopa-dev-linux' - appInsightsServiceConn: "$(TF_APPINSIGHTS_SERVICE_CONN_DEV)" - appInsightsResourceId: "$(TF_APPINSIGHTS_RESOURCE_ID_DEV)" - ${{ if eq(parameters['ENV'], 'uat') }}: - dockerRegistryServiceConnection: $(UAT_CONTAINER_REGISTRY_SERVICE_CONN) - dockerRegistryFqdn: $(UAT_CONTAINER_NAMESPACE) - kubernetesServiceConnection: '$(UAT_KUBERNETES_SERVICE_CONN)' - poolImage: 'pagopa-uat-linux' - appInsightsServiceConn: "$(TF_APPINSIGHTS_SERVICE_CONN_UAT)" - appInsightsResourceId: "$(TF_APPINSIGHTS_RESOURCE_ID_UAT)" - ${{ if eq(parameters['ENV'], 'prod') }}: - dockerRegistryServiceConnection: $(PROD_CONTAINER_REGISTRY_SERVICE_CONN) - dockerRegistryFqdn: $(PROD_CONTAINER_NAMESPACE) - kubernetesServiceConnection: '$(PROD_KUBERNETES_SERVICE_CONN)' - poolImage: 'pagopa-prod-linux' - appInsightsServiceConn: "$(TF_APPINSIGHTS_SERVICE_CONN_PROD)" - appInsightsResourceId: "$(TF_APPINSIGHTS_RESOURCE_ID_PROD)" - - ${{ if eq(variables['Build.SourceBranchName'], 'merge') }}: - sourceBranch: "main" # force to main branch - ${{ if ne(variables['Build.SourceBranchName'], 'merge') }}: - sourceBranch: ${{ variables['Build.SourceBranchName'] }} - - - -resources: - repositories: - - repository: pagopaCommons - type: github - name: pagopa/azure-pipeline-templates - ref: refs/tags/v2.10.1 - endpoint: 'io-azure-devops-github-ro' - -stages: - - # Create a release on GitHub - - stage: Release - jobs: - - job: make_release - steps: - - checkout: self - clean: true - persistCredentials: true - - - ${{ if ne(parameters.SEMVER, 'skip') }}: - - template: templates/maven-github-release/template.yaml@pagopaCommons - parameters: - release_branch: $(sourceBranch) - gitEmail: $(GIT_EMAIL) - gitUsername: $(GIT_USERNAME) - gitHubConnection: $(GITHUB_CONNECTION) - ${{ if ne(parameters.SEMVER, 'skip') }}: - semver: '${{ parameters.SEMVER }}' - ${{ if eq(parameters.SEMVER, 'skip') }}: - semver: 'buildNumber' # this case is impossible due to main condition, but it is necessary to work property - - - template: templates/maven-github-current-version/template.yaml@pagopaCommons - - # Build and Push Docker Image - - stage: Build - dependsOn: Release - variables: - current_version: $[ stageDependencies.Release.make_release.outputs['current_version.value'] ] - jobs: - - job: "build" - steps: - - checkout: self - persistCredentials: true - - - script: | - git checkout $(sourceBranch) - displayName: Checkout and update branch - - - template: templates/docker-release/template.yaml@pagopaCommons - parameters: - CONTAINER_REGISTRY_SERVICE_CONN: $(dockerRegistryServiceConnection) - CONTAINER_REGISTRY_FQDN: $(dockerRegistryFqdn) - DOCKER_IMAGE_NAME: $(imageRepository) - DOCKER_IMAGE_TAG: $(current_version) - FORCE_REPLACE_DOCKER_IMAGE: ${{ parameters.FORCE_REPLACE_DOCKER_IMAGE }} - - # # Testing the docker 
image - # - stage: Smoke_Test - # dependsOn: - # - Build - # condition: and(succeeded(), eq('${{ parameters.TEST }}', 'true')) - # jobs: - # - job: smoke_tests - # steps: - # - checkout: self - # persistCredentials: true - # - # - script: | - # git checkout $(sourceBranch) - # displayName: Checkout and update branch - # - # - task: Docker@2 - # displayName: "Docker login" - # inputs: - # containerRegistry: "$(dockerRegistryServiceConnection)" - # command: "login" - # - # - task: Bash@3 - # displayName: 'Run Smoke Tests' - # inputs: - # targetType: 'inline' - # script: | - # cd ./integration-test - # sh run_integration_test.sh - # env: - # containerRegistry: $(dockerRegistryFqdn) - - # Deploy on K8s with Helm - - stage: Deploy - # condition: not(failed('Smoke_Test')) - dependsOn: - - Release - - Build - # - Smoke_Test - variables: - deploy_version: $[ stageDependencies.Release.make_release.outputs['current_version.value'] ] - jobs: - - deployment: "deploy" - pool: - name: $(poolImage) - environment: ${{ parameters['ENV'] }} - strategy: - runOnce: - deploy: - steps: - - checkout: self - persistCredentials: true - - - script: | - git checkout $(sourceBranch) - displayName: Checkout and update branch - - - template: templates/helm-microservice-chart-setup/template.yaml@pagopaCommons - parameters: - DEPLOY_VERSION: $(deploy_version) - - - template: templates/helm-microservice-chart-deploy/template.yaml@pagopaCommons - parameters: - DO_DEPLOY: true - ENV: ${{ parameters['ENV'] }} - KUBERNETES_SERVICE_CONN: ${{ variables.kubernetesServiceConnection }} - NAMESPACE: $(nameSpace) - APP_NAME: $(imageRepository) - VALUE_FILE: "helm/values-${{ parameters['ENV'] }}.yaml" - DEPLOY_VERSION: $(deploy_version) - APPINSIGHTS_SERVICE_CONN: ${{ variables.appInsightsServiceConn }} - APPINSIGHTS_RESOURCE_ID: ${{ variables.appInsightsResourceId }} - - - # Run Tests -# - stage: Integration_Test -# dependsOn: -# - Deploy -# condition: and(succeeded(), eq('${{ parameters.TEST }}', 'true')) -# jobs: -# - job: integration_tests -# steps: -# - checkout: self -# persistCredentials: true -# -# - script: | -# git checkout $(sourceBranch) -# displayName: Checkout and update branch -# -# - task: Bash@3 -# displayName: 'Run Integration Tests' -# inputs: -# targetType: 'inline' -# script: | -# cd ./integration-test/src -# yarn install -# yarn test:${{ parameters.ENV }} diff --git a/.devops/deploy-pipelines-standard.yml b/.devops/deploy-pipelines-standard.yml deleted file mode 100644 index e134ac5..0000000 --- a/.devops/deploy-pipelines-standard.yml +++ /dev/null @@ -1,298 +0,0 @@ -parameters: - - name: ENV - displayName: Target Environment - type: string - default: dev - values: - - dev - - uat - - prod - - name: SEMVER - displayName: "When packing a release, define the version bump to apply. 
Use only buildNumber or skip for manual deployment" - type: string - values: - - major - - minor - - patch - - buildNumber - - skip - default: skip - - name: TEST - displayName: Run tests - type: boolean - default: false - -variables: - ${{ if eq(parameters['ENV'], 'dev') }}: - AZURE_SUBSCRIPTION: $(DEV_AZURE_SUBSCRIPTION) - RESOURCE_GROUP: $(DEV_WEB_APP_RESOURCE_GROUP_NAME) - APP_NAME: $(DEV_WEB_APP_NAME) - STAGE: "d" - dockerRegistryServiceConnection: $(DEV_CONTAINER_REGISTRY) - dockerNamespace: $(DEV_CONTAINER_NAMESPACE) - - ${{ if eq(parameters['ENV'], 'uat') }}: - AZURE_SUBSCRIPTION: $(UAT_AZURE_SUBSCRIPTION) - RESOURCE_GROUP: $(UAT_WEB_APP_RESOURCE_GROUP_NAME) - APP_NAME: $(UAT_WEB_APP_NAME) - STAGE: "u" - dockerRegistryServiceConnection: $(UAT_CONTAINER_REGISTRY) - dockerNamespace: $(UAT_CONTAINER_NAMESPACE) - - ${{ if eq(parameters['ENV'], 'prod') }}: - AZURE_SUBSCRIPTION: $(PROD_AZURE_SUBSCRIPTION) - RESOURCE_GROUP: $(PROD_WEB_APP_RESOURCE_GROUP_NAME) - APP_NAME: $(PROD_WEB_APP_NAME) - STAGE: "p" - dockerRegistryServiceConnection: $(PROD_CONTAINER_REGISTRY) - dockerNamespace: $(PROD_CONTAINER_NAMESPACE) - - ${{ if eq(variables['Build.SourceBranchName'], 'merge') }}: - SOURCE_BRANCH: "main" # force to main branch - ${{ if ne(variables['Build.SourceBranchName'], 'merge') }}: - SOURCE_BRANCH: ${{ variables['Build.SourceBranchName'] }} - - - - MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository - MAVEN_OPTS: "-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)" - title: "" - sha: "" - tag: "" - -# Only manual triggers -trigger: none -pr: none - -pool: - vmImage: ubuntu-latest - -stages: - - # Create a release on GitHub - - stage: release_service - condition: ne('${{ parameters.SEMVER }}', 'skip') - pool: - vmImage: "ubuntu-latest" - jobs: - - job: releaseService - steps: - - checkout: self - clean: true - persistCredentials: true - - - script: | - git checkout $(SOURCE_BRANCH) - - - script: | - git config --global user.name "$(GIT_USERNAME)" - git config --global user.email "$(GIT_EMAIL)" - - - template: azure-templates/maven-versioning.yml - parameters: - semver: "${{ parameters.SEMVER }}" - - - task: Bash@3 - name: pomversion - inputs: - targetType: "inline" - script: | - version=$(mvn -f pom.xml help:evaluate -Dexpression=project.version -q -DforceStdout) - echo "##vso[task.setvariable variable=next;isOutput=true]$version" - failOnStderr: true - - - script: | - git add pom.xml - git commit -m "Bump version [skip ci]" - git push origin $(SOURCE_BRANCH) - - - - script: | - HEAD_SHA=$(git rev-parse HEAD) - TAG="$(pomversion.next)" - TITLE="Release $(pomversion.next)" - echo "##vso[task.setvariable variable=title]$TITLE" - echo "##vso[task.setvariable variable=sha]$HEAD_SHA" - echo "##vso[task.setvariable variable=tag]$TAG" - - - script: | - echo $(tag) - echo $(title) - echo $(sha) - - - # create new release - - task: GitHubRelease@0 - inputs: - gitHubConnection: $(GITHUB_CONNECTION) - repositoryName: $(Build.Repository.Name) - action: create - target: $(sha) - tagSource: manual - tag: $(tag) - title: $(title) - addChangelog: true - - # Deploy on Azure - - stage: deploy - condition: not(failed('releaseService')) - pool: - vmImage: "ubuntu-latest" - jobs: - - job: deployJava - steps: - - - script: | - echo ${{variables['Build.SourceBranchName']}} - echo $(SOURCE_BRANCH) - echo $(TEST) - - - checkout: self - clean: true - persistCredentials: true - submodules: true - - - script: | - echo "Checkout on $(SOURCE_BRANCH)" - git checkout $(SOURCE_BRANCH) - displayName: Checkout on source branch - 
- # - task: Bash@3 - # name: application_properties - # inputs: - # targetType: "inline" - # script: | - # cp src/main/resources/application-azure-$(STAGE).properties src/main/resources/application.properties - # cat src/main/resources/application.properties - # failOnStderr: true - - - task: Cache@2 - inputs: - key: 'maven | "$(Agent.OS)" | pom.xml' - restoreKeys: | - maven | "$(Agent.OS)" - maven - path: $(MAVEN_CACHE_FOLDER) - displayName: Set Maven Cache - - - task: Bash@3 - # When the maven task is installed, mvn can be used in a script - name: pomversion - inputs: - targetType: "inline" - script: | - version=$(mvn -f pom.xml help:evaluate -Dexpression=project.version -q -DforceStdout) - echo "##vso[task.setvariable variable=next;isOutput=true]$version" - failOnStderr: true - - - task: Maven@3 - inputs: - mavenPomFile: 'pom.xml' - publishJUnitResults: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.11' - mavenVersionOption: 'Default' - mavenOptions: '-Xmx3072m $(MAVEN_OPTS)' - mavenAuthenticateFeed: false - effectivePomSkip: false - sonarQubeRunAnalysis: false - - - task: Docker@2 - displayName: Build and push an image to container registry - inputs: - containerRegistry: '$(dockerRegistryServiceConnection)' - repository: '$(IMAGE_REPOSITORY)' - command: 'buildAndPush' - tags: | - $(Build.BuildId) - latest - $(pomversion.next) - - # deploy project-fn - - task: AzureFunctionAppContainer@1 - displayName: Deploy Function App [DEV] - inputs: - azureSubscription: $(AZURE_SUBSCRIPTION) - appName: "${{variables.DEV_WEB_APP_NAME}}-fn-project" - imageName: "${{variables.DEV_CONTAINER_NAMESPACE}}/project:$(Build.BuildId)" - slotName: production - - - script: | - echo "##vso[task.setvariable variable=version;isOutput=true]$(pomversion.next)" - name: dockerTag - - - # Run test - - stage: test - # run this stage only if 'test' is enabled - condition: and(not(failed('deployJava')), eq('${{ parameters.TEST }}', 'true')) - pool: - vmImage: "ubuntu-latest" - jobs: - # is needed to wait for startup of application - - job: waitStartup - pool: Server - steps: - - task: Delay@1 - inputs: - delayForMinutes: '10' - - # - job: integrationTests - # dependsOn: waitStartup - # steps: - # - script: | - # git checkout $(SOURCE_BRANCH) - - # - script: | - # yarn global add newman - # displayName: 'newman installation' - - # - script: | - # newman run api-test/Project.postman_collection.json --environment=api-test/Azure.postman_environment.json --reporters cli,junit --reporter-junit-export Results/api-config-TEST.xml --verbose - # displayName: 'Run api test' - # continueOnError: false - - # - task: PublishTestResults@2 - # condition: always() - # inputs: - # testResultsFormat: 'JUnit' - # testResultsFiles: '**/*-TEST.xml' - # searchFolder: '$(System.DefaultWorkingDirectory)' - - - job: deployUAT - dependsOn: integrationTests - variables: - version: $[ stageDependencies.deploy.deployJava.outputs['dockerTag.version'] ] - steps: - - task: Maven@3 - inputs: - mavenPomFile: 'pom.xml' - publishJUnitResults: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.11' - mavenVersionOption: 'Default' - mavenOptions: '-Xmx3072m $(MAVEN_OPTS)' - mavenAuthenticateFeed: false - effectivePomSkip: false - sonarQubeRunAnalysis: false - - - task: Docker@2 - displayName: Build and push an image to UAT container registry - inputs: - containerRegistry: '$(UAT_CONTAINER_REGISTRY)' - repository: '$(IMAGE_REPOSITORY)' - command: 'buildAndPush' - tags: | - $(Build.BuildId) - latest - $(version) - - # deploy project-fn - - 
task: AzureFunctionAppContainer@1 - displayName: Deploy Function App [UAT] - condition: in('${{ parameters.ENV }}', 'uat') - inputs: - azureSubscription: $(AZURE_SUBSCRIPTION) - appName: "${{variables.UAT_WEB_APP_NAME}}-fn-project}" - imageName: "${{variables.UAT_CONTAINER_NAMESPACE}}/project:$(Build.BuildId)" - slotName: production \ No newline at end of file diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml new file mode 100644 index 0000000..bce1442 --- /dev/null +++ b/.devops/performance-test-pipelines.yml @@ -0,0 +1,39 @@ +# azure-pipelines.yml +trigger: none + +parameters: + - name: "ENVIRONMENT" + displayName: "Environment" + type: string + values: + - "dev" + - "uat" + default: "uat" + - name: "NUMBER_OF_EVENTS" + displayName: "Number of events" + type: number + default: 50 +variables: + ${{ if eq(parameters['ENVIRONMENT'], 'dev') }}: + poolImage: "pagopa-dev-loadtest-linux" + PG_GPD_PASSWORD: "$(DEV_PG_GPD_PASSWORD)" + ${{ if eq(parameters['ENVIRONMENT'], 'uat') }}: + poolImage: "pagopa-uat-loadtest-linux" + PG_GPD_PASSWORD: "$(UAT_PG_GPD_PASSWORD)" + +pool: + name: $(poolImage) + +steps: + - script: | + cd ./performance-test/src + npm install + docker build -f ./Dockerfile -t exec-node . + docker run --rm --name initToRunk6 \ + -e PG_GPD_PASSWORD=${PG_GPD_PASSWORD} \ + -e NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} \ + exec-node + displayName: Run GPD Ingestion Timestamp Review + env: + PG_GPD_PASSWORD: ${{ variables.PG_GPD_PASSWORD }} + NUMBER_OF_EVENTS: ${{ variables.NUMBER_OF_EVENTS }} diff --git a/.gitignore b/.gitignore index e6cdb91..e0543d7 100644 --- a/.gitignore +++ b/.gitignore @@ -41,4 +41,7 @@ obj/ **/.terraform **/node_modules yarn.lock -**/*.copy \ No newline at end of file +**/*.copy + +# Misc +.azure/ \ No newline at end of file diff --git a/performance-test/src/Dockerfile b/performance-test/src/Dockerfile new file mode 100644 index 0000000..598cccd --- /dev/null +++ b/performance-test/src/Dockerfile @@ -0,0 +1,11 @@ +# specify the node base image with your desired version node: +FROM node:16 + +WORKDIR /src/node-app + +COPY ./ ./ + +RUN yarn install + +ENTRYPOINT ["yarn", "start-performance-review"] + diff --git a/performance-test/src/dev.environment.json b/performance-test/src/dev.environment.json new file mode 100644 index 0000000..a3b5694 --- /dev/null +++ b/performance-test/src/dev.environment.json @@ -0,0 +1,10 @@ +{ + "environment": [ + { + "env": "dev", + "PG_GPD_USERNAME": "apduser", + "PG_GPD_SERVER_NAME": "pagopa-d-weu-gpd-pgflex.postgres.database.azure.com", + "PG_GPD_DATABASE_NAME": "apd" + } + ] +} \ No newline at end of file diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js new file mode 100644 index 0000000..9b25f20 --- /dev/null +++ b/performance-test/src/gpd_ingestion_test.js @@ -0,0 +1,39 @@ + + +import { insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions } from "./modules/pg_gpd_client.js"; +import { REDIS_ARRAY_IDS_TOKENIZED, REDIS_ARRAY_IDS_NOT_TOKENIZED } from "./modules/common.js"; +import { setValueRedis } from "./modules/redis_client.js"; + +const NUMBER_OF_EVENTS = JSON.parse(open(__ENV.NUMBER_OF_EVENTS)); + +export function setup() { + const arrayIdTokenized = []; + const arrayIdNotTokenized = []; + + return { arrayIdTokenized, arrayIdNotTokenized }; +} + +export default function (arrayIds) { + console.log("ENVVV", NUMBER_OF_EVENTS); + // SAVE ON DB paymentPositions + for (let i = 0; i < 
NUMBER_OF_EVENTS; i++) { + const idValidFiscalCode = "PERFORMANCE_GPD_INGESTION_VALID_FISCAL_CODE" + new Date().getTime(); + insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); + arrayIds.arrayIdTokenized.push(idValidFiscalCode); + console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIds.arrayIdTokenized)); + + const idInvalidFiscalCode = "PERFORMANCE_GPD_INGESTION_INVALID_FISCAL_CODE" + new Date().getTime(); + insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); + arrayIds.arrayIdNotTokenized.push(idInvalidFiscalCode); + console.log("Inserted in database paymentOptions with invalid fiscal code with ids: ", JSON.stringify(arrayIds.arrayIdNotTokenized)); + } + + // SAVE ID ARRAYS ON REDIS + setValueRedis(REDIS_ARRAY_IDS_TOKENIZED, arrayIds.arrayIdTokenized); + setValueRedis(REDIS_ARRAY_IDS_NOT_TOKENIZED, arrayIds.arrayIdNotTokenized); +} + +export function teardown() { + // DELETE paymentPositions + deletePaymentPositions(); +} \ No newline at end of file diff --git a/performance-test/src/local.environment.json b/performance-test/src/local.environment.json new file mode 100644 index 0000000..a8375b6 --- /dev/null +++ b/performance-test/src/local.environment.json @@ -0,0 +1,10 @@ +{ + "environment": [ + { + "env": "local", + "PG_GPD_USERNAME": "apduser", + "PG_GPD_SERVER_NAME": "pagopa-d-weu-gpd-pgflex.postgres.database.azure.com", + "PG_GPD_DATABASE_NAME": "apd" + } + ] +} diff --git a/performance-test/src/modules/common.js b/performance-test/src/modules/common.js new file mode 100644 index 0000000..42ae21a --- /dev/null +++ b/performance-test/src/modules/common.js @@ -0,0 +1,11 @@ +export const VALID_CF = "PRFGPD24S20B157N"; +export const INVALID_CF = "invalidCF"; +export const entityIdentifier = "PERFORMANCE_TEST_GPD_INGESTION"; +export const REDIS_ARRAY_IDS_TOKENIZED = "redisTokenized"; +export const REDIS_ARRAY_IDS_NOT_TOKENIZED = "redisNotTokenized"; + +export function randomString(length, charset) { + let res = ''; + while (length--) res += charset[(Math.random() * charset.length) | 0]; + return res; +} diff --git a/performance-test/src/modules/pg_gpd_client.js b/performance-test/src/modules/pg_gpd_client.js new file mode 100644 index 0000000..2ec407a --- /dev/null +++ b/performance-test/src/modules/pg_gpd_client.js @@ -0,0 +1,56 @@ +import { entityIdentifier, INVALID_CF, VALID_CF } from './common.js'; +import { Pool } from 'pg'; + +const varsArray = new SharedArray('vars', function () { + return JSON.parse(open(`./${__ENV.VARS}`)).environment; +}); +export const ENV_VARS = varsArray[0]; + +//COMMON +const username = `${ENV_VARS.PG_GPD_USERNAME}`; +const serverName = `${ENV_VARS.PG_GPD_SERVER_NAME}`; +const databaseName = `${ENV_VARS.PG_GPD_DATABASE_NAME}`; +//SECRETS +const password = `${__ENV.PG_GPD_PASSWORD}`; + +const pool = new Pool({ + user: username, + database: databaseName, + password: password, + host: serverName, + port: 5432, + ssl: true +}); + +const connection = { + pool, + query: (...args) => { + return pool.connect().then((client) => { + return client.query(...args).then((res) => { + client.release(); + return res.rows; + }); + }); + }, +}; + +async function shutDownPool() { + await pool.end(); +} + +async function insertPaymentPositionWithValidFiscalCode(id) { + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, 
organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${entityIdentifier}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${VALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); +} + +async function insertPaymentPositionWithInvalidFiscalCode(id) { + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${entityIdentifier}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${INVALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); +} + +async function deletePaymentPositions() { + await connection.query(`DELETE FROM apd.apd.payment_position WHERE iupd='${entityIdentifier}'`); +} + +module.exports = { + shutDownPool, + insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions +} \ No newline at end of file diff --git a/performance-test/src/modules/redis_client.js b/performance-test/src/modules/redis_client.js new file mode 100644 index 0000000..f634866 --- /dev/null +++ b/performance-test/src/modules/redis_client.js @@ -0,0 +1,34 @@ +import {createClient} from "redis"; + +const redisHost = "127.0.0.1"; +const redisPort = "6379"; + +const client = createClient({ + socket: { + port: redisPort, + host: redisHost + } +}); + +client.on('error', err => console.log('Redis Client Error', err)) +client.connect(); + +client.on('connect', function () { + console.log('Connected!'); +}); + +async function readFromRedisWithKey(key) { + return await client.get(key); +} + +async function setValueRedis({key, value}){ + return await client.set(key, value); +} + +async function shutDownClient() { + await client.quit(); + } + +module.exports = { + readFromRedisWithKey, shutDownClient, setValueRedis + } \ No newline at end of file diff --git a/performance-test/src/package.json b/performance-test/src/package.json new file mode 100644 index 0000000..93e82d5 --- /dev/null +++ b/performance-test/src/package.json @@ -0,0 +1,11 @@ +{ + "type": "module", + "dependencies": {}, + "scripts": { + "start-performance-review": "export $(cat .env | xargs) && node ./gpd_ingestion_test.js" + }, + "devDependencies": { + "pg": "^8.13.1", + "redis": "^4.7.0" + } +} \ No newline at end of file diff --git a/performance-test/src/scripts/review_script.js b/performance-test/src/scripts/review_script.js new file mode 100644 index 0000000..4e4128d --- /dev/null 
+++ b/performance-test/src/scripts/review_script.js @@ -0,0 +1,137 @@ +import { REDIS_ARRAY_IDS_NOT_TOKENIZED, REDIS_ARRAY_IDS_TOKENIZED } from "../modules/common.js"; +import { readFromRedisWithKey } from "../modules/redis_client.js"; + +const REDIS_RAW_SUFFIX = "-raw-c"; +const REDIS_ING_SUFFIX = "-ing-c"; + +// Performance Debezium connector +// 1. Retrieve messages from topic "raw" +// 2. Calculate difference between timestamps -> obj.source.tsMs (time of insert on db) : obj.tsMs (time of insert on eventhub) +// Performance gpd-ingestion-manager +// 1. Retrieve messages from topic "ingested" +// 2. Calculate difference between raw and ingested timestamps -> rawMsg.timestamp (timestamp of the message from topic raw) : ingestedMsg.timestamp (timestamp of the message from topic ingested) +const reviewIngestionTimeToProcess = async () => { + let arrayTimePsgToRaw = []; + let totalTimePsgToRaw = 0; + let minTimePsgToRaw = null; + let maxTimePsgToRaw = null; + let failedRaw = 0; + + let arrayTimeRawToTokenize = []; + let totalTimeRawToTokenize = 0; + let minTimeRawToTokenize = null; + let maxTimeRawToTokenize = null; + let failedTokenized = 0; + + let arrayTimeRawToIngest = []; + let totalTimeRawToIngest = 0; + let minTimeRawToIngest = null; + let maxTimeRawToIngest = null; + let failedIngested = 0; + + // RETRIEVE ARRAYS OF IDS + const tokenizedIds = readFromRedisWithKey(REDIS_ARRAY_IDS_TOKENIZED); + const notTokenizedIds = readFromRedisWithKey(REDIS_ARRAY_IDS_NOT_TOKENIZED); + + for (const id of tokenizedIds) { + // RETRIEVE RAW MESSAGE FROM REDIS + const rawMsg = readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + + if (rawMsg) { + const rawMsgValue = JSON.parse(rawMsg.value.toString()); + console.log("Processing raw message with id: " + rawMsgValue.after.id); + + // CALCULATE TIME TO CAPTURE + let timePsgToRaw = rawMsgValue.tsMs - rawMsgValue.source.tsMs; + arrayTimePsgToRaw.push(timePsgToRaw); + totalTimePsgToRaw += timePsgToRaw; + minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; + maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; + + // RETRIEVE TOKENIZED MESSAGE FROM REDIS WITH RAW OBJ ID + const tokenizedMsg = readFromRedisWithKey(id + REDIS_ING_SUFFIX); + + if (tokenizedMsg) { + const tokenizedMsgValue = JSON.parse(tokenizedMsg.value.toString()); + console.log("Processing tokenized message with id: " + tokenizedMsgValue.after.id); + + // CALCULATE TIME TO TOKENIZE + let timeRawToTokenize = tokenizedMsg.timestamp - rawMsg.timestamp; + arrayTimeRawToTokenize.push(timeRawToTokenize); + totalTimeRawToTokenize += timeRawToTokenize; + minTimeRawToTokenize = minTimeRawToTokenize === null || timeRawToTokenize < minTimeRawToTokenize ? timeRawToTokenize : minTimeRawToTokenize; + maxTimeRawToTokenize = maxTimeRawToTokenize === null || timeRawToTokenize > maxTimeRawToTokenize ? 
timeRawToTokenize : maxTimeRawToTokenize; + } else { + failedTokenized += 1; + } + } else { + failedRaw += 1; + } + + } + + for (const id of notTokenizedIds) { + // RETRIEVE RAW MESSAGE FROM REDIS + const rawMsg = readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + + if (rawMsg) { + const rawMsgValue = JSON.parse(rawMsg.value.toString()); + console.log("Processing raw message with id: " + rawMsgValue.after.id); + + // CALCULATE TIME TO CAPTURE + let timePsgToRaw = rawMsgValue.tsMs - rawMsgValue.source.tsMs; + arrayTimePsgToRaw.push(timePsgToRaw); + totalTimePsgToRaw += timePsgToRaw; + minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; + maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; + + // RETRIEVE INGESTED MESSAGE FROM REDIS WITH RAW OBJ ID + const ingestedMsg = readFromRedisWithKey(id + REDIS_ING_SUFFIX); + + if (ingestedMsg) { + const ingestedMsgValue = JSON.parse(ingestedMsg.value.toString()); + console.log("Processing ingested message with id: " + ingestedMsgValue.after.id); + + // CALCULATE TIME TO INGEST WITHOUT TOKENIZER + let timeRawToIngest = ingestedMsg.timestamp - rawMsg.timestamp; + arrayTimeRawToIngest.push(timeRawToIngest); + totalTimeRawToIngest += timeRawToIngest; + minTimeRawToIngest = minTimeRawToIngest === null || timeRawToIngest < minTimeRawToIngest ? timeRawToIngest : minTimeRawToIngest; + maxTimeRawToIngest = maxTimeRawToIngest === null || timeRawToIngest > maxTimeRawToIngest ? timeRawToIngest : maxTimeRawToIngest; + } else { + failedIngested += 1; + } + } else { + failedRaw += 1; + } + } + + console.log("/////////////////////////////////"); + console.log("/----------- METRICS -----------/"); + console.log("/////////////////////////////////"); + console.log("--------------------------------"); + console.log(`mean time psg to evh..............: ${totalTimePsgToRaw ? getTimeString(Math.round(totalTimePsgToRaw / arrayTimePsgToRaw.length)) : "-"}`); + console.log(`mean time to tokenize.............: ${totalTimeRawToTokenize ? getTimeString(Math.round(totalTimeRawToTokenize / arrayTimeRawToTokenize.length)) : "-"}`); + console.log(`mean time to ingest...............: ${totalTimeRawToIngest ? getTimeString(Math.round(totalTimeRawToIngest / arrayTimeRawToIngest.length)) : "-"}`); + console.log("--------------------------------"); + console.log(`min time psg to evh...............: ${minTimePsgToRaw ? getTimeString(minTimePsgToRaw) : "-"}`); + console.log(`min time to tokenize..............: ${minTimeRawToTokenize ? getTimeString(minTimeRawToTokenize) : "-"}`); + console.log(`min time to ingest................: ${minTimeRawToIngest ? getTimeString(minTimeRawToIngest) : "-"}`); + console.log("--------------------------------"); + console.log(`max time psg to evh...............: ${maxTimePsgToRaw ? getTimeString(maxTimePsgToRaw) : "-"}`); + console.log(`max time to tokenize..............: ${maxTimeRawToTokenize ? getTimeString(maxTimeRawToTokenize) : "-"}`); + console.log(`max time to ingest................: ${maxTimeRawToIngest ? 
getTimeString(maxTimeRawToIngest) : "-"}`); + console.log("--------------------------------"); + console.log(`failed to be captured.............: ${failedRaw}`); + console.log(`failed to be tokenized............: ${failedTokenized}`); + console.log(`failed to be ingested.............: ${failedIngested}`); + console.log("/////////////////////////////////"); + console.log("/------------- END -------------/"); + console.log("/////////////////////////////////"); +} + +function getTimeString(time) { + return `${time}ms | ${time / 1000}s`; +} + +reviewIngestionTimeToProcess(); \ No newline at end of file diff --git a/performance-test/src/uat.environment.json b/performance-test/src/uat.environment.json new file mode 100644 index 0000000..b937371 --- /dev/null +++ b/performance-test/src/uat.environment.json @@ -0,0 +1,10 @@ +{ + "environment": [ + { + "env": "uat", + "PG_GPD_USERNAME": "apduser", + "PG_GPD_SERVER_NAME": "pagopa-u-weu-gpd-pgflex.postgres.database.azure.com", + "PG_GPD_DATABASE_NAME": "apd" + } + ] +} From 3b806a53fe87a3d3eaec64a34a1c50aecf09c7cf Mon Sep 17 00:00:00 2001 From: svariant Date: Wed, 20 Nov 2024 17:42:48 +0100 Subject: [PATCH 02/14] =?UTF-8?q?[PPANTT-170]=C2=A0feat:=20Improve=20perfo?= =?UTF-8?q?rmance=20test?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .devops/performance-test-pipelines.yml | 35 +++++++++++++++++-- performance-test/src/DockerfileLoad | 11 ++++++ .../src/{Dockerfile => DockerfileReview} | 0 performance-test/src/gpd_ingestion_test.js | 26 ++++++-------- performance-test/src/modules/redis_client.js | 8 ++--- performance-test/src/package.json | 3 +- performance-test/src/scripts/review_script.js | 22 +++++++----- 7 files changed, 74 insertions(+), 31 deletions(-) create mode 100644 performance-test/src/DockerfileLoad rename performance-test/src/{Dockerfile => DockerfileReview} (100%) diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml index bce1442..5518198 100644 --- a/.devops/performance-test-pipelines.yml +++ b/.devops/performance-test-pipelines.yml @@ -13,27 +13,56 @@ parameters: displayName: "Number of events" type: number default: 50 + - name: "TIMEOUT" + displayName: "Timeout in seconds" + type: number + default: 10 variables: ${{ if eq(parameters['ENVIRONMENT'], 'dev') }}: poolImage: "pagopa-dev-loadtest-linux" PG_GPD_PASSWORD: "$(DEV_PG_GPD_PASSWORD)" + INGESTION_EVENTHUB_CONN_STRING: "${DEV_INGESTION_EVENTHUB_CONN_STRING}" ${{ if eq(parameters['ENVIRONMENT'], 'uat') }}: poolImage: "pagopa-uat-loadtest-linux" PG_GPD_PASSWORD: "$(UAT_PG_GPD_PASSWORD)" + INGESTION_EVENTHUB_CONN_STRING: "${UAT_INGESTION_EVENTHUB_CONN_STRING}" + pool: name: $(poolImage) steps: + - script: | + cd ./test-utils + chmod +x ./run_compose.sh + ./run_compose.sh "${{ variables.INGESTION_EVENTHUB_CONN_STRING }}" "gpd-ingestion.apd.payment_position,cdc-raw-auto.apd.payment_position" + displayName: Run GPD Ingestion Timestamp Review + env: + INGESTION_EVENTHUB_CONN_STRING: ${{ variables.INGESTION_EVENTHUB_CONN_STRING }} + - script: | cd ./performance-test/src npm install - docker build -f ./Dockerfile -t exec-node . - docker run --rm --name initToRunk6 \ + docker build -f ./DockerfileLoad -t exec-node . 
+ docker run --rm --name loadPostgres \ -e PG_GPD_PASSWORD=${PG_GPD_PASSWORD} \ -e NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} \ exec-node - displayName: Run GPD Ingestion Timestamp Review + displayName: Run insert events on postgres env: PG_GPD_PASSWORD: ${{ variables.PG_GPD_PASSWORD }} NUMBER_OF_EVENTS: ${{ variables.NUMBER_OF_EVENTS }} + + - script: | + sleep ${TIMEOUT} + displayName: Wait for timeout + env: + TIMEOUT: ${{ variables.TIMEOUT }} + + - script: | + cd ./performance-test/src + npm install + docker build -f ./DockerfileReview -t exec-node . + docker run --rm --name ingestionReview \ + exec-node + displayName: Run timestamp review diff --git a/performance-test/src/DockerfileLoad b/performance-test/src/DockerfileLoad new file mode 100644 index 0000000..06bc8d5 --- /dev/null +++ b/performance-test/src/DockerfileLoad @@ -0,0 +1,11 @@ +# specify the node base image with your desired version node: +FROM node:16 + +WORKDIR /src/node-app + +COPY ./ ./ + +RUN yarn install + +ENTRYPOINT ["yarn", "start-load-events"] + diff --git a/performance-test/src/Dockerfile b/performance-test/src/DockerfileReview similarity index 100% rename from performance-test/src/Dockerfile rename to performance-test/src/DockerfileReview diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js index 9b25f20..37c3dab 100644 --- a/performance-test/src/gpd_ingestion_test.js +++ b/performance-test/src/gpd_ingestion_test.js @@ -6,34 +6,30 @@ import { setValueRedis } from "./modules/redis_client.js"; const NUMBER_OF_EVENTS = JSON.parse(open(__ENV.NUMBER_OF_EVENTS)); -export function setup() { +export default function insertEvents() { const arrayIdTokenized = []; const arrayIdNotTokenized = []; - return { arrayIdTokenized, arrayIdNotTokenized }; -} - -export default function (arrayIds) { - console.log("ENVVV", NUMBER_OF_EVENTS); + console.log("Selected number of events: ", NUMBER_OF_EVENTS); // SAVE ON DB paymentPositions for (let i = 0; i < NUMBER_OF_EVENTS; i++) { const idValidFiscalCode = "PERFORMANCE_GPD_INGESTION_VALID_FISCAL_CODE" + new Date().getTime(); insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); - arrayIds.arrayIdTokenized.push(idValidFiscalCode); - console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIds.arrayIdTokenized)); + arrayIdTokenized.push(idValidFiscalCode); + console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); const idInvalidFiscalCode = "PERFORMANCE_GPD_INGESTION_INVALID_FISCAL_CODE" + new Date().getTime(); insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); - arrayIds.arrayIdNotTokenized.push(idInvalidFiscalCode); - console.log("Inserted in database paymentOptions with invalid fiscal code with ids: ", JSON.stringify(arrayIds.arrayIdNotTokenized)); + arrayIdNotTokenized.push(idInvalidFiscalCode); + console.log("Inserted in database paymentOptions with invalid fiscal code with ids: ", JSON.stringify(arrayIdNotTokenized)); } // SAVE ID ARRAYS ON REDIS - setValueRedis(REDIS_ARRAY_IDS_TOKENIZED, arrayIds.arrayIdTokenized); - setValueRedis(REDIS_ARRAY_IDS_NOT_TOKENIZED, arrayIds.arrayIdNotTokenized); -} + setValueRedis(REDIS_ARRAY_IDS_TOKENIZED, arrayIdTokenized); + setValueRedis(REDIS_ARRAY_IDS_NOT_TOKENIZED, arrayIdNotTokenized); -export function teardown() { // DELETE paymentPositions deletePaymentPositions(); -} \ No newline at end of file +} + +insertEvents(); \ No newline at end of file diff --git 
a/performance-test/src/modules/redis_client.js b/performance-test/src/modules/redis_client.js index f634866..caa9899 100644 --- a/performance-test/src/modules/redis_client.js +++ b/performance-test/src/modules/redis_client.js @@ -1,4 +1,4 @@ -import {createClient} from "redis"; +import { createClient } from "redis"; const redisHost = "127.0.0.1"; const redisPort = "6379"; @@ -21,14 +21,14 @@ async function readFromRedisWithKey(key) { return await client.get(key); } -async function setValueRedis({key, value}){ +async function setValueRedis({ key, value }) { return await client.set(key, value); } async function shutDownClient() { await client.quit(); - } +} module.exports = { readFromRedisWithKey, shutDownClient, setValueRedis - } \ No newline at end of file +} \ No newline at end of file diff --git a/performance-test/src/package.json b/performance-test/src/package.json index 93e82d5..ca31871 100644 --- a/performance-test/src/package.json +++ b/performance-test/src/package.json @@ -2,7 +2,8 @@ "type": "module", "dependencies": {}, "scripts": { - "start-performance-review": "export $(cat .env | xargs) && node ./gpd_ingestion_test.js" + "start-load-events": "export $(cat .env | xargs) && node ./gpd_ingestion_test.js", + "start-performance-review": "export $(cat .env | xargs) && node ./scripts/review_script.js" }, "devDependencies": { "pg": "^8.13.1", diff --git a/performance-test/src/scripts/review_script.js b/performance-test/src/scripts/review_script.js index 4e4128d..2095574 100644 --- a/performance-test/src/scripts/review_script.js +++ b/performance-test/src/scripts/review_script.js @@ -1,5 +1,5 @@ import { REDIS_ARRAY_IDS_NOT_TOKENIZED, REDIS_ARRAY_IDS_TOKENIZED } from "../modules/common.js"; -import { readFromRedisWithKey } from "../modules/redis_client.js"; +import { readFromRedisWithKey, shutDownClient } from "../modules/redis_client.js"; const REDIS_RAW_SUFFIX = "-raw-c"; const REDIS_ING_SUFFIX = "-ing-c"; @@ -38,7 +38,7 @@ const reviewIngestionTimeToProcess = async () => { const rawMsg = readFromRedisWithKey(id + REDIS_RAW_SUFFIX); if (rawMsg) { - const rawMsgValue = JSON.parse(rawMsg.value.toString()); + const rawMsgValue = JSON.parse(rawMsg).value; console.log("Processing raw message with id: " + rawMsgValue.after.id); // CALCULATE TIME TO CAPTURE @@ -52,7 +52,7 @@ const reviewIngestionTimeToProcess = async () => { const tokenizedMsg = readFromRedisWithKey(id + REDIS_ING_SUFFIX); if (tokenizedMsg) { - const tokenizedMsgValue = JSON.parse(tokenizedMsg.value.toString()); + const tokenizedMsgValue = JSON.parse(tokenizedMsg).value; console.log("Processing tokenized message with id: " + tokenizedMsgValue.after.id); // CALCULATE TIME TO TOKENIZE @@ -62,9 +62,11 @@ const reviewIngestionTimeToProcess = async () => { minTimeRawToTokenize = minTimeRawToTokenize === null || timeRawToTokenize < minTimeRawToTokenize ? timeRawToTokenize : minTimeRawToTokenize; maxTimeRawToTokenize = maxTimeRawToTokenize === null || timeRawToTokenize > maxTimeRawToTokenize ? 
timeRawToTokenize : maxTimeRawToTokenize; } else { + console.log("Failed to tokenize message with id: " + id); failedTokenized += 1; } } else { + console.log("Failed to capture message with id: " + id); failedRaw += 1; } @@ -76,7 +78,7 @@ const reviewIngestionTimeToProcess = async () => { if (rawMsg) { const rawMsgValue = JSON.parse(rawMsg.value.toString()); - console.log("Processing raw message with id: " + rawMsgValue.after.id); + console.log("Processing raw message with id: " + id); // CALCULATE TIME TO CAPTURE let timePsgToRaw = rawMsgValue.tsMs - rawMsgValue.source.tsMs; @@ -90,7 +92,7 @@ const reviewIngestionTimeToProcess = async () => { if (ingestedMsg) { const ingestedMsgValue = JSON.parse(ingestedMsg.value.toString()); - console.log("Processing ingested message with id: " + ingestedMsgValue.after.id); + console.log("Processing ingested message with id: " + id); // CALCULATE TIME TO INGEST WITHOUT TOKENIZER let timeRawToIngest = ingestedMsg.timestamp - rawMsg.timestamp; @@ -99,9 +101,11 @@ const reviewIngestionTimeToProcess = async () => { minTimeRawToIngest = minTimeRawToIngest === null || timeRawToIngest < minTimeRawToIngest ? timeRawToIngest : minTimeRawToIngest; maxTimeRawToIngest = maxTimeRawToIngest === null || timeRawToIngest > maxTimeRawToIngest ? timeRawToIngest : maxTimeRawToIngest; } else { + console.log("Failed to ingest message with id: " + id); failedIngested += 1; } } else { + console.log("Failed to capture message with id: " + id); failedRaw += 1; } } @@ -110,15 +114,15 @@ const reviewIngestionTimeToProcess = async () => { console.log("/----------- METRICS -----------/"); console.log("/////////////////////////////////"); console.log("--------------------------------"); - console.log(`mean time psg to evh..............: ${totalTimePsgToRaw ? getTimeString(Math.round(totalTimePsgToRaw / arrayTimePsgToRaw.length)) : "-"}`); + console.log(`mean time to capture..............: ${totalTimePsgToRaw ? getTimeString(Math.round(totalTimePsgToRaw / arrayTimePsgToRaw.length)) : "-"}`); console.log(`mean time to tokenize.............: ${totalTimeRawToTokenize ? getTimeString(Math.round(totalTimeRawToTokenize / arrayTimeRawToTokenize.length)) : "-"}`); console.log(`mean time to ingest...............: ${totalTimeRawToIngest ? getTimeString(Math.round(totalTimeRawToIngest / arrayTimeRawToIngest.length)) : "-"}`); console.log("--------------------------------"); - console.log(`min time psg to evh...............: ${minTimePsgToRaw ? getTimeString(minTimePsgToRaw) : "-"}`); + console.log(`min time to capture...............: ${minTimePsgToRaw ? getTimeString(minTimePsgToRaw) : "-"}`); console.log(`min time to tokenize..............: ${minTimeRawToTokenize ? getTimeString(minTimeRawToTokenize) : "-"}`); console.log(`min time to ingest................: ${minTimeRawToIngest ? getTimeString(minTimeRawToIngest) : "-"}`); console.log("--------------------------------"); - console.log(`max time psg to evh...............: ${maxTimePsgToRaw ? getTimeString(maxTimePsgToRaw) : "-"}`); + console.log(`max time to capture...............: ${maxTimePsgToRaw ? getTimeString(maxTimePsgToRaw) : "-"}`); console.log(`max time to tokenize..............: ${maxTimeRawToTokenize ? getTimeString(maxTimeRawToTokenize) : "-"}`); console.log(`max time to ingest................: ${maxTimeRawToIngest ? 
getTimeString(maxTimeRawToIngest) : "-"}`); console.log("--------------------------------"); @@ -128,6 +132,8 @@ const reviewIngestionTimeToProcess = async () => { console.log("/////////////////////////////////"); console.log("/------------- END -------------/"); console.log("/////////////////////////////////"); + + shutDownClient(); } function getTimeString(time) { From c9c88f22783597da0517603c0609c80841a046a0 Mon Sep 17 00:00:00 2001 From: svariant Date: Wed, 20 Nov 2024 18:22:22 +0100 Subject: [PATCH 03/14] [PPANTT-170] feat: Remove k6 and change perf test scripts to module --- performance-test/src/gpd_ingestion_test.js | 19 +++++++------- performance-test/src/modules/common.js | 22 ++++++++-------- performance-test/src/modules/pg_gpd_client.js | 25 ++++++++----------- performance-test/src/modules/redis_client.js | 2 +- performance-test/src/package.json | 6 +++-- performance-test/src/scripts/review_script.js | 4 +-- 6 files changed, 40 insertions(+), 38 deletions(-) diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js index 37c3dab..2f19262 100644 --- a/performance-test/src/gpd_ingestion_test.js +++ b/performance-test/src/gpd_ingestion_test.js @@ -1,28 +1,29 @@ -import { insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions } from "./modules/pg_gpd_client.js"; -import { REDIS_ARRAY_IDS_TOKENIZED, REDIS_ARRAY_IDS_NOT_TOKENIZED } from "./modules/common.js"; -import { setValueRedis } from "./modules/redis_client.js"; +const { insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions } = require("./modules/pg_gpd_client.js"); +const { REDIS_ARRAY_IDS_TOKENIZED, REDIS_ARRAY_IDS_NOT_TOKENIZED } = require("./modules/common.js"); +const { setValueRedis } = require("./modules/redis_client.js"); -const NUMBER_OF_EVENTS = JSON.parse(open(__ENV.NUMBER_OF_EVENTS)); +const NUMBER_OF_EVENTS = JSON.parse(process.env.NUMBER_OF_EVENTS); -export default function insertEvents() { +function insertEvents() { const arrayIdTokenized = []; const arrayIdNotTokenized = []; console.log("Selected number of events: ", NUMBER_OF_EVENTS); // SAVE ON DB paymentPositions for (let i = 0; i < NUMBER_OF_EVENTS; i++) { - const idValidFiscalCode = "PERFORMANCE_GPD_INGESTION_VALID_FISCAL_CODE" + new Date().getTime(); + const idValidFiscalCode = "PERFORMANCE_GPD_INGESTION_VALID_FISCAL_CODE_" + new Date().getTime(); insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); arrayIdTokenized.push(idValidFiscalCode); - console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); - const idInvalidFiscalCode = "PERFORMANCE_GPD_INGESTION_INVALID_FISCAL_CODE" + new Date().getTime(); + const idInvalidFiscalCode = "PERFORMANCE_GPD_INGESTION_INVALID_FISCAL_CODE_" + new Date().getTime(); insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); arrayIdNotTokenized.push(idInvalidFiscalCode); - console.log("Inserted in database paymentOptions with invalid fiscal code with ids: ", JSON.stringify(arrayIdNotTokenized)); } + console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); + console.log("Inserted in database paymentOptions with invalid fiscal code with ids: ", JSON.stringify(arrayIdNotTokenized)); + // SAVE ID ARRAYS ON REDIS setValueRedis(REDIS_ARRAY_IDS_TOKENIZED, arrayIdTokenized); diff --git a/performance-test/src/modules/common.js 
b/performance-test/src/modules/common.js index 42ae21a..e5ade40 100644 --- a/performance-test/src/modules/common.js +++ b/performance-test/src/modules/common.js @@ -1,11 +1,13 @@ -export const VALID_CF = "PRFGPD24S20B157N"; -export const INVALID_CF = "invalidCF"; -export const entityIdentifier = "PERFORMANCE_TEST_GPD_INGESTION"; -export const REDIS_ARRAY_IDS_TOKENIZED = "redisTokenized"; -export const REDIS_ARRAY_IDS_NOT_TOKENIZED = "redisNotTokenized"; +const VALID_CF = "PRFGPD24S20B157N"; +const INVALID_CF = "invalidCF"; +const ENTITY_IDENTIFIER = "PERFORMANCE_TEST_GPD_INGESTION"; +const REDIS_ARRAY_IDS_TOKENIZED = "redisTokenized"; +const REDIS_ARRAY_IDS_NOT_TOKENIZED = "redisNotTokenized"; -export function randomString(length, charset) { - let res = ''; - while (length--) res += charset[(Math.random() * charset.length) | 0]; - return res; -} +module.exports = { + VALID_CF, + INVALID_CF, + ENTITY_IDENTIFIER, + REDIS_ARRAY_IDS_TOKENIZED, + REDIS_ARRAY_IDS_NOT_TOKENIZED +} \ No newline at end of file diff --git a/performance-test/src/modules/pg_gpd_client.js b/performance-test/src/modules/pg_gpd_client.js index 2ec407a..3090151 100644 --- a/performance-test/src/modules/pg_gpd_client.js +++ b/performance-test/src/modules/pg_gpd_client.js @@ -1,17 +1,14 @@ -import { entityIdentifier, INVALID_CF, VALID_CF } from './common.js'; -import { Pool } from 'pg'; - -const varsArray = new SharedArray('vars', function () { - return JSON.parse(open(`./${__ENV.VARS}`)).environment; -}); -export const ENV_VARS = varsArray[0]; +const { ENTITY_IDENTIFIER, INVALID_CF, VALID_CF } = require('./common.js'); +const { Pool } = require('pg'); //COMMON -const username = `${ENV_VARS.PG_GPD_USERNAME}`; -const serverName = `${ENV_VARS.PG_GPD_SERVER_NAME}`; -const databaseName = `${ENV_VARS.PG_GPD_DATABASE_NAME}`; +const username = process.env.PG_GPD_USERNAME; +const serverName = process.env.PG_GPD_SERVER_NAME; +const databaseName = process.env.PG_GPD_DATABASE_NAME; //SECRETS -const password = `${__ENV.PG_GPD_PASSWORD}`; +const password = process.env.PG_GPD_PASSWORD; + +// TODO LOAD ENVS FROM ENV FILE const pool = new Pool({ user: username, @@ -39,15 +36,15 @@ async function shutDownPool() { } async function insertPaymentPositionWithValidFiscalCode(id) { - await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. 
- Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${entityIdentifier}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${VALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${VALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); } async function insertPaymentPositionWithInvalidFiscalCode(id) { - await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${entityIdentifier}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${INVALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. 
- Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${INVALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); } async function deletePaymentPositions() { - await connection.query(`DELETE FROM apd.apd.payment_position WHERE iupd='${entityIdentifier}'`); + await connection.query(`DELETE FROM apd.apd.payment_position WHERE iupd='${ENTITY_IDENTIFIER}'`); } module.exports = { diff --git a/performance-test/src/modules/redis_client.js b/performance-test/src/modules/redis_client.js index caa9899..a7f8f02 100644 --- a/performance-test/src/modules/redis_client.js +++ b/performance-test/src/modules/redis_client.js @@ -1,4 +1,4 @@ -import { createClient } from "redis"; +const { createClient } = require('redis'); const redisHost = "127.0.0.1"; const redisPort = "6379"; diff --git a/performance-test/src/package.json b/performance-test/src/package.json index ca31871..8706458 100644 --- a/performance-test/src/package.json +++ b/performance-test/src/package.json @@ -1,5 +1,6 @@ { - "type": "module", + "name": "pagopa-gpd-ingestion-manager", + "version": "0.0.1", "dependencies": {}, "scripts": { "start-load-events": "export $(cat .env | xargs) && node ./gpd_ingestion_test.js", @@ -7,6 +8,7 @@ }, "devDependencies": { "pg": "^8.13.1", - "redis": "^4.7.0" + "redis": "^4.7.0", + "dotenv": "^16.1.4" } } \ No newline at end of file diff --git a/performance-test/src/scripts/review_script.js b/performance-test/src/scripts/review_script.js index 2095574..7e0d7c2 100644 --- a/performance-test/src/scripts/review_script.js +++ b/performance-test/src/scripts/review_script.js @@ -1,5 +1,5 @@ -import { REDIS_ARRAY_IDS_NOT_TOKENIZED, REDIS_ARRAY_IDS_TOKENIZED } from "../modules/common.js"; -import { readFromRedisWithKey, shutDownClient } from "../modules/redis_client.js"; +const { REDIS_ARRAY_IDS_NOT_TOKENIZED, REDIS_ARRAY_IDS_TOKENIZED } = require("../modules/common.js"); +const { readFromRedisWithKey, shutDownClient } = require("../modules/redis_client.js"); const REDIS_RAW_SUFFIX = "-raw-c"; const REDIS_ING_SUFFIX = "-ing-c"; From 4178726f44ac7f7b67f59c3e38fbd083f3a53da3 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 10:22:29 +0100 Subject: [PATCH 04/14] [PPANTT-170] feat: Improve performance test env config --- .devops/performance-test-pipelines.yml | 6 ++++-- performance-test/src/.env.dev | 3 +++ performance-test/src/.env.uat | 3 +++ performance-test/src/DockerfileLoad | 2 +- performance-test/src/dev.environment.json | 10 ---------- performance-test/src/gpd_ingestion_test.js | 1 + performance-test/src/local.environment.json | 10 ---------- performance-test/src/package.json | 3 ++- performance-test/src/uat.environment.json | 10 ---------- 9 files changed, 14 insertions(+), 34 deletions(-) create mode 100644 performance-test/src/.env.dev create mode 100644 performance-test/src/.env.uat delete mode 100644 performance-test/src/dev.environment.json delete mode 100644 performance-test/src/local.environment.json delete mode 100644 performance-test/src/uat.environment.json diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml index 5518198..6707c15 100644 --- a/.devops/performance-test-pipelines.yml +++ 
b/.devops/performance-test-pipelines.yml @@ -45,19 +45,21 @@ steps: npm install docker build -f ./DockerfileLoad -t exec-node . docker run --rm --name loadPostgres \ + -e ENV=${ENV} -e PG_GPD_PASSWORD=${PG_GPD_PASSWORD} \ -e NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} \ exec-node displayName: Run insert events on postgres env: + ENV: ${{ parameters.ENV }} PG_GPD_PASSWORD: ${{ variables.PG_GPD_PASSWORD }} - NUMBER_OF_EVENTS: ${{ variables.NUMBER_OF_EVENTS }} + NUMBER_OF_EVENTS: ${{ parameters.NUMBER_OF_EVENTS }} - script: | timeout ${TIMEOUT}s displayName: Wait for timeout env: - TIMEOUT: ${{ variables.TIMEOUT }} + TIMEOUT: ${{ parameters.TIMEOUT }} - script: | cd ./performance-test/src diff --git a/performance-test/src/.env.dev b/performance-test/src/.env.dev new file mode 100644 index 0000000..50aeeec --- /dev/null +++ b/performance-test/src/.env.dev @@ -0,0 +1,3 @@ +PG_GPD_USERNAME=apduser +PG_GPD_SERVER_NAME=pagopa-d-weu-gpd-pgflex.postgres.database.azure.com +PG_GPD_DATABASE_NAME=apd \ No newline at end of file diff --git a/performance-test/src/.env.uat b/performance-test/src/.env.uat new file mode 100644 index 0000000..e971a87 --- /dev/null +++ b/performance-test/src/.env.uat @@ -0,0 +1,3 @@ +PG_GPD_USERNAME=apduser +PG_GPD_SERVER_NAME=pagopa-u-weu-gpd-pgflex.postgres.database.azure.com +PG_GPD_DATABASE_NAME=apd \ No newline at end of file diff --git a/performance-test/src/DockerfileLoad b/performance-test/src/DockerfileLoad index 06bc8d5..a3769e1 100644 --- a/performance-test/src/DockerfileLoad +++ b/performance-test/src/DockerfileLoad @@ -7,5 +7,5 @@ COPY ./ ./ RUN yarn install -ENTRYPOINT ["yarn", "start-load-events"] +ENTRYPOINT yarn start-load-events-${ENV:-dev} diff --git a/performance-test/src/dev.environment.json b/performance-test/src/dev.environment.json deleted file mode 100644 index a3b5694..0000000 --- a/performance-test/src/dev.environment.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "environment": [ - { - "env": "dev", - "PG_GPD_USERNAME": "apduser", - "PG_GPD_SERVER_NAME": "pagopa-d-weu-gpd-pgflex.postgres.database.azure.com", - "PG_GPD_DATABASE_NAME": "apd" - } - ] -} \ No newline at end of file diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js index 2f19262..5db6108 100644 --- a/performance-test/src/gpd_ingestion_test.js +++ b/performance-test/src/gpd_ingestion_test.js @@ -31,6 +31,7 @@ function insertEvents() { // DELETE paymentPositions deletePaymentPositions(); + console.log("Deleted payment positions"); } insertEvents(); \ No newline at end of file diff --git a/performance-test/src/local.environment.json b/performance-test/src/local.environment.json deleted file mode 100644 index a8375b6..0000000 --- a/performance-test/src/local.environment.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "environment": [ - { - "env": "local", - "PG_GPD_USERNAME": "apduser", - "PG_GPD_SERVER_NAME": "pagopa-d-weu-gpd-pgflex.postgres.database.azure.com", - "PG_GPD_DATABASE_NAME": "apd" - } - ] -} diff --git a/performance-test/src/package.json b/performance-test/src/package.json index 8706458..dc1564e 100644 --- a/performance-test/src/package.json +++ b/performance-test/src/package.json @@ -3,7 +3,8 @@ "version": "0.0.1", "dependencies": {}, "scripts": { - "start-load-events": "export $(cat .env | xargs) && node ./gpd_ingestion_test.js", + "start-load-events-dev": "export $(cat .env.dev | xargs) && node ./gpd_ingestion_test.js", + "start-load-events-uat": "export $(cat .env.uat | xargs) && node ./gpd_ingestion_test.js", "start-performance-review": 
"export $(cat .env | xargs) && node ./scripts/review_script.js" }, "devDependencies": { diff --git a/performance-test/src/uat.environment.json b/performance-test/src/uat.environment.json deleted file mode 100644 index b937371..0000000 --- a/performance-test/src/uat.environment.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "environment": [ - { - "env": "uat", - "PG_GPD_USERNAME": "apduser", - "PG_GPD_SERVER_NAME": "pagopa-u-weu-gpd-pgflex.postgres.database.azure.com", - "PG_GPD_DATABASE_NAME": "apd" - } - ] -} From d2b680ad687f18a399dd0db408f153f8d04850f1 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 11:47:10 +0100 Subject: [PATCH 05/14] [PPANTT-170] feat: Clean code --- .devops/performance-test-pipelines.yml | 12 +--- .github/workflows/code_review.yml | 77 -------------------------- performance-test/src/DockerfileLoad | 11 ---- performance-test/src/DockerfileReview | 11 ---- 4 files changed, 3 insertions(+), 108 deletions(-) delete mode 100644 performance-test/src/DockerfileLoad delete mode 100644 performance-test/src/DockerfileReview diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml index 6707c15..f92701d 100644 --- a/.devops/performance-test-pipelines.yml +++ b/.devops/performance-test-pipelines.yml @@ -43,12 +43,8 @@ steps: - script: | cd ./performance-test/src npm install - docker build -f ./DockerfileLoad -t exec-node . - docker run --rm --name loadPostgres \ - -e ENV=${ENV} - -e PG_GPD_PASSWORD=${PG_GPD_PASSWORD} \ - -e NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} \ - exec-node + export PG_GPD_PASSWORD=${PG_GPD_PASSWORD} NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} && \ + npm run start-load-events-${ENV} displayName: Run insert events on postgres env: ENV: ${{ parameters.ENV }} @@ -64,7 +60,5 @@ steps: - script: | cd ./performance-test/src npm install - docker build -f ./DockerfileReview -t exec-node . 
- docker run --rm --name ingestionReview \ - exec-node + npm run start-performance-review displayName: Run timestamp review diff --git a/.github/workflows/code_review.yml b/.github/workflows/code_review.yml index de72a93..77c0c21 100644 --- a/.github/workflows/code_review.yml +++ b/.github/workflows/code_review.yml @@ -44,80 +44,3 @@ jobs: maven_version: 3.9.3 coverage_exclusions: "**/config/*,**/*Mock*,**/model/**,**/entity/**,**/producer/**,**/enumeration/**,**/exception/**" cpd_exclusions: "**/model/**,**/entity/*" - -# smoke-test: -# name: Smoke Test -# runs-on: ubuntu-latest -# environment: -# name: dev -# steps: -# - name: Checkout -# id: checkout -# uses: actions/checkout@1f9a0c22da41e6ebfa534300ef656657ea2c6707 -# -# - name: Login -# id: login -# # from https://github.com/Azure/login/commits/master -# uses: azure/login@92a5484dfaf04ca78a94597f4f19fea633851fa2 -# with: -# client-id: ${{ secrets.CLIENT_ID }} -# tenant-id: ${{ secrets.TENANT_ID }} -# subscription-id: ${{ secrets.SUBSCRIPTION_ID }} -# -# - name: Run Service on Docker -# shell: bash -# id: run_service_docker -# run: | -# cd ./docker -# chmod +x ./run_docker.sh -# ./run_docker.sh local -# -# - name: Run Integration Tests -# shell: bash -# id: run_integration_test -# run: | -# export CUCUMBER_PUBLISH_TOKEN=${{ secrets.CUCUMBER_PUBLISH_TOKEN }} -# export RECEIPTS_COSMOS_CONN_STRING='${{ secrets.RECEIPTS_COSMOS_CONN_STRING }}' -# export BIZEVENTS_COSMOS_CONN_STRING='${{ secrets.BIZEVENTS_COSMOS_CONN_STRING }}' -# cd ./integration-test -# chmod +x ./run_integration_test.sh -# ./run_integration_test.sh local - -# delete_github_deployments: -# runs-on: ubuntu-latest -# needs: smoke-test -# if: ${{ always() }} -# steps: -# - name: Dump GitHub context -# env: -# GITHUB_CONTEXT: ${{ toJSON(github) }} -# run: echo "$GITHUB_CONTEXT" -# -# - name: Delete Previous deployments -# uses: actions/github-script@v6 -# env: -# SHA_HEAD: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.sha) || github.sha}} -# with: -# script: | -# const { SHA_HEAD } = process.env -# -# const deployments = await github.rest.repos.listDeployments({ -# owner: context.repo.owner, -# repo: context.repo.repo, -# sha: SHA_HEAD -# }); -# await Promise.all( -# deployments.data.map(async (deployment) => { -# await github.rest.repos.createDeploymentStatus({ -# owner: context.repo.owner, -# repo: context.repo.repo, -# deployment_id: deployment.id, -# state: 'inactive' -# }); -# return github.rest.repos.deleteDeployment({ -# owner: context.repo.owner, -# repo: context.repo.repo, -# deployment_id: deployment.id -# }); -# }) -# ); diff --git a/performance-test/src/DockerfileLoad b/performance-test/src/DockerfileLoad deleted file mode 100644 index a3769e1..0000000 --- a/performance-test/src/DockerfileLoad +++ /dev/null @@ -1,11 +0,0 @@ -# specify the node base image with your desired version node: -FROM node:16 - -WORKDIR /src/node-app - -COPY ./ ./ - -RUN yarn install - -ENTRYPOINT yarn start-load-events-${ENV:-dev} - diff --git a/performance-test/src/DockerfileReview b/performance-test/src/DockerfileReview deleted file mode 100644 index 598cccd..0000000 --- a/performance-test/src/DockerfileReview +++ /dev/null @@ -1,11 +0,0 @@ -# specify the node base image with your desired version node: -FROM node:16 - -WORKDIR /src/node-app - -COPY ./ ./ - -RUN yarn install - -ENTRYPOINT ["yarn", "start-performance-review"] - From 930688a2633aaf33df435b4ccebac547678c1f1a Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 11:49:32 +0100 Subject: 
[PATCH 06/14] [PPANTT-170] fix: Perf test insert events script + improved review script --- performance-test/src/gpd_ingestion_test.js | 28 +++++++----- performance-test/src/modules/pg_gpd_client.js | 8 ++-- performance-test/src/package.json | 2 +- performance-test/src/scripts/review_script.js | 45 ++++++++++++------- 4 files changed, 52 insertions(+), 31 deletions(-) diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js index 5db6108..ecd8fce 100644 --- a/performance-test/src/gpd_ingestion_test.js +++ b/performance-test/src/gpd_ingestion_test.js @@ -2,23 +2,24 @@ const { insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions } = require("./modules/pg_gpd_client.js"); const { REDIS_ARRAY_IDS_TOKENIZED, REDIS_ARRAY_IDS_NOT_TOKENIZED } = require("./modules/common.js"); -const { setValueRedis } = require("./modules/redis_client.js"); +const { setValueRedis, shutDownClient } = require("./modules/redis_client.js"); const NUMBER_OF_EVENTS = JSON.parse(process.env.NUMBER_OF_EVENTS); -function insertEvents() { +async function insertEvents() { const arrayIdTokenized = []; const arrayIdNotTokenized = []; console.log("Selected number of events: ", NUMBER_OF_EVENTS); // SAVE ON DB paymentPositions for (let i = 0; i < NUMBER_OF_EVENTS; i++) { - const idValidFiscalCode = "PERFORMANCE_GPD_INGESTION_VALID_FISCAL_CODE_" + new Date().getTime(); - insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); + const timestamp = Number(new Date().getTime()); + const idValidFiscalCode = timestamp + i; + await insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); arrayIdTokenized.push(idValidFiscalCode); - const idInvalidFiscalCode = "PERFORMANCE_GPD_INGESTION_INVALID_FISCAL_CODE_" + new Date().getTime(); - insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); + const idInvalidFiscalCode = timestamp + i + (NUMBER_OF_EVENTS * 2); + await insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); arrayIdNotTokenized.push(idInvalidFiscalCode); } console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); @@ -26,12 +27,19 @@ function insertEvents() { // SAVE ID ARRAYS ON REDIS - setValueRedis(REDIS_ARRAY_IDS_TOKENIZED, arrayIdTokenized); - setValueRedis(REDIS_ARRAY_IDS_NOT_TOKENIZED, arrayIdNotTokenized); + await setValueRedis({ key: REDIS_ARRAY_IDS_TOKENIZED, value: JSON.stringify(arrayIdTokenized) }); + await setValueRedis({ key: REDIS_ARRAY_IDS_NOT_TOKENIZED, value: JSON.stringify(arrayIdNotTokenized) }); // DELETE paymentPositions - deletePaymentPositions(); + await deletePaymentPositions(); console.log("Deleted payment positions"); + + await shutDownClient(); + + return null; } -insertEvents(); \ No newline at end of file +insertEvents().then(() => { + console.log("Insert script ended"); + process.exit(); +}); \ No newline at end of file diff --git a/performance-test/src/modules/pg_gpd_client.js b/performance-test/src/modules/pg_gpd_client.js index 3090151..cc9022e 100644 --- a/performance-test/src/modules/pg_gpd_client.js +++ b/performance-test/src/modules/pg_gpd_client.js @@ -8,7 +8,9 @@ const databaseName = process.env.PG_GPD_DATABASE_NAME; //SECRETS const password = process.env.PG_GPD_PASSWORD; -// TODO LOAD ENVS FROM ENV FILE +// TODO PG USER AS A SECRET + +// TODO DEVOPS const pool = new Pool({ user: username, @@ -36,11 +38,11 @@ async function shutDownPool() { } async function insertPaymentPositionWithValidFiscalCode(id) { 
- await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${VALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', '${VALID_CF}', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', 'ORG_FISCAL_CODE_${id}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); } async function insertPaymentPositionWithInvalidFiscalCode(id) { - await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', 'VNTMHL76M09H501D', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', '${INVALID_CF}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. 
- Edit', 'IT', 'micheleventimiglia@skilabmail.com', '${INVALID_CF}', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', 'ORG_FISCAL_CODE_${id}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); } async function deletePaymentPositions() { diff --git a/performance-test/src/package.json index dc1564e..6460415 100644 --- a/performance-test/src/package.json +++ b/performance-test/src/package.json @@ -5,7 +5,7 @@ "scripts": { "start-load-events-dev": "export $(cat .env.dev | xargs) && node ./gpd_ingestion_test.js", "start-load-events-uat": "export $(cat .env.uat | xargs) && node ./gpd_ingestion_test.js", - "start-performance-review": "export $(cat .env | xargs) && node ./scripts/review_script.js" + "start-performance-review": "node ./scripts/review_script.js" }, "devDependencies": { diff --git a/performance-test/src/scripts/review_script.js index 7e0d7c2..1b6e8af 100644 --- a/performance-test/src/scripts/review_script.js +++ b/performance-test/src/scripts/review_script.js @@ -6,7 +6,7 @@ const REDIS_ING_SUFFIX = "-ing-c"; // Performance Debezium connector // 1. Retrieve messages from topic "raw" -// 2. Calculate difference between timestamps -> obj.source.tsMs (time of insert on db) : obj.tsMs (time of insert on eventhub) +// 2. Calculate difference between timestamps -> obj.ts_ms (time of insert on eventhub) : obj.source.ts_ms (time of insert on db) // Performance gpd-ingestion-manager // 1. Retrieve messages from topic "ingested" // 2. Calculate difference between raw and ingested timestamps -> rawMsg.timestamp (timestamp of the message from topic raw) : ingestedMsg.timestamp (timestamp of the message from topic ingested) @@ -30,30 +30,33 @@ const reviewIngestionTimeToProcess = async () => { let failedIngested = 0; // RETRIEVE ARRAYS OF IDS - const tokenizedIds = readFromRedisWithKey(REDIS_ARRAY_IDS_TOKENIZED); - const notTokenizedIds = readFromRedisWithKey(REDIS_ARRAY_IDS_NOT_TOKENIZED); + const tokenizedIds = await readFromRedisWithKey(REDIS_ARRAY_IDS_TOKENIZED); + const arrTokenizedParsed = JSON.parse(tokenizedIds); + const notTokenizedIds = await readFromRedisWithKey(REDIS_ARRAY_IDS_NOT_TOKENIZED); + const arrNotTokenizedParsed = JSON.parse(notTokenizedIds); - for (const id of tokenizedIds) { + for (const id of arrTokenizedParsed) { // RETRIEVE RAW MESSAGE FROM REDIS - const rawMsg = readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + console.log("Retrieving from Redis message with id: " + id); + const rawMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); if (rawMsg) { const rawMsgValue = JSON.parse(rawMsg).value; - console.log("Processing raw message with id: " + rawMsgValue.after.id); + console.log("Processing raw message with id: " + id); // CALCULATE TIME TO CAPTURE - let timePsgToRaw = rawMsgValue.source.tsMs - rawMsgValue.tsMs; + let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; arrayTimePsgToRaw.push(timePsgToRaw); totalTimePsgToRaw += timePsgToRaw; minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ?
timePsgToRaw : maxTimePsgToRaw; // RETRIEVE TOKENIZED MESSAGE FROM REDIS WITH RAW OBJ ID - const tokenizedMsg = readFromRedisWithKey(id + REDIS_ING_SUFFIX); + const tokenizedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); if (tokenizedMsg) { const tokenizedMsgValue = JSON.parse(tokenizedMsg).value; - console.log("Processing tokenized message with id: " + tokenizedMsgValue.after.id); + console.log("Processing tokenized message with id: " + id); // CALCULATE TIME TO TOKENIZE let timeRawToTokenize = rawMsg.timestamp - tokenizedMsgValue.timestamp; @@ -72,26 +75,27 @@ const reviewIngestionTimeToProcess = async () => { } - for (const id of notTokenizedIds) { + for (const id of arrNotTokenizedParsed) { // RETRIEVE RAW MESSAGE FROM REDIS - const rawMsg = readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + console.log("Retrieving from Redis message with id: " + id); + const rawMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); if (rawMsg) { - const rawMsgValue = JSON.parse(rawMsg.value.toString()); + const rawMsgValue = JSON.parse(rawMsg).value; console.log("Processing raw message with id: " + id); // CALCULATE TIME TO CAPTURE - let timePsgToRaw = rawMsgValue.source.tsMs - rawMsgValue.tsMs; + let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; arrayTimePsgToRaw.push(timePsgToRaw); totalTimePsgToRaw += timePsgToRaw; minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; // RETRIEVE INGESTED MESSAGE FROM REDIS WITH RAW OBJ ID - const ingestedMsg = readFromRedisWithKey(id + REDIS_ING_SUFFIX); + const ingestedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); if (ingestedMsg) { - const ingestedMsgValue = JSON.parse(ingestedMsg.value.toString()); + const ingestedMsgValue = JSON.parse(ingestedMsg).value; console.log("Processing ingested message with id: " + id); // CALCULATE TIME TO INGEST WITHOUT TOKENIZER @@ -114,6 +118,8 @@ const reviewIngestionTimeToProcess = async () => { console.log("/----------- METRICS -----------/"); console.log("/////////////////////////////////"); console.log("--------------------------------"); + console.log(`total messages....................: ${arrTokenizedParsed.length + arrNotTokenizedParsed.length}`); + console.log("--------------------------------"); console.log(`mean time to capture..............: ${totalTimePsgToRaw ? getTimeString(Math.round(totalTimePsgToRaw / arrayTimePsgToRaw.length)) : "-"}`); console.log(`mean time to tokenize.............: ${totalTimeRawToTokenize ? getTimeString(Math.round(totalTimeRawToTokenize / arrayTimeRawToTokenize.length)) : "-"}`); console.log(`mean time to ingest...............: ${totalTimeRawToIngest ? 
getTimeString(Math.round(totalTimeRawToIngest / arrayTimeRawToIngest.length)) : "-"}`); @@ -133,11 +139,16 @@ const reviewIngestionTimeToProcess = async () => { console.log("/------------- END -------------/"); console.log("/////////////////////////////////"); - shutDownClient(); + await shutDownClient(); + + return null; } function getTimeString(time) { return `${time}ms | ${time / 1000}s`; } -reviewIngestionTimeToProcess(); \ No newline at end of file +reviewIngestionTimeToProcess().then(() => { + console.log("Review script ended"); + process.exit(); +}); \ No newline at end of file From ed00c536e21ad695f965bb7ebaa9033ace958d1b Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 11:51:26 +0100 Subject: [PATCH 07/14] [PPANTT-170] chore: clean code --- performance-test/src/scripts/review_script.js | 1 - 1 file changed, 1 deletion(-) diff --git a/performance-test/src/scripts/review_script.js index 1b6e8af..c12382c 100644 --- a/performance-test/src/scripts/review_script.js +++ b/performance-test/src/scripts/review_script.js @@ -149,6 +149,5 @@ function getTimeString(time) { } reviewIngestionTimeToProcess().then(() => { - console.log("Review script ended"); process.exit(); }); \ No newline at end of file From 04b216e092f3f59ea51fe5a81a6d0d9f725e96f5 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 13:15:14 +0100 Subject: [PATCH 08/14] [PPANTT-170] fix: Perf test insert db fixed ids --- performance-test/src/gpd_ingestion_test.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/performance-test/src/gpd_ingestion_test.js index ecd8fce..fdf4c68 100644 --- a/performance-test/src/gpd_ingestion_test.js +++ b/performance-test/src/gpd_ingestion_test.js @@ -13,17 +13,17 @@ async function insertEvents() { console.log("Selected number of events: ", NUMBER_OF_EVENTS); // SAVE ON DB paymentPositions for (let i = 0; i < NUMBER_OF_EVENTS; i++) { - const timestamp = Number(new Date().getTime()); - const idValidFiscalCode = timestamp + i; + const uniqueId = 120798 + i; + const idValidFiscalCode = uniqueId; await insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); arrayIdTokenized.push(idValidFiscalCode); - const idInvalidFiscalCode = timestamp + i + (NUMBER_OF_EVENTS * 2); + const idInvalidFiscalCode = uniqueId + (NUMBER_OF_EVENTS * 2); await insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); arrayIdNotTokenized.push(idInvalidFiscalCode); } - console.log("Inserted in database paymentOptions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); - console.log("Inserted in database paymentOptions with invalid fiscal code with ids: ", JSON.stringify(arrayIdNotTokenized)); + console.log("Inserted in database paymentPositions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); + console.log("Inserted in database paymentPositions with invalid fiscal code with ids: ", JSON.stringify(arrayIdNotTokenized)); // SAVE ID ARRAYS ON REDIS From 59eb34af887a1247e57788fd5c116d3ddaebcc63 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 15:22:18 +0100 Subject: [PATCH 09/14] [PPANTT-170] feat: Completed performance tests --- performance-test/src/scripts/review_script.js | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/performance-test/src/scripts/review_script.js index c12382c..3801b8d 100644 ---
a/performance-test/src/scripts/review_script.js +++ b/performance-test/src/scripts/review_script.js @@ -38,10 +38,10 @@ const reviewIngestionTimeToProcess = async () => { for (const id of arrTokenizedParsed) { // RETRIEVE RAW MESSAGE FROM REDIS console.log("Retrieving from Redis message with id: " + id); - const rawMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); - + const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + const rawMsg = JSON.parse(retrievedMsg); if (rawMsg) { - const rawMsgValue = JSON.parse(rawMsg).value; + const rawMsgValue = rawMsg.value; console.log("Processing raw message with id: " + id); // CALCULATE TIME TO CAPTURE @@ -55,11 +55,11 @@ const reviewIngestionTimeToProcess = async () => { const tokenizedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); if (tokenizedMsg) { - const tokenizedMsgValue = JSON.parse(tokenizedMsg).value; + const tokenizedMsgValue = JSON.parse(tokenizedMsg); console.log("Processing tokenized message with id: " + id); // CALCULATE TIME TO TOKENIZE - let timeRawToTokenize = rawMsg.timestamp - tokenizedMsgValue.timestamp; + let timeRawToTokenize = Number(tokenizedMsgValue.timestamp) - Number(rawMsg.timestamp); arrayTimeRawToTokenize.push(timeRawToTokenize); totalTimeRawToTokenize += timeRawToTokenize; minTimeRawToTokenize = minTimeRawToTokenize === null || timeRawToTokenize < minTimeRawToTokenize ? timeRawToTokenize : minTimeRawToTokenize; @@ -78,10 +78,10 @@ const reviewIngestionTimeToProcess = async () => { for (const id of arrNotTokenizedParsed) { // RETRIEVE RAW MESSAGE FROM REDIS console.log("Retrieving from Redis message with id: " + id); - const rawMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); - + const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + const rawMsg = JSON.parse(retrievedMsg); if (rawMsg) { - const rawMsgValue = JSON.parse(rawMsg).value; + const rawMsgValue = rawMsg.value; console.log("Processing raw message with id: " + id); // CALCULATE TIME TO CAPTURE @@ -95,11 +95,11 @@ const reviewIngestionTimeToProcess = async () => { const ingestedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); if (ingestedMsg) { - const ingestedMsgValue = JSON.parse(ingestedMsg).value; + const ingestedMsgValue = JSON.parse(ingestedMsg); console.log("Processing ingested message with id: " + id); // CALCULATE TIME TO INGEST WITHOUT TOKENIZER - let timeRawToIngest = rawMsg.timestamp - ingestedMsgValue.timestamp; + let timeRawToIngest = Number(ingestedMsgValue.timestamp) - Number(rawMsg.timestamp); arrayTimeRawToIngest.push(timeRawToIngest); totalTimeRawToIngest += timeRawToIngest; minTimeRawToIngest = minTimeRawToIngest === null || timeRawToIngest < minTimeRawToIngest ? 
timeRawToIngest : minTimeRawToIngest; From eeb17f0845e5fe523c143d9f02c2b46a3bc3d652 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 15:47:20 +0100 Subject: [PATCH 10/14] [PPANTT-170] feat: improvements --- performance-test/src/gpd_ingestion_test.js | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js index fdf4c68..b188889 100644 --- a/performance-test/src/gpd_ingestion_test.js +++ b/performance-test/src/gpd_ingestion_test.js @@ -7,12 +7,15 @@ const { setValueRedis, shutDownClient } = require("./modules/redis_client.js"); const NUMBER_OF_EVENTS = JSON.parse(process.env.NUMBER_OF_EVENTS); async function insertEvents() { + // Clean up paymentPositions + await deletePaymentPositions(); + const arrayIdTokenized = []; const arrayIdNotTokenized = []; console.log("Selected number of events: ", NUMBER_OF_EVENTS); // SAVE ON DB paymentPositions - for (let i = 0; i < NUMBER_OF_EVENTS; i++) { + for (let i = 0; i < (Math.floor(NUMBER_OF_EVENTS / 2)); i++) { const uniqueId = 120798 + i; const idValidFiscalCode = uniqueId; await insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); @@ -22,8 +25,8 @@ async function insertEvents() { await insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); arrayIdNotTokenized.push(idInvalidFiscalCode); } - console.log("Inserted in database paymentPositions with valid fiscal code with ids: ", JSON.stringify(arrayIdTokenized)); - console.log("Inserted in database paymentPositions with invalid fiscal code with ids: ", JSON.stringify(arrayIdNotTokenized)); + console.log(`Inserted ${arrayIdTokenized.length} elements in database paymentPositions with valid fiscal code with ids: `, JSON.stringify(arrayIdTokenized)); + console.log(`Inserted ${arrayIdNotTokenized.length} elements in database paymentPositions with invalid fiscal code with ids: `, JSON.stringify(arrayIdNotTokenized)); // SAVE ID ARRAYS ON REDIS From 395427d0f6e0fb3005e238578f48e76be78f7168 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 16:38:25 +0100 Subject: [PATCH 11/14] [PPANTT-170] feat: Perf test db user as a secret --- .devops/performance-test-pipelines.yml | 6 ++++-- performance-test/src/.env.dev | 1 - performance-test/src/.env.uat | 1 - 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml index f92701d..46cb796 100644 --- a/.devops/performance-test-pipelines.yml +++ b/.devops/performance-test-pipelines.yml @@ -21,13 +21,14 @@ variables: ${{ if eq(parameters['ENVIRONMENT'], 'dev') }}: poolImage: "pagopa-dev-loadtest-linux" PG_GPD_PASSWORD: "$(DEV_PG_GPD_PASSWORD)" + PG_GPD_USER: "$(DEV_PG_GPD_USER)" INGESTION_EVENTHUB_CONN_STRING: "${DEV_INGESTION_EVENTHUB_CONN_STRING}" ${{ if eq(parameters['ENVIRONMENT'], 'uat') }}: poolImage: "pagopa-uat-loadtest-linux" PG_GPD_PASSWORD: "$(UAT_PG_GPD_PASSWORD)" + PG_GPD_USER: "$(UAT_PG_GPD_USER)" INGESTION_EVENTHUB_CONN_STRING: "${UAT_INGESTION_EVENTHUB_CONN_STRING}" - pool: name: $(poolImage) @@ -43,11 +44,12 @@ steps: - script: | cd ./performance-test/src npm install - export PG_GPD_PASSWORD=${PG_GPD_PASSWORD} NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} && \ + export PG_GPD_USER=${PG_GPD_USER} PG_GPD_PASSWORD=${PG_GPD_PASSWORD} NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} && \ npm run start-load-events-${ENV} displayName: Run insert events on postgres env: ENV: ${{ parameters.ENV }} + PG_GPD_USER: ${{ variables.PG_GPD_USER }} 
PG_GPD_PASSWORD: ${{ variables.PG_GPD_PASSWORD }} NUMBER_OF_EVENTS: ${{ parameters.NUMBER_OF_EVENTS }} diff --git a/performance-test/src/.env.dev b/performance-test/src/.env.dev index 50aeeec..1997c53 100644 --- a/performance-test/src/.env.dev +++ b/performance-test/src/.env.dev @@ -1,3 +1,2 @@ -PG_GPD_USERNAME=apduser PG_GPD_SERVER_NAME=pagopa-d-weu-gpd-pgflex.postgres.database.azure.com PG_GPD_DATABASE_NAME=apd \ No newline at end of file diff --git a/performance-test/src/.env.uat b/performance-test/src/.env.uat index e971a87..0c14a8e 100644 --- a/performance-test/src/.env.uat +++ b/performance-test/src/.env.uat @@ -1,3 +1,2 @@ -PG_GPD_USERNAME=apduser PG_GPD_SERVER_NAME=pagopa-u-weu-gpd-pgflex.postgres.database.azure.com PG_GPD_DATABASE_NAME=apd \ No newline at end of file From cb8f7ac724c776a6dbd0581eae6786ecfc82f7b6 Mon Sep 17 00:00:00 2001 From: svariant Date: Thu, 21 Nov 2024 17:54:34 +0100 Subject: [PATCH 12/14] [PPANTT-170] fix: PG username perf test pipeline --- .devops/performance-test-pipelines.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml index 46cb796..4f10214 100644 --- a/.devops/performance-test-pipelines.yml +++ b/.devops/performance-test-pipelines.yml @@ -21,12 +21,12 @@ variables: ${{ if eq(parameters['ENVIRONMENT'], 'dev') }}: poolImage: "pagopa-dev-loadtest-linux" PG_GPD_PASSWORD: "$(DEV_PG_GPD_PASSWORD)" - PG_GPD_USER: "$(DEV_PG_GPD_USER)" + PG_GPD_USERNAME: "$(DEV_PG_GPD_USERNAME)" INGESTION_EVENTHUB_CONN_STRING: "${DEV_INGESTION_EVENTHUB_CONN_STRING}" ${{ if eq(parameters['ENVIRONMENT'], 'uat') }}: poolImage: "pagopa-uat-loadtest-linux" PG_GPD_PASSWORD: "$(UAT_PG_GPD_PASSWORD)" - PG_GPD_USER: "$(UAT_PG_GPD_USER)" + PG_GPD_USERNAME: "$(UAT_PG_GPD_USERNAME)" INGESTION_EVENTHUB_CONN_STRING: "${UAT_INGESTION_EVENTHUB_CONN_STRING}" pool: @@ -44,12 +44,12 @@ steps: - script: | cd ./performance-test/src npm install - export PG_GPD_USER=${PG_GPD_USER} PG_GPD_PASSWORD=${PG_GPD_PASSWORD} NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} && \ + export PG_GPD_USERNAME=${PG_GPD_USERNAME} PG_GPD_PASSWORD=${PG_GPD_PASSWORD} NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} && \ npm run start-load-events-${ENV} displayName: Run insert events on postgres env: ENV: ${{ parameters.ENV }} - PG_GPD_USER: ${{ variables.PG_GPD_USER }} + PG_GPD_USERNAME: ${{ variables.PG_GPD_USERNAME }} PG_GPD_PASSWORD: ${{ variables.PG_GPD_PASSWORD }} NUMBER_OF_EVENTS: ${{ parameters.NUMBER_OF_EVENTS }} From 5c59e9c9f2614a75bc2f0cd4bf01c4fdb8684bae Mon Sep 17 00:00:00 2001 From: svariant Date: Fri, 22 Nov 2024 14:28:01 +0100 Subject: [PATCH 13/14] [PPANTT-170] feat: Clean code --- performance-test/src/modules/pg_gpd_client.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/performance-test/src/modules/pg_gpd_client.js b/performance-test/src/modules/pg_gpd_client.js index cc9022e..0699b43 100644 --- a/performance-test/src/modules/pg_gpd_client.js +++ b/performance-test/src/modules/pg_gpd_client.js @@ -2,15 +2,11 @@ const { ENTITY_IDENTIFIER, INVALID_CF, VALID_CF } = require('./common.js'); const { Pool } = require('pg'); //COMMON -const username = process.env.PG_GPD_USERNAME; const serverName = process.env.PG_GPD_SERVER_NAME; const databaseName = process.env.PG_GPD_DATABASE_NAME; //SECRETS const password = process.env.PG_GPD_PASSWORD; - -// TODO PG USER AS A SECRET - -// TODO DEVOPS +const username = process.env.PG_GPD_USERNAME; const pool = new Pool({ user: username, From 
3509bf25349e0c56a28319a1b3791e48befcab15 Mon Sep 17 00:00:00 2001 From: svariant Date: Fri, 22 Nov 2024 14:29:22 +0100 Subject: [PATCH 14/14] [PPANTT-170] feat: Improved perf test review script --- performance-test/src/scripts/review_script.js | 148 +++++++++--------- 1 file changed, 77 insertions(+), 71 deletions(-) diff --git a/performance-test/src/scripts/review_script.js b/performance-test/src/scripts/review_script.js index 3801b8d..6c65ecd 100644 --- a/performance-test/src/scripts/review_script.js +++ b/performance-test/src/scripts/review_script.js @@ -35,90 +35,96 @@ const reviewIngestionTimeToProcess = async () => { const notTokenizedIds = await readFromRedisWithKey(REDIS_ARRAY_IDS_NOT_TOKENIZED); const arrNotTokenizedParsed = JSON.parse(notTokenizedIds); - for (const id of arrTokenizedParsed) { - // RETRIEVE RAW MESSAGE FROM REDIS - console.log("Retrieving from Redis message with id: " + id); - const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); - const rawMsg = JSON.parse(retrievedMsg); - if (rawMsg) { - const rawMsgValue = rawMsg.value; - console.log("Processing raw message with id: " + id); - - // CALCULATE TIME TO CAPTURE - let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; - arrayTimePsgToRaw.push(timePsgToRaw); - totalTimePsgToRaw += timePsgToRaw; - minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; - maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; - - // RETRIEVE TOKENIZED MESSAGE FROM REDIS WITH RAW OBJ ID - const tokenizedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); - - if (tokenizedMsg) { - const tokenizedMsgValue = JSON.parse(tokenizedMsg); - console.log("Processing tokenized message with id: " + id); - - // CALCULATE TIME TO TOKENIZE - let timeRawToTokenize = Number(tokenizedMsgValue.timestamp) - Number(rawMsg.timestamp); - arrayTimeRawToTokenize.push(timeRawToTokenize); - totalTimeRawToTokenize += timeRawToTokenize; - minTimeRawToTokenize = minTimeRawToTokenize === null || timeRawToTokenize < minTimeRawToTokenize ? timeRawToTokenize : minTimeRawToTokenize; - maxTimeRawToTokenize = maxTimeRawToTokenize === null || timeRawToTokenize > maxTimeRawToTokenize ? timeRawToTokenize : maxTimeRawToTokenize; + if(arrTokenizedParsed){ + for (const id of arrTokenizedParsed) { + // RETRIEVE RAW MESSAGE FROM REDIS + console.log("Retrieving from Redis message with id: " + id); + const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + const rawMsg = JSON.parse(retrievedMsg); + if (rawMsg) { + const rawMsgValue = rawMsg.value; + console.log("Processing raw message with id: " + id); + + // CALCULATE TIME TO CAPTURE + let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; + arrayTimePsgToRaw.push(timePsgToRaw); + totalTimePsgToRaw += timePsgToRaw; + minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; + maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? 
timePsgToRaw : maxTimePsgToRaw; + + // RETRIEVE TOKENIZED MESSAGE FROM REDIS WITH RAW OBJ ID + const tokenizedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); + + if (tokenizedMsg) { + const tokenizedMsgValue = JSON.parse(tokenizedMsg); + console.log("Processing tokenized message with id: " + id); + + // CALCULATE TIME TO TOKENIZE + let timeRawToTokenize = Number(tokenizedMsgValue.timestamp) - Number(rawMsg.timestamp); + arrayTimeRawToTokenize.push(timeRawToTokenize); + totalTimeRawToTokenize += timeRawToTokenize; + minTimeRawToTokenize = minTimeRawToTokenize === null || timeRawToTokenize < minTimeRawToTokenize ? timeRawToTokenize : minTimeRawToTokenize; + maxTimeRawToTokenize = maxTimeRawToTokenize === null || timeRawToTokenize > maxTimeRawToTokenize ? timeRawToTokenize : maxTimeRawToTokenize; + } else { + console.log("Fail to tokenize message with id: " + id); + failedTokenized += 1; + } } else { - console.log("Fail to tokenize message with id: " + id); - failedTokenized += 1; + console.log("Fail to capture message with id: " + id); + failedRaw += 1; } - } else { - console.log("Fail to capture message with id: " + id); - failedRaw += 1; - } + } } - for (const id of arrNotTokenizedParsed) { - // RETRIEVE RAW MESSAGE FROM REDIS - console.log("Retrieving from Redis message with id: " + id); - const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); - const rawMsg = JSON.parse(retrievedMsg); - if (rawMsg) { - const rawMsgValue = rawMsg.value; - console.log("Processing raw message with id: " + id); - - // CALCULATE TIME TO CAPTURE - let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; - arrayTimePsgToRaw.push(timePsgToRaw); - totalTimePsgToRaw += timePsgToRaw; - minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; - maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; - - // RETRIEVE INGESTED MESSAGE FROM REDIS WITH RAW OBJ ID - const ingestedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); - - if (ingestedMsg) { - const ingestedMsgValue = JSON.parse(ingestedMsg); - console.log("Processing ingested message with id: " + id); - - // CALCULATE TIME TO INGEST WITHOUT TOKENIZER - let timeRawToIngest = Number(ingestedMsgValue.timestamp) - Number(rawMsg.timestamp); - arrayTimeRawToIngest.push(timeRawToIngest); - totalTimeRawToIngest += timeRawToIngest; - minTimeRawToIngest = minTimeRawToIngest === null || timeRawToIngest < minTimeRawToIngest ? timeRawToIngest : minTimeRawToIngest; - maxTimeRawToIngest = maxTimeRawToIngest === null || timeRawToIngest > maxTimeRawToIngest ? timeRawToIngest : maxTimeRawToIngest; + if(arrNotTokenizedParsed){ + for (const id of arrNotTokenizedParsed) { + // RETRIEVE RAW MESSAGE FROM REDIS + console.log("Retrieving from Redis message with id: " + id); + const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + const rawMsg = JSON.parse(retrievedMsg); + if (rawMsg) { + const rawMsgValue = rawMsg.value; + console.log("Processing raw message with id: " + id); + + // CALCULATE TIME TO CAPTURE + let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; + arrayTimePsgToRaw.push(timePsgToRaw); + totalTimePsgToRaw += timePsgToRaw; + minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; + maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? 
timePsgToRaw : maxTimePsgToRaw; + + // RETRIEVE INGESTED MESSAGE FROM REDIS WITH RAW OBJ ID + const ingestedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); + + if (ingestedMsg) { + const ingestedMsgValue = JSON.parse(ingestedMsg); + console.log("Processing ingested message with id: " + id); + + // CALCULATE TIME TO INGEST WITHOUT TOKENIZER + let timeRawToIngest = Number(ingestedMsgValue.timestamp) - Number(rawMsg.timestamp); + arrayTimeRawToIngest.push(timeRawToIngest); + totalTimeRawToIngest += timeRawToIngest; + minTimeRawToIngest = minTimeRawToIngest === null || timeRawToIngest < minTimeRawToIngest ? timeRawToIngest : minTimeRawToIngest; + maxTimeRawToIngest = maxTimeRawToIngest === null || timeRawToIngest > maxTimeRawToIngest ? timeRawToIngest : maxTimeRawToIngest; + } else { + console.log("Fail to ingest message with id: " + id); + failedIngested += 1; + } } else { - console.log("Fail to ingest message with id: " + id); - failedIngested += 1; + console.log("Fail to capture message with id: " + id); + failedRaw += 1; } - } else { - console.log("Fail to capture message with id: " + id); - failedRaw += 1; } } console.log("/////////////////////////////////"); console.log("/----------- METRICS -----------/"); console.log("/////////////////////////////////"); - console.log("--------------------------------"); - console.log(`total messages....................: ${arrTokenizedParsed.length + arrNotTokenizedParsed.length}`); + if(arrTokenizedParsed && arrNotTokenizedParsed){ + console.log("--------------------------------"); + console.log(`total messages....................: ${arrTokenizedParsed.length + arrNotTokenizedParsed.length}`); + } console.log("--------------------------------"); console.log(`mean time to capture..............: ${totalTimePsgToRaw ? getTimeString(Math.round(totalTimePsgToRaw / arrayTimePsgToRaw.length)) : "-"}`); console.log(`mean time to tokenize.............: ${totalTimeRawToTokenize ? getTimeString(Math.round(totalTimeRawToTokenize / arrayTimeRawToTokenize.length)) : "-"}`);
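Note on the review metrics (illustrative sketch, not part of the patch series): review_script.js repeats the same min/max/total bookkeeping for each metric (capture, tokenize, ingest). A possible follow-up would factor that pattern into one small accumulator; the TimingStats name and the sample payload below are hypothetical:

    // Accumulator for one latency metric; mirrors the inline min/max
    // ternaries used throughout review_script.js.
    class TimingStats {
        constructor() {
            this.samples = [];
            this.total = 0;
            this.min = null;
            this.max = null;
        }
        add(sample) {
            this.samples.push(sample);
            this.total += sample;
            this.min = this.min === null || sample < this.min ? sample : this.min;
            this.max = this.max === null || sample > this.max ? sample : this.max;
        }
        mean() {
            return this.samples.length ? Math.round(this.total / this.samples.length) : null;
        }
    }

    // Time to capture, as in the script: the Debezium envelope carries
    // ts_ms (write to the event hub) and source.ts_ms (write to postgres).
    // The timestamps here are made-up sample data.
    const psgToRaw = new TimingStats();
    const rawMsgValue = { ts_ms: 1732202400500, source: { ts_ms: 1732202400100 } };
    psgToRaw.add(rawMsgValue.ts_ms - rawMsgValue.source.ts_ms);
    console.log(`mean time to capture: ${psgToRaw.mean()}ms`);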
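Note on the postgres client (illustrative sketch, not part of the patch series): pg_gpd_client.js interpolates ids, fiscal codes, and the iupd directly into its SQL strings. For generated test data this works, but the pg driver also supports parameterized queries, which sidestep quoting and injection issues. A minimal sketch reusing the module's environment variables; the helper names, the trimmed column list, and the host/port/ssl options are assumptions, not the module's actual configuration:

    const { Pool } = require('pg');

    // Assumed connection options; the real pool setup lives in pg_gpd_client.js.
    const pool = new Pool({
        user: process.env.PG_GPD_USERNAME,
        password: process.env.PG_GPD_PASSWORD,
        host: process.env.PG_GPD_SERVER_NAME,
        database: process.env.PG_GPD_DATABASE_NAME,
        port: 5432,
        ssl: true
    });

    // $1, $2, ... placeholders let the driver handle escaping. The real
    // payment_position table has many more required columns; this only
    // shows the shape of the call.
    async function insertPaymentPosition(id, fiscalCode, iupd) {
        await pool.query(
            'INSERT INTO apd.apd.payment_position (id, fiscal_code, iupd) VALUES ($1, $2, $3)',
            [id, fiscalCode, iupd]
        );
    }

    async function deletePaymentPositionsByIupd(iupd) {
        await pool.query('DELETE FROM apd.apd.payment_position WHERE iupd = $1', [iupd]);
    }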