diff --git a/.devops/code-review-pipelines.yml b/.devops/code-review-pipelines.yml deleted file mode 100644 index b4dc602..0000000 --- a/.devops/code-review-pipelines.yml +++ /dev/null @@ -1,64 +0,0 @@ -# Maven -# Build your Java project and run tests with Apache Maven. -# Add steps that analyze code, save build artifacts, deploy, and more: -# https://docs.microsoft.com/azure/devops/pipelines/languages/java - -# Automatically triggered on PR -# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema%2Cparameter-schema#pr-trigger -trigger: - - main -pr: - - main - -pool: - vmImage: 'ubuntu-latest' - -variables: - MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository - MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)' - -steps: - - task: Cache@2 - inputs: - key: 'maven | "$(Agent.OS)" | pom.xml' - restoreKeys: | - maven | "$(Agent.OS)" - maven - path: $(MAVEN_CACHE_FOLDER) - displayName: Cache Maven local repo - - - task: SonarCloudPrepare@1 - displayName: 'Prepare SonarCloud analysis configuration' - inputs: - SonarCloud: '$(SONARCLOUD_SERVICE_CONN)' - organization: '$(SONARCLOUD_ORG)' - scannerMode: Other - extraProperties: | - sonar.projectKey=$(SONARCLOUD_PROJECT_KEY) - sonar.projectName=$(SONARCLOUD_PROJECT_NAME) - sonar.coverage.exclusions=**/config/*,**/*Mock*,**/model/**,**/entity/* - sonar.cpd.exclusions=**/model/**,**/entity/* - - - - task: Maven@3 - displayName: 'Run Junit Test' - inputs: - mavenPomFile: 'pom.xml' - mavenOptions: '-Xmx3072m $(MAVEN_OPTS)' - mavenVersionOption: 'Default' - mavenAuthenticateFeed: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.11' - publishJUnitResults: true - testResultsFiles: '**/surefire-reports/TEST-*.xml' - goals: 'clean verify' - sonarQubeRunAnalysis: true - codeCoverageToolOption: 'JaCoCo' - effectivePomSkip: false - isJacocoCoverageReportXML: true - sqMavenPluginVersionChoice: 'latest' - - - task: SonarCloudPublish@1 - displayName: 'Publish SonarCloud results on build summary' - inputs: - pollingTimeoutSec: '300' diff --git a/.devops/deploy-pipelines-aks.yml b/.devops/deploy-pipelines-aks.yml deleted file mode 100644 index 33356a6..0000000 --- a/.devops/deploy-pipelines-aks.yml +++ /dev/null @@ -1,223 +0,0 @@ -# Only manual triggers -trigger: none -pr: none - -pool: - vmImage: 'ubuntu-22.04' - -parameters: - - name: ENV - displayName: Target Environment - type: string - default: dev - values: - - dev - - uat - - prod - - name: SEMVER - displayName: "When packing a release, define the version bump to apply. 
Use only buildNumber or skip for manual deployment" - type: string - values: - - major - - minor - - patch - - buildNumber - - skip - default: skip - - name: "FORCE_REPLACE_DOCKER_IMAGE" - displayName: "Force the existing docker image to be replaced" - type: boolean - default: False - values: - - False - - True - - name: TEST - displayName: Run integration/smoke tests - type: boolean - default: true - -variables: - imageRepository: '$(IMAGE_REPOSITORY_NAME)' - nameSpace: '$(K8S_NAMESPACE)' - - ${{ if eq(parameters['ENV'], 'dev') }}: - dockerRegistryServiceConnection: $(DEV_CONTAINER_REGISTRY_SERVICE_CONN) - dockerRegistryFqdn: $(DEV_CONTAINER_NAMESPACE) - kubernetesServiceConnection: '$(DEV_KUBERNETES_SERVICE_CONN)' - poolImage: 'pagopa-dev-linux' - appInsightsServiceConn: "$(TF_APPINSIGHTS_SERVICE_CONN_DEV)" - appInsightsResourceId: "$(TF_APPINSIGHTS_RESOURCE_ID_DEV)" - ${{ if eq(parameters['ENV'], 'uat') }}: - dockerRegistryServiceConnection: $(UAT_CONTAINER_REGISTRY_SERVICE_CONN) - dockerRegistryFqdn: $(UAT_CONTAINER_NAMESPACE) - kubernetesServiceConnection: '$(UAT_KUBERNETES_SERVICE_CONN)' - poolImage: 'pagopa-uat-linux' - appInsightsServiceConn: "$(TF_APPINSIGHTS_SERVICE_CONN_UAT)" - appInsightsResourceId: "$(TF_APPINSIGHTS_RESOURCE_ID_UAT)" - ${{ if eq(parameters['ENV'], 'prod') }}: - dockerRegistryServiceConnection: $(PROD_CONTAINER_REGISTRY_SERVICE_CONN) - dockerRegistryFqdn: $(PROD_CONTAINER_NAMESPACE) - kubernetesServiceConnection: '$(PROD_KUBERNETES_SERVICE_CONN)' - poolImage: 'pagopa-prod-linux' - appInsightsServiceConn: "$(TF_APPINSIGHTS_SERVICE_CONN_PROD)" - appInsightsResourceId: "$(TF_APPINSIGHTS_RESOURCE_ID_PROD)" - - ${{ if eq(variables['Build.SourceBranchName'], 'merge') }}: - sourceBranch: "main" # force to main branch - ${{ if ne(variables['Build.SourceBranchName'], 'merge') }}: - sourceBranch: ${{ variables['Build.SourceBranchName'] }} - - - -resources: - repositories: - - repository: pagopaCommons - type: github - name: pagopa/azure-pipeline-templates - ref: refs/tags/v2.10.1 - endpoint: 'io-azure-devops-github-ro' - -stages: - - # Create a release on GitHub - - stage: Release - jobs: - - job: make_release - steps: - - checkout: self - clean: true - persistCredentials: true - - - ${{ if ne(parameters.SEMVER, 'skip') }}: - - template: templates/maven-github-release/template.yaml@pagopaCommons - parameters: - release_branch: $(sourceBranch) - gitEmail: $(GIT_EMAIL) - gitUsername: $(GIT_USERNAME) - gitHubConnection: $(GITHUB_CONNECTION) - ${{ if ne(parameters.SEMVER, 'skip') }}: - semver: '${{ parameters.SEMVER }}' - ${{ if eq(parameters.SEMVER, 'skip') }}: - semver: 'buildNumber' # this case is impossible due to main condition, but it is necessary to work property - - - template: templates/maven-github-current-version/template.yaml@pagopaCommons - - # Build and Push Docker Image - - stage: Build - dependsOn: Release - variables: - current_version: $[ stageDependencies.Release.make_release.outputs['current_version.value'] ] - jobs: - - job: "build" - steps: - - checkout: self - persistCredentials: true - - - script: | - git checkout $(sourceBranch) - displayName: Checkout and update branch - - - template: templates/docker-release/template.yaml@pagopaCommons - parameters: - CONTAINER_REGISTRY_SERVICE_CONN: $(dockerRegistryServiceConnection) - CONTAINER_REGISTRY_FQDN: $(dockerRegistryFqdn) - DOCKER_IMAGE_NAME: $(imageRepository) - DOCKER_IMAGE_TAG: $(current_version) - FORCE_REPLACE_DOCKER_IMAGE: ${{ parameters.FORCE_REPLACE_DOCKER_IMAGE }} - - # # Testing the docker 
image - # - stage: Smoke_Test - # dependsOn: - # - Build - # condition: and(succeeded(), eq('${{ parameters.TEST }}', 'true')) - # jobs: - # - job: smoke_tests - # steps: - # - checkout: self - # persistCredentials: true - # - # - script: | - # git checkout $(sourceBranch) - # displayName: Checkout and update branch - # - # - task: Docker@2 - # displayName: "Docker login" - # inputs: - # containerRegistry: "$(dockerRegistryServiceConnection)" - # command: "login" - # - # - task: Bash@3 - # displayName: 'Run Smoke Tests' - # inputs: - # targetType: 'inline' - # script: | - # cd ./integration-test - # sh run_integration_test.sh - # env: - # containerRegistry: $(dockerRegistryFqdn) - - # Deploy on K8s with Helm - - stage: Deploy - # condition: not(failed('Smoke_Test')) - dependsOn: - - Release - - Build - # - Smoke_Test - variables: - deploy_version: $[ stageDependencies.Release.make_release.outputs['current_version.value'] ] - jobs: - - deployment: "deploy" - pool: - name: $(poolImage) - environment: ${{ parameters['ENV'] }} - strategy: - runOnce: - deploy: - steps: - - checkout: self - persistCredentials: true - - - script: | - git checkout $(sourceBranch) - displayName: Checkout and update branch - - - template: templates/helm-microservice-chart-setup/template.yaml@pagopaCommons - parameters: - DEPLOY_VERSION: $(deploy_version) - - - template: templates/helm-microservice-chart-deploy/template.yaml@pagopaCommons - parameters: - DO_DEPLOY: true - ENV: ${{ parameters['ENV'] }} - KUBERNETES_SERVICE_CONN: ${{ variables.kubernetesServiceConnection }} - NAMESPACE: $(nameSpace) - APP_NAME: $(imageRepository) - VALUE_FILE: "helm/values-${{ parameters['ENV'] }}.yaml" - DEPLOY_VERSION: $(deploy_version) - APPINSIGHTS_SERVICE_CONN: ${{ variables.appInsightsServiceConn }} - APPINSIGHTS_RESOURCE_ID: ${{ variables.appInsightsResourceId }} - - - # Run Tests -# - stage: Integration_Test -# dependsOn: -# - Deploy -# condition: and(succeeded(), eq('${{ parameters.TEST }}', 'true')) -# jobs: -# - job: integration_tests -# steps: -# - checkout: self -# persistCredentials: true -# -# - script: | -# git checkout $(sourceBranch) -# displayName: Checkout and update branch -# -# - task: Bash@3 -# displayName: 'Run Integration Tests' -# inputs: -# targetType: 'inline' -# script: | -# cd ./integration-test/src -# yarn install -# yarn test:${{ parameters.ENV }} diff --git a/.devops/deploy-pipelines-standard.yml b/.devops/deploy-pipelines-standard.yml deleted file mode 100644 index e134ac5..0000000 --- a/.devops/deploy-pipelines-standard.yml +++ /dev/null @@ -1,298 +0,0 @@ -parameters: - - name: ENV - displayName: Target Environment - type: string - default: dev - values: - - dev - - uat - - prod - - name: SEMVER - displayName: "When packing a release, define the version bump to apply. 
Use only buildNumber or skip for manual deployment" - type: string - values: - - major - - minor - - patch - - buildNumber - - skip - default: skip - - name: TEST - displayName: Run tests - type: boolean - default: false - -variables: - ${{ if eq(parameters['ENV'], 'dev') }}: - AZURE_SUBSCRIPTION: $(DEV_AZURE_SUBSCRIPTION) - RESOURCE_GROUP: $(DEV_WEB_APP_RESOURCE_GROUP_NAME) - APP_NAME: $(DEV_WEB_APP_NAME) - STAGE: "d" - dockerRegistryServiceConnection: $(DEV_CONTAINER_REGISTRY) - dockerNamespace: $(DEV_CONTAINER_NAMESPACE) - - ${{ if eq(parameters['ENV'], 'uat') }}: - AZURE_SUBSCRIPTION: $(UAT_AZURE_SUBSCRIPTION) - RESOURCE_GROUP: $(UAT_WEB_APP_RESOURCE_GROUP_NAME) - APP_NAME: $(UAT_WEB_APP_NAME) - STAGE: "u" - dockerRegistryServiceConnection: $(UAT_CONTAINER_REGISTRY) - dockerNamespace: $(UAT_CONTAINER_NAMESPACE) - - ${{ if eq(parameters['ENV'], 'prod') }}: - AZURE_SUBSCRIPTION: $(PROD_AZURE_SUBSCRIPTION) - RESOURCE_GROUP: $(PROD_WEB_APP_RESOURCE_GROUP_NAME) - APP_NAME: $(PROD_WEB_APP_NAME) - STAGE: "p" - dockerRegistryServiceConnection: $(PROD_CONTAINER_REGISTRY) - dockerNamespace: $(PROD_CONTAINER_NAMESPACE) - - ${{ if eq(variables['Build.SourceBranchName'], 'merge') }}: - SOURCE_BRANCH: "main" # force to main branch - ${{ if ne(variables['Build.SourceBranchName'], 'merge') }}: - SOURCE_BRANCH: ${{ variables['Build.SourceBranchName'] }} - - - - MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository - MAVEN_OPTS: "-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)" - title: "" - sha: "" - tag: "" - -# Only manual triggers -trigger: none -pr: none - -pool: - vmImage: ubuntu-latest - -stages: - - # Create a release on GitHub - - stage: release_service - condition: ne('${{ parameters.SEMVER }}', 'skip') - pool: - vmImage: "ubuntu-latest" - jobs: - - job: releaseService - steps: - - checkout: self - clean: true - persistCredentials: true - - - script: | - git checkout $(SOURCE_BRANCH) - - - script: | - git config --global user.name "$(GIT_USERNAME)" - git config --global user.email "$(GIT_EMAIL)" - - - template: azure-templates/maven-versioning.yml - parameters: - semver: "${{ parameters.SEMVER }}" - - - task: Bash@3 - name: pomversion - inputs: - targetType: "inline" - script: | - version=$(mvn -f pom.xml help:evaluate -Dexpression=project.version -q -DforceStdout) - echo "##vso[task.setvariable variable=next;isOutput=true]$version" - failOnStderr: true - - - script: | - git add pom.xml - git commit -m "Bump version [skip ci]" - git push origin $(SOURCE_BRANCH) - - - - script: | - HEAD_SHA=$(git rev-parse HEAD) - TAG="$(pomversion.next)" - TITLE="Release $(pomversion.next)" - echo "##vso[task.setvariable variable=title]$TITLE" - echo "##vso[task.setvariable variable=sha]$HEAD_SHA" - echo "##vso[task.setvariable variable=tag]$TAG" - - - script: | - echo $(tag) - echo $(title) - echo $(sha) - - - # create new release - - task: GitHubRelease@0 - inputs: - gitHubConnection: $(GITHUB_CONNECTION) - repositoryName: $(Build.Repository.Name) - action: create - target: $(sha) - tagSource: manual - tag: $(tag) - title: $(title) - addChangelog: true - - # Deploy on Azure - - stage: deploy - condition: not(failed('releaseService')) - pool: - vmImage: "ubuntu-latest" - jobs: - - job: deployJava - steps: - - - script: | - echo ${{variables['Build.SourceBranchName']}} - echo $(SOURCE_BRANCH) - echo $(TEST) - - - checkout: self - clean: true - persistCredentials: true - submodules: true - - - script: | - echo "Checkout on $(SOURCE_BRANCH)" - git checkout $(SOURCE_BRANCH) - displayName: Checkout on source branch - 
- # - task: Bash@3 - # name: application_properties - # inputs: - # targetType: "inline" - # script: | - # cp src/main/resources/application-azure-$(STAGE).properties src/main/resources/application.properties - # cat src/main/resources/application.properties - # failOnStderr: true - - - task: Cache@2 - inputs: - key: 'maven | "$(Agent.OS)" | pom.xml' - restoreKeys: | - maven | "$(Agent.OS)" - maven - path: $(MAVEN_CACHE_FOLDER) - displayName: Set Maven Cache - - - task: Bash@3 - # When the maven task is installed, mvn can be used in a script - name: pomversion - inputs: - targetType: "inline" - script: | - version=$(mvn -f pom.xml help:evaluate -Dexpression=project.version -q -DforceStdout) - echo "##vso[task.setvariable variable=next;isOutput=true]$version" - failOnStderr: true - - - task: Maven@3 - inputs: - mavenPomFile: 'pom.xml' - publishJUnitResults: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.11' - mavenVersionOption: 'Default' - mavenOptions: '-Xmx3072m $(MAVEN_OPTS)' - mavenAuthenticateFeed: false - effectivePomSkip: false - sonarQubeRunAnalysis: false - - - task: Docker@2 - displayName: Build and push an image to container registry - inputs: - containerRegistry: '$(dockerRegistryServiceConnection)' - repository: '$(IMAGE_REPOSITORY)' - command: 'buildAndPush' - tags: | - $(Build.BuildId) - latest - $(pomversion.next) - - # deploy project-fn - - task: AzureFunctionAppContainer@1 - displayName: Deploy Function App [DEV] - inputs: - azureSubscription: $(AZURE_SUBSCRIPTION) - appName: "${{variables.DEV_WEB_APP_NAME}}-fn-project" - imageName: "${{variables.DEV_CONTAINER_NAMESPACE}}/project:$(Build.BuildId)" - slotName: production - - - script: | - echo "##vso[task.setvariable variable=version;isOutput=true]$(pomversion.next)" - name: dockerTag - - - # Run test - - stage: test - # run this stage only if 'test' is enabled - condition: and(not(failed('deployJava')), eq('${{ parameters.TEST }}', 'true')) - pool: - vmImage: "ubuntu-latest" - jobs: - # is needed to wait for startup of application - - job: waitStartup - pool: Server - steps: - - task: Delay@1 - inputs: - delayForMinutes: '10' - - # - job: integrationTests - # dependsOn: waitStartup - # steps: - # - script: | - # git checkout $(SOURCE_BRANCH) - - # - script: | - # yarn global add newman - # displayName: 'newman installation' - - # - script: | - # newman run api-test/Project.postman_collection.json --environment=api-test/Azure.postman_environment.json --reporters cli,junit --reporter-junit-export Results/api-config-TEST.xml --verbose - # displayName: 'Run api test' - # continueOnError: false - - # - task: PublishTestResults@2 - # condition: always() - # inputs: - # testResultsFormat: 'JUnit' - # testResultsFiles: '**/*-TEST.xml' - # searchFolder: '$(System.DefaultWorkingDirectory)' - - - job: deployUAT - dependsOn: integrationTests - variables: - version: $[ stageDependencies.deploy.deployJava.outputs['dockerTag.version'] ] - steps: - - task: Maven@3 - inputs: - mavenPomFile: 'pom.xml' - publishJUnitResults: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.11' - mavenVersionOption: 'Default' - mavenOptions: '-Xmx3072m $(MAVEN_OPTS)' - mavenAuthenticateFeed: false - effectivePomSkip: false - sonarQubeRunAnalysis: false - - - task: Docker@2 - displayName: Build and push an image to UAT container registry - inputs: - containerRegistry: '$(UAT_CONTAINER_REGISTRY)' - repository: '$(IMAGE_REPOSITORY)' - command: 'buildAndPush' - tags: | - $(Build.BuildId) - latest - $(version) - - # deploy project-fn - - 
task: AzureFunctionAppContainer@1 - displayName: Deploy Function App [UAT] - condition: in('${{ parameters.ENV }}', 'uat') - inputs: - azureSubscription: $(AZURE_SUBSCRIPTION) - appName: "${{variables.UAT_WEB_APP_NAME}}-fn-project}" - imageName: "${{variables.UAT_CONTAINER_NAMESPACE}}/project:$(Build.BuildId)" - slotName: production \ No newline at end of file diff --git a/.devops/performance-test-pipelines.yml b/.devops/performance-test-pipelines.yml new file mode 100644 index 0000000..4f10214 --- /dev/null +++ b/.devops/performance-test-pipelines.yml @@ -0,0 +1,66 @@ +# azure-pipelines.yml +trigger: none + +parameters: + - name: "ENVIRONMENT" + displayName: "Environment" + type: string + values: + - "dev" + - "uat" + default: "uat" + - name: "NUMBER_OF_EVENTS" + displayName: "Number of events" + type: number + default: 50 + - name: "TIMEOUT" + displayName: "Timeout in seconds" + type: number + default: 10 +variables: + ${{ if eq(parameters['ENVIRONMENT'], 'dev') }}: + poolImage: "pagopa-dev-loadtest-linux" + PG_GPD_PASSWORD: "$(DEV_PG_GPD_PASSWORD)" + PG_GPD_USERNAME: "$(DEV_PG_GPD_USERNAME)" + INGESTION_EVENTHUB_CONN_STRING: "$(DEV_INGESTION_EVENTHUB_CONN_STRING)" + ${{ if eq(parameters['ENVIRONMENT'], 'uat') }}: + poolImage: "pagopa-uat-loadtest-linux" + PG_GPD_PASSWORD: "$(UAT_PG_GPD_PASSWORD)" + PG_GPD_USERNAME: "$(UAT_PG_GPD_USERNAME)" + INGESTION_EVENTHUB_CONN_STRING: "$(UAT_INGESTION_EVENTHUB_CONN_STRING)" + +pool: + name: $(poolImage) + +steps: + - script: | + cd ./test-utils + chmod +x ./run_compose.sh + ./run_compose.sh "${{ variables.INGESTION_EVENTHUB_CONN_STRING }}" "gpd-ingestion.apd.payment_position,cdc-raw-auto.apd.payment_position" + displayName: Run GPD Ingestion Timestamp Review + env: + INGESTION_EVENTHUB_CONN_STRING: ${{ variables.INGESTION_EVENTHUB_CONN_STRING }} + + - script: | + cd ./performance-test/src + npm install + export PG_GPD_USERNAME=${PG_GPD_USERNAME} PG_GPD_PASSWORD=${PG_GPD_PASSWORD} NUMBER_OF_EVENTS=${NUMBER_OF_EVENTS} && \ + npm run start-load-events-${ENV} + displayName: Run insert events on Postgres + env: + ENV: ${{ parameters.ENVIRONMENT }} + PG_GPD_USERNAME: ${{ variables.PG_GPD_USERNAME }} + PG_GPD_PASSWORD: ${{ variables.PG_GPD_PASSWORD }} + NUMBER_OF_EVENTS: ${{ parameters.NUMBER_OF_EVENTS }} + + - script: | + sleep "${TIMEOUT}" + displayName: Wait for timeout + env: + TIMEOUT: ${{ parameters.TIMEOUT }} + + - script: | + cd ./performance-test/src + npm install + npm run start-performance-review + displayName: Run timestamp review diff --git a/.github/workflows/code_review.yml b/.github/workflows/code_review.yml index c33c2cd..3369f25 100644 --- a/.github/workflows/code_review.yml +++ b/.github/workflows/code_review.yml @@ -44,80 +44,3 @@ jobs: maven_version: 3.9.3 coverage_exclusions: "**/config/*,**/*Mock*,**/model/**,**/entity/**,**/producer/**,**/enumeration/**,**/exception/**" cpd_exclusions: "**/model/**,**/entity/*" - -# smoke-test: -# name: Smoke Test -# runs-on: ubuntu-latest -# environment: -# name: dev -# steps: -# - name: Checkout -# id: checkout -# uses: actions/checkout@1f9a0c22da41e6ebfa534300ef656657ea2c6707 -# -# - name: Login -# id: login -# # from https://github.com/Azure/login/commits/master -# uses: azure/login@92a5484dfaf04ca78a94597f4f19fea633851fa2 -# with: -# client-id: ${{ secrets.CLIENT_ID }} -# tenant-id: ${{ secrets.TENANT_ID }} -# subscription-id: ${{ secrets.SUBSCRIPTION_ID }} -# -# - name: Run Service on Docker -# shell: bash -# id: run_service_docker -# run: | -# cd ./docker -# chmod +x ./run_docker.sh -# 
./run_docker.sh local -# -# - name: Run Integration Tests -# shell: bash -# id: run_integration_test -# run: | -# export CUCUMBER_PUBLISH_TOKEN=${{ secrets.CUCUMBER_PUBLISH_TOKEN }} -# export RECEIPTS_COSMOS_CONN_STRING='${{ secrets.RECEIPTS_COSMOS_CONN_STRING }}' -# export BIZEVENTS_COSMOS_CONN_STRING='${{ secrets.BIZEVENTS_COSMOS_CONN_STRING }}' -# cd ./integration-test -# chmod +x ./run_integration_test.sh -# ./run_integration_test.sh local - -# delete_github_deployments: -# runs-on: ubuntu-latest -# needs: smoke-test -# if: ${{ always() }} -# steps: -# - name: Dump GitHub context -# env: -# GITHUB_CONTEXT: ${{ toJSON(github) }} -# run: echo "$GITHUB_CONTEXT" -# -# - name: Delete Previous deployments -# uses: actions/github-script@v6 -# env: -# SHA_HEAD: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.sha) || github.sha}} -# with: -# script: | -# const { SHA_HEAD } = process.env -# -# const deployments = await github.rest.repos.listDeployments({ -# owner: context.repo.owner, -# repo: context.repo.repo, -# sha: SHA_HEAD -# }); -# await Promise.all( -# deployments.data.map(async (deployment) => { -# await github.rest.repos.createDeploymentStatus({ -# owner: context.repo.owner, -# repo: context.repo.repo, -# deployment_id: deployment.id, -# state: 'inactive' -# }); -# return github.rest.repos.deleteDeployment({ -# owner: context.repo.owner, -# repo: context.repo.repo, -# deployment_id: deployment.id -# }); -# }) -# ); diff --git a/performance-test/src/.env.dev b/performance-test/src/.env.dev new file mode 100644 index 0000000..1997c53 --- /dev/null +++ b/performance-test/src/.env.dev @@ -0,0 +1,2 @@ +PG_GPD_SERVER_NAME=pagopa-d-weu-gpd-pgflex.postgres.database.azure.com +PG_GPD_DATABASE_NAME=apd \ No newline at end of file diff --git a/performance-test/src/.env.uat b/performance-test/src/.env.uat new file mode 100644 index 0000000..0c14a8e --- /dev/null +++ b/performance-test/src/.env.uat @@ -0,0 +1,2 @@ +PG_GPD_SERVER_NAME=pagopa-u-weu-gpd-pgflex.postgres.database.azure.com +PG_GPD_DATABASE_NAME=apd \ No newline at end of file diff --git a/performance-test/src/gpd_ingestion_test.js b/performance-test/src/gpd_ingestion_test.js new file mode 100644 index 0000000..b188889 --- /dev/null +++ b/performance-test/src/gpd_ingestion_test.js @@ -0,0 +1,48 @@ + + +const { insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions } = require("./modules/pg_gpd_client.js"); +const { REDIS_ARRAY_IDS_TOKENIZED, REDIS_ARRAY_IDS_NOT_TOKENIZED } = require("./modules/common.js"); +const { setValueRedis, shutDownClient } = require("./modules/redis_client.js"); + +const NUMBER_OF_EVENTS = JSON.parse(process.env.NUMBER_OF_EVENTS); + +async function insertEvents() { + // Clean up paymentPositions + await deletePaymentPositions(); + + const arrayIdTokenized = []; + const arrayIdNotTokenized = []; + + console.log("Selected number of events: ", NUMBER_OF_EVENTS); + // SAVE ON DB paymentPositions + for (let i = 0; i < (Math.floor(NUMBER_OF_EVENTS / 2)); i++) { + const uniqueId = 120798 + i; + const idValidFiscalCode = uniqueId; + await insertPaymentPositionWithValidFiscalCode(idValidFiscalCode); + arrayIdTokenized.push(idValidFiscalCode); + + const idInvalidFiscalCode = uniqueId + (NUMBER_OF_EVENTS * 2); + await insertPaymentPositionWithInvalidFiscalCode(idInvalidFiscalCode); + arrayIdNotTokenized.push(idInvalidFiscalCode); + } + console.log(`Inserted ${arrayIdTokenized.length} elements in database paymentPositions with valid fiscal code 
with ids: `, JSON.stringify(arrayIdTokenized)); + console.log(`Inserted ${arrayIdNotTokenized.length} elements in database paymentPositions with invalid fiscal code with ids: `, JSON.stringify(arrayIdNotTokenized)); + + + // SAVE ID ARRAYS ON REDIS + await setValueRedis({ key: REDIS_ARRAY_IDS_TOKENIZED, value: JSON.stringify(arrayIdTokenized) }); + await setValueRedis({ key: REDIS_ARRAY_IDS_NOT_TOKENIZED, value: JSON.stringify(arrayIdNotTokenized) }); + + // DELETE paymentPositions + await deletePaymentPositions(); + console.log("Deleted payment positions"); + + await shutDownClient(); + + return null; +} + +insertEvents().then(() => { + console.log("Insert script ended"); + process.exit(); +}); \ No newline at end of file diff --git a/performance-test/src/modules/common.js b/performance-test/src/modules/common.js new file mode 100644 index 0000000..e5ade40 --- /dev/null +++ b/performance-test/src/modules/common.js @@ -0,0 +1,13 @@ +const VALID_CF = "PRFGPD24S20B157N"; +const INVALID_CF = "invalidCF"; +const ENTITY_IDENTIFIER = "PERFORMANCE_TEST_GPD_INGESTION"; +const REDIS_ARRAY_IDS_TOKENIZED = "redisTokenized"; +const REDIS_ARRAY_IDS_NOT_TOKENIZED = "redisNotTokenized"; + +module.exports = { + VALID_CF, + INVALID_CF, + ENTITY_IDENTIFIER, + REDIS_ARRAY_IDS_TOKENIZED, + REDIS_ARRAY_IDS_NOT_TOKENIZED +} \ No newline at end of file diff --git a/performance-test/src/modules/pg_gpd_client.js b/performance-test/src/modules/pg_gpd_client.js new file mode 100644 index 0000000..0699b43 --- /dev/null +++ b/performance-test/src/modules/pg_gpd_client.js @@ -0,0 +1,51 @@ +const { ENTITY_IDENTIFIER, INVALID_CF, VALID_CF } = require('./common.js'); +const { Pool } = require('pg'); + +//COMMON +const serverName = process.env.PG_GPD_SERVER_NAME; +const databaseName = process.env.PG_GPD_DATABASE_NAME; +//SECRETS +const password = process.env.PG_GPD_PASSWORD; +const username = process.env.PG_GPD_USERNAME; + +const pool = new Pool({ + user: username, + database: databaseName, + password: password, + host: serverName, + port: 5432, + ssl: true +}); + +const connection = { + pool, + query: (...args) => { + return pool.connect().then((client) => { + return client.query(...args).then((res) => { + client.release(); + return res.rows; + }); + }); + }, +}; + +async function shutDownPool() { + await pool.end(); +} + +async function insertPaymentPositionWithValidFiscalCode(id) { + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. 
- Edit', 'IT', 'micheleventimiglia@skilabmail.com', '${VALID_CF}', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', 'ORG_FISCAL_CODE_${id}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); +} + +async function insertPaymentPositionWithInvalidFiscalCode(id) { + await connection.query(`INSERT INTO apd.apd.payment_position (id, city, civic_number, company_name, country, email, fiscal_code, full_name, inserted_date, iupd, last_updated_date, max_due_date, min_due_date, office_name, organization_fiscal_code, phone, postal_code, province, publish_date, region, status, street_name, "type", validity_date, "version", switch_to_expired, payment_date, pull, pay_stand_in, service_type) VALUES('${id}', 'Pizzo Calabro', '11', 'SkyLab Inc. - Edit', 'IT', 'micheleventimiglia@skilabmail.com', '${INVALID_CF}', 'Michele Ventimiglia', '2024-11-12 16:09:43.477', '${ENTITY_IDENTIFIER}', '2024-11-12 16:09:43.479', '2024-12-12 16:09:43.323', '2024-12-12 16:09:43.323', 'SkyLab - Sede via Washington - Edit', 'ORG_FISCAL_CODE_${id}', '333-123456789', '89812', 'VV', '2024-11-12 16:09:43.479', 'CA', 'VALID', 'via Washington', 'F', '2024-11-12 16:09:43.479', 0, false, NULL, true, false, 'GPD');`); +} + +async function deletePaymentPositions() { + await connection.query(`DELETE FROM apd.apd.payment_position WHERE iupd='${ENTITY_IDENTIFIER}'`); +} + +module.exports = { + shutDownPool, + insertPaymentPositionWithValidFiscalCode, insertPaymentPositionWithInvalidFiscalCode, deletePaymentPositions +} \ No newline at end of file diff --git a/performance-test/src/modules/redis_client.js b/performance-test/src/modules/redis_client.js new file mode 100644 index 0000000..a7f8f02 --- /dev/null +++ b/performance-test/src/modules/redis_client.js @@ -0,0 +1,34 @@ +const { createClient } = require('redis'); + +const redisHost = "127.0.0.1"; +const redisPort = "6379"; + +const client = createClient({ + socket: { + port: redisPort, + host: redisHost + } +}); + +client.on('error', err => console.log('Redis Client Error', err)) +client.connect(); + +client.on('connect', function () { + console.log('Connected!'); +}); + +async function readFromRedisWithKey(key) { + return await client.get(key); +} + +async function setValueRedis({ key, value }) { + return await client.set(key, value); +} + +async function shutDownClient() { + await client.quit(); +} + +module.exports = { + readFromRedisWithKey, shutDownClient, setValueRedis +} \ No newline at end of file diff --git a/performance-test/src/package.json b/performance-test/src/package.json new file mode 100644 index 0000000..6460415 --- /dev/null +++ b/performance-test/src/package.json @@ -0,0 +1,15 @@ +{ + "name": "pagopa-gpd-ingestion-manager", + "version": "0.0.1", + "dependencies": {}, + "scripts": { + "start-load-events-dev": "export $(cat .env.dev | xargs) && node ./gpd_ingestion_test.js", + "start-load-events-uat": "export $(cat .env.uat | xargs) && node ./gpd_ingestion_test.js", + "start-performance-review": "node ./scripts/review_script.js" + }, + "devDependencies": { + "pg": "^8.13.1", + "redis": "^4.7.0", + "dotenv": "^16.1.4" + } +} \ No newline at end of file diff --git a/performance-test/src/scripts/review_script.js b/performance-test/src/scripts/review_script.js new file mode 100644 index 0000000..6c65ecd --- /dev/null +++ 
b/performance-test/src/scripts/review_script.js @@ -0,0 +1,159 @@ +const { REDIS_ARRAY_IDS_NOT_TOKENIZED, REDIS_ARRAY_IDS_TOKENIZED } = require("../modules/common.js"); +const { readFromRedisWithKey, shutDownClient } = require("../modules/redis_client.js"); + +const REDIS_RAW_SUFFIX = "-raw-c"; +const REDIS_ING_SUFFIX = "-ing-c"; + +// Performance of the Debezium connector +// 1. Retrieve messages from topic "raw" +// 2. Calculate the difference between timestamps -> obj.ts_ms (time of insert on the event hub) : obj.source.ts_ms (time of insert on the db) +// Performance of the gpd-ingestion-manager +// 1. Retrieve messages from topic "ingested" +// 2. Calculate the difference between raw and ingested timestamps -> rawMsg.timestamp (timestamp of the message from topic raw) : ingestedMsg.timestamp (timestamp of the message from topic ingested) +const reviewIngestionTimeToProcess = async () => { + let arrayTimePsgToRaw = []; + let totalTimePsgToRaw = 0; + let minTimePsgToRaw = null; + let maxTimePsgToRaw = null; + let failedRaw = 0; + + let arrayTimeRawToTokenize = []; + let totalTimeRawToTokenize = 0; + let minTimeRawToTokenize = null; + let maxTimeRawToTokenize = null; + let failedTokenized = 0; + + let arrayTimeRawToIngest = []; + let totalTimeRawToIngest = 0; + let minTimeRawToIngest = null; + let maxTimeRawToIngest = null; + let failedIngested = 0; + + // RETRIEVE ARRAYS OF IDS + const tokenizedIds = await readFromRedisWithKey(REDIS_ARRAY_IDS_TOKENIZED); + const arrTokenizedParsed = JSON.parse(tokenizedIds); + const notTokenizedIds = await readFromRedisWithKey(REDIS_ARRAY_IDS_NOT_TOKENIZED); + const arrNotTokenizedParsed = JSON.parse(notTokenizedIds); + + if(arrTokenizedParsed){ + for (const id of arrTokenizedParsed) { + // RETRIEVE RAW MESSAGE FROM REDIS + console.log("Retrieving from Redis message with id: " + id); + const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + const rawMsg = JSON.parse(retrievedMsg); + if (rawMsg) { + const rawMsgValue = rawMsg.value; + console.log("Processing raw message with id: " + id); + + // CALCULATE TIME TO CAPTURE + let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; + arrayTimePsgToRaw.push(timePsgToRaw); + totalTimePsgToRaw += timePsgToRaw; + minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; + maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; + + // RETRIEVE TOKENIZED MESSAGE FROM REDIS WITH RAW OBJ ID + const tokenizedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); + + if (tokenizedMsg) { + const tokenizedMsgValue = JSON.parse(tokenizedMsg); + console.log("Processing tokenized message with id: " + id); + + // CALCULATE TIME TO TOKENIZE + let timeRawToTokenize = Number(tokenizedMsgValue.timestamp) - Number(rawMsg.timestamp); + arrayTimeRawToTokenize.push(timeRawToTokenize); + totalTimeRawToTokenize += timeRawToTokenize; + minTimeRawToTokenize = minTimeRawToTokenize === null || timeRawToTokenize < minTimeRawToTokenize ? timeRawToTokenize : minTimeRawToTokenize; + maxTimeRawToTokenize = maxTimeRawToTokenize === null || timeRawToTokenize > maxTimeRawToTokenize ?
timeRawToTokenize : maxTimeRawToTokenize; + } else { + console.log("Failed to tokenize message with id: " + id); + failedTokenized += 1; + } + } else { + console.log("Failed to capture message with id: " + id); + failedRaw += 1; + } + + } + } + + if(arrNotTokenizedParsed){ + for (const id of arrNotTokenizedParsed) { + // RETRIEVE RAW MESSAGE FROM REDIS + console.log("Retrieving from Redis message with id: " + id); + const retrievedMsg = await readFromRedisWithKey(id + REDIS_RAW_SUFFIX); + const rawMsg = JSON.parse(retrievedMsg); + if (rawMsg) { + const rawMsgValue = rawMsg.value; + console.log("Processing raw message with id: " + id); + + // CALCULATE TIME TO CAPTURE + let timePsgToRaw = rawMsgValue.ts_ms - rawMsgValue.source.ts_ms; + arrayTimePsgToRaw.push(timePsgToRaw); + totalTimePsgToRaw += timePsgToRaw; + minTimePsgToRaw = minTimePsgToRaw === null || timePsgToRaw < minTimePsgToRaw ? timePsgToRaw : minTimePsgToRaw; + maxTimePsgToRaw = maxTimePsgToRaw === null || timePsgToRaw > maxTimePsgToRaw ? timePsgToRaw : maxTimePsgToRaw; + + // RETRIEVE INGESTED MESSAGE FROM REDIS WITH RAW OBJ ID + const ingestedMsg = await readFromRedisWithKey(id + REDIS_ING_SUFFIX); + + if (ingestedMsg) { + const ingestedMsgValue = JSON.parse(ingestedMsg); + console.log("Processing ingested message with id: " + id); + + // CALCULATE TIME TO INGEST WITHOUT TOKENIZER + let timeRawToIngest = Number(ingestedMsgValue.timestamp) - Number(rawMsg.timestamp); + arrayTimeRawToIngest.push(timeRawToIngest); + totalTimeRawToIngest += timeRawToIngest; + minTimeRawToIngest = minTimeRawToIngest === null || timeRawToIngest < minTimeRawToIngest ? timeRawToIngest : minTimeRawToIngest; + maxTimeRawToIngest = maxTimeRawToIngest === null || timeRawToIngest > maxTimeRawToIngest ? timeRawToIngest : maxTimeRawToIngest; + } else { + console.log("Failed to ingest message with id: " + id); + failedIngested += 1; + } + } else { + console.log("Failed to capture message with id: " + id); + failedRaw += 1; + } + } + } + + console.log("/////////////////////////////////"); + console.log("/----------- METRICS -----------/"); + console.log("/////////////////////////////////"); + if(arrTokenizedParsed && arrNotTokenizedParsed){ + console.log("--------------------------------"); + console.log(`total messages....................: ${arrTokenizedParsed.length + arrNotTokenizedParsed.length}`); + } + console.log("--------------------------------"); + console.log(`mean time to capture..............: ${arrayTimePsgToRaw.length ? getTimeString(Math.round(totalTimePsgToRaw / arrayTimePsgToRaw.length)) : "-"}`); + console.log(`mean time to tokenize.............: ${arrayTimeRawToTokenize.length ? getTimeString(Math.round(totalTimeRawToTokenize / arrayTimeRawToTokenize.length)) : "-"}`); + console.log(`mean time to ingest...............: ${arrayTimeRawToIngest.length ? getTimeString(Math.round(totalTimeRawToIngest / arrayTimeRawToIngest.length)) : "-"}`); + console.log("--------------------------------"); + console.log(`min time to capture...............: ${minTimePsgToRaw !== null ? getTimeString(minTimePsgToRaw) : "-"}`); + console.log(`min time to tokenize..............: ${minTimeRawToTokenize !== null ? getTimeString(minTimeRawToTokenize) : "-"}`); + console.log(`min time to ingest................: ${minTimeRawToIngest !== null ? getTimeString(minTimeRawToIngest) : "-"}`); + console.log("--------------------------------"); + console.log(`max time to capture...............: ${maxTimePsgToRaw !== null ? getTimeString(maxTimePsgToRaw) : "-"}`); + console.log(`max time to tokenize..............: ${maxTimeRawToTokenize !== null ? getTimeString(maxTimeRawToTokenize) : "-"}`); + console.log(`max time to ingest................: ${maxTimeRawToIngest !== null ? getTimeString(maxTimeRawToIngest) : "-"}`); + console.log("--------------------------------"); + console.log(`failed to be captured.............: ${failedRaw}`); + console.log(`failed to be tokenized............: ${failedTokenized}`); + console.log(`failed to be ingested.............: ${failedIngested}`); + console.log("/////////////////////////////////"); + console.log("/------------- END -------------/"); + console.log("/////////////////////////////////"); + + await shutDownClient(); + + return null; +} + +function getTimeString(time) { + return `${time}ms | ${time / 1000}s`; +} + +reviewIngestionTimeToProcess().then(() => { + process.exit(); +}); \ No newline at end of file
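Reviewer note (outside the diff): the latency math in review_script.js reduces to two subtractions over the message shapes described in its header comment: the Debezium envelope on the raw topic (value.ts_ms is when the connector emitted the change event, value.source.ts_ms when the row was committed to Postgres), plus the Kafka-style timestamp field carried by both the raw and the ingested messages. A minimal sketch of that arithmetic, assuming those shapes; the helper names and sample values below are illustrative, not part of this changeset:

// latency_sketch.js (illustrative only; mirrors the arithmetic in review_script.js)
// Capture latency: Postgres commit -> Debezium raw event.
function captureLatencyMs(rawMsg) {
  return rawMsg.value.ts_ms - rawMsg.value.source.ts_ms;
}
// Ingestion latency: raw-topic message -> matching ingested-topic message.
function ingestLatencyMs(rawMsg, ingestedMsg) {
  return Number(ingestedMsg.timestamp) - Number(rawMsg.timestamp);
}
// Fabricated sample shapes, for illustration only.
const raw = { timestamp: "1731427784000", value: { ts_ms: 1731427783900, source: { ts_ms: 1731427783500 } } };
const ingested = { timestamp: "1731427784250" };
console.log(captureLatencyMs(raw));          // 400 (ms)
console.log(ingestLatencyMs(raw, ingested)); // 250 (ms)

To exercise the suite locally, the package.json scripts can be run directly (a Redis reachable at 127.0.0.1:6379 is assumed by redis_client.js): npm run start-load-events-dev to insert the events, then npm run start-performance-review to print the metrics.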