diff --git a/.circleci/config.yml b/.circleci/config.yml
index 35827b80424368..003efda4d779fa 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -15,7 +15,7 @@ experimental:
 templates:
   job_template: &job_template
     docker:
-      - image: gcr.io/datadoghq/agent-circleci-runner:v48262719-bfb00f80
+      - image: gcr.io/datadoghq/agent-circleci-runner:v48372186-ff395e52
     environment:
       USE_SYSTEM_LIBS: "1"
     working_directory: /go/src/github.com/DataDog/datadog-agent
diff --git a/.ddqa/config.toml b/.ddqa/config.toml
index 25324ad07625f0..eac6d757e52829 100644
--- a/.ddqa/config.toml
+++ b/.ddqa/config.toml
@@ -215,8 +215,8 @@ github_labels = ["team/container-app"]
 jira_project = "INPLAT"
 jira_issue_type = "Task"
 jira_statuses = ["Selected For Development", "In Progress", "Done"]
-github_team = "apm-onboarding"
-github_labels = ["team/apm-onboarding"]
+github_team = "injection-platform"
+github_labels = ["team/injection-platform"]
 
 [teams."Agent Release Management"]
 jira_project = "AGNTR"
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 45a9556aa4e545..86aeb529e652f8 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -44,6 +44,8 @@
 /service.datadog.yaml @DataDog/agent-delivery
 /static-analysis.datadog.yml @DataDog/software-integrity-and-trust @DataDog/agent-devx-infra
 
+/modules.yml @DataDog/agent-shared-components
+
 /.circleci/ @DataDog/agent-devx-infra
 
 /.github/CODEOWNERS # do not notify anyone
@@ -99,7 +101,7 @@
 /.gitlab/package_deps_build/package_deps_build.yml @DataDog/agent-devx-infra @DataDog/ebpf-platform
 /.gitlab/powershell_script_signing/powershell_script_signing.yml @DataDog/agent-delivery @DataDog/windows-agent
 /.gitlab/source_test/golang_deps_diff.yml @DataDog/agent-devx-infra @DataDog/agent-devx-loops
-/.gitlab/source_test/include.yml @DataDog/agent-devx-infra
+/.gitlab/source_test/* @DataDog/agent-devx-infra
 /.gitlab/source_test/linux.yml @DataDog/agent-devx-infra @DataDog/agent-devx-loops
 /.gitlab/source_test/macos.yml @DataDog/agent-devx-infra @DataDog/agent-devx-loops
 /.gitlab/source_test/notify.yml @DataDog/agent-devx-infra @DataDog/agent-devx-loops
@@ -306,7 +308,6 @@
 /comp/checks/agentcrashdetect @DataDog/windows-kernel-integrations
 /comp/checks/windowseventlog @DataDog/windows-agent
 /comp/checks/winregistry @DataDog/windows-agent
-/comp/core/agenttelemetry @DataDog/agent-metrics-logs
 /comp/core/autodiscovery @DataDog/container-platform
 /comp/core/sysprobeconfig @DataDog/ebpf-platform
 /comp/core/tagger @DataDog/container-platform
@@ -322,6 +323,9 @@
 /comp/haagent @DataDog/network-device-monitoring @DataDog/remote-config @DataDog/fleet
 # END COMPONENTS
 
+# Additional notification to @iglendd about Agent Telemetry changes for optional approval and governance acknowledgement
+/comp/core/agenttelemetry @DataDog/agent-shared-components @iglendd
+
 # pkg
 /pkg/ @DataDog/agent-shared-components
 /pkg/api/ @DataDog/agent-shared-components
@@ -365,7 +369,7 @@
 /pkg/clusteragent/ @DataDog/container-platform
 /pkg/clusteragent/autoscaling/ @DataDog/container-integrations
 /pkg/clusteragent/admission/mutate/autoscaling @DataDog/container-integrations
-/pkg/clusteragent/admission/mutate/autoinstrumentation/ @DataDog/container-platform @DataDog/apm-onboarding
+/pkg/clusteragent/admission/mutate/autoinstrumentation/ @DataDog/container-platform @DataDog/injection-platform
 /pkg/clusteragent/orchestrator/ @DataDog/container-app
 /pkg/clusteragent/telemetry/ @DataDog/apm-trace-storage
 /pkg/collector/ @DataDog/agent-metrics-logs
@@ -443,6 +447,7 @@
 /pkg/util/ecs/ @DataDog/container-integrations
 /pkg/util/funcs/ @DataDog/ebpf-platform
 /pkg/util/kernel/ @DataDog/ebpf-platform
+/pkg/util/safeelf/ @DataDog/ebpf-platform
 /pkg/util/ktime @DataDog/agent-security
 /pkg/util/kubernetes/ @DataDog/container-integrations @DataDog/container-platform @DataDog/container-app
 /pkg/util/podman/ @DataDog/container-integrations
@@ -564,6 +569,7 @@
 /tasks/libs/ciproviders/ @DataDog/agent-devx-infra
 /tasks/libs/common/omnibus.py @DataDog/agent-delivery
 /tasks/omnibus.py @DataDog/agent-delivery
+/tasks/release.py @DataDog/agent-delivery
 /tasks/unit_tests/components_tests.py @DataDog/agent-shared-components
 /tasks/unit_tests/omnibus_tests.py @DataDog/agent-delivery
 /tasks/unit_tests/testdata/components_src/ @DataDog/agent-shared-components
@@ -610,7 +616,7 @@
 /test/new-e2e/tests/otel @DataDog/opentelemetry
 /test/new-e2e/tests/process @DataDog/processes
 /test/new-e2e/tests/sysprobe-functional @DataDog/windows-kernel-integrations
-/test/new-e2e/tests/security-agent-functional @DataDog/windows-kernel-integrations
+/test/new-e2e/tests/security-agent-functional @DataDog/windows-kernel-integrations @DataDog/agent-security
 /test/new-e2e/tests/cws @DataDog/agent-security
 /test/new-e2e/tests/agent-metrics-logs @DataDog/agent-metrics-logs
 /test/new-e2e/tests/windows @DataDog/windows-agent @DataDog/windows-kernel-integrations
diff --git a/.github/workflows/buildimages-update.yml b/.github/workflows/buildimages-update.yml
index 9c71a457e67aaf..1138e8557d0115 100644
--- a/.github/workflows/buildimages-update.yml
+++ b/.github/workflows/buildimages-update.yml
@@ -37,7 +37,8 @@ jobs:
       - name: Checkout branch
         uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
         with:
-          persist-credentials: false
+          # credentials are needed to create the PR at the end of the workflow
+          persist-credentials: true
 
       - name: Fetch branch
         env:
@@ -89,6 +90,7 @@
         id: update_build_images
         env:
           TEST_VERSION_FLAG: ${{ inputs.test_version && '--test' || '--no-test' }}
+          # INCLUDE_OTEL_MODULES must be used without quotes to be ignored when empty
           INCLUDE_OTEL_MODULES: ${{ inputs.include_otel_modules && '--include-otel-modules' || '' }}
           CURRENT_GO_VERSION: ${{ steps.current_go_version.outputs.GO_VERSION }}
           INPUT_GO_VERSION: ${{ inputs.go_version }}
@@ -98,7 +100,7 @@
            inv -e buildimages.update --tag "$IMAGES_ID" "$TEST_VERSION_FLAG"
            echo "MESSAGE=Update buildimages ID to $IMAGES_ID" >> $GITHUB_OUTPUT
          else
-            inv -e update-go --image-tag "$IMAGES_ID" "$TEST_VERSION_FLAG" "$INCLUDE_OTEL_MODULES" -v "$INPUT_GO_VERSION"
+            inv -e update-go --image-tag "$IMAGES_ID" "$TEST_VERSION_FLAG" $INCLUDE_OTEL_MODULES -v "$INPUT_GO_VERSION"
            echo "MESSAGE=Update Go version to $INPUT_GO_VERSION" >> $GITHUB_OUTPUT
          fi
@@ -145,6 +147,7 @@
           IMAGES_ID: ${{ inputs.images_id }}
           CURRENT_GO_VERSION: ${{ steps.current_go_version.outputs.GO_VERSION }}
           INPUT_GO_VERSION: ${{ inputs.go_version }}
+          # INPUT_TEST_VERSION must be used without quotes to be ignored when empty
           INPUT_TEST_VERSION: ${{ inputs.test_version && '--test' || '' }}
           GITHUB_REF: ${{ github.ref }}
         run: |
@@ -154,7 +157,7 @@
             "$IMAGES_ID" \
             "$CURRENT_GO_VERSION" \
             "$INPUT_GO_VERSION" \
-            "$INPUT_TEST_VERSION" > $TMP_PR_BODY_PATH
+            $INPUT_TEST_VERSION > $TMP_PR_BODY_PATH
 
           # Create the PR
           gh pr create \
diff --git a/.github/workflows/create_rc_pr.yml b/.github/workflows/create_rc_pr.yml
index c801cb82fc45d8..ae15581d3cdea8 100644
--- a/.github/workflows/create_rc_pr.yml
+++ b/.github/workflows/create_rc_pr.yml
@@ -57,6 +57,7 @@ jobs:
     strategy:
       matrix:
         value: ${{fromJSON(needs.find_release_branches.outputs.branches)}}
+      fail-fast: false
     steps:
       - name: Checkout release branch
         uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
@@ -87,7 +88,11 @@
           MATRIX: ${{ matrix.value }}
           WARNING: ${{ needs.find_release_branches.outputs.warning }}
         run: |
-          echo "CHANGES=$(inv -e release.check-for-changes -r "$MATRIX" "$WARNING")" >> $GITHUB_OUTPUT
+          if [ -n "${{ needs.find_release_branches.outputs.warning }}" ]; then
+            echo "CHANGES=$(inv -e release.check-for-changes -r "$MATRIX" "$WARNING")" >> $GITHUB_OUTPUT
+          else
+            echo "CHANGES=$(inv -e release.check-for-changes -r "$MATRIX")" >> $GITHUB_OUTPUT
+          fi
 
       - name: Create RC PR
         if: ${{ steps.check_for_changes.outputs.CHANGES == 'true'}}
diff --git a/.github/workflows/cws-btfhub-sync.yml b/.github/workflows/cws-btfhub-sync.yml
index 639020761ef483..08d0ad720c1da7 100644
--- a/.github/workflows/cws-btfhub-sync.yml
+++ b/.github/workflows/cws-btfhub-sync.yml
@@ -83,11 +83,18 @@
           echo "ARTIFACT_NAME=constants-${{ matrix.cone }}" | tr '/' '-' >> $GITHUB_OUTPUT
 
       - name: Sync constants
+        if: ${{ !inputs.force_refresh }}
         env:
           ARTIFACT_NAME: ${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
-          FORCE_REFRESH: ${{ inputs.force_refresh && '--force-refresh' || '' }}
         run: |
-          inv -e security-agent.generate-btfhub-constants --archive-path=./dev/dist/archive --output-path=./"$ARTIFACT_NAME".json "$FORCE_REFRESH"
+          inv -e security-agent.generate-btfhub-constants --archive-path=./dev/dist/archive --output-path=./"$ARTIFACT_NAME".json
+
+      - name: Force sync constants
+        if: ${{ inputs.force_refresh }}
+        env:
+          ARTIFACT_NAME: ${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
+        run: |
+          inv -e security-agent.generate-btfhub-constants --archive-path=./dev/dist/archive --output-path=./"$ARTIFACT_NAME".json --force-refresh
 
       - name: Upload artifact
         uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4.3.6
@@ -106,7 +113,6 @@
         uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4
         with:
           ref: ${{ inputs.base_branch || 'main' }}
-          persist-credentials: false
 
       - name: Install python
         uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
@@ -158,8 +164,8 @@
             title: 'CWS: sync BTFHub constants',
             owner,
             repo,
-            head: "$BRANCH_NAME",
-            base: "$BASE_BRANCH",
+            head: process.env.BRANCH_NAME,
+            base: process.env.BASE_BRANCH,
             body: [
               '### What does this PR do?',
               'This PR syncs the BTFHub constants used by CWS',
diff --git a/.github/workflows/go-update-commenter.yml b/.github/workflows/go-update-commenter.yml
index 38cb8257372acd..1028110bc4fda4 100644
--- a/.github/workflows/go-update-commenter.yml
+++ b/.github/workflows/go-update-commenter.yml
@@ -5,7 +5,9 @@ on:
     # Only run on PR label events (in particular not on every commit)
     types: [ labeled ]
 
-permissions: {}
+permissions:
+  # write permissions are needed to create the comment
+  pull-requests: write
 
 jobs:
   old-versions-match:
@@ -44,7 +46,7 @@
        run: |
          set -euo pipefail
          # build the base of the Github URL to the current commit
-         GITHUB_HEAD_URL="GITHUB_SERVER_URL/$GITHUB_REPOSITORY/blob/$GITHUB_SHA"
+         GITHUB_HEAD_URL="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/blob/$GITHUB_SHA"
          { echo "matches<= 2.18
    - objdump -p $CI_PROJECT_DIR/$SYSTEM_PROBE_BINARIES_DIR/system-probe | egrep 'GLIBC_2\.(1[8-9]|[2-9][0-9])' && exit 1
    - inv -e system-probe.save-build-outputs $CI_PROJECT_DIR/sysprobe-build-outputs.tar.xz
diff --git a/.gitlab/common/container_publish_job_templates.yml b/.gitlab/common/container_publish_job_templates.yml
index ed92c28a79aa30..600b32655894b1 100644
--- a/.gitlab/common/container_publish_job_templates.yml
+++ b/.gitlab/common/container_publish_job_templates.yml
@@ -6,7 +6,7 @@
   SRC_CWS_INSTRUMENTATION: registry.ddbuild.io/ci/datadog-agent/cws-instrumentation
 
 .docker_publish_job_definition:
-  image: registry.ddbuild.io/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES
+  image: registry.ddbuild.io/ci/datadog-agent-buildimages/docker_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES
   tags: ["arch:amd64"]
   variables:
     <<: *docker_variables
diff --git a/.gitlab/common/macos.yml b/.gitlab/common/macos.yml
index 59b89f44aec66d..5dcf60ab2e130f 100644
--- a/.gitlab/common/macos.yml
+++ b/.gitlab/common/macos.yml
@@ -33,6 +33,11 @@
       fi
     - pyenv activate $VENV_NAME
 
+.vault_login:
+  # Point the CLI to our internal vault
+  - export VAULT_ADDR=https://vault.us1.ddbuild.io
+  - vault login -method=aws -no-print
+
 .macos_gitlab:
   before_script:
     # Selecting the current Go version
diff --git a/.gitlab/common/test_infra_version.yml b/.gitlab/common/test_infra_version.yml
index bf7b69c621a144..3bf60ee98c97fe 100644
--- a/.gitlab/common/test_infra_version.yml
+++ b/.gitlab/common/test_infra_version.yml
@@ -4,4 +4,4 @@ variables:
   # and check the job creating the image to make sure you have the right SHA prefix
   TEST_INFRA_DEFINITIONS_BUILDIMAGES_SUFFIX: ""
   # Make sure to update test-infra-definitions version in go.mod as well
-  TEST_INFRA_DEFINITIONS_BUILDIMAGES: 0269b182e897
+  TEST_INFRA_DEFINITIONS_BUILDIMAGES: b436617374bf
diff --git a/.gitlab/e2e/e2e.yml b/.gitlab/e2e/e2e.yml
index 40a95688fb0d8a..d5a5b7a8ce3f66 100644
--- a/.gitlab/e2e/e2e.yml
+++ b/.gitlab/e2e/e2e.yml
@@ -24,9 +24,10 @@
     - ARM_CLIENT_SECRET=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $E2E_TESTS_AZURE_CLIENT_SECRET) || exit $?; export ARM_CLIENT_SECRET
     - ARM_TENANT_ID=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $E2E_TESTS_AZURE_TENANT_ID) || exit $?; export ARM_TENANT_ID
     - ARM_SUBSCRIPTION_ID=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $E2E_TESTS_AZURE_SUBSCRIPTION_ID) || exit $?; export ARM_SUBSCRIPTION_ID
-    # Setup GCP credentials. https://www.pulumi.com/registry/packages/gcp/installation-configuration/
+    # Setup GCP credentials. https://www.pulumi.com/registry/packages/gcp/service-account/
     # The service account is called `agent-e2e-tests`
-    - GOOGLE_CREDENTIALS=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $E2E_TESTS_GCP_CREDENTIALS) || exit $?; export GOOGLE_CREDENTIALS
+    - $CI_PROJECT_DIR/tools/ci/fetch_secret.sh $E2E_TESTS_GCP_CREDENTIALS > ~/gcp-credentials.json || exit $?
+    - export GOOGLE_APPLICATION_CREDENTIALS=~/gcp-credentials.json
     # Generate external links to CI VISIBILITY, used by artifacts:reports:annotations
     - inv -e gitlab.generate-ci-visibility-links --output=$EXTERNAL_LINKS_PATH
   variables:
@@ -500,7 +501,7 @@
 new-e2e-otel-eks-init:
   variables:
     TARGETS: ./tests/otel
     TEAM: otel
-    EXTRA_PARAMS: --run TestOTelAgentIAEKS
+    EXTRA_PARAMS: --run "TestOTelAgentIA(EKS|USTEKS)"
     E2E_INIT_ONLY: "true"
     SHOULD_RUN_IN_FLAKES_FINDER: "false"
@@ -519,7 +520,7 @@
     - new-e2e-otel-eks-init
   variables:
     TARGETS: ./tests/otel
-    EXTRA_PARAMS: --run TestOTelAgentIAEKS
+    EXTRA_PARAMS: --run "TestOTelAgentIA(EKS|USTEKS)"
     TEAM: otel
     E2E_PRE_INITIALIZED: "true"
 
@@ -535,7 +536,7 @@
     - qa_agent_ot
   variables:
     TARGETS: ./tests/otel
-    EXTRA_PARAMS: --skip TestOTelAgentIAEKS
+    EXTRA_PARAMS: --skip "TestOTelAgentIA(EKS|USTEKS)"
     TEAM: otel
 
 .new-e2e_package_signing:
@@ -656,4 +657,3 @@ new-e2e-eks-cleanup-on-failure:
     - !reference [.except_mergequeue]
     - when: always
   allow_failure: true
-
diff --git a/.gitlab/functional_test/regression_detector.yml b/.gitlab/functional_test/regression_detector.yml
index 4009cf1119e941..07496f6419feb7 100644
--- a/.gitlab/functional_test/regression_detector.yml
+++ b/.gitlab/functional_test/regression_detector.yml
@@ -18,20 +18,13 @@ single-machine-performance-regression_detector:
       - outputs/regression_signal.json # for debugging, also on S3
       - outputs/bounds_check_signal.json # for debugging, also on S3
       - outputs/junit.xml # for debugging, also on S3
+      - outputs/report.json # for debugging, also on S3
+      - outputs/decision_record.md # for posterity, this is appended to final PR comment
     when: always
   variables:
-    SMP_VERSION: 0.18.1
-    # At present we require two artifacts to exist for the 'baseline' and the
-    # 'comparison'. We are guaranteed by the structure of the pipeline that
-    # 'comparison' exists, not so much with 'baseline' as it has to come from main
-    # merge pipeline run. This is solved in datadog-agent by updating a file in S3
-    # with the SHA of the merge base from main. It's solved in Vector by
-    # building Vector twice for each Regression Detector run.
-    #
-    # We allow failure for now. _Unfortunately_ this also means that if the
-    # Regression Detector finds a performance issue with a PR it will not be
-    # flagged.
-    allow_failure: true
+    SMP_VERSION: 0.18.2
+    # See 'decision_record.md' for the determination of whether this job passes or fails.
+    allow_failure: false
   script:
     # Ensure output files exist for artifact downloads step
     - mkdir outputs # Also needed for smp job sync step
@@ -129,10 +122,65 @@
     # uploading JUnit XML, so the upload command below respects that convention.
     - DATADOG_API_KEY="$("$CI_PROJECT_DIR"/tools/ci/fetch_secret.sh "$AGENT_API_KEY_ORG2" token)" || exit $?; export DATADOG_API_KEY
     - datadog-ci junit upload --service datadog-agent outputs/junit.xml
-    # Finally, exit 1 if the job signals a regression else 0.
-    - RUST_LOG="${RUST_LOG}" ./smp --team-id ${SMP_AGENT_TEAM_ID} --api-base ${SMP_API} --aws-named-profile ${AWS_NAMED_PROFILE}
-      job result
-      --submission-metadata submission_metadata
+    # Run quality gate check script
+    - |
+      python3 <<'EOF'
+      import json
+      import sys
+
+      try:
+          with open('outputs/report.json') as f:
+              data = json.load(f)
+      except FileNotFoundError:
+          print("Machine readable report not found.")
+          sys.exit(1)
+      except json.JSONDecodeError as e:
+          print(f"Error parsing JSON report: {e}")
+          sys.exit(1)
+
+      experiments = data.get('experiments', {})
+      failed = False
+      decision_record = []
+
+      for exp_name, exp_data in experiments.items():
+          if exp_name.startswith('quality_gate_'):
+              bounds_checks = exp_data.get('bounds_checks', {})
+              for check_name, check_data in bounds_checks.items():
+                  results = check_data.get('results', {})
+                  comparison = results.get('comparison', [])
+                  num_total = len(comparison)
+                  failed_replicates = [
+                      replicate for replicate in comparison if not replicate.get('passed', False)
+                  ]
+                  num_failed = len(failed_replicates)
+                  num_passed = num_total - num_failed
+                  if failed_replicates:
+                      decision_record.append(
+                          f"- **{exp_name}**, bounds check **{check_name}**: {num_passed}/{num_total} replicas passed. Failed {num_failed} which is > 0. Gate **FAILED**."
+                      )
+                      failed = True
+                  else:
+                      decision_record.append(
+                          f"- **{exp_name}**, bounds check **{check_name}**: {num_passed}/{num_total} replicas passed. Gate passed."
+                      )
+
+      with open('outputs/decision_record.md', 'w') as f:
+          # Extra newline since this is appended to another report
+          f.write('\n\n## CI Pass/Fail Decision\n\n')
+          if failed:
+              f.write('❌ **Failed.** Some Quality Gates were violated.\n\n')
+              f.write('\n'.join(decision_record))
+          else:
+              f.write('✅ **Passed.** All Quality Gates passed.\n\n')
+              f.write('\n'.join(decision_record))
+
+      if failed:
+          print("Quality gate failed, see decision record")
+          sys.exit(1)
+      else:
+          print("Quality gate passed.")
+          sys.exit(0)
+      EOF
 
 # Shamelessly adapted from golang_deps_commenter job config in
 # golang_deps_diff.yml at commit 01da274032e510d617161cf4e264a53292f44e55.
 single-machine-performance-regression_detector-pr-comment:
   stage: functional_test
   rules:
     - !reference [.except_main_or_release_branch]
-    - when: on_success
+    - when: always
   image:
     name: "486234852809.dkr.ecr.us-east-1.amazonaws.com/pr-commenter:3"
-    entrypoint: [""] # disable entrypoint script for the pr-commenter image
+    entrypoint: [""] # disable entrypoint script for the pr-commenter image
   tags: ["arch:amd64"]
   needs:
     - job: single-machine-performance-regression_detector
@@ -151,11 +199,11 @@
     expire_in: 1 weeks
     paths:
       - report_as_json_string.txt # for debugging transform to valid JSON string
-      - pr_comment_payload.json # for debugging PR commenter JSON payload bugs
+      - pr_comment_payload.json # for debugging PR commenter JSON payload bugs
   variables:
     # Not using the entrypoint script for the pr-commenter image
     FF_KUBERNETES_HONOR_ENTRYPOINT: false
-  allow_failure: true # allow_failure here should have same setting as in job above
+  allow_failure: true # allow_failure here should have same setting as in job above
   script: # ignore error message about no PR, because it happens for dev branches without PRs
     # Prevent posting empty Regression Detector report if Markdown report is not found or
     # has zero size.
@@ -176,7 +224,8 @@
     # to escape double quotes to distinguish literal quotes in the report from
     # the double quotes that delimit the value of the "message" field in the
     # payload.
-    - cat outputs/report.md | sed -z 's/\n/\\n/g' | sed -z 's/"/\\"/g' > report_as_json_string.txt
+    # Appends the Decision Record to final report
+    - cat outputs/report.md outputs/decision_record.md | sed -z 's/\n/\\n/g' | sed -z 's/"/\\"/g' > report_as_json_string.txt
     - cat report_as_json_string.txt
     # Transforming the Markdown report to a valid JSON string is easy to foul
     # up, so to make debugging easier, we store the payload in a variable to
diff --git a/.gitlab/integration_test/otel.yml b/.gitlab/integration_test/otel.yml
index 13a3662987a186..aecb75afd9917b 100644
--- a/.gitlab/integration_test/otel.yml
+++ b/.gitlab/integration_test/otel.yml
@@ -36,7 +36,7 @@ docker_image_build_otel:
     - cp test/integration/docker/otel_agent_build_tests.py /tmp/otel-ci/
     - wget https://github.com/mikefarah/yq/releases/download/3.4.1/yq_linux_amd64 -O /usr/bin/yq && chmod +x /usr/bin/yq
-    - export OTELCOL_VERSION=v$(/usr/bin/yq r /tmp/otel-ci/manifest.yaml dist.otelcol_version)
+    - export OTELCOL_VERSION=v$(/usr/bin/yq r /tmp/otel-ci/manifest.yaml dist.version)
     - yq w -i /tmp/otel-ci/manifest.yaml "receivers[+] gomod"
       "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sobjectsreceiver ${OTELCOL_VERSION}"
     - yq w -i /tmp/otel-ci/manifest.yaml "processors[+] gomod"
@@ -54,3 +54,39 @@
     - if: $CI_COMMIT_REF_NAME =~ /.*-skip-cancel$/
       when: never
     - when: always
+
+
+ddflare_extension_ocb_build:
+  stage: integration_test
+  image: registry.ddbuild.io/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES
+  tags: ["arch:amd64"]
+  needs: ["go_deps"]
+  before_script:
+    - !reference [.retrieve_linux_go_deps]
+    - mkdir -p /tmp/otel-ci
+    - cp test/otel/testdata/* /tmp/otel-ci/
+    - wget -O /tmp/otel-ci/ocb https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/cmd%2Fbuilder%2Fv0.113.0/ocb_0.113.0_linux_amd64
+    - chmod +x /tmp/otel-ci/ocb
+  script:
+    - echo 'Building collector with OCB and test ddflare extension'
+    - /tmp/otel-ci/ocb --config=/tmp/otel-ci/builder-config.yaml > ocb-output.log 2>&1
+    - grep -q 'Compiled' ocb-output.log || (echo "OCB failed to compile" && exit 1)
+    - "grep -q '{\"binary\": \"/tmp/otel-ci/otelcol-custom/otelcol-custom\"}' ocb-output.log || (echo \"OCB failed to compile\" && exit 1)"
+    - /tmp/otel-ci/otelcol-custom/otelcol-custom --config /tmp/otel-ci/collector-config.yaml > otelcol-custom.log 2>&1 &
+    - OTELCOL_PID=$! # Capture the process ID
+    - sleep 10 # Wait for the process to start
+    - grep -q 'Everything is ready. Begin running and processing data.' otelcol-custom.log || (echo "custom collector failed to start" && kill $OTELCOL_PID && exit 1)
+    - curl -k https://localhost:7777 > flare-info.log 2>&1 # get the dd flare info
+    - "grep -q '\"provided_configuration\": \"\"' flare-info.log || (echo \"provided config should not be supported with ocb\" && kill $OTELCOL_PID && exit 1)"
+    - grep -q 'extensions:\\n - ddflare\\n' flare-info.log || (echo "ddflare extension should be enabled" && kill $OTELCOL_PID && exit 1)
+    - kill $OTELCOL_PID # Kill the process
+  rules:
+    - if: $CI_PIPELINE_SOURCE =~ /^schedule.*$/
+      when: never
+    - if: $CI_COMMIT_TAG
+      when: never
+    - if: $CI_COMMIT_MESSAGE =~ /.*\[skip cancel\].*/
+      when: never
+    - if: $CI_COMMIT_REF_NAME =~ /.*-skip-cancel$/
+      when: never
+    - when: always
diff --git a/.gitlab/internal_kubernetes_deploy/internal_kubernetes_deploy.yml b/.gitlab/internal_kubernetes_deploy/internal_kubernetes_deploy.yml
index c3b93f90ed13ea..f98da3272164d1 100644
--- a/.gitlab/internal_kubernetes_deploy/internal_kubernetes_deploy.yml
+++ b/.gitlab/internal_kubernetes_deploy/internal_kubernetes_deploy.yml
@@ -67,8 +67,5 @@ notify-slack:
   needs: ["internal_kubernetes_deploy_experimental"]
   script:
     - export SDM_JWT=$(vault read -field=token identity/oidc/token/sdm)
-    # Python 3.12 changes default behavior how packages are installed.
-    # In particular, --break-system-packages command line option is
-    # required to use the old behavior or use a virtual env. https://github.com/actions/runner-images/issues/8615
-    - python3 -m pip install -r tasks/requirements.txt --break-system-packages
+    - python3 -m pip install -r tasks/requirements.txt
     - inv pipeline.changelog ${CI_COMMIT_SHORT_SHA} || exit $?
diff --git a/.gitlab/kernel_matrix_testing/common.yml b/.gitlab/kernel_matrix_testing/common.yml
index dd6d1a5abe6599..75eb68e927fb12 100644
--- a/.gitlab/kernel_matrix_testing/common.yml
+++ b/.gitlab/kernel_matrix_testing/common.yml
@@ -335,7 +335,7 @@ notify_ebpf_complexity_changes:
     # Python 3.12 changes default behavior how packages are installed.
     # In particular, --break-system-packages command line option is
     # required to use the old behavior or use a virtual env. https://github.com/actions/runner-images/issues/8615
-    - python3 -m pip install -r tasks/kernel_matrix_testing/requirements.txt --break-system-packages # Required for printing the tables
+    - python3 -m pip install -r tasks/kernel_matrix_testing/requirements-ci.txt --break-system-packages # Required for printing the tables
     - python3 -m pip install -r tasks/libs/requirements-github.txt --break-system-packages
     - !reference [.setup_agent_github_app]
     - GITLAB_TOKEN=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $GITLAB_TOKEN read_api) || exit $?; export GITLAB_TOKEN
diff --git a/.gitlab/package_build/linux.yml b/.gitlab/package_build/linux.yml
index 54df0971f046fc..5dd7698bb2edad 100644
--- a/.gitlab/package_build/linux.yml
+++ b/.gitlab/package_build/linux.yml
@@ -80,36 +80,6 @@ before_script:
     - export RELEASE_VERSION=$RELEASE_VERSION_7
 
-# Temporary custom agent build test to prevent regression
-# This test will be removed when custom path are used to build macos agent
-# with in-house macos runner builds.
-datadog-agent-7-x64-custom-path-test:
-  extends: [.agent_build_x86, .agent_7_build]
-  rules:
-    - !reference [.except_mergequeue]
-    - when: on_success
-  stage: package_build
-  script:
-    - mkdir /custom
-    - export CONFIG_DIR="/custom"
-    - export INSTALL_DIR="/custom/datadog-agent"
-    - !reference [.agent_build_script]
-    - ls -la $OMNIBUS_PACKAGE_DIR
-    - ls -la $INSTALL_DIR
-    - ls -la /custom/etc
-    - (ls -la /opt/datadog-agent 2>/dev/null && exit 1) || echo "/opt/datadog-agent has correctly not been generated"
-    - (ls -la /etc/datadog-agent 2>/dev/null && exit 1) || echo "/etc/datadog-agent has correctly not been generated"
-  variables:
-    KUBERNETES_CPU_REQUEST: 16
-    KUBERNETES_MEMORY_REQUEST: "32Gi"
-    KUBERNETES_MEMORY_LIMIT: "32Gi"
-  artifacts:
-    expire_in: 2 weeks
-    paths:
-      - $OMNIBUS_PACKAGE_DIR
-  cache:
-    - !reference [.cache_omnibus_ruby_deps, cache]
-
 # build Agent 7 binaries for x86_64
 datadog-agent-7-x64:
   extends: [.agent_build_common, .agent_build_x86, .agent_7_build]
diff --git a/.gitlab/source_test/common.yml b/.gitlab/source_test/common.yml
new file mode 100644
index 00000000000000..35ad4187c3333d
--- /dev/null
+++ b/.gitlab/source_test/common.yml
@@ -0,0 +1,8 @@
+---
+.upload_junit_source:
+  - $CI_PROJECT_DIR/tools/ci/junit_upload.sh
+
+.upload_coverage:
+  # Upload coverage files to Codecov. Never fail on coverage upload.
+  - CODECOV_TOKEN=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $CODECOV token) || exit $?; export CODECOV_TOKEN
+  - inv -e coverage.upload-to-codecov $COVERAGE_CACHE_FLAG || true
diff --git a/.gitlab/source_test/include.yml b/.gitlab/source_test/include.yml
index 60666c8d4f5a16..629e88b5512943 100644
--- a/.gitlab/source_test/include.yml
+++ b/.gitlab/source_test/include.yml
@@ -4,6 +4,7 @@
 # security scans & go.mod checks.
 
 include:
+  - .gitlab/source_test/common.yml # Included first for shared definitions
   - .gitlab/source_test/ebpf.yml
   - .gitlab/source_test/linux.yml
   - .gitlab/source_test/macos.yml
diff --git a/.gitlab/source_test/linux.yml b/.gitlab/source_test/linux.yml
index 0b7540f16e71a2..aea05f28d4345b 100644
--- a/.gitlab/source_test/linux.yml
+++ b/.gitlab/source_test/linux.yml
@@ -45,14 +45,6 @@
     annotations:
       - $EXTERNAL_LINKS_PATH
 
-.upload_junit_source:
-  - $CI_PROJECT_DIR/tools/ci/junit_upload.sh
-
-.upload_coverage:
-  # Upload coverage files to Codecov. Never fail on coverage upload.
-  - CODECOV_TOKEN=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $CODECOV token) || exit $?; export CODECOV_TOKEN
-  - inv -e coverage.upload-to-codecov $COVERAGE_CACHE_FLAG || true
-
 .linux_x64:
   image: registry.ddbuild.io/ci/datadog-agent-buildimages/deb_x64$DATADOG_AGENT_BUILDIMAGES_SUFFIX:$DATADOG_AGENT_BUILDIMAGES
   tags: ["arch:amd64"]
diff --git a/.gitlab/source_test/macos.yml b/.gitlab/source_test/macos.yml
index dc8a7ab8f16896..338f2f9eca347c 100644
--- a/.gitlab/source_test/macos.yml
+++ b/.gitlab/source_test/macos.yml
@@ -64,18 +64,12 @@ tests_macos:
     annotations:
       - $EXTERNAL_LINKS_PATH
 
-.upload_junit_source:
-  - $CI_PROJECT_DIR/tools/ci/junit_upload.sh
-
-.upload_coverage:
-  # Upload coverage files to Codecov. Never fail on coverage upload.
-  - CODECOV_TOKEN=$($CI_PROJECT_DIR/tools/ci/fetch_secret.sh $CODECOV_TOKEN) || exit $?; export CODECOV_TOKEN
-  - inv -e coverage.upload-to-codecov $COVERAGE_CACHE_FLAG || true
-
 tests_macos_gitlab_amd64:
   extends: .tests_macos_gitlab
   tags: ["macos:monterey-amd64", "specific:true"]
   after_script:
+    - !reference [.vault_login]
+    - !reference [.select_python_env_commands]
     - !reference [.upload_junit_source]
     - !reference [.upload_coverage]
@@ -85,5 +79,7 @@ tests_macos_gitlab_arm64:
     !reference [.manual]
   tags: ["macos:monterey-arm64", "specific:true"]
   after_script:
+    - !reference [.vault_login]
+    - !reference [.select_python_env_commands]
     - !reference [.upload_junit_source]
     - !reference [.upload_coverage]
diff --git a/.golangci.yml b/.golangci.yml
index 5e6c781919a56c..0358a505b01187 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -605,6 +605,8 @@ linters-settings:
             desc: "Not really forbidden to use, but it is usually imported by mistake instead of github.com/stretchr/testify/assert"
           - pkg: "github.com/tj/assert"
             desc: "Not really forbidden to use, but it is usually imported by mistake instead of github.com/stretchr/testify/assert, and confusing since it actually has the behavior of github.com/stretchr/testify/require"
+          - pkg: "debug/elf"
+            desc: "prefer pkg/util/safeelf to prevent panics during parsing"
 
   errcheck:
     exclude-functions:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b68d756cb8d3f5..c665daa0c6a33b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -78,7 +78,7 @@ repos:
         description: Format .net file of the MSI installer
         language: system
         # The dotnet format tool requires a solution file to operate.
-        entry: dotnet format ./tools/windows/DatadogAgentInstaller --include
+        entry: dotnet format ./tools/windows/DatadogAgentInstaller --include
         types: [c#]
       - id: go-mod-tidy
         name: go-mod-tidy
@@ -141,7 +141,7 @@ repos:
       - id: check-go-modules-in-python
        name: check-go-modules-in-python
        description: Validate all go modules are declared in Invoke tasks
-        entry: 'inv modules.validate'
+        entry: 'inv modules.validate --fix-format'
        language: system
        pass_filenames: false
-        files: .*go\.mod$
+        files: (.*go\.mod|modules\.yml|.*gomodules\.py|.*modules\.py)$
diff --git a/.run/Build process-agent.run.xml b/.run/Build process-agent.run.xml
index 83f150ffea6752..86a692091e4426 100644
--- a/.run/Build process-agent.run.xml
+++ b/.run/Build process-agent.run.xml
@@ -12,7 +12,7 @@