From b7adb9f781606ec196e6c4abd8b196449f3dbcfc Mon Sep 17 00:00:00 2001 From: "Brian J. Murrell" Date: Wed, 3 May 2023 13:42:58 -0400 Subject: [PATCH] DAOS-13224 build test: Build and test on EL9 TODO: Summarize changes Quick-functional: true Provisioning-pool: bmurrell-automate EL9-VM9-label: stage_vm9 Repo-files-PR: PR-16 PR-repos-ubuntu20: hdf5@PR-38:8 ior@PR-10:7 hdf5-vol-daos@PR-38:14 mpifileutils-pkg@PR-34:7 PR-repos-el8: dpdk@master spdk@master:lastBuild testmpio@PR-9:lastBuild PR-repos-el9: testmpio@PR-9:lastBuild PR-repos-leap15: testmpio@PR-9:lastBuild PR-repos: argobots@PR-24 raft@PR-72 isa-l@PR-11 isa-l_crypto@PR-9 mercury@PR-102:lastBuild dpdk@PR-26 spdk@PR-59:lastBuild mpich@PR-64 hdf5@PR-39:lastBuild ior@PR-10:lastBuild dtcmp@PR-6:lastBuild lwgrp@PR-6 libcircle@PR-6:lastBuild hdf5-vol-daos@PR-38:lastBuild mpifileutils-pkg@PR-34:lastBuild MACSio@PR-12:lastBuild json-cwx@PR-4 simul@PR-3 romio@PR-10:lastBuild mpi4py@PR-11:lastBuild Skip-build-ubuntu20-rpm: false Skip-build-el8-rpm: false Skip-build-leap15-rpm: false Skip-build-el8-gcc: true Skip-build-el8-gcc-debug: true Skip-build-el8-gcc-release: true Skip-build-leap15-gcc: true Skip-build-leap15-icc: true Skip-build-ubuntu-clang: true Skip-func-test-leap15: false Skip-func-hw-test: true Test-tag: test_daos_dfuse_unit_pil4dfs dfuse_mu test_mpi4py Fixes: DAOS-12427 Required-githooks: true Signed-off-by: Brian J. 
Murrell --- Jenkinsfile | 101 ++++++++++++------ ci/functional/required_packages.sh | 46 -------- ci/parse_ci_envs.sh | 5 + ci/provisioning/post_provision_config.sh | 6 +- .../post_provision_config_common.sh | 19 ++-- .../post_provision_config_common_functions.sh | 85 ++++++++------- .../post_provision_config_nodes_EL_8.sh | 14 ++- ci/rpm/test_daos_node.sh | 39 +++---- debian/changelog | 6 ++ debian/control | 4 +- debian/libdaos0.install | 2 +- site_scons/env_modules.py | 2 + site_scons/site_tools/daos_builder.py | 5 + src/tests/ftest/harness/skip_list.py | 22 ++-- src/tests/ftest/mpiio/llnl_mpi4py.yaml | 2 +- src/tests/ftest/process_core_files.py | 16 ++- src/tests/ftest/util/apricot/apricot/test.py | 5 +- src/tests/ftest/util/mpiio_test_base.py | 12 +++ utils/githooks/pre-commit.d/20-flake.sh | 5 + utils/githooks/pre-commit.d/40-pylint.sh | 15 +-- utils/rpms/daos.rpmlintrc | 2 +- utils/rpms/daos.spec | 45 +++++++- utils/rpms/packaging/Dockerfile.mockbuild | 14 ++- utils/rpms/packaging/Dockerfile.ubuntu.20.04 | 7 +- utils/rpms/packaging/Makefile_distro_vars.mk | 12 +++ utils/rpms/packaging/Makefile_packaging.mk | 6 +- utils/rpms/packaging/debian_chrootbuild | 13 +++ utils/rpms/packaging/rpm_chrootbuild | 28 +++-- 28 files changed, 336 insertions(+), 202 deletions(-) delete mode 100755 ci/functional/required_packages.sh diff --git a/Jenkinsfile b/Jenkinsfile index ebdf447bbc9e..522170ad0eb1 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -15,7 +15,7 @@ // To use a test branch (i.e. PR) until it lands to master // I.e. for testing library changes -//@Library(value='pipeline-lib@your_branch') _ +@Library(value="pipeline-lib@bmurrell/el9") _ /* groovylint-disable-next-line CompileStatic */ job_status_internal = [:] @@ -230,6 +230,11 @@ pipeline { string(name: 'CI_EL8_TARGET', defaultValue: '', description: 'Image to used for EL 8 CI tests. I.e. 
el8, el8.3, etc.') + /* pipeline{} is too big for this + string(name: 'CI_EL9_TARGET', + defaultValue: '', + description: 'Image to used for EL 9 CI tests. I.e. el9, el9.1, etc.') + */ string(name: 'CI_LEAP15_TARGET', defaultValue: '', description: 'Image to use for OpenSUSE Leap CI tests. I.e. leap15, leap15.2, etc.') @@ -239,6 +244,11 @@ pipeline { booleanParam(name: 'CI_RPM_el8_NOBUILD', defaultValue: false, description: 'Do not build RPM packages for EL 8') + /* pipeline{} is too big for this + booleanParam(name: 'CI_RPM_el9_NOBUILD', + defaultValue: false, + description: 'Do not build RPM packages for EL 9') + */ booleanParam(name: 'CI_RPM_leap15_NOBUILD', defaultValue: false, description: 'Do not build RPM packages for Leap 15') @@ -266,6 +276,11 @@ pipeline { booleanParam(name: 'CI_FUNCTIONAL_el8_TEST', defaultValue: true, description: 'Run the Functional on EL 8 test stage') + /* pipeline{} is too big for this + booleanParam(name: 'CI_FUNCTIONAL_el9_TEST', + defaultValue: true, + description: 'Run the Functional on EL 9 test stage') + */ booleanParam(name: 'CI_FUNCTIONAL_leap15_TEST', defaultValue: true, description: 'Run the Functional on Leap 15 test stage' + @@ -545,6 +560,43 @@ pipeline { } } } + stage('Build RPM on EL 9') { + when { + beforeAgent true + expression { !skipStage() } + } + agent { + dockerfile { + filename 'packaging/Dockerfile.mockbuild' + dir 'utils/rpms' + label 'docker_runner' + additionalBuildArgs dockerBuildArgs() + args '--cap-add=SYS_ADMIN' + } + } + steps { + job_step_update(buildRpm()) + } + post { + success { + fixup_rpmlintrc() + buildRpmPost condition: 'success', rpmlint: true + } + unstable { + buildRpmPost condition: 'unstable' + } + failure { + buildRpmPost condition: 'failure' + } + unsuccessful { + buildRpmPost condition: 'unsuccessful' + } + cleanup { + buildRpmPost condition: 'cleanup' + job_status_update() + } + } + } stage('Build RPM on Leap 15.4') { when { beforeAgent true @@ -956,13 +1008,13 @@ pipeline { } } } // 
stage('Functional on EL 8') - stage('Functional on Leap 15.4') { + stage('Functional on EL 9') { when { beforeAgent true expression { !skipStage() } } agent { - label cachedCommitPragma(pragma: 'Leap15-VM9-label', def_val: params.FUNCTIONAL_VM_LABEL) + label cachedCommitPragma(pragma: 'EL9-VM9-label', def_val: params.FUNCTIONAL_VM_LABEL) } steps { job_step_update( @@ -976,15 +1028,15 @@ pipeline { functionalTestPostV2() job_status_update() } - } // post - } // stage('Functional on Leap 15.4') - stage('Functional on Ubuntu 20.04') { + } + } // stage('Functional on EL 9') + stage('Functional on Leap 15.4') { when { beforeAgent true expression { !skipStage() } } agent { - label cachedCommitPragma(pragma: 'Ubuntu-VM9-label', def_val: params.FUNCTIONAL_VM_LABEL) + label cachedCommitPragma(pragma: 'Leap15-VM9-label', def_val: params.FUNCTIONAL_VM_LABEL) } steps { job_step_update( @@ -999,44 +1051,29 @@ pipeline { job_status_update() } } // post - } // stage('Functional on Ubuntu 20.04') - stage('Scan EL 8 RPMs') { + } // stage('Functional on Leap 15.4') + stage('Functional on Ubuntu 20.04') { when { beforeAgent true expression { !skipStage() } } agent { - dockerfile { - filename 'ci/docker/Dockerfile.maldet.el.8' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs() + - " -t ${sanitized_JOB_NAME}-el8 " + - ' --build-arg REPOS="' + prRepos() + '"' + - ' --build-arg BUILD_URL="' + env.BUILD_URL + '"' - } + label cachedCommitPragma(pragma: 'Ubuntu-VM9-label', def_val: params.FUNCTIONAL_VM_LABEL) } steps { job_step_update( - runTest(script: 'export DAOS_PKG_VERSION=' + - daosPackagesVersion(next_version) + '\n' + - 'utils/scripts/helpers/scan_daos_maldet.sh', - junit_files: 'maldetect_el8.xml', - failure_artifacts: env.STAGE_NAME, - ignore_failure: true, - description: env.STAGE_NAME, - context: 'test/' + env.STAGE_NAME)) + functionalTest( + inst_repos: daosRepos(), + inst_rpms: functionalPackages(1, next_version, 'tests-internal'), + test_function: 
'runTestFunctionalV2')) } post { always { - junit 'maldetect_el8.xml' - archiveArtifacts artifacts: 'maldetect_el8.xml' + functionalTestPostV2() job_status_update() - // Force a job failure if anything was found - sh label: 'Check if anything was found.', - script: '! grep "/tmp/commit_title" + "cat >/tmp/commit_fixes" git log --pretty=format:%h --abbrev-commit --abbrev=7 | retry_cmd 60 ssh -i ci_key -l jenkins "${NODELIST%%,*}" "cat >/tmp/commit_list" retry_cmd 600 ssh root@"${NODELIST%%,*}" "mkdir -p /scratch && " \ diff --git a/ci/provisioning/post_provision_config_common.sh b/ci/provisioning/post_provision_config_common.sh index 8a342124bcb3..e965194ec228 100755 --- a/ci/provisioning/post_provision_config_common.sh +++ b/ci/provisioning/post_provision_config_common.sh @@ -24,31 +24,28 @@ if [ -n "$repo_files_pr" ]; then REPO_FILE_URL="${JENKINS_URL:-https://build.hpdd.intel.com/}job/daos-do/job/repo-files/job/$branch/$build_number/artifact/" fi -id=$(lsb_release -si) -release=$(lsb_release -sr) +. 
/etc/os-release # shellcheck disable=SC2034 EXCLUDE_UPGRADE=mercury,daos,daos-\* if rpm -qa | grep mlnx; then # packages not to allow upgrading if MLNX OFED is installed EXCLUDE_UPGRADE+=,openmpi,\*mlnx\*,\*ucx\* fi -case "$id" in - CentOS|Rocky|AlmaLinux|RedHatEnterpriseServer) - if [ "${release%%.*}" = 7 ]; then - DISTRO_NAME=centos${release%%.*} +case "$ID_LIKE" in + *rhel*) + if [ "$VERSION_ID" = "7" ]; then + DISTRO_NAME=centos"$VERSION_ID" EXCLUDE_UPGRADE+=,fuse else - DISTRO_NAME=el${release%%.*} + DISTRO_NAME=el${VERSION_ID%%.*} EXCLUDE_UPGRADE+=,dpdk\* fi REPOS_DIR=/etc/yum.repos.d DISTRO_GENERIC=el - # shellcheck disable=SC2034 - LSB_RELEASE=redhat-lsb-core ;; - openSUSE) + *suse*) # shellcheck disable=SC2034 - DISTRO_NAME=leap${release%%.*} + DISTRO_NAME=leap${VERSION_ID%%.*} # shellcheck disable=SC2034 DISTRO_GENERIC=sl # shellcheck disable=SC2034 diff --git a/ci/provisioning/post_provision_config_common_functions.sh b/ci/provisioning/post_provision_config_common_functions.sh index afe7b9c70ca5..f937193e1a9b 100755 --- a/ci/provisioning/post_provision_config_common_functions.sh +++ b/ci/provisioning/post_provision_config_common_functions.sh @@ -146,7 +146,8 @@ timeout_cmd() { fetch_repo_config() { local repo_server="$1" - local repo_file="daos_ci-${DISTRO_NAME}-$repo_server" + . /etc/os-release + local repo_file="daos_ci-${ID}${VERSION_ID%%.*}-$repo_server" local repopath="${REPOS_DIR}/$repo_file" if ! curl -f -o "$repopath" "$REPO_FILE_URL$repo_file.repo"; then return 1 @@ -182,19 +183,18 @@ rpm_test_version() { set_local_repo() { local repo_server="$1" - rm -f "$REPOS_DIR"/daos_ci-"$DISTRO_NAME".repo - ln "$REPOS_DIR"/daos_ci-"$DISTRO_NAME"{-"$repo_server",.repo} + . 
/etc/os-release + + rm -f "$REPOS_DIR"/daos_ci-"${ID}${VERSION_ID%%.*}".repo + ln "$REPOS_DIR"/daos_ci-"${ID}${VERSION_ID%%.*}"{-"$repo_server",.repo} - local version - version="$(lsb_release -sr)" - version=${version%%.*} if [ "$repo_server" = "artifactory" ] && { [[ $(pr_repos) = *daos@PR-* ]] || [ -z "$(rpm_test_version)" ]; } && [[ ! ${CHANGE_TARGET:-$BRANCH_NAME} =~ ^[-0-9A-Za-z]+-testing ]]; then # Disable the daos repo so that the Jenkins job repo or a PR-repos*: repo is # used for daos packages dnf -y config-manager \ - --disable daos-stack-daos-"${DISTRO_GENERIC}"-"$version"-x86_64-stable-local-artifactory + --disable daos-stack-daos-"${DISTRO_GENERIC}"-"${VERSION_ID%%.*}"-x86_64-stable-local-artifactory fi dnf repolist } @@ -257,7 +257,7 @@ post_provision_config_nodes() { slurm-example-configs slurmctld slurm-slurmmd fi - lsb_release -a + cat /etc/os-release # start with everything fully up-to-date # all subsequent package installs beyond this will install the newest packages @@ -268,7 +268,7 @@ post_provision_config_nodes() { else cmd+=(upgrade) fi - if ! "${cmd[@]}"; then + if ! 
"${cmd[@]}" --exclude golang-*.daos.*; then dump_repos return 1 fi @@ -278,47 +278,47 @@ post_provision_config_nodes() { install_mofed fi - if [ -n "$INST_REPOS" ]; then - local repo - for repo in $INST_REPOS; do - branch="master" - build_number="lastSuccessfulBuild" - if [[ $repo = *@* ]]; then - branch="${repo#*@}" - repo="${repo%@*}" - if [[ $branch = *:* ]]; then - build_number="${branch#*:}" - branch="${branch%:*}" - fi + local repos_added=() + local repo + local inst_repos=() + # shellcheck disable=SC2153 + read -ra inst_repos <<< "$INST_REPOS" + for repo in "${inst_repos[@]}"; do + branch="master" + build_number="lastSuccessfulBuild" + if [[ $repo = *@* ]]; then + branch="${repo#*@}" + repo="${repo%@*}" + if [[ " ${repos_added[*]} " = *\ ${repo}\ * ]]; then + # don't add duplicates, first found wins + continue fi - local repo_url="${JENKINS_URL}"job/daos-stack/job/"${repo}"/job/"${branch//\//%252F}"/"${build_number}"/artifact/artifacts/$DISTRO_NAME/ - dnf -y config-manager --add-repo="${repo_url}" - disable_gpg_check "$repo_url" - done - fi - inst_rpms=() + repos_added+=("$repo") + if [[ $branch = *:* ]]; then + build_number="${branch#*:}" + branch="${branch%:*}" + fi + fi + local repo_url="${JENKINS_URL}"job/daos-stack/job/"${repo}"/job/"${branch//\//%252F}"/"${build_number}"/artifact/artifacts/$DISTRO_NAME/ + dnf -y config-manager --add-repo="${repo_url}" + disable_gpg_check "$repo_url" + done + local inst_rpms=() + # shellcheck disable=SC2153 if [ -n "$INST_RPMS" ]; then + # use eval here, rather than say, read -ra to take advantage of bash globbing eval "inst_rpms=($INST_RPMS)" time dnf -y erase "${inst_rpms[@]}" fi rm -f /etc/profile.d/openmpi.sh rm -f /tmp/daos_control.log - if [ -n "${LSB_RELEASE:-}" ]; then - if ! rpm -q "$LSB_RELEASE"; then - retry_dnf 360 install "$LSB_RELEASE" - fi - fi # shellcheck disable=SC2001 - if ! 
rpm -q "$(echo "$INST_RPMS" | - sed -e 's/--exclude [^ ]*//' \ - -e 's/[^ ]*-daos-[0-9][0-9]*//g')"; then - if [ -n "$INST_RPMS" ]; then - if ! retry_dnf 360 install "${inst_rpms[@]}"; then - rc=${PIPESTATUS[0]} - dump_repos - return "$rc" - fi + if [ ${#inst_rpms[@]} -gt 0 ]; then + if ! retry_dnf 360 install "${inst_rpms[@]}"; then + rc=${PIPESTATUS[0]} + dump_repos + return "$rc" fi fi @@ -352,12 +352,11 @@ EOF distro_custom - lsb_release -a + cat /etc/os-release if [ -f /etc/do-release ]; then cat /etc/do-release fi - cat /etc/os-release return 0 } diff --git a/ci/provisioning/post_provision_config_nodes_EL_8.sh b/ci/provisioning/post_provision_config_nodes_EL_8.sh index 57f5af8cabc7..00c9ad72f9a2 100644 --- a/ci/provisioning/post_provision_config_nodes_EL_8.sh +++ b/ci/provisioning/post_provision_config_nodes_EL_8.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# (C) Copyright 2021-2022 Intel Corporation. +# (C) Copyright 2021-2023 Intel Corporation. # # SPDX-License-Identifier: BSD-2-Clause-Patent @@ -16,8 +16,14 @@ group_repo_post() { distro_custom() { # install avocado - dnf -y install python3-avocado{,-plugins-{output-html,varianter-yaml-to-mux}} \ - clustershell + local avocado_rpms=(python3-avocado{,-plugins-{output-html,varianter-yaml-to-mux}}) + if [ -z "$(dnf repoquery "${avocado_rpms[@]}")" ]; then + avocado_rpms=() + pip install "avocado-framework<83.0" + pip install "avocado-framework-plugin-result-html<83.0" + pip install "avocado-framework-plugin-varianter-yaml-to-mux<83.0" + fi + dnf -y install "${avocado_rpms[@]}" clustershell # for Launchable's pip install dnf -y install python3-setuptools.noarch @@ -47,7 +53,7 @@ install_mofed() { stream=false - gversion="$(lsb_release -sr)" + gversion="$VERSION_ID" if [ "$gversion" == "8" ]; then gversion="8.6" stream=true diff --git a/ci/rpm/test_daos_node.sh b/ci/rpm/test_daos_node.sh index c7b274735932..932e2d840a25 100755 --- a/ci/rpm/test_daos_node.sh +++ b/ci/rpm/test_daos_node.sh @@ -1,22 +1,23 @@ #!/bin/bash +. 
/etc/os-release + YUM=dnf -id="$(lsb_release -si)" -if [ "$id" = "CentOS" ] || - [ "$id" = "AlmaLinux" ] || - [ "$id" = "Rocky" ] || - [ "$id" = "RedHatEnterpriseServer" ]; then - if [[ $(lsb_release -sr) = 8* ]]; then - OPENMPI_RPM=openmpi - OPENMPI=mpi/openmpi-x86_64 - else +case "$ID_LIKE" in + *rhel*) + if [[ $VERSION_ID = [89].* ]]; then + OPENMPI_RPM=openmpi + OPENMPI=mpi/openmpi-x86_64 + else + OPENMPI_RPM=openmpi3 + OPENMPI=mpi/openmpi3-x86_64 + fi + ;; + *suse*) OPENMPI_RPM=openmpi3 - OPENMPI=mpi/openmpi3-x86_64 - fi -elif [ "$(lsb_release -si)" = "openSUSE" ]; then - OPENMPI_RPM=openmpi3 - OPENMPI=gnu-openmpi -fi + OPENMPI=gnu-openmpi + ;; +esac set -uex sudo $YUM -y install daos-client-"${DAOS_PKG_VERSION}" @@ -29,9 +30,9 @@ if ! sudo $YUM -y history undo last; then $YUM history exit 1 fi -sudo $YUM -y erase $OPENMPI_RPM +sudo $YUM -y erase "$OPENMPI_RPM" sudo $YUM -y install daos-client-tests-"${DAOS_PKG_VERSION}" -if rpm -q $OPENMPI_RPM; then +if rpm -q "$OPENMPI_RPM"; then echo "$OPENMPI_RPM RPM should not be installed as a dependency of daos-client-tests" exit 1 fi @@ -45,7 +46,7 @@ if ! sudo $YUM -y history undo last; then exit 1 fi sudo $YUM -y install daos-server-tests-"${DAOS_PKG_VERSION}" -if rpm -q $OPENMPI_RPM; then +if rpm -q "$OPENMPI_RPM"; then echo "$OPENMPI_RPM RPM should not be installed as a dependency of daos-server-tests" exit 1 fi @@ -107,7 +108,7 @@ sudo PYTHONPATH="$FTEST/util" \ cat /etc/daos/daos_server.yml cat /etc/daos/daos_agent.yml cat /etc/daos/daos.yml -if ! module load $OPENMPI; then +if ! module load "$OPENMPI"; then echo "Unable to load OpenMPI module: $OPENMPI" module avail module list diff --git a/debian/changelog b/debian/changelog index afe3bd2dafe0..568a4d3fd3cc 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +daos (2.3.107-7) unstable; urgency=medium + [ Brian J. Murrell ] + * NOOP change to keep in parity with RPM version + + -- Brian J. 
Murrell Tue, 06 Jun 2023 11:32:05 -0400 + daos (2.3.107-6) unstable; urgency=medium [ Jeff Olivier ] * Add lmdb-devel and bio_ut for MD on SSD diff --git a/debian/control b/debian/control index 336cbd724cbb..fbddfbfdcf04 100644 --- a/debian/control +++ b/debian/control @@ -29,7 +29,7 @@ Build-Depends: debhelper (>= 10), libboost-dev, libspdk-dev (>= 22.01.2), libipmctl-dev, - libraft-dev (= 0.9.1-1401.gc18bcb8), + libraft-dev (= 0.9.2-2411.gf0c57a7), python3-tabulate, liblz4-dev, liblmdb-dev, @@ -171,7 +171,7 @@ Section: net Architecture: any Multi-Arch: same Depends: ${shlibs:Depends}, ${misc:Depends}, openmpi-bin, - ipmctl (>=03.00.00.0468), libfabric (>= 1.15.1-1), libfabric (<< 1.18), spdk-tools (>= 22.01.2) + ipmctl (>=03.00.00.0468), libfabric1 (>= 1.15.1-1), libfabric1 (<< 1.18), spdk-tools (>= 22.01.2) Description: The Distributed Asynchronous Object Storage (DAOS) is an open-source software-defined object store designed from the ground up for massively distributed Non Volatile Memory (NVM). 
DAOS takes advantage diff --git a/debian/libdaos0.install b/debian/libdaos0.install index 50d2e3335ab7..2c53d546397f 100644 --- a/debian/libdaos0.install +++ b/debian/libdaos0.install @@ -1 +1 @@ -usr/lib64/*.so.* \ No newline at end of file +usr/lib64/*.so.* diff --git a/site_scons/env_modules.py b/site_scons/env_modules.py index f54bd0d33a59..9bcfd8f91f43 100644 --- a/site_scons/env_modules.py +++ b/site_scons/env_modules.py @@ -56,6 +56,7 @@ def _module_func(self, command, *arguments): # pylint: disable=no-self-use # pylint: disable=consider-using-with try: + print(f"Going to run {cmd}") proc = Popen(cmd, stdout=PIPE, stderr=PIPE) except OSError as error: if error.errno == errno.ENOENT: @@ -107,6 +108,7 @@ def _mpi_module(self, mpi): self._module_func('unload', to_unload) for to_load in load: + print(f"Trying to load {to_load}") if self._module_func('is-avail', to_load)[0] and \ self._module_func('load', to_load)[0]: print(f'Loaded {to_load}') diff --git a/site_scons/site_tools/daos_builder.py b/site_scons/site_tools/daos_builder.py index 36676952ffd7..c22b4405b525 100644 --- a/site_scons/site_tools/daos_builder.py +++ b/site_scons/site_tools/daos_builder.py @@ -193,6 +193,11 @@ def _find_mpicc(env): """Find mpicc""" mpicc = WhereIs('mpicc') if not mpicc: + import subprocess + cmd = ['rpm', '-ql', 'openmpi-devel'] + print("%s output:\n%s" % (' '.join(cmd), + subprocess.run(cmd, stdout=subprocess.PIPE, + check=False).stdout.decode())) return False env.Replace(CC="mpicc") diff --git a/src/tests/ftest/harness/skip_list.py b/src/tests/ftest/harness/skip_list.py index 8ebcc6f2472c..6a9e7792b1a3 100644 --- a/src/tests/ftest/harness/skip_list.py +++ b/src/tests/ftest/harness/skip_list.py @@ -14,7 +14,7 @@ class TestHarnessSkipsBase(Test): def __init__(self, *args, **kwargs): """Initialize a Test object.""" super().__init__(*args, **kwargs) - self.commit_title_file = os.path.join(os.sep, 'tmp', 'commit_title') + self.commit_fixes_file = os.path.join(os.sep, 'tmp', 
'commit_fixes') def setUp(self): """Use our own CI-skip-list-master to test to run these tests.""" @@ -29,40 +29,40 @@ def setUp(self): [['DAOS-9999', 'test_method_name', 'test_case_6']]|abcd123''') self.cancel_file = self.cancel_file - # create a temporary commit_title file + # create a temporary commit_fixes file try: - os.rename(self.commit_title_file, self.commit_title_file + '.orig') + os.rename(self.commit_fixes_file, self.commit_fixes_file + '.orig') except OSError as excpt: if excpt.errno == errno.ENOENT: pass else: self.fail("Could not rename {0}" - "{{,.orig}}: {1}".format(self.commit_title_file, + "{{,.orig}}: {1}".format(self.commit_fixes_file, excpt)) try: - with open(self.commit_title_file, 'w') as cf_handle: + with open(self.commit_fixes_file, 'w') as cf_handle: cf_handle.write("DAOS-9999 test: Fixing DAOS-9999") except Exception as excpt: # pylint: disable=broad-except self.fail("Could not create {0}: " - "{1}".format(self.commit_title_file, excpt)) + "{1}".format(self.commit_fixes_file, excpt)) super().setUp() def tearDown(self): - """Put back the original commit_title file.""" + """Put back the original commit_fixes file.""" try: - os.unlink(self.commit_title_file) + os.unlink(self.commit_fixes_file) except Exception as excpt: # pylint: disable=broad-except self.fail("Could not remove {0}: " - "{1}".format(self.commit_title_file, excpt)) + "{1}".format(self.commit_fixes_file, excpt)) try: - os.rename(self.commit_title_file + '.orig', self.commit_title_file) + os.rename(self.commit_fixes_file + '.orig', self.commit_fixes_file) except OSError as excpt: if excpt.errno == errno.ENOENT: pass except Exception as excpt: # pylint: disable=broad-except self.fail("Could not rename {0}{{.orig,}}: " - "{1}".format(self.commit_title_file, excpt)) + "{1}".format(self.commit_fixes_file, excpt)) super().tearDown() diff --git a/src/tests/ftest/mpiio/llnl_mpi4py.yaml b/src/tests/ftest/mpiio/llnl_mpi4py.yaml index 7693075b348e..f8030730beb9 100644 --- 
a/src/tests/ftest/mpiio/llnl_mpi4py.yaml +++ b/src/tests/ftest/mpiio/llnl_mpi4py.yaml @@ -24,4 +24,4 @@ client_processes: np: 8 test_repo: llnl: "testmpio" - mpi4py: "/usr/lib64/python3.6/site-packages/mpi4py/tests" + mpi4py: "$python_lib/mpi4py/tests" diff --git a/src/tests/ftest/process_core_files.py b/src/tests/ftest/process_core_files.py index f6114519c398..3a6ecf20439d 100644 --- a/src/tests/ftest/process_core_files.py +++ b/src/tests/ftest/process_core_files.py @@ -222,8 +222,16 @@ def install_debuginfo_packages(self): self.log.info("Installing debuginfo packages for stacktrace creation") install_pkgs = [{'name': 'gdb'}] if self.is_el(): - install_pkgs.append({'name': 'python3-debuginfo'}) - + if self.distro_info.name.lower() == "almalinux": + # pylint: disable=consider-using-f-string + install_pkgs.append({'name': 'python%s.%s-debuginfo' % (sys.version_info.major, + sys.version_info.minor)}) + elif self.distro_info.name.lower() == "rocky": + # https://bugs.rockylinux.org/view.php?id=3499 + pass + else: + # pylint: disable=consider-using-f-string + install_pkgs.append({'name': 'python%s-debuginfo' % sys.version_info.major}) cmds = [] # -debuginfo packages that don't get installed with debuginfo-install @@ -250,9 +258,9 @@ def install_debuginfo_packages(self): dnf_args.extend( ["--enablerepo=*-debuginfo", "--exclude", "nvml-debuginfo", "libpmemobj", "python36", "openmpi3", "gcc"]) - elif self.is_el() and self.distro_info.version == "8": + elif self.is_el() and int(self.distro_info.version) >= 8: dnf_args.extend( - ["--enablerepo=*-debuginfo", "libpmemobj", "python3", "openmpi", "gcc"]) + ["libpmemobj", "python3", "openmpi", "gcc"]) else: raise RunException(f"Unsupported distro: {self.distro_info}") cmds.append(["sudo", "dnf", "-y", "install"] + dnf_args) diff --git a/src/tests/ftest/util/apricot/apricot/test.py b/src/tests/ftest/util/apricot/apricot/test.py index 30314f02849e..099f7f8ff818 100644 --- a/src/tests/ftest/util/apricot/apricot/test.py +++ 
b/src/tests/ftest/util/apricot/apricot/test.py @@ -198,9 +198,8 @@ def cancel_for_ticket(ticket, skip_list): # first see if it's being fixed in this PR try: with open(os.path.join(os.sep, 'tmp', - 'commit_title')) as commit_handle: - if commit_handle.read().strip().startswith( - ticket + " "): + 'commit_fixes')) as commit_handle: + if ticket in commit_handle.read().splitlines(): # fix is in this PR self.log.info("This test variant is included " "in the skip list for ticket %s, " diff --git a/src/tests/ftest/util/mpiio_test_base.py b/src/tests/ftest/util/mpiio_test_base.py index e83887c0382b..75c8bf182d8c 100644 --- a/src/tests/ftest/util/mpiio_test_base.py +++ b/src/tests/ftest/util/mpiio_test_base.py @@ -5,6 +5,7 @@ """ import os +import site from apricot import TestWithServers @@ -68,6 +69,17 @@ def run_test(self, test_repo, test_name): for kwargs in kwargs_list: manager = get_job_manager(self) + # replace python lib path place holder with real python path + if "$python_lib" in kwargs['path']: + kwargs['path'] = None + for path in site.getsitepackages(): + test_path = path.replace('$python_lib', path) + if os.path.isfile(os.path.join(test_path, test_name)): + kwargs['path'] = test_path + break + if not kwargs['path']: + self.fail("Could not find {0} in repo {1}".format(test_name, test_repo)) + # fix up a relative test_repo specification if not kwargs["path"].startswith("/"): mpi_path = os.path.split(manager.command_path)[0] diff --git a/utils/githooks/pre-commit.d/20-flake.sh b/utils/githooks/pre-commit.d/20-flake.sh index 5a0ccfdb6316..402ac48bc6c3 100755 --- a/utils/githooks/pre-commit.d/20-flake.sh +++ b/utils/githooks/pre-commit.d/20-flake.sh @@ -21,6 +21,11 @@ if ! command -v flake8 > /dev/null 2>&1; then exit 0 fi +if [ ! -f .flake8 ]; then + echo " No .flake8, skipping flake checks" + exit 0 +fi + echo " Checking uncommitted code with flake." 
git diff -u | flake8 --diff diff --git a/utils/githooks/pre-commit.d/40-pylint.sh b/utils/githooks/pre-commit.d/40-pylint.sh index 5f2c3a5e6580..9c6ba8a92f65 100755 --- a/utils/githooks/pre-commit.d/40-pylint.sh +++ b/utils/githooks/pre-commit.d/40-pylint.sh @@ -10,10 +10,13 @@ echo "Pylint:" # shellcheck disable=SC1091 . utils/githooks/find_base.sh -if [ "$TARGET" = "HEAD" ]; then - echo " Checking against HEAD" - git diff HEAD --name-only | ./utils/cq/daos_pylint.py --files-from-stdin -else - echo " Checking against branch ${TARGET}" - git diff "$TARGET"... --name-only | ./utils/cq/daos_pylint.py --files-from-stdin + +if [ -f utils/cq/daos_pylint.py ]; then + if [ "$TARGET" = "HEAD" ]; then + echo " Checking against HEAD" + git diff HEAD --name-only | ./utils/cq/daos_pylint.py --files-from-stdin + else + echo " Checking against branch ${TARGET}" + git diff "$TARGET"... --name-only | ./utils/cq/daos_pylint.py --files-from-stdin + fi fi diff --git a/utils/rpms/daos.rpmlintrc b/utils/rpms/daos.rpmlintrc index 92688d2319bb..7d3ba20d286c 100644 --- a/utils/rpms/daos.rpmlintrc +++ b/utils/rpms/daos.rpmlintrc @@ -52,4 +52,4 @@ addFilter("daos-client-tests.x86_64: E: devel-dependency protobuf-c-devel") # a functional test builds daos from source, so it needs the various *-devel packages for daos' build dependencies. 
addFilter("daos-client-tests.x86_64: E: devel-dependency capstone-devel") addFilter("daos-client-tests.x86_64: E: explicit-lib-dependency libcapstone-devel") -addFilter("daos-client-tests.x86_64: E: devel-dependency libcapstone-devel") +addFilter("daos-client-tests.x86_64: E: devel-dependency l(ibcapstone|mdb)-devel") \ No newline at end of file diff --git a/utils/rpms/daos.spec b/utils/rpms/daos.spec index 9b1dce51c411..35c4c1806bf4 100644 --- a/utils/rpms/daos.spec +++ b/utils/rpms/daos.spec @@ -16,7 +16,7 @@ Name: daos Version: 2.3.107 -Release: 6%{?relval}%{?dist} +Release: 7%{?relval}%{?dist} Summary: DAOS Storage Engine License: BSD-2-Clause-Patent @@ -32,10 +32,11 @@ BuildRequires: libfabric-devel >= %{libfabric_version}, libfabric-devel < %{libf BuildRequires: mercury-devel >= %{mercury_version} BuildRequires: gcc-c++ %if (0%{?rhel} >= 8) -BuildRequires: openmpi-devel +%global openmpi openmpi %else -BuildRequires: openmpi3-devel +%global openmpi openmpi3 %endif +BuildRequires: %{openmpi}-devel BuildRequires: hwloc-devel %if ("%{?compiler_args}" == "COMPILER=covc") BuildRequires: bullseye @@ -73,7 +74,7 @@ BuildRequires: libisa-l_crypto-devel BuildRequires: libisal-devel BuildRequires: libisal_crypto-devel %endif -BuildRequires: daos-raft-devel = 0.9.2-1.403.g3d20556%{?dist} +BuildRequires: daos-raft-devel = 0.9.2-2.411.gf0c57a7%{?dist} BuildRequires: openssl-devel BuildRequires: libevent-devel BuildRequires: libyaml-devel @@ -87,7 +88,11 @@ BuildRequires: numactl-devel BuildRequires: CUnit-devel # needed to retrieve PMM region info through control-plane BuildRequires: libipmctl-devel +%if (0%{?rhel} >= 9) +BuildRequires: python-devel +%else BuildRequires: python36-devel +%endif BuildRequires: python3-distro BuildRequires: Lmod %else @@ -199,6 +204,8 @@ This is the package is a metapackage to install all of the test packages Summary: The entire internal DAOS test suite Requires: %{name}-tests = %{version}-%{release} Requires: 
%{name}-client-tests-openmpi%{?_isa} = %{version}-%{release} +Requires: %{name}-client-tests-mpich = %{version}-%{release} +Requires: %{name}-serialize%{?_isa} = %{version}-%{release} BuildArch: noarch %description tests-internal @@ -218,12 +225,14 @@ Requires: git Requires: dbench Requires: lbzip2 Requires: attr +Requires: ior %if (0%{?suse_version} >= 1315) Requires: lua-lmod Requires: libcapstone-devel %else Requires: Lmod Requires: capstone-devel +Requires: lmdb-devel %endif %description client-tests @@ -232,10 +241,32 @@ This is the package needed to run the DAOS test suite (client tests) %package client-tests-openmpi Summary: The DAOS client test suite - tools which need openmpi Requires: %{name}-client-tests%{?_isa} = %{version}-%{release} +Requires: hdf5-%{openmpi}-tests +Requires: hdf5-vol-daos-%{openmpi}-tests +Requires: MACSio-%{openmpi} +Requires: simul-%{openmpi} %description client-tests-openmpi This is the package needed to run the DAOS client test suite openmpi tools +%package client-tests-mpich +Summary: The DAOS client test suite - tools which need mpich +BuildArch: noarch +Requires: %{name}-client-tests%{?_isa} = %{version}-%{release} +Requires: mpifileutils-mpich +Requires: testmpio +Requires: mpich +Requires: ior +Requires: hdf5-mpich-tests +Requires: hdf5-vol-daos-mpich-tests +Requires: MACSio-mpich +Requires: simul-mpich +Requires: romio-tests +Requires: python3-mpi4py-tests + +%description client-tests-mpich +This is the package needed to run the DAOS client test suite mpich tools + %package server-tests Summary: The DAOS server test suite (server tests) Requires: %{name}-server%{?_isa} = %{version}-%{release} @@ -510,6 +541,9 @@ getent passwd daos_agent >/dev/null || useradd -s /sbin/nologin -r -g daos_agent %doc README.md %{_libdir}/libdpar_mpi.so +%files client-tests-mpich +%doc README.md + %files server-tests %doc README.md %{_bindir}/evt_ctl @@ -558,6 +592,9 @@ getent passwd daos_agent >/dev/null || useradd -s /sbin/nologin -r -g 
daos_agent # No files in a shim package %changelog +* Tue Jun 06 2023 Brian J. Murrell 2.3.107-7 +- Build on EL9 + * Fri May 26 2023 Jeff Olivier 2.3.107-6 - Add lmdb-devel and bio_ut for MD on SSD diff --git a/utils/rpms/packaging/Dockerfile.mockbuild b/utils/rpms/packaging/Dockerfile.mockbuild index c804819061a7..9a4de88c6170 100644 --- a/utils/rpms/packaging/Dockerfile.mockbuild +++ b/utils/rpms/packaging/Dockerfile.mockbuild @@ -1,5 +1,5 @@ # -# Copyright 2018-2022 Intel Corporation +# Copyright 2018-2023 Intel Corporation # # 'recipe' for Docker to build an RPM # @@ -52,6 +52,18 @@ RUN (cd $(python3 -c 'import site; print(site.getsitepackages()[-1])') && fi) < rpmlint--ignore-unused-rpmlintrc.patch; \ rm -f rpmlint--ignore-unused-rpmlintrc.patch +# Prime the mock build environment to save time on each build +# https://rpm-software-management.github.io/mock/#mock-inside-podman-fedora-toolbox-or-docker-container +# But it's not working yet +# https://github.com/rpm-software-management/mock/discussions/1095 +# https://github.com/rpm-software-management/mock/issues/1100 +#RUN for chroot in {opensuse-leap-15.4,rocky+epel-{8,9}}-x86_64; do \ +# ls -l /var/cache/mock/$chroot/root_cache/; \ +# id; \ +# su - build -c "id; mock -r i\"$chroot\" --shell id"; \ +# ls -l /var/cache/mock/$chroot/root_cache/; \ +# done; + # show the release that was built ARG CACHEBUST RUN cat /etc/os-release diff --git a/utils/rpms/packaging/Dockerfile.ubuntu.20.04 b/utils/rpms/packaging/Dockerfile.ubuntu.20.04 index c2b1828559db..ec76bfd10861 100644 --- a/utils/rpms/packaging/Dockerfile.ubuntu.20.04 +++ b/utils/rpms/packaging/Dockerfile.ubuntu.20.04 @@ -42,9 +42,10 @@ RUN if [ -n "$REPO_FILE_URL" ]; then \ # Install basic tools RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ autoconf bash ca-certificates curl debhelper dh-make \ - dpkg-dev dh-python doxygen gcc git git-buildpackage locales \ - make patch pbuilder pkg-config python3-dev python3-distro \ - 
python3-distutils rpm scons wget cmake valgrind rpmdevtools + dpkg-dev dh-python doxygen gcc git git-buildpackage \ + javahelper locales make patch pbuilder pkg-config \ + python3-dev python3-distro python3-distutils rpm scons wget \ + cmake valgrind rpmdevtools # use same UID as host and default value of 1000 if not specified ARG UID=1000 diff --git a/utils/rpms/packaging/Makefile_distro_vars.mk b/utils/rpms/packaging/Makefile_distro_vars.mk index e9e0784668ae..6a7f88b60724 100644 --- a/utils/rpms/packaging/Makefile_distro_vars.mk +++ b/utils/rpms/packaging/Makefile_distro_vars.mk @@ -47,6 +47,18 @@ DISTRO_VERSION ?= $(VERSION_ID) ORIG_TARGET_VER := 8 SED_EXPR := 1s/$(DIST)//p endif +ifeq ($(patsubst %epel-9-x86_64,,$(lastword $(subst +, ,$(CHROOT_NAME)))),) +DIST := $(shell rpm $(COMMON_RPM_ARGS) --eval %{?dist}) +VERSION_ID := 9 +DISTRO_ID := el9 +DISTRO_BASE := EL_9 +ifneq ($(DISTRO_VERSION_EL9),) +override DISTRO_VERSION := $(DISTRO_VERSION_EL9) +endif +DISTRO_VERSION ?= $(VERSION_ID) +ORIG_TARGET_VER := 9 +SED_EXPR := 1s/$(DIST)//p +endif ifeq ($(CHROOT_NAME),opensuse-leap-15.2-x86_64) VERSION_ID := 15.2 DISTRO_ID := sl15.2 diff --git a/utils/rpms/packaging/Makefile_packaging.mk b/utils/rpms/packaging/Makefile_packaging.mk index 7adda97cd9c2..b19489e481a6 100644 --- a/utils/rpms/packaging/Makefile_packaging.mk +++ b/utils/rpms/packaging/Makefile_packaging.mk @@ -39,6 +39,7 @@ PR_REPOS ?= $(shell git show -s --format=%B | sed -ne 's/^PR-rep LEAP_15_PR_REPOS ?= $(shell git show -s --format=%B | sed -ne 's/^PR-repos-leap15: *\(.*\)/\1/p') EL_7_PR_REPOS ?= $(shell git show -s --format=%B | sed -ne 's/^PR-repos-el7: *\(.*\)/\1/p') EL_8_PR_REPOS ?= $(shell git show -s --format=%B | sed -ne 's/^PR-repos-el8: *\(.*\)/\1/p') +EL_9_PR_REPOS ?= $(shell git show -s --format=%B | sed -ne 's/^PR-repos-el9: *\(.*\)/\1/p') UBUNTU_20_04_PR_REPOS ?= $(shell git show -s --format=%B | sed -ne 's/^PR-repos-ubuntu20: *\(.*\)/\1/p') REPO_FILES_PR ?= $(shell git show -s --format=%B 
| sed -ne 's/^Repo-files-PR: *\(.*\)/\1/p') @@ -66,7 +67,7 @@ DEB_BUILD := $(DEB_TOP)/$(NAME)-$(VERSION) DEB_TARBASE := $(DEB_TOP)/$(DEB_NAME)_$(VERSION) SOURCE ?= $(eval SOURCE := $(shell CHROOT_NAME=$(CHROOT_NAME) $(SPECTOOL) $(COMMON_RPM_ARGS) -S -l $(SPEC) | sed -e 2,\$$d -e 's/\#/\\\#/g' -e 's/.*: *//'))$(SOURCE) PATCHES ?= $(eval PATCHES := $(shell CHROOT_NAME=$(CHROOT_NAME) $(SPECTOOL) $(COMMON_RPM_ARGS) -l $(SPEC) | sed -ne 1d -e 's/.*: *//' -e 's/.*\///' -e '/\.patch/p'))$(PATCHES) -OTHER_SOURCES := $(eval OTHER_SOURCES := $(shell CHROOT_NAME=$(CHROOT_NAME) $(SPECTOOL) $(COMMON_RPM_ARGS) -l $(SPEC) | sed -ne 1d -e 's/.*: *//' -e 's/.*\///' -e '/\.patch/d' -e p))$(OTHER_SOURCES) +OTHER_SOURCES := $(eval OTHER_SOURCES := $(shell CHROOT_NAME=$(CHROOT_NAME) $(SPECTOOL) $(COMMON_RPM_ARGS) -l $(SPEC) | sed -ne 1d -e '/already present/d' -e '/^Patch.*:/d' -e 's/Source.*: *//' -e 's/.*\///' -e p))$(OTHER_SOURCES) SOURCES := $(addprefix _topdir/SOURCES/,$(notdir $(SOURCE)) $(PATCHES) $(OTHER_SOURCES)) ifeq ($(ID_LIKE),debian) DEBS := $(addsuffix _$(VERSION)-1_amd64.deb,$(shell sed -n '/-udeb/b; s,^Package:[[:blank:]],$(DEB_TOP)/,p' $(TOPDIR)/debian/control)) @@ -84,7 +85,7 @@ define distro_map case $(DISTRO_ID) in \ el7) distro="centos7" \ ;; \ - el8) distro="el8" \ + el*) distro="$(DISTRO_ID)" \ ;; \ sle12.3) distro="sles12.3" \ ;; \ @@ -416,6 +417,7 @@ packaging_check: --exclude install \ --exclude packaging \ --exclude utils \ + --exclude .vscode \ -bur $(PACKAGING_CHECK_DIR)/ packaging/; then \ exit 1; \ fi diff --git a/utils/rpms/packaging/debian_chrootbuild b/utils/rpms/packaging/debian_chrootbuild index 03b232a0aab8..cc2cc96d8b0a 100755 --- a/utils/rpms/packaging/debian_chrootbuild +++ b/utils/rpms/packaging/debian_chrootbuild @@ -12,6 +12,7 @@ sudo pbuilder create \ $DISTRO_ID_OPT repo_args="" +repos_added=() for repo in $DISTRO_BASE_PR_REPOS $PR_REPOS; do branch="master" build_number="lastSuccessfulBuild" @@ -23,6 +24,11 @@ for repo in 
$DISTRO_BASE_PR_REPOS $PR_REPOS; do branch="${branch%:*}" fi fi + if [[ " ${repos_added[*]} " = *\ ${repo}\ * ]]; then + # don't add duplicates, first found wins + continue + fi + repos_added+=("$repo") repo_args="$repo_args|deb [trusted=yes] ${JENKINS_URL:-https://build.hpdd.intel.com/}job/daos-stack/job/$repo/job/$branch/$build_number/artifact/artifacts/$DISTRO/ ./" done @@ -31,6 +37,13 @@ repo_args+="|$(curl -sSf "$REPO_FILE_URL"daos_ci-"$DISTRO"-artifactory.list | -e 's/signed-by=.*\.gpg/trusted=yes/' | sed -e ':a; N; $!ba; s/\n/|/g')" for repo in $JOB_REPOS; do + repo_name=${repo##*://} + repo_name=${repo_name//\//_} + if [[ " ${repos_added[*]} " = *\ ${repo_name}\ * ]]; then + # don't add duplicates, first found wins + continue + fi + repos_added+=("$repo_name") repo_args+="|deb ${repo} $VERSION_CODENAME main" done # NB: This PPA is needed to support modern go toolchains on ubuntu 20.04. diff --git a/utils/rpms/packaging/rpm_chrootbuild b/utils/rpms/packaging/rpm_chrootbuild index fb6aa0045f2d..bfb49ed37ab5 100755 --- a/utils/rpms/packaging/rpm_chrootbuild +++ b/utils/rpms/packaging/rpm_chrootbuild @@ -18,6 +18,11 @@ config_opts['module_setup_commands'] = [ ('disable', 'go-toolset') ] EOF +elif [[ $CHROOT_NAME == *epel-9-x86_64 ]]; then + # DO NOT LAND + # waiting for an answer on https://github.com/rpm-software-management/mock/discussions/1078 + ln -sf /etc/mock/almalinux-9-x86_64.cfg "$mock_config_dir" + ln -sf /etc/mock/rocky-9-x86_64.cfg "$mock_config_dir" fi # Use dnf on CentOS 7 @@ -65,6 +70,7 @@ if [ -n "${ARTIFACTORY_URL:-}" ] && "$LOCAL_REPOS"; then fi fi +repos_added=() for repo in $DISTRO_BASE_PR_REPOS $PR_REPOS; do branch="master" build_number="lastSuccessfulBuild" @@ -76,20 +82,30 @@ for repo in $DISTRO_BASE_PR_REPOS $PR_REPOS; do branch="${branch%:*}" fi fi - repo_adds+=("--enablerepo $repo:$branch:$build_number") - echo -e "[$repo:$branch:$build_number]\n\ -name=$repo:$branch:$build_number\n\ 
-baseurl=${JENKINS_URL:-https://build.hpdd.intel.com/}job/daos-stack/job/$repo/job/$branch/$build_number/artifact/artifacts/$DISTRO/\n\ + if [[ " ${repos_added[*]} " = *\ ${repo}\ * ]]; then + # don't add duplicates, first found wins + continue + fi + repos_added+=("$repo") + repo_adds+=("--enablerepo $repo:${branch//[@\/]/_}:$build_number") + echo -e "[$repo:${branch//[@\/]/_}:$build_number]\n\ +name=$repo:${branch//[@\/]/_}:$build_number\n\ +baseurl=${JENKINS_URL:-https://build.hpdd.intel.com/}job/daos-stack/job/$repo/job/${branch//\//%2F}/$build_number/artifact/artifacts/$DISTRO/\n\ enabled=1\n\ gpgcheck=False\n" >> "$cfg_file" done for repo in $JOB_REPOS; do repo_name=${repo##*://} repo_name=${repo_name//\//_} + if [[ " ${repos_added[*]} " = *\ ${repo_name}\ * ]]; then + # don't add duplicates, first found wins + continue + fi + repos_added+=("$repo_name") repo_adds+=("--enablerepo $repo_name") - echo -e "[${repo_name//@/_}]\n\ + echo -e "[${repo_name//[@\/]/_}]\n\ name=${repo_name}\n\ -baseurl=${repo}\n\ +baseurl=${repo}\n\ enabled=1\n" >> "$cfg_file" done echo "\"\"\"" >> "$cfg_file"