diff --git a/.ci/pipeline/ci.yml b/.ci/pipeline/ci.yml
index daf55bca726..315ca5cabb7 100755
--- a/.ci/pipeline/ci.yml
+++ b/.ci/pipeline/ci.yml
@@ -49,615 +49,615 @@ jobs:
     displayName: 'editorconfig-checker'
     failOnStderr: true
 
-# - job: 'LinuxMakeGNU_MKL'
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx_gnu'
-#     platform.type : 'lnx32e'
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   steps:
-#   - script: |
-#       .ci/env/apt.sh dev-base
-#     displayName: 'apt-get and conda install'
-#   - script: |
-#       .ci/env/apt.sh mkl
-#     displayName: 'mkl installation'
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - script: |
-#       .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target daal --conda-env ci-env
-#     displayName: 'make daal'
-#   - script: |
-#       .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target onedal_c
-#     displayName: 'make onedal_c'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) build'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Upload build artifacts'
-#     continueOnError: true
-#   - script: |
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface daal/cpp --build-system cmake
-#     displayName: 'daal/cpp examples'
-#   - script: |
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface oneapi/cpp --build-system cmake
-#     displayName: 'oneapi/cpp examples'
-#   - script: |
-#       .ci/scripts/test.sh --test-kind samples --build-dir $(release.dir) --compiler gnu --interface daal/cpp/mpi --conda-env ci-env --build-system cmake
-#     displayName: 'daal/cpp/mpi samples'
-#   - script: |
-#       deploy/nuget/prepare_dal_nuget.sh --release-dir $(release.dir) --build-nupkg yes
-#       tree -h -I include __nuget/inteldal*/
-#       ls -lh __nuget/inteldal*.nupkg
-#     displayName: 'nuget pkg'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) fail'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Uploading on fail'
-#     condition: failed()
-#     continueOnError: true
+- job: 'LinuxMakeGNU_MKL'
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx_gnu'
+    platform.type : 'lnx32e'
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  steps:
+  - script: |
+      .ci/env/apt.sh dev-base
+    displayName: 'apt-get and conda install'
+  - script: |
+      .ci/env/apt.sh mkl
+    displayName: 'mkl installation'
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - script: |
+      .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target daal --conda-env ci-env
+    displayName: 'make daal'
+  - script: |
+      .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target onedal_c
+    displayName: 'make onedal_c'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) build'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Upload build artifacts'
+    continueOnError: true
+  - script: |
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface daal/cpp --build-system cmake
+    displayName: 'daal/cpp examples'
+  - script: |
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface oneapi/cpp --build-system cmake
+    displayName: 'oneapi/cpp examples'
+  - script: |
+      .ci/scripts/test.sh --test-kind samples --build-dir $(release.dir) --compiler gnu --interface daal/cpp/mpi --conda-env ci-env --build-system cmake
+    displayName: 'daal/cpp/mpi samples'
+  - script: |
+      deploy/nuget/prepare_dal_nuget.sh --release-dir $(release.dir) --build-nupkg yes
+      tree -h -I include __nuget/inteldal*/
+      ls -lh __nuget/inteldal*.nupkg
+    displayName: 'nuget pkg'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) fail'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Uploading on fail'
+    condition: failed()
+    continueOnError: true
 
-# - job: 'LinuxMakeGNU_OpenBLAS_ARM'
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx_gnu'
-#     platform.type : 'lnxarm'
-#     OPENBLAS_CACHE_DIR: $(Pipeline.Workspace)/openblas-aarch64-gnu
-#     TBB_CACHE_DIR: $(Pipeline.Workspace)/tbb-aarch64-gnu
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   steps:
-#   - script: |
-#       .ci/env/apt.sh dev-base
-#     displayName: 'apt-get and conda install'
-#   - script: |
-#       .ci/env/apt.sh gnu-cross-compilers aarch64
-#     displayName: 'arm-compiler installation'
-#   - script: |
-#       .ci/env/apt.sh qemu-apt
-#     displayName: 'qemu-emulation installation'
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - task: Cache@2
-#     inputs:
-#       key: '"gcc" | "aarch64" | "openblas" | "$(OPENBLAS_VERSION)" | "$(VM_IMAGE)" | "ILP64"'
-#       path: $(OPENBLAS_CACHE_DIR)
-#       cacheHitVar: OPENBLAS_RESTORED
-#   - script: |
-#       .ci/env/openblas.sh --target ARMV8 --host-compiler gcc --compiler aarch64-linux-gnu-gcc --cflags -march=armv8-a+sve --cross-compile --target-arch aarch64 --prefix $(OPENBLAS_CACHE_DIR) --version $(OPENBLAS_VERSION)
-#     displayName: 'Build OpenBLAS'
-#     condition: ne(variables.OPENBLAS_RESTORED, 'true')
-#   - task: Cache@2
-#     inputs:
-#       key: '"gcc" | "aarch64" | "tbb" | "$(TBB_VERSION)" | "$(VM_IMAGE)"'
-#       path: $(TBB_CACHE_DIR)
-#       cacheHitVar: TBB_RESTORED
-#   - script: |
-#       .ci/env/tbb.sh --cross-compile --toolchain-file $(Build.Repository.LocalPath)/.ci/env/arm-gnu-crosscompile-toolchain.cmake --target-arch aarch64 --prefix $(TBB_CACHE_DIR) --version $(TBB_VERSION)
-#     displayName: 'Build oneTBB'
-#     condition: ne(variables.TBB_RESTORED, 'true')
-#   - script: |
-#       .ci/scripts/build.sh --compiler gnu --optimizations sve --target daal --backend-config ref --conda-env ci-env --cross-compile --plat lnxarm --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
-#     displayName: 'make daal'
-#   - script: |
-#       .ci/scripts/build.sh --compiler gnu --optimizations sve --target onedal_c --backend-config ref --cross-compile --plat lnxarm --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
-#     displayName: 'make onedal_c'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) ARM OpenBLAS gnu build'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Upload build artifacts'
-#     continueOnError: true
-#   - script: |
-#       export QEMU_LD_PREFIX=/usr/aarch64-linux-gnu
-#       export QEMU_CPU="max,sve-default-vector-length=256"
-#       export TBBROOT=$(TBB_CACHE_DIR)
-#       export ARCH_ONEDAL=aarch64
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface daal/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
-#     displayName: 'daal/cpp examples'
-#   - script: |
-#       export QEMU_LD_PREFIX=/usr/aarch64-linux-gnu
-#       export QEMU_CPU="max,sve-default-vector-length=256"
-#       export TBBROOT=$(TBB_CACHE_DIR)
-#       export ARCH_ONEDAL=aarch64
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface oneapi/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
-#     displayName: 'oneapi/cpp examples'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) fail'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Uploading on fail'
-#     condition: failed()
-#     continueOnError: true
+- job: 'LinuxMakeGNU_OpenBLAS_ARM'
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx_gnu'
+    platform.type : 'lnxarm'
+    OPENBLAS_CACHE_DIR: $(Pipeline.Workspace)/openblas-aarch64-gnu
+    TBB_CACHE_DIR: $(Pipeline.Workspace)/tbb-aarch64-gnu
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  steps:
+  - script: |
+      .ci/env/apt.sh dev-base
+    displayName: 'apt-get and conda install'
+  - script: |
+      .ci/env/apt.sh gnu-cross-compilers aarch64
+    displayName: 'arm-compiler installation'
+  - script: |
+      .ci/env/apt.sh qemu-apt
+    displayName: 'qemu-emulation installation'
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - task: Cache@2
+    inputs:
+      key: '"gcc" | "aarch64" | "openblas" | "$(OPENBLAS_VERSION)" | "$(VM_IMAGE)" | "ILP64"'
+      path: $(OPENBLAS_CACHE_DIR)
+      cacheHitVar: OPENBLAS_RESTORED
+  - script: |
+      .ci/env/openblas.sh --target ARMV8 --host-compiler gcc --compiler aarch64-linux-gnu-gcc --cflags -march=armv8-a+sve --cross-compile --target-arch aarch64 --prefix $(OPENBLAS_CACHE_DIR) --version $(OPENBLAS_VERSION)
+    displayName: 'Build OpenBLAS'
+    condition: ne(variables.OPENBLAS_RESTORED, 'true')
+  - task: Cache@2
+    inputs:
+      key: '"gcc" | "aarch64" | "tbb" | "$(TBB_VERSION)" | "$(VM_IMAGE)"'
+      path: $(TBB_CACHE_DIR)
+      cacheHitVar: TBB_RESTORED
+  - script: |
+      .ci/env/tbb.sh --cross-compile --toolchain-file $(Build.Repository.LocalPath)/.ci/env/arm-gnu-crosscompile-toolchain.cmake --target-arch aarch64 --prefix $(TBB_CACHE_DIR) --version $(TBB_VERSION)
+    displayName: 'Build oneTBB'
+    condition: ne(variables.TBB_RESTORED, 'true')
+  - script: |
+      .ci/scripts/build.sh --compiler gnu --optimizations sve --target daal --backend-config ref --conda-env ci-env --cross-compile --plat lnxarm --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
+    displayName: 'make daal'
+  - script: |
+      .ci/scripts/build.sh --compiler gnu --optimizations sve --target onedal_c --backend-config ref --cross-compile --plat lnxarm --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
+    displayName: 'make onedal_c'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) ARM OpenBLAS gnu build'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Upload build artifacts'
+    continueOnError: true
+  - script: |
+      export QEMU_LD_PREFIX=/usr/aarch64-linux-gnu
+      export QEMU_CPU="max,sve-default-vector-length=256"
+      export TBBROOT=$(TBB_CACHE_DIR)
+      export ARCH_ONEDAL=aarch64
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface daal/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
+    displayName: 'daal/cpp examples'
+  - script: |
+      export QEMU_LD_PREFIX=/usr/aarch64-linux-gnu
+      export QEMU_CPU="max,sve-default-vector-length=256"
+      export TBBROOT=$(TBB_CACHE_DIR)
+      export ARCH_ONEDAL=aarch64
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface oneapi/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
+    displayName: 'oneapi/cpp examples'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) fail'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Uploading on fail'
+    condition: failed()
+    continueOnError: true
 
-# - job: 'LinuxMakeLLVM_OpenBLAS_ARM'
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx_clang'
-#     platform.type : 'lnxarm'
-#     OPENBLAS_CACHE_DIR : $(Pipeline.Workspace)/openblas-aarch64-clang
-#     TBB_CACHE_DIR : $(Pipeline.Workspace)/tbb-aarch64-clang
-#     SYSROOT_CACHE_DIR: $(Pipeline.Workspace)/sysroot-aarch64
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   steps:
-#   - script: |
-#       .ci/env/apt.sh dev-base
-#     displayName: 'apt-get and conda install'
-#   - script: |
-#       .ci/env/apt.sh gnu-cross-compilers aarch64
-#     displayName: 'arm-compiler installation'
-#   - script: |
-#       .ci/env/apt.sh llvm-version 18
-#     displayName: 'llvm 18 installation'
-#   - script: |
-#       .ci/env/apt.sh qemu-apt
-#     displayName: 'qemu-emulation installation'
-#   - task: Cache@2
-#     inputs:
-#       key: '"aarch64" | "sysroot" | "$(SYSROOT_OS)"'
-#       path: $(SYSROOT_CACHE_DIR)
-#       cacheHitVar: SYSROOT_RESTORED
-#   - script: |
-#       .ci/env/apt.sh build-sysroot $(Pipeline.Workspace) arm64 $(SYSROOT_OS) sysroot-aarch64
-#     displayName: 'build aarch64 sysroot'
-#     condition: ne(variables.SYSROOT_RESTORED, 'true')
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - task: Cache@2
-#     inputs:
-#       key: '"clang" | "18" | "aarch64" | "openblas" | "$(OPENBLAS_VERSION)" | "$(VM_IMAGE)" | "ILP64"'
-#       path: $(OPENBLAS_CACHE_DIR)
-#       cacheHitVar: OPENBLAS_RESTORED
-#   - script: |
-#       .ci/env/openblas.sh --target ARMV8 --host-compiler gcc --compiler clang --target-arch arm --cflags -march=armv8-a+sve --cross-compile --prefix $(OPENBLAS_CACHE_DIR) --sysroot $(SYSROOT_CACHE_DIR) --version $(OPENBLAS_VERSION)
-#     displayName: 'Build OpenBLAS'
-#     condition: ne(variables.OPENBLAS_RESTORED, 'true')
-#   - task: Cache@2
-#     inputs:
-#       key: '"clang" | "aarch64" | "tbb" | "$(TBB_VERSION)" | "$(VM_IMAGE)"'
-#       path: $(TBB_CACHE_DIR)
-#       cacheHitVar: TBB_RESTORED
-#   - script: |
-#       export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
-#       .ci/env/tbb.sh --cross-compile --toolchain-file $(Build.Repository.LocalPath)/.ci/env/arm-clang-crosscompile-toolchain.cmake --target-arch aarch64 --prefix $(TBB_CACHE_DIR) --version $(TBB_VERSION)
-#     displayName: 'Build oneTBB'
-#     condition: ne(variables.TBB_RESTORED, 'true')
-#   - script: |
-#       .ci/scripts/build.sh --compiler clang --optimizations sve --target daal --backend-config ref --conda-env ci-env --cross-compile --plat lnxarm --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
-#     displayName: 'make daal'
-#   - script: |
-#       .ci/scripts/build.sh --compiler clang --optimizations sve --target onedal_c --backend-config ref --cross-compile --plat lnxarm --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
-#     displayName: 'make onedal_c'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) ARM OpenBLAS LLVM build'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Upload build artifacts'
-#     continueOnError: true
-#   - script: |
-#       export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
-#       export QEMU_CPU="max,sve-default-vector-length=256"
-#       export TBBROOT=$(TBB_CACHE_DIR)
-#       export ARCH_ONEDAL=aarch64
-#       export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface daal/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
-#     displayName: 'daal/cpp examples'
-#   - script: |
-#       export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
-#       export QEMU_CPU="max,sve-default-vector-length=256"
-#       export TBBROOT=$(TBB_CACHE_DIR)
-#       export ARCH_ONEDAL=aarch64
-#       export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface oneapi/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
-#     displayName: 'oneapi/cpp examples'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) fail'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Uploading on fail'
-#     condition: failed()
-#     continueOnError: true
+- job: 'LinuxMakeLLVM_OpenBLAS_ARM'
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx_clang'
+    platform.type : 'lnxarm'
+    OPENBLAS_CACHE_DIR : $(Pipeline.Workspace)/openblas-aarch64-clang
+    TBB_CACHE_DIR : $(Pipeline.Workspace)/tbb-aarch64-clang
+    SYSROOT_CACHE_DIR: $(Pipeline.Workspace)/sysroot-aarch64
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  steps:
+  - script: |
+      .ci/env/apt.sh dev-base
+    displayName: 'apt-get and conda install'
+  - script: |
+      .ci/env/apt.sh gnu-cross-compilers aarch64
+    displayName: 'arm-compiler installation'
+  - script: |
+      .ci/env/apt.sh llvm-version 18
+    displayName: 'llvm 18 installation'
+  - script: |
+      .ci/env/apt.sh qemu-apt
+    displayName: 'qemu-emulation installation'
+  - task: Cache@2
+    inputs:
+      key: '"aarch64" | "sysroot" | "$(SYSROOT_OS)"'
+      path: $(SYSROOT_CACHE_DIR)
+      cacheHitVar: SYSROOT_RESTORED
+  - script: |
+      .ci/env/apt.sh build-sysroot $(Pipeline.Workspace) arm64 $(SYSROOT_OS) sysroot-aarch64
+    displayName: 'build aarch64 sysroot'
+    condition: ne(variables.SYSROOT_RESTORED, 'true')
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - task: Cache@2
+    inputs:
+      key: '"clang" | "18" | "aarch64" | "openblas" | "$(OPENBLAS_VERSION)" | "$(VM_IMAGE)" | "ILP64"'
+      path: $(OPENBLAS_CACHE_DIR)
+      cacheHitVar: OPENBLAS_RESTORED
+  - script: |
+      .ci/env/openblas.sh --target ARMV8 --host-compiler gcc --compiler clang --target-arch arm --cflags -march=armv8-a+sve --cross-compile --prefix $(OPENBLAS_CACHE_DIR) --sysroot $(SYSROOT_CACHE_DIR) --version $(OPENBLAS_VERSION)
+    displayName: 'Build OpenBLAS'
+    condition: ne(variables.OPENBLAS_RESTORED, 'true')
+  - task: Cache@2
+    inputs:
+      key: '"clang" | "aarch64" | "tbb" | "$(TBB_VERSION)" | "$(VM_IMAGE)"'
+      path: $(TBB_CACHE_DIR)
+      cacheHitVar: TBB_RESTORED
+  - script: |
+      export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
+      .ci/env/tbb.sh --cross-compile --toolchain-file $(Build.Repository.LocalPath)/.ci/env/arm-clang-crosscompile-toolchain.cmake --target-arch aarch64 --prefix $(TBB_CACHE_DIR) --version $(TBB_VERSION)
+    displayName: 'Build oneTBB'
+    condition: ne(variables.TBB_RESTORED, 'true')
+  - script: |
+      .ci/scripts/build.sh --compiler clang --optimizations sve --target daal --backend-config ref --conda-env ci-env --cross-compile --plat lnxarm --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
+    displayName: 'make daal'
+  - script: |
+      .ci/scripts/build.sh --compiler clang --optimizations sve --target onedal_c --backend-config ref --cross-compile --plat lnxarm --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
+    displayName: 'make onedal_c'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) ARM OpenBLAS LLVM build'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Upload build artifacts'
+    continueOnError: true
+  - script: |
+      export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
+      export QEMU_CPU="max,sve-default-vector-length=256"
+      export TBBROOT=$(TBB_CACHE_DIR)
+      export ARCH_ONEDAL=aarch64
+      export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface daal/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
+    displayName: 'daal/cpp examples'
+  - script: |
+      export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
+      export QEMU_CPU="max,sve-default-vector-length=256"
+      export TBBROOT=$(TBB_CACHE_DIR)
+      export ARCH_ONEDAL=aarch64
+      export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface oneapi/cpp --build-system cmake --platform lnxarm --cross-compile --backend ref
+    displayName: 'oneapi/cpp examples'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) fail'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Uploading on fail'
+    condition: failed()
+    continueOnError: true
 
-# - job: 'LinuxMakeLLVM_OpenBLAS_rv64'
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx_clang'
-#     platform.type : 'lnxriscv64'
-#     OPENBLAS_VERSION : 'v0.3.27'
-#     OPENBLAS_CACHE_DIR : $(Pipeline.Workspace)/openblas-riscv64-clang
-#     TBB_VERSION : 'v2021.10.0'
-#     TBB_CACHE_DIR : $(Pipeline.Workspace)/tbb-riscv64-clang
-#     SYSROOT_CACHE_DIR: $(Pipeline.Workspace)/sysroot-riscv64
-#   pool:
-#     vmImage: 'ubuntu-22.04'
-#   steps:
-#   - script: |
-#       .ci/env/apt.sh dev-base
-#     displayName: 'apt-get and conda install'
-#   - script: |
-#       .ci/env/apt.sh gnu-cross-compilers riscv64
-#     displayName: 'riscv64-compiler installation'
-#   - script: |
-#       .ci/env/apt.sh llvm-version 18
-#     displayName: 'llvm 18 installation'
-#   - script: |
-#       .ci/env/apt.sh qemu-deb
-#     displayName: 'qemu-emulation installation'
-#   - task: Cache@2
-#     inputs:
-#       key: '"riscv64" | "sysroot"'
-#       path: $(SYSROOT_CACHE_DIR)
-#       cacheHitVar: SYSROOT_RESTORED
-#   - script: |
-#       .ci/env/apt.sh build-sysroot $(Pipeline.Workspace) riscv64 $(SYSROOT_OS) sysroot-riscv64
-#     displayName: 'Build riscv64 sysroot'
-#     condition: ne(variables.SYSROOT_RESTORED, 'true')
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - task: Cache@2
-#     inputs:
-#       key: '"clang" | "riscv64" | "openblas" | "$(OPENBLAS_VERSION)" | "ILP64"'
-#       path: $(OPENBLAS_CACHE_DIR)
-#       cacheHitVar: OPENBLAS_RESTORED
-#   - script: |
-#       .ci/env/openblas.sh --target RISCV64_ZVL128B --host-compiler gcc --compiler clang --target-arch riscv64 --cross-compile --prefix $(OPENBLAS_CACHE_DIR) --sysroot $(SYSROOT_CACHE_DIR) --version $(OPENBLAS_VERSION)
-#     displayName: 'Build OpenBLAS'
-#     condition: ne(variables.OPENBLAS_RESTORED, 'true')
-#   - task: Cache@2
-#     inputs:
-#       key: '"clang" | "riscv64" | "tbb" | "$(TBB_VERSION)"'
-#       path: $(TBB_CACHE_DIR)
-#       cacheHitVar: TBB_RESTORED
-#   - script: |
-#       export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
-#       .ci/env/tbb.sh --cross-compile --toolchain-file $(Build.Repository.LocalPath)/.ci/env/riscv64-clang-crosscompile-toolchain.cmake --target-arch riscv64 --prefix $(TBB_CACHE_DIR) --version $(TBB_VERSION)
-#     displayName: 'Build oneTBB'
-#     condition: ne(variables.TBB_RESTORED, 'true')
-#   - script: |
-#       .ci/scripts/build.sh --compiler clang --optimizations rv64 --target daal --backend-config ref --conda-env ci-env --cross-compile --plat lnxriscv64 --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
-#     displayName: 'make daal'
-#   - script: |
-#       .ci/scripts/build.sh --compiler clang --optimizations rv64 --target onedal_c --backend-config ref --cross-compile --plat lnxriscv64 --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
-#     displayName: 'make onedal_c'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) RISCV64 OpenBLAS build'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Upload build artifacts'
-#     continueOnError: true
-#   - script: |
-#       export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
-#       export QEMU_CPU="max"
-#       export TBBROOT=$(TBB_CACHE_DIR)
-#       export ARCH_ONEDAL=riscv64
-#       export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface daal/cpp --build-system cmake --platform lnxriscv64 --cross-compile --backend ref
-#     displayName: 'daal/cpp examples'
-#   - script: |
-#       export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
-#       export QEMU_CPU="max"
-#       export TBBROOT=$(TBB_CACHE_DIR)
-#       export ARCH_ONEDAL=riscv64
-#       export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface oneapi/cpp --build-system cmake --platform lnxriscv64 --cross-compile --backend ref
-#     displayName: 'oneapi/cpp examples'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) fail'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Uploading on fail'
-#     condition: failed()
-#     continueOnError: true
+- job: 'LinuxMakeLLVM_OpenBLAS_rv64'
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx_clang'
+    platform.type : 'lnxriscv64'
+    OPENBLAS_VERSION : 'v0.3.27'
+    OPENBLAS_CACHE_DIR : $(Pipeline.Workspace)/openblas-riscv64-clang
+    TBB_VERSION : 'v2021.10.0'
+    TBB_CACHE_DIR : $(Pipeline.Workspace)/tbb-riscv64-clang
+    SYSROOT_CACHE_DIR: $(Pipeline.Workspace)/sysroot-riscv64
+  pool:
+    vmImage: 'ubuntu-22.04'
+  steps:
+  - script: |
+      .ci/env/apt.sh dev-base
+    displayName: 'apt-get and conda install'
+  - script: |
+      .ci/env/apt.sh gnu-cross-compilers riscv64
+    displayName: 'riscv64-compiler installation'
+  - script: |
+      .ci/env/apt.sh llvm-version 18
+    displayName: 'llvm 18 installation'
+  - script: |
+      .ci/env/apt.sh qemu-deb
+    displayName: 'qemu-emulation installation'
+  - task: Cache@2
+    inputs:
+      key: '"riscv64" | "sysroot"'
+      path: $(SYSROOT_CACHE_DIR)
+      cacheHitVar: SYSROOT_RESTORED
+  - script: |
+      .ci/env/apt.sh build-sysroot $(Pipeline.Workspace) riscv64 $(SYSROOT_OS) sysroot-riscv64
+    displayName: 'Build riscv64 sysroot'
+    condition: ne(variables.SYSROOT_RESTORED, 'true')
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - task: Cache@2
+    inputs:
+      key: '"clang" | "riscv64" | "openblas" | "$(OPENBLAS_VERSION)" | "ILP64"'
+      path: $(OPENBLAS_CACHE_DIR)
+      cacheHitVar: OPENBLAS_RESTORED
+  - script: |
+      .ci/env/openblas.sh --target RISCV64_ZVL128B --host-compiler gcc --compiler clang --target-arch riscv64 --cross-compile --prefix $(OPENBLAS_CACHE_DIR) --sysroot $(SYSROOT_CACHE_DIR) --version $(OPENBLAS_VERSION)
+    displayName: 'Build OpenBLAS'
+    condition: ne(variables.OPENBLAS_RESTORED, 'true')
+  - task: Cache@2
+    inputs:
+      key: '"clang" | "riscv64" | "tbb" | "$(TBB_VERSION)"'
+      path: $(TBB_CACHE_DIR)
+      cacheHitVar: TBB_RESTORED
+  - script: |
+      export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
+      .ci/env/tbb.sh --cross-compile --toolchain-file $(Build.Repository.LocalPath)/.ci/env/riscv64-clang-crosscompile-toolchain.cmake --target-arch riscv64 --prefix $(TBB_CACHE_DIR) --version $(TBB_VERSION)
+    displayName: 'Build oneTBB'
+    condition: ne(variables.TBB_RESTORED, 'true')
+  - script: |
+      .ci/scripts/build.sh --compiler clang --optimizations rv64 --target daal --backend-config ref --conda-env ci-env --cross-compile --plat lnxriscv64 --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
+    displayName: 'make daal'
+  - script: |
+      .ci/scripts/build.sh --compiler clang --optimizations rv64 --target onedal_c --backend-config ref --cross-compile --plat lnxriscv64 --sysroot $(SYSROOT_CACHE_DIR) --blas-dir $(OPENBLAS_CACHE_DIR) --tbb-dir $(TBB_CACHE_DIR)
+    displayName: 'make onedal_c'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) RISCV64 OpenBLAS build'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Upload build artifacts'
+    continueOnError: true
+  - script: |
+      export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
+      export QEMU_CPU="max"
+      export TBBROOT=$(TBB_CACHE_DIR)
+      export ARCH_ONEDAL=riscv64
+      export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface daal/cpp --build-system cmake --platform lnxriscv64 --cross-compile --backend ref
+    displayName: 'daal/cpp examples'
+  - script: |
+      export QEMU_LD_PREFIX=$(SYSROOT_CACHE_DIR)
+      export QEMU_CPU="max"
+      export TBBROOT=$(TBB_CACHE_DIR)
+      export ARCH_ONEDAL=riscv64
+      export ONEDAL_SYSROOT=$(SYSROOT_CACHE_DIR)
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface oneapi/cpp --build-system cmake --platform lnxriscv64 --cross-compile --backend ref
+    displayName: 'oneapi/cpp examples'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) fail'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Uploading on fail'
+    condition: failed()
+    continueOnError: true
 
-# - job: 'LinuxMakeGNU_OpenBLAS_x86'
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx_gnu'
-#     platform.type : 'lnx32e'
-#     OPENBLAS_CACHE_DIR: $(Pipeline.Workspace)/openblas-x86_64-gnu
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   steps:
-#   - script: |
-#       .ci/env/apt.sh dev-base
-#     displayName: 'apt-get and conda install'
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - task: Cache@2
-#     inputs:
-#       key: '"gcc" | "x86_64" | "openblas" | "$(OPENBLAS_VERSION)" | "$(VM_IMAGE)" | "ILP64"'
-#       path: $(OPENBLAS_CACHE_DIR)
-#       cacheHitVar: OPENBLAS_RESTORED
-#   - script: |
-#       .ci/env/openblas.sh --target-arch x86_64 --prefix $(OPENBLAS_CACHE_DIR) --version $(OPENBLAS_VERSION)
-#     displayName: 'Build OpenBLAS'
-#     condition: ne(variables.OPENBLAS_RESTORED, 'true')
-#   - script: |
-#       .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target daal --backend-config ref --conda-env ci-env --blas-dir $(OPENBLAS_CACHE_DIR)
-#     displayName: 'make daal'
-#   - script: |
-#       .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target onedal_c --backend-config ref --blas-dir $(OPENBLAS_CACHE_DIR)
-#     displayName: 'make onedal_c'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) OpenBLAS build'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Upload build artifacts'
-#     continueOnError: true
-#   - script: |
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface daal/cpp --build-system cmake --backend ref
-#     displayName: 'daal/cpp examples'
-#   - script: |
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface oneapi/cpp --build-system cmake --backend ref
-#     displayName: 'oneapi/cpp examples'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) fail'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Uploading on fail'
-#     condition: failed()
-#     continueOnError: true
+- job: 'LinuxMakeGNU_OpenBLAS_x86'
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx_gnu'
+    platform.type : 'lnx32e'
+    OPENBLAS_CACHE_DIR: $(Pipeline.Workspace)/openblas-x86_64-gnu
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  steps:
+  - script: |
+      .ci/env/apt.sh dev-base
+    displayName: 'apt-get and conda install'
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - task: Cache@2
+    inputs:
+      key: '"gcc" | "x86_64" | "openblas" | "$(OPENBLAS_VERSION)" | "$(VM_IMAGE)" | "ILP64"'
+      path: $(OPENBLAS_CACHE_DIR)
+      cacheHitVar: OPENBLAS_RESTORED
+  - script: |
+      .ci/env/openblas.sh --target-arch x86_64 --prefix $(OPENBLAS_CACHE_DIR) --version $(OPENBLAS_VERSION)
+    displayName: 'Build OpenBLAS'
+    condition: ne(variables.OPENBLAS_RESTORED, 'true')
+  - script: |
+      .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target daal --backend-config ref --conda-env ci-env --blas-dir $(OPENBLAS_CACHE_DIR)
+    displayName: 'make daal'
+  - script: |
+      .ci/scripts/build.sh --compiler gnu --optimizations avx2 --target onedal_c --backend-config ref --blas-dir $(OPENBLAS_CACHE_DIR)
+    displayName: 'make onedal_c'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) OpenBLAS build'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Upload build artifacts'
+    continueOnError: true
+  - script: |
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface daal/cpp --build-system cmake --backend ref
+    displayName: 'daal/cpp examples'
+  - script: |
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler gnu --interface oneapi/cpp --build-system cmake --backend ref
+    displayName: 'oneapi/cpp examples'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) fail'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Uploading on fail'
+    condition: failed()
+    continueOnError: true
 
-# - job: 'LinuxMakeDPCPP'
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx'
-#     platform.type : 'lnx32e'
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   steps:
-#   - script: |
-#       .ci/env/apt.sh dev-base
-#     displayName: 'apt-get and conda install'
-#   - script: |
-#       .ci/env/apt.sh dpcpp
-#     displayName: 'dpcpp installation'
-#   - script: |
-#       .ci/env/apt.sh mkl
-#     displayName: 'mkl installation'
-#   - script: |
-#       source /opt/intel/oneapi/compiler/latest/env/vars.sh
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - script: |
-#       source /opt/intel/oneapi/compiler/latest/env/vars.sh
-#       .ci/scripts/build.sh --compiler icx --optimizations avx2 --target daal --conda-env ci-env
-#     displayName: 'make daal'
-#   - script: |
-#       source /opt/intel/oneapi/compiler/latest/env/vars.sh
-#       .ci/scripts/build.sh --compiler icx --optimizations avx2 --target onedal_dpc
-#     displayName: 'make onedal_dpc'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) DPC build'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Upload build artifacts'
-#     continueOnError: true
-#   - script: |
-#       source /opt/intel/oneapi/compiler/latest/env/vars.sh
-#       source /opt/intel/oneapi/mkl/latest/env/vars.sh
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface daal/cpp --build-system cmake
-#     displayName: 'daal/cpp examples'
-#   - script: |
-#       source /opt/intel/oneapi/compiler/latest/env/vars.sh
-#       source /opt/intel/oneapi/mkl/latest/env/vars.sh
-#       .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface oneapi/cpp --build-system cmake
-#     displayName: 'oneapi/cpp examples'
-#   - script: |
-#       source /opt/intel/oneapi/compiler/latest/env/vars.sh
-#       source /opt/intel/oneapi/mkl/latest/env/vars.sh
-#       .ci/scripts/test.sh --test-kind samples --build-dir $(release.dir) --compiler gnu --interface daal/cpp/mpi --conda-env ci-env --build-system cmake
-#     displayName: 'daal/cpp/mpi samples'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) DPC fail'
-#       targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
-#     displayName: 'Uploading on fail'
-#     condition: failed()
+- job: 'LinuxMakeDPCPP'
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx'
+    platform.type : 'lnx32e'
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  steps:
+  - script: |
+      .ci/env/apt.sh dev-base
+    displayName: 'apt-get and conda install'
+  - script: |
+      .ci/env/apt.sh dpcpp
+    displayName: 'dpcpp installation'
+  - script: |
+      .ci/env/apt.sh mkl
+    displayName: 'mkl installation'
+  - script: |
+      source /opt/intel/oneapi/compiler/latest/env/vars.sh
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - script: |
+      source /opt/intel/oneapi/compiler/latest/env/vars.sh
+      .ci/scripts/build.sh --compiler icx --optimizations avx2 --target daal --conda-env ci-env
+    displayName: 'make daal'
+  - script: |
+      source /opt/intel/oneapi/compiler/latest/env/vars.sh
+      .ci/scripts/build.sh --compiler icx --optimizations avx2 --target onedal_dpc
+    displayName: 'make onedal_dpc'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) DPC build'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Upload build artifacts'
+    continueOnError: true
+  - script: |
+      source /opt/intel/oneapi/compiler/latest/env/vars.sh
+      source /opt/intel/oneapi/mkl/latest/env/vars.sh
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface daal/cpp --build-system cmake
+    displayName: 'daal/cpp examples'
+  - script: |
+      source /opt/intel/oneapi/compiler/latest/env/vars.sh
+      source /opt/intel/oneapi/mkl/latest/env/vars.sh
+      .ci/scripts/test.sh --test-kind examples --build-dir $(release.dir) --compiler clang --interface oneapi/cpp --build-system cmake
+    displayName: 'oneapi/cpp examples'
+  - script: |
+      source /opt/intel/oneapi/compiler/latest/env/vars.sh
+      source /opt/intel/oneapi/mkl/latest/env/vars.sh
+      .ci/scripts/test.sh --test-kind samples --build-dir $(release.dir) --compiler gnu --interface daal/cpp/mpi --conda-env ci-env --build-system cmake
+    displayName: 'daal/cpp/mpi samples'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) DPC fail'
+      targetPath: '$(Build.Repository.LocalPath)/$(release.dir)'
+    displayName: 'Uploading on fail'
+    condition: failed()
 
-# - job: 'LinuxBazel'
-#   timeoutInMinutes: 0
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   variables:
-#     platform.type : 'lnx32e'
-#     BAZEL_CACHE_DIR: $(Pipeline.Workspace)/.bazel-cache
-#     BAZEL_VERSION: $(Pipeline.Workspace)/bazelisk-linux-amd64
-#     BAZEL_CACHE_MAX_SIZE_KB: 4194304 # Size in kilobytes ~ 4Gb
-#   steps:
-#   - script: |
-#       # sourcing done to set bazel version value from script
-#       source .ci/env/bazelisk.sh
-#       echo "##vso[task.setvariable variable=BAZEL_VERSION]${BAZEL_VERSION}"
-#     displayName: 'install-bazel'
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - task: Cache@2
-#     inputs:
-#       # Commit ID is added to a cache key. Caches are immutable by design,
-#       # so we always need to change a key to upload the last version
-#       # of the Bazel cache. Cache lookup is based on `restoreKeys` option.
-#       key: '"$(BAZEL_VERSION)" | "$(Agent.OS)" | "v1" | "$(Build.SourceVersion)"'
-#       restoreKeys: |
-#         "$(BAZEL_VERSION)" | "$(Agent.OS)" | "v1"
-#       path: $(BAZEL_CACHE_DIR)
-#     displayName: 'bazel-cache'
+- job: 'LinuxBazel'
+  timeoutInMinutes: 0
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  variables:
+    platform.type : 'lnx32e'
+    BAZEL_CACHE_DIR: $(Pipeline.Workspace)/.bazel-cache
+    BAZEL_VERSION: $(Pipeline.Workspace)/bazelisk-linux-amd64
+    BAZEL_CACHE_MAX_SIZE_KB: 4194304 # Size in kilobytes ~ 4Gb
+  steps:
+  - script: |
+      # sourcing done to set bazel version value from script
+      source .ci/env/bazelisk.sh
+      echo "##vso[task.setvariable variable=BAZEL_VERSION]${BAZEL_VERSION}"
+    displayName: 'install-bazel'
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - task: Cache@2
+    inputs:
+      # Commit ID is added to a cache key. Caches are immutable by design,
+      # so we always need to change a key to upload the last version
+      # of the Bazel cache. Cache lookup is based on `restoreKeys` option.
+      key: '"$(BAZEL_VERSION)" | "$(Agent.OS)" | "v1" | "$(Build.SourceVersion)"'
+      restoreKeys: |
+        "$(BAZEL_VERSION)" | "$(Agent.OS)" | "v1"
+      path: $(BAZEL_CACHE_DIR)
+    displayName: 'bazel-cache'
 
-#   - script: |
-#       if [ -f "${BAZEL_CACHE_DIR}/cache-clean-timestamp" ]; then
-#         echo
-#         echo "Last time the cache is cleaned:"
-#         cat "${BAZEL_CACHE_DIR}/cache-clean-timestamp"
-#         echo
-#       fi
+  - script: |
+      if [ -f "${BAZEL_CACHE_DIR}/cache-clean-timestamp" ]; then
+        echo
+        echo "Last time the cache is cleaned:"
+        cat "${BAZEL_CACHE_DIR}/cache-clean-timestamp"
+        echo
+      fi
 
-#       # Create `.bazelrc` and set cache directory
-#       # Minimal CPU instruction set in Azure is AVX2
-#       echo "build --disk_cache=$(BAZEL_CACHE_DIR) --cpu=avx2" > ~/.bazelrc
+      # Create `.bazelrc` and set cache directory
+      # Minimal CPU instruction set in Azure is AVX2
+      echo "build --disk_cache=$(BAZEL_CACHE_DIR) --cpu=avx2" > ~/.bazelrc
 
-#       # Display oneDAL build configuration
-#       bazel build @config//:dump
-#       echo
-#       cat bazel-bin/external/config/config.json
-#       echo
-#     displayName: 'bazel-configure'
-#   - script: |
-#       bazel build :release
-#     displayName: 'release'
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) Bazel build'
-#       targetPath: '$(Build.Repository.LocalPath)/bazel-bin/release'
-#     displayName: 'Upload build artifacts'
-#   - script: |
-#       bazel test //examples/oneapi/cpp:all \
-#         --test_link_mode=dev \
-#         --test_thread_mode=par
-#     displayName: 'cpp-examples-thread-dev'
+      # Display oneDAL build configuration
+      bazel build @config//:dump
+      echo
+      cat bazel-bin/external/config/config.json
+      echo
+    displayName: 'bazel-configure'
+  - script: |
+      bazel build :release
+    displayName: 'release'
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) Bazel build'
+      targetPath: '$(Build.Repository.LocalPath)/bazel-bin/release'
+    displayName: 'Upload build artifacts'
+  - script: |
+      bazel test //examples/oneapi/cpp:all \
+        --test_link_mode=dev \
+        --test_thread_mode=par
+    displayName: 'cpp-examples-thread-dev'
 
-#   - script: |
-#       export DALROOT=`pwd`/bazel-bin/release/daal/latest
-#       bazel test //examples/oneapi/cpp:all \
-#         --test_link_mode=release_static \
-#         --test_thread_mode=par
-#     displayName: 'cpp-examples-thread-release-static'
+  - script: |
+      export DALROOT=`pwd`/bazel-bin/release/daal/latest
+      bazel test //examples/oneapi/cpp:all \
+        --test_link_mode=release_static \
+        --test_thread_mode=par
+    displayName: 'cpp-examples-thread-release-static'
 
-#   # The issue that bazel doesnt link MKL libs via -Wl, --start-group..
-#   # oneDAL make build pass this test
-#   # TODO: add cycle linking in bazel
-#   # - script: |
-#   #     export DALROOT=`pwd`/bazel-bin/release/daal/latest
-#   #     bazel test //examples/oneapi/cpp:all \
-#   #       --test_link_mode=release_dynamic \
-#   #       --test_thread_mode=par
-#   #   displayName: 'cpp-examples-thread-release-dynamic'
+  # The issue that bazel doesnt link MKL libs via -Wl, --start-group..
+  # oneDAL make build pass this test
+  # TODO: add cycle linking in bazel
+  # - script: |
+  #     export DALROOT=`pwd`/bazel-bin/release/daal/latest
+  #     bazel test //examples/oneapi/cpp:all \
+  #       --test_link_mode=release_dynamic \
+  #       --test_thread_mode=par
+  #   displayName: 'cpp-examples-thread-release-dynamic'
 
-#   - script: |
-#       bazel test //cpp/daal:tests
-#     displayName: 'daal-tests-algorithms'
+  - script: |
+      bazel test //cpp/daal:tests
+    displayName: 'daal-tests-algorithms'
 
-#   - script: |
-#       bazel test //cpp/oneapi/dal:tests \
-#         --config=host \
-#         --test_link_mode=dev \
-#         --test_thread_mode=par
-#     displayName: 'cpp-tests-thread-dev'
+  - script: |
+      bazel test //cpp/oneapi/dal:tests \
+        --config=host \
+        --test_link_mode=dev \
+        --test_thread_mode=par
+    displayName: 'cpp-tests-thread-dev'
 
-#   - script: |
-#       # Clear cache if its size exceeds some predefined value
-#       cache_size=$(du -sk "${BAZEL_CACHE_DIR}" | cut -f1)
-#       cache_size_mb=$(du -sm "${BAZEL_CACHE_DIR}" | cut -f1)
-#       echo "Bazel cache dir is ${BAZEL_CACHE_DIR}"
-#       echo "Bazel cache size is ${cache_size_mb}Mb"
-#       if [ ${cache_size} -ge ${BAZEL_CACHE_MAX_SIZE_KB} ]; then
-#         echo "Run cache cleanup..."
-#         echo "Current cache directory content:"
-#         ls -1 "${BAZEL_CACHE_DIR}"
-#         echo "--------------------------------"
-#         echo "Run bazel clean with expunge"
-#         echo "Remove cache directory"
-#         rm -r "${BAZEL_CACHE_DIR}"
-#         mkdir -p "${BAZEL_CACHE_DIR}"
-#         echo "Write timestamp to the cache"
-#         date > "${BAZEL_CACHE_DIR}/cache-clean-timestamp"
-#         echo "Current cache directory content:"
-#         ls -1 "${BAZEL_CACHE_DIR}"
-#         echo "--------------------------------"
-#       else
-#         echo "No need for cleanup"
-#       fi
-#     displayName: 'bazel-cache-limit'
+  - script: |
+      # Clear cache if its size exceeds some predefined value
+      cache_size=$(du -sk "${BAZEL_CACHE_DIR}" | cut -f1)
+      cache_size_mb=$(du -sm "${BAZEL_CACHE_DIR}" | cut -f1)
+      echo "Bazel cache dir is ${BAZEL_CACHE_DIR}"
+      echo "Bazel cache size is ${cache_size_mb}Mb"
+      if [ ${cache_size} -ge ${BAZEL_CACHE_MAX_SIZE_KB} ]; then
+        echo "Run cache cleanup..."
+        echo "Current cache directory content:"
+        ls -1 "${BAZEL_CACHE_DIR}"
+        echo "--------------------------------"
+        echo "Run bazel clean with expunge"
+        echo "Remove cache directory"
+        rm -r "${BAZEL_CACHE_DIR}"
+        mkdir -p "${BAZEL_CACHE_DIR}"
+        echo "Write timestamp to the cache"
+        date > "${BAZEL_CACHE_DIR}/cache-clean-timestamp"
+        echo "Current cache directory content:"
+        ls -1 "${BAZEL_CACHE_DIR}"
+        echo "--------------------------------"
+      else
+        echo "No need for cleanup"
+      fi
+    displayName: 'bazel-cache-limit'
 
-# - job: LinuxSklearnex
-#   dependsOn: LinuxMakeGNU_MKL
-#   timeoutInMinutes: 0
-#   variables:
-#     release.dir: '__release_lnx_gnu'
-#     platform.type : 'lnx32e'
-#   pool:
-#     vmImage: '$(VM_IMAGE)'
-#   maxParallel: 2
-#   strategy:
-#     matrix:
-#       Python311:
-#         python.version: '3.11'
-#   steps:
-#   - task: DownloadPipelineArtifact@2
-#     inputs:
-#       artifact: '$(platform.type) build'
-#       path: $(Pipeline.Workspace)
-#   - script: .ci/env/apt.sh dev-base
-#     displayName: 'apt-get'
-#   - script: |
-#       .ci/scripts/describe_system.sh
-#     displayName: 'System info'
-#   - script: |
-#       conda update -y -q conda
-#       conda create -q -y -n CB -c conda-forge python=$(python.version) tbb mpich
-#     displayName: 'Conda create'
-#   - script: |
-#       git clone https://github.com/intel/scikit-learn-intelex.git sklearnex
-#     displayName: Clone sklearnex
-#   - script: |
-#       source /usr/share/miniconda/etc/profile.d/conda.sh
-#       conda activate CB
-#       pip install -r sklearnex/dependencies-dev
-#       pip install -r sklearnex/requirements-test.txt
-#     displayName: Create python environment
-#   - script: |
-#       source /usr/share/miniconda/etc/profile.d/conda.sh
-#       conda activate CB
-#       export DALROOT=$(Pipeline.Workspace)/daal/latest
-#       source ${DALROOT}/env/vars.sh
-#       export PREFIX=${CONDA_PREFIX}
-#       cd sklearnex
-#       export PYTHON=python
-#       ./conda-recipe/build.sh
-#     displayName: sklearnex build
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) sklearnex build'
-#       targetPath: '$(Build.Repository.LocalPath)/sklearnex'
-#     displayName: 'Upload sklearnex build artifacts'
-#     continueOnError: true
-#   - script: |
-#       source /usr/share/miniconda/etc/profile.d/conda.sh
-#       conda activate CB
-#       source $(Pipeline.Workspace)/daal/latest/env/vars.sh
-#       ./sklearnex/conda-recipe/run_test.sh
-#     timeoutInMinutes: 15
-#     displayName: sklearnex test
-#   - script: |
-#       source /usr/share/miniconda/etc/profile.d/conda.sh
-#       conda activate CB
-#       source $(Pipeline.Workspace)/daal/latest/env/vars.sh
-#       ret_code=0
-#       python -m sklearnex sklearnex/tests/run_examples.py
-#       ret_code=$(($ret_code + $?))
-#       exit $ret_code
-#     displayName: sklearnex examples
-#   - script: |
-#       source /usr/share/miniconda/etc/profile.d/conda.sh
-#       conda activate CB
-#       source $(Pipeline.Workspace)/daal/latest/env/vars.sh
-#       $(Pipeline.Workspace)/s/.ci/scripts/conformance_test.sh --build-dir $(Pipeline.Workspace) --conda-dir $HOME/miniconda --oneapi-dir /opt/intel/oneapi
-#     timeoutInMinutes: 15
-#     displayName: Conformance tests
-#   - task: PublishPipelineArtifact@1
-#     inputs:
-#       artifactName: '$(platform.type) conformance tests report'
-#       targetPath: '$(Pipeline.Workspace)/s/.ci/scripts/conformance-scripts/'
-#     displayName: 'Upload conformance tests artifacts'
-#     continueOnError: true
+- job: LinuxSklearnex
+  dependsOn: LinuxMakeGNU_MKL
+  timeoutInMinutes: 0
+  variables:
+    release.dir: '__release_lnx_gnu'
+    platform.type : 'lnx32e'
+  pool:
+    vmImage: '$(VM_IMAGE)'
+  maxParallel: 2
+  strategy:
+    matrix:
+      Python311:
+        python.version: '3.11'
+  steps:
+  - task: DownloadPipelineArtifact@2
+    inputs:
+      artifact: '$(platform.type) build'
+      path: $(Pipeline.Workspace)
+  - script: .ci/env/apt.sh dev-base
+    displayName: 'apt-get'
+  - script: |
+      .ci/scripts/describe_system.sh
+    displayName: 'System info'
+  - script: |
+      conda update -y -q conda
+      conda create -q -y -n CB -c conda-forge python=$(python.version) tbb mpich
+    displayName: 'Conda create'
+  - script: |
+      git clone https://github.com/intel/scikit-learn-intelex.git sklearnex
+    displayName: Clone sklearnex
+  - script: |
+      source /usr/share/miniconda/etc/profile.d/conda.sh
+      conda activate CB
+      pip install -r sklearnex/dependencies-dev
+      pip install -r sklearnex/requirements-test.txt
+    displayName: Create python environment
+  - script: |
+      source /usr/share/miniconda/etc/profile.d/conda.sh
+      conda activate CB
+      export DALROOT=$(Pipeline.Workspace)/daal/latest
+      source ${DALROOT}/env/vars.sh
+      export PREFIX=${CONDA_PREFIX}
+      cd sklearnex
+      export PYTHON=python
+      ./conda-recipe/build.sh
+    displayName: sklearnex build
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) sklearnex build'
+      targetPath: '$(Build.Repository.LocalPath)/sklearnex'
+    displayName: 'Upload sklearnex build artifacts'
+    continueOnError: true
+  - script: |
+      source /usr/share/miniconda/etc/profile.d/conda.sh
+      conda activate CB
+      source $(Pipeline.Workspace)/daal/latest/env/vars.sh
+      ./sklearnex/conda-recipe/run_test.sh
+    timeoutInMinutes: 15
+    displayName: sklearnex test
+  - script: |
+      source /usr/share/miniconda/etc/profile.d/conda.sh
+      conda activate CB
+      source $(Pipeline.Workspace)/daal/latest/env/vars.sh
+      ret_code=0
+      python -m sklearnex sklearnex/tests/run_examples.py
+      ret_code=$(($ret_code + $?))
+      exit $ret_code
+    displayName: sklearnex examples
+  - script: |
+      source /usr/share/miniconda/etc/profile.d/conda.sh
+      conda activate CB
+      source $(Pipeline.Workspace)/daal/latest/env/vars.sh
+      $(Pipeline.Workspace)/s/.ci/scripts/conformance_test.sh --build-dir $(Pipeline.Workspace) --conda-dir $HOME/miniconda --oneapi-dir /opt/intel/oneapi
+    timeoutInMinutes: 15
+    displayName: Conformance tests
+  - task: PublishPipelineArtifact@1
+    inputs:
+      artifactName: '$(platform.type) conformance tests report'
+      targetPath: '$(Pipeline.Workspace)/s/.ci/scripts/conformance-scripts/'
+    displayName: 'Upload conformance tests artifacts'
+    continueOnError: true
 
 - job: 'WindowsMakeVC'
   timeoutInMinutes: 0
diff --git a/.ci/scripts/install_windows.bat b/.ci/scripts/install_windows.bat
index 7a66a0b3ca5..a9c0ad23ccb 100644
--- a/.ci/scripts/install_windows.bat
+++ b/.ci/scripts/install_windows.bat
@@ -22,9 +22,9 @@ curl.exe --output %TEMP%\webimage.exe --url %URL% --retry 5 --retry-delay 5
 start /b /wait %TEMP%\webimage.exe -s -x -f webimage_extracted --log extract.log
 del %TEMP%\webimage.exe
 if "%COMPONENTS%"=="" (
-    webimage_extracted\bootstrapper.exe -s --action install --eula=accept --install-dir=C:\temp\oneapi\ -p=NEED_VS2017_INTEGRATION=1 -p=NEED_VS2019_INTEGRATION=1 -p=NEED_VS2022_INTEGRATION=1 --log-dir=.
+    webimage_extracted\bootstrapper.exe -s --action install --eula=accept --install-dir=C:\temp\oneapi\ -p=NEED_VS2017_INTEGRATION=0 -p=NEED_VS2019_INTEGRATION=0 -p=NEED_VS2022_INTEGRATION=0 --log-dir=.
 ) else (
-    webimage_extracted\bootstrapper.exe -s --action install --components=%COMPONENTS% --eula=accept --install-dir=C:\temp\oneapi\ -p=NEED_VS2017_INTEGRATION=1 -p=NEED_VS2019_INTEGRATION=1 -p=NEED_VS2022_INTEGRATION=1 --log-dir=.
+    webimage_extracted\bootstrapper.exe -s --action install --components=%COMPONENTS% --eula=accept --install-dir=C:\temp\oneapi\ -p=NEED_VS2017_INTEGRATION=0 -p=NEED_VS2019_INTEGRATION=0 -p=NEED_VS2022_INTEGRATION=0 --log-dir=.
 )
 set installer_exit_code=%ERRORLEVEL%
 rd /s/q "webimage_extracted"
diff --git a/cpp/daal/src/algorithms/pca/pca_partialresult_svd.h b/cpp/daal/src/algorithms/pca/pca_partialresult_svd.h
index 9686aa0582b..a4ffc43435a 100644
--- a/cpp/daal/src/algorithms/pca/pca_partialresult_svd.h
+++ b/cpp/daal/src/algorithms/pca/pca_partialresult_svd.h
@@ -42,7 +42,7 @@ namespace pca
  */
 template <typename algorithmFPType>
 DAAL_EXPORT services::Status PartialResult<svdDense>::allocate(const daal::algorithms::Input * input, const daal::algorithms::Parameter * parameter,
-                                                                       const int method)
+                                                               const int method)
 {
     services::Status s;
     set(nObservationsSVD, HomogenNumericTable<algorithmFPType>::create(1, 1, NumericTableIface::doAllocate, 0, &s));
@@ -56,7 +56,7 @@ DAAL_EXPORT services::Status PartialResult<svdDense>::allocate(const daal::algor
 
 template <typename algorithmFPType>
 DAAL_EXPORT services::Status PartialResult<svdDense>::initialize(const daal::algorithms::Input * input, const daal::algorithms::Parameter * parameter,
-                                                                         const int method)
+                                                                 const int method)
 {
     services::Status s;
     DAAL_CHECK_STATUS(s, get(nObservationsSVD)->assign((algorithmFPType)0.0))
diff --git a/cpp/daal/src/algorithms/pca/pca_partialresult_svd_fpt.cpp b/cpp/daal/src/algorithms/pca/pca_partialresult_svd_fpt.cpp
index e1caa54cfcd..25456e4dbb5 100644
--- a/cpp/daal/src/algorithms/pca/pca_partialresult_svd_fpt.cpp
+++ b/cpp/daal/src/algorithms/pca/pca_partialresult_svd_fpt.cpp
@@ -30,9 +30,10 @@ namespace algorithms
 namespace pca
 {
 template DAAL_EXPORT services::Status PartialResult<svdDense>::allocate<DAAL_FPTYPE>(const daal::algorithms::Input * input,
-                                                                                             const daal::algorithms::Parameter * parameter, const int method);
+                                                                                     const daal::algorithms::Parameter * parameter, const int method);
 
 template DAAL_EXPORT services::Status PartialResult<svdDense>::initialize<DAAL_FPTYPE>(const daal::algorithms::Input * input,
-                                                                                               const daal::algorithms::Parameter * parameter, const int method);
+                                                                                       const daal::algorithms::Parameter * parameter,
+                                                                                       const int method);
 } // namespace pca
 } // namespace algorithms