From a923b27f3865d7397b4c6156a5a39bec3cff89c7 Mon Sep 17 00:00:00 2001 From: Janusz Lisiecki Date: Thu, 28 Dec 2023 17:38:41 +0100 Subject: [PATCH] Split conda built into core and python bindings - builds DALI into a core that is independent of python version and just DALI with only python bindings that is Python version dependent Signed-off-by: Janusz Lisiecki --- CMakeLists.txt | 3 + conda/recipe/{build.sh => build_core.sh} | 62 +++-- conda/recipe/build_python.sh | 235 ++++++++++++++++++ conda/recipe/meta.yaml | 179 ++++++++----- dali/operators/python_function/CMakeLists.txt | 27 +- dali/python/CMakeLists.txt | 38 ++- 6 files changed, 434 insertions(+), 110 deletions(-) rename conda/recipe/{build.sh => build_core.sh} (88%) create mode 100644 conda/recipe/build_python.sh diff --git a/CMakeLists.txt b/CMakeLists.txt index cdf8ebacd61..a4e57f632d7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -196,6 +196,9 @@ if (BUILD_PYTHON) message(STATUS "Generating python stubs using interpreter: ${PYTHON_STUBGEN_INTERPRETER}") endif () +cmake_dependent_option(PREBUILD_DALI_LIBS "Use prebuild DALI libs to compile python bindings" ON + "BUILD_PYTHON" OFF) + if (STATIC_LIBS) message (STATUS "Building static libraries") set(LIBTYPE STATIC) diff --git a/conda/recipe/build.sh b/conda/recipe/build_core.sh similarity index 88% rename from conda/recipe/build.sh rename to conda/recipe/build_core.sh index 62b179989cc..8bbd10f9d54 100644 --- a/conda/recipe/build.sh +++ b/conda/recipe/build_core.sh @@ -71,7 +71,8 @@ cmake -DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda \ -DBUILD_TEST=${BUILD_TEST:-ON} \ -DBUILD_BENCHMARK=${BUILD_BENCHMARK:-ON} \ -DBUILD_NVTX=${BUILD_NVTX} \ - -DBUILD_PYTHON=${BUILD_PYTHON:-ON} \ + -DBUILD_PYTHON=OFF \ + -DPREBUILD_DALI_LIBS=OFF \ -DPYTHON_STUBGEN_INTERPRETER=${PYTHON} \ -DBUILD_LMDB=${BUILD_LMDB:-ON} \ -DBUILD_JPEG_TURBO=${BUILD_JPEG_TURBO:-ON} \ @@ -123,32 +124,17 @@ DEPS_LIST=( "lib/libnvcv_types.so.0" ) -DEPS_SONAME=( - "libavformat.so.60" - 
"libavcodec.so.60" - "libavfilter.so.9" - "libavutil.so.58" - "libswscale.so.7" - "libcvcuda.so.0" - "libnvcv_types.so.0" -) - if [ "$BUILD_NVCOMP" = "ON" ]; then DEPS_LIST+=( "${DEPS_PATH}/cuda/lib64/libnvcomp.so" "${DEPS_PATH}/cuda/lib64/libnvcomp_gdeflate.so" "${DEPS_PATH}/cuda/lib64/libnvcomp_bitcomp.so" ) - - DEPS_SONAME+=( - "libnvcomp.so" - "libnvcomp_gdeflate.so" - "libnvcomp_bitcomp.so" - ) fi -PKGNAME_PATH=dali/python/nvidia/dali/ -mkdir -p $PKGNAME_PATH/.libs +PKGNAME_PATH=$PWD/dali/python/nvidia/dali +DEPS_LIB_DST_PATH=$PKGNAME_PATH/dali_deps_libs +mkdir -p $DEPS_LIB_DST_PATH # copy needed dependent .so files and tag them with their hash original=() @@ -163,7 +149,7 @@ copy_and_patch() { return fi patchedname=$(fname_with_sha256 $filepath) - patchedpath=$PKGNAME_PATH/.libs/$patchedname + patchedpath=$DEPS_LIB_DST_PATH/$patchedname original+=("$filename") patched+=("$patchedname") @@ -210,6 +196,9 @@ sofile_list=() while IFS= read -r -d $'\0'; do sofile_list+=("$REPLY") done < <(find $PKGNAME_PATH -name '*.so*' -print0) +while IFS= read -r -d $'\0'; do + sofile_list+=("$REPLY") +done < <(find $DEPS_LIB_DST_PATH -name '*.so*' -print0) while IFS= read -r -d $'\0'; do sofile_list+=("$REPLY") done < <(find $PKGNAME_PATH -name '*.bin' -print0) @@ -220,22 +209,27 @@ done wait echo "Fixed hashed names" -# set RPATH of backend_impl.so and similar to $ORIGIN, $ORIGIN$UPDIRS, $ORIGIN$UPDIRS/.libs -PKGNAME_PATH=$PWD/dali/python/nvidia/dali +# set RPATH of backend_impl.so and similar to $ORIGIN, $ORIGIN$UPDIRS, $ORIGIN$UPDIRS/dali_deps_libs find $PKGNAME_PATH -type f -name "*.so*" -o -name "*.bin" | while read FILE; do UPDIRS=$(dirname $(echo "$FILE" | sed "s|$PKGNAME_PATH||") | sed 's/[^\/][^\/]*/../g') - echo "Setting rpath of $FILE to '\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/.libs'" - patchelf --set-rpath "\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/.libs" $FILE + echo "Setting rpath of $FILE to '\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/dali_deps_libs'" + patchelf 
--set-rpath "\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/dali_deps_libs" $FILE patchelf --print-rpath $FILE + if [[ "$FILE" == *".so"* ]]; then + cp $FILE $BUILD_PREFIX/lib/; + fi + if [[ "$FILE" == *"dali_deps_libs"* ]]; then + mkdir -p $BUILD_PREFIX/lib/dali_deps_libs/ + cp $FILE $BUILD_PREFIX/lib/dali_deps_libs/; + fi + if [[ "$FILE" == *".bin"* ]]; then + cp $FILE $BUILD_PREFIX/bin/; + fi done -# pip install -$PYTHON -m pip install --no-deps --ignore-installed -v dali/python - -# Build tensorflow plugin -export LD_LIBRARY_PATH="$PREFIX/libjpeg-turbo/lib:$PREFIX/lib:$LD_LIBRARY_PATH" -DALI_PATH=$($PYTHON -c 'import nvidia.dali as dali; import os; print(os.path.dirname(dali.__file__))') -echo "DALI_PATH is ${DALI_PATH}" - -# Move tfrecord2idx to host env so it can be found at runtime -cp $SRC_DIR/tools/tfrecord2idx $PREFIX/bin +# copy generated headers for the bindings build +find -iname *.pb.h | while read FILE; do + echo $FILE $BUILD_PREFIX/include/$FILE + mkdir -p $(dirname $BUILD_PREFIX/include/$FILE) + cp $FILE $BUILD_PREFIX/include/$FILE +done diff --git a/conda/recipe/build_python.sh b/conda/recipe/build_python.sh new file mode 100644 index 00000000000..ec53ceccf2e --- /dev/null +++ b/conda/recipe/build_python.sh @@ -0,0 +1,235 @@ +#!/bin/bash +# +# (C) Copyright IBM Corp. 2019. All Rights Reserved. +# (C) Copyright NVIDIA CORPORATION. 2019. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +#Determine Architecture + +ARCH="$(arch)" +if [ ${ARCH} = "x86_64" ]; then + ARCH_LONGNAME="x86_64-conda_cos6" +elif [ ${ARCH} = "ppc64le" ]; then + ARCH_LONGNAME="powerpc64le-conda_cos7" +else + echo "Error: Unsupported Architecture. Expected: [x86_64|ppc64le] Actual: ${ARCH}" + exit 1 +fi + +# Create 'gcc' and 'g++' symlinks so nvcc can find it +ln -s $CC $BUILD_PREFIX/bin/gcc +ln -s $CXX $BUILD_PREFIX/bin/g++ + +# Force -std=c++17 in CXXFLAGS +export CXXFLAGS=${CXXFLAGS/-std=c++??/-std=c++17} + +# For some reason `aligned_alloc` is present when we use compiler version 5.4.x +# Adding NO_ALIGNED_ALLOC definition for cutt +export CXXFLAGS="${CXXFLAGS} -DNO_ALIGNED_ALLOC" +export PATH=/usr/local/cuda/bin:${PATH} + +# For some reason `aligned_alloc` is present when we use compiler version 5.4.x +# Adding NO_ALIGNED_ALLOC definition for cutt +export CXXFLAGS="${CXXFLAGS} -DNO_ALIGNED_ALLOC" +export PATH=/usr/local/cuda/bin:${PATH} + +# make it on by default for CUDA 11.x +if [ "${CUDA_VERSION/./}" -ge 110 ]; then + export WITH_DYNAMIC_CUDA_TOOLKIT_DEFAULT=ON +else + export WITH_DYNAMIC_CUDA_TOOLKIT_DEFAULT=OFF +fi + + +export BUILD_NVCOMP=${BUILD_NVCOMP:-OFF} + +# Create build directory for cmake and enter it +mkdir $SRC_DIR/build +cd $SRC_DIR/build + +# allow DALI import all dependencies in the build env +export LD_LIBRARY_PATH="$PREFIX/libjpeg-turbo/lib:$PREFIX/lib:$LD_LIBRARY_PATH" + +# Build +cmake -DCUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda \ + -DCUDA_rt_LIBRARY=$BUILD_PREFIX/${ARCH_LONGNAME}-linux-gnu/sysroot/usr/lib/librt.so \ + -DCUDA_CUDA_LIBRARY=/usr/local/cuda/targets/${ARCH}-linux/lib/stubs/libcuda.so \ + -DCUDA_TARGET_ARCHS=${CUDA_TARGET_ARCHS} \ + -DNVJPEG_ROOT_DIR=/usr/local/cuda \ + -DFFMPEG_ROOT_DIR=$PREFIX/lib \ + -DCMAKE_PREFIX_PATH="$PREFIX/libjpeg-turbo;$PREFIX" \ + -DCMAKE_INSTALL_PREFIX=$PREFIX \ + -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-Release} \ + -DBUILD_TEST=${BUILD_TEST:-ON} \ + -DBUILD_BENCHMARK=${BUILD_BENCHMARK:-ON} \ + 
-DBUILD_NVTX=${BUILD_NVTX} \ + -DBUILD_PYTHON=ON \ + -DPREBUILD_DALI_LIBS=ON \ + -DPYTHON_STUBGEN_INTERPRETER=${PYTHON} \ + -DBUILD_LMDB=${BUILD_LMDB:-ON} \ + -DBUILD_JPEG_TURBO=${BUILD_JPEG_TURBO:-ON} \ + -DBUILD_OPENCV=${BUILD_OPENCV:-ON} \ + -DBUILD_PROTOBUF=${BUILD_PROTOBUF:-ON} \ + -DBUILD_NVJPEG=${BUILD_NVJPEG:-ON} \ + -DBUILD_NVJPEG2K=${BUILD_NVJPEG2K} \ + -DBUILD_LIBTIFF=${BUILD_LIBTIFF:-ON} \ + -DBUILD_LIBSND=${BUILD_LIBSND:-ON} \ + -DBUILD_LIBTAR=${BUILD_LIBTAR:-ON} \ + -DBUILD_FFTS=${BUILD_FFTS:-ON} \ + -DBUILD_NVOF=${BUILD_NVOF:-ON} \ + -DBUILD_NVDEC=${BUILD_NVDEC:-ON} \ + -DBUILD_NVML=${BUILD_NVML:-ON} \ + -DBUILD_CUFILE=${BUILD_CUFILE:-ON} \ + -DBUILD_NVCOMP=${BUILD_NVCOMP} \ + -DBUILD_CVCUDA=${BUILD_CVCUDA:-ON} \ + -DLINK_LIBCUDA=${LINK_LIBCUDA:-OFF} \ + -DWITH_DYNAMIC_CUDA_TOOLKIT=${WITH_DYNAMIC_CUDA_TOOLKIT:-${WITH_DYNAMIC_CUDA_TOOLKIT_DEFAULT}}\ + -DWITH_DYNAMIC_NVJPEG=${WITH_DYNAMIC_NVJPEG:-ON} \ + -DWITH_DYNAMIC_CUFFT=${WITH_DYNAMIC_CUFFT:-ON} \ + -DWITH_DYNAMIC_NPP=${WITH_DYNAMIC_NPP:-ON} \ + -DVERBOSE_LOGS=${VERBOSE_LOGS:-OFF} \ + -DWERROR=${WERROR:-ON} \ + -DBUILD_WITH_ASAN=${BUILD_WITH_ASAN:-OFF} \ + -DBUILD_WITH_LSAN=${BUILD_WITH_LSAN:-OFF} \ + -DBUILD_WITH_UBSAN=${BUILD_WITH_UBSAN:-OFF} \ + -DDALI_BUILD_FLAVOR=${NVIDIA_DALI_BUILD_FLAVOR} \ + -DTIMESTAMP=${DALI_TIMESTAMP} -DGIT_SHA=${GIT_SHA-${GIT_FULL_HASH}} \ + .. 
+make -j"$(nproc --all)" dali_python python_function_plugin dali_python_generate_stubs copy_post_build_target + +# # bundle FFmpeg to make sure DALI ships and uses own version +# fname_with_sha256() { +# HASH=$(sha256sum $1 | cut -c1-8) +# BASENAME=$(basename $1) +# INITNAME=$(echo $BASENAME | cut -f1 -d".") +# ENDNAME=$(echo $BASENAME | cut -f 2- -d".") +# echo "$INITNAME-$HASH.$ENDNAME" +# } + +# DEPS_LIST=( +# "$PREFIX/lib/libavformat.so.60" +# "$PREFIX/lib/libavcodec.so.60" +# "$PREFIX/lib/libavfilter.so.9" +# "$PREFIX/lib/libavutil.so.58" +# "$PREFIX/lib/libswscale.so.7" +# "lib/libcvcuda.so.0" +# "lib/libnvcv_types.so.0" +# ) + +# if [ "$BUILD_NVCOMP" = "ON" ]; then +# DEPS_LIST+=( +# "${DEPS_PATH}/cuda/lib64/libnvcomp.so" +# "${DEPS_PATH}/cuda/lib64/libnvcomp_gdeflate.so" +# "${DEPS_PATH}/cuda/lib64/libnvcomp_bitcomp.so" +# ) +# fi + +# PKGNAME_PATH=$BUILD_PREFIX/lib +# DEPS_LIB_DST_PATH=$PKGNAME_PATH/dali_deps_libs +# mkdir -p $DEPS_LIB_DST_PATH + +# # copy needed dependent .so files and tag them with their hash +# original=() +# patched=() + +# copy_and_patch() { +# local filepath=$1 +# filename=$(basename $filepath) + +# if [[ ! -f "$filepath" ]]; then +# echo "Didn't find $filename, skipping..." +# return +# fi +# patchedname=$(fname_with_sha256 $filepath) +# patchedpath=$DEPS_LIB_DST_PATH/$patchedname +# original+=("$filename") +# patched+=("$patchedname") + +# echo "Copying $filepath to $patchedpath" +# cp $filepath $patchedpath + +# echo "Patching DT_SONAME field in $patchedpath" +# patchelf --set-soname $patchedname $patchedpath & +# } + +# echo "Patching DT_SONAMEs..." 
+# for filepath in "${DEPS_LIST[@]}"; do +# copy_and_patch $filepath +# done +# wait +# echo "Patched DT_SONAMEs" + +# patch_hashed_names() { +# local sofile=$1 +# local patch_cmd="" +# needed_so_files=$(patchelf --print-needed $sofile) +# for ((j=0;j<${#original[@]};++j)); do +# origname=${original[j]} +# patchedname=${patched[j]} +# if [[ "$origname" != "$patchedname" ]]; then +# set +e +# echo $needed_so_files | grep $origname 2>&1 >/dev/null +# ERRCODE=$? +# set -e +# if [ "$ERRCODE" -eq "0" ]; then +# echo "patching $sofile entry $origname to $patchedname" +# patch_cmd="$patch_cmd --replace-needed $origname $patchedname" +# fi +# fi +# done +# if [ -n "$patch_cmd" ]; then +# echo "running $patch_cmd on $sofile" +# patchelf $patch_cmd $sofile +# fi +# } +# echo "Patching to fix the so names to the hashed names..." +# # get list of files to iterate over +# SRC_PKGNAME_PATH=$PWD/dali/python/nvidia/dali +# sofile_list=() +# while IFS= read -r -d $'\0'; do +# sofile_list+=("$REPLY") +# done < <(find $SRC_PKGNAME_PATH -name '*.so*' -print0) +# while IFS= read -r -d $'\0'; do +# sofile_list+=("$REPLY") +# done < <(find $SRC_PKGNAME_PATH -name '*.bin' -print0) +# for ((i=0;i<${#sofile_list[@]};++i)); do +# sofile=${sofile_list[i]} +# patch_hashed_names $sofile & +# done +# wait +# echo "Fixed hashed names" + +# # set RPATH of backend_impl.so and similar to $ORIGIN, $ORIGIN$UPDIRS, $ORIGIN$UPDIRS/dali_deps_libs +# find $SRC_PKGNAME_PATH -type f -name "*.so*" -o -name "*.bin" | while read FILE; do +# UPDIRS=$(dirname $(echo "$FILE" | sed "s|$SRC_PKGNAME_PATH||") | sed 's/[^\/][^\/]*/../g') +# echo "Setting rpath of $FILE to '\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/dali_deps_libs'" +# patchelf --set-rpath "\$ORIGIN:\$ORIGIN$UPDIRS:\$ORIGIN$UPDIRS/dali_deps_libs" $FILE +# patchelf --print-rpath $FILE +# if [ "$FILE" == *".so"* ]; then +# cp $FILE $PKGNAME_PATH; +# fi +# if [ "$FILE" == *".bin"* ]; then +# cp $FILE $BUILD_PREFIX/bin; +# fi +# done + +# pip install 
+$PYTHON -m pip install --no-deps --ignore-installed -v dali/python + +DALI_PATH=$($PYTHON -c 'import nvidia.dali as dali; import os; print(os.path.dirname(dali.__file__))') +echo "DALI_PATH is ${DALI_PATH}" + +# Move tfrecord2idx to host env so it can be found at runtime +cp $SRC_DIR/tools/tfrecord2idx $PREFIX/bin diff --git a/conda/recipe/meta.yaml b/conda/recipe/meta.yaml index 61024bfe0e2..0a560040ef7 100644 --- a/conda/recipe/meta.yaml +++ b/conda/recipe/meta.yaml @@ -14,7 +14,7 @@ # limitations under the License. package: - name: nvidia-dali-cuda{{ environ.get('CUDA_VERSION', '') | replace(".","") }} + name: nvidia-dali-cuda{{ environ.get('CUDA_VERSION', '') | replace(".","") }}-split version: {{ environ.get('DALI_CONDA_BUILD_VERSION', '') }} source: @@ -57,63 +57,122 @@ build: - LD_LIBRARY_PATH - DALI_CONDA_BUILD_VERSION - CUDA_VERSION - string: py{{ python | replace(".","") }}_{{ environ.get('NVIDIA_BUILD_ID', '') }} -requirements: - build: - skip: True # [not linux] - - {{ compiler('c') }} - - {{ compiler('cxx')}} - - pkg-config - - cmake >=3.12.4 - - make - - patchelf - - python-clang - - git-lfs - - astunparse >=1.6.0 - - gast >=0.3.3 - - dm-tree >=0.1.8 - - black =23.11.0 - - black-jupyter =23.11.0 - host: - - python - - future - - protobuf - - libprotobuf-static - - libjpeg-turbo - - dali-opencv - - dali-ffmpeg - - lmdb - - libtiff - - libsndfile - - libtar - - libvorbis =1.3.7 - # dali-opencv we that depends on libtiff also depends on libwebp-base (silently) - # we link it statically so it doesn't carry the dependency, so we need to add it manually - - libwebp-base - - openjpeg - - cfitsio - - astunparse >=1.6.0 - - gast >=0.3.3 - - dm-tree >=0.1.8 - run: - - python - - future - - libjpeg-turbo - - lmdb - - libtiff - - libsndfile - - libvorbis =1.3.7 - # dali-opencv we that depends on libtiff also depends on libwebp-base (silently) - # we link it statically so it doesn't carry the dependency, so we need to add it manually - - libwebp-base - # 
libprotobuf-static we link statically depends on libabseil so add protobuf here as a runtime
-    # dependency to install the right version on the libabseil (as protobuf depends on
-    # libprotobuf-static and a newer version of libprotobuf-static may be available than
-    # the protobuf was build with)
-    - protobuf
-    - openjpeg
-    - cfitsio
-    - astunparse >=1.6.0
-    - gast >=0.3.3
-    - dm-tree >=0.1.8
+outputs:
+  - name: nvidia-dali-core-cuda{{ environ.get('CUDA_VERSION', '') | replace(".","") }}
+    string: {{ environ.get('NVIDIA_BUILD_ID', '') }}
+    script: build_core.sh # [unix]
+    build:
+      run_exports:
+        - {{ pin_subpackage("nvidia-dali-core-cuda" ~ (environ.get("CUDA_VERSION", "") | replace(".","")), max_pin='x.x.x') }}
+    requirements:
+      build:
+        skip: True # [not linux]
+        - {{ compiler('c') }}
+        - {{ compiler('cxx')}}
+        - pkg-config
+        - cmake >=3.12.4
+        - make
+        - patchelf
+        - python-clang
+        - git-lfs
+        - astunparse >=1.6.0
+        - gast >=0.3.3
+      host:
+        - future
+        - protobuf
+        - libprotobuf-static
+        - libjpeg-turbo
+        - dali-opencv
+        - dali-ffmpeg
+        - lmdb
+        - libtiff
+        - libsndfile
+        - libtar
+        - libvorbis =1.3.7
+        # dali-opencv that depends on libtiff also depends on libwebp-base (silently)
+        # we link it statically so it doesn't carry the dependency, so we need to add it manually
+        - libwebp-base
+        - openjpeg
+        - cfitsio
+      run:
+        - future
+        - libjpeg-turbo
+        - lmdb
+        - libtiff
+        - libsndfile
+        - libvorbis =1.3.7
+        # dali-opencv that depends on libtiff also depends on libwebp-base (silently)
+        # we link it statically so it doesn't carry the dependency, so we need to add it manually
+        - libwebp-base
+        # libprotobuf-static we link statically depends on libabseil so add protobuf here as a runtime
+        # dependency to install the right version on the libabseil (as protobuf depends on
+        # libprotobuf-static and a newer version of libprotobuf-static may be available than
+        # the protobuf was build with)
+        - protobuf
+        - openjpeg
+        - cfitsio
+  - name: nvidia-dali-cuda{{ environ.get('CUDA_VERSION', '') | replace(".","") }}
+    string: py{{ python | replace(".","") }}_{{ environ.get('NVIDIA_BUILD_ID', '') }}
+    script: build_python.sh # [unix]
+    requirements:
+      build:
+        skip: True # [not linux]
+        - {{ compiler('c') }}
+        - {{ compiler('cxx')}}
+        - pkg-config
+        - cmake >=3.12.4
+        - make
+        - patchelf
+        - python-clang
+        - git-lfs
+        - astunparse >=1.6.0
+        - gast >=0.3.3
+        - dm-tree >=0.1.8
+        - black =23.11.0
+        - black-jupyter =23.11.0
+        - {{ pin_subpackage("nvidia-dali-core-cuda" ~ (environ.get("CUDA_VERSION", "") | replace(".","")), exact=True) }}
+      host:
+        - python
+        - future
+        - protobuf
+        - libprotobuf-static
+        - libjpeg-turbo
+        - dali-opencv
+        - dali-ffmpeg
+        - lmdb
+        - libtiff
+        - libsndfile
+        - libtar
+        - libvorbis =1.3.7
+        # dali-opencv that depends on libtiff also depends on libwebp-base (silently)
+        # we link it statically so it doesn't carry the dependency, so we need to add it manually
+        - libwebp-base
+        - openjpeg
+        - cfitsio
+        - astunparse >=1.6.0
+        - gast >=0.3.3
+        - dm-tree >=0.1.8
+        - {{ pin_subpackage("nvidia-dali-core-cuda" ~ (environ.get("CUDA_VERSION", "") | replace(".","")), exact=True) }}
+      run:
+        - python
+        - future
+        - libjpeg-turbo
+        - lmdb
+        - libtiff
+        - libsndfile
+        - libvorbis =1.3.7
+        # dali-opencv that depends on libtiff also depends on libwebp-base (silently)
+        # we link it statically so it doesn't carry the dependency, so we need to add it manually
+        - libwebp-base
+        # libprotobuf-static we link statically depends on libabseil so add protobuf here as a runtime
+        # dependency to install the right version on the libabseil (as protobuf depends on
+        # libprotobuf-static and a newer version of libprotobuf-static may be available than
+        # the protobuf was build with)
+        - protobuf
+        - openjpeg
+        - cfitsio
+        - astunparse >=1.6.0
+        - gast >=0.3.3
+        - dm-tree >=0.1.8
+        - {{ pin_subpackage("nvidia-dali-core-cuda" ~ (environ.get("CUDA_VERSION", "") | replace(".","")), exact=True) }}
diff --git 
a/dali/operators/python_function/CMakeLists.txt b/dali/operators/python_function/CMakeLists.txt
index 79c5836f153..bdc02eb9636 100644
--- a/dali/operators/python_function/CMakeLists.txt
+++ b/dali/operators/python_function/CMakeLists.txt
@@ -16,10 +16,23 @@ collect_headers(DALI_INST_HDRS PARENT_SCOPE)
 collect_sources(PYTHON_FUNCTION_SRCS PARENT_SCOPE)
 collect_test_sources(DALI_OPERATOR_TEST_SRCS PARENT_SCOPE)
 
-build_per_python_lib(${dali_python_function_lib}
-                     OUTPUT_NAME ${dali_python_function_lib}
-                     OUTPUT_DIR ${DALI_LIBRARY_OUTPUT_DIR}
-                     PUBLIC_LIBS dali
-                     PRIV_LIBS ${DALI_LIBS}
-                     EXCLUDE_LIBS ${exclude_libs}
-                     SRC ${PYTHON_FUNCTION_SRCS})
+
+if (PREBUILD_DALI_LIBS)
+  # find prebuilt DALI libs
+  find_library(PREBUILD_DALI_LIB NAMES dali)
+  build_per_python_lib(${dali_python_function_lib}
+                       OUTPUT_NAME ${dali_python_function_lib}
+                       OUTPUT_DIR ${DALI_LIBRARY_OUTPUT_DIR}
+                       PUBLIC_LIBS ${PREBUILD_DALI_LIB}
+                       PRIV_LIBS ${DALI_LIBS}
+                       EXCLUDE_LIBS ${exclude_libs}
+                       SRC ${PYTHON_FUNCTION_SRCS})
+else()
+  build_per_python_lib(${dali_python_function_lib}
+                       OUTPUT_NAME ${dali_python_function_lib}
+                       OUTPUT_DIR ${DALI_LIBRARY_OUTPUT_DIR}
+                       PUBLIC_LIBS dali
+                       PRIV_LIBS ${DALI_LIBS}
+                       EXCLUDE_LIBS ${exclude_libs}
+                       SRC ${PYTHON_FUNCTION_SRCS})
+endif()
diff --git a/dali/python/CMakeLists.txt b/dali/python/CMakeLists.txt
index 1057de1069a..cfdb002c60c 100644
--- a/dali/python/CMakeLists.txt
+++ b/dali/python/CMakeLists.txt
@@ -16,13 +16,28 @@ collect_headers(DALI_INST_HDRS PARENT_SCOPE)
 
 collect_sources(DALI_PYTHON_BACKEND_SRCS)
 
-build_per_python_lib(dali_python
-                     OUTPUT_NAME backend_impl
-                     OUTPUT_DIR ${DALI_LIBRARY_OUTPUT_DIR}
-                     PUBLIC_LIBS dali dali_operators dali_kernels dali_core ${CUDART_LIB}
-                     PRIV_LIBS ${CUDA_LIBRARIES} dynlink_cuda
-                     EXCLUDE_LIBS ${exclude_libs}
-                     SRC ${DALI_PYTHON_BACKEND_SRCS})
+if (PREBUILD_DALI_LIBS)
+  # find prebuilt DALI libs
+  find_library(PREBUILD_DALI_LIB NAMES dali)
+  find_library(PREBUILD_DALI_OPERATORS_LIB NAMES dali_operators)
+  find_library(PREBUILD_DALI_KERNELS_LIB NAMES dali_kernels)
+  find_library(PREBUILD_DALI_CORE_LIB NAMES dali_core)
+  build_per_python_lib(dali_python
+                       OUTPUT_NAME backend_impl
+                       OUTPUT_DIR ${DALI_LIBRARY_OUTPUT_DIR}
+                       PUBLIC_LIBS ${PREBUILD_DALI_LIB} ${PREBUILD_DALI_OPERATORS_LIB} ${PREBUILD_DALI_KERNELS_LIB} ${PREBUILD_DALI_CORE_LIB} ${CUDART_LIB}
+                       PRIV_LIBS ${CUDA_LIBRARIES} dynlink_cuda
+                       EXCLUDE_LIBS ${exclude_libs}
+                       SRC ${DALI_PYTHON_BACKEND_SRCS})
+else()
+  build_per_python_lib(dali_python
+                       OUTPUT_NAME backend_impl
+                       OUTPUT_DIR ${DALI_LIBRARY_OUTPUT_DIR}
+                       PUBLIC_LIBS dali dali_operators dali_kernels dali_core ${CUDART_LIB}
+                       PRIV_LIBS ${CUDA_LIBRARIES} dynlink_cuda
+                       EXCLUDE_LIBS ${exclude_libs}
+                       SRC ${DALI_PYTHON_BACKEND_SRCS})
+endif()
 
 if (DALI_BUILD_FLAVOR)
   set(DALI_FLAVOR "${DALI_BUILD_FLAVOR} ")
@@ -53,8 +68,13 @@ if(NOT ${CMAKE_CROSSCOMPILING})
     set(PYTHONPATH "${PYTHON_TARGET_PATH}")
   endif()
 
-  add_custom_target(dali_python_generate_stubs ALL
-                    DEPENDS dali_python ${dali_python_function_lib} dali dali_operators copy_post_build_target)
+  if (PREBUILD_DALI_LIBS)
+    add_custom_target(dali_python_generate_stubs ALL
+                      DEPENDS dali_python ${dali_python_function_lib})
+  else()
+    add_custom_target(dali_python_generate_stubs ALL
+                      DEPENDS dali_python ${dali_python_function_lib} dali dali_operators copy_post_build_target)
+  endif()
 
   # Build the .pyi stubs, adjusting the PYTHONPATH for the invokation, allowing to use the
   # backend from the current build.