diff --git a/.github/workflows/android_cmake.yml b/.github/workflows/android_cmake.yml
deleted file mode 100644
index 2424254339c7..000000000000
--- a/.github/workflows/android_cmake.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Android CMake build
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-    branches-ignore:
-      - 'backport**'
-      - 'dependabot**'
-  pull_request:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-
-  android_cmake_build:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Cache
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
-        id: cache
-        with:
-          path: |
-            ${{ github.workspace }}/ccache.tar.gz
-          key: ${{ runner.os }}-cache-android-cmake-${{ github.run_id }}
-          restore-keys: ${{ runner.os }}-cache-android-cmake-
-
-      - name: Build
-        run: docker run -e WORK_DIR="$PWD" -v $PWD:$PWD ubuntu:20.04 $PWD/.github/workflows/android_cmake/start.sh
diff --git a/.github/workflows/auto_tag_stable.yml b/.github/workflows/auto_tag_stable.yml
deleted file mode 100644
index 71f005635758..000000000000
--- a/.github/workflows/auto_tag_stable.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-name: Update Stable Tag
-
-on:
-  push:
-    branches:
-      - '**' # Matches all branches, but we later filter on the one matching the STABLE_BRANCH repository variable
-
-permissions:
-  contents: read
-
-jobs:
-  update-stable-tag:
-    runs-on: ubuntu-latest
-    if: github.repository == 'OSGeo/GDAL'
-    permissions:
-      contents: write
-    steps:
-      - name: Check branch match
-        id: check_branch
-        env:
-          STABLE_BRANCH: ${{ vars.STABLE_BRANCH }} # Repository variable
-        run: |
-          echo "Push detected on branch $GITHUB_REF"
-          if [[ "${GITHUB_REF#refs/heads/}" != "${STABLE_BRANCH}" ]]; then
-            echo "This workflow only runs for branch $STABLE_BRANCH. Skipping further steps."
- echo "run=false" >> $GITHUB_OUTPUT - else - echo "run=true" >> $GITHUB_OUTPUT - fi - - - name: Checkout code - if: steps.check_branch.outputs.run == 'true' - uses: actions/checkout@v4 - - - name: Tag - if: steps.check_branch.outputs.run == 'true' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - git config --global user.email "GDAL-bot@example.com" - git config --global user.name "GDAL-bot" - touch .dummy-file - git add .dummy-file - # Do that so that stable doesn't have the same git sha as the stable branch, so ReadTheDocs triggers a build - git commit -a -m "Add .dummy-file" - git checkout -b stable - git push -f origin stable diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml deleted file mode 100644 index f04f0a2f8976..000000000000 --- a/.github/workflows/backport.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Backport -on: - pull_request_target: - types: - - closed - - labeled - -permissions: {} -jobs: - backport: - runs-on: ubuntu-latest - name: Backport - steps: - - name: Backport Bot - id: backport - if: github.event.pull_request.merged && ( ( github.event.action == 'closed' && contains( join( github.event.pull_request.labels.*.name ), 'backport') ) || contains( github.event.label.name, 'backport' ) ) - uses: m-kuhn/backport@7f3cab83e4b3b26aefcffda21851c3dc3d389f45 # v1.2.7 - with: - github_token: ${{ secrets.BACKPORT_TOKEN }} diff --git a/.github/workflows/cifuzz.yml b/.github/workflows/cifuzz.yml deleted file mode 100644 index 218911710dd0..000000000000 --- a/.github/workflows/cifuzz.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: CIFuzz - -on: - pull_request: - paths-ignore: - - 'doc/**' - - 'docker/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - Fuzzing: - runs-on: ubuntu-latest - if: github.repository == 'OSGeo/gdal' - - steps: - - name: Build Fuzzers - id: build - uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@56f58e25299ee6559fa40369bd4af908000c0c36 # master on 2023/12/31 - with: - oss-fuzz-project-name: 'gdal' - dry-run: false - - name: Run Fuzzers - uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@56f58e25299ee6559fa40369bd4af908000c0c36 # master on 2023/12/31 - with: - oss-fuzz-project-name: 'gdal' - fuzz-seconds: 600 - dry-run: false - - name: Upload Crash - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 - if: failure() && steps.build.outcome == 'success' - with: - name: artifacts - path: ./out/artifacts diff --git a/.github/workflows/clang_static_analyzer.yml b/.github/workflows/clang_static_analyzer.yml deleted file mode 100644 index 070b99a4f24b..000000000000 --- a/.github/workflows/clang_static_analyzer.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Clang Static Analyzer - -on: - push: - paths-ignore: - - 'doc/**' - - 'docker/**' - branches-ignore: - - 'backport**' - - 'dependabot**' - pull_request: - paths-ignore: - - 'doc/**' - - 'docker/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - - clang_static_analyzer: - runs-on: ubuntu-24.04 - steps: - - name: Checkout - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - - - name: Run - run: docker run --rm -v $PWD:$PWD ubuntu:24.04 sh -c "cd $PWD && apt update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends sudo software-properties-common && DEBIAN_FRONTEND=noninteractive sh 
diff --git a/.github/workflows/cmake_builds.yml b/.github/workflows/cmake_builds.yml
index 938179c23dbd..ebc40a771007 100644
--- a/.github/workflows/cmake_builds.yml
+++ b/.github/workflows/cmake_builds.yml
@@ -28,568 +28,6 @@ permissions:
 
 jobs:
 
-  build-linux-ubuntu-focal:
-    runs-on: ubuntu-20.04
-    env:
-      CMAKE_OPTIONS: -DPython_LOOKUP_VERSION=3.8 -DUSE_CCACHE=ON -DSWIG_REGENERATE_PYTHON=ON
-      cache-name: cmake-ubuntu-focal
-    steps:
-      - name: Checkout GDAL
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - name: Setup cache
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
-        id: cache
-        with:
-          path: ${{ github.workspace }}/.ccache
-          key: ${{ runner.os }}-${{ env.cache-name }}-${{ github.base_ref }}${{ github.ref_name }}-${{ github.run_id }}
-          restore-keys: |
-            ${{ runner.os }}-${{ env.cache-name }}-${{ github.base_ref }}
-            ${{ runner.os }}-${{ env.cache-name }}
-      - name: Install dependency
-        run: |
-          sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable
-          sudo apt-get update
-          sudo apt-get install -y -q bison libaec-dev libjpeg-dev libgif-dev liblzma-dev libzstd-dev libgeos-dev git \
-              libcurl4-gnutls-dev libproj-dev libxml2-dev libxerces-c-dev libnetcdf-dev netcdf-bin \
-              libpoppler-dev libpoppler-private-dev gpsbabel libhdf4-alt-dev libhdf5-serial-dev libpodofo-dev poppler-utils \
-              libfreexl-dev unixodbc-dev libwebp-dev libepsilon-dev liblcms2-2 libcrypto++-dev libkml-dev \
-              libmysqlclient-dev libarmadillo-dev wget libfyba-dev libjsoncpp-dev libexpat1-dev \
-              libclc-dev ocl-icd-opencl-dev libsqlite3-dev sqlite3-pcre libpcre3-dev libspatialite-dev libsfcgal-dev fossil libcairo2-dev libjson-c-dev libdeflate-dev liblz4-dev libblosc-dev libarchive-dev \
-              libqhull-dev libcfitsio-dev libogdi-dev libopenjp2-7-dev libheif-dev \
-              python3-dev libpython3-dev libpython3.8-dev python3.8-dev python3-numpy python3-lxml pyflakes python3-setuptools python3-pip python3-venv \
-              python3-pytest swig doxygen texlive-latex-base make cppcheck ccache g++ \
-              libpq-dev libpqtypes-dev postgresql-12 postgresql-12-postgis-3 postgresql-client-12 postgresql-12-postgis-3-scripts
-          # MSSQL: client side
-          curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
-          curl https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
-          sudo apt-get update
-          sudo ACCEPT_EULA=Y apt-get install -y msodbcsql17 unixodbc-dev
-          # HANA: client side
-          # Install hdbsql tool
-          curl -v -j -k -s -L -H "Cookie: eula_3_2_agreed=tools.hana.ondemand.com/developer-license-3_2.txt" https://tools.hana.ondemand.com/additional/hanaclient-latest-linux-x64.tar.gz --output hanaclient-latest-linux-x64.tar.gz \
-            && tar -xvf hanaclient-latest-linux-x64.tar.gz \
-            && sudo mkdir /usr/sap \
-            && sudo ./client/hdbinst -a client --sapmnt=/usr/sap \
-            && rm -rf client \
-            && rm hanaclient*
-          export PATH=/usr/sap/hdbclient:$PATH
-          # Download and compile odbc-cpp-wrapper
-          sudo apt-get install -y -q cmake
-          wget https://github.com/SAP/odbc-cpp-wrapper/archive/refs/tags/v1.1.tar.gz -O odbc-cpp-wrapper.tar.gz \
-            && mkdir odbc-cpp-wrapper \
-            && tar -xvf odbc-cpp-wrapper.tar.gz -C odbc-cpp-wrapper --strip-components=1 \
-            && mkdir odbc-cpp-wrapper/build \
-            && cd odbc-cpp-wrapper/build \
-            && /usr/bin/cmake .. \
-            && make -j 2 \
-            && sudo make install \
-            && cd ../.. \
-            && rm -rf odbc-cpp-wrapper
-          sudo ldconfig
-          #
-          # Install Arrow C++
-          sudo apt-get install -y -V ca-certificates lsb-release wget
-          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
-          sudo apt-get install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
-          sudo apt-get update
-          sudo apt-get install -y -V libarrow-dev libparquet-dev libarrow-dataset-dev
-          #
-          # Workaround bug in ogdi packaging
-          sudo ln -s /usr/lib/ogdi/libvrf.so /usr/lib
-          #
-          python3 -m pip install -U pip wheel setuptools numpy importlib_metadata
-          python3 -m pip install -r $GITHUB_WORKSPACE/autotest/requirements.txt
-
-      - name: Build libjxl
-        run: |
-          # Build libjxl
-          # libjxl being still unstable, if the main branch fails to compile/test
-          # you can replace JXL_TREEISH=main by JXL_TREEISH=sha1_of_known_working_commit
-          JXL_TREEISH=main
-          git clone https://github.com/libjxl/libjxl.git --recursive \
-            && cd libjxl \
-            && git checkout ${JXL_TREEISH} \
-            && mkdir build \
-            && cd build \
-            && sudo apt-get install -y --no-install-recommends libgflags-dev libbrotli-dev \
-            && /usr/bin/cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=OFF -DJPEGXL_FORCE_SYSTEM_BROTLI:BOOL=ON -DJPEGXL_ENABLE_JPEGLI_LIBJPEG=OFF -DCMAKE_INSTALL_PREFIX=/usr .. \
-            && make -j$(nproc) \
-            && sudo make -j$(nproc) install \
-            && cd ../.. \
-            && rm -rf libjxl
-
-      - name: Build libQB3
-        run: |
-          # Build libQB3
-          # Used by the MRF driver
-          git clone https://github.com/lucianpls/QB3.git \
-            && mkdir QB3/QB3lib/build \
-            && cd QB3/QB3lib/build \
-            && /usr/bin/cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr .. \
-            && make -j$(nproc) \
-            && sudo make -j$(nproc) install \
-            && cd ../../.. \
-            && rm -rf QB3
-
-      - name: Build libbasisu
-        run: |
-          # Used by the BASISU and KTX2 drivers
-          git clone -b cmake https://github.com/rouault/basis_universal.git \
-            && mkdir basis_universal/build \
-            && cd basis_universal/build \
-            && /usr/bin/cmake -DBUILD_SHARED_LIBS=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr .. \
-            && make -j$(nproc) \
-            && sudo make -j$(nproc) install \
-            && cd ../.. \
-            && rm -rf basis_universal
-
-      - name: Install pdfium
-        run: |
-          wget -q https://github.com/rouault/pdfium_build_gdal_3_10/releases/download/pdfium_6677_v1/install-ubuntu2004-rev6677.tar.gz \
-            && tar -xzf install-ubuntu2004-rev6677.tar.gz \
-            && sudo chown -R root:root install \
-            && sudo mv install/lib/* /usr/lib/ \
-            && sudo mv install/include/* /usr/include/ \
-            && sudo rm -rf install-ubuntu2004-rev6677.tar.gz install \
-            && sudo apt-get update -y \
-            && sudo apt-get install -y --fix-missing --no-install-recommends liblcms2-dev
-      - name: Configure ccache
-        run: |
-          echo CCACHE_BASEDIR=$PWD >> ${GITHUB_ENV}
-          echo CCACHE_DIR=$PWD/.ccache >> ${GITHUB_ENV}
-          echo CCACHE_MAXSIZE=250M >> ${GITHUB_ENV}
-          ccache -z
-        working-directory: ${{ github.workspace }}
-      - name: Configure
-        run: |
-          export PATH=$CMAKE_DIR:/usr/local/bin:/usr/bin:/bin # Avoid CMake config from brew etc.
-          mkdir -p $GITHUB_WORKSPACE/superbuild
-          cd $GITHUB_WORKSPACE/superbuild
-          printf "cmake_minimum_required(VERSION 3.16)\nproject(test)\nenable_testing()\nadd_subdirectory(gdal)" > CMakeLists.txt
-          ln -s .. gdal
-          mkdir build
-          cd build
-          cmake ${CMAKE_OPTIONS} -Werror=dev -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install-gdal -DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD} -DCMAKE_C_FLAGS=-Werror -DCMAKE_CXX_FLAGS=-Werror -DGDAL_USE_PUBLICDECOMPWT:BOOL=ON -DPUBLICDECOMPWT_URL=https://github.com/rouault/PublicDecompWT .. -DWERROR_DEV_FLAG="-Werror=dev"
-      - name: Build
-        run: |
-          cmake --build $GITHUB_WORKSPACE/superbuild/build -- -j$(nproc)
-        env:
-          GIT_LFS_SKIP_SMUDGE: 1 # for PublicDecompWT github repository clone
-      - name: Build fake ossfuzz fuzzers
-        run: |
-          cd $GITHUB_WORKSPACE/superbuild/build
-          g++ -c $GITHUB_WORKSPACE/fuzzers/fuzzingengine.cpp
-          ar r libFuzzingEngine.a fuzzingengine.o
-          CXX=g++ CXXFLAGS="${CXXFLAGS} -I$PWD/gdal/port -I$PWD/gdal/gcore -L$PWD" LIBGDAL="-L$PWD/gdal -lgdal" SRC=/tmp OUT=/tmp $GITHUB_WORKSPACE/fuzzers/build_google_oss_fuzzers.sh
-          OUT=/tmp $GITHUB_WORKSPACE/fuzzers/build_seed_corpus.sh
-      - name: test autotest/cpp/gdallimits
-        run: |
-          sudo apt-get install -y numactl
-          #
-          echo "Limits without restrictions:"
-          LD_LIBRARY_PATH=$GITHUB_WORKSPACE/superbuild/build $GITHUB_WORKSPACE/superbuild/build/gdal/autotest/cpp/gdallimits
-          #
-          echo "Limits with ulimit -m 100000; numactl -C 0"
-          (ulimit -m 100000; LD_LIBRARY_PATH=$GITHUB_WORKSPACE/superbuild/build numactl -C 0 $GITHUB_WORKSPACE/superbuild/build/gdal/autotest/cpp/gdallimits) > out.txt
-          cat out.txt
-          grep "CPLGetNumCPUs = 1" out.txt >/dev/null
-          grep "CPLGetUsablePhysicalRAM = 97 MB" out.txt >/dev/null
-      - name: test (with command targets)
-        run: |
-          cmake --build $GITHUB_WORKSPACE/superbuild/build --target quicktest -- -j$(nproc)
-      - name: test (with ctest)
-        run: |
-          cd $GITHUB_WORKSPACE/superbuild/build
-          ctest -V
-      - name: install and uninstall
-        run: |
-          cmake --build $GITHUB_WORKSPACE/superbuild/build --target install -- -j$(nproc)
-          cmake --build $GITHUB_WORKSPACE/superbuild/build --target uninstall
-          find $GITHUB_WORKSPACE/install-gdal || /bin/true
-      - name: install
-        run: |
-          export PATH=$CMAKE_DIR:/usr/local/bin:/usr/bin:/bin # Avoid CMake config from brew etc.
-          touch $GITHUB_WORKSPACE/man/man1/gdalinfo.1
-          touch $GITHUB_WORKSPACE/man/man1/gdaladdo.1
-          (cd $GITHUB_WORKSPACE/superbuild/build; cmake ..)
-          cmake --build $GITHUB_WORKSPACE/superbuild/build --target install -- -j$(nproc)
-          test -f $GITHUB_WORKSPACE/install-gdal/share/man/man1/gdalinfo.1
-          test -f $GITHUB_WORKSPACE/install-gdal/share/man/man1/gdaladdo.1
-          export LD_LIBRARY_PATH=$GITHUB_WORKSPACE/install-gdal/lib
-          $GITHUB_WORKSPACE/install-gdal/bin/gdalinfo --version
-          PYTHONPATH=$GITHUB_WORKSPACE/install-gdal/lib/python3.8/site-packages python3 -c "from osgeo import gdal;print(gdal.VersionInfo(None))"
-          # Test fix for https://github.com/conda-forge/gdal-feedstock/issues/995
-          PYTHONWARNINGS="error" PYTHONPATH=$GITHUB_WORKSPACE/install-gdal/lib/python3.8/site-packages python3 -c "from osgeo import gdal"
-          PYTHONPATH=$GITHUB_WORKSPACE/install-gdal/lib/python3.8/site-packages python3 $GITHUB_WORKSPACE/scripts/check_doc.py
-      - name: CMake with rpath
-        run: |
-          export PATH=$CMAKE_DIR:/usr/local/bin:/usr/bin:/bin # Avoid CMake config from brew etc.
-          (cd $GITHUB_WORKSPACE/superbuild/build; cmake .. "-DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install-gdal-with-rpath" "-DCMAKE_INSTALL_RPATH=$GITHUB_WORKSPACE/install-gdal-with-rpath/lib")
"-DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install-gdal-with-rpath" "-DCMAKE_INSTALL_RPATH=$GITHUB_WORKSPACE/install-gdal-with-rpath/lib") - cmake --build $GITHUB_WORKSPACE/superbuild/build --target install -- -j$(nproc) - # For some reason, during the install phase of above invocation, the Python bindings are rebuilt after the build phase, and without the rpath... Can't reproduce that locally - # PYTHONPATH=$GITHUB_WORKSPACE/install-gdal-with-rpath/lib/python3.8/site-packages python -c "from osgeo import gdal;print(gdal.VersionInfo(None))" - - name: Rerun using Mono - run: | - export PATH=$CMAKE_DIR:/usr/local/bin:/usr/bin:/bin # Avoid CMake config from brew etc. - cd $GITHUB_WORKSPACE/superbuild/build - rm -rf swig/csharp - cmake ${CMAKE_OPTIONS} -DCSHARP_MONO=ON -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install-gdal -UCMAKE_INSTALL_RPATH -DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD} -DCMAKE_C_FLAGS=-Werror -DCMAKE_CXX_FLAGS=-Werror .. - cmake --build $GITHUB_WORKSPACE/superbuild/build --target install -- -j$(nproc) - # Below fails with errors like 'System.InvalidProgramException: Invalid IL code in CreateData:Main (string[]): IL_00c4: callvirt 0x0a00000c' - # ctest -V -R "^csharp.*" - - name: Standalone Python bindings build from source - run: | - (cd $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python && python setup.py sdist) - mv $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python/dist/gdal-*.tar.gz gdal-python.tar.gz - mv $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python gdal-swig-python - python3 -m venv myvenv - source myvenv/bin/activate - # Set PATH so that gdal-config is found - PATH=$GITHUB_WORKSPACE/install-gdal/bin:$PATH pip install gdal-python.tar.gz[numpy] - LD_LIBRARY_PATH=$GITHUB_WORKSPACE/install-gdal/lib python -c "from osgeo import gdal_array" - which gdal_edit - cp $GITHUB_WORKSPACE/autotest/gcore/data/byte.tif . - LD_LIBRARY_PATH=$GITHUB_WORKSPACE/install-gdal/lib ldd myvenv/lib/python3.8/site-packages/osgeo/_gdal.cpython-38-x86_64-linux-gnu.so - LD_LIBRARY_PATH=$GITHUB_WORKSPACE/install-gdal/lib gdal_edit byte.tif -mo FOO=BAR - rm -f myvenv/bin/gdal_edit - rm -f myvenv/bin/gdal_edit.py - - name: Standalone gdal-utils package from wheel - run: | - mv gdal-swig-python $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python - (cd $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python/gdal-utils && python setup.py bdist_wheel) - mv $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python/gdal-utils/dist/*.whl gdal_utils-X.Y.Z.T-py3-none-any.whl - rm -rf $GITHUB_WORKSPACE/superbuild/build/gdal/swig/python - python3 -m venv myvenv - source myvenv/bin/activate - # Set PATH so that gdal-config is found - PATH=$GITHUB_WORKSPACE/install-gdal/bin:$PATH pip install gdal_utils-X.Y.Z.T-py3-none-any.whl - which gdal_edit - cp $GITHUB_WORKSPACE/autotest/gcore/data/byte.tif . - LD_LIBRARY_PATH=$GITHUB_WORKSPACE/install-gdal/lib gdal_edit byte.tif -mo FOO=BAR - pip uninstall -y gdal-utils - - name: Test post-install usage (with pkg-config) - run: | - ./autotest/postinstall/test_pkg-config.sh $GITHUB_WORKSPACE/install-gdal - - name: Test post-install usage (with gdal-config) - run: | - ./autotest/postinstall/test_gdal-config.sh $GITHUB_WORKSPACE/install-gdal - - name: Test post-install usage (with CMake) - run: | - ./autotest/postinstall/test_cmake.sh $GITHUB_WORKSPACE/install-gdal - - name: Rerun with static linkage - run: | - export PATH=$CMAKE_DIR:/usr/local/bin:/usr/bin:/bin # Avoid CMake config from brew etc. 
-          cd $GITHUB_WORKSPACE/superbuild/build
-          rm -f CMakeCache.txt
-          cmake ${CMAKE_OPTIONS} .. \
-              -DBUILD_SHARED_LIBS=OFF \
-              -DBUILD_TESTING=OFF \
-              -DCSHARP_MONO=OFF \
-              -DGDAL_USE_HDF4=OFF \
-              -DGDAL_USE_HDF5=OFF \
-              -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install-gdal-static
-          cmake --build $GITHUB_WORKSPACE/superbuild/build --target install -- -j$(nproc)
-      - name: Test post-install usage (static, with pkg-config)
-        run: |
-          ./autotest/postinstall/test_pkg-config.sh $GITHUB_WORKSPACE/install-gdal-static --static
-      - name: Test post-install usage (static, with gdal-config)
-        run: |
-          ./autotest/postinstall/test_gdal-config.sh $GITHUB_WORKSPACE/install-gdal-static --static
-      - name: Test post-install usage (static, with CMake)
-        run: |
-          ./autotest/postinstall/test_cmake.sh $GITHUB_WORKSPACE/install-gdal-static --static
-      - name: ccache statistics
-        run: ccache -s
-
-  build-windows-msys2-mingw:
-    runs-on: windows-2022
-    env:
-      generator: MinGW Makefiles
-      cache-name: cmake-mingw64
-    defaults:
-      run:
-        shell: msys2 {0}
-    steps:
-      # To avoid git clone to mess with the line endings of GDAL autotest data
-      # files that look like text, but should be handled as binary content
-      - name: Set git core.autocrlf to false
-        shell: bash -l {0}
-        run: |
-          git config --global core.autocrlf false
-      - name: Checkout GDAL
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - name: Install development packages
-        uses: msys2/setup-msys2@ddf331adaebd714795f1042345e6ca57bd66cea8 # v2.24.1
-        with:
-          msystem: MINGW64
-          update: true
-          # Disable mingw-w64-x86_64-crypto++ as it crashes at runtime on eedai_3 test. Likely a build issue with crypto++ itself
-          install: |
-            base-devel git mingw-w64-x86_64-toolchain mingw-w64-x86_64-cmake mingw-w64-x86_64-ccache
-            mingw-w64-x86_64-pcre mingw-w64-x86_64-xerces-c mingw-w64-x86_64-zstd mingw-w64-x86_64-libarchive
-            mingw-w64-x86_64-geos mingw-w64-x86_64-libspatialite mingw-w64-x86_64-proj
-            mingw-w64-x86_64-cgal mingw-w64-x86_64-libfreexl mingw-w64-x86_64-hdf5 mingw-w64-x86_64-netcdf mingw-w64-x86_64-poppler mingw-w64-x86_64-podofo mingw-w64-x86_64-postgresql
-            mingw-w64-x86_64-libgeotiff mingw-w64-x86_64-libpng mingw-w64-x86_64-libtiff mingw-w64-x86_64-openjpeg2
-            mingw-w64-x86_64-python-pip mingw-w64-x86_64-python-numpy mingw-w64-x86_64-python-pytest mingw-w64-x86_64-python-setuptools mingw-w64-x86_64-python-lxml mingw-w64-x86_64-swig mingw-w64-x86_64-python-psutil mingw-w64-x86_64-blosc mingw-w64-x86_64-libavif
-      - name: Setup cache
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
-        id: cache
-        with:
-          path: ${{ github.workspace }}\.ccache
-          key: ${{ runner.os }}-${{ env.cache-name }}-${{ github.base_ref }}${{ github.ref_name }}-${{ github.run_id }}
-          restore-keys: |
-            ${{ runner.os }}-${{ env.cache-name }}-${{ github.base_ref }}
-            ${{ runner.os }}-${{ env.cache-name }}
-      - name: Configure ccache
-        run: |
-          echo CCACHE_BASEDIR=$PWD >> ${GITHUB_ENV}
-          echo CCACHE_DIR=$PWD/.ccache >> ${GITHUB_ENV}
-          echo CCACHE_MAXSIZE=250M >> ${GITHUB_ENV}
-          ccache -z
-        working-directory: ${{ github.workspace }}
-      - name: populate JAVA_HOME
-        run: |
-          echo "JAVA_HOME=$JAVA_HOME_11_X64" >> ${GITHUB_ENV}
-      - name: Install python dependencies for autotest
-        run: |
-          # One of the dependencies of jsonschema 4.18 is rpds_py which requires a Rust compiler
-          python -m pip install "jsonschema<4.18"
-          python -m pip install -r autotest/requirements.txt
-      # Disable mySQL since C:/mysql/lib/mysqlclient.lib (unrelated to msys) is found, which causes linking issues
-      # Set explicitly CMAKE_C|CXX_COMPILER otherwise C:/ProgramData/chocolatey/bin/gcc.exe would be used
-      # Disable GDAL_ENABLE_DRIVER_HDF5 because of https://github.com/OSGeo/gdal/issues/11181
-      - name: Configure
-        run: |
-          cmake -S . -B build -G "${generator}" -Werror=dev \
-                -DCMAKE_BUILD_TYPE=release \
-                -DCMAKE_C_FLAGS=-Werror \
-                -DCMAKE_CXX_FLAGS=-Werror \
-                -DWERROR_DEV_FLAG=-Werror=dev \
-                -DCMAKE_C_COMPILER_LAUNCHER=ccache \
-                -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-                -DCMAKE_PREFIX_PATH=/mingw64 \
-                -DCMAKE_INSTALL_PREFIX=$PWD/install-gdal \
-                "-DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD}" \
-                -DGDAL_USE_MYSQL:BOOL=OFF \
-                -DGDAL_ENABLE_DRIVER_HDF5=OFF
-        working-directory: ${{ github.workspace }}
-      - name: Build
-        run: cmake --build build -j 3
-        working-directory: ${{ github.workspace }}
-      - name: test
-        run: cmake --build build --target quicktest
-        working-directory: ${{ github.workspace }}
-      - name: test (with ctest)
-        run: |
-          ctest --test-dir $GITHUB_WORKSPACE/build -C Release -V -j 3
-        env:
-          BUILD_NAME: mingw64
-          SKIP_OGR_GMLAS_HUGE_PROCESSING_TIME: YES
-          SKIP_OGR_GMLAS_HTTP_RELATED: YES
-          SKIP_TIFF_JPEG12: YES
-          SKIP_VSIRAR: YES
-        working-directory: ${{ github.workspace }}
-      - name: Install
-        run: cmake --build build --target install -j 3
-        working-directory: ${{ github.workspace }}
-      - name: Test post-install usage (with pkg-config)
-        run: ./autotest/postinstall/test_pkg-config.sh $PWD/install-gdal
-        working-directory: ${{ github.workspace }}
-      - name: ccache statistics
-        run: ccache -s
-
-  build-windows-conda:
-    runs-on: windows-2022
-    env:
-      architecture: x64
-      #generator: Visual Studio 17 2022
-      generator: Ninja
-      cache-name: cmake-msvc
-    steps:
-      # To avoid git clone to mess with the line endings of GDAL autotest data
-      # files that look like text, but should be handled as binary content
-      - name: Set git core.autocrlf to false
-        run: |
-          git config --global core.autocrlf false
-      - name: Checkout GDAL
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0
-      - name: populate JAVA_HOME
-        shell: pwsh
-        run: |
-          echo "JAVA_HOME=$env:JAVA_HOME_11_X64" >> %GITHUB_ENV%
-      - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4
-        with:
-          activate-environment: gdalenv
-          miniforge-version: latest
-          use-mamba: true
-          auto-update-conda: true
-          use-only-tar-bz2: false
-      - name: Install dependency
-        shell: bash -l {0}
-        run: |
-          conda install --yes --quiet curl libiconv icu python=3.10 swig numpy pytest pytest-env pytest-benchmark filelock zlib lxml jsonschema setuptools
-          # FIXME: remove libnetcdf=4.9.2=nompi_h5902ca5_107 pinning as soon as https://github.com/conda-forge/libnetcdf-feedstock/issues/182 is resolved
-          conda install --yes --quiet proj geos hdf4 hdf5 kealib \
-              libnetcdf=4.9.2=nompi_h5902ca5_107 openjpeg poppler libtiff libpng xerces-c expat libxml2 kealib json-c \
-              cfitsio freexl geotiff libjpeg-turbo libpq libspatialite libwebp-base pcre pcre2 postgresql \
-              sqlite tiledb zstd cryptopp cgal doxygen librttopo libkml openssl xz \
-              openjdk ant qhull armadillo blas blas-devel libblas libcblas liblapack liblapacke blosc libarchive \
-              arrow-cpp pyarrow libaec libheif libavif cmake fsspec
-      - name: Check CMake version
-        shell: bash -l {0}
-        run: |
-          cmake --version
-      - name: Install pdfium
-        shell: bash -l {0}
-        run: |
-          curl -LOs https://github.com/rouault/pdfium_build_gdal_3_10/releases/download/pdfium_6677_v1/install-win10-vs2019-x64-rev6677.zip
-          unzip install-win10-vs2019-x64-rev6677.zip
-          mv install install-pdfium
-
-      - name: Remove conflicting libraries
-        shell: bash -l {0}
-        run: |
-          rm -rf C:/Strawberry || /bin/true
-          rm -rf "C:/Program Files/OpenSSL/lib" || /bin/true
-
-      - name: Configure
-        shell: bash -l {0}
-        # Disable MySQL because of "error LNK2038: mismatch detected for '_MSC_VER': value '1800' doesn't match value '1900' in ogrmysqldatasource.obj" and other errors
-        # Note that the leading space in CMAKE_C/CXX_FLAGS=" /WX" is due to using Bash on Windows that would
-        # otherwise interpret /bla has a file relative to the Bash root directory and would replace it by a path like c:\Program Files\git\WX
-        # BUILD_JAVA_BINDINGS=OFF because we get "Error occurred during initialization of VM. Corrupted ZIP library: C:\Miniconda\envs\gdalenv\Library\bin\zip.dll" when running java. Not reproducible on a standard VM
-        # Build PDF driver as plugin due to the PDFium build including libopenjp2 symbols which would conflict with external libopenjp2
-        run: |
-          mkdir -p $GITHUB_WORKSPACE/build
-          cmake -G "${generator}" -Werror=dev "-DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install-gdal" "-DUSE_CCACHE=ON" "-DCMAKE_PREFIX_PATH=${CONDA}/envs/gdalenv" -DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD} -S "$GITHUB_WORKSPACE" -B "$GITHUB_WORKSPACE/build" -DGDAL_ENABLE_PLUGINS:BOOL=ON -DGDAL_ENABLE_PLUGINS_NO_DEPS:BOOL=ON -DGDAL_USE_PUBLICDECOMPWT:BOOL=ON -DPUBLICDECOMPWT_URL=https://github.com/rouault/PublicDecompWT -DBUILD_JAVA_BINDINGS=OFF -DBUILD_CSHARP_BINDINGS=ON -DGDAL_USE_MYSQL:BOOL=OFF -DCMAKE_C_FLAGS=" /WX" -DCMAKE_CXX_FLAGS=" /WX" -DWERROR_DEV_FLAG="-Werror=dev" -DCMAKE_BUILD_TYPE=Release -DPDFIUM_ROOT=$GITHUB_WORKSPACE/install-pdfium -DGDAL_ENABLE_DRIVER_PDF_PLUGIN:BOOL=ON -DCMAKE_UNITY_BUILD=ON -DOGR_ENABLE_DRIVER_TAB_PLUGIN=OFF -DOGR_ENABLE_DRIVER_GEOJSON_PLUGIN=OFF
-      - name: Build
-        shell: bash -l {0}
-        run: cmake --build $GITHUB_WORKSPACE/build --config Release -j 2
-        env:
-          GIT_LFS_SKIP_SMUDGE: 1 # for PublicDecompWT github repository clone
-      - name: test
-        shell: bash -l {0}
-        run: |
-          cmake --build $GITHUB_WORKSPACE/build --config Release --target quicktest
-      - name: test (with ctest)
-        shell: bash -l {0}
-        run: |
-          ctest --test-dir $GITHUB_WORKSPACE/build -C Release -V -j 3
-        env:
-          SKIP_OGR_GMLAS_HUGE_PROCESSING_TIME: YES
-          SKIP_OGR_GMLAS_HTTP_RELATED: YES
-          SKIP_GDAL_HTTP_SSL_VERIFYSTATUS: YES
-          BUILD_NAME: "build-windows-conda"
-      - name: Install
-        shell: bash -l {0}
-        run: |
-          cmake --build $GITHUB_WORKSPACE/build --config Release --target install
-          export PATH=$GITHUB_WORKSPACE/install-gdal/bin:$PATH
-          gdalinfo --version
-          python -VV
-          PYTHONPATH=$GITHUB_WORKSPACE/install-gdal/lib/site-packages python -c "from osgeo import gdal;print(gdal.VersionInfo(None))"
-          export PATH=$GITHUB_WORKSPACE/install-gdal/Scripts:$PATH
-          PYTHONPATH=$GITHUB_WORKSPACE/install-gdal/lib/site-packages gdal_edit --version
-      - name: Check the build includes the expected drivers
-        shell: bash -l {0}
-        run: |
-          export PATH=$GITHUB_WORKSPACE/install-gdal/bin:$PATH
-          gdalinfo --formats > found_formats.txt
-          ogrinfo --formats >> found_formats.txt
-          cat found_formats.txt
-          cat $GITHUB_WORKSPACE/.github/workflows/windows_conda_expected_gdalinfo_formats.txt $GITHUB_WORKSPACE/.github/workflows/windows_conda_expected_ogrinfo_formats.txt > expected_formats.txt
-          dos2unix expected_formats.txt
-          dos2unix found_formats.txt
-          diff -u expected_formats.txt found_formats.txt
-      - name: Show gdal.pc
-        shell: bash -l {0}
-        run: cat $GITHUB_WORKSPACE/build/gdal.pc
-      - name: Test python setup.py install
-        shell: bash -l {0}
-        run: |
-          cd $GITHUB_WORKSPACE/build/swig/python
-          python setup.py install
-
-  build-windows-minimum:
-    runs-on: windows-2022
-    env:
-      architecture: x64
-      generator: Visual Studio 17 2022
-      GDAL_PYTHON_BINDINGS_WITHOUT_NUMPY: YES
-    steps:
-      # To avoid git clone to mess with the line endings of GDAL autotest data
-      # files that look like text, but should be handled as binary content
-      - name: Set git core.autocrlf to false
-        run: |
-          git config --global core.autocrlf false
-      - name: Checkout GDAL
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4
-        with:
-          activate-environment: gdalenv
-          miniforge-version: latest
-          use-mamba: true
-          auto-update-conda: true
-          use-only-tar-bz2: false
-      - name: Install dependency
-        shell: bash -l {0}
-        run: |
-          conda install --yes --quiet proj pytest pytest-env pytest-benchmark filelock lxml cmake setuptools
-      - name: Check CMake version
-        shell: bash -l {0}
-        run: |
-          cmake --version
-      - name: Configure
-        shell: bash -l {0}
-        run: |
-          mkdir -p $GITHUB_WORKSPACE/build
-          cmake -A ${architecture} -G "${generator}" "-DCMAKE_PREFIX_PATH=${CONDA}/envs/gdalenv" -Werror=dev "-DCMAKE_CXX_COMPILER_LAUNCHER=clcache" -DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD} -S "$GITHUB_WORKSPACE" -B "$GITHUB_WORKSPACE/build" -DCMAKE_C_FLAGS=" /WX" -DCMAKE_CXX_FLAGS=" /WX" -DGDAL_USE_EXTERNAL_LIBS:BOOL=OFF -DWERROR_DEV_FLAG="-Werror=dev"
-      - name: Build
-        shell: bash -l {0}
-        run: cmake --build $GITHUB_WORKSPACE/build --config RelWithDebInfo -j 2
-      - name: Configure with even less dependencies
-        shell: bash -l {0}
-        run: |
-          rm -f build/CMakeCache.txt
-          cmake -A ${architecture} -G "${generator}" "-DCMAKE_PREFIX_PATH=${CONDA}/envs/gdalenv" -Werror=dev "-DCMAKE_CXX_COMPILER_LAUNCHER=clcache" -DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD} -S "$GITHUB_WORKSPACE" -B "$GITHUB_WORKSPACE/build" -DCMAKE_C_FLAGS=" /WX" -DCMAKE_CXX_FLAGS=" /WX" -DGDAL_USE_EXTERNAL_LIBS:BOOL=OFF -DGDAL_USE_PNG_INTERNAL=OFF -DGDAL_USE_JPEG_INTERNAL=OFF -DGDAL_USE_JPEG12_INTERNAL=OFF -DGDAL_USE_GIF_INTERNAL=OFF -DGDAL_USE_LERC_INTERNAL=OFF -DGDAL_USE_LERCV1_INTERNAL=OFF -DGDAL_USE_QHULL_INTERNAL=OFF -DGDAL_USE_OPENCAD_INTERNAL=OFF -DWERROR_DEV_FLAG="-Werror=dev"
-      - name: Configure with even less dependencies, and disabling all optional drivers
-        shell: bash -l {0}
-        run: |
-          rm -f build/CMakeCache.txt
-          cmake -A ${architecture} -G "${generator}" "-DCMAKE_PREFIX_PATH=${CONDA}/envs/gdalenv" -Werror=dev "-DCMAKE_CXX_COMPILER_LAUNCHER=clcache" -DCMAKE_UNITY_BUILD=${CMAKE_UNITY_BUILD} -S "$GITHUB_WORKSPACE" -B "$GITHUB_WORKSPACE/build" -DCMAKE_C_FLAGS=" /WX" -DCMAKE_CXX_FLAGS=" /WX" -DGDAL_USE_EXTERNAL_LIBS:BOOL=OFF -DGDAL_USE_PNG_INTERNAL=OFF -DGDAL_USE_JPEG_INTERNAL=OFF -DGDAL_USE_JPEG12_INTERNAL=OFF -DGDAL_USE_GIF_INTERNAL=OFF -DGDAL_USE_LERC_INTERNAL=OFF -DGDAL_USE_LERCV1_INTERNAL=OFF -DGDAL_USE_QHULL_INTERNAL=OFF -DGDAL_USE_OPENCAD_INTERNAL=OFF -DGDAL_BUILD_OPTIONAL_DRIVERS=OFF -DOGR_BUILD_OPTIONAL_DRIVERS=OFF -DGDAL_ENABLE_DRIVER_DERIVED=ON -DWERROR_DEV_FLAG="-Werror=dev"
-      - name: Build
-        shell: bash -l {0}
-        run: cmake --build $GITHUB_WORKSPACE/build --config RelWithDebInfo -j 2
-
-      - name: Re-enable shapefile driver (otherwise lots of python tests would fail)
-        shell: bash -l {0}
-        run: |
-          cmake -S "$GITHUB_WORKSPACE" -B "$GITHUB_WORKSPACE/build" -DOGR_ENABLE_DRIVER_SHAPE=ON -DOGR_ENABLE_DRIVER_SHAPE_PLUGIN=ON
"$GITHUB_WORKSPACE/build" -DOGR_ENABLE_DRIVER_SHAPE=ON -DOGR_ENABLE_DRIVER_SHAPE_PLUGIN=ON - - name: Build - shell: bash -l {0} - run: cmake --build $GITHUB_WORKSPACE/build --config RelWithDebInfo -j 2 - - # Works around https://github.com/actions/runner-images/issues/10055 - - name: Remove conflicting libraries - shell: bash -l {0} - run: | - find "C:/hostedtoolcache/windows/Java_Temurin-Hotspot_jdk" -name "msvcp140.dll" -exec rm {} \; - - - name: test (with ctest) - shell: bash -l {0} - run: | - # gnm_test has suddenly started failing around June 16th 2024 - # Related to image windows-latest 20240603.1.0 / actions/runner-images#10004 - echo "def test_dummy(): pass" > $GITHUB_WORKSPACE/autotest/gnm/gnm_test.py - ctest --test-dir $GITHUB_WORKSPACE/build -C RelWithDebInfo -V -j 3 - env: - SKIP_GDAL_HTTP_SSL_VERIFYSTATUS: YES - BUILD_NAME: "build-windows-minimum" - - name: Show gdal.pc - shell: bash -l {0} - run: cat $GITHUB_WORKSPACE/build/gdal.pc - build-mac: # M1 MacOS runner: https://github.blog/changelog/2024-01-30-github-actions-introducing-the-new-m1-macos-runner-available-to-open-source/ runs-on: macOS-14 @@ -672,45 +110,3 @@ jobs: ./autotest/postinstall/test_cmake.sh $GITHUB_WORKSPACE/install-gdal --dynamic --disable-odbc - name: ccache statistics run: ccache -s - - # Disabled for now since bindings in master use GDALDatasetGetFieldDomainNames - # but libgdal from Conda has not it yet. - build-windows-conda-csharp-only: - if: ${{ false }} # disable for now - runs-on: windows-2022 - env: - architecture: x64 - generator: Visual Studio 17 2022 - - steps: - # To avoid git clone to mess with the line endings of GDAL autotest data - # files that look like text, but should be handled as binary content - - name: Set git core.autocrlf to false - run: | - git config --global core.autocrlf false - - name: Checkout GDAL - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - activate-environment: gdalenv - python-version: 3.9 - channels: conda-forge - - name: Install dependency - shell: bash -l {0} - run: | - conda install --yes --quiet --name gdalenv curl libiconv icu python=3.9 swig numpy pytest pytest-env pytest-benchmark filelock zlib clcache lxml - conda install --yes --quiet --name gdalenv -c conda-forge libgdal - - name: Configure - shell: bash -l {0} - run: | - mkdir -p $GITHUB_WORKSPACE/build - cmake -A ${architecture} -G "${generator}" -DGDAL_CSHARP_ONLY=ON -DCMAKE_C_FLAGS=" /WX" -DCMAKE_CXX_FLAGS=" /WX" -S "$GITHUB_WORKSPACE" -B "$GITHUB_WORKSPACE/build" - - name: Build - shell: bash -l {0} - run: cmake --build $GITHUB_WORKSPACE/build --config Release --target csharp_samples -j 2 - - name: test (with ctest) - shell: bash -l {0} - run: | - ctest --test-dir $GITHUB_WORKSPACE/build -C Release -V -j 3 -R "^csharp.*" - env: - SKIP_OGR_GMLAS_HUGE_PROCESSING_TIME: YES diff --git a/.github/workflows/code_checks.yml b/.github/workflows/code_checks.yml deleted file mode 100644 index 3b2fbf0ee127..000000000000 --- a/.github/workflows/code_checks.yml +++ /dev/null @@ -1,222 +0,0 @@ -name: Code Checks - -on: - push: - paths-ignore: - - 'doc/**' - branches-ignore: - - 'backport**' - - 'dependabot**' - pull_request: - paths-ignore: - - 'doc/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - - cppcheck_2004: - runs-on: ubuntu-20.04 - steps: - - name: Checkout - uses: 
-
-      - name: Install Requirements
-        run: |
-          sudo apt update
-          sudo apt install -y cppcheck libsqlite3-dev ccache sqlite3 libproj-dev cmake
-
-      - name: Run cmake
-        run: |
-          mkdir build
-          cd build
-          cmake ..
-
-      - name: Run cppcheck test
-        run: |
-          cd build
-          ../scripts/cppcheck.sh
-
-  cppcheck_2404:
-    runs-on: ubuntu-latest
-    container: ubuntu:24.04
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Install Requirements
-        run: |
-          apt update
-          apt install -y cppcheck libsqlite3-dev ccache sqlite3 libproj-dev cmake g++ make
-
-      - name: Run cmake
-        run: |
-          mkdir build
-          cd build
-          cmake ..
-
-      - name: Run cppcheck test
-        run: |
-          cd build
-          ../scripts/cppcheck.sh
-
-  cppcheck_master:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Install Requirements
-        run: |
-          sudo apt update
-          sudo apt install -y git libsqlite3-dev ccache sqlite3 libproj-dev cmake g++ make
-
-      - name: Build cppcheck
-        run: |
-          git clone https://github.com/danmar/cppcheck
-          cd cppcheck
-          mkdir build
-          cd build
-          cmake .. -DCMAKE_BUILD_TYPE=Release
-          make -j$(nproc)
-          sudo make install
-          cd ../..
-
-      - name: Run cmake
-        run: |
-          mkdir build
-          cd build
-          cmake ..
-
-      - name: Run cppcheck test
-        run: |
-          cd build
-          # Do not fail the job. This is just used as a tool to monitor how we are regarding recent cppcheck
-          ../scripts/cppcheck.sh || /bin/true
-
-  code_quality_checks:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Detect tabulations
-        run: ./scripts/detect_tabulations.sh
-
-      - name: Detect printf
-        run: ./scripts/detect_printf.sh
-
-      - name: Detect self assignments
-        run: ./scripts/detect_self_assignment.sh
-
-      - name: Detect suspicious char digit zero
-        run: ./scripts/detect_suspicious_char_digit_zero.sh
-
-      - name: Detect missing includes
-        run: ./scripts/detect_missing_include.sh
-
-      # Helps detecting updates of internal libjson-c where replacement
-      # of strtod() -> CPLStrtod() is missing. The later function is not
-      # locale sensitive. An alternative would be to make sure that HAVE_USELOCALE
-      # or HAVE_SETLOCALE are passed on Windows, but avoiding to mess with
-      # locale seems to be a better option
-      - name: Detect invalid use of atof() or strtod() in internal libjson
-        run: |
-          grep -e "CPLStrtod(" ../ogr/ogrsf_frmts/geojson/libjson/*.c >/dev/null && echo "CPLStrtod() found as expected"
-          if grep -e "strtod(" ogr/ogrsf_frmts/geojson/libjson/*.c; then echo "Unexpected use of strtod(). Use CPLStrtod() instead"; /bin/false; fi
-          if grep -e "atof(" ogr/ogrsf_frmts/geojson/libjson/*.c; then echo "Unexpected use of atof()."; /bin/false; fi
-
-      - name: Shellcheck
-        # SC2129: (style): Consider using { cmd1; cmd2; } >> file instead of individual redirects
-        run: shellcheck -e SC2086,SC2046,SC2164,SC2054,SC2129 $(find . -name '*.sh' -a -not -name ltmain.sh -a -not -wholename "./autotest/*" -a -not -wholename "./.github/*")
-
-  binary_files:
-    runs-on: ubuntu-latest
-    steps:
-
-      - name: Install Requirements
-        run: |
-          sudo apt-get install -y python3 coreutils
-
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Detect binary files
-        run: python3 ./scripts/check_binaries.py
-
-  linting:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
-      - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
-
-  doxygen:
-    runs-on: ubuntu-latest
-    container: ghcr.io/osgeo/proj-docs
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Run doxygen
-        run: |
-          cd doc
-          make doxygen_check_warnings
-
-  other_checks:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Install Requirements
-        run: |
-          sudo apt install python3-pip wget
-          # ruamel.yaml.clib 0.2.9 throws a 'TypeError: a string or stream input is required' when running cffconvert --validate
-          sudo pip3 install cffconvert "ruamel.yaml.clib<0.2.9"
-
-      - name: Validate citation file
-        run: |
-          cffconvert --validate
-          cffconvert -f bibtex
-          cffconvert -f apalike
-
-  cmake-lint:
-    # Disabled because of https://github.com/OSGeo/gdal/pull/5326#issuecomment-1042617407
-    if: false
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - name: Set up Python
-        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
-        with:
-          python-version: 3.8
-      - name: Install lint tool
-        run: |
-          python -m pip install --upgrade pip
-          pip install cmake-format pyyaml
-      - name: Check cmakelist
-        run: find . -name CMakeLists.txt |xargs cmake-format --check
-
-  validate_xml:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Install requirements
-        run: |
-          sudo apt update
-          sudo apt install -y libxml2-utils
-
-      - name: Run xmllint checks
-        run: |
-          xmllint --schema ./frmts/nitf/data/nitf_spec.xsd ./frmts/nitf/data/nitf_spec.xml --noout
-          xmllint --schema ./ogr/ogrsf_frmts/vdv/data/vdv452.xsd ./ogr/ogrsf_frmts/vdv/data/vdv452.xml --noout
-          xmllint --schema ./ogr/ogrsf_frmts/gmlas/data/gmlasconf.xsd ./ogr/ogrsf_frmts/gmlas/data/gmlasconf.xml --noout
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
deleted file mode 100644
index f26c7e499044..000000000000
--- a/.github/workflows/codeql.yml
+++ /dev/null
@@ -1,134 +0,0 @@
-name: "CodeQL"
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-    branches-ignore:
-      - 'backport**'
-      - 'dependabot**'
-  pull_request:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-  analyze:
-    name: Analyze
-    # Runner size impacts CodeQL analysis time. To learn more, please see:
-    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
-    #   - https://gh.io/supported-runners-and-hardware-resources
-    #   - https://gh.io/using-larger-runners
-    # Consider using larger runners for possible analysis time improvements.
-    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
-    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
-    permissions:
-      actions: read
-      contents: read
-      security-events: write
-
-    strategy:
-      fail-fast: false
-      matrix:
-        language: [ 'c-cpp' ]
-        # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
-        # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
-        # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
-        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Install dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y g++ swig python3-numpy libproj-dev libqhull-dev
-          sudo apt-get install -y \
-            libblosc-dev \
-            libboost-dev \
-            libcairo2-dev \
-            libcfitsio-dev \
-            libcrypto++-dev \
-            libcurl4-gnutls-dev \
-            libexpat-dev \
-            libfcgi-dev \
-            libfyba-dev \
-            libfreexl-dev \
-            libgeos-dev \
-            libgeotiff-dev \
-            libgif-dev \
-            libhdf5-serial-dev \
-            libjpeg-dev \
-            libkml-dev \
-            liblcms2-2 \
-            liblz4-dev \
-            liblzma-dev \
-            libmysqlclient-dev \
-            libnetcdf-dev \
-            libogdi-dev \
-            libopenexr-dev \
-            libopenjp2-7-dev \
-            libpcre3-dev \
-            libpng-dev \
-            libpoppler-dev \
-            libpoppler-private-dev \
-            libpq-dev \
-            libproj-dev \
-            librasterlite2-dev \
-            libspatialite-dev \
-            libssl-dev \
-            libwebp-dev \
-            libxerces-c-dev \
-            libxml2-dev \
-            libxslt-dev \
-            libzstd-dev \
-            unixodbc-dev
-
-      - name: Install latest ninja
-        run: pip install ninja
-
-      - name: Configure
-        if: matrix.language == 'c-cpp'
-        run: |
-          cmake -S . -B build \
-            -DCMAKE_BUILD_TYPE=Debug \
-            -DGDAL_USE_LERC_INTERNAL=OFF \
-            -DBUILD_TESTING=OFF \
-            -G Ninja \
-
-      # Initializes the CodeQL tools for scanning.
-      # We do that after running CMake to avoid CodeQL to trigger during CMake time,
-      # in particular during HDF5 detection which is terribly slow (https://github.com/OSGeo/gdal/issues/9549)
-      - name: Initialize CodeQL
-        uses: github/codeql-action/init@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0
-        with:
-          languages: ${{ matrix.language }}
-          # If you wish to specify custom queries, you can do so here or in a config file.
-          # By default, queries listed here will override any specified in a config file.
-          # Prefix the list here with "+" to use these queries and those in the config file.
-
-          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
-          # queries: security-extended,security-and-quality
-          config: |
-            query-filters:
-              - exclude:
-                  id: cpp/non-https-url
-
-      - name: Build
-        if: matrix.language == 'c-cpp'
-        run: |
-          cmake --build build -j$(nproc)
-
-      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0
-        with:
-          category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml
deleted file mode 100644
index 4ef81cea9244..000000000000
--- a/.github/workflows/conda.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-name: Conda
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-    branches-ignore:
-      - 'backport**'
-      - 'dependabot**'
-
-  # Disabled because run is quite slow, especially for Mac
-  #pull_request:
-  #  paths-ignore:
-  #    - 'doc/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read # to fetch code (actions/checkout)
-
-jobs:
-  build:
-    name: Conda ${{ matrix.platform }}
-
-    runs-on: ${{ matrix.platform }}
-    strategy:
-      fail-fast: true
-      matrix:
-        # macos-13: Intel
-        # macos-14: arm64
-        platform: ['ubuntu-latest','windows-latest','macos-13','macos-14']
-
-    env:
-      GHA_CI_PLATFORM: ${{ matrix.platform }}
-      CACHE_NUMBER: 0
-
-    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Support longpaths
-        run: git config --system core.longpaths true
-        if: matrix.platform == 'windows-latest'
-
-      - name: Cache Conda Environment
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
-        with:
-          path: ~/conda_pkgs_dir
-          key: ${{ runner.os }}-${{ steps.get-date.outputs.today }}-conda-${{ env.CACHE_NUMBER }}
-
-      - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4
-        with:
-          miniforge-version: latest
-          use-mamba: true
-          channels: conda-forge
-          auto-update-conda: true
-
-      - name: Remove conflicting libraries
-        shell: bash -l {0}
-        if: matrix.platform == 'windows-latest'
-        run: |
-          rm -rf C:/Strawberry || /bin/true
-
-      - name: Setup
-        shell: bash -l {0}
-        run: |
-          source ./ci/travis/conda/setup.sh
-
-      - name: Build
-        shell: bash -l {0}
-        run: |
-          source ../ci/travis/conda/compile.sh
-        working-directory: ./gdal-feedstock
-
-      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
-        with:
-          name: ${{ matrix.platform }}-conda-package
-          path: ./gdal-feedstock/packages/
-
-      - name: Deploy to gdal-master Conda channel
-        if: github.ref == 'refs/heads/master'
-        shell: bash -l {0}
-        env:
-          ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
-        run: |
-          source ../ci/travis/conda/upload.sh || true
-        working-directory: ./gdal-feedstock
diff --git a/.github/workflows/coverity_scan.yml b/.github/workflows/coverity_scan.yml
deleted file mode 100644
index 961177d2e54b..000000000000
--- a/.github/workflows/coverity_scan.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-name: coverity-scan
-
-# Controls when the action will run.
-on:
-  # Run this action on a schedule (we're allowed a maximum of two per day)
-  schedule:
-    - cron: '0 18 * * SUN' # Sunday at 18:00 UTC
-
-  # Allows you to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-
-  coverity_scan:
-    runs-on: ubuntu-latest
-    if: github.repository == 'OSGeo/gdal'
-
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - id: coverity_scan
-            container: ubuntu_20.04
-            build_script: build.sh
-
-    # Store the components of the container name as environment variables:
-    # ${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER}/${CONTAINER_NAME}
-    env:
-      CONTAINER_REGISTRY: ${{ vars.gdal_container_registry || 'ghcr.io' }}
-      CONTAINER_REGISTRY_USER: ${{ vars.gdal_container_registry_user || github.repository_owner }}
-      CONTAINER_NAME: gdal-deps
-      CONTAINER_TAG: ${{ matrix.container }}-${{ github.base_ref || github.ref_name }}
-      GDAL_SOURCE_DIR: /gdal # Directory to which workspace (source root) will be mounted inside container
-
-    defaults:
-      run:
-        # bash is needed to use ${CONTAINER_REGISTRY_USER,,}, which forces the
-        # username to lower-case as required by docker.
-        shell: bash
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Login to GHCR
-        if: env.CONTAINER_REGISTRY == 'ghcr.io'
-        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin
-
-      - name: Set variables
-        shell: bash
-        # This logic needs to be kept in sync between linux_build.yml and converity_scan.yml
-        run: |
-          CONTAINER_TAG_CLEAN=$(echo ${CONTAINER_TAG} | tr -d -c "[:alnum:].-")
-          echo "CONTAINER_TAG_CLEAN=${CONTAINER_TAG_CLEAN}"
-          echo "CONTAINER_TAG_CLEAN=${CONTAINER_TAG_CLEAN}" >> ${GITHUB_ENV}
-          CACHE_CONTAINER_TAG_CLEAN=$(echo ${CACHE_CONTAINER_TAG} | tr -d -c "[:alnum:].-")
-          echo "CACHE_CONTAINER_TAG_CLEAN=${CACHE_CONTAINER_TAG_CLEAN}"
-          echo "CACHE_CONTAINER_TAG_CLEAN=${CACHE_CONTAINER_TAG_CLEAN}" >> ${GITHUB_ENV}
-          echo "CONTAINER_NAME_FULL=${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER,,}/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN}" >>${GITHUB_ENV}
-
-      - name: Build
-        run: |
-          mkdir -p build-${{ matrix.id }}
-          docker run --name gdal-build \
-            --rm \
-            -e COVERITY_SCAN_TOKEN -e COVERITY_SCAN_EMAIL \
-            -v $(pwd):/${GDAL_SOURCE_DIR}:rw \
-            -v ${{ github.workspace }}/.ccache:/root/.ccache:rw \
-            --workdir ${GDAL_SOURCE_DIR}/build-${{ matrix.id }} \
-            ${CONTAINER_NAME_FULL} \
-            "${GDAL_SOURCE_DIR}/.github/workflows/${{ matrix.id }}/${{ matrix.build_script }}"
-        env:
-          COVERITY_SCAN_TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
-          COVERITY_SCAN_EMAIL: ${{ secrets.COVERITY_SCAN_EMAIL }}
diff --git a/.github/workflows/delete_untagged_containers.yml b/.github/workflows/delete_untagged_containers.yml
deleted file mode 100644
index e37a0012c1c4..000000000000
--- a/.github/workflows/delete_untagged_containers.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: Delete untagged containers
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-    branches-ignore:
-      - 'backport**'
-      - 'dependabot**'
-
-permissions:
-  contents: read
-
-jobs:
-  delete-untagged-containers:
-    name: Delete all containers from gdal-deps without tags
-    runs-on: ubuntu-latest
-    if: github.repository == 'OSGeo/gdal'
-    steps:
-      - name: Delete all containers from gdal-deps without tags
-        uses: Chizkiyahu/delete-untagged-ghcr-action@b302990b6c629f3b272a31f3c3a268e1f7d0ffae # v4.0.1
-        with:
-          token: ${{ secrets.PAT_TOKEN_DELETE_UNTAGGED_CONTAINERS }}
-          repository_owner: ${{ github.repository_owner }}
-          repository: ${{ github.repository }}
-          package_name: gdal-deps
-          untagged_only: true
-          owner_type: org
diff --git a/.github/workflows/doc_checks.yml b/.github/workflows/doc_checks.yml
deleted file mode 100644
index 545cb4eb5375..000000000000
--- a/.github/workflows/doc_checks.yml
+++ /dev/null
@@ -1,94 +0,0 @@
-name: Docs
-
-on:
-  push:
-    branches-ignore:
-      - 'backport**'
-      - 'dependabot**'
-  pull_request:
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-  docs:
-    name: Documentation checks
-
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: true
-    container: ghcr.io/osgeo/proj-docs
-
-    steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-      - name: Setup environment
-        shell: bash -l {0}
-        run: |
-          apt update
-          apt install -y libproj-dev swig
-          python3 -m pip install -r doc/requirements.txt
-          python3 -m pip install numpy setuptools
-          pushd .
-          mkdir build
-          cd build
-          export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-          cmake .. \
-            -DCMAKE_BUILD_TYPE=Release \
-            -DCMAKE_INSTALL_PREFIX=/usr \
-            -DBUILD_APPS=ON \
-            -DBUILD_TESTING=OFF \
-            -DGDAL_BUILD_OPTIONAL_DRIVERS=OFF \
-            -DOGR_BUILD_OPTIONAL_DRIVERS=OFF
-          cmake --build . -j$(nproc)
-          cmake --install .
-          # With the new ghcr.io/osgeo/proj-docs image based on Ubuntu 24.04
-          # a venv is activated. The above does not install the
-          # Python bindings into it (and the ones in the system are not found
-          # without overriding PYTHONPATH), so do it through pip install
-          cd swig/python
-          python3 setup.py sdist
-          cp dist/* /tmp/gdal.tar.gz
-          python3 -m pip install /tmp/gdal.tar.gz
-          ldconfig
-          popd
-
-      - name: Update components
-        shell: bash -l {0}
-        run: |
-          pip install -U "sphinx-rtd-theme>=3.0.0" "sphinxcontrib-spelling>=8.0.0"
-
-      - name: Print versions
-        shell: bash -l {0}
-        run: |
-          python3 --version
-          sphinx-build --version
-          python3 -m pip list --not-required --format=columns
-      - name: Lint .rst files
-        shell: bash -l {0}
-        run: |
-          if find . -name '*.rst' | xargs grep -P '\t'; then echo 'Tabs are bad, please use four spaces in .rst files.'; false; fi
-          if find . -name '*.rst' | xargs grep "\.\.versionadded"; then echo 'Wrong annotation. Should be .. versionadded'; false; fi
-          if find . -name '*.rst' | xargs grep "\.\.note"; then echo 'Wrong annotation. Should be .. note'; false; fi
-          if find . -name '*.rst' | xargs grep "\.\.warning"; then echo 'Wrong annotation. Should be .. warning'; false; fi
-          if find . -name '*.rst' | xargs grep "\.\.codeblock"; then echo 'Wrong annotation. Should be .. codeblock'; false; fi
-        working-directory: ./doc
-      - name: Doxygen
-        shell: bash -l {0}
-        run: |
-          mkdir -p doc/build
-          doxygen Doxyfile
-      - name: Generated RST files
-        shell: bash -l {0}
-        run: |
-          make generated_rst_files
-        working-directory: ./doc
-      - name: Spelling
-        shell: bash -l {0}
-        run: |
-          sed -i '/html_extra_path/d' source/conf.py # avoid WARNING: html_extra_path entry '../build/html_extra' is placed inside outdir
-          make spelling
-        working-directory: ./doc
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
deleted file mode 100644
index 7b34b00db0f2..000000000000
--- a/.github/workflows/docker.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-name: Docker
-
-on:
-  pull_request:
-    paths:
-      - 'docker/**'
-      - '.github/workflows/docker.yml'
-
-  # Allows you to run this workflow manually from the Actions tab
-  workflow_dispatch:
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-
-  docker_builds:
-
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - name: alpine-small
-            arch: linux/amd64
-
-          - name: alpine-normal
-            arch: linux/amd64
-
-          - name: ubuntu-small
-            arch: linux/amd64
-
-          - name: ubuntu-small
-            arch: linux/arm64
-
-          - name: ubuntu-full
-            arch: linux/amd64
-
-          - name: ubuntu-full
-            arch: linux/arm64
-
-    name: ${{ matrix.name }}-${{ matrix.arch }}
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
-      - name: Build
-        shell: bash -l {0}
-        run: |
-          docker run --rm --privileged linuxkit/binfmt:v0.8
-          cd docker/${{ matrix.name }}
-          ./build.sh --platform ${{ matrix.arch }}
diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml
deleted file mode 100644
index 72807eda42c0..000000000000
--- a/.github/workflows/linux_build.yml
+++ /dev/null
@@ -1,407 +0,0 @@
-name: Linux Builds
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-    branches-ignore:
-      - 'backport**'
-      - 'dependabot**'
-  pull_request:
-    paths-ignore:
-      - 'doc/**'
-      - 'docker/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-
-  linux-build:
-
-    # Store the components of the container name as environment variables:
-    # ${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER}/${CONTAINER_NAME}:${CONTAINER_TAG}
-    #
-    # Additionally, CACHE_CONTAINER_TAG may be used as as source for the
-    # Docker build cache. So if the Dockerfile in a feature branch is
-    # unchanged relative to master, a full container rebuild should not
-    # be required.
-    env:
-      CONTAINER_REGISTRY: ${{ vars.gdal_container_registry || 'ghcr.io' }}
-      CONTAINER_REGISTRY_USER: ${{ vars.gdal_container_registry_user || github.repository_owner }}
-      CONTAINER_NAME: gdal-deps
-      CONTAINER_TAG: ${{ matrix.container }}-${{ github.base_ref || github.ref_name }}
-      CACHE_CONTAINER_TAG: ${{ matrix.container }}-master
-
-    permissions:
-      packages: write
-
-    strategy:
-      fail-fast: false
-      matrix:
-        # Matrix variables:
-        #
-        # * name : readable summary of configuration, used for display
-        # * id : used as a ccache key, and to create a build subdirectory
-        # * container : build environment container and path to build script
-        # * use_avx2 : if true, determine arch at runtime and use in ccache key
-        # * build_script : name of custom build script, if any. Will be executed
-        #                  inside container, from build subdirectory.
- # * before_test_script : name of script to run before tests, if any. - # Will be executed outside container, from - # workspace root. Can be used to start docker - # containers as services for testing. - # * test_script : name of custom test script, if any. Will be executed - # inside container, from build subdirectory. - # * travis_branch : value of TRAVIS_BRANCH environment variable, - # used for test skipping - include: - - name: Alpine, gcc - id: alpine - container: alpine - build_script: build.sh - os: ubuntu-22.04 - - - name: Alpine, clang 32-bit - id: alpine_32bit - container: alpine_32bit - build_script: build.sh - test_script: test.sh - travis_branch: alpine_32bit - os: ubuntu-22.04 - - - name: Fedora Rawhide, clang++ - id: fedora_rawhide - travis_branch: fedora_rawhide - container: fedora_rawhide - build_script: build.sh - os: ubuntu-22.04 - - - name: Ubuntu 24.04, gcc - id: ubuntu_24.04 - travis_branch: ubuntu_2404 - container: ubuntu_24.04 - before_test_script: services.sh - build_script: build.sh - test_script: test.sh - os: ubuntu-22.04 - - - name: Ubuntu 22.04, gcc - id: ubuntu_22.04 - travis_branch: ubuntu_2204 - container: ubuntu_22.04 - before_test_script: services.sh - build_script: build.sh - test_script: test.sh - os: ubuntu-22.04 - - - name: Ubuntu 22.04, clang ASAN - id: asan - travis_branch: sanitize - container: ubuntu_22.04 - build_script: build.sh - test_script: test.sh - os: ubuntu-22.04 - - - name: Ubuntu 20.04, gcc - id: ubuntu_20.04 - travis_branch: ubuntu_2004 - container: ubuntu_20.04 - use_avx2: true - build_script: build.sh - test_script: test.sh - os: ubuntu-22.04 - - - name: Ubuntu 20.04, coverage - id: coverage - travis_branch: ubuntu_2004 - container: ubuntu_20.04 - before_test_script: services.sh - build_script: build.sh - test_script: test.sh - os: ubuntu-22.04 - - - name: Ubuntu 20.04, benchmarks - id: benchmarks - travis_branch: ubuntu_2004 - container: ubuntu_20.04 - build_script: build.sh - test_script: test.sh - os: ubuntu-22.04 - - - name: Ubuntu 20.04, Intel compiler - id: icc - container: icc - build_script: build.sh - os: ubuntu-22.04 - - name: ${{ matrix.name }} - - runs-on: ${{ matrix.os }} - - defaults: - run: - # bash is needed to use ${CONTAINER_REGISTRY_USER,,}, which forces the - # username to lower-case as required by docker. 
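For reference, the ${VAR,,} expansion mentioned in the comment above is a bash 4 parameter expansion with no POSIX sh equivalent, which is why the job defaults to bash below; a minimal illustration with a hypothetical value, separate from the workflow itself:

    # Registries such as ghcr.io only accept lower-case owners in image names,
    # so the workflow lower-cases the owner with bash's ${VAR,,} expansion.
    CONTAINER_REGISTRY_USER="OSGeo"        # hypothetical value
    echo "${CONTAINER_REGISTRY_USER,,}"    # prints: osgeo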
- shell: bash - - steps: - - name: Set variables - # This logic needs to be kept in sync between linux_build.yml and coverity_scan.yml - run: | - CONTAINER_TAG_CLEAN=$(echo ${CONTAINER_TAG} | tr -d -c "[:alnum:].-") - echo "CONTAINER_TAG_CLEAN=${CONTAINER_TAG_CLEAN}" - echo "CONTAINER_TAG_CLEAN=${CONTAINER_TAG_CLEAN}" >> ${GITHUB_ENV} - CACHE_CONTAINER_TAG_CLEAN=$(echo ${CACHE_CONTAINER_TAG} | tr -d -c "[:alnum:].-") - echo "CACHE_CONTAINER_TAG_CLEAN=${CACHE_CONTAINER_TAG_CLEAN}" - echo "CACHE_CONTAINER_TAG_CLEAN=${CACHE_CONTAINER_TAG_CLEAN}" >> ${GITHUB_ENV} - echo "CONTAINER_NAME_FULL=${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER,,}/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN}" >>${GITHUB_ENV} - - # Work around segfaults in ASan/MSan jobs - # Cf https://github.com/libjpeg-turbo/libjpeg-turbo/commit/2dfe6c0fe9e18671105e94f7cbf044d4a1d157e6 - # and https://github.com/actions/runner-images/issues/9491 - - name: Set up build - run: | - sudo sysctl vm.mmap_rnd_bits=28 - - - name: Checkout - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - - - name: Login to Docker Hub - if: env.CONTAINER_REGISTRY == 'docker.io' - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Login to GHCR - if: env.CONTAINER_REGISTRY == 'ghcr.io' - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin - - # Pull build environment in forks or pull requests, unless [skip cache] is included in the commit message - - name: Pull build environment - if: "(github.repository_owner != 'OSGeo' || github.event_name == 'pull_request') && !contains(github.event.head_commit.message, '[skip cache]')" - run: | - docker pull ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN} || true - docker pull ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CACHE_CONTAINER_TAG_CLEAN} || true - docker pull ${CONTAINER_NAME_FULL} || true - echo "DOCKER_BUILD_CACHE_FROM=--cache-from ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN} --cache-from ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CACHE_CONTAINER_TAG_CLEAN} --cache-from ${CONTAINER_NAME_FULL}" >>${GITHUB_ENV} - - - name: Prepare build context - run: | - mkdir docker-build-context - cp autotest/requirements.txt docker-build-context - - - name: Update build environment - env: - DOCKER_BUILDKIT: 1 - run: | - # FIXME: for some reason, the fedora rawhide container pushed by - # master job is corrupted (looks like it contains an outdated layer - # symlinking libssl.so.3 to an older version of the actual file), - # once it is pushed. But in the job that generates it, - # compilation & tests work fine. It looks like some weird caching - # issue - if test "${{ matrix.container }}" = "fedora_rawhide"; then - DOCKER_BUILD_CACHE_FROM="" - else - BUILD_ARG_INLINE_CACHE="--build-arg BUILDKIT_INLINE_CACHE=1" - fi - docker build \ - ${BUILD_ARG_INLINE_CACHE} \ - ${DOCKER_BUILD_CACHE_FROM} \ - -t ${CONTAINER_NAME_FULL} \ - -f .github/workflows/${{ matrix.container }}/Dockerfile.ci \ - docker-build-context - - # Get the architecture so we can use it as part of the cache key, - # but only if we are going to use avx2 in the build. If we are not, - # including the arch will cause unnecessary cache misses.
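Before the architecture detection that follows, a short aside on the tag sanitization performed by the "Set variables" step above; the sketch below uses a hypothetical branch-derived tag and assumes a POSIX tr:

    # tr -d -c "[:alnum:].-" deletes every character that is not alphanumeric,
    # '.' or '-', so whatever comes out of the branch name is a valid Docker tag.
    CONTAINER_TAG="ubuntu_24.04-feature/foo"            # hypothetical value
    echo "${CONTAINER_TAG}" | tr -d -c "[:alnum:].-"    # prints: ubuntu24.04-featurefoo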
- - name: Get Architecture - id: get-arch - if: matrix.use_avx2 - run: | - export ARCH=$(cc -march=native -### -E - < /dev/null 2>&1 | sed -ne 's/.*cc1 .*-march=\([^ "]*\)[ "].*/\1/p') - echo "Architecture: $ARCH" - echo "arch=$ARCH" >> $GITHUB_OUTPUT - - # cache the .ccache directory - # key it on the runner os, build type, deps, and arch - # It's especially important to include arch in the key because we - # may get runtime errors with -mavx2 from objects built on a - # different architecture. - - name: Restore build cache - id: restore-cache - uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 - with: - path: ${{ github.workspace }}/.ccache - key: ${{ matrix.id }}-${{ steps.get-arch.outputs.arch }}-${{ github.ref_name }}-${{ github.run_id }} - restore-keys: | - ${{ matrix.id }}-${{ steps.get-arch.outputs.arch }}-${{ github.ref_name }} - ${{ matrix.id }}-${{ steps.get-arch.outputs.arch }} - - - name: Prepare ccache - run: | - mkdir -p ${{ github.workspace }}/.ccache - chmod -R a+rw ${{ github.workspace }}/.ccache - docker run --rm \ - -v ${{ github.workspace }}/.ccache:/.ccache \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - ${CONTAINER_NAME_FULL} \ - sh -c "ccache -M 1G && ccache -sp && ccache -z" - - # FIXME the default BUILD_CMD here isn't working...we get an error - # about the quotes not matching. - - name: Build - env: - TRAVIS: yes - TRAVIS_BRANCH: ${{ matrix.travis_branch }} - BUILD_NAME: ${{ matrix.travis_branch }} - run: | - if test -f ".github/workflows/${{ matrix.id }}/${{ matrix.build_script }}"; then - BUILD_CMD="$(pwd)/.github/workflows/${{ matrix.id }}/${{ matrix.build_script }}" - else - BUILD_CMD="sh -c 'cmake .. && make -j$(nproc)'" - fi - - # For cache - mkdir -p .gdal - - mkdir -p build-${{ matrix.id }} - docker run --name gdal-build \ - --rm \ - -e CI \ - -e GITHUB_WORKFLOW \ - -e TRAVIS \ - -e TRAVIS_BRANCH \ - -e BUILD_NAME \ - -e "GDAL_SOURCE_DIR=$(pwd)" \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - -v $(pwd)/.gdal:/.gdal:rw \ - -v $(pwd):$(pwd):rw \ - -v ${{ github.workspace }}/.ccache:/.ccache:rw \ - --workdir $(pwd)/build-${{ matrix.id }} \ - ${CONTAINER_NAME_FULL} \ - ${BUILD_CMD} - - - name: Summarize ccache - run: | - docker run --rm \ - -v ${{ github.workspace }}/.ccache:/.ccache \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - ${CONTAINER_NAME_FULL} \ - ccache -s - - - name: Save build cache - uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 - with: - path: ${{ github.workspace }}/.ccache - key: ${{ steps.restore-cache.outputs.cache-primary-key }} - - - name: Start test services - if: matrix.before_test_script - run: | - .github/workflows/${{ matrix.id }}/${{ matrix.before_test_script }} - - # --security-opt seccomp=unconfined, so that the userfaultfd syscall is available - - name: Run tests - env: - TRAVIS: yes - TRAVIS_BRANCH: ${{ matrix.travis_branch }} - BUILD_NAME: ${{ matrix.travis_branch }} - run: | - if test -f ".github/workflows/${{ matrix.id }}/${{ matrix.test_script }}"; then - TEST_CMD="$(pwd)/.github/workflows/${{ matrix.id }}/${{ matrix.test_script }}" - else - TEST_CMD="ctest -V -j $(nproc)" - fi - - if test "${{ matrix.id }}" = "benchmarks"; then - if test -f /sys/devices/system/cpu/intel_pstate/no_turbo; then - echo "Disable TurboBoost" - echo 1 | sudo tee /sys/devices/system/cpu/intel_pstate/no_turbo - fi - fi - - # For cache - mkdir -p .gdal - - docker run \ - -e CI \ - -e GITHUB_WORKFLOW \ - -e TRAVIS \ - -e TRAVIS_BRANCH \ - -e BUILD_NAME \ - -e "GDAL_SOURCE_DIR=$(pwd)" \ - -u
$(id -u ${USER}):$(id -g ${USER}) \ - --security-opt seccomp=unconfined \ - --add-host=host.docker.internal:host-gateway \ - --rm \ - -v $(pwd)/.gdal:/.gdal \ - -v $(pwd):$(pwd) \ - --workdir $(pwd)/build-${{ matrix.id }} \ - ${CONTAINER_NAME_FULL} \ - ${TEST_CMD} - - - name: Coveralls - uses: coverallsapp/github-action@cfd0633edbd2411b532b808ba7a8b5e04f76d2c8 # v2.3.4 - if: ${{ matrix.id == 'coverage' }} - with: - format: lcov - file: build-coverage/gdal_filtered.info - - - name: Push build environment - if: github.event_name == 'push' - continue-on-error: true - env: - DOCKER_BUILDKIT: 1 - run: | - docker push ${CONTAINER_NAME_FULL} - - - name: Upload coverage artifacts - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 - if: ${{ matrix.id == 'coverage' }} - with: - name: coverage_index.html - path: build-${{ matrix.id }}/coverage_html/index.html - - - name: Upload coverage artifacts - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 - if: ${{ matrix.id == 'coverage' }} - with: - name: HTML - path: build-${{ matrix.id }}/coverage_html/* - - - name: Deploy ssh key (for coverage) - if: ${{ matrix.id == 'coverage' && github.event_name == 'push' && github.ref_name == 'master' && github.repository == 'OSGeo/gdal' }} - shell: bash -l {0} - run: | - mkdir $HOME/.ssh && echo "${{ secrets.GDAL_TEST_COVERAGE_RESULTS_SSH_KEY }}" > $HOME/.ssh/id_rsa - chmod 700 $HOME/.ssh && chmod 600 $HOME/.ssh/id_rsa - ssh-keyscan -t rsa github.com >> $HOME/.ssh/known_hosts - eval `ssh-agent -s` - ssh-add $HOME/.ssh/id_rsa - - - name: Deploy to https://github.com/OSGeo/gdal-test-coverage-results - if: ${{ matrix.id == 'coverage' && github.event_name == 'push' && github.ref_name == 'master' && github.repository == 'OSGeo/gdal' }} - shell: bash -l {0} - run: | - set -x - set -e - mkdir -p output_html/coverage_html - cp -r $GITHUB_WORKSPACE/build-${{ matrix.id }}/coverage_html/* output_html/coverage_html/ - cd output_html - git init - git config user.email "gdal-test-coverage-results-bot@example.com" - git config user.name "GDAL test coverage results bot" - git remote add origin git@github.com:gdalautotest-coverage-results/gdalautotest-coverage-results.github.io - git remote -v - echo "Results of coverage of GDAL autotest See https://gdalautotest-coverage-results.github.io/coverage_html/index.html" > README.md - git add -A - git commit -m "Update with OSGeo/gdal commit $GITHUB_SHA" - git push -f origin master diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml deleted file mode 100644 index 31f680caaa15..000000000000 --- a/.github/workflows/macos.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: MacOS build - -on: - push: - paths-ignore: - - 'doc/**' - - 'docker/**' - branches-ignore: - - 'backport**' - - 'dependabot**' - pull_request: - paths-ignore: - - 'doc/**' - - 'docker/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read # to fetch code (actions/checkout) - -jobs: - - macos_build: - # Arm64 - runs-on: macos-14 - steps: - - - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - - - uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3.0.4 - with: - channels: conda-forge - auto-update-conda: true - - - name: Install Requirements - shell: bash -l {0} - run: | - source ./ci/travis/osx/before_install.sh - - - name: Build - shell: bash -l {0} - run: | - source ./ci/travis/osx/install.sh - - - name: 
Run tests - shell: bash -l {0} - run: | - source ./ci/travis/osx/script.sh - env: - # Emulate a few Travis-CI env variable so that some tests get properly skipped - TRAVIS: true - TRAVIS_OS_NAME: osx - BUILD_NAME: macos_build_conda diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml deleted file mode 100644 index b1342e321fab..000000000000 --- a/.github/workflows/scorecard.yml +++ /dev/null @@ -1,76 +0,0 @@ -# This workflow uses actions that are not certified by GitHub. They are provided -# by a third-party and are governed by separate terms of service, privacy -# policy, and support documentation. - -name: Scorecard supply-chain security -on: - # For Branch-Protection check. Only the default branch is supported. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection - branch_protection_rule: - # To guarantee Maintained check is occasionally updated. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained - schedule: - - cron: '28 23 * * 1' - push: - branches: [ "master" ] - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -# Declare default permissions as read only. -permissions: read-all - -jobs: - analysis: - name: Scorecard analysis - runs-on: ubuntu-latest - permissions: - # Needed to upload the results to code-scanning dashboard. - security-events: write - # Needed to publish results and get a badge (see publish_results below). - id-token: write - # Uncomment the permissions below if installing in a private repository. - # contents: read - # actions: read - - steps: - - name: "Checkout code" - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - with: - persist-credentials: false - - - name: "Run analysis" - uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 - with: - results_file: results.sarif - results_format: sarif - # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: - # - you want to enable the Branch-Protection check on a *public* repository, or - # - you are installing Scorecard on a *private* repository - # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. - # repo_token: ${{ secrets.SCORECARD_TOKEN }} - - # Public repositories: - # - Publish results to OpenSSF REST API for easy access by consumers - # - Allows the repository to include the Scorecard badge. - # - See https://github.com/ossf/scorecard-action#publishing-results. - # For private repositories: - # - `publish_results` will always be set to `false`, regardless - # of the value entered here. - publish_results: true - - # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF - # format to the repository Actions tab. - - name: "Upload artifact" - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 - with: - name: SARIF file - path: results.sarif - retention-days: 5 - - # Upload the results to GitHub's code scanning dashboard. - - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 - with: - sarif_file: results.sarif diff --git a/.github/workflows/slow_tests.yml b/.github/workflows/slow_tests.yml deleted file mode 100644 index e723f8aa9f23..000000000000 --- a/.github/workflows/slow_tests.yml +++ /dev/null @@ -1,187 +0,0 @@ -name: Slow tests - -# Controls when the action will run. 
-on: - # Run this action on a schedule - schedule: - - cron: '0 3 * * *' # Everyday at 03:00 UTC - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - - slow_tests: - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - include: - - id: slow_tests - container: ubuntu_22.04 - build_script: build.sh - test_script: test.sh - - # Store the components of the container name as environment variables: - # ${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER}/${CONTAINER_NAME} - env: - CONTAINER_REGISTRY: ${{ vars.gdal_container_registry || 'ghcr.io' }} - CONTAINER_REGISTRY_USER: ${{ vars.gdal_container_registry_user || github.repository_owner }} - CONTAINER_NAME: gdal-deps - CONTAINER_TAG: ${{ matrix.container }}-${{ github.base_ref || github.ref_name }} - GDAL_SOURCE_DIR: /gdal # Directory to which workspace (source root) will be mounted inside container - - defaults: - run: - # bash is needed to use ${CONTAINER_REGISTRY_USER,,}, which forces the - # username to lower-case as required by docker. - shell: bash - - steps: - - name: Checkout - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - - - name: Login to GHCR - if: env.CONTAINER_REGISTRY == 'ghcr.io' - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin - - - name: Set variables - shell: bash - run: | - CONTAINER_TAG_CLEAN=$(echo ${CONTAINER_TAG} | tr -d -c "[:alnum:].-") - echo "CONTAINER_TAG_CLEAN=${CONTAINER_TAG_CLEAN}" - echo "CONTAINER_TAG_CLEAN=${CONTAINER_TAG_CLEAN}" >> ${GITHUB_ENV} - CACHE_CONTAINER_TAG_CLEAN=$(echo ${CACHE_CONTAINER_TAG} | tr -d -c "[:alnum:].-") - echo "CACHE_CONTAINER_TAG_CLEAN=${CACHE_CONTAINER_TAG_CLEAN}" - echo "CACHE_CONTAINER_TAG_CLEAN=${CACHE_CONTAINER_TAG_CLEAN}" >> ${GITHUB_ENV} - echo "CONTAINER_NAME_FULL=${CONTAINER_REGISTRY}/${CONTAINER_REGISTRY_USER,,}/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN}" >>${GITHUB_ENV} - - # Pull build environment in forks or pull requests, unless [skip cache] is included in the commit message - - name: Pull build environment - if: "(github.repository_owner != 'OSGeo' || github.event_name == 'pull_request') && !contains(github.event.head_commit.message, '[skip cache]')" - run: | - docker pull ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN} || true - docker pull ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CACHE_CONTAINER_TAG_CLEAN} || true - docker pull ${CONTAINER_NAME_FULL} || true - echo "DOCKER_BUILD_CACHE_FROM=--cache-from ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CONTAINER_TAG_CLEAN} --cache-from ${CONTAINER_REGISTRY}/osgeo/${CONTAINER_NAME}:${CACHE_CONTAINER_TAG_CLEAN} --cache-from ${CONTAINER_NAME_FULL}" >>${GITHUB_ENV} - - - name: Prepare build context - run: | - mkdir docker-build-context - cp autotest/requirements.txt docker-build-context - - - name: Update build environment - env: - DOCKER_BUILDKIT: 1 - run: | - # FIXME: for some reason, the fedora rawhide container pushed by - # master job is corrupted (looks like it contains an outdated layer - # symlinking libssl.so.3 to an older version of the actual file), - # once it is pushed. But in the job that generates it, - # compilation & tests work fine. 
It looks like some weird caching - # issue - if test "${{ matrix.container }}" = "fedora_rawhide"; then - DOCKER_BUILD_CACHE_FROM="" - else - BUILD_ARG_INLINE_CACHE="--build-arg BUILDKIT_INLINE_CACHE=1" - fi - docker build \ - ${BUILD_ARG_INLINE_CACHE} \ - ${DOCKER_BUILD_CACHE_FROM} \ - -t ${CONTAINER_NAME_FULL} \ - -f .github/workflows/${{ matrix.container }}/Dockerfile.ci \ - docker-build-context - - # cache the .ccache directory - # key it on the runner os, build type, deps, and arch - # It's especially important to include arch in the key because we - # may get runtime errors with -mavx2 from objects built on a - # different architecture. - - name: Restore build cache - id: restore-cache - uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 - with: - path: ${{ github.workspace }}/.ccache - key: ${{ matrix.id }}-${{ steps.get-arch.outputs.arch }}-${{ github.ref_name }}-${{ github.run_id }} - restore-keys: | - ${{ matrix.id }}-${{ steps.get-arch.outputs.arch }}-${{ github.ref_name }} - ${{ matrix.id }}-${{ steps.get-arch.outputs.arch }} - - - name: Prepare ccache - run: | - mkdir -p ${{ github.workspace }}/.ccache - chmod -R a+rw ${{ github.workspace }}/.ccache - docker run --rm \ - -v ${{ github.workspace }}/.ccache:/.ccache \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - ${CONTAINER_NAME_FULL} \ - sh -c "ccache -M 1G && ccache -sp && ccache -z" - - - name: Build - run: | - mkdir -p build-${{ matrix.id }} - docker run --name gdal-build \ - --rm \ - -e "GDAL_SOURCE_DIR=$(pwd)" \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - -v $(pwd):$(pwd):rw \ - -v ${{ github.workspace }}/.ccache:/.ccache:rw \ - --workdir $(pwd)/build-${{ matrix.id }} \ - ${CONTAINER_NAME_FULL} \ - $(pwd)/.github/workflows/${{ matrix.id }}/${{ matrix.build_script }} - - - name: Summarize ccache - run: | - docker run --rm \ - -v ${{ github.workspace }}/.ccache:/.ccache \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - ${CONTAINER_NAME_FULL} \ - ccache -s - - - name: Save build cache - uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 - with: - path: ${{ github.workspace }}/.ccache - key: ${{ steps.restore-cache.outputs.cache-primary-key }} - - - name: Push build environment - # if: github.event_name == 'push' - continue-on-error: true - env: - DOCKER_BUILDKIT: 1 - run: | - docker push ${CONTAINER_NAME_FULL} - - - name: Run tests - env: - TRAVIS: yes - TRAVIS_BRANCH: ${{ matrix.travis_branch }} - run: | - TEST_CMD="$(pwd)/.github/workflows/${{ matrix.id }}/${{ matrix.test_script }}" - - # For cache - mkdir .gdal - - docker run \ - -e CI \ - -e GITHUB_WORKFLOW \ - -e TRAVIS \ - -e TRAVIS_BRANCH \ - -e "GDAL_SOURCE_DIR=$(pwd)" \ - -u $(id -u ${USER}):$(id -g ${USER}) \ - --security-opt seccomp=unconfined \ - --add-host=host.docker.internal:host-gateway \ - --rm \ - -v $(pwd)/.gdal:/.gdal \ - -v $(pwd):$(pwd) \ - --workdir $(pwd)/build-${{ matrix.id }} \ - ${CONTAINER_NAME_FULL} \ - ${TEST_CMD} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index 6fd9616bba74..000000000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: 👓 Handle stale issues -on: - schedule: - - cron: "30 2 * * *" - -permissions: - contents: read - -jobs: - stale: - permissions: - # issues: write # for actions/stale to close stale issues - pull-requests: write # for actions/stale to close stale PRs - if: github.repository_owner == 'OSGeo' - runs-on: ubuntu-latest - steps: - - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0 - 
with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - stale-pr-message: > - The GDAL project highly values your contribution and would love to see - this work merged! - Unfortunately this PR has not had any activity in the last 28 days and - is being automatically marked as "stale". - If you think this pull request should be merged, please check - - - that all unit tests are passing - - - that all comments by reviewers have been addressed - - - that there is enough information for reviewers, in particular link - to any issues which this pull request fixes - - - that you have written unit tests where possible - - In case you should have any uncertainty, please leave a comment and we will - be happy to help you proceed with this pull request. - - If there is no further activity on this pull request, it will be closed in 2 - weeks. - - - close-pr-message: > - While we hate to see this happen, this PR has been automatically closed because - it has not had any activity in the last 6 weeks. If this pull request should be - reconsidered, please follow the guidelines in the previous comment and reopen - this pull request. Or, if you have any further questions, just ask! We love to - help, and if there's anything the GDAL project can do to help push this PR forward - please let us know how we can assist. - - - stale-pr-label: 'stale' - days-before-pr-stale: 28 - days-before-pr-close: 14 - operations-per-run: 1000 diff --git a/.github/workflows/windows_build.yml b/.github/workflows/windows_build.yml deleted file mode 100644 index 91146477d8be..000000000000 --- a/.github/workflows/windows_build.yml +++ /dev/null @@ -1,206 +0,0 @@ -name: Windows builds - -on: - push: - paths-ignore: - - 'doc/**' - - 'docker/**' - branches-ignore: - - 'backport**' - - 'dependabot**' - pull_request: - paths-ignore: - - 'doc/**' - - 'docker/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} - cancel-in-progress: true - -permissions: - contents: read - -jobs: - - windows_build: - runs-on: windows-2022 - - strategy: - matrix: - include: - - VS_VERSION: Visual Studio 17 - VS_VER: 2022 - SDK: release-1930 - platform: x86 - PROJ_BRANCH: "9.3" - LIBZSTD_URL: "https://github.com/facebook/zstd/releases/download/v1.4.5/zstd-v1.4.5-win32.zip" - LIBDEFLATE_URL: "https://github.com/ebiggers/libdeflate/releases/download/v1.6/libdeflate-1.6-windows-i686-bin.zip" - SWIG_URL: "https://freefr.dl.sourceforge.net/project/swig/swigwin/swigwin-4.1.1/swigwin-4.1.1.zip" - - env: - VS_VERSION: ${{ matrix.VS_VERSION }} - VS_VER: ${{ matrix.VS_VER }} - SDK: ${{ matrix.SDK }} - platform: ${{ matrix.platform }} - PROJ_BRANCH: "${{ matrix.PROJ_BRANCH }}" - LIBZSTD_URL: "${{ matrix.LIBZSTD_URL }}" - LIBDEFLATE_URL: "${{ matrix.LIBDEFLATE_URL }}" - SWIG_URL: "${{ matrix.SWIG_URL }}" - APPVEYOR: true # to skip some tests - PYTHON_VERSION: "3.10.5" - - steps: - - # To avoid git clone to mess with the line endings of GDAL autotest data - # files that look like text, but should be handled as binary content - - name: Set git core.autocrlf to false - run: | - git config --global core.autocrlf false - - - name: Checkout - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - - - name: Set environment - shell: pwsh - run: | - $env:PATH="C:\hostedtoolcache\windows\Python\$env:PYTHON_VERSION\$env:platform;C:\hostedtoolcache\windows\Python\$env:PYTHON_VERSION\$env:platform\Scripts;$env:PATH" - if($env:platform -eq "x86") - { - $env:ARCHITECTURE="x86" - $env:WIN64_ARG="" - $env:CMAKE_ARCHITECTURE="Win32" - } 
- else - { - $env:ARCHITECTURE="amd64" - $env:WIN64_ARG="WIN64=YES" - $env:CMAKE_ARCHITECTURE="x64" - } - echo "PATH=$env:PATH" >> $env:GITHUB_ENV - echo "ARCHITECTURE=$env:ARCHITECTURE" >> $env:GITHUB_ENV - echo "WIN64_ARG=$env:WIN64_ARG" >> $env:GITHUB_ENV - echo "CMAKE_ARCHITECTURE=$env:CMAKE_ARCHITECTURE" >> $env:GITHUB_ENV - - - name: Set compiler environment - shell: cmd - run: | - if "%VS_VER%" == "2022" CALL "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\VsDevCmd.bat" -arch=%ARCHITECTURE% - echo PATH=%PATH%>> %GITHUB_ENV% - echo INCLUDE=%INCLUDE%>> %GITHUB_ENV% - echo LIB=%LIB%>> %GITHUB_ENV% - echo LIBPATH=%LIBPATH%>> %GITHUB_ENV% - - - name: Remove conflicting libraries - shell: bash -l {0} - run: | - rm -rf C:/Strawberry || /bin/true - - - name: Detect AVX2 - shell: bash - run: | - set -e - cl .github/workflows/detect-avx2.c - if ./detect-avx2.exe; then - echo "AVX2 available on CPU" - echo "ARCH_FLAGS=/arch:AVX2" >> $GITHUB_ENV - else - echo "AVX2 not available on CPU." - echo "ARCH_FLAGS=" >> $GITHUB_ENV - fi - - - name: Build - shell: pwsh - run: | - $ErrorActionPreference = 'continue' - function exec - { - param ( [ScriptBlock] $ScriptBlock ) - & $ScriptBlock 2>&1 | ForEach-Object -Process { "$_" } - if ($LastExitCode -ne 0) { exit $LastExitCode } - } - echo "ARCH_FLAGS = $env:ARCH_FLAGS" - exec { git clone --depth=1 -b $env:PROJ_BRANCH https://github.com/OSGeo/PROJ proj } - Import-PackageProvider NuGet -Force - Set-PSRepository -Name PSGallery -InstallationPolicy Trusted - Install-Module Pscx -AllowClobber - Install-Module VSSetup -Scope CurrentUser - exec { pip install numpy } - $env:SDK_ZIP="$env:SDK"+"-dev.zip" - $env:SDK_URL="http://download.gisinternals.com/sdk/downloads/$env:SDK_ZIP" - if(-Not (Test-Path -Path downloads)) { mkdir downloads } - cd downloads - if(-Not (Test-Path -Path $env:SDK_ZIP )) { Invoke-WebRequest "$env:SDK_URL" -OutFile "$env:SDK_ZIP" } - $env:LIBZSTD_ZIP="libzstd.zip" - if(-Not (Test-Path -Path $env:LIBZSTD_ZIP -PathType Leaf)) { Invoke-WebRequest "$env:LIBZSTD_URL" -OutFile "$env:LIBZSTD_ZIP" } - $env:SWIG_ZIP="swigwin-4.1.1.zip" - if(-Not (Test-Path -Path $env:SWIG_ZIP -PathType Leaf)) { Invoke-WebRequest "$env:SWIG_URL" -OutFile "$env:SWIG_ZIP" } - # $env:LIBDEFLATE_ZIP="libdeflate.zip" - # if(-Not (Test-Path -Path $env:LIBDEFLATE_ZIP -PathType Leaf)) { Invoke-WebRequest "$env:LIBDEFLATE_URL" -OutFile "$env:LIBDEFLATE_ZIP" } - cd .. - mkdir sdk - cd sdk - exec { 7z x ..\downloads\$env:SDK_ZIP } - cd $env:SDK - #exec { 7z x ..\..\downloads\$env:LIBZSTD_ZIP } - exec { 7z x ..\..\downloads\$env:SWIG_ZIP } - # exec { 7z x -y ..\..\downloads\$env:LIBDEFLATE_ZIP } - cd .. - $env:SDK_PREFIX="$env:GITHUB_WORKSPACE\sdk\$env:SDK" - $env:SDK_LIB="$env:SDK_PREFIX\lib" - cd $env:GITHUB_WORKSPACE - cd proj - mkdir build - cd build - $env:VCPKG_PLATFORM="$env:platform"+"-windows" - exec { vcpkg install sqlite3:$env:VCPKG_PLATFORM } - Invoke-WebRequest "https://sqlite.org/2018/sqlite-tools-win32-x86-3250100.zip" -OutFile "sqlite-tools-win32-x86-3250100.zip" - 7z x sqlite-tools-win32-x86-3250100.zip - copy sqlite-tools-win32-x86-3250100/sqlite3.exe $env:GITHUB_WORKSPACE - $env:PATH="$env:GITHUB_WORKSPACE;$env:PATH" - $env:PROJ_INSTALL_DIR="$env:GITHUB_WORKSPACE"+"\install-proj" - $env:CMAKE_INSTALL_PREFIX="-DCMAKE_INSTALL_PREFIX=" + $env:PROJ_INSTALL_DIR - cmake -G $env:VS_VERSION -A $env:CMAKE_ARCHITECTURE .. 
$env:CMAKE_INSTALL_PREFIX -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=Release -DBUILD_LIBPROJ_SHARED=ON -DCMAKE_TOOLCHAIN_FILE=c:/vcpkg/scripts/buildsystems/vcpkg.cmake -DBUILD_SHARED_LIBS=ON -DENABLE_CURL=OFF -DENABLE_TIFF=OFF -DBUILD_PROJSYNC=OFF - exec { cmake --build . --config Release --target install } - # - cd $env:GITHUB_WORKSPACE - mkdir build - cd build - $env:SDK_BIN="$env:SDK_PREFIX\bin" - $env:PATH="$env:SDK_BIN;$env:SDK\swigwin-4.1.1;$env:PATH" - $env:GDAL_INSTALL_DIR="$env:GITHUB_WORKSPACE"+"\install-gdal" - $env:CMAKE_INSTALL_PREFIX="-DCMAKE_INSTALL_PREFIX=" + $env:GDAL_INSTALL_DIR - $env:PROJ_ROOT="-DPROJ_ROOT=" + $env:PROJ_INSTALL_DIR - $env:CMAKE_PREFIX_PATH="-DCMAKE_PREFIX_PATH=" + $env:SDK_PREFIX - $env:MYSQL_LIBRARY="-DMYSQL_LIBRARY=" + $env:SDK_LIB + "\libmysql.lib" - $env:POPPLER_EXTRA_LIBRARIES="-DPOPPLER_EXTRA_LIBRARIES=" + $env:SDK_LIB + "\freetype.lib;" + $env:SDK_LIB + "\harfbuzz.lib" - # We disable Spatialite and external geotiff as they link to gisinternals proj9 and we have the following linking error - # referring to a file from the gisinternals build machine... - # LINK : fatal error LNK1181: cannot open input file 'E:\buildsystem\release-1930\lib\proj9.lib' [D:\a\gdal\gdal\build\GDAL.vcxproj] - # Disabling HDF4 (and netCDF) for similar reason: LINK : fatal error LNK1181: cannot open input file 'E:\buildsystem\release-1930\lib\mfhdf.lib' - # Disabling KEA because of "keadataset.obj : error LNK2019: unresolved external symbol "public: static class H5::FileAccPropList const & const H5::FileAccPropList::DEFAULT" - cmake -G $env:VS_VERSION -A $env:CMAKE_ARCHITECTURE .. $env:CMAKE_INSTALL_PREFIX -DCMAKE_BUILD_TYPE=Release $env:CMAKE_PREFIX_PATH -DCMAKE_C_FLAGS=" /WX $env:ARCH_FLAGS" -DCMAKE_CXX_FLAGS=" /WX $env:ARCH_FLAGS" -DGDAL_USE_DEFLATE=OFF $env:PROJ_ROOT $env:MYSQL_LIBRARY $env:POPPLER_EXTRA_LIBRARIES -DGDAL_USE_ZLIB_INTERNAL=ON -DECW_INTERFACE_COMPILE_DEFINITIONS="_MBCS;_UNICODE;UNICODE;_WINDOWS;LIBECWJ2;WIN32;_WINDLL;NO_X86_MMI" -DBUILD_CSHARP_BINDINGS=OFF -DBUILD_JAVA_BINDINGS=OFF -DOGR_ENABLE_DRIVER_LIBKML=OFF -DGDAL_USE_SPATIALITE=OFF -DGDAL_USE_GEOTIFF_INTERNAL=ON -DGDAL_USE_HDF4=OFF -DGDAL_USE_NETCDF=OFF -DGDAL_USE_WEBP=OFF -DGDAL_USE_KEA=OFF - exec { cmake --build . --config Release --target install } - - - name: Run tests - shell: pwsh - run: | - $ErrorActionPreference = 'continue' - function exec - { - param ( [ScriptBlock] $ScriptBlock ) - & $ScriptBlock 2>&1 | ForEach-Object -Process { "$_" } - if ($LastExitCode -ne 0) { exit $LastExitCode } - } - $env:SDK_PREFIX="$env:GITHUB_WORKSPACE\sdk\$env:SDK" - $env:SDK_BIN="$env:SDK_PREFIX\bin" - $env:PATH="$env:GITHUB_WORKSPACE\build\Release;$env:GITHUB_WORKSPACE\build\apps\Release;$env:GITHUB_WORKSPACE\install-proj\bin;$env:SDK_BIN;$env:PATH" - $env:GDAL_DATA="$env:GITHUB_WORKSPACE\data" - $env:DO_NOT_FAIL_ON_RECODE_ERRORS="YES" - $env:MDB_ODBC_DRIVER_INSTALLED="YES" - # The ca-bundle.crt file which we could point to is invalid in the current SDK - # See https://github.com/gisinternals/buildsystem/issues/104 - $env:GDAL_HTTP_UNSAFESSL="YES" - cd $env:GITHUB_WORKSPACE\autotest - python3 -m pip install -Ur requirements.txt - # For some reason I can't debug remotely, gdal.dll can't be loaded. Probably something missing in the path - # exec { ctest --test-dir $env:GITHUB_WORKSPACE\build -C Release -V -j 3 -R autotest }
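A closing note on the exec helper used in the PowerShell steps above: PowerShell does not abort a script when a native command exits with a non-zero code, hence the explicit $LastExitCode check. The Linux jobs rely on the shell for the same behaviour; a minimal bash sketch, where the cmake call is only a stand-in for any native command:

    # Abort on the first failing command, mirroring what the exec wrapper does
    # for native commands in the PowerShell steps above.
    set -e
    cmake --build . --config Release --target install
    echo "install step succeeded"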