diff --git a/.github/ISSUE_TEMPLATE/~release-checklist.md b/.github/ISSUE_TEMPLATE/~release-checklist.md index b2510ba3f6..d10f2fbd25 100644 --- a/.github/ISSUE_TEMPLATE/~release-checklist.md +++ b/.github/ISSUE_TEMPLATE/~release-checklist.md @@ -13,7 +13,6 @@ about: Checklist for core developers to complete as part of making a release * [ ] Verify that the release notes files correctly summarize all development changes since the last release. * [ ] Add any new use citations or published statistical models to the [Use and Citations page][citations_page]. * [ ] Verify that the citations on the [Use and Citations page][citations_page] are up to date with their current [INSPIRE](https://inspirehep.net/) record. Checking the [Dimensions listing of publication citations](https://app.dimensions.ai/discover/publication?or_subset_publication_citations=pub.1135154020) can be helpful to catch citations that are now journal publications. -* [ ] Update the ``codemeta.json`` file in the release PR if its requirements have updated. * [ ] Update the [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) GitHub Action used for deployment to TestPyPI and PyPI to the latest stable release. * [ ] Make a release to [TestPyPI][TestPyPI_pyhf] using the [workflow dispatch event trigger](https://github.com/scikit-hep/pyhf/actions/workflows/publish-package.yml). * [ ] Verify that the project README is displaying correctly on [TestPyPI][TestPyPI_pyhf]. diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000000..9d1e0987bf --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,5 @@ +changelog: + exclude: + authors: + - dependabot + - pre-commit-ci diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml index 8083fa8a43..e574a2ab34 100644 --- a/.github/workflows/bump-version.yml +++ b/.github/workflows/bump-version.yml @@ -12,11 +12,9 @@ on: - minor - major release_candidate: - type: choice - description: 'Release candidate?' - options: - - false - - true + type: boolean + description: 'Release candidate' + default: false new_version: description: 'New version to bump to' required: true @@ -27,20 +25,21 @@ on: required: true type: string force: - type: choice - description: 'Force override check?' - options: - - false - - true + type: boolean + description: 'Force override check' + default: false dry_run: - type: choice - description: 'Perform a dry run to check?' 
- options: - - true - - false + type: boolean + description: 'Perform a dry run to check' + default: true + +permissions: + contents: read jobs: bump-version: + permissions: + contents: write # for Git to git push runs-on: ubuntu-latest if: github.repository == 'scikit-hep/pyhf' @@ -204,8 +203,8 @@ jobs: - name: Install Python dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip install tbump + python -m pip install uv + uv pip install --system tbump python -m pip list - name: Setup Git user to push new tag diff --git a/.github/workflows/ci-windows.yml b/.github/workflows/ci-windows.yml index c9f3e8e68b..b625754baf 100644 --- a/.github/workflows/ci-windows.yml +++ b/.github/workflows/ci-windows.yml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: test: @@ -29,8 +32,8 @@ jobs: - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip install --upgrade '.[all,test]' + python -m pip install uv + uv pip install --system --upgrade '.[all,test]' - name: List installed Python packages run: python -m pip list diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 65d7e656dd..28a449ab63 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,6 +15,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: test: @@ -31,8 +34,8 @@ jobs: include: - os: macos-latest python-version: '3.12' - # Apple silicon runner - - os: macos-14 + # Intel runner + - os: macos-13 python-version: '3.12' steps: @@ -44,8 +47,16 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies + if: matrix.python-version != '3.8' + run: | + python -m pip install uv + uv pip install --system --upgrade ".[all,test]" + + # c.f. https://github.com/astral-sh/uv/issues/2062 + - name: Install dependencies (Python 3.8) + if: matrix.python-version == '3.8' run: | - python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade pip python -m pip install --upgrade ".[all,test]" - name: List installed Python packages @@ -96,11 +107,11 @@ jobs: - name: Test docstring examples with doctest # TODO: Don't currently try to match amd64 and arm64 floating point for docs, but will in the future. 
- if: matrix.python-version == '3.12' && matrix.os != 'macos-14' + if: matrix.python-version == '3.12' && matrix.os != 'macos-latest' run: coverage run --data-file=.coverage-doctest --module pytest src/ README.rst - name: Coverage report for doctest only - if: matrix.python-version == '3.12' && matrix.os != 'macos-14' + if: matrix.python-version == '3.12' && matrix.os != 'macos-latest' run: | coverage report --data-file=.coverage-doctest coverage xml --data-file=.coverage-doctest -o doctest-coverage.xml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 7d680f58d3..507b0c7808 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -15,8 +15,15 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: analyze: + permissions: + actions: read # for github/codeql-action/init to get workflow details + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/analyze to upload SARIF results name: Analyze runs-on: ubuntu-latest diff --git a/.github/workflows/dependencies-head.yml b/.github/workflows/dependencies-head.yml index 1e4651251f..c714392d81 100644 --- a/.github/workflows/dependencies-head.yml +++ b/.github/workflows/dependencies-head.yml @@ -10,13 +10,16 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: release-candidates: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, macos-latest] + os: [ubuntu-latest, macos-latest, macos-13] python-version: ['3.12'] steps: @@ -29,8 +32,8 @@ jobs: - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir --quiet install --upgrade --pre ".[all,test]" + python -m pip install uv + uv pip --no-cache --quiet install --system --upgrade --pre ".[all,test]" python -m pip list - name: List release candidates, alpha, and beta releases @@ -57,14 +60,13 @@ jobs: with: python-version: ${{ matrix.python-version }} - # Use nightly SciPy wheels from Anaconda's PyPI - # c.f. 
https://twitter.com/ralfgommers/status/1419917265781334025 - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]" - python -m pip uninstall --yes scipy - python -m pip install --upgrade --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy + python -m pip install uv + uv pip --no-cache --quiet install --system --upgrade ".[all,test]" + uv pip uninstall --system scipy + # uv wants to upgrade dependencies (numpy) to a dev release too, so don't --upgrade + uv pip install --system --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy python -m pip list - name: Test with pytest @@ -87,11 +89,11 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]" - python -m pip uninstall --yes iminuit - python -m pip install --upgrade cython - python -m pip install --upgrade git+https://github.com/scikit-hep/iminuit.git + python -m pip install uv + uv pip --no-cache --quiet install --system --upgrade ".[all,test]" + uv pip uninstall --system iminuit + uv pip install --system --upgrade cython + uv pip install --system --upgrade git+https://github.com/scikit-hep/iminuit.git python -m pip list - name: Test with pytest run: | @@ -113,10 +115,10 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]" - python -m pip uninstall --yes uproot - python -m pip install --upgrade git+https://github.com/scikit-hep/uproot5.git + python -m pip install uv + uv pip --no-cache --quiet install --system --upgrade ".[all,test]" + uv pip uninstall --system uproot + uv pip install --system --upgrade git+https://github.com/scikit-hep/uproot5.git python -m pip list - name: Test with pytest run: | @@ -140,16 +142,17 @@ jobs: - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]" - python -m pip uninstall --yes matplotlib - # Need to use --extra-index-url as dependencies aren't on scientific-python-nightly-wheels package index. + python -m pip install uv + uv pip --no-cache --quiet install --system --upgrade ".[all,test]" + uv pip uninstall --system matplotlib + # Need to use --extra-index-url as all dependencies aren't on scientific-python-nightly-wheels package index. # Need to use --pre as dev releases will need priority over stable releases. - python -m pip install \ - --upgrade \ + # Note that uv and pip differ on --extra-index-url priority + # c.f. 
https://github.com/scientific-python/upload-nightly-action/issues/76 + uv pip install --system \ --pre \ - --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ - --extra-index-url https://pypi.org/simple/ \ + --index-url https://pypi.org/simple/ \ + --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ matplotlib - name: List installed Python packages @@ -175,10 +178,10 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir --quiet install --upgrade ".[all,test]" - python -m pip uninstall --yes pytest - python -m pip install --upgrade git+https://github.com/pytest-dev/pytest.git + python -m pip install uv + uv pip --no-cache --quiet install --system --upgrade ".[all,test]" + uv pip uninstall --system pytest + uv pip install --system --upgrade git+https://github.com/pytest-dev/pytest.git python -m pip list - name: Test with pytest run: | diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 484c9400fe..eab191755d 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -19,9 +19,15 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: docker: name: Build, test, and publish Docker images to Docker Hub + permissions: + contents: read + packages: write # for docker to push to registry runs-on: ubuntu-latest steps: @@ -75,7 +81,7 @@ jobs: - name: Test build id: docker_build_test - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: . file: docker/Dockerfile @@ -113,7 +119,7 @@ jobs: # every PR will trigger a push event on main, so check the push event is actually coming from main if: github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository == 'scikit-hep/pyhf' id: docker_build_latest - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: . file: docker/Dockerfile @@ -130,7 +136,7 @@ jobs: - name: Build and publish to registry with release tag if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' id: docker_build_release - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: . 
file: docker/Dockerfile diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 155100d0ed..c7a99fda4d 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -11,6 +11,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: build: name: Build docs @@ -28,9 +31,9 @@ jobs: - name: Install Python dependencies run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --quiet install --upgrade .[docs,test] - python -m pip install yq + python -m pip install uv + uv pip --quiet install --system --upgrade ".[docs,test]" + uv pip install --system yq python -m pip list - name: Install apt-get dependencies diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index fdce2d7b28..5cc93cb35c 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,6 +8,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: lint: diff --git a/.github/workflows/lower-bound-requirements.yml b/.github/workflows/lower-bound-requirements.yml index 96b5c6a3d8..5bdd837c68 100644 --- a/.github/workflows/lower-bound-requirements.yml +++ b/.github/workflows/lower-bound-requirements.yml @@ -6,6 +6,9 @@ on: - cron: '1 0 * * *' workflow_dispatch: +permissions: + contents: read + jobs: test: @@ -26,8 +29,9 @@ jobs: - name: Install dependencies and force lowest bound run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip --no-cache-dir install --constraint tests/constraints.txt ".[all,test]" + python -m pip install uv + uv pip install --system --upgrade 'setuptools<70.0.0' + uv pip --no-cache install --system --constraint tests/constraints.txt ".[all,test]" - name: List installed Python packages run: python -m pip list diff --git a/.github/workflows/merged.yml b/.github/workflows/merged.yml index 3923caaffe..cce0e42670 100644 --- a/.github/workflows/merged.yml +++ b/.github/workflows/merged.yml @@ -5,6 +5,9 @@ on: types: [closed] workflow_dispatch: +permissions: + contents: read + jobs: binder: name: Trigger Binder build diff --git a/.github/workflows/notebooks.yml b/.github/workflows/notebooks.yml index 7ab2b048f8..52b6fb1af4 100644 --- a/.github/workflows/notebooks.yml +++ b/.github/workflows/notebooks.yml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: test: @@ -28,9 +31,9 @@ jobs: - name: Install dependencies run: | - python -m pip install --upgrade pip setuptools wheel + python -m pip install uv # FIXME: c.f. https://github.com/scikit-hep/pyhf/issues/2104 - python -m pip install --upgrade ".[all,test]" 'jupyter-client<8.0.0' + uv pip install --system --upgrade ".[all,test]" 'jupyter-client<8.0.0' - name: List installed Python packages run: python -m pip list diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index cd5d4a9ba7..ce5d0e8ffa 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -17,20 +17,25 @@ on: workflow_dispatch: inputs: publish: - type: choice - description: 'Publish to TestPyPI?' 
- options: - - false - - true + type: boolean + description: 'Publish to TestPyPI' + default: false concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: build: name: Build Python distribution runs-on: ubuntu-latest + permissions: + id-token: write + attestations: write + steps: - uses: actions/checkout@v4 with: @@ -43,19 +48,20 @@ jobs: - name: Install python-build and twine run: | - python -m pip install --upgrade pip - python -m pip install build twine + python -m pip install uv + uv pip install --system --upgrade pip + uv pip install --system build twine python -m pip list - name: Build a sdist and wheel if: github.event_name != 'schedule' run: | - python -m build . + python -m build --installer uv . - name: Build a sdist and wheel and check for warnings if: github.event_name == 'schedule' run: | - PYTHONWARNINGS=error,default::DeprecationWarning python -m build . + PYTHONWARNINGS=error,default::DeprecationWarning python -m build --installer uv . - name: Verify untagged commits have dev versions if: "!startsWith(github.ref, 'refs/tags/')" @@ -97,8 +103,18 @@ jobs: - name: List contents of wheel run: python -m zipfile --list dist/pyhf-*.whl + - name: Generate artifact attestation for sdist and wheel + # If publishing to TestPyPI or PyPI + if: >- + (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') + || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') + || (github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf') + uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3 + with: + subject-path: "dist/pyhf-*" + - name: Upload distribution artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v4.4.0 with: name: dist-artifact path: dist @@ -126,19 +142,39 @@ jobs: - name: List all files run: ls -lh dist + - name: Verify sdist artifact attestation + # If publishing to TestPyPI or PyPI + if: >- + (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') + || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') + || (github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf') + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: gh attestation verify dist/pyhf-*.tar.gz --repo ${{ github.repository }} + + - name: Verify wheel artifact attestation + # If publishing to TestPyPI or PyPI + if: >- + (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') + || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') + || (github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf') + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: gh attestation verify dist/pyhf-*.whl --repo ${{ github.repository }} + - name: Publish distribution 📦 to Test PyPI # Publish to TestPyPI on tag events of if manually triggered # Compare to 'true' string as booleans get turned into strings in the console if: >- (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pyhf') || (github.event_name == 
'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pyhf') - uses: pypa/gh-action-pypi-publish@v1.8.14 + uses: pypa/gh-action-pypi-publish@v1.10.2 with: repository-url: https://test.pypi.org/legacy/ print-hash: true - name: Publish distribution 📦 to PyPI if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pyhf' - uses: pypa/gh-action-pypi-publish@v1.8.14 + uses: pypa/gh-action-pypi-publish@v1.10.2 with: print-hash: true diff --git a/.github/workflows/release_tests.yml b/.github/workflows/release_tests.yml index b149d22d11..0569562704 100644 --- a/.github/workflows/release_tests.yml +++ b/.github/workflows/release_tests.yml @@ -10,6 +10,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +permissions: + contents: read + jobs: pypi_release: @@ -22,6 +25,10 @@ jobs: include: - os: macos-latest python-version: '3.12' + # Intel runner + - os: macos-13 + python-version: '3.12' + fail-fast: false steps: - uses: actions/checkout@v4 @@ -33,18 +40,15 @@ jobs: - name: Install from PyPI run: | - python -m pip install --upgrade pip setuptools wheel - python -m pip install --pre pyhf[backends,xmlio] - python -m pip install pytest + python -m pip install uv + uv pip install --system --pre 'pyhf[backends,xmlio]' + uv pip install --system pytest python -m pip list - name: Canary test public API run: | + # Override the ini option for filterwarnings with an empty list to disable error on filterwarnings + # as testing the latest release API still works, not the release is warning free. + # Though still show warnings by setting warning control to 'default'. + export PYTHONWARNINGS='default' pytest tests/test_public_api.py - - # FIXME: c.f. https://github.com/proycon/codemetapy/issues/24 - - name: Verify requirements in codemeta.json - run: | - python -m pip install jq "codemetapy>=2.3.0" - codemetapy --inputtype python --no-extras pyhf > codemeta_generated.json - diff <(jq -S .softwareRequirements codemeta.json) <(jq -S .softwareRequirements codemeta_generated.json) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 0000000000..4d605498e3 --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,73 @@ +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '23 3 * * 0' + push: + branches: [ "main" ] + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. 
+ # contents: read + # actions: read + + steps: + - name: "Checkout code" + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@b18b1d32f3f31abcdc29dee3f2484801fe7822f4 # v4.3.6 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard (optional). + # Commenting out will disable upload of results to your repo's Code Scanning dashboard + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@1b1aada464948af03b950897e5eb522f92603cc2 # v3.24.9 + with: + sarif_file: results.sarif diff --git a/.github/workflows/semantic-pr-check.yml b/.github/workflows/semantic-pr-check.yml index 143ec8377e..f7f0d13283 100644 --- a/.github/workflows/semantic-pr-check.yml +++ b/.github/workflows/semantic-pr-check.yml @@ -11,9 +11,15 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event.number }} cancel-in-progress: true +permissions: + contents: read + jobs: main: + permissions: + pull-requests: read # for amannn/action-semantic-pull-request to analyze PRs + statuses: write # for amannn/action-semantic-pull-request to mark status of analyzed PR name: Validate PR title runs-on: ubuntu-latest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0a69bd27a6..3c42892e73 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ ci: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-added-large-files - id: check-case-conflict @@ -35,25 +35,25 @@ repos: - id: rst-inline-touching-normal - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.3.5" + rev: "v0.5.0" hooks: - id: ruff args: ["--fix", "--show-fixes"] - repo: https://github.com/psf/black-pre-commit-mirror - rev: 24.3.0 + rev: 24.4.2 hooks: - id: black-jupyter types_or: [python, pyi, jupyter] - repo: https://github.com/adamchainz/blacken-docs - rev: 1.16.0 + rev: 1.18.0 hooks: - id: blacken-docs - additional_dependencies: [black==24.3.0] + additional_dependencies: [black==24.4.2] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.9.0 + rev: v1.10.1 # check the oldest and newest supported Pythons hooks: - &mypy @@ -68,8 +68,16 @@ repos: args: ["--python-version=3.12"] - repo: 
https://github.com/codespell-project/codespell - rev: v2.2.6 + rev: v2.3.0 hooks: - id: codespell files: ^.*\.(py|md|rst)$ args: ["-w", "-L", "hist,gaus"] + +- repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.28.6 + hooks: + - id: check-readthedocs + args: ["--verbose"] + - id: check-github-workflows + args: ["--verbose"] diff --git a/.readthedocs.yaml b/.readthedocs.yaml index bd5f4ea9cd..5bd7c51f22 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -13,6 +13,13 @@ build: apt_packages: - curl - jq + jobs: + post_create_environment: + - pip install uv + post_install: + # VIRTUAL_ENV needs to be set manually for now. + # See https://github.com/readthedocs/readthedocs.org/pull/11152/ + - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv pip install '.[docs]' # Build documentation in the docs/ directory with Sphinx sphinx: @@ -20,11 +27,3 @@ sphinx: # If using Sphinx, optionally build your docs in additional formats such as PDF and ePub formats: all - -# python -m pip install .[docs] -python: - install: - - method: pip - path: . - extra_requirements: - - docs diff --git a/codemeta.json b/codemeta.json deleted file mode 100644 index 1b9ca9c817..0000000000 --- a/codemeta.json +++ /dev/null @@ -1,127 +0,0 @@ -{ - "@context": [ - "https://doi.org/10.5063/schema/codemeta-2.0", - "https://w3id.org/software-iodata", - "https://raw.githubusercontent.com/jantman/repostatus.org/master/badges/latest/ontology.jsonld", - "https://schema.org", - "https://w3id.org/software-types" - ], - "@type": "SoftwareSourceCode", - "applicationCategory": [ - "Scientific/Engineering", - "Scientific/Engineering > Physics" - ], - "audience": { - "@type": "Audience", - "audienceType": "Science/Research" - }, - "author": [ - { - "@type": "Person", - "email": "lukas.heinrich@cern.ch", - "familyName": "Heinrich", - "identifier": "https://orcid.org/0000-0002-4048-7584", - "givenName": "Lukas" - }, - { - "@type": "Person", - "email": "matthew.feickert@cern.ch", - "familyName": "Feickert", - "identifier": "https://orcid.org/0000-0003-4124-7862", - "givenName": "Matthew" - }, - { - "@type": "Person", - "email": "gstark@cern.ch", - "familyName": "Stark", - "identifier": "https://orcid.org/0000-0001-6616-3433", - "givenName": "Giordon" - } - ], - "codeRepository": "https://github.com/scikit-hep/pyhf", - "description": "pure-Python HistFactory implementation with tensors and autodiff", - "developmentStatus": "4 - Beta", - "identifier": "pyhf", - "issueTracker": "https://github.com/scikit-hep/pyhf/issues", - "keywords": "physics fitting numpy scipy tensorflow pytorch jax", - "license": "http://spdx.org/licenses/Apache-2.0", - "name": "pyhf", - "releaseNotes": "https://pyhf.readthedocs.io/en/stable/release-notes.html", - "runtimePlatform": [ - "Python 3", - "Python 3 Only", - "Python 3.12", - "Python 3.11", - "Python 3.10", - "Python 3.8", - "Python 3.9", - "Python Implementation CPython" - ], - "softwareHelp": "https://pyhf.readthedocs.io/", - "softwareRequirements": [ - { - "@type": "SoftwareApplication", - "identifier": "click", - "name": "click", - "runtimePlatform": "Python 3", - "version": ">=8.0.0" - }, - { - "@type": "SoftwareApplication", - "identifier": "importlib-resources", - "name": "importlib-resources", - "runtimePlatform": "Python 3", - "version": ">=1.4.0" - }, - { - "@type": "SoftwareApplication", - "identifier": "jsonpatch", - "name": "jsonpatch", - "runtimePlatform": "Python 3", - "version": ">=1.15" - }, - { - "@type": "SoftwareApplication", - "identifier": "jsonschema", - "name": "jsonschema", 
- "runtimePlatform": "Python 3", - "version": ">=4.15.0" - }, - { - "@type": "SoftwareApplication", - "identifier": "numpy", - "name": "numpy", - "runtimePlatform": "Python 3" - }, - { - "@type": "SoftwareApplication", - "identifier": "pyyaml", - "name": "pyyaml", - "runtimePlatform": "Python 3", - "version": ">=5.1" - }, - { - "@type": "SoftwareApplication", - "identifier": "scipy", - "name": "scipy", - "runtimePlatform": "Python 3", - "version": ">=1.3.2" - }, - { - "@type": "SoftwareApplication", - "identifier": "tqdm", - "name": "tqdm", - "runtimePlatform": "Python 3", - "version": ">=4.56.0" - } - ], - "targetProduct": { - "@type": "CommandLineApplication", - "description": "The pyhf command line interface.", - "executableName": "pyhf", - "name": "pyhf", - "runtimePlatform": "Python 3" - }, - "url": "https://github.com/scikit-hep/pyhf", - "version": "0.7.6" -} diff --git a/docker/Dockerfile b/docker/Dockerfile index 154d279e35..6c4e62eb31 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,12 +1,12 @@ ARG BASE_IMAGE=python:3.12-slim-bullseye # hadolint ignore=DL3006 -FROM ${BASE_IMAGE} as base +FROM ${BASE_IMAGE} AS base -FROM base as builder +FROM base AS builder # Set PATH to pickup virtual environment by default ENV PATH=/usr/local/venv/bin:"${PATH}" COPY . /code -# hadolint ignore=DL3003,SC2102 +# hadolint ignore=DL3003,SC2102,SC1091 RUN apt-get -qq -y update && \ apt-get -qq -y install --no-install-recommends \ git && \ @@ -14,10 +14,11 @@ RUN apt-get -qq -y update && \ apt-get -y autoremove && \ rm -rf /var/lib/apt/lists/* && \ python -m venv /usr/local/venv && \ + . /usr/local/venv/bin/activate && \ cd /code && \ - python -m pip --no-cache-dir install --upgrade pip setuptools wheel && \ - python -m pip --no-cache-dir install '.[xmlio,contrib]' && \ - python -m pip list + python -m pip --no-cache-dir install --upgrade uv && \ + uv pip install --no-cache '.[xmlio,contrib]' && \ + uv pip list FROM base @@ -47,10 +48,11 @@ RUN adduser \ COPY --from=builder --chown=moby /usr/local/venv /usr/local/venv/ +# Does NOT define USER as ENV USER moby -ENV USER ${USER} -ENV HOME /home/moby +ENV USER=moby +ENV HOME=/home/moby WORKDIR ${HOME}/work # Use C.UTF-8 locale to avoid issues with ASCII encoding @@ -59,4 +61,8 @@ ENV LANG=C.UTF-8 ENV PATH=${HOME}/.local/bin:${PATH} +# The first ever run of the CLI API incurs some overhead so do that during the +# build instead of making the user wait +RUN pyhf --version + ENTRYPOINT ["/usr/local/venv/bin/pyhf"] diff --git a/docker/gpu/Dockerfile b/docker/gpu/Dockerfile index 0bc93597bc..288ef916f2 100644 --- a/docker/gpu/Dockerfile +++ b/docker/gpu/Dockerfile @@ -1,6 +1,6 @@ -FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 as base +FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 AS base -FROM base as builder +FROM base AS builder # hadolint ignore=DL3015 RUN apt-get update -y && \ apt-get install -y \ @@ -14,7 +14,7 @@ COPY . /code COPY ./docker/gpu/install_backend.sh /code/install_backend.sh WORKDIR /code ARG BACKEND=tensorflow -RUN python3 -m pip --no-cache-dir install --upgrade pip setuptools wheel && \ +RUN python3 -m pip --no-cache-dir install --upgrade pip wheel && \ /bin/bash install_backend.sh ${BACKEND} && \ python3 -m pip list diff --git a/docs/conf.py b/docs/conf.py index 60ab7b974d..801ef81cef 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,11 +15,11 @@ # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use Path('../relative_path_to_dir').resolve() to make it absolute, like shown here. +import importlib.metadata import sys from pathlib import Path import jupytext -from pkg_resources import get_distribution sys.path.insert(0, str(Path('./exts').resolve())) @@ -55,7 +55,6 @@ def setup(app): 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'sphinx.ext.intersphinx', - 'sphinx_rtd_theme', 'sphinxcontrib.bibtex', 'sphinx.ext.napoleon', 'sphinx_click.ext', @@ -123,7 +122,7 @@ def setup(app): # |version| and |release|, also used in various other places throughout the # built documents. # The full version, including alpha/beta/rc tags. -release = get_distribution('pyhf').version +release = importlib.metadata.version("pyhf") # for example take major/minor/patch version = '.'.join(release.split('.')[:3]) @@ -243,13 +242,13 @@ def setup(app): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = 'pydata_sphinx_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -html_theme_options = {} +html_theme_options = {"header_links_before_dropdown": 6} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [] diff --git a/docs/contributors.rst b/docs/contributors.rst index d29fd0974b..292efde1c7 100644 --- a/docs/contributors.rst +++ b/docs/contributors.rst @@ -34,3 +34,4 @@ Contributors include: - Daniel Werner - Jonas Rembser - Lorenz Gaertner +- Melissa Weber Mendonça diff --git a/docs/development.rst b/docs/development.rst index fa6149a8a9..3d3341cd12 100644 --- a/docs/development.rst +++ b/docs/development.rst @@ -267,19 +267,9 @@ sdist and wheel, and then deploy them to PyPI_. Context Files and Archive Metadata ---------------------------------- -The ``.zenodo.json`` and ``codemeta.json`` files have the version number -automatically updated through ``tbump``, though their additional metadata -should be checked periodically by the dev team (probably every release). -The ``codemeta.json`` file can be generated automatically **from a PyPI install** -of ``pyhf`` using ``codemetapy`` - -.. code-block:: console - - codemetapy --no-extras pyhf > codemeta.json - -though the ``author`` metadata will still need to be checked and revised by hand. -The ``.zenodo.json`` is currently generated by hand, so it is worth using -``codemeta.json`` as a guide to edit it. +The ``.zenodo.json`` file has the version number automatically updated through +``tbump``, though its additional metadata should be checked periodically by +the dev team (probably every release). .. _bump version GitHub Actions workflow: https://github.com/scikit-hep/pyhf/actions/workflows/bump-version.yml .. _PyPI: https://pypi.org/project/pyhf/ diff --git a/docs/index.rst b/docs/index.rst index fbb5bf9925..7bddc7b269 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,25 +8,20 @@ intro likelihood - learn examples - outreach + api + cli installation development faq + learn babel - cli - api + outreach citations governance/ROADMAP release-notes contributors -.. raw:: html - - View me on GitHub - - .. raw:: html

   [raw HTML block retained as diff context, markup stripped: "Warning: This is a development version. The latest stable version is at ReadTheDocs."]

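For reference on the docs/conf.py hunk above: this is the stdlib pattern that replaces the removed pkg_resources.get_distribution call. A minimal standalone sketch (not part of the patch), condensing the two lines the diff already adds:

    import importlib.metadata

    # Full version string of the installed distribution, e.g. "0.7.6" or a dev/rc release
    release = importlib.metadata.version("pyhf")
    # Short major.minor.patch form that Sphinx uses as `version`
    version = ".".join(release.split(".")[:3])
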
diff --git a/pyproject.toml b/pyproject.toml index 034be5f52f..cc39287ab3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ dependencies = [ "jsonpatch>=1.15", "jsonschema>=4.15.0", # for utils "pyyaml>=5.1", # for parsing CLI equal-delimited options - "scipy>=1.5.1", # requires numpy, which is required by pyhf and tensorflow + "scipy>=1.5.2", # requires numpy, which is required by pyhf and tensorflow "tqdm>=4.56.0", # for readxml "numpy", # compatible versions controlled through scipy ] @@ -107,18 +107,18 @@ test = [ "pytest-mpl", "ipympl>=0.3.0", "pydocstyle", - "papermill~=2.5.0", - "scrapbook~=0.5.0", - "jupyter", + "papermill>=2.5.0", + "scrapbook>=0.5.0", + "notebook>=6.5.7", "graphviz", "pytest-socket>=0.2.0", # c.f. PR #1917 ] docs = [ "pyhf[xmlio,contrib]", "sphinx>=7.0.0", # c.f. https://github.com/scikit-hep/pyhf/pull/2271 - "sphinxcontrib-bibtex~=2.1", + "sphinxcontrib-bibtex>=2.1", "sphinx-click", - "sphinx-rtd-theme>=1.3.0", # c.f. https://github.com/scikit-hep/pyhf/pull/2271 + "pydata-sphinx-theme>=0.15.3", "nbsphinx!=0.8.8", # c.f. https://github.com/spatialaudio/nbsphinx/issues/620 "ipywidgets", "sphinx-issues", @@ -133,7 +133,7 @@ develop = [ "tbump>=6.7.0", "pre-commit", "nox", - "codemetapy>=2.3.0", + "uv>=0.1.39" ] [tool.hatch.version] @@ -227,6 +227,7 @@ filterwarnings = [ "ignore:jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the:DeprecationWarning", # Issue #2139 "ignore:Skipping device Apple Paravirtual device that does not support Metal 2.0:UserWarning", # Can't fix given hardware/virtualized device 'ignore:Type google._upb._message.[A-Z]+ uses PyType_Spec with a metaclass that has custom:DeprecationWarning', # protobuf via tensorflow + "ignore:jax.xla_computation is deprecated. Please use the AOT APIs:DeprecationWarning", # jax v0.4.30 ] [tool.coverage.run] diff --git a/src/pyhf/tensor/jax_backend.py b/src/pyhf/tensor/jax_backend.py index 5fabe4c2fc..172c03d8a2 100644 --- a/src/pyhf/tensor/jax_backend.py +++ b/src/pyhf/tensor/jax_backend.py @@ -321,7 +321,7 @@ def percentile(self, tensor_in, q, axis=None, interpolation="linear"): .. versionadded:: 0.7.0 """ - return jnp.percentile(tensor_in, q, axis=axis, interpolation=interpolation) + return jnp.percentile(tensor_in, q, axis=axis, method=interpolation) def stack(self, sequence, axis=0): return jnp.stack(sequence, axis=axis) diff --git a/tbump.toml b/tbump.toml index edc6fa00e6..935bbeba09 100644 --- a/tbump.toml +++ b/tbump.toml @@ -48,9 +48,6 @@ src = "src/pyhf/data/citation.bib" [[file]] src = ".zenodo.json" -[[file]] -src = "codemeta.json" - [[file]] src = "CITATION.cff" diff --git a/tests/conftest.py b/tests/conftest.py index ad2d9d7cba..b17910a6a0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,10 @@ import json import pathlib +import shutil import sys import tarfile import pytest -from setuptools._distutils import dir_util import pyhf @@ -164,9 +164,6 @@ def datadir(tmp_path, request): test_dir = pathlib.Path(request.module.__file__).with_suffix('') if test_dir.is_dir(): - dir_util.copy_tree(test_dir, str(tmp_path)) - # shutil is nicer, but doesn't work: https://bugs.python.org/issue20849 - # Once pyhf is Python 3.8+ only then the below can be used. - # shutil.copytree(test_dir, tmp_path) + shutil.copytree(test_dir, tmp_path, dirs_exist_ok=True) return tmp_path diff --git a/tests/constraints.txt b/tests/constraints.txt index 6a7e13f7ac..7954b2be44 100644 --- a/tests/constraints.txt +++ b/tests/constraints.txt @@ -1,5 +1,5 @@ # core -scipy==1.5.1 # c.f. 
PR #2081 +scipy==1.5.2 # c.f. PR #2469 click==8.0.0 # c.f. PR #1958, #1909 tqdm==4.56.0 jsonschema==4.15.0 # c.f. PR #1979 diff --git a/tests/test_optim.py b/tests/test_optim.py index 36032ac090..8e95203df1 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -401,7 +401,9 @@ def test_optim_uncerts_minuit(backend, source, spec, mu): return_uncertainties=True, ) assert result.shape == (2, 2) - assert pytest.approx([0.26418431, 0.0]) == pyhf.tensorlib.tolist(result[:, 1]) + assert pytest.approx([0.26418431, 0.0], rel=1e-5) == pyhf.tensorlib.tolist( + result[:, 1] + ) @pytest.mark.parametrize('mu', [1.0], ids=['mu=1'])