
Add project status update to README and bound NumPy #4786
Workflow file for this run

name: Tests
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
# Cancels all previous workflow runs for pull requests that have not completed.
concurrency:
  # The concurrency group contains the workflow name and the branch name for pull requests
  # or the commit hash for any other events.
  group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.head_ref || github.sha }}
  cancel-in-progress: true
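# For pull_request events, github.head_ref is the PR branch, so a new push to the same PR
# cancels the run still in progress; for push events head_ref is empty and the expression
# falls back to github.sha, so runs on main never cancel one another.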
jobs:
  changes:
    name: "Check for changes"
    runs-on: ubuntu-latest
    outputs:
      changes: ${{ steps.changes.outputs.src }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: dorny/paths-filter@v2
        id: changes
        with:
          filters: |
            python: &python
              - 'aesara/**/*.py'
              - 'tests/**/*.py'
              - 'aesara/**/*.pyx'
              - 'tests/**/*.pyx'
              - '*.py'
            src:
              - *python
              - 'aesara/**/*.c'
              - 'tests/**/*.c'
              - 'aesara/**/*.h'
              - 'tests/**/*.h'
              - '.github/workflows/*.yml'
              - 'setup.cfg'
              - 'requirements.txt'
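  # The `src` filter reuses the `python` filter via a YAML anchor and adds C sources/headers,
  # workflow files, and packaging metadata; the job's `changes` output is 'true' only when a
  # matching file changed, and all downstream jobs are gated on it.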
  style:
    name: Check code style
    needs: changes
    runs-on: ubuntu-latest
    if: ${{ needs.changes.outputs.changes == 'true' }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: "pyproject.toml"
      - uses: pre-commit/action@v3.0.0
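  # pre-commit/action runs the hooks declared in the repository's .pre-commit-config.yaml
  # (linters and formatters) against the checked-out tree; the job fails if any hook fails.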
  test:
    name: "Test py${{ matrix.python-version }}: ${{ matrix.part }}"
    needs:
      - changes
      - style
    runs-on: ubuntu-latest
    if: ${{ needs.changes.outputs.changes == 'true' && needs.style.result == 'success' }}
    strategy:
      fail-fast: true
      matrix:
        # Numba doesn't yet support 3.11, so we test primarily on 3.10, and
        # below we add an include for 3.11 without Numba.
python-version: ["3.8", "3.10"]
fast-compile: [0]
float32: [0]
install-numba: [1]
part:
- "tests --ignore=tests/tensor --ignore=tests/sparse --ignore=tests/tensor/nnet --ignore=tests/tensor/signal"
- "tests/tensor tests/sparse --ignore=tests/tensor/test_basic.py --ignore=tests/tensor/test_math.py --ignore=tests/tensor/test_math_scipy.py --ignore=tests/tensor/test_inplace.py --ignore=tests/tensor/test_elemwise.py --ignore=tests/tensor/rewriting/test_basic.py --ignore=tests/tensor/rewriting/test_math.py --ignore=tests/tensor/nnet --ignore=tests/tensor/signal"
- "tests/tensor/test_basic.py tests/tensor/test_math.py tests/tensor/test_math_scipy.py tests/tensor/test_inplace.py"
- "tests/tensor/test_elemwise.py tests/tensor/rewriting/test_basic.py tests/tensor/rewriting/test_math.py"
- "tests/tensor/nnet/test_conv.py"
        include:
          - python-version: "3.8"
            fast-compile: 1
            float32: 1
            install-numba: 1
            part: "tests --ignore=tests/tensor/nnet --ignore=tests/tensor/signal"
          - python-version: "3.8"
            fast-compile: 1
            float32: 0
            install-numba: 1
            part: "tests --ignore=tests/tensor/nnet --ignore=tests/tensor/signal"
          - python-version: "3.11"
            fast-compile: 0
            float32: 0
            install-numba: 0
            part: "tests/scan/test_basic.py"
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Python ${{ matrix.python-version }}
        uses: conda-incubator/setup-miniconda@v2
        with:
          miniforge-variant: Mambaforge
          miniforge-version: latest
          channels: conda-forge,defaults
          channel-priority: true
          python-version: ${{ matrix.python-version }}
      - name: Create matrix id
        id: matrix-id
        env:
          MATRIX_CONTEXT: ${{ toJson(matrix) }}
        run: |
          echo $MATRIX_CONTEXT
          export MATRIX_ID=`echo $MATRIX_CONTEXT | md5sum | cut -c 1-32`
          echo $MATRIX_ID
          echo "::set-output name=id::$MATRIX_ID"
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          mamba install --yes -q -c conda-forge "python~=${PYTHON_VERSION}=*_cpython" mkl "numpy>=1.23.3,<2.0.0" scipy pip mkl-service graphviz cython pytest coverage pytest-cov pytest-benchmark sympy filelock etuples logical-unification miniKanren cons typing_extensions "setuptools>=48.0.0"
          if [[ $INSTALL_NUMBA == "1" ]]; then mamba install --yes -q -c conda-forge -c numba "python~=${PYTHON_VERSION}=*_cpython" "numba>=0.57.0"; fi
          mamba install --yes -q -c conda-forge "python~=${PYTHON_VERSION}=*_cpython" "numpy>=1.23.3,<2.0.0" jax jaxlib
          pip install --no-deps -e ./
          mamba list && pip freeze
          python -c 'import aesara; print(aesara.config.__str__(print_doc=False))'
          python -c 'import aesara; assert(aesara.config.blas__ldflags != "")'
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
          INSTALL_NUMBA: ${{ matrix.install-numba }}
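      # NumPy is pinned to ">=1.23.3,<2.0.0" (consistent with this PR's "bound NumPy" change),
      # Numba is installed only when the matrix requests it, and Aesara itself is installed
      # editable with --no-deps, presumably so pip does not replace the conda-provided packages.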
      - name: Run tests
        shell: bash -l {0}
        run: |
          if [[ $FAST_COMPILE == "1" ]]; then export AESARA_FLAGS=$AESARA_FLAGS,mode=FAST_COMPILE; fi
          if [[ $FLOAT32 == "1" ]]; then export AESARA_FLAGS=$AESARA_FLAGS,floatX=float32; fi
          export AESARA_FLAGS=$AESARA_FLAGS,warn__ignore_bug_before=all,on_opt_error=raise,on_shape_error=raise,gcc__cxxflags=-pipe
          python -m pytest -x -r A --verbose --runslow --cov=aesara/ --cov-report=xml:coverage/coverage-${MATRIX_ID}.xml --no-cov-on-fail $PART --benchmark-skip
        env:
          MATRIX_ID: ${{ steps.matrix-id.outputs.id }}
          MKL_THREADING_LAYER: GNU
          MKL_NUM_THREADS: 1
          OMP_NUM_THREADS: 1
          PART: ${{ matrix.part }}
          FAST_COMPILE: ${{ matrix.fast-compile }}
          FLOAT32: ${{ matrix.float32 }}
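      # AESARA_FLAGS is assembled from the matrix toggles plus strict error settings
      # (on_opt_error/on_shape_error=raise); MKL_NUM_THREADS/OMP_NUM_THREADS=1 keep BLAS and
      # OpenMP single-threaded, presumably to avoid oversubscribing the shared CI runners.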
      - name: Upload coverage file
        uses: actions/upload-artifact@v3
        with:
          name: coverage
          path: coverage/coverage-${{ steps.matrix-id.outputs.id }}.xml
  benchmarks:
    name: "Benchmarks"
    needs:
      - changes
      - style
    runs-on: ubuntu-latest
    if: ${{ needs.changes.outputs.changes == 'true' && needs.style.result == 'success' }}
    strategy:
      fail-fast: true
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Set up Python 3.9
        uses: conda-incubator/setup-miniconda@v2
        with:
          miniforge-variant: Mambaforge
          miniforge-version: latest
          channels: conda-forge,defaults
          channel-priority: true
          python-version: 3.9
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          mamba install --yes -q -c conda-forge -c numba "python~=${PYTHON_VERSION}=*_cpython" mkl "numpy<2.0.0" scipy pip mkl-service cython pytest jax jaxlib pytest-benchmark "numba>=0.57.0"
          pip install -e ./
          mamba list && pip freeze
          python -c 'import aesara; print(aesara.config.__str__(print_doc=False))'
          python -c 'import aesara; assert(aesara.config.blas__ldflags != "")'
        env:
          PYTHON_VERSION: 3.9
      - name: Download previous benchmark data
        uses: actions/cache@v3
        with:
          path: ./cache
          key: ${{ runner.os }}-benchmark
      - name: Run benchmarks
        shell: bash -l {0}
        run: |
          export AESARA_FLAGS=mode=FAST_COMPILE,warn__ignore_bug_before=all,on_opt_error=raise,on_shape_error=raise,gcc__cxxflags=-pipe
          python -m pytest --benchmark-only --benchmark-json output.json
      - name: Store benchmark result
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: Python Benchmark with pytest-benchmark
          tool: 'pytest'
          output-file-path: output.json
          external-data-json-path: ./cache/benchmark-data.json
          alert-threshold: '200%'
          github-token: ${{ secrets.GITHUB_TOKEN }}
          comment-on-alert: ${{ github.event_name == 'push' }}
          fail-on-alert: true
          auto-push: false
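  # github-action-benchmark compares the pytest-benchmark results against the baseline kept in
  # ./cache/benchmark-data.json (restored via actions/cache); a slowdown beyond the 200% alert
  # threshold fails the job and, for push events, comments on the commit. auto-push is disabled,
  # so the baseline data is never pushed back to the repository.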
  all-checks:
    if: ${{ always() }}
    runs-on: ubuntu-latest
    name: "All tests"
    needs: [changes, style, test]
    steps:
      - name: Check build matrix status
        if: ${{ needs.changes.outputs.changes == 'true' && (needs.style.result != 'success' || needs.test.result != 'success') }}
        run: exit 1
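  # all-checks always runs and collapses the matrix into a single pass/fail status: it fails
  # only when relevant files changed and the style or test jobs did not succeed, presumably so
  # branch protection can require just this one check.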
  upload-coverage:
    runs-on: ubuntu-latest
    name: "Upload coverage"
    needs: [changes, all-checks]
    if: ${{ needs.changes.outputs.changes == 'true' && needs.all-checks.result == 'success' }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.7
          cache: "pip"
          cache-dependency-path: "pyproject.toml"
      - name: Install dependencies
        run: |
          python -m pip install -U "coverage>=5.1" coveralls
      - name: Download coverage file
        uses: actions/download-artifact@v3
        with:
          name: coverage
          path: coverage
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          directory: ./coverage/
          fail_ci_if_error: true
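      # The per-matrix coverage XML files uploaded as artifacts are downloaded into ./coverage
      # and sent to Codecov in one batch; fail_ci_if_error makes a failed upload fail the workflow.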