Benchmarks #360

Workflow file for this run

name: Benchmarks
on:
  schedule:
    - cron: '30 21 * * *' # Check at 21:30 UTC every day
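# The workflow runs in three stages: Check_Pyccel_Version compares the last
# benchmarked pyccel release with the latest release on PyPI, Benchmark re-runs
# the benchmark suite for each supported Python version whenever the two
# differ, and BuildReadme regenerates the README and records the newly
# analysed version.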
jobs:
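  # Install the latest pyccel from PyPI and expose both the previously
  # benchmarked version and the newly released version as job outputs.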
  Check_Pyccel_Version:
    runs-on: ubuntu-latest
    outputs:
      old_version: ${{ steps.check_version.outputs.old_version }}
      new_version: ${{ steps.check_version.outputs.new_version }}
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
      - name: Install pyccel
        run: |
          python -m pip install --upgrade pip
          python -m pip install wheel
          python -m pip install pyccel
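      # Read the last benchmarked version from current_version.txt and query the
      # version just installed from PyPI; both are exposed as step outputs.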
      - name: Check Version
        id: check_version
        run: |
          old_version=$(cat current_version.txt)
          new_version=$(python -c "from pyccel import __version__; print(__version__)")
          echo "old_version=${old_version}" >> $GITHUB_OUTPUT
          echo "new_version=${new_version}" >> $GITHUB_OUTPUT
  Benchmark:
    runs-on: ubuntu-latest
    needs: Check_Pyccel_Version
    if: ${{ needs.Check_Pyccel_Version.outputs.old_version != needs.Check_Pyccel_Version.outputs.new_version }}
    strategy:
      matrix:
        python-minor-version: [8, 9, 10, 11, 12]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.${{ matrix.python-minor-version }}
        uses: actions/setup-python@v4
        with:
          python-version: 3.${{ matrix.python-minor-version }}
      - name: Install dependencies
        uses: ./.github/actions/linux_install
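      # Install the project and snapshot the exact package versions used for
      # this run next to the benchmark results.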
      - name: Install python dependencies
        run: |
          pip3 install .
          pip3 freeze > version_specific_results/pypi_performance_3${{ matrix.python-minor-version }}_${{ needs.Check_Pyccel_Version.outputs.new_version }}_requirements.txt
      - name: Generate pyccel configs
        uses: ./.github/actions/generate_pyccel_config
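      # Run the benchmarks with the GNU and Intel configurations for both the
      # Fortran and C backends, collect the output into a version-specific
      # Markdown report, and plot the compilation/execution figures.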
      - name: Benchmark
        run: |
          source $INTEL_SOURCE_FILE
          cd benchmarks
          python run_benchmarks.py --pyperf --verbose --pyccel-config-files pyccel_fortran_gnu.json pyccel_c_gnu.json pyccel_fortran_intel.json pyccel_c_intel.json --pythran-config-files pythran_gnu.config pythran_intel.config
          cd ..
          FILE=version_specific_results/pypi_performance_3${{ matrix.python-minor-version }}_${{ needs.Check_Pyccel_Version.outputs.new_version }}.md
          echo "### Performance Comparison (as of ${{ needs.Check_Pyccel_Version.outputs.new_version }})" > ${FILE}
          cat benchmarks/bench.out >> ${FILE}
          python analysis/plot_results_figures.py ${FILE}
        shell: bash
        working-directory: ./.
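      # If the benchmarks failed, leave a marker file so the failure shows up in
      # the published results.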
      - name: Record Failure
        if: ${{ failure() }}
        run: echo "Python 3.${{ matrix.python-minor-version }} benchmarks failed!" > version_specific_results/pypi_performance_3${{ matrix.python-minor-version }}.md
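      # Commit the Markdown report and the generated figures, whether or not the
      # benchmark step succeeded.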
- name: Add & Commit
uses: EndBug/add-and-commit@v9
if: ${{ always() }}
with:
message: 'Update performance comparison'
add: "['version_specific_results/pypi_performance_3${{ matrix.python-minor-version }}_${{needs.Check_Pyccel_Version.outputs.new_version }}.md', 'version_specific_results/pypi_performance_3${{ matrix.python-minor-version }}_compilation.svg', 'version_specific_results/pypi_performance_3${{ matrix.python-minor-version }}_execution.svg']"
default_author: github_actions
pull: '--rebase --autostash'
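  # Rebuild the README from the committed results and record the version that
  # has just been analysed so that it is not benchmarked again tomorrow.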
  BuildReadme:
    runs-on: ubuntu-latest
    needs: [Check_Pyccel_Version, Benchmark]
    steps:
      - uses: actions/checkout@v3
      - name: Write README
        uses: ./.github/actions/create_readme
        with:
          version: ${{ needs.Check_Pyccel_Version.outputs.new_version }}
      - name: Save newly analysed version
        run: echo "${{ needs.Check_Pyccel_Version.outputs.new_version }}" > current_version.txt
- name: Add & Commit
uses: EndBug/add-and-commit@v9
with:
message: 'Update README and version'
add: "['README.md', 'current_version.txt']"
default_author: github_actions
pull: '--rebase --autostash'