Documentation about Praos and the HFC/civic time #4762
name: CI
# Limit concurrent runs of this workflow within a single PR
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
on:
  workflow_dispatch:
    inputs:
      nothunks:
        description:
          Run the test suites under NoThunks conditions instead of the default variant
        type: boolean
        default: false
      nothunks-safe-only:
        description:
          Run only the test suites which are known to pass under NoThunks conditions
        type: boolean
        default: false
  pull_request:
  merge_group:
  push:
    branches:
      - main
  schedule:
    # every night, at midnight
    - cron: "0 0 * * *"
    # on the first day of each month, at midnight
    - cron: "0 0 1 * *"
jobs:
  build-test-bench-haddocks:
    name: Build and test
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        ghc: ["8.10.7", "9.6.6", "9.10.1"]
        variant: [default, no-thunks]
        test-set: [all, no-thunks-safe]
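        # Select which matrix cells run: on scheduled runs (or with the matching
        # workflow_dispatch input) the 'default' variant / the 'all' test set is
        # excluded; otherwise the 'no-thunks' variant / 'no-thunks-safe' test set is.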
        exclude:
          - variant:
              ${{ (github.event_name == 'schedule' || inputs.nothunks) && 'default' || 'no-thunks' }}
          - test-set:
              ${{ (github.event_name == 'schedule' || inputs.nothunks-safe-only) && 'all' || 'no-thunks-safe' }}
    env:
      # Modify this value to "invalidate" the Cabal cache.
      CABAL_CACHE_VERSION: "2024-07-04"
      CABAL: "3.12.1.0"
    steps:
      - uses: actions/checkout@v4
      - name: Install base libraries
        uses: input-output-hk/actions/base@latest
        with:
          use-sodium-vrf: false
      - name: Install Haskell
        uses: input-output-hk/actions/haskell@latest
        id: setup-haskell
        with:
          ghc-version: ${{ matrix.ghc }}
          cabal-version: ${{ env.CABAL }}
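      # Pick the cabal.project.local matching the build variant (default or no-thunks).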
      - name: Configure Cabal
        run: |
          cp .github/workflows/cabal.project.${{ matrix.variant }}.local ./cabal.project.local
      - name: Update Cabal (Hackage and CHaP)
        run: |
          cabal clean
          cabal update
      # We create a `dependencies.txt` file that can be used to index the cabal
      # store cache.
      #
      # We do not use `plan.json` directly because adding a dependency to our
      # Cabal files which was already present somewhere else would result in a
      # different plan, even though the set of dependencies is the same.
      #
      # In the future we should consider using `cabal-cache` like in the
      # `cardano-node`'s GitHub workflow.
      - name: Record dependencies to be used as cache keys
        id: record-deps
        run: |
          cabal build all --dry-run --minimize-conflict-set
          cat dist-newstyle/cache/plan.json \
            | jq '.["install-plan"][].id' \
            | sort \
            | uniq > dependencies.txt
      # This is used for caching the cabal store.
      - name: Store month number as environment variable
        run: echo "MONTHNUM=$(/usr/bin/date -u '+%m')" >> $GITHUB_ENV
      # Restore the cabal store cache. See `save-cabal-store`.
      #
      # When we restore a previous cache and store a new key, we store both files
      # that were already in the cache, and new files that were created. To
      # prevent the cache from growing too quickly, we reset the cache each month.
      #
      # NOTE: it's vital that all restore-keys include the month number.
      # Otherwise, we could restore a cache from a previous month, which makes
      # caches grow unboundedly.
      - name: "Restore cache for cabal-store"
        uses: actions/cache/restore@v4
        id: restore-cabal-store
        with:
          path: ${{ steps.setup-haskell.outputs.cabal-store }}
          key: cabal-store-${{ env.CABAL_CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ matrix.variant }}-${{ env.MONTHNUM }}-${{ hashFiles('dependencies.txt') }}
          restore-keys: |
            cabal-store-${{ env.CABAL_CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ matrix.variant }}-${{ env.MONTHNUM }}-${{ hashFiles('dependencies.txt') }}
            cabal-store-${{ env.CABAL_CACHE_VERSION }}-${{ runner.os }}-${{ matrix.ghc }}-${{ matrix.variant }}-${{ env.MONTHNUM }}-
      - name: Build dependencies
        id: build-dependencies
        run: cabal build --only-dependencies all -j
      # Save the cabal store cache if the dependencies were successfully built,
      # even if subsequent consensus builds/tests/benchmarks could fail. This
      # should shave off some running time because dependencies don't have to be
      # rebuilt if any of the later steps fail and the job has to be re-run.
      #
      # See https://github.com/actions/cache/tree/v3/save#always-save-cache.
      - name: "Save cache for cabal-store"
        uses: actions/cache/save@v4
        id: save-cabal-store
        if: steps.build-dependencies.outcome == 'success' && steps.restore-cabal-store.outputs.cache-hit != 'true'
        with:
          path: ${{ steps.setup-haskell.outputs.cabal-store }}
          key: ${{ steps.restore-cabal-store.outputs.cache-primary-key }}
      - name: Build projects [build]
        run: cabal build all -j
      - name: Test
        if: matrix.test-set == 'all'
        run: cabal test all -j --test-show-details=streaming
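      # Run only the test suites that are known to pass with NoThunks checks enabled.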
      - name: Test (NoThunks-safe tests only)
        if: matrix.test-set == 'no-thunks-safe'
        run: cabal test ouroboros-consensus:consensus-test ouroboros-consensus:doctest ouroboros-consensus:infra-test ouroboros-consensus:storage-test ouroboros-consensus-cardano:byron-test ouroboros-consensus-cardano:shelley-test ouroboros-consensus-diffusion:infra-test ouroboros-consensus-protocol:protocol-test -j --test-show-details=streaming
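      # Copy the benchmark binary to a fixed path so it can be uploaded as an
      # artifact and reused by the `benchmarks` job below.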
      - name: Identify benchmark executables
        run: |
          cp $(cabal list-bin mempool-bench) mempool-bench
      - name: Upload benchmark executables
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-exes-${{ runner.os }}-${{ matrix.ghc }}
          path: mempool-bench
          retention-days: 10
      # NB: build the haddocks at the end to avoid unnecessary recompilations.
      # We build the haddocks only for one GHC version.
      - name: Build Haddock documentation
        if: |
          github.event_name == 'push'
          && github.ref == 'refs/heads/main'
          && matrix.ghc == '9.6.6'
        run: |
          # needed for latex, dvisvgm and standalone
          sudo apt install texlive-latex-extra texlive-latex-base
          # cabal-docspec doesn't work with XDG https://github.com/phadej/cabal-extras/issues/136
          sed -i 's_-- store-dir:_store-dir: /home/runner/.local/state/cabal/store_g' ~/.config/cabal/config
          export CABAL_CONFIG=~/.config/cabal/config
          ./scripts/docs/haddocks.sh
          tar vzcf haddocks.tgz ./docs/website/static/haddocks
      - name: Upload haddocks as an artifact
        if: |
          github.event_name == 'push'
          && github.ref == 'refs/heads/main'
          && matrix.ghc == '9.6.6'
        uses: actions/upload-artifact@v4
        with:
          name: haddocks
          path: haddocks.tgz
          retention-days: 1
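  # Run the mempool benchmarks: on pushes to main a fresh baseline is recorded,
  # and on pull requests the results are compared against the cached baseline.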
  benchmarks:
    name: Run benchmarks
    needs: build-test-bench-haddocks
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        ghc: ["8.10.7", "9.6.6", "9.10.1"]
    steps:
      - uses: actions/checkout@v4
      - name: Install base libraries
        uses: input-output-hk/actions/base@latest
        with:
          use-sodium-vrf: false
      - name: Download benchmark executables
        uses: actions/download-artifact@v4
        with:
          name: benchmark-exes-${{ runner.os }}-${{ matrix.ghc }}
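      # Artifacts do not preserve file permissions, so restore the executable bit.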
      - name: Set permissions for benchmark executables
        run: |
          chmod u+x mempool-bench
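      # On pushes to main, run the benchmarks to produce a fresh baseline for
      # later comparisons on pull requests.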
      - name: Create baseline-benchmark
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        run: |
          ./mempool-bench \
            --timeout=60 --csv mempool-benchmarks.csv \
            +RTS -T
      # TODO: we only care about saving the baseline results when we run on the
      # main branch. However, we need to restore the cache when we run the
      # benchmarks during PRs. The caching mechanism of GitHub Actions does not
      # allow us to skip saving a cache entry.
      #
      # The `run_id` is only relevant for storing a new benchmarking result when
      # we run on the `main` branch. If we run this workflow in the context of a
      # PR, then we will save the same results we just restored.
      - name: Cache benchmark baseline results
        uses: actions/cache@v4
        with:
          path: baseline-mempool-benchmarks.csv
          key: baseline-mempool-benchmarks-${{ runner.os }}-${{ matrix.ghc }}-${{ github.run_id }}
          restore-keys: baseline-mempool-benchmarks-${{ runner.os }}-${{ matrix.ghc }}
      # We only update the cache if we just ran a benchmark on main.
      - name: Copy baseline-benchmark to cache
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        run: cp mempool-benchmarks.csv baseline-mempool-benchmarks.csv
      # TODO: this will be necessary when we publish the benchmark results.
      # - name: Upload mempool benchmark baseline results
      #   if: github.event_name == 'push' && github.ref == 'refs/heads/main'
      #   uses: actions/upload-artifact@v4
      #   with:
      #     name: baseline-mempool-benchmarks
      #     path: baseline-mempool-benchmarks.csv
      # The `fail-if-slower` value is determined ad-hoc based on the variability
      # we observed in our benchmarks.
      - name: Run mempool benchmarks on pull requests
        if: ${{ github.event_name == 'pull_request' }}
        run: |
          if [ -f baseline-mempool-benchmarks.csv ]; then
            ./mempool-bench \
              --timeout=60 --baseline baseline-mempool-benchmarks.csv \
              --fail-if-slower 100 \
              +RTS -T
          else
            echo "No baseline benchmarks found. This likely happened when adding a new GHC version to the build matrix."
            echo "Benchmarks comparison skipped."
          fi
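  # Build the documentation website (including the haddocks and consensus report
  # PDFs) and publish it to GitHub Pages on pushes to main.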
  deploy-documentation:
    name: Deploy documentation to GitHub Pages
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
    needs: build-test-bench-haddocks
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./docs/website
    strategy:
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 18
          cache: yarn
          cache-dependency-path: './docs/website/yarn.lock'
      - uses: cachix/install-nix-action@v30
        with:
          extra_nix_config: |
            accept-flake-config = true
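      # Build the consensus report PDFs with Nix and place them where the
      # website expects its static assets.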
      - name: Build PDFs (Consensus report)
        run: |
          nix build -L .#consensus-pdfs
          cp -r --no-preserve=mode,ownership result/ static/pdfs
      - name: Download haddocks
        uses: actions/download-artifact@v4
        with:
          name: haddocks
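      # Extract the haddocks tarball from the repository root, since its paths
      # are relative to it (./docs/website/static/haddocks).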
      - name: Copy haddocks
        run: |
          cd ../../
          tar vzxf haddocks.tgz
      - name: Install dependencies
        run: yarn install --frozen-lockfile
      - name: Build website
        run: yarn build
      - uses: actions/upload-pages-artifact@v3
        with:
          path: ./docs/website/build
      - name: Deploy
        id: deployment
        uses: actions/deploy-pages@v4
    # https://github.com/actions/deploy-pages
    permissions:
      pages: write
      id-token: write
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}