diff --git a/.dockerignore b/.dockerignore index 42e8a818a418..d51b5556178f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -46,11 +46,11 @@ packages/beacon-node/mainnet_pubkeys.csv # Autogenerated docs packages/**/docs packages/**/typedocs -docs/packages -docs/contributing.md -docs/assets -docs/reference/cli.md -/site +docs/pages/**/*-cli.md +docs/pages/assets +docs/pages/api/api-reference.md +docs/pages/contribution/getting-started.md +docs/site # Lodestar artifacts .lodestar diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a19def8e72de..6e27a89c3044 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -4,21 +4,41 @@ on: push: branches: - stable + workflow_dispatch: + inputs: + ref: + description: 'Ref to deploy, defaults to `unstable`' + required: false + default: 'unstable' + type: string jobs: docs: runs-on: buildjet-4vcpu-ubuntu-2204 + env: + DEPLOY_REF: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || 'stable' }} steps: - # - Uses YAML anchors in the future + # Log out the ref being deployed + - name: Log Deployment Ref + if: github.event_name == 'workflow_dispatch' + run: | + echo "Deploying ref: $DEPLOY_REF" + + # Checkout the correct ref being deployed - uses: actions/checkout@v3 + with: + ref: ${{ env.DEPLOY_REF }} + - uses: actions/setup-node@v3 with: node-version: 20 check-latest: true cache: yarn + - name: Node.js version id: node run: echo "v8CppApiVersion=$(node --print "process.versions.modules")" >> $GITHUB_OUTPUT + - name: Restore dependencies uses: actions/cache@master id: cache-deps @@ -27,13 +47,14 @@ jobs: node_modules packages/*/node_modules key: ${{ runner.os }}-${{ steps.node.outputs.v8CppApiVersion }}-${{ hashFiles('**/yarn.lock', '**/package.json') }} + - name: Install & build if: steps.cache-deps.outputs.cache-hit != 'true' run: yarn install --frozen-lockfile && yarn build + - name: Build run: yarn build if: steps.cache-deps.outputs.cache-hit == 'true' - # - 
name: Build and collect docs run: yarn build:docs @@ -45,15 +66,17 @@ jobs: uses: actions/setup-python@v1 - name: Install dependencies + working-directory: docs run: | python -m pip install --upgrade pip - pip install -r docs/requirements.txt + pip install -r requirements.txt - name: Build docs - run: mkdocs build --site-dir site -v --clean + working-directory: docs + run: mkdocs build --verbose --clean --site-dir site - name: Deploy uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./site + publish_dir: ./docs/site diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b1b305ca49ff..43ceee898d85 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -270,7 +270,10 @@ jobs: packages/*/.git-data.json key: ${{ runner.os }}-node-${{ matrix.node }}-${{ github.sha }} fail-on-cache-miss: true - + - name: Install Chrome browser + run: npx @puppeteer/browsers install chromedriver@latest --path /tmp + - name: Install Firefox browser + run: npx @puppeteer/browsers install firefox@latest --path /tmp - name: Browser tests run: | export DISPLAY=':99.0' diff --git a/.gitignore b/.gitignore index ce1ec6074979..a0deed473c4a 100644 --- a/.gitignore +++ b/.gitignore @@ -40,11 +40,15 @@ packages/api/oapi-schemas # Autogenerated docs packages/**/docs packages/**/typedocs -docs/assets -docs/packages -docs/reference -docs/contributing.md -/site +docs/pages/**/*-cli.md +docs/pages/assets +docs/pages/images +docs/pages/security.md +docs/pages/lightclient-prover/lightclient.md +docs/pages/lightclient-prover/prover.md +docs/pages/api/api-reference.md +docs/pages/contribution/getting-started.md +docs/site # Testnet artifacts .lodestar diff --git a/.wordlist.txt b/.wordlist.txt index b7cff203f57c..42510b175a07 100644 --- a/.wordlist.txt +++ b/.wordlist.txt @@ -1,14 +1,19 @@ APIs +Andreas +Antonopoulos AssemblyScript BLS BeaconNode Besu +Buterin CLA CLI CTRL +Casper Chai ChainSafe Customizations +DPoS 
Discv DockerHub Dockerized @@ -19,22 +24,33 @@ ENR ENRs ESLint ETH +Edgington Erigon EthStaker +EtherScan Ethereum +EthereumJS +FINDNODE FX Flamegraph Flamegraphs +Geth Github Gossipsub Grafana HackMD +Homebrew +IPFS IPv Infura JSON +JSObjects JWT +KDE LGPL LGPLv +LMD +LPoS LTS Lerna MEV @@ -45,10 +61,12 @@ NVM Nethermind NodeJS NodeSource +OSI PR PRs Plaintext PoS +Prysm Quickstart RPC SHA @@ -57,64 +75,102 @@ SSZ Stakehouse TOC TTD +Teku TypeScript UI UID +UPnP UTF VM Vitalik Wagyu api async +backfill beaconcha +blockchain bootnode bootnodes chainConfig chainsafe +chiado cli cmd +codebase config configs const constantish coreutils cors +cryptocurrency cryptographic dApp dApps +ddos decrypt deserialization +dev devnet devnets +devtools +eg +enodes enum +env envs +ephemery flamegraph flamegraphs +gnosis goerli +heapdump +heaptrack +holesky interop +js keypair keystore keystores +libp lightclient linter +lldb +llnode lockfile mainnet +malloc mdns merkle merkleization monorepo +multiaddr +multifork namespace namespaced namespaces nodemodule +orchestrator +osx overriden params +pid plaintext +pre +premined produceBlockV +protolambda prover +repo +repos req reqresp +responder +ropsten runtime +scalability +secp +sepolia sharding ssz stakers @@ -131,4 +187,6 @@ utils validator validators wip +xcode yaml +yamux diff --git a/dashboards/lodestar_block_processor.json b/dashboards/lodestar_block_processor.json index d1a856f2f71d..8e68d611cc0d 100644 --- a/dashboards/lodestar_block_processor.json +++ b/dashboards/lodestar_block_processor.json @@ -110,6 +110,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -192,6 +193,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -276,6 +278,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -359,6 +362,7 
@@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -442,6 +446,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 2, "pointSize": 5, @@ -525,6 +530,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -607,6 +613,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -758,7 +765,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -862,7 +869,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -942,7 +949,7 @@ "reverse": false } }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -987,6 +994,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 4, @@ -1072,6 +1080,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1148,22 +1157,22 @@ "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", - "fillOpacity": 22, - "gradientMode": "opacity", + "fillOpacity": 0, + "gradientMode": "none", "hideFrom": { - "graph": false, "legend": false, "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, - "pointSize": 4, + "pointSize": 5, "scaleDistribution": { "type": "linear" }, - "showPoints": "never", - "spanNulls": true, + "showPoints": "auto", + "spanNulls": false, "stacking": { "group": "A", "mode": "none" @@ -1183,21 +1192,19 @@ "x": 0, "y": 50 }, - "id": 524, + "id": 534, "options": { - "graph": {}, "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", - "showLegend": false + "showLegend": true }, "tooltip": 
{ "mode": "multi", "sort": "none" } }, - "pluginVersion": "7.4.5", "targets": [ { "datasource": { @@ -1205,15 +1212,14 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "exemplar": false, - "expr": "rate(lodestar_stfn_epoch_transition_commit_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_commit_seconds_count[$rate_interval])", - "interval": "", - "legendFormat": "epoch transition", + "expr": "rate(lodestar_stfn_epoch_transition_step_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_step_seconds_count[$rate_interval])", + "instant": false, + "legendFormat": "{{step}}", "range": true, "refId": "A" } ], - "title": "Epoch transition commit step avg time", + "title": "Epoch Transition By Steps", "type": "timeseries" }, { @@ -1241,6 +1247,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1325,9 +1332,10 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, - "pointSize": 5, + "pointSize": 4, "scaleDistribution": { "type": "linear" }, @@ -1342,25 +1350,9 @@ } }, "mappings": [], - "unit": "percentunit" + "unit": "s" }, - "overrides": [ - { - "matcher": { - "id": "byName", - "options": "process block time" - }, - "properties": [ - { - "id": "color", - "value": { - "fixedColor": "orange", - "mode": "fixed" - } - } - ] - } - ] + "overrides": [] }, "gridPos": { "h": 8, @@ -1368,7 +1360,7 @@ "x": 0, "y": 58 }, - "id": 122, + "id": 524, "options": { "graph": {}, "legend": { @@ -1389,14 +1381,16 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, + "editorMode": "code", "exemplar": false, - "expr": "rate(lodestar_stfn_epoch_transition_seconds_sum[13m])", + "expr": "rate(lodestar_stfn_epoch_transition_commit_seconds_sum[$rate_interval])\n/\nrate(lodestar_stfn_epoch_transition_commit_seconds_count[$rate_interval])", "interval": "", - "legendFormat": "process block time", + "legendFormat": "epoch 
transition", + "range": true, "refId": "A" } ], - "title": "Epoch transition utilization rate", + "title": "Epoch transition commit step avg time", "type": "timeseries" }, { @@ -1424,6 +1418,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1523,6 +1518,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1540,20 +1536,19 @@ } }, "mappings": [], - "min": 0, - "unit": "none" + "unit": "percentunit" }, "overrides": [ { "matcher": { "id": "byName", - "options": "number of epoch transition" + "options": "process block time" }, "properties": [ { "id": "color", "value": { - "fixedColor": "yellow", + "fixedColor": "orange", "mode": "fixed" } } @@ -1567,7 +1562,7 @@ "x": 0, "y": 66 }, - "id": 124, + "id": 122, "options": { "graph": {}, "legend": { @@ -1589,13 +1584,13 @@ "uid": "${DS_PROMETHEUS}" }, "exemplar": false, - "expr": "384 * rate(lodestar_stfn_epoch_transition_seconds_count[13m])", + "expr": "rate(lodestar_stfn_epoch_transition_seconds_sum[13m])", "interval": "", - "legendFormat": "number of epoch transition", + "legendFormat": "process block time", "refId": "A" } ], - "title": "Epoch transitions / epoch", + "title": "Epoch transition utilization rate", "type": "timeseries" }, { @@ -1623,6 +1618,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1722,6 +1718,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1739,9 +1736,26 @@ } }, "mappings": [], - "unit": "s" + "min": 0, + "unit": "none" }, - "overrides": [] + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "number of epoch transition" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "yellow", + "mode": "fixed" + } + } + ] + } + ] }, "gridPos": { "h": 8, @@ 
-1749,7 +1763,7 @@ "x": 0, "y": 74 }, - "id": 526, + "id": 124, "options": { "graph": {}, "legend": { @@ -1770,15 +1784,14 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "editorMode": "code", - "expr": "rate(lodestar_stfn_hash_tree_root_seconds_sum[$rate_interval])\n/ on(source)\nrate(lodestar_stfn_hash_tree_root_seconds_count[$rate_interval])", + "exemplar": false, + "expr": "384 * rate(lodestar_stfn_epoch_transition_seconds_count[13m])", "interval": "", - "legendFormat": "__auto", - "range": true, + "legendFormat": "number of epoch transition", "refId": "A" } ], - "title": "State hash_tree_root avg time", + "title": "Epoch transitions / epoch", "type": "timeseries" }, { @@ -1806,6 +1819,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -1901,6 +1915,91 @@ "title": "State SSZ cache miss rate on preState", "type": "timeseries" }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 22, + "gradientMode": "opacity", + "hideFrom": { + "graph": false, + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": true, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 82 + }, + "id": 526, + "options": { + "graph": {}, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "multi", + "sort": 
"none" + } + }, + "pluginVersion": "7.4.5", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_stfn_hash_tree_root_seconds_sum[$rate_interval])\n/ on(source)\nrate(lodestar_stfn_hash_tree_root_seconds_count[$rate_interval])", + "interval": "", + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "State hash_tree_root avg time", + "type": "timeseries" + }, { "collapsed": false, "datasource": { @@ -1911,7 +2010,7 @@ "h": 1, "w": 24, "x": 0, - "y": 82 + "y": 90 }, "id": 92, "panels": [], @@ -1936,7 +2035,7 @@ "h": 3, "w": 24, "x": 0, - "y": 83 + "y": 91 }, "id": 154, "options": { @@ -1948,7 +2047,7 @@ "content": "Verifies signature sets in a thread pool of workers. Must ensure that signatures are verified fast and efficiently.", "mode": "markdown" }, - "pluginVersion": "9.3.2", + "pluginVersion": "10.1.1", "targets": [ { "datasource": { @@ -1989,6 +2088,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2014,7 +2114,7 @@ "h": 8, "w": 12, "x": 0, - "y": 86 + "y": 94 }, "id": 94, "options": { @@ -2069,6 +2169,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2093,7 +2194,7 @@ "h": 8, "w": 12, "x": 12, - "y": 86 + "y": 94 }, "id": 519, "options": { @@ -2150,6 +2251,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2175,7 +2277,7 @@ "h": 8, "w": 12, "x": 0, - "y": 94 + "y": 102 }, "id": 151, "options": { @@ -2236,6 +2338,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2261,7 +2364,7 @@ "h": 8, "w": 12, "x": 12, - "y": 94 + "y": 102 }, "id": 96, "options": { @@ -2322,6 +2425,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, 
"lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2347,7 +2451,7 @@ "h": 5, "w": 12, "x": 0, - "y": 102 + "y": 110 }, "id": 150, "options": { @@ -2408,6 +2512,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2433,7 +2538,7 @@ "h": 8, "w": 12, "x": 12, - "y": 102 + "y": 110 }, "id": 95, "options": { @@ -2494,6 +2599,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2520,7 +2626,7 @@ "h": 6, "w": 12, "x": 0, - "y": 107 + "y": 115 }, "id": 148, "options": { @@ -2591,6 +2697,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2616,7 +2723,7 @@ "h": 7, "w": 12, "x": 12, - "y": 110 + "y": 118 }, "id": 147, "options": { @@ -2677,6 +2784,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2702,7 +2810,7 @@ "h": 5, "w": 12, "x": 0, - "y": 113 + "y": 121 }, "id": 98, "options": { @@ -2759,6 +2867,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2800,7 +2909,7 @@ "h": 7, "w": 12, "x": 12, - "y": 117 + "y": 125 }, "id": 153, "options": { @@ -2870,6 +2979,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -2895,7 +3005,7 @@ "h": 6, "w": 12, "x": 0, - "y": 118 + "y": 126 }, "id": 97, "options": { @@ -2937,7 +3047,7 @@ "h": 1, "w": 24, "x": 0, - "y": 124 + "y": 132 }, "id": 309, "panels": [], @@ -2977,6 +3087,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3032,7 +3143,7 @@ "h": 8, "w": 12, "x": 0, - "y": 125 + "y": 133 }, "id": 305, "options": { @@ -3088,6 +3199,7 @@ "tooltip": false, "viz": false }, + 
"insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3128,7 +3240,7 @@ "h": 8, "w": 12, "x": 12, - "y": 125 + "y": 133 }, "id": 307, "options": { @@ -3195,6 +3307,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3219,7 +3332,7 @@ "h": 8, "w": 12, "x": 0, - "y": 133 + "y": 141 }, "id": 335, "options": { @@ -3286,6 +3399,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3310,7 +3424,7 @@ "h": 8, "w": 12, "x": 12, - "y": 133 + "y": 141 }, "id": 334, "options": { @@ -3351,7 +3465,7 @@ "h": 1, "w": 24, "x": 0, - "y": 141 + "y": 149 }, "id": 136, "panels": [], @@ -3393,6 +3507,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3418,7 +3533,7 @@ "h": 8, "w": 12, "x": 0, - "y": 142 + "y": 150 }, "id": 130, "options": { @@ -3477,6 +3592,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3517,7 +3633,7 @@ "h": 8, "w": 12, "x": 12, - "y": 142 + "y": 150 }, "id": 140, "options": { @@ -3577,6 +3693,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3618,7 +3735,7 @@ "h": 8, "w": 12, "x": 0, - "y": 150 + "y": 158 }, "id": 132, "options": { @@ -3701,6 +3818,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineStyle": { "fill": "solid" @@ -3745,7 +3863,7 @@ "h": 8, "w": 12, "x": 12, - "y": 150 + "y": 158 }, "id": 138, "options": { @@ -3817,6 +3935,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3866,7 +3985,7 @@ "h": 8, "w": 12, "x": 0, - "y": 158 + "y": 166 }, "id": 531, "options": { @@ -3957,6 +4076,7 @@ "tooltip": false, 
"viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -3981,7 +4101,7 @@ "h": 8, "w": 12, "x": 12, - "y": 158 + "y": 166 }, "id": 533, "options": { @@ -4026,7 +4146,7 @@ } ], "refresh": "10s", - "schemaVersion": 37, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/dashboards/lodestar_bls_thread_pool.json b/dashboards/lodestar_bls_thread_pool.json index a8021ace1102..160312a92d57 100644 --- a/dashboards/lodestar_bls_thread_pool.json +++ b/dashboards/lodestar_bls_thread_pool.json @@ -13,7 +13,10 @@ "list": [ { "builtIn": 1, - "datasource": "-- Grafana --", + "datasource": { + "type": "datasource", + "uid": "grafana" + }, "enable": true, "hide": true, "iconColor": "rgba(0, 211, 255, 1)", @@ -32,7 +35,6 @@ "fiscalYearStartMonth": 0, "graphTooltip": 1, "id": null, - "iteration": 1661342107287, "links": [ { "asDropdown": true, @@ -53,6 +55,10 @@ "panels": [ { "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "gridPos": { "h": 1, "w": 24, @@ -61,10 +67,23 @@ }, "id": 92, "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], "title": "BLS worker pool", "type": "row" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "gridPos": { "h": 3, "w": 24, @@ -73,12 +92,21 @@ }, "id": 154, "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, "content": "Verifies signature sets in a thread pool of workers. 
Must ensure that signatures are verified fast and efficiently.", "mode": "markdown" }, - "pluginVersion": "8.4.2", + "pluginVersion": "10.1.1", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval])", "interval": "", "legendFormat": "{{workerId}}", @@ -89,6 +117,10 @@ "type": "text" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Utilization rate = total CPU time per worker per second. Graph is stacked. This ratios should be high since BLS verification is the limiting factor in the node's throughput.", "fieldConfig": { "defaults": { @@ -96,6 +128,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -107,6 +141,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -139,7 +174,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "multi", @@ -149,6 +185,10 @@ "pluginVersion": "8.4.0-beta1", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval])", "interval": "", "legendFormat": "{{workerId}}", @@ -159,12 +199,18 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "fieldConfig": { "defaults": { "color": { "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -176,6 +222,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -207,7 +254,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": 
"bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "single", @@ -231,6 +279,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Average sync time to validate a single signature set. Note that the set may have been verified in batch. In most normal hardware this value should be ~1-2ms", "fieldConfig": { "defaults": { @@ -238,6 +290,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -250,6 +304,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -282,8 +337,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -296,6 +352,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "sum(rate(lodestar_bls_thread_pool_time_seconds_sum[$rate_interval]))/sum(rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval]))", "interval": "", "legendFormat": "pool", @@ -306,6 +366,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Raw throughput of the thread pool. 
How many individual signature sets are successfully validated per second", "fieldConfig": { "defaults": { @@ -313,6 +377,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -325,6 +391,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -357,8 +424,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -371,6 +439,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -381,6 +453,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Total length of the job queue. 
Note: this queue is not bounded", "fieldConfig": { "defaults": { @@ -388,6 +464,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -400,6 +478,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -432,8 +511,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -446,6 +526,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "lodestar_bls_thread_pool_queue_length", "interval": "", "legendFormat": "pool", @@ -456,6 +540,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "How much async time job spent waiting in the job queue before being picked up. 
This number should be really low <100ms to ensure signatures are validated fast.", "fieldConfig": { "defaults": { @@ -463,6 +551,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -475,6 +565,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -507,8 +598,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -521,6 +613,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_queue_job_wait_time_seconds_sum[$rate_interval])/rate(lodestar_bls_thread_pool_queue_job_wait_time_seconds_count[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -531,6 +627,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Async time from sending a message to the worker and the worker receiving it.", "fieldConfig": { "defaults": { @@ -538,6 +638,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -550,6 +652,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -584,7 +687,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "multi", @@ -621,6 +725,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "What percentage of total signature sets were verified in batch, which is an optimization to reduce verification costs by x2. 
For a synced node this should be ~100%", "fieldConfig": { "defaults": { @@ -628,6 +736,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -640,6 +750,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -672,8 +783,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "single", @@ -686,6 +798,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_batch_sigs_success_total[$rate_interval])/rate(lodestar_bls_thread_pool_success_jobs_signature_sets_count[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -696,6 +812,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Average signatures per set. 
This number is decided by the time of object submitted to the pool:\n- Sync blocks: 128\n- Aggregates: 3\n- Attestations: 1", "fieldConfig": { "defaults": { @@ -703,6 +823,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -715,6 +837,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -747,8 +870,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -758,6 +882,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_sig_sets_started_total[$rate_interval])/(rate(lodestar_bls_thread_pool_jobs_started_total[$rate_interval])>0)", "interval": "", "legendFormat": "pool", @@ -768,6 +896,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "How many individual signature sets are invalid vs (valid + invalid). We don't control this number since peers may send us invalid signatures. This number should be very low since we should ban bad peers. 
If it's too high the batch optimization may not be worth it.", "fieldConfig": { "defaults": { @@ -775,6 +907,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -786,6 +920,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -834,7 +969,8 @@ "legend": { "calcs": [], "displayMode": "list", - "placement": "bottom" + "placement": "bottom", + "showLegend": true }, "tooltip": { "mode": "multi", @@ -871,6 +1007,10 @@ "type": "timeseries" }, { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "description": "Average sets per job. A set may contain +1 signatures. This number should be higher than 1 to reduce communication costs", "fieldConfig": { "defaults": { @@ -878,6 +1018,8 @@ "mode": "palette-classic" }, "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, @@ -890,6 +1032,7 @@ "tooltip": false, "viz": false }, + "insertNulls": false, "lineInterpolation": "linear", "lineWidth": 1, "pointSize": 5, @@ -922,8 +1065,9 @@ "graph": {}, "legend": { "calcs": [], - "displayMode": "hidden", - "placement": "bottom" + "displayMode": "list", + "placement": "bottom", + "showLegend": false }, "tooltip": { "mode": "multi", @@ -933,6 +1077,10 @@ "pluginVersion": "7.4.5", "targets": [ { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, "expr": "rate(lodestar_bls_thread_pool_jobs_started_total[$rate_interval])/rate(lodestar_bls_thread_pool_job_groups_started_total[$rate_interval])", "interval": "", "legendFormat": "pool", @@ -941,10 +1089,105 @@ ], "title": "BLS worker pool - sets per job", "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + 
"custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 42 + }, + "id": 520, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_bls_thread_pool_signature_deserialization_main_thread_time_seconds_sum[$rate_interval]) * 384", + "instant": false, + "legendFormat": "signature_deserialization", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_bls_thread_pool_pubkeys_aggregation_main_thread_time_seconds_sum[$rate_interval]) * 384", + "hide": false, + "instant": false, + "legendFormat": "pubkey_aggregation", + "range": true, + "refId": "B" + } + ], + "title": "BLS jobItemWorkReq cpu time per epoch", + "type": "timeseries" } ], "refresh": "10s", - "schemaVersion": 35, + "schemaVersion": 38, "style": "dark", "tags": [ "lodestar" diff --git a/docs/images/heap-dumps/devtools.png b/docs/images/heap-dumps/devtools.png new file mode 100644 index 000000000000..9bdef24f7e20 Binary files /dev/null and b/docs/images/heap-dumps/devtools.png differ diff --git 
a/docs/images/heap-dumps/load-profile.png b/docs/images/heap-dumps/load-profile.png new file mode 100644 index 000000000000..c6e04d0922f4 Binary files /dev/null and b/docs/images/heap-dumps/load-profile.png differ diff --git a/docs/images/heap-dumps/memory-tab.png b/docs/images/heap-dumps/memory-tab.png new file mode 100644 index 000000000000..857309571971 Binary files /dev/null and b/docs/images/heap-dumps/memory-tab.png differ diff --git a/docs/install/docker.md b/docs/install/docker.md deleted file mode 100644 index 40468e7ad7aa..000000000000 --- a/docs/install/docker.md +++ /dev/null @@ -1,29 +0,0 @@ -# Install with Docker - -The [`chainsafe/lodestar`](https://hub.docker.com/r/chainsafe/lodestar) Docker Hub repository is maintained actively. It contains the `lodestar` CLI preinstalled. - - -!!! info - The Docker Hub image tagged as `chainsafe/lodestar:next` is run on CI every commit on our `unstable` branch. - For `stable` releases, the image is tagged as `chainsafe/lodestar:latest`. - - -Ensure you have Docker installed by issuing the command: - -```bash -docker -v -``` - -It should return a non error message such as `Docker version xxxx, build xxxx`. - -Pull, run the image and Lodestar should now be ready to use - -```bash -docker pull chainsafe/lodestar -docker run chainsafe/lodestar --help -``` - - -!!! info - Docker is the recommended setup for Lodestar. Use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart) with Docker for detailed instructions. - diff --git a/docs/install/npm.md b/docs/install/npm.md deleted file mode 100644 index 805141d01523..000000000000 --- a/docs/install/npm.md +++ /dev/null @@ -1,6 +0,0 @@ -# Install from NPM [not recommended] - - -!!! danger - For mainnet (production) usage, we only recommend installing with docker due to [NPM supply chain attacks](https://hackaday.com/2021/10/22/supply-chain-attack-npm-library-used-by-facebook-and-others-was-compromised/). 
Until a [safer installation method has been found](https://github.com/ChainSafe/lodestar/issues/3596), do not use this install method except for experimental purposes only. - diff --git a/docs/install/source.md b/docs/install/source.md deleted file mode 100644 index 4fba0a625111..000000000000 --- a/docs/install/source.md +++ /dev/null @@ -1,54 +0,0 @@ -# Install from source - -## Prerequisites - -Make sure to have [Yarn installed](https://classic.yarnpkg.com/en/docs/install). It is also recommended to [install NVM (Node Version Manager)](https://github.com/nvm-sh/nvm) and use the LTS version (currently v20) of [NodeJS](https://nodejs.org/en/). - - -!!! info - NodeJS versions older than the current LTS are not supported by Lodestar. We recommend running the latest Node LTS. - It is important to make sure the NodeJS version is not changed after reboot by setting a default `nvm alias default && nvm use default`. - -!!! note - Node Version Manager (NVM) will only install NodeJS for use with the active user. If you intend on setting up Lodestar to run under another user, we recommend using [NodeSource's source for NodeJS](https://github.com/nodesource/distributions/blob/master/README.md#installation-instructions) so you can install NodeJS globally. - - -## Clone repository - -Clone the repository locally and build from the stable release branch. - -```bash -git clone -b stable https://github.com/chainsafe/lodestar.git -``` - -Switch to created directory. - -```bash -cd lodestar -``` - -## Install packages - -Install across all packages. Lodestar follows a [monorepo](https://github.com/lerna/lerna) structure, so all commands below must be run in the project root. - -```bash -yarn install -``` - -## Build source code - -Build across all packages. - -```bash -yarn run build -``` - -## Lodestar CLI - -Lodestar should now be ready for use. - -```bash -./lodestar --help -``` - -See [Command Line Reference](./../reference/cli.md) for further information. 
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 100644 index 000000000000..270a01b311de --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1,144 @@ +site_name: Lodestar Documentation +site_description: Lodestar Documentation - Typescript Ethereum Consensus client +site_url: https://chainsafe.github.io/lodestar + +repo_name: chainsafe/lodestar +repo_url: https://github.com/chainsafe/lodestar + +docs_dir: pages + +# Configuration +theme: + name: material + logo: assets/lodestar_icon_300.png + favicon: assets/round-icon.ico + nav_style: dark + palette: + - scheme: preference + media: "(prefers-color-scheme: light)" + primary: black + accent: deep purple + toggle: + icon: material/weather-night + name: Switch to dark mode + - scheme: slate + media: "(prefers-color-scheme: dark)" + primary: black + accent: deep purple + toggle: + icon: material/weather-sunny + name: Switch to light mode + +plugins: + - search + - mermaid2: + version: 8.6.4 + arguments: + theme: | + ^(window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches) ? 
'dark' : 'light' + +markdown_extensions: + - meta + - codehilite: + guess_lang: false + - admonition + - toc: + permalink: true + - pymdownx.superfences: + # make exceptions to highlighting of code (for mermaid): + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:mermaid2.fence_mermaid + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + +extra_css: + - stylesheets/extras.css + +# Socials +extra: + social: + - icon: fontawesome/brands/github-alt + link: https://github.com/ChainSafe/lodestar + - icon: fontawesome/brands/twitter + link: https://twitter.com/lodestar_eth + - icon: fontawesome/brands/discord + link: https://discord.gg/yjyvFRP + - icon: fontawesome/brands/medium + link: https://blog.chainsafe.io + +# Customize left navigation menu +nav: + - Home: index.md + - Introduction: introduction.md + - Security: security.md + - Getting Started: + - Quick Start: getting-started/quick-start.md + - Installation: getting-started/installation.md + # - Creating a JWT: getting-started/creating-a-jwt.md + - Starting a Node: getting-started/starting-a-node.md + - Data Retention: data-retention.md + - Beacon Node: + - Configuration: beacon-management/beacon-cli.md + - Networking: beacon-management/networking.md + - MEV and Builder Integration: beacon-management/mev-and-builder-integration.md + - Syncing: beacon-management/syncing.md + - Validator: + - Configuration: validator-management/validator-cli.md + # - Key Management: validator-management/key-management.md + # - Withdrawals: validator-management/withdrawals.md + # - Multiple and Fall-Back Validation: validator-management/multiple-and-fallback-validation.md + - Bootnode: + - Configuration: bootnode/bootnode-cli.md + - Light Client and Prover: + - Light Client: lightclient-prover/lightclient.md + - Light Client Configuration: lightclient-prover/lightclient-cli.md + - Prover: 
lightclient-prover/prover.md + # - Prover Configuration: lightclient-prover/prover-cli.md + - Logging and Metrics: + - Prometheus and Grafana: logging-and-metrics/prometheus-grafana.md + - Client Monitoring: logging-and-metrics/client-monitoring.md + # - Log Management: logging-and-metrics/log-management.md + # - Metrics Management: logging-and-metrics/metrics-management.md + # - Dashboards: logging-and-metrics/dashboards.md + # - Api: + # - Using the API: api/using-the-api.md + # - API Reference: api/api-reference.md // Auto-generate from API endpoint + # - Troubleshooting: + # - Installation Issues: troubleshooting/installation-issues.md + # - Syncing Issues: troubleshooting/syncing-issues.md + # - Validation Issues: troubleshooting/validation-issues.md + # - Execution Layer Issues: troubleshooting/execution-layer-issues.md + - Supporting Libraries: supporting-libraries/index.md + # - libp2p: supporting-libraries/libp2p.md + # - "@chainsafe/ssz": supporting-libraries/ssz.md + # - "@chainsafe/blst": supporting-libraries/blst.md + # - "@chainsafe/libp2p-gossipsub": supporting-libraries/gossipsub.md + - Contributing: + - Getting Started: contribution/getting-started.md + # - Bug Reports: contribution/bug-reports.md + - Dependency Graph: contribution/depgraph.md + # - Repo: contribution/repo.md + - Testing: + - Overview: contribution/testing/index.md + # - Unit Tests: contribution/testing/unit-tests.md + # - Integration Tests: contribution/testing/integration-tests.md + # - E2E Tests: contribution/testing/e2e-tests.md + - Simulation Tests: contribution/testing/simulation-tests.md + # - Spec Tests: contribution/testing/spec-tests.md + # - Performance Tests: contribution/testing/performance-tests.md + # - PR Submission: contribution/pr-submission.md + - Tools: + # - Debugging: tools/debugging.md + # - perf: tools/perf.md + - Flame Graphs: tools/flamegraphs.md + - Heap Dumps: tools/heap-dumps.md + - Core Dumps: tools/core-dumps.md + - Advanced Topics: + # - Migrating 
from Other Clients: advanced-topics/migrating-from-other-clients.md + # - Block Exploration: advanced-topics/block-exploration.md + # - Slashing Protection: advanced-topics/slashing-protection.md + - Setting Up a Testnet: advanced-topics/setting-up-a-testnet.md + # - Doppelganger Detection: advanced-topics/doppelganger-detection.md \ No newline at end of file diff --git a/docs/pages/advanced-topics/block-exploration.md b/docs/pages/advanced-topics/block-exploration.md new file mode 100644 index 000000000000..05ee657bb607 --- /dev/null +++ b/docs/pages/advanced-topics/block-exploration.md @@ -0,0 +1 @@ +# Block Exploration diff --git a/docs/pages/advanced-topics/doppelganger-detection.md b/docs/pages/advanced-topics/doppelganger-detection.md new file mode 100644 index 000000000000..165590bda55a --- /dev/null +++ b/docs/pages/advanced-topics/doppelganger-detection.md @@ -0,0 +1 @@ +# Doppelganger Detection diff --git a/docs/pages/advanced-topics/migrating-from-other-clients.md b/docs/pages/advanced-topics/migrating-from-other-clients.md new file mode 100644 index 000000000000..302314a27b23 --- /dev/null +++ b/docs/pages/advanced-topics/migrating-from-other-clients.md @@ -0,0 +1 @@ +# Migration From Other Clients diff --git a/docs/usage/local.md b/docs/pages/advanced-topics/setting-up-a-testnet.md similarity index 99% rename from docs/usage/local.md rename to docs/pages/advanced-topics/setting-up-a-testnet.md index 51465d68c92b..a6350b3a03de 100644 --- a/docs/usage/local.md +++ b/docs/pages/advanced-topics/setting-up-a-testnet.md @@ -1,4 +1,4 @@ -# Local testnet +# Setting-Up a Testnet To quickly test and run Lodestar we recommend starting a local testnet. 
We recommend a simple configuration of two beacon nodes with multiple validators diff --git a/docs/pages/advanced-topics/slashing-protection.md b/docs/pages/advanced-topics/slashing-protection.md new file mode 100644 index 000000000000..527cbb06040a --- /dev/null +++ b/docs/pages/advanced-topics/slashing-protection.md @@ -0,0 +1 @@ +# Slashing Protection diff --git a/docs/pages/api/using-the-api.md b/docs/pages/api/using-the-api.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/usage/mev-integration.md b/docs/pages/beacon-management/mev-and-builder-integration.md similarity index 97% rename from docs/usage/mev-integration.md rename to docs/pages/beacon-management/mev-and-builder-integration.md index c2f2529edbe6..c2f9db9b6846 100644 --- a/docs/usage/mev-integration.md +++ b/docs/pages/beacon-management/mev-and-builder-integration.md @@ -20,7 +20,7 @@ All you have to do is: 1. Provide lodestar beacon node with a Builder endpoint (which corresponds to the network you are running) via these additional flags: ```shell - --builder --builder.urls + --builder --builder.url ``` 2. Run lodestar validator client with these additional flags ```shell diff --git a/docs/pages/beacon-management/networking.md b/docs/pages/beacon-management/networking.md new file mode 100644 index 000000000000..993b1cdfda26 --- /dev/null +++ b/docs/pages/beacon-management/networking.md @@ -0,0 +1,91 @@ +# Networking + +Starting up Lodestar will automatically connect it to peers on the network. Peers are found through the discv5 protocol and once peers are established communications happen via gossipsub over libp2p. While not necessary, having a basic understanding of how the various protocols and transport work will help with debugging and troubleshooting as some of the more common challenges come up with [firewalls](#firewall-management) and [NAT traversal](#nat-traversal). 
+ +## Networking Flags + +Some of the important Lodestar flags related to networking are: + +- [`--discv5`](./beacon-cli.md#-discv5) +- [`--listenAddress`](./beacon-cli.md#-listenaddress) +- [`--port`](./beacon-cli.md#-port) +- [`--discoveryPort`](./beacon-cli.md#-discoveryport) +- [`--listenAddress6`](./beacon-cli.md#-listenaddress6) +- [`--port6`](./beacon-cli.md#-port6) +- [`--discoveryPort6`](./beacon-cli.md#-discoveryport6) +- [`--bootnodes`](./beacon-cli.md#-bootnodes) +- [`--deterministicLongLivedAttnets`](./beacon-cli.md#-deterministiclonglivedattnets) +- [`--subscribeAllSubnets`](./beacon-cli.md#-subscribeallsubnets) +- [`--disablePeerScoring`](./beacon-cli.md#-disablepeerscoring) +- [`--enr.ip`](./beacon-cli.md#-enrip) +- [`--enr.tcp`](./beacon-cli.md#-enrtcp) +- [`--enr.udp`](./beacon-cli.md#-enrudp) +- [`--enr.ip6`](./beacon-cli.md#-enrip6) +- [`--enr.tcp6`](./beacon-cli.md#-enrtcp6) +- [`--enr.udp6`](./beacon-cli.md#-enrudp6) +- [`--nat`](./beacon-cli.md#-nat) +- [`--private`](./beacon-cli.md#`-private`) + +## Peer Discovery (Discv5) + +In Ethereum, discv5 plays a pivotal role in the peer discovery process, facilitating nodes to find and locate each other in order to form the peer-to-peer network​. The process begins with an interaction between new nodes and bootnodes at start-up. Bootnodes are nodes with hard-coded addresses, or can be overridden via the cli flag [`--bootnodes`](./beacon-cli.md#-bootnodes), to bootstrap the discovery process​. Through a method called FINDNODE-NODES, a new node establishes a bond with each bootnode, and it returns a list of peers for the new node to connect to. Following this trail, the new node engages through FINDNODE-NODES with the provided peers to further establish a web of connections​. + +Discv5 operates as a peer advertisement medium in this network, where nodes can act as both providers and consumers of data. 
Every participating node in the Discv5 protocol discovers peer data from other nodes and later relays it, making the discovery process dynamic and efficient. + +Discv5 is designed to be a standalone protocol running via UDP on a dedicated port solely for peer discovery. Peer data is exchanged via self-certified, flexible peer records (ENRs). These key features cater to the Ethereum network and being a good peer often means running a discv5 worker. Lodestar offers simple configuration to set up and run a bootnode independently of a beacon node. See the [bootnode cli](../bootnode/bootnode-cli.md) page for more information and configuration options. + +## ENR + +Ethereum Node Records (ENRs) are a standardized format utilized for peer discovery - see [EIP-778](https://eips.ethereum.org/EIPS/eip-778) for the specification. An ENR consists of a set of key-value pairs. These pairs include crucial information such as the node's ID, IP address, the port on which it's listening, and the protocols it supports. This information helps other nodes in the network locate and connect to the node. + +The primary purpose of ENRs is to facilitate node discovery and connectivity in the Ethereum network. Nodes use ENRs to announce their presence and capabilities to other nodes, making it easier to establish and maintain a robust, interconnected network. + +Note that bootnodes are announced via ENR. + +## Peer Communication (gossipsub and ReqResp) + +Gossipsub and ReqResp are the two mechanisms that beacon nodes use to exchange chain data. Gossipsub is used to disseminate the most recent relevant data proactively throughout the network. ReqResp is used to directly ask specific peers for specific information (eg: during syncing). + +### Gossipsub + +GossipSub is a foundational protocol in peer-to-peer (P2P) communication, particularly decentralized networks like Ethereum and IPFS. At its core, GossipSub efficiently propagates data, filtered by topic, through a P2P network.
It organizes peers into a collection of overlay networks, each associated with a distinct topic. By routing data through relevant overlay networks based on topics of interest, large amounts of data can be efficiently disseminated without excessive bandwidth, latency, etc. + +In GossipSub, nodes can subscribe to topics, effectively joining the corresponding overlay to receive messages published to a specific topic. This topic-based structure enables nodes to congregate around shared interests, ensuring that relevant messages are delivered to all interested parties. Each message published to a topic gets disseminated and relayed to all subscribed peers, similar to a chat room. + +Messages are propagated through a blend of eager-push and lazy-pull models. Specifically, the protocol employs "mesh links" to carry full messages actively and "gossip links" to carry only message identifiers (lazy-pull propagation model). This hybrid approach allows for both active message propagation and reactive message retrieval​ which is an extension of the traditional hub-and-spoke pub/sub model. + +### ReqResp + +ReqResp is the domain of protocols that establish a flexible, on-demand mechanism to retrieve historical data and data missed by gossip. This family of methods, implemented as separate libp2p protocols, operate between a single requester and responder. A method is initiated via a libp2p protocol ID, with the initiator sending a request message and the responder sending a response message. Every method defines a specific request and response message type, and a specific protocol ID. This framework also facilitates streaming responses and robust error handling. + +## Data Transport (libp2p) + +Libp2p is a modular and extensible network stack that serves as the data transport layer below both gossipsub and ReqResp and facilitates the lower-level peer-to-peer communications. 
It provides a suite of protocols for various networking functionalities including network transports, connection encryption and protocol multiplexing. Its modular design allows for the easy addition, replacement, or upgrading of protocols, ensuring an adaptable and evolving networking stack. + +Libp2p operates at the lower levels of the OSI model, particularly at the Transport and Network layers. Libp2p supports both TCP and UDP protocols for establishing connections and data transmission. Combined with libp2p's modular design it can integrate with various networking technologies to facilitating both routing and addressing. + +## Firewall Management + +If your setup is behind a firewall there are a few ports that will need to be opened to allow for P2P discovery and communication. There are also some ports that need to be protected to prevent unwanted access or DDOS attacks on your node. + +Ports that should be opened: + +- 30303/TCP+UDP - Execution layer p2p communication port +- 9000/TCP+UDP - Beacon Node P2P communication port +- 9090/TCP - Lodestar IPv6 P2P communication port +- 13000/TCP - Prysm P2P communication port +- 12000/UDP - Prysm P2P communication port + +Ports that should be inbound protected: + +- 9596/TCP - Lodestar Beacon-Node JSON RPC api calls +- 5062/TCP - Lodestar validator key manager api calls +- 18550/TCP - Lodestar MEV Boost/Builder port +- 8008/TCP - Lodestar Metrics +- 5064/TCP - Validator Metrics +- 8545/TCP - Execution client JSON RPC port api calls +- 8551/TCP - Execution engine port for Lodestar to communicate with the execution client + +## NAT Traversal + +Lodestar does not support UPnP. If you are behind a NAT you will need to manually forward the ports listed above. 
diff --git a/docs/pages/beacon-management/syncing.md b/docs/pages/beacon-management/syncing.md new file mode 100644 index 000000000000..40b5b4ba96b5 --- /dev/null +++ b/docs/pages/beacon-management/syncing.md @@ -0,0 +1,42 @@ +# Syncing + +Syncing an Ethereum node involves obtaining a copy of the blockchain data from other peers in the network to reach a consistent state. This process is crucial for new nodes or nodes that have been offline and need to catch up with the network's current state. Syncing can be performed for both the execution layer and the beacon chain, although the focus here will be primarily on the beacon chain. + +Lodestar allows for several methods of syncing however the recommended method is `checkpoint sync` as it is the fastest and least resource intensive. It is generally a good idea to sync via a [`--checkpointSyncUrl`](./beacon-cli.md#-checkpointsyncurl). If starting at a specific point is necessary specify the [`--checkpointState`](./beacon-cli.md#-checkpointstate) that should be where the sync begins. + +## Weak Subjectivity + +Weak subjectivity is a concept specific to Proof of Stake (PoS) systems, addressing how new nodes can safely join the network and synchronize with the correct blockchain history. Unlike in Proof of Work (PoW) systems, where a node can trust the longest chain due to the significant computational effort required to forge it, PoS systems present different challenges. In PoS, the cost of creating or altering blockchain history is lower, as it is not based on computational work but on the stake held by validators. This difference raises the possibility that an attacker, if possessing sufficient stake, could feasibly create a misleading version of the blockchain history. + +The concept of weak subjectivity becomes particularly crucial in two scenarios: when new nodes join the network and when existing nodes reconnect after a significant period of being offline. 
During these times, the 'weak subjectivity period' defines a time frame within which a client, upon rejoining, can reliably process blocks to reach the consensus chain head. This approach is essential for mitigating the risks associated with long-range attacks, which could occur if nodes relied solely on the longest chain principle without any initial trust in a specific network state. + +To counter these risks, weak subjectivity requires new nodes to obtain a recent, trusted state of the blockchain from a reliable source upon joining the network. This state includes vital information about the current set of validators and their stakes. Starting from this trusted state helps new nodes avoid being misled by false histories, as any attempt to rewrite history beyond this point would require an unrealistically large portion of the total stake. + +## Syncing Methods + +### Checkpoint Sync + +Checkpoint sync, also known as state sync, allows a node to sync to a specific state checkpoint without having to process all historical data leading up to that point. In the context of a beacon node, this involves syncing to a recent finalized checkpoint, allowing the node to quickly join the network and participate in consensus activities. This is especially beneficial for new nodes or nodes that have been offline for a considerable duration. + +### Historical Sync + +Historical sync involves processing all blocks from the genesis block or from a specified starting point to the current block. This is the most comprehensive sync method but also the most resource and time-intensive. For beacon nodes, historical sync is crucial for nodes that aim to maintain a complete history of the beacon chain, facilitating a deeper understanding and analysis of the network's history. In the execution layer, it ensures a complete historical record of the execution layer data. 
+ +### Range Sync + +Range sync involves syncing blocks within a specified range, beneficial when a node is only temporarily offline and needs to catch up over a short range. In the beacon node context, this entails requesting and processing blocks within a defined range, ensuring the node quickly gets updated to the current network state. + +### Backfill Sync + +This is another version of checkpoint sync that allows a node that has not been historically synchronized to verify data prior to the checkpoint. It is done via downloading a checkpoint and then fetching blocks backwards from that point until the desired data can be verified. It is a relatively inexpensive sync from a CPU perspective because it only checks the block hashes and verifies the proposer signatures along the way. + +## Syncing Lodestar + +The implementation of the different syncing styles in Lodestar is actually one of two types under the hood, range sync and unknown-parent sync. Range sync is used when the start point of syncing is known. In the case of historical and checkpoint sync the starting points are well defined, genesis and the last finalized epoch boundary. Snapshot sync is not supported by Lodestar. If the starting point for sync is not known Lodestar must first determine where the starting point is. While the discussion about how that happens is out of scope for this document, the gist is that the beacon node will listen to gossipsub for blocks being broadcast on the network. It will also request [`MetaData`](https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/p2p-interface.md#getmetadata) from its peers and use that to start requesting the correct blocks from the network. + +There are several flags that can be used to configure the sync process.
+ +- [`--checkpointSyncUrl`](./beacon-cli.md#-checkpointsyncurl) +- [`--checkpointState`](./beacon-cli.md#-checkpointstate) +- [`--wssCheckpoint`](./beacon-cli.md#-wsscheckpoint) +- [`--forceCheckpointSync`](./beacon-cli.md#-forcecheckpointsync) diff --git a/docs/pages/contribution/bug-reports.md b/docs/pages/contribution/bug-reports.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/design/depgraph.md b/docs/pages/contribution/depgraph.md similarity index 100% rename from docs/design/depgraph.md rename to docs/pages/contribution/depgraph.md diff --git a/docs/pages/contribution/pr-submission.md b/docs/pages/contribution/pr-submission.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/pages/contribution/repo.md b/docs/pages/contribution/repo.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/pages/contribution/testing/end-to-end-tests.md b/docs/pages/contribution/testing/end-to-end-tests.md new file mode 100644 index 000000000000..3f405128c7cb --- /dev/null +++ b/docs/pages/contribution/testing/end-to-end-tests.md @@ -0,0 +1,3 @@ +# End-To-End Tests + +Check back soon for more information!! We are in the process of updating our docs. diff --git a/docs/pages/contribution/testing/index.md b/docs/pages/contribution/testing/index.md new file mode 100644 index 000000000000..9de62895323c --- /dev/null +++ b/docs/pages/contribution/testing/index.md @@ -0,0 +1,27 @@ +# Testing + +Testing is critical to the Lodestar project and there are many types of tests that are run to build a product that is both effective AND efficient. This page will help to break down the different types of tests you will find in the Lodestar repo. + +### Unit Tests + +This is the most fundamental type of test in most code bases. In all instances mocks, stubs and other forms of isolation are used to test code on a functional, unit level. See the [Unit Tests](./unit-tests.md) page for more information. 
+ +### Spec Tests + +The Ethereum Consensus Specifications are what ensure that the various consensus clients do not diverge on critical computations and will work harmoniously on the network. See the [Spec Tests](./spec-tests.md) page for more information. + +### Performance Tests + +Node.js is an unforgiving virtual machine when it comes to high performance, multi-threaded applications. In order to ensure that Lodestar can not only keep up with the chain, but to push the boundary of what is possible, there are lots of performance tests that benchmark programming paradigms and prevent regression. See the [Performance Testing](./performance-tests.md) page for more information. + +### End-To-End Tests + +E2E tests are where Lodestar is run in its full form, often from the CLI as a user would to check that the system as a whole works as expected. These tests are meant to exercise the entire system in isolation and there is no network interaction, nor interaction with any other code outside of Lodestar. See the [End-To-End Testing](./end-to-end-tests.md) page for more information. + +### Integration Tests + +Integration tests are meant to test how Lodestar interacts with other clients, but are not considered full simulations. This is where Lodestar may make API calls or otherwise work across the process boundary, but there is required mocking, stubbing, or class isolation. An example of this is using the `ExecutionEngine` class to make API calls to a Geth instance to check that the http requests are properly formatted. + +### Simulation Tests + +These are the most comprehensive types of tests. They aim to test Lodestar in a fully functioning ephemeral devnet environment. See the [Simulation Testing](./simulation-tests.md) page for more information.
diff --git a/docs/pages/contribution/testing/integration-tests.md b/docs/pages/contribution/testing/integration-tests.md new file mode 100644 index 000000000000..b45110033460 --- /dev/null +++ b/docs/pages/contribution/testing/integration-tests.md @@ -0,0 +1,27 @@ +# Integration Tests + +The following tests are found in `packages/beacon-node` + +#### `test:sim:withdrawals` + +This test simulates capella blocks with withdrawals. It tests lodestar against Geth and EthereumJS. + +There are two ENV variables that are required to run this test: + +- `EL_BINARY_DIR`: the docker image setup to handle the test case +- `EL_SCRIPT_DIR`: the script that will be used to start the EL client. All of the scripts can be found in `packages/beacon-node/test/scripts/el-interop` and the `EL_SCRIPT_DIR` is the sub-directory name in that root that should be used to run the test. + +The command to run this test is: + +`EL_BINARY_DIR=g11tech/geth:withdrawals EL_SCRIPT_DIR=gethdocker yarn mocha test/sim/withdrawal-interop.test.ts` + +The images used by this test during CI are: + +- `GETH_WITHDRAWALS_IMAGE: g11tech/geth:withdrawalsfeb8` +- `ETHEREUMJS_WITHDRAWALS_IMAGE: g11tech/ethereumjs:blobs-b6b63` + +#### `test:sim:merge-interop` + +#### `test:sim:mergemock` + +#### `yarn test:sim:blobs` diff --git a/docs/pages/contribution/testing/performance-tests.md b/docs/pages/contribution/testing/performance-tests.md new file mode 100644 index 000000000000..6e2d9c86319b --- /dev/null +++ b/docs/pages/contribution/testing/performance-tests.md @@ -0,0 +1,3 @@ +# Performance Tests + +Check back soon for more information!! We are in the process of updating our docs. 
diff --git a/docs/pages/contribution/testing/simulation-tests.md b/docs/pages/contribution/testing/simulation-tests.md new file mode 100644 index 000000000000..c1059e5c4177 --- /dev/null +++ b/docs/pages/contribution/testing/simulation-tests.md @@ -0,0 +1,141 @@ +# Simulation Tests + +"Sim" testing for Lodestar is the most comprehensive, and complex, testing that is run. The goal is to fully simulate a testnet and to actuate the code in a way that closely mimics what will happen when turning on Lodestar in the wild. This is a very complex task and requires a lot of moving parts to work together. The following sections will describe the various components and how they work together. + +At a very high level, simulation testing will setup a testnet from genesis and let it proceed through "normal" execution exactly as the nodes would under production circumstances. To get feedback there are regular checks along the way to assess how the testnet nodes are working. These "assertions" can be added and removed at will to allow developers to check for specific conditions in a tightly controlled, reproducible, environment to get high quality and actionable feedback on how Lodestar performs. The end goal of these tests is to run a full Lodestar client in an environment that is as close as possible to what an end user would experience. + +These tests usually setup full testnets with multiple consensus clients and their paired execution node. In many instances we are looking to just exercise the Lodestar code but there are some places where there is also testing to see how Lodestar works in relation to the other consensus clients, like Lighthouse. As you can imagine, there is quite a bit of machinery that is responsible for setting up and managing the simulations and assertions. This section will help to go over those bits and pieces. Many, but not all, of these classes can be found in `packages/cli/test/utils/simulation`.
+ +## Running Sim Tests + +There are a number of sim tests that are available and each has a slightly different purpose. All are run by CI and must pass for a PR to be valid for merging. Most tests require a couple of environment variables to be set. + +### Environment Variables + +To see what typical values for these are check out the `test-sim.yaml` workflow file in the `.github/workflows` directory. + +- `GETH_DOCKER_IMAGE`: The Geth docker image that will be used +- `NETHERMIND_DOCKER_IMAGE`: The Nethermind docker image that will be used +- `LIGHTHOUSE_DOCKER_IMAGE`: The Lighthouse docker image that will be used + +### `test:sim:multifork` + +The multi-fork sim test checks most of the functionality Lodestar provides. It verifies that Lodestar is capable of peering, moving through all of the forks and using various sync methods in a testnet environment. Lodestar is tested with both Geth and Nethermind as the execution client. It also checks a Lighthouse/Geth node for cross client compatibility. + +```sh +GETH_DOCKER_IMAGE=ethereum/client-go:v1.11.6 \ + LIGHTHOUSE_DOCKER_IMAGE=sigp/lighthouse:latest-amd64-modern-dev \ + NETHERMIND_DOCKER_IMAGE=nethermind/nethermind:1.18.0 \ + yarn workspace @chainsafe/lodestar test:sim:multifork +``` + +### `test:sim:endpoints` + +This tests that various endpoints of the beacon node and validator client are working as expected. + +```sh +GETH_DOCKER_IMAGE=ethereum/client-go:v1.11.6 \ + yarn workspace @chainsafe/lodestar test:sim:endpoints +``` + +### `test:sim:deneb` + +This test is still included in our CI but is no longer as important as it once was. Lodestar is often the first client to implement new features and this test was created before Geth was upgraded with the features required to support the Deneb fork. To test that Lodestar was ready this test uses mocked Geth instances. It is left as a placeholder for when the next fork comes along that requires a similar approach.
+ +### `test:sim:mixedclient` + +Checks that Lodestar is compatible with other consensus validators and vice-versa. All tests use Geth as the EL. + +```sh +GETH_DOCKER_IMAGE=ethereum/client-go:v1.11.6 \ + LIGHTHOUSE_DOCKER_IMAGE=sigp/lighthouse:latest-amd64-modern-dev \ + yarn workspace @chainsafe/lodestar test:sim:mixedclient +``` + +## Sim Test Infrastructure + +When setting up and running the simulations, interactions with the nodes are through the published node APIs. All functionality is actuated via HTTP requests and by "plugging in" this way it is possible to run the nodes in a stand-alone fashion, as they would be run in production, but to still achieve a tightly monitored and controlled environment. If code needs to be executed on a "class by class" basis or with mocking involved then the test is not a simulation test and would fall into one of the other testing categories. See the [Testing Overview](./index.md) page for more information on the other types of tests available for Lodestar. + +### Simulation Environment + +The simulation environment has many pieces and those are orchestrated by the `SimulationEnvironment` class. The testnet nodes will be run as a mixture of Docker containers and bare metal code execution via Node.js. In order to monitor the various clients there is a `SimulationTracker` whose primary function is to `register` assertions that will track and gauge how the nodes are doing during the simulation. See the section on [Simulation Assertions](#simulation-assertions) below for more information on them. There is an `EpochClock` that has helper functions related to timing of slots and epochs and there is also a `Runner` that will help to start/stop the various Docker containers and spawn the Node.js child processes as necessary. + +The `SimulationEnvironment` is the orchestrator for all the various functions to create the testnet and start it from genesis.
It is also how the various forks are configured to exercise code through various fork transitions. + +### Simulation Assertions + +These are the secret sauce for making the simulation tests meaningful. There are several predefined assertions that can be added to a simulation tracker and one can also create custom assertions and add them to the environment. Assertions can be added per slot, per epoch, per fork or per node. They can even be added to check conditions across nodes. + +Assertions are added to the `SimulationTracker` with the `register` method and the tracker follows the environment to make sure that assertions are run at the appropriate times, and on the correct targets. + +Assertions are implemented via API calls to the various targets and meta from the API calls is stored and used to assert that the desired conditions were met. Any information that can be retrieved via API call can be added to the assertion `stores` for validation, and validations can be asserted at a specific time or on an interval. + +There are a number of assertions that are added to simulations by default. They are: + +- `inclusionDelayAssertion` +- `attestationsCountAssertion` +- `attestationParticipationAssertion` +- `connectedPeerCountAssertion` +- `finalizedAssertion` +- `headAssertion` +- `missedBlocksAssertion` +- `syncCommitteeParticipationAssertion` + +Because of the flexibility, and complexity, there is a section specifically for how to create custom assertions below. See [custom assertions](#custom-assertions) for more info. + +### Custom Assertions + +Check back soon for more information on how to create custom assertions. + +### Simulation Reports + +Sim tests that are run using the simulation framework output a table of information to the console. The table summarizes the state of all of the nodes and the network at each slot. 
+ +Here is an example of the table and how to interpret it: + +```sh +┼─────────────────────────────────────────────────────────────────────────────────────────────────┼ +│ fork │ eph │ slot │ head │ finzed │ peers │ attCount │ incDelay │ errors │ +┼─────────────────────────────────────────────────────────────────────────────────────────────────┼ +│ capella │ 9/0 │ 72 │ 0x95c4.. │ 56 │ 3 │ 16 │ 1.00 │ 0 │ +│ capella │ 9/1 │ 73 │ 0x9dfc.. │ 56 │ 3 │ 16 │ 1.00 │ 0 │ +│ capella │ 9/2 │ 74 │ 0xdf3f.. │ 56 │ 3 │ 16 │ 1.00 │ 0 │ +│ capella │ 9/3 │ 75 │ 0xbeae.. │ 56 │ 3 │ 16 │ 1.00 │ 0 │ +│ capella │ 9/4 │ 76 │ 0x15fa.. │ 56 │ 3 │ 16 │ 1.00 │ 0 │ +│ capella │ 9/5 │ 77 │ 0xf8ff.. │ 56 │ 2,3,3,2 │ 16 │ 1.00 │ 0 │ +│ capella │ 9/6 │ 78 │ 0x8199.. │ 56 │ 2,3,3,2 │ 16 │ 1.20 │ 0 │ +│ capella │ 9/7 │ 79 │ different │ 56 │ 2,3,3,2 │ 16 │ 1.50 │ 2 │ +┼─────────────────────────────────────────────────────────────────────────────────────────────────┼ +│ Att Participation: H: 0.75, S: 1.00, T: 0.75 - SC Participation: 1.00 │ +┼─────────────────────────────────────────────────────────────────────────────────────────────────┼ +``` + +#### Slot Information + +- `fork`: shows what fork is currently being tested +- `eph`: During simulation tests the Lodestar repo is setup to use 8 slots per epoch so what is shown is the epoch number and the slot number within that epoch as `epoch/slot` +- `slot`: The slot number that is currently being processed +- `head`: If all clients have the same head, the first couple of bytes of the hash are shown. If all clients do not have the same head `different` is reported. +- `finzed`: Shows the number of the last finalized slot +- `peers`: The number of peers that each node is connected to. If all have the same number then only a single value is shown. If they do not have the same number of peers, the count for each node is reported in a comma-separated list +- `attCount`: The number of attestations that the node has seen.
+- `incDelay`: The average number of slots inclusion delay was experienced for the attestations. Often attestations for the current head arrive more than one slot behind and this value tracks that +- `errors`: The number of errors that were encountered during the slot + +#### Epoch Information + +- `H`: The percentage of nodes, at epoch transition, that voted for the head block +- `S`: The percentage of nodes, at epoch transition, that voted for the source block +- `T`: The percentage of nodes, at epoch transition, that voted for the target block +- `SC Participation`: The sync committee participation rate + +### Simulation Logging + +The simulation environment will capture all of the logs from all nodes that are running. The logs can be found in the `packages/cli/test-logs` directory. The logs are named with the following convention: + +`<node-name>-<layer>_<client>.log` + +Some examples are: + +- `node-1-beacon_lodestar.log`: This is the first node in the simulation. It is the consensus layer. It is running the lodestar validator client. +- `range-sync-execution_geth.log`: This is the node that was added to test pulling history in range sync mode. It was the execution layer and was running the geth execution client. diff --git a/docs/pages/contribution/testing/spec-tests.md b/docs/pages/contribution/testing/spec-tests.md new file mode 100644 index 000000000000..b7a65dafd072 --- /dev/null +++ b/docs/pages/contribution/testing/spec-tests.md @@ -0,0 +1,3 @@ +# Specification Tests + +Check back soon for more information!! We are in the process of updating our docs. diff --git a/docs/pages/contribution/testing/unit-tests.md b/docs/pages/contribution/testing/unit-tests.md new file mode 100644 index 000000000000..cbf4b4ae2264 --- /dev/null +++ b/docs/pages/contribution/testing/unit-tests.md @@ -0,0 +1,3 @@ +# Unit Tests + +Check back soon for more information!! We are in the process of updating our docs.
diff --git a/docs/pages/data-retention.md b/docs/pages/data-retention.md new file mode 100644 index 000000000000..41daa8dc458d --- /dev/null +++ b/docs/pages/data-retention.md @@ -0,0 +1,54 @@ +# Data Retention + +There are two components for an ethereum node database, the execution client and the beacon node. Both need to hold data for a full node to work correctly. In particular the execution node holds state such as wallet information and smart contract code. It also holds the execution blocks with the transaction record. The beacon node is responsible for holding beacon node blocks and state. The beacon state is responsible primarily for the validator information. + +There are several processes that need to store data for Lodestar. These data sets can grow quite large over time so it is important to understand how to manage them so the host machine can support operations effectively. + +```bash +$executionDir # this changes depending on the execution client + └── execution-db + +$dataDir # specified by --dataDir on the beacon command +├── .log_rotate_audit.json +├── beacon.log # there can be many of these +├── enr +├── peer-id.json +├── chain-db # default if --dbDir not specified +│ └── (db files) +└── peerstore # default if --peerStoreDir not specified + └── (peerstore files) + +$dataDir # specified by --dataDir on the validator command +├── .log_rotate_audit.json +├── validator.log # there can be many of these +├── validator-db # default if --validatorsDbDir not specified +│ └── (db files) +├── proposerConfigs # default if --proposerDir not specified +│ └── (config files) +├── cache # default if --cacheDir not specified +│ └── (cache files) +├── secrets # default if --secretsDir not specified +│ ├── 0x8e41b969493454318c27ec6fac90645769331c07ebc8db5037... +│ └── 0xa329f988c16993768299643d918a2694892c012765d896a16f... +├── keystores # default if --keystoresDir not specified +│ ├── 0x8e41b969493454318c27ec6fac90645769331c07ebc8db5037... 
+│ │ └── voting-keystore.json +│ └── 0xa329f988c16993768299643d918a2694892c012765d896a16f... +│ └── voting-keystore.json +└── remoteKeys # default if --remoteKeysDir not specified + └── 0xa329f988c16993768299643d918a2694892c012765d896a16f.json +``` + +## Data Management + +Configuring your node to store and prune data is key to success. On average you can expect for the database to grow by the following amounts: + +- `execution-db` grows at 2-30GB per week +- `chain-db` grows at 1GB per month +- `validator-db` grows at less than 2MB per year, per key (2000 keys = 4GB per year) + +`keystores`, `keystore-cache` and `peerstore` are not usually very large and are not expected to grow much during normal operation. + +Logs can also become quite large so please check out the section on [log management](./logging-and-metrics/log-management.md) for more information. + +There is really only one flag that is needed to manage the data for Lodestar, [`--dataDir`](./beacon-management/beacon-cli.md#-datadir). Other than that, handling log management is really the heart of the data management story. Beacon node data is what it is. Depending on the execution client that is chosen, there may be flags to help with data storage growth but that is outside the scope of this document. diff --git a/docs/pages/getting-started/installation.md b/docs/pages/getting-started/installation.md new file mode 100644 index 000000000000..4fdfc3e82367 --- /dev/null +++ b/docs/pages/getting-started/installation.md @@ -0,0 +1,93 @@ +# Installation + +## Docker Installation + +The [`chainsafe/lodestar`](https://hub.docker.com/r/chainsafe/lodestar) Docker Hub repository is maintained actively. It contains the `lodestar` CLI preinstalled. + + +!!! info + The Docker Hub image tagged as `chainsafe/lodestar:next` is run on CI every commit on our `unstable` branch. + For `stable` releases, the image is tagged as `chainsafe/lodestar:latest`.
+ + +Ensure you have Docker installed by issuing the command: + +```bash +docker -v +``` + +It should return a non error message such as `Docker version xxxx, build xxxx`. + +Pull, run the image and Lodestar should now be ready to use + +```bash +docker pull chainsafe/lodestar +docker run chainsafe/lodestar --help +``` + + +!!! info + Docker is the recommended setup for Lodestar. Use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart) with Docker for detailed instructions. + + +## Build from Source + +### Prerequisites + +Make sure to have [Yarn installed](https://classic.yarnpkg.com/en/docs/install). It is also recommended to [install NVM (Node Version Manager)](https://github.com/nvm-sh/nvm) and use the LTS version (currently v20) of [NodeJS](https://nodejs.org/en/). + + +!!! info + NodeJS versions older than the current LTS are not supported by Lodestar. We recommend running the latest Node LTS. + It is important to make sure the NodeJS version is not changed after reboot by setting a default `nvm alias default && nvm use default`. + +!!! note + Node Version Manager (NVM) will only install NodeJS for use with the active user. If you intend on setting up Lodestar to run under another user, we recommend using [NodeSource's source for NodeJS](https://github.com/nodesource/distributions/blob/master/README.md#installation-instructions) so you can install NodeJS globally. + + +### Clone repository + +Clone the repository locally and build from the stable release branch. + +```bash +git clone -b stable https://github.com/chainsafe/lodestar.git +``` + +Switch to created directory. + +```bash +cd lodestar +``` + +### Install packages + +Install across all packages. Lodestar follows a [monorepo](https://github.com/lerna/lerna) structure, so all commands below must be run in the project root. + +```bash +yarn install +``` + +### Build source code + +Build across all packages. 
+ +```bash +yarn run build +``` + +### Lodestar CLI + +Lodestar should now be ready for use. + +```bash +./lodestar --help +``` + +See [Command Line Reference](./../reference/cli.md) for further information. + +## Install from NPM [not recommended] + + +!!! danger + For mainnet (production) usage, we only recommend installing with docker due to [NPM supply chain attacks](https://hackaday.com/2021/10/22/supply-chain-attack-npm-library-used-by-facebook-and-others-was-compromised/). Until a [safer installation method has been found](https://github.com/ChainSafe/lodestar/issues/3596), do not use this install method except for experimental purposes only. + diff --git a/docs/quickstart.md b/docs/pages/getting-started/quick-start.md similarity index 100% rename from docs/quickstart.md rename to docs/pages/getting-started/quick-start.md diff --git a/docs/usage/beacon-management.md b/docs/pages/getting-started/starting-a-node.md similarity index 98% rename from docs/usage/beacon-management.md rename to docs/pages/getting-started/starting-a-node.md index 46b6f2e456c8..dd11381bde10 100644 --- a/docs/usage/beacon-management.md +++ b/docs/pages/getting-started/starting-a-node.md @@ -14,7 +14,7 @@ Make sure Lodestar is installed in your local environment, following the chosen ./lodestar --help ``` -For a complete list of beacon node CLI commands and options, see the [Command Line Reference](../../reference/cli/) +For a complete list of beacon node CLI commands and options, see the [`beacon` CLI Command](../beacon-management/beacon-cli.md) section. To select a known testnet or mainnet, use the `--network` flag. `mainnet` is selected by default, and a list of available networks is listed with the `--help` flag. Setting the `--network` flag will conveniently configure the beacon node or validator client for the selected network. For power users, any configuration option should be able to be overridden. 
@@ -181,4 +181,4 @@ Apr-20 15:16:17.017[] info: Synced - slot: 6264979 - head: 0xde9 6. Peer info: Current total number of outbound or inbound peers, for e.g.: `peers: 27` -For more insight into how a Lodestar beacon node is functioning, you may setup lodestar metrics and use the prepared Grafana dashboards that are found in the repository. Check out our section on [Prometheus and Grafana](./prometheus-grafana.md) for more details. +For more insight into how a Lodestar beacon node is functioning, you may setup lodestar metrics and use the prepared Grafana dashboards that are found in the repository. Check out our section on [Prometheus and Grafana](../logging-and-metrics/prometheus-grafana.md) for more details. diff --git a/docs/pages/getting-started/starting-a-node.new.md b/docs/pages/getting-started/starting-a-node.new.md new file mode 100644 index 000000000000..b66e797b29ed --- /dev/null +++ b/docs/pages/getting-started/starting-a-node.new.md @@ -0,0 +1,21 @@ +# Starting a Node + +## Prerequisites + +### Creating a Client Communication JWT + +### Creating a Validator Keystore + +## Base Considerations + +### Execution Client + +### Beacon Node + +### Validator Client + +## Production Considerations + +### Ingress/Egress + +### Fail-Over diff --git a/docs/pages/google0c42298b7ec08b7e.html b/docs/pages/google0c42298b7ec08b7e.html new file mode 100644 index 000000000000..7edebde149af --- /dev/null +++ b/docs/pages/google0c42298b7ec08b7e.html @@ -0,0 +1 @@ +google-site-verification: google0c42298b7ec08b7e.html \ No newline at end of file diff --git a/docs/index.md b/docs/pages/index.md similarity index 62% rename from docs/index.md rename to docs/pages/index.md index 82674eb89fe8..4af149a7a0ef 100644 --- a/docs/index.md +++ b/docs/pages/index.md @@ -1,19 +1,19 @@ ![lodestar logo](assets/lodestar_icon_text_black_stroke.png) -## Welcome to the Lodestar documentation! 
+## Welcome to the Lodestar documentation > **Lodestar is an open-source Ethereum Consensus client and Typescript ecosystem, maintained by ChainSafe Systems** ### Getting started -- Follow the installation method for [source install](install/source.md), [NPM install](install/npm.md), or [Docker install](install/docker.md) to install Lodestar. Or use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart). -- Use [Lodestar libraries](libraries) in your next Ethereum Typescript project. -- Run a beacon node on [mainnet or a public testnet](usage/beacon-management.md). -- Utilize the whole stack by [starting a local testnet](usage/local). -- View the Lodestar [CLI commands and options](https://chainsafe.github.io/lodestar/reference/cli/) -- Prospective contributors can read the [contributing section](https://chainsafe.github.io/lodestar/contributing/) to understand how we develop and test on Lodestar. +- Follow the installation method for [source install](./getting-started/installation.md/#build-from-source) or [Docker install](./getting-started/installation.md/#docker-installation) to install Lodestar. Or use our [Lodestar Quickstart scripts](https://github.com/ChainSafe/lodestar-quickstart). +- Use [Lodestar libraries](./supporting-libraries/index.md) in your next Ethereum Typescript project. +- Run a beacon node on [mainnet or a public testnet](./getting-started/starting-a-node.md). +- Utilize the whole stack by [starting a local testnet](./advanced-topics/setting-up-a-testnet.md). +- View the Lodestar [CLI commands and options](./beacon-management/beacon-cli.md) +- Prospective contributors can read the [contributing section](./contribution/getting-started.md) to understand how we develop and test on Lodestar. - If you have questions [submit an issue](https://github.com/ChainSafe/lodestar/issues/new) or join us on [Discord](https://discord.gg/yjyvFRP)! 
-- Please note our [security policy](https://github.com/ChainSafe/lodestar/blob/unstable/SECURITY.md). +- Please note our [security policy](./security.md). - Sign up to our [mailing list](https://chainsafe.typeform.com/lodestar) for announcements and any critical information about Lodestar. ## Specifications diff --git a/docs/pages/introduction.md b/docs/pages/introduction.md new file mode 100644 index 000000000000..776b018641b8 --- /dev/null +++ b/docs/pages/introduction.md @@ -0,0 +1,34 @@ +# Introduction + +Ethereum is one of the most profoundly important inventions in recent history. It is a decentralized, open-source blockchain featuring smart contract functionality. It is the second-largest cryptocurrency by market capitalization, after Bitcoin, and is the most actively used blockchain. Ethereum was proposed in 2013 by programmer Vitalik Buterin. Development was crowdfunded in 2014, and the network went live on 30 July 2015, with 72 million coins premined. ChainSafe was founded not too long afterwards and has been actively working in the Ethereum space ever since. We are proud to develop Lodestar and to present this documentation as a resource for the Ethereum community. + +## Proof of Stake + +In Ethereum's Proof of Stake (PoS) model, validators replace miners from the Proof of Work (PoW) system. Validators are Ethereum stakeholders who lock up a portion of their Ether as a stake. The protocol randomly selects these validators to propose new blocks. The chance of being chosen is tied to the size of their stake: the more Ether staked, the higher the probability of being selected to propose the block. Proposers receive transaction fees and block rewards as incentives. Validators are also responsible for voting on the validity of blocks proposed by other validators. However, they face penalties, known as slashing, for actions like double-signing, votes on a block that is not in the majority or going offline, ensuring network integrity and reliability. 
The PoS mechanism significantly reduces energy consumption compared to PoW, because it does not require extensive computational power. Moreover, PoS tends to facilitate faster transaction validations and block creations, enhancing the overall performance and scalability of the network. + +## Consensus Clients + +In an effort to promote client diversity there are several beacon-nodes being developed. Each is programmed in a different language and by a different team. The following is a list of the current beacon-node clients: + +- [Lodestar](https://chainsafe.io/lodestar.html) +- [Prysm](https://prysmaticlabs.com/) +- [Lighthouse](https://lighthouse.sigmaprime.io/) +- [Teku](https://consensys.net/knowledge-base/ethereum-2/teku/) +- [Nimbus](https://nimbus.team/) + +## Why Client Diversity? + +The Ethereum network's robustness is significantly enhanced by its client diversity, whereby multiple, independently-developed clients conforming to a common specification facilitate seamless interaction and function equivalently across nodes. This client variety not only fosters a rich ecosystem but also provides a buffer against network-wide issues stemming from bugs or malicious attacks targeted at particular clients. For instance, during the Shanghai denial-of-service attack in 2016, the diversified client structure enabled the network to withstand the assault, underscoring the resilience afforded by multiple client configurations. + +On the consensus layer, client distribution is crucial for maintaining network integrity and finality, ensuring transactions are irreversible once validated. A balanced spread of nodes across various clients helps mitigate risks associated with potential bugs or attacks that could, in extreme cases, derail the consensus process or lead to incorrect chain splits, thereby jeopardizing the network's stability and trust. 
While the data suggests a dominance of Prysm client on the consensus layer, efforts are ongoing to promote a more even distribution among others like Lighthouse, Teku, and Nimbus. Encouraging the adoption of minority clients, bolstering their documentation, and leveraging real-time client diversity dashboards are among the strategies being employed to enhance client diversity, which in turn fortifies the Ethereum consensus layer against adversities and fosters a healthier decentralized network ecosystem. + +The non-finality event in May 2023 on the Ethereum network posed a significant challenge. The issue arose from attestations for a fork, which necessitated state replays to validate the attestations, causing a notable strain on system resources. As a result, nodes fell out of sync, which deterred the accurate tracking of the actual head of the chain. This situation was exacerbated by a decline in attestations during specific epochs, further hampering the consensus mechanism. The Lodestar team noticed late attestations several weeks prior to the event and implemented a feature that attempted to address such challenges by not processing untimely attestations, and thus not requiring expensive state replays​. While it was done for slightly different reasons, the result was the same. Lodestar was able to follow the chain correctly and helped to stabilize the network. This example underscored the importance of client diversity and network resilience against potential forks and replay attacks. These are considered realistic threats, especially in the context of system complexity like in Ethereum's consensus mechanism. + +## Ethereum Reading List + +- [Ethereum Docs](https://ethereum.org/en/developers/docs/) +- [Upgrading Ethereum](https://eth2book.info/capella/) by Ben Edgington +- [Ethereum Book](https://github.com/ethereumbook/ethereumbook) by Andreas M. 
Antonopoulos and Gavin Wood +- [Ethereum Consensus Specification](https://github.com/ethereum/consensus-specs) +- [Casper the Friendly Finality Gadget](https://browse.arxiv.org/pdf/1710.09437.pdf) by Vitalik Buterin and Virgil Griffith +- [LMD Ghost](https://github.com/protolambda/lmd-ghost) by protolambda diff --git a/docs/pages/lightclient-prover/.gitkeep b/docs/pages/lightclient-prover/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/usage/client-monitoring.md b/docs/pages/logging-and-metrics/client-monitoring.md similarity index 100% rename from docs/usage/client-monitoring.md rename to docs/pages/logging-and-metrics/client-monitoring.md diff --git a/docs/pages/logging-and-metrics/dashboards.md b/docs/pages/logging-and-metrics/dashboards.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/pages/logging-and-metrics/log-management.md b/docs/pages/logging-and-metrics/log-management.md new file mode 100644 index 000000000000..a0ee1d5fec07 --- /dev/null +++ b/docs/pages/logging-and-metrics/log-management.md @@ -0,0 +1,3 @@ +# Log Management + +Check back soon for more information!! diff --git a/docs/pages/logging-and-metrics/metrics-management.md b/docs/pages/logging-and-metrics/metrics-management.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/usage/prometheus-grafana.md b/docs/pages/logging-and-metrics/prometheus-grafana.md similarity index 100% rename from docs/usage/prometheus-grafana.md rename to docs/pages/logging-and-metrics/prometheus-grafana.md diff --git a/docs/pages/reference/cli.md b/docs/pages/reference/cli.md new file mode 100644 index 000000000000..1b57913b99fc --- /dev/null +++ b/docs/pages/reference/cli.md @@ -0,0 +1,8 @@ +# Page relocated + +_**Welcome! This page has been moved. Please checkout our new docs layout from the Table of Contents! 
Below are some helpful links to the CLI pages that were split out from this original document**_ + +- [Beacon Node CLI](../beacon-management/beacon-cli.md) +- [Validator CLI](../validator-management/validator-cli.md) +- [Bootnode CLI](../bootnode/bootnode-cli.md) +- [Light Client CLI](../lightclient-prover/lightclient-cli.md) diff --git a/docs/pages/supporting-libraries/index.md b/docs/pages/supporting-libraries/index.md new file mode 100644 index 000000000000..555294393ec1 --- /dev/null +++ b/docs/pages/supporting-libraries/index.md @@ -0,0 +1,27 @@ +# Supporting Libraries + +## Networking + +### LibP2P + +- [`@chainsafe/js-libp2p-noise`](https://github.com/NodeFactoryIo/js-libp2p-noise) - [Noise](https://noiseprotocol.org/noise.html) handshake for `js-libp2p` +- [`@chainsafe/js-libp2p-gossipsub`](https://github.com/ChainSafe/js-libp2p-gossipsub) - [Gossipsub](https://github.com/libp2p/specs/tree/master/pubsub/gossipsub) protocol for `js-libp2p` +- [`@chainsafe/libp2p-yamux`](https://github.com/ChainSafe/js-libp2p-yamux) + +### Discv5 + +- [`discv5`](https://github.com/ChainSafe/discv5) - [Discv5](https://github.com/ethereum/devp2p/blob/master/discv5/discv5.md) protocol + +## Serialization and Hashing + +- [`@chainsafe/ssz`](https://github.com/ChainSafe/ssz) - Simple Serialize (SSZ) +- [`@chainsafe/persistent-merkle-tree`](https://github.com/ChainSafe/persistent-merkle-tree) - binary merkle tree implemented as a [persistent data structure](https://en.wikipedia.org/wiki/Persistent_data_structure) +- [`@chainsafe/as-sha256`](https://github.com/ChainSafe/as-sha256) - Small AssemblyScript implementation of SHA256 + +## BLS + +- [`@chainsafe/bls`](https://github.com/ChainSafe/bls) - Isomorphic Ethereum Consensus BLS sign / verify / aggregate +- [`@chainsafe/blst-ts`](https://github.com/ChainSafe/blst-ts) - Node specific Ethereum Consensus BLS sign / verify / aggregate +- [`@chainsafe/bls-keystore`](https://github.com/ChainSafe/bls-keystore) - store / retrieve a BLS 
secret key from an [EIP-2335](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2335.md) JSON keystore
+- [`@chainsafe/bls-keygen`](https://github.com/ChainSafe/bls-keygen) - utility functions to generate BLS secret keys, following [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md)
+- [`@chainsafe/bls-hd-key`](https://github.com/ChainSafe/bls-hd-key) - low level [EIP-2333](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2333.md) and [EIP-2334](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2334.md) functionality
diff --git a/docs/libraries/index.md b/docs/pages/supporting-libraries/libraries.md
similarity index 100%
rename from docs/libraries/index.md
rename to docs/pages/supporting-libraries/libraries.md
diff --git a/docs/pages/tools/core-dumps.md b/docs/pages/tools/core-dumps.md
new file mode 100644
index 000000000000..98d564eb9308
--- /dev/null
+++ b/docs/pages/tools/core-dumps.md
@@ -0,0 +1,66 @@
+# Core Dump Analysis
+
+Core dump analysis is some ninja level stuff. Once you get the hang of it you will feel like you have super powers. It will up your game to a whole new level because you will be able to debug issues that seemed impossible before. Post-crash analysis is a very powerful tool to have in your tool belt. A core dump has all of the objects in memory as well as all of the stack frame information at the exact moment the dump was taken, usually when a hard crash occurs.
+
+It is important to note that debug symbols will greatly aid you in your debugging for issues related to native code like `C/C++`. When compiled languages are optimized the compiler will often strip out identifiers and all that will remain are mangled symbols and addresses. Compiling with debug symbols will leave all of the identifiers, file names and line numbers intact.
+
+While it is not always practical to be running code in a Debug version of node, if you run across a persistent issue it will be helpful to recreate it on a debug build and to use that for analysis.
+
+It is important to note that the EXACT binary that was running when the dump was created MUST be loaded when doing analysis. There is a lot of information in the dump that is specific to the binary that was running (like function offsets, etc). If you load a different binary you will get a lot of errors and the analysis will not be useful (if it loads at all).
+
+It is also a nice-to-know that you can create the dump on linux, using a linux compiled version of node, and then read it on a mac. All that is needed is to download the node binary and dump file to the mac. It is possible to load them into a mac compiled version of llnode and all will work as expected. It's just the meta in the linux binary that is needed for analysis, it doesn't actually run the code.
+
+## Installing `llnode`
+
+`llnode` is a Node.js plugin for the [LLDB](https://lldb.llvm.org/) debugger. It is the officially sanctioned tool from Node and a powerful way to do postmortem analysis of Node.js processes. The process for install is pretty straight-forward unless you have an M1 mac. XCode ships with an instance of `lldb` and installing `llnode` is as simple as running `npm install -g llnode`.
+
+On an M1 mac the install will work fine but the plugin will crash at load time. See [this issue](https://github.com/nodejs/llnode/issues/430#issuecomment-1844628224) for updates. The workaround is to install `lldb` via homebrew.
+
+```sh
+# should only be necessary on M1 macs at time of writing
+$ brew install llvm
+$ echo 'export PATH="/opt/homebrew/opt/llvm/bin:$PATH"' >> ~/.zshrc
+$ # note that it's before recopying PATH to make sure it resolves
+$ zsh ~/.zshrc
+$ which llvm-config
+/opt/homebrew/opt/llvm/bin/llvm-config # if this is not what comes up restart the shell
+$ npm install -g llnode
+$ llnode
+(lldb) plugin load '/Users/ninja_user/.nvm/versions/node/v20.5.1/lib/node_modules/llnode/llnode.dylib'
+(lldb) settings set prompt '(llnode) '
+(llnode)
+```
+
+## Collecting a core dump
+
+Before a core dump can be created the system must be enabled.
+
+```sh
+ulimit -c unlimited
+```
+
+This is a critical step. If that command is not run the core will not be dumped to disk.
+
+Core dumps are normally created by the kernel when certain process signals are encountered. `SIGSEGV` is the most common signal that will cause a dump and it's sent by the kernel to the process when a segfault occurs. `SIGSEGV` is not the only signal that works and you can see the full list [here](https://man7.org/linux/man-pages/man7/signal.7.html) under the "Standard Signals" section (all the ones that say "Core" in the "Action" column).
+
+If you want to create a dump on demand you can use the `gcore` command on linux. This will create a dump of the process without killing it. If you don't mind termination you can also use `kill -SIGSEGV <pid>` to send a dump signal to the process.
+
+## Analyzing a core dump
+
+Once you collect the core dump you can load it into `llnode` for debugging.
+
+```sh
+# remember that the node binary must be the exact same one that was running when the core was created
+$ llnode -f /path/to/node_debug -c /Users/ninja_user/coredumps/node.coredump
+(lldb) target create "node_debug" --core "node.coredump"
+Core file '/Users/ninja_user/coredumps/node.coredump' (x86_64) was loaded.
+(lldb) plugin load '/Users/ninja_user/.nvm/versions/node/v20.5.1/lib/node_modules/llnode/llnode.dylib'
+(lldb) settings set prompt '(llnode) '
+(llnode)
+```
+
+Once the dump is loaded the first few steps will be to figure out what types of objects were in memory and what was the processor working on when the crash occurred. Let's start with the stack trace.
+
+There are two distinct commands for pulling the stack because node is both a native runtime and a virtual machine. The `bt`, back trace, command will pull the native stack frames and the `v8 bt` command will use the `llnode` plugin to pull the JavaScript stack frames. Newer versions of `llnode` will automatically pull the JavaScript stack frames when the `bt` command is run but it is still good to know the difference. It is also possible to add the `all` verb to the `bt` command and it will pull the back trace for all threads.
+
+To start looking through memory there are two commands that are helpful. The `v8 findjsobjects` command will list all of the JavaScript objects in memory. The `v8 findjsinstances` command will list all of the instances of a particular JavaScript object.
diff --git a/docs/pages/tools/debugging.md b/docs/pages/tools/debugging.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/docs/tools/flamegraphs.md b/docs/pages/tools/flamegraphs.md
similarity index 100%
rename from docs/tools/flamegraphs.md
rename to docs/pages/tools/flamegraphs.md
diff --git a/docs/pages/tools/heap-dumps.md b/docs/pages/tools/heap-dumps.md
new file mode 100644
index 000000000000..379f7e4de2f2
--- /dev/null
+++ b/docs/pages/tools/heap-dumps.md
@@ -0,0 +1,279 @@
+# Heap Dump Analysis
+
+There are a number of reasons why one would want to do a heap dump but in particular, they are helpful for finding memory intensive operations and leaks. There are two major types of heap dumps that are available to node developers. The first is a JavaScript heap dump, and the second is a native heap dump.
The JS heap dump is much more common and is the default heap dump that is generated by `node`. It is useful when analyzing JS generated objects that are managed by the runtime. However there is one major limitation to the JS heap dump, and that is that it does not include native objects. This is where the native heap dump comes in handy. The native heap dump is a snapshot of the entire process memory, and includes objects that are allocated by `C/C++` code, including native modules in use by the application. The limitation to the native heap dump is that it will not include any JS objects that are allocated by the `V8` runtime. Those are generally created within `mmap`'ed pages and the native heap dump tools are specific to `C` objects that are created with `malloc` and destroyed via `free`. `C++` is also covered as `new` and `delete` are wrappers around `malloc` and `free`. This is why it is important to understand how to analyze both types of memory usage.
+
+## JavaScript Heap Dump
+
+Node has built in `V8` heap dump access and it's a very powerful tool for analyzing memory usage. Understanding how the dump is created will both help to understand how it is displayed and how to use the analysis more effectively.
+
+The `V8` heap dump is a stop the world process because walking the entire heap graph is necessary to create one. This is similar to a full, major garbage collection event. The VM starts at the heap entrance node and walks the entire graph and makes note of every edge that connects each node along the way. Nodes are JSObjects and edges are references between those objects.
+
+By the time the whole heap is walked the full size and values of all nodes are known and all of the connections between those nodes is well understood. The object that is returned is a set of three arrays, the nodes, the edges and the string values that are encountered (because strings are themselves arrays of characters in `C` so they are treated a bit differently by `V8`).
+
+### Creating a `V8` heap dump
+
+There are two functions for creating a heap dump but both call the same functionality under the hood. One streams the result, `require("v8").getHeapSnapshot([options])`, and is primarily intended for use by the Chrome devtools button to "take a snapshot". The second writes the heap dump to a file, `require("v8").writeHeapSnapshot(filename[,options])`.
+
+The optional `options` argument, in both cases, is the same and contains two props: `exposeInternals` and `exposeNumericValues` to enrich the dump. In many cases it's the application layer that one wants to debug so `exposeInternals` is not usually necessary. In `V8` numbers are stored as 32bit integers and the size of pointers is also 32bits. So as an optimization, the pointer to the numeric value can be eliminated and the value itself can be stored in the `Address` of the `Value` instead. `exposeNumericValues` transcribes those "pointers" to the actual numeric value and appends them to the dump.
+
+Because heap analysis happens frequently during Lodestar development there is a helper api endpoint to capture a heap dump. **It is IMPORTANT** that this endpoint is not public facing as it will open the threat of DDOS attack.
+
+The endpoint accepts a `POST` request and you may include an optional `dirpath` query parameter to specify the directory where the heap dump will be written. If the `dirpath` is not specified then the heap dump will be written to the current working directory.
+
+To create a Lodestar heap dump you can use the following command:
+
+```sh
+curl -X POST http://localhost:9596/eth/v1/lodestar/write_heapdump?dirpath=/some/directory/path
+```
+
+### Viewing a `V8` heap dump
+
+It is best to analyze on a local development machine so if Lodestar is running on a cloud instance download the dump to the local environment. Open Chrome, or any Chromium based browser (the example photos were taken using Brave).
In the url bar type `chrome://inspect` to bring up the DevTools menu (in brave the url will be rewritten to `brave://inspect`).
+
+![DevTools](../images/heap-dumps/devtools.png)
+
+Click on the `Open dedicated DevTools for Node` link to open the node specific window and click on the `Memory` tab as shown below.
+
+![Memory Tab](../images/heap-dumps/memory-tab.png)
+
+Load the profile by either right clicking on the left pane or by clicking the `Load` button at the bottom.
+
+![Load Profile](../images/heap-dumps/load-profile.png)
+
+### Analyzing a `V8` heap dump
+
+Analysis is as much an art as it is a science and the best way to learn is to do it a few times. Generally the goal is looking for memory leaks but reducing memory overhead is also something that happens. This guide will focus on leaks. With memory leaks one is looking for why objects have references that prevent them from being garbage collected.
+
+To spot sources of leaks, focus on objects that have large quantities or very large `retained size`. Retained size is the amount of memory that would be freed if the object was garbage collected. As an example if there is an object that has lots and lots of instances, like 100,000, and they are all pushed into an array then the array will have a very large retained size. This is because the array is holding references to all of the objects that it contains.
+
+
+
+
+If it is not immediately apparent what objects are being leaked then another tool in your arsenal will be to take a second snapshot and compare it to the first. This will show what objects have been created/changed since the first snapshot.
+
+If there is an object that has a large retained size but is roughly the same, but not exactly the same, chances are that is NOT the leak.
Some objects can get quite large during runtime but if it's roughly the same size over time, but not exactly the same, it means that the application is modifying the object (why it's not exactly identical in size) but if it hasn't grown significantly over time it can be assumed it is probably the working size of the instances.
+
+Try to focus on objects that are growing in size or in number over time. Growing in size means the object is holding references to other objects and growing in number means a function closure somewhere is retaining the small instances.
+
+
+
+
+That is the science part, but these clues are just breadcrumbs to follow. In order to actually resolve the leak, one needs to go into the code to figure out where those objects are being created, or more often, why the references to them are being retained. This is where the art comes in.
+
+Having a good understanding of the codebase will help to narrow down where to look. It is also common that the leak is not coming directly from Lodestar code, but rather one of the dependencies so be careful not to rule those out.
+
+## Native Heap Dump
+
+_**note: collecting a native heap dump is only supported on linux, analysis can be done from linux or Mac**_
+
+There are several tools that can be used to do native heap dump analysis. The most common are [`massif`](https://valgrind.org/docs/manual/ms-manual.html) from the [`Valgrind`](https://valgrind.org/) suite, google's [`gperftools`](https://github.com/gperftools/gperftools) and `heaptrack` from [KDE](https://community.kde.org/Main_Page). Of the three, `heaptrack` is the most user friendly tool, and it is specifically designed for the task. It is much faster than `Valgrind`, easier to integrate than `gperftools` and also includes a gui for result analysis. Oftentimes there are also memory allocations that are not related to memory leaks, and tools like `Valgrind` and `gperftools` become less useful.
This is why `heaptrack` is the recommended tool for heap dump analysis on Lodestar. + +There are a few things that will make the results with `heaptrack` far better. The most important is using debug builds of all libraries included in a binary, including the application itself. This will make the results usable. Not to say that they will be useless without debug symbols but it will be kinda tough to optimize functions without knowing the function names nor the file and line numbers. + +This is the heart of what `heaptrack` will do for us. It hooks into the memory allocation and adds in stack traces for each `malloc` call site. That way every time memory is reserved there is a way to track back where it happened in the code. `heaptrack` also hooks into the `free` function and checks that versus the allocations to check for memory leaks and for temporary variables that can be optimized. This also allows for optimization of how many of each object is created by identifying high frequency allocations. + +Generally the .heapdump file will be created on a cloud server and then copied to a local machine for analysis, mostly because the gui is not available through ssh. The gui is not required for analysis but it is much easier to use than the command line tools. The first step will be to install `heaptrack` on the target server and to capture a profile. + +### Build collection tools + +Assume the following directory structure: + +```sh +├── beacon-node +│   ├── db +│   ├── logs +│   ├── start-lodestar.sh +│   └── rc-config.yml +├── lodestar +└── node # step below will clone this repo +``` + +We will start from the directory that contains `lodestar` and the `beacon-node` files. + +```sh +# Install heaptrack +$ sudo apt-get update +$ sudo apt-get -y install heaptrack + +# Using a debug build of node is recommended and it can be build +# from source. Clone the node repo to get started. 
+$ git clone https://github.com/nodejs/node.git
+$ cd node
+
+# Use whichever version of node you prefer
+$ git checkout v20.10.0
+$ ./configure --debug
+
+# This command only builds the debug version of node and assumes
+# that a release version of node is already installed on the system
+$ make -C out BUILDTYPE=Debug -j$(nproc --all)
+
+# Move the debug version of node to the same folder that the release
+# version is installed in and name it `node_debug`. This will put the
+# debug binary on the path and allow you to run it with the
+# `node_debug` command
+$ cp out/Debug/node "$(which node)_debug"
+$ which node_debug
+/your/home/directory/.nvm/versions/node/v20.10.0/bin/node_debug
+
+# Return to the lodestar repo
+$ cd ../lodestar
+
+# Clean the build artifacts and node_modules
+$ yarn clean && yarn clean:nm
+
+# Install the dependencies
+$ yarn install
+
+# Ensure that all native modules are rebuilt with debug symbols. Some
+# modules are prebuilt, like classic-level, and the debug symbols may
+# not be included. If the debugging exercise is focused around
+# one of these dependencies, then you will need to manually clone those
+# repos and manually build them with debug symbols.
+$ npm rebuild --debug
+```
+
+### Collect a heap dump
+
+```sh
+# Move to the `beacon-node` directory
+$ cd ../beacon-node
+
+# Start lodestar with profiling enabled
+$ heaptrack \
+$ --output ./lodestar.heapdump \
+$ node_debug \
+$ --max-old-space-size=8192 \
+$ ../lodestar/packages/cli/bin/lodestar.js \
+$ beacon \
+$ --rcConfig ./rc-config.yml \
+$ > /dev/null 2>&1 &
+# Wait some period of time for the heap dump data to be collected
+
+# The data will not be persisted until the process is stopped.
You can gracefully
+# stop the process with the following command and if you want to hard kill it
+# add `-9` to the end of the `kill` command although that should not be necessary
+$ ps aux | grep lodestar | grep -v grep | awk '{print $2}' | head -n 1 | xargs kill
+```
+
+### Collecting a heap dump on a running process
+
+Collecting a heap dump can also be done on a running process. There are both advantages and disadvantages to this approach. The main advantage is that you can collect a heap dump without having to restart. The downside is that the dump will only include allocations/de-allocations while the tracker is running. This means that all the non-paired calls to malloc/free will register as leaks. It will also not give a true representation of how the heap is being used. On the upside, however, the dump will be much smaller in size.
+
+It is important to note a warning that is in the `heaptrack` source code:
+
+_WARNING: Runtime-attaching heaptrack is UNSTABLE and can lead to CRASHES in your application, especially after you detach heaptrack again. You are hereby warned, use it at your own risk!_
+
+```sh
+# Move to the `beacon-node` directory
+$ cd ../beacon-node
+
+# Start lodestar
+$ node_debug \
+$ --max-old-space-size=8192 \
+$ ../lodestar/packages/cli/bin/lodestar.js \
+$ beacon \
+$ --rcConfig ./rc-config.yml \
+$ > /dev/null 2>&1 &
+# Wait some period of time to start collecting the dump
+
+# GDB is required to inject heaptrack into a running process
+# so you may need to install it
+$ sudo apt-get update
+$ sudo apt-get install -y gdb
+
+# Elevated `perf` permissions are also required depending on your
+# system configuration.
Change until the next reboot +$ echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope + +# Get the pid of the lodestar process +$ export LODESTAR_PID=$(ps aux | grep lodestar | grep -v grep | awk '{print $2}' | head -n 1) + +# Inject heaptrack into the running process +$ heaptrack --pid $LODESTAR_PID + +heaptrack output will be written to "/home/user/beacon-node/heaptrack.node_debug.111868.zst" +/usr/lib/heaptrack/libheaptrack_preload.so +injecting heaptrack into application via GDB, this might take some time... +injection finished +# Wait some period of time to collect the heap dump. See below +# for the termination command that can be run from a separate +# terminal when ready to stop collecting data +Terminated +removing heaptrack injection via GDB, this might take some time... +Heaptrack finished! Now run the following to investigate the data: + + heaptrack --analyze "/home/user/beacon-node/heaptrack.node_debug.111868.zst" +``` + +There is a trap in `heaptrack` but the process uses a nested shell to do the actual injection so it is not possible to just Ctrl+C out of the injected process without corrupting the output file. To properly kill the collection one needs to target the nested shell pid. Here is a helper command to target that process: + +```sh +ps -ef | grep '[h]eaptrack --pid' | awk '$3 == '$(ps -ef | grep '[h]eaptrack --pid' | awk '$3 != 1 {print $2}' | head -n 1)' {print $2}' | xargs -r kill +``` + +After working with the injected process for a while, I cannot honestly recommend it. It can work in a pinch, and is best suited for when the profiled process can be exited gracefully without repercussions (not on mainnet for instance). The benefit, though, is that the heapdump will be much smaller and targeted to runtime (will not have the transient, startup allocations) which can make it easier to see what is happening. 
+
+### Installing `heaptrack-gui` on Linux
+
+```sh
+# You can use apt, apt-get or aptitude to install the gui
+$ sudo apt-get update
+$ sudo apt-get install -y heaptrack-gui
+```
+
+### Installing `heaptrack-gui` on OSX
+
+At the time of writing this there is no official pre-built binary for OSX. This was a bit of a challenge but it was WELL worth the effort as the tool works very well. There were a number of bugs along the way while "using the docs" so your mileage may vary, but this is what worked for me.
+
+Most of the dependencies can be installed via Homebrew and the tool itself needs to be built from source. There was one dependency that needed to be built from source. This process assumes a working folder that the repos can be cloned into.
+
+```sh
+# Start in the root folder where the repos will be cloned
+$ brew install qt@5
+
+# prepare tap of kde-mac/kde
+$ brew tap kde-mac/kde https://invent.kde.org/packaging/homebrew-kde.git
+$ "$(brew --repo kde-mac/kde)/tools/do-caveats.sh"
+
+# install the kde-mac and other required dependencies
+$ brew install kde-mac/kde/kf5-kcoreaddons \
+$ kde-mac/kde/kf5-kitemmodels \
+$ kde-mac/kde/kf5-kconfigwidgets \
+$ kde-mac/kde/kdiagram \
+$ extra-cmake-modules \
+$ ki18n \
+$ threadweaver \
+$ boost \
+$ zstd \
+$ gettext
+
+# There is a bug in the current version of kde-mac/kde and one dependency needs
+# to be built manually. This is the workaround to get it built.
+$ git clone https://invent.kde.org/frameworks/kio.git
+$ mkdir kio/build
+$ cd kio/build
+$ export CMAKE_PREFIX_PATH=$(brew --prefix qt@5)
+$ cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..
+$ ninja
+$ sudo ninja install
+$ cd ../..
+ +# Now make sure that the dependencies are available to the system during runtime +$ ln -sfv "$(brew --prefix)/share/kf5" "$HOME/Library/Application Support" +$ ln -sfv "$(brew --prefix)/share/knotifications5" "$HOME/Library/Application Support" +$ ln -sfv "$(brew --prefix)/share/kservices5" "$HOME/Library/Application Support" +$ ln -sfv "$(brew --prefix)/share/kservicetypes5" "$HOME/Library/Application Support" + +# We are now ready to build the heaptrack_gui binaries for analysis on OSX +$ git clone https://invent.kde.org/sdk/heaptrack.git +$ cd heaptrack +$ mkdir build +$ cd build +$ CMAKE_PREFIX_PATH=$(brew --prefix qt@5) PATH=$PATH:/opt/homebrew/opt/gettext/bin cmake .. +$ cmake -DCMAKE_BUILD_TYPE=Release .. +$ make heaptrack_gui +$ sudo make install +# You can now find heaptrack_gui with your gui Applications. It is default +# placed as /Applications/KDE/heaptrack_gui.app +``` diff --git a/docs/pages/tools/perf.md b/docs/pages/tools/perf.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/pages/trouble-shooting.md b/docs/pages/trouble-shooting.md new file mode 100644 index 000000000000..144aeb90ce20 --- /dev/null +++ b/docs/pages/trouble-shooting.md @@ -0,0 +1 @@ +# Trouble Shooting diff --git a/docs/pages/validator-management/key-management.md b/docs/pages/validator-management/key-management.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/pages/validator-management/multiple-and-fallback-validation.md b/docs/pages/validator-management/multiple-and-fallback-validation.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/usage/validator-management.md b/docs/pages/validator-management/validator-management.md similarity index 100% rename from docs/usage/validator-management.md rename to docs/pages/validator-management/validator-management.md diff --git a/docs/pages/validator-management/withdrawals.md b/docs/pages/validator-management/withdrawals.md new file mode 100644 index 
000000000000..e69de29bb2d1 diff --git a/lerna.json b/lerna.json index 4130f46a317b..bb0c43fed5fe 100644 --- a/lerna.json +++ b/lerna.json @@ -4,7 +4,7 @@ ], "npmClient": "yarn", "useNx": true, - "version": "1.12.0", + "version": "1.13.0", "stream": true, "command": { "version": { diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index 759a8dfd7151..000000000000 --- a/mkdocs.yml +++ /dev/null @@ -1,85 +0,0 @@ -site_name: Lodestar Documentation -site_description: Lodestar Documentation - Typescript Ethereum Consensus client -site_url: https://chainsafe.github.io/lodestar - -repo_name: chainsafe/lodestar -repo_url: https://github.com/chainsafe/lodestar - -# Configuration -theme: - name: material - logo: assets/lodestar_icon_300.png - favicon: assets/round-icon.ico - palette: - - scheme: preference - media: "(prefers-color-scheme: light)" - primary: black - accent: deep purple - toggle: - icon: material/weather-night - name: Switch to dark mode - - scheme: slate - media: "(prefers-color-scheme: dark)" - primary: black - accent: deep purple - toggle: - icon: material/weather-sunny - name: Switch to light mode - nav_style: dark - -plugins: - - search - - mermaid2: - version: 8.6.4 - arguments: - theme: | - ^(window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches) ? 
'dark' : 'light' - -markdown_extensions: - - meta - - codehilite: - guess_lang: false - - admonition - - toc: - permalink: true - - pymdownx.superfences: - # make exceptions to highlighting of code (for mermaid): - custom_fences: - - name: mermaid - class: mermaid - format: !!python/name:mermaid2.fence_mermaid -extra_css: - - stylesheets/extras.css - -# Socials -extra: - social: - - icon: fontawesome/brands/github-alt - link: https://github.com/ChainSafe/lodestar - - icon: fontawesome/brands/twitter - link: https://twitter.com/ChainSafeth - - icon: fontawesome/brands/discord - link: https://discord.gg/yjyvFRP - - icon: fontawesome/brands/medium - link: https://blog.chainsafe.io - -# Customize left navigation menu -nav: - - Getting Started: index.md - - Installation: - - Install from source: install/source.md - - Install from NPM: install/npm.md - - Install with Docker: install/docker.md - - Using Lodestar: - - Beacon management: usage/beacon-management.md - - Local testnet: usage/local.md - - Validator management: usage/validator-management.md - - Prometheus & Grafana Setup: usage/prometheus-grafana.md - - MEV Builder Integration: usage/mev-integration.md - - Client monitoring: usage/client-monitoring.md - - Reference: - - Command line: reference/cli.md - - Libraries: libraries/index.md - - Design: - - Lodestar package structure: design/depgraph.md - - Contributing: contributing.md diff --git a/package.json b/package.json index c8910209a83b..8e6dad1fdea2 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,7 @@ "clean": "rm -rf ./packages/*/lib ./packages/*/*.tsbuildinfo", "clean:nm": "rm -rf ./packages/*/node_modules ./node_modules", "build": "lerna run build", - "build:docs": "lerna run build:refdocs && ./scripts/prepare-docs.sh", + "build:docs": "lerna run check-readme && lerna run build:docs && ./scripts/prepare-docs.sh", "build:watch": "lerna exec --parallel -- 'yarn run build:watch'", "build:ifchanged": "lerna exec -- ../../scripts/build_if_changed.sh", 
"lint": "eslint --color --ext .ts packages/*/src packages/*/test", @@ -22,6 +22,7 @@ "check-build": "lerna run check-build", "check-readme": "lerna run check-readme", "check-types": "lerna run check-types", + "check-spelling": "pyspelling -c .pyspelling.yml -v", "coverage": "lerna run coverage", "test": "lerna run test --concurrency 1", "test:unit": "lerna run test:unit --concurrency 1", @@ -52,6 +53,8 @@ "@types/sinon-chai": "^3.2.9", "@typescript-eslint/eslint-plugin": "6.7.2", "@typescript-eslint/parser": "6.7.2", + "@vitest/coverage-v8": "^1.1.0", + "@vitest/browser": "^1.1.0", "c8": "^8.0.1", "chai": "^4.3.8", "chai-as-promised": "^7.1.1", @@ -59,12 +62,13 @@ "crypto-browserify": "^3.12.0", "electron": "^26.2.2", "eslint": "^8.50.0", - "eslint-plugin-import": "^2.28.1", - "eslint-plugin-prettier": "^5.0.0", + "eslint-import-resolver-typescript": "^3.6.1", "eslint-plugin-chai-expect": "^3.0.0", + "eslint-plugin-import": "^2.28.1", "eslint-plugin-mocha": "^10.2.0", - "eslint-import-resolver-typescript": "^3.6.1", + "eslint-plugin-prettier": "^5.0.0", "https-browserify": "^1.0.0", + "jsdom": "^23.0.1", "karma": "^6.4.2", "karma-chai": "^0.1.0", "karma-chrome-launcher": "^3.2.0", @@ -93,13 +97,18 @@ "ts-node": "^10.9.1", "typescript": "^5.2.2", "typescript-docs-verifier": "^2.5.0", - "webpack": "^5.88.2", + "vite-plugin-node-polyfills": "^0.18.0", + "vite-plugin-top-level-await": "^1.4.1", + "vitest": "^1.1.0", + "vitest-when": "^0.3.0", "wait-port": "^1.1.0", - "vitest": "^0.34.6", - "vitest-when": "^0.2.0", - "@vitest/coverage-v8": "^0.34.6" + "webdriverio": "^8.27.0", + "webpack": "^5.88.2" }, "resolutions": { - "dns-over-http-resolver": "^2.1.1" + "dns-over-http-resolver": "^2.1.1", + "chai": "^4.3.10", + "loupe": "^2.3.6", + "vite": "^5.0.0" } } diff --git a/packages/api/.mocharc.yaml b/packages/api/.mocharc.yaml deleted file mode 100644 index f9375365e517..000000000000 --- a/packages/api/.mocharc.yaml +++ /dev/null @@ -1,8 +0,0 @@ -colors: true -timeout: 
2000 -exit: true -extension: ["ts"] -require: - - ./test/setup.ts -node-option: - - "loader=ts-node/esm" diff --git a/packages/api/.nycrc.json b/packages/api/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/api/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/api/package.json b/packages/api/package.json index 2f1c5953a673..2dfcbc73b65c 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -65,16 +65,16 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit && yarn test:e2e", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/config": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", "eventsource": "^2.0.2", "qs": "^6.11.1" }, diff --git a/packages/api/src/beacon/client/events.ts b/packages/api/src/beacon/client/events.ts index 57ac3d5e2dcd..574a0290bb4d 100644 --- a/packages/api/src/beacon/client/events.ts +++ b/packages/api/src/beacon/client/events.ts @@ -65,4 +65,4 @@ export function getClient(config: ChainForkConfig, client: IHttpClient): ApiClie } // https://github.com/EventSource/eventsource/blob/82e034389bd2c08d532c63172b8e858c5b185338/lib/eventsource.js#L143 -type EventSourceError = {status: number; message: string}; +type EventSourceError = {status?: number; message: string}; diff --git 
a/packages/api/src/beacon/routes/beacon/block.ts b/packages/api/src/beacon/routes/beacon/block.ts index afa0df31473d..023dcf2b80d9 100644 --- a/packages/api/src/beacon/routes/beacon/block.ts +++ b/packages/api/src/beacon/routes/beacon/block.ts @@ -1,16 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {ContainerType, ListCompositeType, ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import { - allForks, - Slot, - ssz, - RootHex, - deneb, - phase0, - isSignedBlockContents, - isSignedBlindedBlockContents, -} from "@lodestar/types"; +import {allForks, Slot, ssz, RootHex, deneb, phase0, isSignedBlockContents} from "@lodestar/types"; import {ForkName, ForkSeq} from "@lodestar/params"; import {Endpoint, RequestCodec, RouteDefinitions, Schema} from "../../../utils/index.js"; import { @@ -40,18 +31,14 @@ export const RootResponseType = new ContainerType({ }); export const SignedBlockContentsType = new ContainerType({ signedBlock: ssz.deneb.SignedBeaconBlock, - signedBlobSidecars: ssz.deneb.SignedBlobSidecars, -}); -export const SignedBlindedBlockContentsType = new ContainerType({ - signedBlindedBlock: ssz.deneb.SignedBlindedBeaconBlock, - signedBlindedBlobSidecars: ssz.deneb.SignedBlindedBlobSidecars, + kzgProofs: ssz.deneb.KZGProofs, + blobs: ssz.deneb.Blobs, }); export type BlockHeaderResponse = ValueOf; export type BlockHeadersResponse = ValueOf; export type RootResponse = ValueOf; export type SignedBlockContents = ValueOf; -export type SignedBlindedBlockContents = ValueOf; export type BlockId = RootHex | Slot | "head" | "genesis" | "finalized"; @@ -191,7 +178,7 @@ export type Endpoints = { */ publishBlindedBlock: Endpoint< "POST", - {signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents}, + {signedBlindedBlock: allForks.SignedBlindedBeaconBlock}, {body: unknown; headers: {"Eth-Consensus-Version": ForkName}}, EmptyResponseData, EmptyMeta @@ -200,7 +187,7 @@ export type Endpoints = { 
publishBlindedBlockV2: Endpoint< "POST", { - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents; + signedBlindedBlock: allForks.SignedBlindedBeaconBlock; broadcastValidation?: BroadcastValidation; }, {body: unknown; headers: {"Eth-Consensus-Version": ForkName}; query: {broadcast_validation?: string}}, @@ -430,17 +417,10 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { - const slot = isSignedBlindedBlockContents(signedBlindedBlockOrContents) - ? signedBlindedBlockOrContents.signedBlindedBlock.message.slot - : signedBlindedBlockOrContents.message.slot; + writeReqJson: ({signedBlindedBlock}) => { + const slot = signedBlindedBlock.message.slot; return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? config - .getBlindedForkTypes(slot) - .SignedBeaconBlock.toJson(signedBlindedBlockOrContents as allForks.SignedBlindedBeaconBlock) - : SignedBlindedBlockContentsType.toJson(signedBlindedBlockOrContents as SignedBlindedBlockContents), + body: config.getBlindedForkTypes(slot).SignedBeaconBlock.toJson(signedBlindedBlock), headers: { "Eth-Consensus-Version": config.getForkName(slot), }, @@ -451,23 +431,13 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { - const slot = isSignedBlindedBlockContents(signedBlindedBlockOrContents) - ? signedBlindedBlockOrContents.signedBlindedBlock.message.slot - : signedBlindedBlockOrContents.message.slot; + writeReqSsz: ({signedBlindedBlock}) => { + const slot = signedBlindedBlock.message.slot; return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? 
config - .getBlindedForkTypes(slot) - .SignedBeaconBlock.serialize(signedBlindedBlockOrContents as allForks.SignedBlindedBeaconBlock) - : SignedBlindedBlockContentsType.serialize(signedBlindedBlockOrContents as SignedBlindedBlockContents), + body: config.getBlindedForkTypes(slot).SignedBeaconBlock.serialize(signedBlindedBlock), headers: { "Eth-Consensus-Version": config.getForkName(slot), }, @@ -478,10 +448,7 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { - const slot = isSignedBlindedBlockContents(signedBlindedBlockOrContents) - ? signedBlindedBlockOrContents.signedBlindedBlock.message.slot - : signedBlindedBlockOrContents.message.slot; + writeReqJson: ({signedBlindedBlock, broadcastValidation}) => { + const slot = signedBlindedBlock.message.slot; return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? config - .getBlindedForkTypes(slot) - .SignedBeaconBlock.toJson(signedBlindedBlockOrContents as allForks.SignedBlindedBeaconBlock) - : SignedBlindedBlockContentsType.toJson(signedBlindedBlockOrContents as SignedBlindedBlockContents), + body: config.getBlindedForkTypes(slot).SignedBeaconBlock.toJson(signedBlindedBlock), + headers: { "Eth-Consensus-Version": config.getForkName(slot), }, @@ -519,24 +480,14 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { - const slot = isSignedBlindedBlockContents(signedBlindedBlockOrContents) - ? signedBlindedBlockOrContents.signedBlindedBlock.message.slot - : signedBlindedBlockOrContents.message.slot; + writeReqSsz: ({signedBlindedBlock, broadcastValidation}) => { + const slot = signedBlindedBlock.message.slot; return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? 
config - .getBlindedForkTypes(slot) - .SignedBeaconBlock.serialize(signedBlindedBlockOrContents as allForks.SignedBlindedBeaconBlock) - : SignedBlindedBlockContentsType.serialize(signedBlindedBlockOrContents as SignedBlindedBlockContents), + body: config.getBlindedForkTypes(slot).SignedBeaconBlock.serialize(signedBlindedBlock), headers: { "Eth-Consensus-Version": config.getForkName(slot), }, @@ -548,10 +499,7 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions; @@ -681,11 +680,9 @@ export const definitions: RouteDefinitions = { fromJson: (val) => val as ProduceBlockMeta, toHeadersObject: (meta) => ({ "Eth-Consensus-Version": meta.version, - "Eth-Execution-Payload-Value": String(meta.executionPayloadValue), }), fromHeaders: (headers) => ({ version: headers.get("Eth-Consensus-Version")! as ForkName, - executionPayloadValue: BigInt(headers.get("Eth-Execution-Payload-Value")!), }), }, }, @@ -702,6 +699,7 @@ export const definitions: RouteDefinitions = { feeRecipient, builderSelection, strictFeeRecipientCheck, + blindedLocal, }) => ({ params: {slot}, query: { @@ -711,6 +709,7 @@ export const definitions: RouteDefinitions = { fee_recipient: feeRecipient, builder_selection: builderSelection, strict_fee_recipient_check: strictFeeRecipientCheck, + blinded_local: blindedLocal, }, }), parseReq: ({params, query}) => ({ @@ -721,6 +720,7 @@ export const definitions: RouteDefinitions = { feeRecipient: query.fee_recipient, builderSelection: query.builder_selection as BuilderSelection, strictFeeRecipientCheck: query.strict_fee_recipient_check, + blindedLocal: query.blinded_local, }), schema: { params: {slot: Schema.UintRequired}, @@ -731,6 +731,7 @@ export const definitions: RouteDefinitions = { fee_recipient: Schema.String, builder_selection: Schema.String, strict_fee_recipient_check: Schema.Boolean, + blinded_local: Schema.Boolean, }, }, }, @@ -738,9 +739,7 @@ export const definitions: RouteDefinitions = { data: WithMeta( ({version, 
executionPayloadBlinded}) => (executionPayloadBlinded - ? isForkBlobs(version) - ? BlindedBlockContentsType - : ssz[version as ForkName.bellatrix].BlindedBeaconBlock + ? ssz.allForksBlinded[isForkExecution(version) ? version : ForkName.bellatrix].BeaconBlock : isForkBlobs(version) ? BlockContentsType : ssz[version].BeaconBlock) as Type @@ -750,13 +749,17 @@ export const definitions: RouteDefinitions = { fromJson: (val) => val as ProduceBlockV3Meta, toHeadersObject: (meta) => ({ "Eth-Consensus-Version": meta.version, - "Eth-Execution-Payload-Value": String(meta.executionPayloadValue), + "Eth-Execution-Payload-Source": String(meta.executionPayloadSource), "Eth-Execution-Payload-Blinded": String(meta.executionPayloadBlinded), + "Eth-Execution-Payload-Value": String(meta.executionPayloadValue), + "Eth-Consensus-Block-Value": String(meta.consensusBlockValue), }), fromHeaders: (headers) => ({ version: headers.get("Eth-Consensus-Version")! as ForkName, - executionPayloadValue: BigInt(headers.get("Eth-Execution-Payload-Value")!), + executionPayloadSource: headers.get("Eth-Execution-Payload-Source")! as ProducedBlockSource, executionPayloadBlinded: Boolean(headers.get("Eth-Execution-Payload-Blinded")!), + executionPayloadValue: BigInt(headers.get("Eth-Execution-Payload-Value")!), + consensusBlockValue: BigInt(headers.get("Eth-Consensus-Block-Value")!), }), }, }, @@ -783,23 +786,15 @@ export const definitions: RouteDefinitions = { }, }, resp: { - data: WithVersion( - (fork) => - // TODO fix the else branch - (isForkBlobs(fork) - ? BlindedBlockContentsType - : ssz[fork as ForkName.bellatrix].BlindedBeaconBlock) as Type - ), + data: WithVersion((fork) => ssz.allForksBlinded[isForkExecution(fork) ? 
fork : ForkName.bellatrix].BeaconBlock), meta: { toJson: (meta) => meta, fromJson: (val) => val as ProduceBlockMeta, toHeadersObject: (meta) => ({ "Eth-Consensus-Version": meta.version, - "Eth-Execution-Payload-Value": String(meta.executionPayloadValue), }), fromHeaders: (headers) => ({ version: headers.get("Eth-Consensus-Version")! as ForkName, - executionPayloadValue: BigInt(headers.get("Eth-Execution-Payload-Value")!), }), }, }, diff --git a/packages/api/src/beacon/server/validator.ts b/packages/api/src/beacon/server/validator.ts index 6bf446e05a16..5d6c22557060 100644 --- a/packages/api/src/beacon/server/validator.ts +++ b/packages/api/src/beacon/server/validator.ts @@ -4,6 +4,28 @@ import {ServerRoutes, getGenericJsonServer} from "../../utils/server/index.js"; import {ServerApi} from "../../interfaces.js"; export function getRoutes(config: ChainForkConfig, api: ServerApi): ServerRoutes { - // All routes return JSON, use a server auto-generator - return getGenericJsonServer, ReqTypes>({routesData, getReturnTypes, getReqSerializers}, config, api); + const reqSerializers = getReqSerializers(); + const returnTypes = getReturnTypes(); + + // Most of routes return JSON, use a server auto-generator + const serverRoutes = getGenericJsonServer, ReqTypes>( + {routesData, getReturnTypes, getReqSerializers}, + config, + api + ); + return { + ...serverRoutes, + produceBlockV3: { + ...serverRoutes.produceBlockV3, + handler: async (req, res) => { + const response = await api.produceBlockV3(...reqSerializers.produceBlockV3.parseReq(req)); + void res.header("Eth-Consensus-Version", response.version); + void res.header("Eth-Execution-Payload-Blinded", response.executionPayloadBlinded); + void res.header("Eth-Execution-Payload-Value", response.executionPayloadValue); + void res.header("Eth-Consensus-Block-Value", response.consensusBlockValue); + + return returnTypes.produceBlockV3.toJson(response); + }, + }, + }; } diff --git a/packages/api/src/builder/routes.ts 
b/packages/api/src/builder/routes.ts index 0523e471e0f3..42f6a04273b0 100644 --- a/packages/api/src/builder/routes.ts +++ b/packages/api/src/builder/routes.ts @@ -18,11 +18,6 @@ import { WithVersion, } from "../utils/codecs.js"; import {WireFormat} from "../utils/headers.js"; -import { - PreBlobSignedBlindedBeaconBlock, - SignedBlindedBlockContents, - SignedBlindedBlockContentsType, -} from "../beacon/routes/beacon/block.js"; // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes @@ -60,7 +55,7 @@ export type Endpoints = { submitBlindedBlock: Endpoint< "POST", - {signedBlindedBlock: PreBlobSignedBlindedBeaconBlock} | SignedBlindedBlockContents, + {signedBlindedBlock: allForks.SignedBlindedBeaconBlock}, {body: unknown; headers: {"Eth-Consensus-Version": ForkName}}, allForks.ExecutionPayload | allForks.ExecutionPayloadAndBlobsBundle, VersionMeta @@ -125,13 +120,7 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { const slot = args.signedBlindedBlock.message.slot; return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? config.getBlindedForkTypes(slot).SignedBeaconBlock.toJson(args.signedBlindedBlock) - : SignedBlindedBlockContentsType.toJson({ - signedBlindedBlock: args.signedBlindedBlock, - signedBlindedBlobSidecars: (args as SignedBlindedBlockContents).signedBlindedBlobSidecars, - }), + body: config.getBlindedForkTypes(slot).SignedBeaconBlock.toJson(args.signedBlindedBlock), headers: { "Eth-Consensus-Version": config.getForkName(slot), }, @@ -141,22 +130,14 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { const slot = args.signedBlindedBlock.message.slot; return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? 
config.getBlindedForkTypes(slot).SignedBeaconBlock.serialize(args.signedBlindedBlock) - : SignedBlindedBlockContentsType.serialize({ - signedBlindedBlock: args.signedBlindedBlock, - signedBlindedBlobSidecars: (args as SignedBlindedBlockContents).signedBlindedBlobSidecars, - }), + body: config.getBlindedForkTypes(slot).SignedBeaconBlock.serialize(args.signedBlindedBlock), headers: { "Eth-Consensus-Version": config.getForkName(slot), }, @@ -166,11 +147,9 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions ({ signed_block: blockSerializer(data.signedBlock).toJson(data.signedBlock), - signed_blob_sidecars: ssz.deneb.SignedBlobSidecars.toJson(data.signedBlobSidecars), + kzg_proofs: ssz.deneb.KZGProofs.toJson(data.kzgProofs), + blobs: ssz.deneb.Blobs.toJson(data.blobs), }), - fromJson: (data: {signed_block: unknown; signed_blob_sidecars: unknown}) => ({ + fromJson: (data: {signed_block: unknown; kzg_proofs: unknown; blobs: unknown}) => ({ signedBlock: blockSerializer(data.signed_block as allForks.SignedBeaconBlock).fromJson(data.signed_block), - signedBlobSidecars: ssz.deneb.SignedBlobSidecars.fromJson(data.signed_blob_sidecars), + kzgProofs: ssz.deneb.KZGProofs.fromJson(data.kzg_proofs), + blobs: ssz.deneb.Blobs.fromJson(data.blobs), }), }; } @@ -25,44 +27,13 @@ export function allForksBlockContentsResSerializer(fork: ForkBlobs): TypeJson ({ block: (ssz.allForks[fork].BeaconBlock as allForks.AllForksSSZTypes["BeaconBlock"]).toJson(data.block), - blob_sidecars: ssz.deneb.BlobSidecars.toJson(data.blobSidecars), + kzg_proofs: ssz.deneb.KZGProofs.toJson(data.kzgProofs), + blobs: ssz.deneb.Blobs.toJson(data.blobs), }), - fromJson: (data: {block: unknown; blob_sidecars: unknown}) => ({ + fromJson: (data: {block: unknown; blob_sidecars: unknown; kzg_proofs: unknown; blobs: unknown}) => ({ block: ssz.allForks[fork].BeaconBlock.fromJson(data.block), - blobSidecars: ssz.deneb.BlobSidecars.fromJson(data.blob_sidecars), - }), - }; -} - -export function 
allForksSignedBlindedBlockContentsReqSerializer( - blockSerializer: (data: allForks.SignedBlindedBeaconBlock) => TypeJson -): TypeJson { - return { - toJson: (data) => ({ - signed_blinded_block: blockSerializer(data.signedBlindedBlock).toJson(data.signedBlindedBlock), - signed_blinded_blob_sidecars: ssz.deneb.SignedBlindedBlobSidecars.toJson(data.signedBlindedBlobSidecars), - }), - - fromJson: (data: {signed_blinded_block: unknown; signed_blinded_blob_sidecars: unknown}) => ({ - signedBlindedBlock: blockSerializer(data.signed_blinded_block as allForks.SignedBlindedBeaconBlock).fromJson( - data.signed_blinded_block - ), - signedBlindedBlobSidecars: ssz.deneb.SignedBlindedBlobSidecars.fromJson(data.signed_blinded_blob_sidecars), - }), - }; -} - -export function allForksBlindedBlockContentsResSerializer(fork: ForkBlobs): TypeJson { - return { - toJson: (data) => ({ - blinded_block: (ssz.allForksBlinded[fork].BeaconBlock as allForks.AllForksBlindedSSZTypes["BeaconBlock"]).toJson( - data.blindedBlock - ), - blinded_blob_sidecars: ssz.deneb.BlindedBlobSidecars.toJson(data.blindedBlobSidecars), - }), - fromJson: (data: {blinded_block: unknown; blinded_blob_sidecars: unknown}) => ({ - blindedBlock: ssz.allForksBlinded[fork].BeaconBlock.fromJson(data.blinded_block), - blindedBlobSidecars: ssz.deneb.BlindedBlobSidecars.fromJson(data.blinded_blob_sidecars), + kzgProofs: ssz.deneb.KZGProofs.fromJson(data.kzg_proofs), + blobs: ssz.deneb.Blobs.fromJson(data.blobs), }), }; } diff --git a/packages/api/src/utils/client/httpClient.ts b/packages/api/src/utils/client/httpClient.ts index f40b93345a73..d0b3e3849439 100644 --- a/packages/api/src/utils/client/httpClient.ts +++ b/packages/api/src/utils/client/httpClient.ts @@ -52,8 +52,7 @@ export class HttpClient implements IHttpClient { private readonly urlsScore: number[]; get baseUrl(): string { - // Don't leak username/password to caller - return new URL(this.urlsInits[0].baseUrl).origin; + return this.urlsInits[0].baseUrl; } 
constructor(opts: HttpClientOptions, {logger, metrics}: HttpClientModules = {}) { diff --git a/packages/api/src/utils/client/metrics.ts b/packages/api/src/utils/client/metrics.ts index c8bc3c0637a4..65089e92e7ec 100644 --- a/packages/api/src/utils/client/metrics.ts +++ b/packages/api/src/utils/client/metrics.ts @@ -1,49 +1,9 @@ +import {Gauge, GaugeExtra, Histogram} from "@lodestar/utils"; + export type Metrics = { - requestTime: Histogram<"routeId">; - streamTime: Histogram<"routeId">; - requestErrors: Gauge<"routeId">; - requestToFallbacks: Gauge<"routeId">; - urlsScore: Gauge<"urlIndex">; + requestTime: Histogram<{routeId: string}>; + streamTime: Histogram<{routeId: string}>; + requestErrors: Gauge<{routeId: string}>; + requestToFallbacks: Gauge<{routeId: string}>; + urlsScore: GaugeExtra<{urlIndex: number}>; }; - -type LabelValues = Partial>; -type CollectFn = (metric: Gauge) => void; - -export interface Gauge { - /** - * Increment gauge for given labels - * @param labels Object with label keys and values - * @param value The value to increment with - */ - inc(labels: LabelValues, value?: number): void; - - /** - * Increment gauge - * @param value The value to increment with - */ - inc(value?: number): void; - - /** - * Set gauge value for labels - * @param labels Object with label keys and values - * @param value The value to set - */ - set(labels: LabelValues, value: number): void; - - /** - * Set gauge value - * @param value The value to set - */ - set(value: number): void; - - addCollect(collectFn: CollectFn): void; -} - -export interface Histogram { - /** - * Start a timer where the value in seconds will observed - * @param labels Object with label keys and values - * @return Function to invoke when timer should be stopped - */ - startTimer(labels?: LabelValues): (labels?: LabelValues) => number; -} diff --git a/packages/api/test/globalSetup.ts b/packages/api/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ 
b/packages/api/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/api/test/setup.ts b/packages/api/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/api/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/api/test/unit/beacon/genericServerTest/beacon.test.ts b/packages/api/test/unit/beacon/genericServerTest/beacon.test.ts index ae4428b9fc8e..7972e4bfca65 100644 --- a/packages/api/test/unit/beacon/genericServerTest/beacon.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/beacon.test.ts @@ -1,3 +1,4 @@ +import {describe} from "vitest"; import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; import {Api, ReqTypes} from "../../../../src/beacon/routes/beacon/index.js"; import {getClient} from "../../../../src/beacon/client/beacon.js"; diff --git a/packages/api/test/unit/beacon/genericServerTest/config.test.ts b/packages/api/test/unit/beacon/genericServerTest/config.test.ts index da791aa2c334..e11e4cbff6cb 100644 --- a/packages/api/test/unit/beacon/genericServerTest/config.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/config.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {config} from "@lodestar/config/default"; import {Api, ReqTypes, getReturnTypes} from "../../../../src/beacon/routes/config.js"; import {getClient} from "../../../../src/beacon/client/config.js"; @@ -27,6 +27,6 @@ describe("beacon / config", () => { const jsonRes = returnTypes.getSpec.toJson({data: partialJsonSpec}); const specRes = returnTypes.getSpec.fromJson(jsonRes); - expect(specRes).to.deep.equal({data: partialJsonSpec}, "Wrong toJson -> fromJson"); + expect(specRes).toEqual({data: 
partialJsonSpec}); }); }); diff --git a/packages/api/test/unit/beacon/genericServerTest/debug.test.ts b/packages/api/test/unit/beacon/genericServerTest/debug.test.ts index 44b080e29bf4..6f7889677ec6 100644 --- a/packages/api/test/unit/beacon/genericServerTest/debug.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/debug.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, MockInstance} from "vitest"; import {toHexString} from "@chainsafe/ssz"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; @@ -11,40 +11,42 @@ import {registerRoute} from "../../../../src/utils/server/registerRoute.js"; import {HttpClient} from "../../../../src/utils/client/httpClient.js"; import {testData} from "../testData/debug.js"; -describe("beacon / debug", function () { +describe( + "beacon / debug", + function () { + describe("Run generic server test", () => { + runGenericServerTest(config, getClient, getRoutes, testData); + }); + + // Get state by SSZ + + describe("getState() in SSZ format", () => { + const {baseUrl, server} = getTestServer(); + const mockApi = getMockApi(routesData); + for (const route of Object.values(getRoutes(config, mockApi))) { + registerRoute(server, route); + } + + for (const method of ["getState" as const, "getStateV2" as const]) { + it(method, async () => { + const state = ssz.phase0.BeaconState.defaultValue(); + const stateSerialized = ssz.phase0.BeaconState.serialize(state); + (mockApi[method] as MockInstance).mockResolvedValue(stateSerialized); + + const httpClient = new HttpClient({baseUrl}); + const client = getClient(config, httpClient); + + const res = await client[method]("head", "ssz"); + + expect(res.ok).toBe(true); + + if (res.ok) { + expect(toHexString(res.response)).toBe(toHexString(stateSerialized)); + } + }); + } + }); + }, // Extend timeout since states are very big - this.timeout(30 * 1000); - - describe("Run generic server test", () => { - 
runGenericServerTest(config, getClient, getRoutes, testData); - }); - - // Get state by SSZ - - describe("getState() in SSZ format", () => { - const {baseUrl, server} = getTestServer(); - const mockApi = getMockApi(routesData); - for (const route of Object.values(getRoutes(config, mockApi))) { - registerRoute(server, route); - } - - for (const method of ["getState" as const, "getStateV2" as const]) { - it(method, async () => { - const state = ssz.phase0.BeaconState.defaultValue(); - const stateSerialized = ssz.phase0.BeaconState.serialize(state); - mockApi[method].resolves(stateSerialized); - - const httpClient = new HttpClient({baseUrl}); - const client = getClient(config, httpClient); - - const res = await client[method]("head", "ssz"); - - expect(res.ok).to.be.true; - - if (res.ok) { - expect(toHexString(res.response)).to.equal(toHexString(stateSerialized), "returned state value is not equal"); - } - }); - } - }); -}); + {timeout: 30 * 1000} +); diff --git a/packages/api/test/unit/beacon/genericServerTest/events.test.ts b/packages/api/test/unit/beacon/genericServerTest/events.test.ts index deaf0da9c1b9..48ff8ad3d157 100644 --- a/packages/api/test/unit/beacon/genericServerTest/events.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/events.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import {sleep} from "@lodestar/utils"; import {config} from "@lodestar/config/default"; import {Api, routesData, EventType, BeaconEvent} from "../../../../src/beacon/routes/events.js"; @@ -16,7 +16,9 @@ describe("beacon / events", () => { } let controller: AbortController; - beforeEach(() => (controller = new AbortController())); + beforeEach(() => { + controller = new AbortController(); + }); afterEach(() => controller.abort()); it("Receive events", async () => { @@ -38,9 +40,9 @@ describe("beacon / events", () => { const eventsReceived: BeaconEvent[] = []; await new Promise((resolve, reject) 
=> { - mockApi.eventstream.callsFake(async (topics, signal, onEvent) => { + mockApi.eventstream.mockImplementation(async (topics, signal, onEvent) => { try { - expect(topics).to.deep.equal(topicsToRequest, "Wrong received topics"); + expect(topics).toEqual(topicsToRequest); for (const event of eventsToSend) { onEvent(event); await sleep(5); @@ -58,6 +60,6 @@ describe("beacon / events", () => { }); }); - expect(eventsReceived).to.deep.equal(eventsToSend, "Wrong received events"); + expect(eventsReceived).toEqual(eventsToSend); }); }); diff --git a/packages/api/test/unit/beacon/genericServerTest/lightclient.test.ts b/packages/api/test/unit/beacon/genericServerTest/lightclient.test.ts index 888236dd32f7..10031a150490 100644 --- a/packages/api/test/unit/beacon/genericServerTest/lightclient.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/lightclient.test.ts @@ -1,3 +1,4 @@ +import {describe} from "vitest"; import {config} from "@lodestar/config/default"; import {Api, ReqTypes} from "../../../../src/beacon/routes/lightclient.js"; import {getClient} from "../../../../src/beacon/client/lightclient.js"; diff --git a/packages/api/test/unit/beacon/genericServerTest/node.test.ts b/packages/api/test/unit/beacon/genericServerTest/node.test.ts index cf87e78da4c1..059bd4ca2c88 100644 --- a/packages/api/test/unit/beacon/genericServerTest/node.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/node.test.ts @@ -1,3 +1,4 @@ +import {describe} from "vitest"; import {config} from "@lodestar/config/default"; import {Api, ReqTypes} from "../../../../src/beacon/routes/node.js"; import {getClient} from "../../../../src/beacon/client/node.js"; diff --git a/packages/api/test/unit/beacon/genericServerTest/proofs.test.ts b/packages/api/test/unit/beacon/genericServerTest/proofs.test.ts index 5b3a8dea5b91..4619d20d989f 100644 --- a/packages/api/test/unit/beacon/genericServerTest/proofs.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/proofs.test.ts @@ -1,3 
+1,4 @@ +import {describe} from "vitest"; import {config} from "@lodestar/config/default"; import {Api, ReqTypes} from "../../../../src/beacon/routes/proof.js"; import {getClient} from "../../../../src/beacon/client/proof.js"; diff --git a/packages/api/test/unit/beacon/genericServerTest/validator.test.ts b/packages/api/test/unit/beacon/genericServerTest/validator.test.ts index 399747a82d54..5a87ea9eee5f 100644 --- a/packages/api/test/unit/beacon/genericServerTest/validator.test.ts +++ b/packages/api/test/unit/beacon/genericServerTest/validator.test.ts @@ -1,3 +1,4 @@ +import {describe} from "vitest"; import {config} from "@lodestar/config/default"; import {Api, ReqTypes} from "../../../../src/beacon/routes/validator.js"; import {getClient} from "../../../../src/beacon/client/validator.js"; diff --git a/packages/api/test/unit/beacon/oapiSpec.test.ts b/packages/api/test/unit/beacon/oapiSpec.test.ts index 5bfacce6a683..1a300eba6f36 100644 --- a/packages/api/test/unit/beacon/oapiSpec.test.ts +++ b/packages/api/test/unit/beacon/oapiSpec.test.ts @@ -1,12 +1,12 @@ import path from "node:path"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, beforeAll, expect} from "vitest"; import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; import {OpenApiFile} from "../../utils/parseOpenApiSpec.js"; import {routes} from "../../../src/beacon/index.js"; import {ReqSerializers} from "../../../src/utils/types.js"; import {Schema} from "../../../src/utils/schema.js"; -import {runTestCheckAgainstSpec} from "../../utils/checkAgainstSpec.js"; +import {IgnoredProperty, runTestCheckAgainstSpec} from "../../utils/checkAgainstSpec.js"; import {fetchOpenApiSpec} from "../../utils/fetchOpenApiSpec.js"; // Import all testData and merge below import {testData as beaconTestData} from "./testData/beacon.js"; @@ -23,7 +23,7 @@ import {testData as validatorTestData} from "./testData/validator.js"; // eslint-disable-next-line 
@typescript-eslint/naming-convention const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const version = "v2.3.0"; +const version = "v2.4.2"; const openApiFile: OpenApiFile = { url: `https://github.com/ethereum/beacon-APIs/releases/download/${version}/beacon-node-oapi.json`, filepath: path.join(__dirname, "../../../oapi-schemas/beacon-node-oapi.json"), @@ -84,11 +84,105 @@ const testDatas = { ...validatorTestData, }; +const ignoredOperations = [ + /* missing route */ + /* https://github.com/ChainSafe/lodestar/issues/5694 */ + "getSyncCommitteeRewards", + "getBlockRewards", + "getAttestationsRewards", + "getDepositSnapshot", // Won't fix for now, see https://github.com/ChainSafe/lodestar/issues/5697 + "getBlindedBlock", // https://github.com/ChainSafe/lodestar/issues/5699 + "getNextWithdrawals", // https://github.com/ChainSafe/lodestar/issues/5696 + "getDebugForkChoice", // https://github.com/ChainSafe/lodestar/issues/5700 + /* https://github.com/ChainSafe/lodestar/issues/6080 */ + "getLightClientBootstrap", + "getLightClientUpdatesByRange", + "getLightClientFinalityUpdate", + "getLightClientOptimisticUpdate", + "getPoolBLSToExecutionChanges", + "submitPoolBLSToExecutionChange", +]; + +const ignoredProperties: Record = { + /* + https://github.com/ChainSafe/lodestar/issues/5693 + missing finalized + */ + getStateRoot: {response: ["finalized"]}, + getStateFork: {response: ["finalized"]}, + getStateFinalityCheckpoints: {response: ["finalized"]}, + getStateValidators: {response: ["finalized"]}, + getStateValidator: {response: ["finalized"]}, + getStateValidatorBalances: {response: ["finalized"]}, + getEpochCommittees: {response: ["finalized"]}, + getEpochSyncCommittees: {response: ["finalized"]}, + getStateRandao: {response: ["finalized"]}, + getBlockHeaders: {response: ["finalized"]}, + getBlockHeader: {response: ["finalized"]}, + getBlockV2: {response: ["finalized"]}, + getBlockRoot: {response: ["finalized"]}, + getBlockAttestations: {response: 
["finalized"]}, + getStateV2: {response: ["finalized"]}, + + /* + https://github.com/ChainSafe/lodestar/issues/6168 + /query/syncing_status - must be integer + */ + getHealth: {request: ["query.syncing_status"]}, + + /** + * https://github.com/ChainSafe/lodestar/issues/6185 + * - must have required property 'query' + */ + getBlobSidecars: {request: ["query"]}, + + /* + https://github.com/ChainSafe/lodestar/issues/4638 + /query - must have required property 'skip_randao_verification' + */ + produceBlockV2: {request: ["query.skip_randao_verification"]}, + produceBlindedBlock: {request: ["query.skip_randao_verification"]}, +}; + const openApiJson = await fetchOpenApiSpec(openApiFile); -runTestCheckAgainstSpec(openApiJson, routesData, reqSerializers, returnTypes, testDatas, { - // TODO: Investigate why schema validation fails otherwise - routesDropOneOf: ["produceBlockV2", "produceBlindedBlock", "publishBlindedBlock"], -}); +runTestCheckAgainstSpec( + openApiJson, + routesData, + reqSerializers, + returnTypes, + testDatas, + { + // TODO: Investigate why schema validation fails otherwise (see https://github.com/ChainSafe/lodestar/issues/6187) + routesDropOneOf: [ + "produceBlockV2", + "produceBlockV3", + "produceBlindedBlock", + "publishBlindedBlock", + "publishBlindedBlockV2", + ], + }, + ignoredOperations, + ignoredProperties +); + +const ignoredTopics = [ + /* + https://github.com/ChainSafe/lodestar/issues/6167 + eventTestData[bls_to_execution_change] does not match spec's example + */ + "bls_to_execution_change", + /* + https://github.com/ChainSafe/lodestar/issues/6170 + Error: Invalid slot=0 fork=phase0 for lightclient fork types + */ + "light_client_finality_update", + "light_client_optimistic_update", + /* + https://github.com/ethereum/beacon-APIs/pull/379 + SyntaxError: Unexpected non-whitespace character after JSON at position 629 (line 1 column 630) + */ + "payload_attributes", +]; // eventstream types are defined as comments in the description of "examples". 
// The function runTestCheckAgainstSpec() can't handle those, so the custom code before: @@ -104,7 +198,7 @@ describe("eventstream event data", () => { const eventstreamExamples = openApiJson.paths["/eth/v1/events"]["get"].responses["200"].content?.["text/event-stream"].examples; - before("Check eventstreamExamples exists", () => { + beforeAll(() => { if (!eventstreamExamples) { throw Error(`eventstreamExamples not defined: ${eventstreamExamples}`); } @@ -113,7 +207,9 @@ describe("eventstream event data", () => { const eventSerdes = routes.events.getEventSerdes(config); const knownTopics = new Set(Object.values(routes.events.eventTypes)); - for (const [topic, {value}] of Object.entries(eventstreamExamples ?? {})) { + for (const [topic, {value}] of Object.entries(eventstreamExamples ?? {}).filter( + ([topic]) => !ignoredTopics.includes(topic) + )) { it(topic, () => { if (!knownTopics.has(topic)) { throw Error(`topic ${topic} not implemented`); @@ -130,13 +226,12 @@ describe("eventstream event data", () => { if (testEvent == null) { throw Error(`No eventTestData for ${topic}`); } - const testEventJson = eventSerdes.toJson({ type: topic as routes.events.EventType, message: testEvent, } as routes.events.BeaconEvent); - expect(testEventJson).deep.equals(exampleDataJson, `eventTestData[${topic}] does not match spec's example`); + expect(testEventJson).toEqual(exampleDataJson); }); } }); diff --git a/packages/api/test/unit/beacon/testData/beacon.ts b/packages/api/test/unit/beacon/testData/beacon.ts index 54b2537648cb..7fa8368c590b 100644 --- a/packages/api/test/unit/beacon/testData/beacon.ts +++ b/packages/api/test/unit/beacon/testData/beacon.ts @@ -9,8 +9,8 @@ import { } from "../../../../src/beacon/routes/beacon/index.js"; import {GenericServerTestCases} from "../../../utils/genericServerTest.js"; -const root = Buffer.alloc(32, 1); -const randao = Buffer.alloc(32, 1); +const root = new Uint8Array(32).fill(1); +const randao = new Uint8Array(32).fill(1); const balance = 
32e9; const pubkeyHex = toHexString(Buffer.alloc(48, 1)); diff --git a/packages/api/test/unit/beacon/testData/config.ts b/packages/api/test/unit/beacon/testData/config.ts index 463d584880a3..642ed5e7e224 100644 --- a/packages/api/test/unit/beacon/testData/config.ts +++ b/packages/api/test/unit/beacon/testData/config.ts @@ -15,7 +15,7 @@ export const testData: GenericServerTestCases = { res: { data: { chainId: 1, - address: Buffer.alloc(20, 1), + address: new Uint8Array(20).fill(1), }, }, }, diff --git a/packages/api/test/unit/beacon/testData/events.ts b/packages/api/test/unit/beacon/testData/events.ts index 92e413037bcf..af33f4a2b011 100644 --- a/packages/api/test/unit/beacon/testData/events.ts +++ b/packages/api/test/unit/beacon/testData/events.ts @@ -4,7 +4,7 @@ import {Api, EventData, EventType, blobSidecarSSE} from "../../../../src/beacon/ import {GenericServerTestCases} from "../../../utils/genericServerTest.js"; const abortController = new AbortController(); -const root = Buffer.alloc(32, 0); +const root = new Uint8Array(32); /* eslint-disable @typescript-eslint/no-empty-function, @typescript-eslint/naming-convention */ diff --git a/packages/api/test/unit/beacon/testData/lightclient.ts b/packages/api/test/unit/beacon/testData/lightclient.ts index 553f11d685d1..13e08e365987 100644 --- a/packages/api/test/unit/beacon/testData/lightclient.ts +++ b/packages/api/test/unit/beacon/testData/lightclient.ts @@ -46,6 +46,6 @@ export const testData: GenericServerTestCases = { }, getCommitteeRoot: { args: [1, 2], - res: {data: [Buffer.alloc(32, 0), Buffer.alloc(32, 1)]}, + res: {data: [Uint8Array.from(Buffer.alloc(32, 0)), Uint8Array.from(Buffer.alloc(32, 1))]}, }, }; diff --git a/packages/api/test/unit/beacon/testData/validator.ts b/packages/api/test/unit/beacon/testData/validator.ts index da245646f8d5..c10f67fa4095 100644 --- a/packages/api/test/unit/beacon/testData/validator.ts +++ b/packages/api/test/unit/beacon/testData/validator.ts @@ -1,12 +1,12 @@ import 
{ForkName} from "@lodestar/params"; -import {ssz} from "@lodestar/types"; +import {ssz, ProducedBlockSource} from "@lodestar/types"; import {Api} from "../../../../src/beacon/routes/validator.js"; import {GenericServerTestCases} from "../../../utils/genericServerTest.js"; -const ZERO_HASH = Buffer.alloc(32, 0); -const ZERO_HASH_HEX = "0x" + ZERO_HASH.toString("hex"); -const randaoReveal = Buffer.alloc(96, 1); -const selectionProof = Buffer.alloc(96, 1); +const ZERO_HASH = new Uint8Array(32); +const ZERO_HASH_HEX = "0x" + Buffer.from(ZERO_HASH).toString("hex"); +const randaoReveal = new Uint8Array(96).fill(1); +const selectionProof = new Uint8Array(96).fill(1); const graffiti = "a".repeat(32); const feeRecipient = "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"; @@ -17,7 +17,7 @@ export const testData: GenericServerTestCases = { executionOptimistic: true, data: [ { - pubkey: Buffer.alloc(48, 1), + pubkey: new Uint8Array(48).fill(1), validatorIndex: 2, committeeIndex: 3, committeeLength: 4, @@ -33,7 +33,7 @@ export const testData: GenericServerTestCases = { args: [1000], res: { executionOptimistic: true, - data: [{slot: 1, validatorIndex: 2, pubkey: Buffer.alloc(48, 3)}], + data: [{slot: 1, validatorIndex: 2, pubkey: new Uint8Array(48).fill(3)}], dependentRoot: ZERO_HASH_HEX, }, }, @@ -41,19 +41,32 @@ export const testData: GenericServerTestCases = { args: [1000, [1, 2, 3]], res: { executionOptimistic: true, - data: [{pubkey: Buffer.alloc(48, 1), validatorIndex: 2, validatorSyncCommitteeIndices: [3]}], + data: [{pubkey: Uint8Array.from(Buffer.alloc(48, 1)), validatorIndex: 2, validatorSyncCommitteeIndices: [3]}], }, }, produceBlock: { - args: [32000, randaoReveal, graffiti], + args: [ + 32000, + randaoReveal, + graffiti, + undefined, + {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + ] as unknown as GenericServerTestCases["produceBlock"]["args"], res: {data: ssz.phase0.BeaconBlock.defaultValue()}, }, produceBlockV2: { - args: 
[32000, randaoReveal, graffiti], + args: [ + 32000, + randaoReveal, + graffiti, + undefined, + {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + ] as unknown as GenericServerTestCases["produceBlockV2"]["args"], res: { data: ssz.altair.BeaconBlock.defaultValue(), version: ForkName.altair, executionPayloadValue: ssz.Wei.defaultValue(), + consensusBlockValue: ssz.Gwei.defaultValue(), }, }, produceBlockV3: { @@ -68,15 +81,24 @@ export const testData: GenericServerTestCases = { data: ssz.altair.BeaconBlock.defaultValue(), version: ForkName.altair, executionPayloadValue: ssz.Wei.defaultValue(), + consensusBlockValue: ssz.Gwei.defaultValue(), executionPayloadBlinded: false, + executionPayloadSource: ProducedBlockSource.engine, }, }, produceBlindedBlock: { - args: [32000, randaoReveal, graffiti], + args: [ + 32000, + randaoReveal, + graffiti, + undefined, + {feeRecipient: undefined, builderSelection: undefined, strictFeeRecipientCheck: undefined}, + ] as unknown as GenericServerTestCases["produceBlindedBlock"]["args"], res: { data: ssz.bellatrix.BlindedBeaconBlock.defaultValue(), version: ForkName.bellatrix, executionPayloadValue: ssz.Wei.defaultValue(), + consensusBlockValue: ssz.Gwei.defaultValue(), }, }, produceAttestationData: { diff --git a/packages/api/test/unit/builder/builder.test.ts b/packages/api/test/unit/builder/builder.test.ts index 8a4766e64f00..56b8eee45ea5 100644 --- a/packages/api/test/unit/builder/builder.test.ts +++ b/packages/api/test/unit/builder/builder.test.ts @@ -1,3 +1,4 @@ +import {describe} from "vitest"; import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; import {Api, ReqTypes} from "../../../src/builder/routes.js"; import {getClient} from "../../../src/builder/client.js"; diff --git a/packages/api/test/unit/builder/testData.ts b/packages/api/test/unit/builder/testData.ts index 94ef3c393b20..e198e6971905 100644 --- a/packages/api/test/unit/builder/testData.ts +++ 
b/packages/api/test/unit/builder/testData.ts @@ -7,7 +7,7 @@ import {GenericServerTestCases} from "../../utils/genericServerTest.js"; // randomly pregenerated pubkey const pubkeyRand = "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff2ff92376b778798365e488dab07a652eb04576"; -const root = Buffer.alloc(32, 1); +const root = new Uint8Array(32).fill(1); export const testData: GenericServerTestCases = { status: { diff --git a/packages/api/test/unit/client/fetch.test.ts b/packages/api/test/unit/client/fetch.test.ts index e0f87e1c57e2..80e5f58b164a 100644 --- a/packages/api/test/unit/client/fetch.test.ts +++ b/packages/api/test/unit/client/fetch.test.ts @@ -1,6 +1,6 @@ import crypto from "node:crypto"; import http from "node:http"; -import {expect} from "chai"; +import {describe, it, expect, afterEach} from "vitest"; import {FetchError, FetchErrorType, fetch} from "../../../src/utils/client/fetch.js"; describe("FetchError", function () { @@ -116,12 +116,16 @@ describe("FetchError", function () { ); } - await expect(fetch(url, {signal: signalHandler?.()})).to.be.rejected.then((error: FetchError) => { - expect(error.type).to.be.equal(testCase.errorType); - expect(error.code).to.be.equal(testCase.errorCode); + await expect(fetch(url, {signal: signalHandler?.()})).rejects.toSatisfy((err) => { + expect(err).toBeInstanceOf(FetchError); + expect((err as FetchError).code).toBe(testCase.errorCode); + expect((err as FetchError).type).toBe(testCase.errorType); + if (testCase.expectCause) { - expect(error.cause).to.be.instanceof(Error); + expect((err as FetchError).cause).toBeInstanceOf(Error); } + + return true; }); }); } diff --git a/packages/api/test/unit/client/format.test.ts b/packages/api/test/unit/client/format.test.ts index 0e388c3cb825..2ab73c9295c5 100644 --- a/packages/api/test/unit/client/format.test.ts +++ b/packages/api/test/unit/client/format.test.ts @@ -1,9 +1,9 @@ -import {expect} from "chai"; +import {describe, expect, it} from "vitest"; import 
{EventType} from "../../../src/beacon/routes/events.js"; import {stringifyQuery} from "../../../src/utils/client/format.js"; describe("client / utils / format", () => { it("Should repeat topic query", () => { - expect(stringifyQuery({topics: [EventType.finalizedCheckpoint]})).to.equal("topics=finalized_checkpoint"); + expect(stringifyQuery({topics: [EventType.finalizedCheckpoint]})).toBe("topics=finalized_checkpoint"); }); }); diff --git a/packages/api/test/unit/client/httpClient.test.ts b/packages/api/test/unit/client/httpClient.test.ts index 85dd1106b996..b22727d6a22b 100644 --- a/packages/api/test/unit/client/httpClient.test.ts +++ b/packages/api/test/unit/client/httpClient.test.ts @@ -1,5 +1,5 @@ import {IncomingMessage} from "node:http"; -import {expect} from "chai"; +import {describe, it, afterEach, expect} from "vitest"; import fastify, {RouteOptions} from "fastify"; import {ErrorAborted, TimeoutError, toBase64} from "@lodestar/utils"; import {HttpClient, HttpError} from "../../../src/utils/client/index.js"; @@ -52,8 +52,8 @@ describe("httpClient json client", () => { const {body: resBody, status} = await httpClient.json({url, method: "GET"}); - expect(status).to.equal(HttpStatusCode.OK); - expect(resBody).to.deep.equal({test: 1}, "Wrong res body"); + expect(status).toBe(HttpStatusCode.OK); + expect(resBody).toEqual({test: 1}); }); it("should handle successful POST request correctly", async () => { @@ -76,10 +76,10 @@ describe("httpClient json client", () => { const {body: resBodyReceived, status} = await httpClient.json({url, method: "POST", query, body}); - expect(status).to.equal(HttpStatusCode.OK); - expect(resBodyReceived).to.deep.equal(resBody, "Wrong resBody"); - expect(queryReceived).to.deep.equal(query, "Wrong query"); - expect(bodyReceived).to.deep.equal(body, "Wrong body"); + expect(status).toBe(HttpStatusCode.OK); + expect(resBodyReceived).toEqual(resBody); + expect(queryReceived).toEqual(query); + expect(bodyReceived).toEqual(body); }); 
it("should handle http status code 404 correctly", async () => { @@ -94,8 +94,8 @@ describe("httpClient json client", () => { return Promise.reject(Error("did not throw")); // So it doesn't gets catch {} } catch (e) { if (!(e instanceof HttpError)) throw Error(`Not an HttpError: ${(e as Error).message}`); - expect(e.message).to.equal("Not Found: Route GET:/test-route not found", "Wrong error message"); - expect(e.status).to.equal(404, "Wrong error status code"); + expect(e.message).toBe("Not Found: Route GET:/test-route not found"); + expect(e.status).toBe(404); } }); @@ -112,8 +112,8 @@ describe("httpClient json client", () => { return Promise.reject(Error("did not throw")); } catch (e) { if (!(e instanceof HttpError)) throw Error(`Not an HttpError: ${(e as Error).message}`); - expect(e.message).to.equal("Internal Server Error: Test error"); - expect(e.status).to.equal(500, "Wrong error status code"); + expect(e.message).toBe("Internal Server Error: Test error"); + expect(e.status).toBe(500); } }); @@ -130,8 +130,8 @@ describe("httpClient json client", () => { return Promise.reject(Error("did not throw")); } catch (e) { if (!(e instanceof HttpError)) throw Error(`Not an HttpError: ${(e as Error).message}`); - expect(e.message).to.equal("Service Unavailable: Node is syncing"); - expect(e.status).to.equal(503, "Wrong error status code"); + expect(e.message).toBe("Service Unavailable: Node is syncing"); + expect(e.status).toBe(503); } }); @@ -139,7 +139,7 @@ describe("httpClient json client", () => { const {baseUrl} = await getServer({ ...testRoute, handler: async (req) => { - expect(req.headers.authorization).to.equal("Basic dXNlcjpwYXNzd29yZA=="); + expect(req.headers.authorization).toBe("Basic dXNlcjpwYXNzd29yZA=="); return {}; }, }); @@ -159,13 +159,13 @@ describe("httpClient json client", () => { let {baseUrl} = await getServer({ ...testRoute, handler: async (req) => { - expect(req.headers.authorization).to.equal(`Basic ${toBase64(`${username}:${password}`)}`); 
+ expect(req.headers.authorization).toBe(`Basic ${toBase64(`${username}:${password}`)}`); return {}; }, }); // Since `new URL()` is what URI-encodes, we have to do string manipulation to set the username/password // First validate the assumption that the URL starts with http:// - expect(baseUrl.indexOf("http://")).to.equal(0); + expect(baseUrl.indexOf("http://")).toBe(0); // We avoid using baseUrl.replace() because it treats $ as a special character baseUrl = `http://${username}:${password}@${baseUrl.substring("http://".length)}`; @@ -174,16 +174,6 @@ describe("httpClient json client", () => { await httpClient.json(testRoute); }); - it("should not leak user credentials in baseUrl getter", () => { - const url = new URL("http://localhost"); - url.username = "user"; - url.password = "password"; - const httpClient = new HttpClient({baseUrl: url.toString()}); - - expect(httpClient.baseUrl.includes(url.username)).to.be.false; - expect(httpClient.baseUrl.includes(url.password)).to.be.false; - }); - it("should handle aborting request with timeout", async () => { const {baseUrl} = await getServer({ ...testRoute, diff --git a/packages/api/test/unit/client/httpClientFallback.test.ts b/packages/api/test/unit/client/httpClientFallback.test.ts index 2c0846d00148..ff02095b1cc6 100644 --- a/packages/api/test/unit/client/httpClientFallback.test.ts +++ b/packages/api/test/unit/client/httpClientFallback.test.ts @@ -1,5 +1,4 @@ -import Sinon from "sinon"; -import {expect} from "chai"; +import {describe, it, beforeEach, afterEach, expect, vi} from "vitest"; import {HttpClient} from "../../../src/utils/client/index.js"; describe("httpClient fallback", () => { @@ -8,7 +7,7 @@ describe("httpClient fallback", () => { // Using fetchSub instead of actually setting up servers because there are some strange // race conditions, where the server stub doesn't count the call in time before the test is over. 
- const fetchStub = Sinon.stub<[URL], ReturnType>(); + const fetchStub = vi.fn(); let httpClient: HttpClient; @@ -37,7 +36,7 @@ describe("httpClient fallback", () => { fetch: fetchStub as typeof fetch, }); - fetchStub.callsFake(async (url) => { + fetchStub.mockImplementation(async (url) => { // Simulate network delay await new Promise((r) => setTimeout(r, 10)); const i = getServerIndex(url); @@ -50,7 +49,6 @@ describe("httpClient fallback", () => { }); afterEach(() => { - fetchStub.reset(); serverErrors.clear(); }); @@ -58,13 +56,13 @@ describe("httpClient fallback", () => { function assertServerCallCount(step: number, expectedCallCounts: number[]): void { const callCounts: number[] = []; for (let i = 0; i < serverCount; i++) callCounts[i] = 0; - for (const call of fetchStub.getCalls()) { - callCounts[getServerIndex(call.args[0])]++; + for (const call of fetchStub.mock.calls) { + callCounts[getServerIndex(call)]++; } - expect(callCounts.join(",")).equals(expectedCallCounts.join(","), `step ${step} - callCounts`); + expect(callCounts.join(",")).toBe(expectedCallCounts.join(",")); - fetchStub.resetHistory(); + fetchStub.mockClear(); // eslint-disable-next-line no-console if (DEBUG_LOGS) console.log("completed assertions step", step); @@ -114,7 +112,7 @@ describe("httpClient fallback", () => { serverErrors.set(0, true); serverErrors.set(1, true); serverErrors.set(2, true); - await expect(requestTestRoute()).rejectedWith("test_error_server_2"); + await expect(requestTestRoute()).rejects.toThrow("test_error_server_2"); assertServerCallCount(0, [1, 1, 1]); }); }); diff --git a/packages/api/test/unit/client/httpClientOptions.test.ts b/packages/api/test/unit/client/httpClientOptions.test.ts index 0409a41f5aa9..af0968777219 100644 --- a/packages/api/test/unit/client/httpClientOptions.test.ts +++ b/packages/api/test/unit/client/httpClientOptions.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {HttpClient} from 
"../../../src/index.js"; describe("HTTPClient options", () => { @@ -10,7 +10,7 @@ describe("HTTPClient options", () => { it("Single root baseUrl option", () => { const httpClient = new HttpClient({baseUrl: baseUrl1, bearerToken: bearerToken1}); - expect(httpClient["urlsOpts"]).deep.equals([{baseUrl: baseUrl1, bearerToken: bearerToken1}]); + expect(httpClient["urlsOpts"]).toEqual([{baseUrl: baseUrl1, bearerToken: bearerToken1}]); }); it("Multiple urls option with common bearerToken", () => { @@ -19,7 +19,7 @@ describe("HTTPClient options", () => { bearerToken: bearerToken1, }); - expect(httpClient["urlsOpts"]).deep.equals([ + expect(httpClient["urlsOpts"]).toEqual([ {baseUrl: baseUrl1, bearerToken: bearerToken1}, {baseUrl: baseUrl2, bearerToken: bearerToken1}, ]); @@ -33,7 +33,7 @@ describe("HTTPClient options", () => { ], }); - expect(httpClient["urlsOpts"]).deep.equals([ + expect(httpClient["urlsOpts"]).toEqual([ {baseUrl: baseUrl1, bearerToken: bearerToken1}, {baseUrl: baseUrl2, bearerToken: bearerToken2}, ]); @@ -46,7 +46,7 @@ describe("HTTPClient options", () => { urls: [{baseUrl: baseUrl2, bearerToken: bearerToken2}], }); - expect(httpClient["urlsOpts"]).deep.equals([ + expect(httpClient["urlsOpts"]).toEqual([ {baseUrl: baseUrl1, bearerToken: bearerToken1}, {baseUrl: baseUrl2, bearerToken: bearerToken2}, ]); @@ -62,29 +62,29 @@ describe("HTTPClient options", () => { {baseUrl: baseUrl2, bearerToken: bearerToken2}, ], }); - expect(httpClient["urlsOpts"]).deep.equals([ + expect(httpClient["urlsOpts"]).toEqual([ {baseUrl: baseUrl1, bearerToken: bearerToken1}, {baseUrl: baseUrl2, bearerToken: bearerToken2}, ]); }); it("Throw if empty baseUrl", () => { - expect(() => new HttpClient({baseUrl: ""})).to.throw(Error); + expect(() => new HttpClient({baseUrl: ""})).toThrow(Error); }); it("Throw if invalid baseUrl", () => { - expect(() => new HttpClient({baseUrl: "invalid"})).to.throw(Error); + expect(() => new HttpClient({baseUrl: "invalid"})).toThrow(Error); }); 
it("Throw if empty value in urls option", () => { - expect(() => new HttpClient({urls: [""]})).to.throw(Error); + expect(() => new HttpClient({urls: [""]})).toThrow(Error); }); it("Throw if invalid value in urls option", () => { - expect(() => new HttpClient({urls: ["invalid"]})).to.throw(Error); + expect(() => new HttpClient({urls: ["invalid"]})).toThrow(Error); }); it("Throw if invalid username/password", () => { - expect(() => new HttpClient({baseUrl: "http://hasa%:%can'tbedecoded@localhost"})).to.throw(Error); + expect(() => new HttpClient({baseUrl: "http://hasa%:%can'tbedecoded@localhost"})).toThrow(Error); }); }); diff --git a/packages/api/test/unit/client/urlFormat.test.ts b/packages/api/test/unit/client/urlFormat.test.ts index 851742ac1ed5..5b8e1f294976 100644 --- a/packages/api/test/unit/client/urlFormat.test.ts +++ b/packages/api/test/unit/client/urlFormat.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import { compileRouteUrlFormater, toColonNotationPath, @@ -55,14 +55,14 @@ describe("utils / urlFormat", () => { for (const {urlTemplate, colonNotation, tokens, cases} of testCases) { it(urlTemplate, () => { - expect(urlToTokens(urlTemplate)).deep.equal(tokens, "Wrong tokens"); + expect(urlToTokens(urlTemplate)).toEqual(tokens); - expect(toColonNotationPath(urlTemplate)).equal(colonNotation, "Wrong colonNotation"); + expect(toColonNotationPath(urlTemplate)).toBe(colonNotation); const utlFormater = compileRouteUrlFormater(urlTemplate); - for (const [i, {args, url}] of cases.entries()) { - expect(utlFormater(args)).to.equal(url, `wrong case ${i}`); + for (const [_, {args, url}] of cases.entries()) { + expect(utlFormater(args)).toBe(url); } }); } diff --git a/packages/api/test/unit/keymanager/keymanager.test.ts b/packages/api/test/unit/keymanager/keymanager.test.ts index f00e6e754a51..1adf5b1e44da 100644 --- a/packages/api/test/unit/keymanager/keymanager.test.ts +++ 
b/packages/api/test/unit/keymanager/keymanager.test.ts @@ -1,3 +1,4 @@ +import {describe} from "vitest"; import {config} from "@lodestar/config/default"; import {Api, ReqTypes} from "../../../src/keymanager/routes.js"; import {getClient} from "../../../src/keymanager/client.js"; diff --git a/packages/api/test/unit/utils/acceptHeader.test.ts b/packages/api/test/unit/utils/acceptHeader.test.ts index b92d39799743..67b77864eafa 100644 --- a/packages/api/test/unit/utils/acceptHeader.test.ts +++ b/packages/api/test/unit/utils/acceptHeader.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseAcceptHeader} from "../../../src/utils/acceptHeader.js"; import {ResponseFormat} from "../../../src/interfaces.js"; @@ -30,10 +30,8 @@ describe("utils / acceptHeader", () => { {header: "application/json;q=1,application/octet-stream;q=1", expected: "ssz"}, ]; - for (const testCase of testCases) { - it(`should correctly parse the header ${testCase.header}`, () => { - expect(parseAcceptHeader(testCase.header)).to.equal(testCase.expected); - }); - } + it.each(testCases)("should correctly parse the header $header", ({header, expected}) => { + expect(parseAcceptHeader(header)).toBe(expected); + }); }); }); diff --git a/packages/api/test/unit/utils/serdes.test.ts b/packages/api/test/unit/utils/serdes.test.ts index c390e3e6b6da..5b55ef66805e 100644 --- a/packages/api/test/unit/utils/serdes.test.ts +++ b/packages/api/test/unit/utils/serdes.test.ts @@ -1,70 +1,68 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {fromGraffitiHex, toGraffitiHex} from "../../../src/utils/serdes.js"; describe("utils / serdes", () => { describe("toGraffitiHex", () => { it("should convert a UTF-8 graffiti to hex", () => { - expect(toGraffitiHex("a".repeat(32))).to.equal( - "0x6161616161616161616161616161616161616161616161616161616161616161" - ); + 
expect(toGraffitiHex("a".repeat(32))).toBe("0x6161616161616161616161616161616161616161616161616161616161616161"); }); it("should convert a graffiti with Unicode symbols to hex", () => { - expect(toGraffitiHex("🦇🔊".repeat(4))).to.equal( + expect(toGraffitiHex("🦇🔊".repeat(4))).toBe( "0xf09fa687f09f948af09fa687f09f948af09fa687f09f948af09fa687f09f948a" ); }); it("should trim the hex graffiti if it is too long", () => { - expect(toGraffitiHex("a".repeat(50))).to.equal(toGraffitiHex("a".repeat(32))); + expect(toGraffitiHex("a".repeat(50))).toBe(toGraffitiHex("a".repeat(32))); }); it("should trim the hex graffiti if the last character is a Unicode symbol", () => { - expect(toGraffitiHex("a".repeat(31) + "🐼")).to.equal( + expect(toGraffitiHex("a".repeat(31) + "🐼")).toBe( "0x61616161616161616161616161616161616161616161616161616161616161f0" ); }); it("should right-pad the hex graffiti with zeros if it is too short", () => { - expect(toGraffitiHex("a")).to.equal("0x6100000000000000000000000000000000000000000000000000000000000000"); - expect(toGraffitiHex("ab")).to.equal("0x6162000000000000000000000000000000000000000000000000000000000000"); - expect(toGraffitiHex("abc")).to.equal("0x6162630000000000000000000000000000000000000000000000000000000000"); + expect(toGraffitiHex("a")).toBe("0x6100000000000000000000000000000000000000000000000000000000000000"); + expect(toGraffitiHex("ab")).toBe("0x6162000000000000000000000000000000000000000000000000000000000000"); + expect(toGraffitiHex("abc")).toBe("0x6162630000000000000000000000000000000000000000000000000000000000"); }); }); describe("fromGraffitiHex", () => { it("should convert a hex graffiti to UTF-8", () => { - expect(fromGraffitiHex("0x6161616161616161616161616161616161616161616161616161616161616161")).to.equal( + expect(fromGraffitiHex("0x6161616161616161616161616161616161616161616161616161616161616161")).toBe( "a".repeat(32) ); }); it("should convert a hex graffiti with Unicode symbols to UTF-8", () => { - 
expect(fromGraffitiHex("0xf09fa687f09f948af09fa687f09f948af09fa687f09f948af09fa687f09f948a")).to.equal( + expect(fromGraffitiHex("0xf09fa687f09f948af09fa687f09f948af09fa687f09f948af09fa687f09f948a")).toBe( "🦇🔊".repeat(4) ); }); it("should convert a padded hex graffiti to UTF-8", () => { - expect(fromGraffitiHex("0x6100000000000000000000000000000000000000000000000000000000000000")).to.equal( + expect(fromGraffitiHex("0x6100000000000000000000000000000000000000000000000000000000000000")).toBe( // null bytes will not be displayed/ignored later on "a" + "\u0000".repeat(31) ); }); it("should decode a hex graffiti with a cut off Unicode character at the end", () => { - expect(fromGraffitiHex("0x61616161616161616161616161616161616161616161616161616161616161f0")).to.equal( + expect(fromGraffitiHex("0x61616161616161616161616161616161616161616161616161616161616161f0")).toBe( // last character will be displayed as � "a".repeat(31) + "\ufffd" ); }); it("should not throw an error if an invalid hex graffiti is provided", () => { - expect(() => fromGraffitiHex("a")).to.not.throw(); + expect(() => fromGraffitiHex("a")).not.toThrow(); }); it("should return the provided graffiti string if decoding fails", () => { - expect(fromGraffitiHex("a")).to.equal("a"); + expect(fromGraffitiHex("a")).toBe("a"); }); }); }); diff --git a/packages/api/test/utils/checkAgainstSpec.ts b/packages/api/test/utils/checkAgainstSpec.ts index eba274e16ef6..ed65279bca22 100644 --- a/packages/api/test/utils/checkAgainstSpec.ts +++ b/packages/api/test/utils/checkAgainstSpec.ts @@ -1,16 +1,16 @@ import Ajv, {ErrorObject} from "ajv"; -import {expect} from "chai"; +import {expect, describe, beforeAll, it} from "vitest"; import {ReqGeneric, ReqSerializer, ReturnTypes, RouteDef} from "../../src/utils/types.js"; -import {applyRecursively, OpenApiJson, parseOpenApiSpec, ParseOpenApiSpecOpts} from "./parseOpenApiSpec.js"; +import {applyRecursively, JsonSchema, OpenApiJson, parseOpenApiSpec, ParseOpenApiSpecOpts} from 
"./parseOpenApiSpec.js"; import {GenericServerTestCases} from "./genericServerTest.js"; const ajv = new Ajv({ - // strict: true, - // strictSchema: true, + strict: true, + strictTypes: false, // TODO Enable once beacon-APIs is fixed. See https://github.com/ChainSafe/lodestar/issues/6206 allErrors: true, }); -// TODO: Still necessary? +// Ensure embedded schema 'example' do not fail validation ajv.addKeyword({ keyword: "example", validate: () => true, @@ -19,24 +19,76 @@ ajv.addKeyword({ ajv.addFormat("hex", /^0x[a-fA-F0-9]+$/); +/** + * A set of properties that will be ignored during tests execution. + * This allows for a black-list mechanism to have a test pass while some part of the spec is not yet implemented. + * + * Properties can be nested using dot notation, following JSONPath semantic. + * + * Example: + * - query + * - query.skip_randao_verification + */ +export type IgnoredProperty = { + /** + * Properties to ignore in the request schema + */ + request?: string[]; + /** + * Properties to ignore in the response schema + */ + response?: string[]; +}; + +/** + * Recursively remove a property from a schema + * + * @param schema Schema to remove a property from + * @param property JSONPath like property to remove from the schema + */ +function deleteNested(schema: JsonSchema | undefined, property: string): void { + const properties = schema?.properties; + if (property.includes(".")) { + // Extract first segment, keep the rest as dotted + const [key, ...rest] = property.split("."); + deleteNested(properties?.[key], rest.join(".")); + } else { + // Remove property from 'required' + if (schema?.required) { + schema.required = schema.required?.filter((e) => property !== e); + } + // Remove property from 'properties' + delete properties?.[property]; + } +} + export function runTestCheckAgainstSpec( openApiJson: OpenApiJson, routesData: Record, reqSerializers: Record>, returnTypes: Record[string]>, testDatas: Record[string]>, - opts?: ParseOpenApiSpecOpts + opts?: 
ParseOpenApiSpecOpts, + ignoredOperations: string[] = [], + ignoredProperties: Record = {} ): void { const openApiSpec = parseOpenApiSpec(openApiJson, opts); for (const [operationId, routeSpec] of openApiSpec.entries()) { + const isIgnored = ignoredOperations.some((id) => id === operationId); + if (isIgnored) { + continue; + } + + const ignoredProperty = ignoredProperties[operationId]; + describe(operationId, () => { const {requestSchema, responseOkSchema} = routeSpec; const routeId = operationId; const testData = testDatas[routeId]; const routeData = routesData[routeId]; - before("route is defined", () => { + beforeAll(() => { if (routeData == null) { throw Error(`No routeData for ${routeId}`); } @@ -68,7 +120,15 @@ export function runTestCheckAgainstSpec( stringifyProperties((reqJson as ReqGeneric).params ?? {}); stringifyProperties((reqJson as ReqGeneric).query ?? {}); - // Validate response + const ignoredProperties = ignoredProperty?.request; + if (ignoredProperties) { + // Remove ignored properties from schema validation + for (const property of ignoredProperties) { + deleteNested(routeSpec.requestSchema, property); + } + } + + // Validate request validateSchema(routeSpec.requestSchema, reqJson, "request"); }); } @@ -87,6 +147,13 @@ export function runTestCheckAgainstSpec( } } + const ignoredProperties = ignoredProperty?.response; + if (ignoredProperties) { + // Remove ignored properties from schema validation + for (const property of ignoredProperties) { + deleteNested(routeSpec.responseOkSchema, property); + } + } // Validate response validateSchema(responseOkSchema, resJson, "response"); }); diff --git a/packages/api/test/utils/genericServerTest.ts b/packages/api/test/utils/genericServerTest.ts index d5e091bc25af..f0f805b7469a 100644 --- a/packages/api/test/utils/genericServerTest.ts +++ b/packages/api/test/utils/genericServerTest.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {it, expect, MockInstance} from "vitest"; import {ChainForkConfig} 
from "@lodestar/config"; import {ReqGeneric, Resolves} from "../../src/utils/index.js"; import {FetchOpts, HttpClient, IHttpClient} from "../../src/utils/client/index.js"; @@ -44,30 +44,25 @@ export function runGenericServerTest< it(routeId as string, async () => { // Register mock data for this route // TODO: Look for the type error - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-expect-error - mockApi[routeId].resolves(testCases[routeId].res); + (mockApi[routeId] as MockInstance).mockResolvedValue(testCases[routeId].res); // Do the call const res = await (client[routeId] as APIClientHandler)(...(testCase.args as any[])); // Use spy to assert argument serialization if (testCase.query) { - expect(httpClient.opts?.query).to.deep.equal(testCase.query, "Wrong fetch opts.query"); + expect(httpClient.opts?.query).toEqual(testCase.query); } // Assert server handler called with correct args - expect(mockApi[routeId].callCount).to.equal(1, `mockApi[${routeId as string}] must be called once`); + expect(mockApi[routeId] as MockInstance).toHaveBeenCalledTimes(1); // if mock api args are > testcase args, there may be some undefined extra args parsed towards the end // to obtain a match, ignore the extra args - expect(mockApi[routeId].getCall(0).args.slice(0, testCase.args.length)).to.deep.equal( - testCase.args, - `mockApi[${routeId as string}] wrong args` - ); + expect(mockApi[routeId] as MockInstance).toHaveBeenNthCalledWith(1, ...(testCase.args as any[])); // Assert returned value is correct - expect(res.response).to.deep.equal(testCase.res, "Wrong returned value"); + expect(res.response).toEqual(testCase.res); }); } } diff --git a/packages/api/test/utils/parseOpenApiSpec.ts b/packages/api/test/utils/parseOpenApiSpec.ts index 5faf0082012d..84b024e5950e 100644 --- a/packages/api/test/utils/parseOpenApiSpec.ts +++ b/packages/api/test/utils/parseOpenApiSpec.ts @@ -11,7 +11,7 @@ type RouteUrl = string; /** "get" | "post" */ type HttpMethod = string; 
-type JsonSchema = { +export type JsonSchema = { type: "object"; properties?: Record; required?: string[]; diff --git a/packages/api/test/utils/utils.ts b/packages/api/test/utils/utils.ts index 793f8b2c61ef..8faa2c90d187 100644 --- a/packages/api/test/utils/utils.ts +++ b/packages/api/test/utils/utils.ts @@ -1,6 +1,6 @@ +import {beforeAll, afterAll, MockedObject, vi} from "vitest"; import qs from "qs"; import fastify, {FastifyInstance} from "fastify"; -import Sinon from "sinon"; import {mapValues} from "@lodestar/utils"; import {ServerApi} from "../../src/interfaces.js"; @@ -19,7 +19,7 @@ export function getTestServer(): {baseUrl: string; server: FastifyInstance} { done(); }); - before("start server", async () => { + beforeAll(async () => { await new Promise((resolve, reject) => { server.listen({port}, function (err, address) { if (err !== null && err != undefined) { @@ -31,7 +31,7 @@ export function getTestServer(): {baseUrl: string; server: FastifyInstance} { }); }); - after("stop server", async () => { + afterAll(async () => { await server.close(); }); @@ -41,6 +41,6 @@ export function getTestServer(): {baseUrl: string; server: FastifyInstance} { /** Type helper to get a Sinon mock object type with Api */ export function getMockApi>( routeIds: Record -): Sinon.SinonStubbedInstance> & ServerApi { - return mapValues(routeIds, () => Sinon.stub()) as Sinon.SinonStubbedInstance> & ServerApi; +): MockedObject> & ServerApi { + return mapValues(routeIds, () => vi.fn()) as MockedObject> & ServerApi; } diff --git a/packages/api/vitest.config.ts b/packages/api/vitest.config.ts new file mode 100644 index 000000000000..9f325a6477e2 --- /dev/null +++ b/packages/api/vitest.config.ts @@ -0,0 +1,12 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + restoreMocks: true, + }, + }) +); diff --git 
a/packages/beacon-node/package.json b/packages/beacon-node/package.json index c8cff7cbf28c..396dac8c54eb 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -80,7 +80,7 @@ "test:unit:minimal": "vitest --run --segfaultRetry 3 --dir test/unit/ --coverage", "test:unit:mainnet": "LODESTAR_PRESET=mainnet nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit-mainnet/**/*.test.ts'", "test:unit": "yarn test:unit:minimal && yarn test:unit:mainnet", - "test:e2e": "LODESTAR_PRESET=minimal vitest --run --segfaultRetry 3 --single-thread --dir test/e2e", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --segfaultRetry 3 --poolOptions.threads.singleThread true --dir test/e2e", "test:sim": "mocha 'test/sim/**/*.test.ts'", "test:sim:merge-interop": "mocha 'test/sim/merge-interop.test.ts'", "test:sim:mergemock": "mocha 'test/sim/mergemock.test.ts'", @@ -100,7 +100,7 @@ "@chainsafe/bls": "7.1.1", "@chainsafe/blst": "^0.2.9", "@chainsafe/discv5": "^5.1.0", - "@chainsafe/libp2p-gossipsub": "^10.1.0", + "@chainsafe/libp2p-gossipsub": "^10.1.1", "@chainsafe/libp2p-noise": "^13.0.1", "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/prometheus-gc-stats": "^1.0.0", @@ -119,18 +119,18 @@ "@libp2p/peer-id-factory": "^3.0.4", "@libp2p/prometheus-metrics": "^2.0.7", "@libp2p/tcp": "8.0.8", - "@lodestar/api": "^1.12.0", - "@lodestar/config": "^1.12.0", - "@lodestar/db": "^1.12.0", - "@lodestar/fork-choice": "^1.12.0", - "@lodestar/light-client": "^1.12.0", - "@lodestar/logger": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/reqresp": "^1.12.0", - "@lodestar/state-transition": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", - "@lodestar/validator": "^1.12.0", + "@lodestar/api": "^1.13.0", + "@lodestar/config": "^1.13.0", + "@lodestar/db": 
"^1.13.0", + "@lodestar/fork-choice": "^1.13.0", + "@lodestar/light-client": "^1.13.0", + "@lodestar/logger": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/reqresp": "^1.13.0", + "@lodestar/state-transition": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", + "@lodestar/validator": "^1.13.0", "@multiformats/multiaddr": "^12.1.3", "@types/datastore-level": "^3.0.0", "buffer-xor": "^2.0.2", @@ -145,7 +145,7 @@ "jwt-simple": "0.5.6", "libp2p": "0.46.12", "multiformats": "^11.0.1", - "prom-client": "^14.2.0", + "prom-client": "^15.1.0", "qs": "^6.11.1", "snappyjs": "^0.7.0", "strict-event-emitter-types": "^2.0.0", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index 3fe93345ed32..a4775d523959 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,17 +1,13 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {ApplicationMethods, routes} from "@lodestar/api"; -import { - computeTimeAtSlot, - parseSignedBlindedBlockOrContents, - reconstructFullBlockOrContents, - DataAvailableStatus, -} from "@lodestar/state-transition"; +import {computeTimeAtSlot, reconstructFullBlockOrContents} from "@lodestar/state-transition"; import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; import {sleep, toHex} from "@lodestar/utils"; import {allForks, deneb, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; import {BlockSource, getBlockInput, ImportBlockOpts, BlockInput} from "../../../../chain/blocks/types.js"; import {promiseAllMaybeAsync} from "../../../../util/promises.js"; import {isOptimisticBlock} from "../../../../util/forkChoice.js"; +import {computeBlobSidecars} from "../../../../util/blobs.js"; import {BlockError, BlockErrorCode} from "../../../../chain/errors/index.js"; import {OpSource} from "../../../../metrics/validatorMonitor.js"; import 
{NetworkEvent} from "../../../../network/index.js"; @@ -49,22 +45,23 @@ export function getBeaconBlockApi({ opts: PublishBlockOpts = {} ) => { const seenTimestampSec = Date.now() / 1000; - let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, signedBlobs: deneb.SignedBlobSidecars; + let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, blobSidecars: deneb.BlobSidecars; if (isSignedBlockContents(signedBlockOrContents)) { - ({signedBlock, signedBlobSidecars: signedBlobs} = signedBlockOrContents); + ({signedBlock} = signedBlockOrContents); + blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents); blockForImport = getBlockInput.postDeneb( config, signedBlock, BlockSource.api, - signedBlobs.map((sblob) => sblob.message), + blobSidecars, // don't bundle any bytes for block and blobs null, - signedBlobs.map(() => null) + blobSidecars.map(() => null) ); } else { signedBlock = signedBlockOrContents; - signedBlobs = []; + blobSidecars = []; // TODO: Once API supports submitting data as SSZ, replace null with blockBytes blockForImport = getBlockInput.preDeneb(config, signedBlock, BlockSource.api, null); } @@ -77,9 +74,11 @@ export function getBeaconBlockApi({ const slot = signedBlock.message.slot; const fork = config.getForkName(slot); const blockRoot = toHex(chain.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(signedBlock.message)); + // bodyRoot should be the same to produced block + const bodyRoot = toHex(chain.config.getForkTypes(slot).BeaconBlockBody.hashTreeRoot(signedBlock.message.body)); const blockLocallyProduced = chain.producedBlockRoot.has(blockRoot) || chain.producedBlindedBlockRoot.has(blockRoot); - const valLogMeta = {broadcastValidation, blockRoot, blockLocallyProduced, slot}; + const valLogMeta = {broadcastValidation, blockRoot, bodyRoot, blockLocallyProduced, slot}; switch (broadcastValidation) { case routes.beacon.BroadcastValidation.gossip: { @@ -88,6 +87,11 @@ export function 
getBeaconBlockApi({ await validateGossipBlock(config, chain, signedBlock, fork); } catch (error) { chain.logger.error("Gossip validations failed while publishing the block", valLogMeta, error as Error); + chain.persistInvalidSszValue( + chain.config.getForkTypes(slot).SignedBeaconBlock, + signedBlock, + "api_reject_gossip_failure" + ); throw error; } } @@ -105,6 +109,11 @@ export function getBeaconBlockApi({ blockInput: blockForImport, peer: IDENTITY_PEER_ID, }); + chain.persistInvalidSszValue( + chain.config.getForkTypes(slot).SignedBeaconBlock, + signedBlock, + "api_reject_parent_unknown" + ); throw new BlockError(signedBlock, { code: BlockErrorCode.PARENT_UNKNOWN, parentRoot: toHexString(signedBlock.message.parentRoot), @@ -112,20 +121,20 @@ export function getBeaconBlockApi({ } try { - await verifyBlocksInEpoch.call( - chain as BeaconChain, - parentBlock, - [blockForImport], - [DataAvailableStatus.available], - { - ...opts, - verifyOnly: true, - skipVerifyBlockSignatures: true, - skipVerifyExecutionPayload: true, - } - ); + await verifyBlocksInEpoch.call(chain as BeaconChain, parentBlock, [blockForImport], { + ...opts, + verifyOnly: true, + skipVerifyBlockSignatures: true, + skipVerifyExecutionPayload: true, + seenTimestampSec, + }); } catch (error) { chain.logger.error("Consensus checks failed while publishing the block", valLogMeta, error as Error); + chain.persistInvalidSszValue( + chain.config.getForkTypes(slot).SignedBeaconBlock, + signedBlock, + "api_reject_consensus_failure" + ); throw error; } } @@ -180,18 +189,15 @@ export function getBeaconBlockApi({ } throw e; }), - ...signedBlobs.map((signedBlob) => () => network.publishBlobSidecar(signedBlob)), + ...blobSidecars.map((blobSidecar) => () => network.publishBlobSidecar(blobSidecar)), ]; await promiseAllMaybeAsync(publishPromises); }; const publishBlindedBlock: ApplicationMethods["publishBlindedBlock"] = async ( - {signedBlindedBlockOrContents}, + {signedBlindedBlock}, opts: PublishBlockOpts = {} ) => { 
- const {signedBlindedBlock, signedBlindedBlobSidecars} = - parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents); - const slot = signedBlindedBlock.message.slot; const blockRoot = toHex( chain.config @@ -202,28 +208,32 @@ export function getBeaconBlockApi({ // Either the payload/blobs are cached from i) engine locally or ii) they are from the builder // // executionPayload can be null or a real payload in locally produced so check for presence of root - const source = chain.producedBlockRoot.has(blockRoot) ? ProducedBlockSource.engine : ProducedBlockSource.builder; - - const executionPayload = chain.producedBlockRoot.get(blockRoot) ?? null; - const blobSidecars = executionPayload - ? chain.producedBlobSidecarsCache.get(toHex(executionPayload.blockHash)) - : undefined; - const blobs = blobSidecars ? blobSidecars.map((blobSidecar) => blobSidecar.blob) : null; - - chain.logger.debug("Assembling blinded block for publishing", {source, blockRoot, slot}); - - const signedBlockOrContents = - source === ProducedBlockSource.engine - ? reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs}) - : await reconstructBuilderBlockOrContents(chain, signedBlindedBlockOrContents); - - // the full block is published by relay and it's possible that the block is already known to us - // by gossip - // - // see: https://github.com/ChainSafe/lodestar/issues/5404 - chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); - // TODO: opts are not type safe, add ServerOpts in Endpoint type definition? - return publishBlock({signedBlockOrContents}, {...opts, ignoreIfKnown: true}); + const executionPayload = chain.producedBlockRoot.get(blockRoot); + if (executionPayload !== undefined) { + const source = ProducedBlockSource.engine; + chain.logger.debug("Reconstructing signedBlockOrContents", {blockRoot, slot, source}); + + const contents = executionPayload + ? 
chain.producedContentsCache.get(toHex(executionPayload.blockHash)) ?? null + : null; + const signedBlockOrContents = reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents}); + + chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); + return publishBlock(signedBlockOrContents, opts); + } else { + const source = ProducedBlockSource.builder; + chain.logger.debug("Reconstructing signedBlockOrContents", {blockRoot, slot, source}); + + const signedBlockOrContents = await reconstructBuilderBlockOrContents(chain, signedBlindedBlock); + + // the full block is published by relay and it's possible that the block is already known to us + // by gossip + // + // see: https://github.com/ChainSafe/lodestar/issues/5404 + chain.logger.info("Publishing assembled block", {blockRoot, slot, source}); + // TODO: opts are not type safe, add ServerOpts in Endpoint type definition? + return publishBlock({signedBlockOrContents}, {...opts, ignoreIfKnown: true}); + } }; return { @@ -407,13 +417,13 @@ export function getBeaconBlockApi({ async function reconstructBuilderBlockOrContents( chain: ApiModules["chain"], - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents + signedBlindedBlock: allForks.SignedBlindedBeaconBlock ): Promise { const executionBuilder = chain.executionBuilder; if (!executionBuilder) { throw Error("executionBuilder required to publish SignedBlindedBeaconBlock"); } - const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlockOrContents); + const signedBlockOrContents = await executionBuilder.submitBlindedBlock(signedBlindedBlock); return signedBlockOrContents; } diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts index d9c6906229b7..40276ebe2bfa 100644 --- a/packages/beacon-node/src/api/impl/validator/index.ts +++ b/packages/beacon-node/src/api/impl/validator/index.ts @@ -9,7 +9,6 @@ import { 
computeEpochAtSlot, getCurrentSlot, beaconBlockToBlinded, - blobSidecarsToBlinded, } from "@lodestar/state-transition"; import { GENESIS_SLOT, @@ -31,11 +30,11 @@ import { allForks, BLSSignature, isBlindedBeaconBlock, - isBlindedBlockContents, + isBlockContents, phase0, } from "@lodestar/types"; import {ExecutionStatus} from "@lodestar/fork-choice"; -import {toHex, racePromisesWithCutoff, RaceEvent} from "@lodestar/utils"; +import {toHex, racePromisesWithCutoff, RaceEvent, gweiToWei} from "@lodestar/utils"; import {AttestationError, AttestationErrorCode, GossipAction, SyncCommitteeError} from "../../../chain/errors/index.js"; import {validateApiAggregateAndProof} from "../../../chain/validation/index.js"; import {ZERO_HASH} from "../../../constants/index.js"; @@ -280,7 +279,7 @@ export function getValidatorApi({ ); } - const produceBlindedBlockOrContents = async function produceBlindedBlockOrContents( + const produceBuilderBlindedBlock = async function produceBuilderBlindedBlock( slot: Slot, randaoReveal: BLSSignature, graffiti: string, @@ -288,7 +287,12 @@ export function getValidatorApi({ { skipHeadChecksAndUpdate, }: Omit & {skipHeadChecksAndUpdate?: boolean} = {} - ): Promise { + ): Promise { + const version = config.getForkName(slot); + if (!isForkExecution(version)) { + throw Error(`Invalid fork=${version} for produceBuilderBlindedBlock`); + } + const source = ProducedBlockSource.builder; metrics?.blockProductionRequests.inc({source}); @@ -314,7 +318,7 @@ export function getValidatorApi({ let timer; try { timer = metrics?.blockProductionTime.startTimer(); - const {block, executionPayloadValue} = await chain.produceBlindedBlock({ + const {block, executionPayloadValue, consensusBlockValue} = await chain.produceBlindedBlock({ slot, randaoReveal, graffiti: toGraffitiBuffer(graffiti || ""), @@ -325,30 +329,21 @@ export function getValidatorApi({ logger.verbose("Produced blinded block", { slot, executionPayloadValue, + consensusBlockValue, root: 
toHexString(config.getBlindedForkTypes(slot).BeaconBlock.hashTreeRoot(block)), }); - const version = config.getForkName(block.slot); - if (isForkBlobs(version)) { - const blockHash = toHex((block as bellatrix.BlindedBeaconBlock).body.executionPayloadHeader.blockHash); - const blindedBlobSidecars = chain.producedBlindedBlobSidecarsCache.get(blockHash); - if (blindedBlobSidecars === undefined) { - throw Error("blobSidecars missing in cache"); - } - return { - data: {blindedBlock: block, blindedBlobSidecars} as allForks.BlindedBlockContents, - version, - executionPayloadValue, - }; - } else { - return {data: block, version, executionPayloadValue}; + if (chain.opts.persistProducedBlocks) { + void chain.persistBlock(block, "produced_builder_block"); } + + return {data: block, version, executionPayloadValue, consensusBlockValue}; } finally { if (timer) timer({source}); } }; - const produceFullBlockOrContents = async function produceFullBlockOrContents( + const produceEngineFullBlockOrContents = async function produceEngineFullBlockOrContents( slot: Slot, randaoReveal: BLSSignature, graffiti: string, @@ -375,13 +370,12 @@ export function getValidatorApi({ let timer; try { timer = metrics?.blockProductionTime.startTimer(); - const {block, executionPayloadValue} = await chain.produceBlock({ + const {block, executionPayloadValue, consensusBlockValue} = await chain.produceBlock({ slot, randaoReveal, graffiti: toGraffitiBuffer(graffiti || ""), feeRecipient, }); - const version = config.getForkName(block.slot); if (strictFeeRecipientCheck && feeRecipient && isForkExecution(version)) { const blockFeeRecipient = toHexString((block as bellatrix.BeaconBlock).body.executionPayload.feeRecipient); @@ -395,206 +389,238 @@ export function getValidatorApi({ logger.verbose("Produced execution block", { slot, executionPayloadValue, + consensusBlockValue, root: toHexString(config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block)), }); + if (chain.opts.persistProducedBlocks) { + void 
chain.persistBlock(block, "produced_engine_block"); + } if (isForkBlobs(version)) { const blockHash = toHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash); - const blobSidecars = chain.producedBlobSidecarsCache.get(blockHash); - if (blobSidecars === undefined) { - throw Error("blobSidecars missing in cache"); + const contents = chain.producedContentsCache.get(blockHash); + if (contents === undefined) { + throw Error("contents missing in cache"); } - return {data: {block, blobSidecars} as allForks.BlockContents, version, executionPayloadValue}; + + return { + data: {block, ...contents} as allForks.BlockContents, + version, + executionPayloadValue, + consensusBlockValue, + }; } else { - return {data: block, version, executionPayloadValue}; + return {data: block, version, executionPayloadValue, consensusBlockValue}; } } finally { if (timer) timer({source}); } }; - const produceBlockV3: ServerApi["produceBlockV3"] = async function produceBlockV3( - slot, - randaoReveal, - graffiti, - // TODO deneb: skip randao verification - _skipRandaoVerification?: boolean, - {feeRecipient, builderSelection, strictFeeRecipientCheck}: routes.validator.ExtraProduceBlockOps = {} - ) { - notWhileSyncing(); - await waitForSlot(slot); // Must never request for a future slot > currentSlot - - // Process the queued attestations in the forkchoice for correct head estimation - // forkChoice.updateTime() might have already been called by the onSlot clock - // handler, in which case this should just return. - chain.forkChoice.updateTime(slot); - chain.recomputeForkChoiceHead(); - - const fork = config.getForkName(slot); - // set some sensible opts - builderSelection = builderSelection ?? 
routes.validator.BuilderSelection.MaxProfit; - const isBuilderEnabled = - ForkSeq[fork] >= ForkSeq.bellatrix && - chain.executionBuilder !== undefined && - builderSelection !== routes.validator.BuilderSelection.ExecutionOnly; - - logger.verbose("Assembling block with produceBlockV3 ", { - fork, - builderSelection, + const produceEngineOrBuilderBlock: ServerApi["produceBlockV3"] = + async function produceEngineOrBuilderBlock( slot, - isBuilderEnabled, - strictFeeRecipientCheck, - }); - // Start calls for building execution and builder blocks - const blindedBlockPromise = isBuilderEnabled - ? // can't do fee recipient checks as builder bid doesn't return feeRecipient as of now - produceBlindedBlockOrContents(slot, randaoReveal, graffiti, { - feeRecipient, - // skip checking and recomputing head in these individual produce calls - skipHeadChecksAndUpdate: true, - }).catch((e) => { - logger.error("produceBlindedBlockOrContents failed to produce block", {slot}, e); - return null; - }) - : null; + randaoReveal, + graffiti, + // TODO deneb: skip randao verification + _skipRandaoVerification?: boolean, + {feeRecipient, builderSelection, strictFeeRecipientCheck}: routes.validator.ExtraProduceBlockOps = {} + ) { + notWhileSyncing(); + await waitForSlot(slot); // Must never request for a future slot > currentSlot - const fullBlockPromise = - // At any point either the builder or execution or both flows should be active. - // - // Ideally such a scenario should be prevented on startup, but proposerSettingsFile or keymanager - // configurations could cause a validator pubkey to have builder disabled with builder selection builder only - // (TODO: independently make sure such an options update is not successful for a validator pubkey) - // - // So if builder is disabled ignore builder selection of builderonly if caused by user mistake - !isBuilderEnabled || builderSelection !== routes.validator.BuilderSelection.BuilderOnly - ? 
// TODO deneb: builderSelection needs to be figured out if to be done beacon side - // || builderSelection !== BuilderSelection.BuilderOnly - produceFullBlockOrContents(slot, randaoReveal, graffiti, { + // Process the queued attestations in the forkchoice for correct head estimation + // forkChoice.updateTime() might have already been called by the onSlot clock + // handler, in which case this should just return. + chain.forkChoice.updateTime(slot); + chain.recomputeForkChoiceHead(); + + const fork = config.getForkName(slot); + // set some sensible opts + builderSelection = builderSelection ?? routes.validator.BuilderSelection.MaxProfit; + const isBuilderEnabled = + ForkSeq[fork] >= ForkSeq.bellatrix && + chain.executionBuilder !== undefined && + builderSelection !== routes.validator.BuilderSelection.ExecutionOnly; + + logger.verbose("Assembling block with produceEngineOrBuilderBlock ", { + fork, + builderSelection, + slot, + isBuilderEnabled, + strictFeeRecipientCheck, + }); + // Start calls for building execution and builder blocks + const blindedBlockPromise = isBuilderEnabled + ? 
// can't do fee recipient checks as builder bid doesn't return feeRecipient as of now + produceBuilderBlindedBlock(slot, randaoReveal, graffiti, { feeRecipient, - strictFeeRecipientCheck, // skip checking and recomputing head in these individual produce calls skipHeadChecksAndUpdate: true, }).catch((e) => { - logger.error("produceFullBlockOrContents failed to produce block", {slot}, e); + logger.error("produceBuilderBlindedBlock failed to produce block", {slot}, e); return null; }) : null; - let blindedBlock, fullBlock; - if (blindedBlockPromise !== null && fullBlockPromise !== null) { - // reference index of promises in the race - const promisesOrder = [ProducedBlockSource.builder, ProducedBlockSource.engine]; - [blindedBlock, fullBlock] = await racePromisesWithCutoff< - routes.validator.ProduceBlockOrContentsRes | routes.validator.ProduceBlindedBlockOrContentsRes | null - >( - [blindedBlockPromise, fullBlockPromise], - BLOCK_PRODUCTION_RACE_CUTOFF_MS, - BLOCK_PRODUCTION_RACE_TIMEOUT_MS, - // Callback to log the race events for better debugging capability - (event: RaceEvent, delayMs: number, index?: number) => { - const eventRef = index !== undefined ? {source: promisesOrder[index]} : {}; - logger.verbose("Block production race (builder vs execution)", { - event, - ...eventRef, - delayMs, - cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS, - timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS, - slot, - }); + const fullBlockPromise = + // At any point either the builder or execution or both flows should be active. 
+ // + // Ideally such a scenario should be prevented on startup, but proposerSettingsFile or keymanager + // configurations could cause a validator pubkey to have builder disabled with builder selection builder only + // (TODO: independently make sure such an options update is not successful for a validator pubkey) + // + // So if builder is disabled ignore builder selection of builderonly if caused by user mistake + !isBuilderEnabled || builderSelection !== routes.validator.BuilderSelection.BuilderOnly + ? // TODO deneb: builderSelection needs to be figured out if to be done beacon side + // || builderSelection !== BuilderSelection.BuilderOnly + produceEngineFullBlockOrContents(slot, randaoReveal, graffiti, { + feeRecipient, + strictFeeRecipientCheck, + // skip checking and recomputing head in these individual produce calls + skipHeadChecksAndUpdate: true, + }).catch((e) => { + logger.error("produceEngineFullBlockOrContents failed to produce block", {slot}, e); + return null; + }) + : null; + + let blindedBlock, fullBlock; + if (blindedBlockPromise !== null && fullBlockPromise !== null) { + // reference index of promises in the race + const promisesOrder = [ProducedBlockSource.builder, ProducedBlockSource.engine]; + [blindedBlock, fullBlock] = await racePromisesWithCutoff< + routes.validator.ProduceBlockOrContentsRes | routes.validator.ProduceBlindedBlockRes | null + >( + [blindedBlockPromise, fullBlockPromise], + BLOCK_PRODUCTION_RACE_CUTOFF_MS, + BLOCK_PRODUCTION_RACE_TIMEOUT_MS, + // Callback to log the race events for better debugging capability + (event: RaceEvent, delayMs: number, index?: number) => { + const eventRef = index !== undefined ? 
{source: promisesOrder[index]} : {}; + logger.verbose("Block production race (builder vs execution)", { + event, + ...eventRef, + delayMs, + cutoffMs: BLOCK_PRODUCTION_RACE_CUTOFF_MS, + timeoutMs: BLOCK_PRODUCTION_RACE_TIMEOUT_MS, + slot, + }); + } + ); + if (blindedBlock instanceof Error) { + // error here means race cutoff exceeded + logger.error("Failed to produce builder block", {slot}, blindedBlock); + blindedBlock = null; } - ); - if (blindedBlock instanceof Error) { - // error here means race cutoff exceeded - logger.error("Failed to produce builder block", {slot}, blindedBlock); - blindedBlock = null; - } - if (fullBlock instanceof Error) { - logger.error("Failed to produce execution block", {slot}, fullBlock); + if (fullBlock instanceof Error) { + logger.error("Failed to produce execution block", {slot}, fullBlock); + fullBlock = null; + } + } else if (blindedBlockPromise !== null && fullBlockPromise === null) { + blindedBlock = await blindedBlockPromise; fullBlock = null; + } else if (blindedBlockPromise === null && fullBlockPromise !== null) { + blindedBlock = null; + fullBlock = await fullBlockPromise; + } else { + throw Error( + `Internal Error: Neither builder nor execution proposal flow activated isBuilderEnabled=${isBuilderEnabled} builderSelection=${builderSelection}` + ); } - } else if (blindedBlockPromise !== null && fullBlockPromise === null) { - blindedBlock = await blindedBlockPromise; - fullBlock = null; - } else if (blindedBlockPromise === null && fullBlockPromise !== null) { - blindedBlock = null; - fullBlock = await fullBlockPromise; - } else { - throw Error( - `Internal Error: Neither builder nor execution proposal flow activated isBuilderEnabled=${isBuilderEnabled} builderSelection=${builderSelection}` - ); - } - const builderPayloadValue = blindedBlock?.executionPayloadValue ?? BigInt(0); - const enginePayloadValue = fullBlock?.executionPayloadValue ?? BigInt(0); + const builderPayloadValue = blindedBlock?.executionPayloadValue ?? 
BigInt(0); + const enginePayloadValue = fullBlock?.executionPayloadValue ?? BigInt(0); + const consensusBlockValueBuilder = blindedBlock?.consensusBlockValue ?? BigInt(0); + const consensusBlockValueEngine = fullBlock?.consensusBlockValue ?? BigInt(0); + + const blockValueBuilder = builderPayloadValue + gweiToWei(consensusBlockValueBuilder); // Total block value is in wei + const blockValueEngine = enginePayloadValue + gweiToWei(consensusBlockValueEngine); // Total block value is in wei - let selectedSource: ProducedBlockSource | null = null; + let executionPayloadSource: ProducedBlockSource | null = null; - if (fullBlock && blindedBlock) { - switch (builderSelection) { - case routes.validator.BuilderSelection.MaxProfit: { - // If executionPayloadValues are zero, than choose builder as most likely beacon didn't provide executionPayloadValue - // and builder blocks are most likely thresholded by a min bid - if (enginePayloadValue >= builderPayloadValue && enginePayloadValue !== BigInt(0)) { - selectedSource = ProducedBlockSource.engine; - } else { - selectedSource = ProducedBlockSource.builder; + if (fullBlock && blindedBlock) { + switch (builderSelection) { + case routes.validator.BuilderSelection.MaxProfit: { + if (blockValueEngine >= blockValueBuilder) { + executionPayloadSource = ProducedBlockSource.engine; + } else { + executionPayloadSource = ProducedBlockSource.builder; + } + break; } - break; - } - case routes.validator.BuilderSelection.ExecutionOnly: { - selectedSource = ProducedBlockSource.engine; - break; - } + case routes.validator.BuilderSelection.ExecutionOnly: { + executionPayloadSource = ProducedBlockSource.engine; + break; + } - // For everything else just select the builder - default: { - selectedSource = ProducedBlockSource.builder; + // For everything else just select the builder + default: { + executionPayloadSource = ProducedBlockSource.builder; + } } + logger.verbose(`Selected executionPayloadSource=${executionPayloadSource} block`, { + 
builderSelection, + // winston logger doesn't like bigint + enginePayloadValue: `${enginePayloadValue}`, + builderPayloadValue: `${builderPayloadValue}`, + consensusBlockValueEngine: `${consensusBlockValueEngine}`, + consensusBlockValueBuilder: `${consensusBlockValueBuilder}`, + blockValueEngine: `${blockValueEngine}`, + blockValueBuilder: `${blockValueBuilder}`, + slot, + }); + } else if (fullBlock && !blindedBlock) { + executionPayloadSource = ProducedBlockSource.engine; + logger.verbose("Selected engine block: no builder block produced", { + // winston logger doesn't like bigint + enginePayloadValue: `${enginePayloadValue}`, + consensusBlockValueEngine: `${consensusBlockValueEngine}`, + blockValueEngine: `${blockValueEngine}`, + slot, + }); + } else if (blindedBlock && !fullBlock) { + executionPayloadSource = ProducedBlockSource.builder; + logger.verbose("Selected builder block: no engine block produced", { + // winston logger doesn't like bigint + builderPayloadValue: `${builderPayloadValue}`, + consensusBlockValueBuilder: `${consensusBlockValueBuilder}`, + blockValueBuilder: `${blockValueBuilder}`, + slot, + }); } - logger.verbose(`Selected ${selectedSource} block`, { - builderSelection, - // winston logger doesn't like bigint - enginePayloadValue: `${enginePayloadValue}`, - builderPayloadValue: `${builderPayloadValue}`, - slot, - }); - } else if (fullBlock && !blindedBlock) { - selectedSource = ProducedBlockSource.engine; - logger.verbose("Selected engine block: no builder block produced", { - // winston logger doesn't like bigint - enginePayloadValue: `${enginePayloadValue}`, - slot, - }); - } else if (blindedBlock && !fullBlock) { - selectedSource = ProducedBlockSource.builder; - logger.verbose("Selected builder block: no engine block produced", { - // winston logger doesn't like bigint - builderPayloadValue: `${builderPayloadValue}`, - slot, - }); - } - if (selectedSource === null) { - throw Error(`Failed to produce engine or builder block for 
slot=${slot}`); - } + if (executionPayloadSource === null) { + throw Error(`Failed to produce engine or builder block for slot=${slot}`); + } - if (selectedSource === ProducedBlockSource.engine) { - return {...fullBlock, executionPayloadBlinded: false} as routes.validator.ProduceBlockOrContentsRes & { - executionPayloadBlinded: false; - }; - } else { - return {...blindedBlock, executionPayloadBlinded: true} as routes.validator.ProduceBlindedBlockOrContentsRes & { - executionPayloadBlinded: true; - }; - } - }; + if (executionPayloadSource === ProducedBlockSource.engine) { + return { + ...fullBlock, + executionPayloadBlinded: false, + executionPayloadSource, + } as routes.validator.ProduceBlockOrContentsRes & { + executionPayloadBlinded: false; + executionPayloadSource: ProducedBlockSource; + }; + } else { + return { + ...blindedBlock, + executionPayloadBlinded: true, + executionPayloadSource, + } as routes.validator.ProduceBlindedBlockRes & { + executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; + }; + } + }; const produceBlock: ServerApi["produceBlock"] = async function produceBlock( slot, randaoReveal, graffiti ) { - const producedData = await produceFullBlockOrContents(slot, randaoReveal, graffiti); + const producedData = await produceEngineFullBlockOrContents(slot, randaoReveal, graffiti); if (isForkBlobs(producedData.version)) { throw Error(`Invalid call to produceBlock for deneb+ fork=${producedData.version}`); } else { @@ -604,45 +630,85 @@ export function getValidatorApi({ } }; - const produceBlindedBlock: ServerApi["produceBlindedBlock"] = - async function produceBlindedBlock(slot, randaoReveal, graffiti) { - const producedData = await produceBlockV3(slot, randaoReveal, graffiti); - let blindedProducedData: routes.validator.ProduceBlindedBlockOrContentsRes; - - if (isForkBlobs(producedData.version)) { - if (isBlindedBlockContents(producedData.data as allForks.FullOrBlindedBlockContents)) { - blindedProducedData = producedData as 
routes.validator.ProduceBlindedBlockOrContentsRes; - } else { - // - const {block, blobSidecars} = producedData.data as allForks.BlockContents; - const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]); - const blindedBlobSidecars = blobSidecarsToBlinded(blobSidecars); + const produceEngineOrBuilderBlindedBlock: ServerApi["produceBlindedBlock"] = + async function produceEngineOrBuilderBlindedBlock(slot, randaoReveal, graffiti) { + const {data, executionPayloadValue, consensusBlockValue, version} = await produceEngineOrBuilderBlock( + slot, + randaoReveal, + graffiti + ); + if (!isForkExecution(version)) { + throw Error(`Invalid fork=${version} for produceEngineOrBuilderBlindedBlock`); + } + const executionPayloadBlinded = true; + + if (isBlockContents(data)) { + const {block} = data; + const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]); + return {executionPayloadValue, consensusBlockValue, data: blindedBlock, executionPayloadBlinded, version}; + } else if (isBlindedBeaconBlock(data)) { + return {executionPayloadValue, consensusBlockValue, data, executionPayloadBlinded, version}; + } else { + const blindedBlock = beaconBlockToBlinded(config, data as allForks.AllForksExecution["BeaconBlock"]); + return {executionPayloadValue, consensusBlockValue, data: blindedBlock, executionPayloadBlinded, version}; + } + }; - blindedProducedData = { - ...producedData, - data: {blindedBlock, blindedBlobSidecars}, - } as routes.validator.ProduceBlindedBlockOrContentsRes; - } + const produceBlockV3: ServerApi["produceBlockV3"] = async function produceBlockV3( + slot, + randaoReveal, + graffiti, + skipRandaoVerification?: boolean, + opts: routes.validator.ExtraProduceBlockOps = {} + ) { + const produceBlockEngineOrBuilderRes = await produceEngineOrBuilderBlock( + slot, + randaoReveal, + graffiti, + skipRandaoVerification, + opts + ); + + if (opts.blindedLocal === true && 
ForkSeq[produceBlockEngineOrBuilderRes.version] >= ForkSeq.bellatrix) { + if (produceBlockEngineOrBuilderRes.executionPayloadBlinded) { + return produceBlockEngineOrBuilderRes; } else { - if (isBlindedBeaconBlock(producedData.data)) { - blindedProducedData = producedData as routes.validator.ProduceBlindedBlockOrContentsRes; - } else { - const block = producedData.data; + if (isBlockContents(produceBlockEngineOrBuilderRes.data)) { + const {block} = produceBlockEngineOrBuilderRes.data; const blindedBlock = beaconBlockToBlinded(config, block as allForks.AllForksExecution["BeaconBlock"]); - blindedProducedData = { - ...producedData, + return { + ...produceBlockEngineOrBuilderRes, data: blindedBlock, - } as routes.validator.ProduceBlindedBlockOrContentsRes; + executionPayloadBlinded: true, + } as routes.validator.ProduceBlindedBlockRes & { + executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; + }; + } else { + const blindedBlock = beaconBlockToBlinded( + config, + produceBlockEngineOrBuilderRes.data as allForks.AllForksExecution["BeaconBlock"] + ); + return { + ...produceBlockEngineOrBuilderRes, + data: blindedBlock, + executionPayloadBlinded: true, + } as routes.validator.ProduceBlindedBlockRes & { + executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; + }; } } - return blindedProducedData; - }; + } else { + return produceBlockEngineOrBuilderRes; + } + }; return { produceBlock, - produceBlockV2: produceFullBlockOrContents, + produceBlockV2: produceEngineFullBlockOrContents, produceBlockV3, - produceBlindedBlock, + produceBlindedBlock: produceEngineOrBuilderBlindedBlock, async produceAttestationData(committeeIndex, slot) { notWhileSyncing(); diff --git a/packages/beacon-node/src/api/rest/activeSockets.ts b/packages/beacon-node/src/api/rest/activeSockets.ts index ba8a35c80119..9f1b0f1a78a3 100644 --- a/packages/beacon-node/src/api/rest/activeSockets.ts +++ b/packages/beacon-node/src/api/rest/activeSockets.ts @@ -1,12 
+1,11 @@ import http, {Server} from "node:http"; import {Socket} from "node:net"; -import {waitFor} from "@lodestar/utils"; -import {IGauge} from "../../metrics/index.js"; +import {Gauge, GaugeExtra, waitFor} from "@lodestar/utils"; export type SocketMetrics = { - activeSockets: IGauge; - socketsBytesRead: IGauge; - socketsBytesWritten: IGauge; + activeSockets: GaugeExtra; + socketsBytesRead: Gauge; + socketsBytesWritten: Gauge; }; // Use relatively short timeout to speed up shutdown diff --git a/packages/beacon-node/src/api/rest/base.ts b/packages/beacon-node/src/api/rest/base.ts index b9c86ff5f372..8a1f59ef3c27 100644 --- a/packages/beacon-node/src/api/rest/base.ts +++ b/packages/beacon-node/src/api/rest/base.ts @@ -3,9 +3,8 @@ import fastify, {FastifyBodyParser, FastifyContentTypeParser, FastifyInstance, F import fastifyCors from "@fastify/cors"; import bearerAuthPlugin from "@fastify/bearer-auth"; import {RouteConfig} from "@lodestar/api/beacon/server"; -import {ErrorAborted, Logger} from "@lodestar/utils"; +import {ErrorAborted, Gauge, Histogram, Logger} from "@lodestar/utils"; import {isLocalhostIP} from "../../util/ip.js"; -import {IGauge, IHistogram} from "../../metrics/index.js"; import {ApiError, NodeIsSyncing} from "../impl/errors.js"; import {HttpActiveSocketsTracker, SocketMetrics} from "./activeSockets.js"; @@ -25,9 +24,9 @@ export type RestApiServerModules = { }; export type RestApiServerMetrics = SocketMetrics & { - requests: IGauge<"operationId">; - responseTime: IHistogram<"operationId">; - errors: IGauge<"operationId">; + requests: Gauge<{operationId: string}>; + responseTime: Histogram<{operationId: string}>; + errors: Gauge<{operationId: string}>; }; /** @@ -98,6 +97,11 @@ export class RestApiServer { metrics?.requests.inc({operationId}); }); + server.addHook("preHandler", async (req, _res) => { + const {operationId} = req.routeConfig as RouteConfig; + this.logger.debug(`Exec ${req.id as string} ${req.ip} ${operationId}`); + }); + // Log after 
response server.addHook("onResponse", async (req, res) => { const {operationId} = req.routeConfig as RouteConfig; diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index feaddfbad39d..12b43359fa4e 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -7,7 +7,6 @@ import { computeStartSlotAtEpoch, isStateValidatorsNodesPopulated, RootCache, - kzgCommitmentToVersionedHash, } from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; import {ForkChoiceError, ForkChoiceErrorCode, EpochDifference, AncestorStatus} from "@lodestar/fork-choice"; @@ -16,6 +15,7 @@ import {ZERO_HASH_HEX} from "../../constants/index.js"; import {toCheckpointHex} from "../stateCache/index.js"; import {isOptimisticBlock} from "../../util/forkChoice.js"; import {isQueueErrorAborted} from "../../util/queue/index.js"; +import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {ChainEvent, ReorgEventData} from "../emitter.js"; import {REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC} from "../reprocess.js"; import type {BeaconChain} from "../chain.js"; diff --git a/packages/beacon-node/src/chain/blocks/index.ts b/packages/beacon-node/src/chain/blocks/index.ts index 569fd0771022..8f4c7fa5f0f1 100644 --- a/packages/beacon-node/src/chain/blocks/index.ts +++ b/packages/beacon-node/src/chain/blocks/index.ts @@ -58,11 +58,7 @@ export async function processBlocks( } try { - const {relevantBlocks, dataAvailabilityStatuses, parentSlots, parentBlock} = verifyBlocksSanityChecks( - this, - blocks, - opts - ); + const {relevantBlocks, parentSlots, parentBlock} = verifyBlocksSanityChecks(this, blocks, opts); // No relevant blocks, skip verifyBlocksInEpoch() if (relevantBlocks.length === 0 || parentBlock === null) { @@ -72,13 +68,8 @@ export async function processBlocks( // Fully verify a block to be imported immediately after. 
Does not produce any side-effects besides adding intermediate // states in the state cache through regen. - const {postStates, proposerBalanceDeltas, segmentExecStatus} = await verifyBlocksInEpoch.call( - this, - parentBlock, - relevantBlocks, - dataAvailabilityStatuses, - opts - ); + const {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus} = + await verifyBlocksInEpoch.call(this, parentBlock, relevantBlocks, opts); // If segmentExecStatus has lvhForkchoice then, the entire segment should be invalid // and we need to further propagate diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts index 5f1ac8833578..aff5a64c9929 100644 --- a/packages/beacon-node/src/chain/blocks/types.ts +++ b/packages/beacon-node/src/chain/blocks/types.ts @@ -1,14 +1,13 @@ -import {toHexString} from "@chainsafe/ssz"; import {CachedBeaconStateAllForks, computeEpochAtSlot, DataAvailableStatus} from "@lodestar/state-transition"; import {MaybeValidExecutionStatus} from "@lodestar/fork-choice"; -import {allForks, deneb, Slot, RootHex} from "@lodestar/types"; +import {allForks, deneb, Slot} from "@lodestar/types"; import {ForkSeq, MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; -import {pruneSetToMax} from "@lodestar/utils"; export enum BlockInputType { preDeneb = "preDeneb", postDeneb = "postDeneb", + blobsPromise = "blobsPromise", } /** Enum to represent where blocks come from */ @@ -19,9 +18,18 @@ export enum BlockSource { byRoot = "req_resp_by_root", } +export enum GossipedInputType { + block = "block", + blob = "blob", +} + +export type BlobsCache = Map; +export type BlockInputBlobs = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]}; + export type BlockInput = {block: allForks.SignedBeaconBlock; source: BlockSource; blockBytes: Uint8Array | null} & ( | {type: BlockInputType.preDeneb} - | {type: BlockInputType.postDeneb; blobs: 
deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]} + | ({type: BlockInputType.postDeneb} & BlockInputBlobs) + | {type: BlockInputType.blobsPromise; blobsCache: BlobsCache; availabilityPromise: Promise} ); export function blockRequiresBlobs(config: ChainForkConfig, blockSlot: Slot, clockSlot: Slot): boolean { @@ -32,125 +40,7 @@ export function blockRequiresBlobs(config: ChainForkConfig, blockSlot: Slot, clo ); } -export enum GossipedInputType { - block = "block", - blob = "blob", -} -type GossipedBlockInput = - | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null} - | {type: GossipedInputType.blob; signedBlob: deneb.SignedBlobSidecar; blobBytes: Uint8Array | null}; -type BlockInputCacheType = { - block?: allForks.SignedBeaconBlock; - blockBytes?: Uint8Array | null; - blobs: Map; - blobsBytes: Map; -}; - -const MAX_GOSSIPINPUT_CACHE = 5; -// ssz.deneb.BlobSidecars.elementType.fixedSize; -const BLOBSIDECAR_FIXED_SIZE = 131256; - export const getBlockInput = { - blockInputCache: new Map(), - - getGossipBlockInput( - config: ChainForkConfig, - gossipedInput: GossipedBlockInput - ): - | {blockInput: BlockInput; blockInputMeta: {pending: null; haveBlobs: number; expectedBlobs: number}} - | {blockInput: null; blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}} - | {blockInput: null; blockInputMeta: {pending: GossipedInputType.blob; haveBlobs: number; expectedBlobs: number}} { - let blockHex; - let blockCache; - - if (gossipedInput.type === GossipedInputType.block) { - const {signedBlock, blockBytes} = gossipedInput; - - blockHex = toHexString( - config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message) - ); - blockCache = this.blockInputCache.get(blockHex) ?? 
{ - blobs: new Map(), - blobsBytes: new Map(), - }; - - blockCache.block = signedBlock; - blockCache.blockBytes = blockBytes; - } else { - const {signedBlob, blobBytes} = gossipedInput; - blockHex = toHexString(signedBlob.message.blockRoot); - blockCache = this.blockInputCache.get(blockHex); - - // If a new entry is going to be inserted, prune out old ones - if (blockCache === undefined) { - pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE); - blockCache = {blobs: new Map(), blobsBytes: new Map()}; - } - - // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions - blockCache.blobs.set(signedBlob.message.index, signedBlob.message); - // easily splice out the unsigned message as blob is a fixed length type - blockCache.blobsBytes.set(signedBlob.message.index, blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? null); - } - - this.blockInputCache.set(blockHex, blockCache); - const {block: signedBlock, blockBytes} = blockCache; - - if (signedBlock !== undefined) { - // block is available, check if all blobs have shown up - const {slot, body} = signedBlock.message; - const {blobKzgCommitments} = body as deneb.BeaconBlockBody; - const blockInfo = `blockHex=${blockHex}, slot=${slot}`; - - if (blobKzgCommitments.length < blockCache.blobs.size) { - throw Error( - `Received more blobs=${blockCache.blobs.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` - ); - } - if (blobKzgCommitments.length === blockCache.blobs.size) { - const blobSidecars = []; - const blobsBytes = []; - - for (let index = 0; index < blobKzgCommitments.length; index++) { - const blobSidecar = blockCache.blobs.get(index); - if (blobSidecar === undefined) { - throw Error(`Missing blobSidecar at index=${index} for ${blockInfo}`); - } - blobSidecars.push(blobSidecar); - blobsBytes.push(blockCache.blobsBytes.get(index) ?? 
null); - } - - return { - // TODO freetheblobs: collate and add serialized data for the postDeneb blockinput - blockInput: getBlockInput.postDeneb( - config, - signedBlock, - BlockSource.gossip, - blobSidecars, - blockBytes ?? null, - blobsBytes - ), - blockInputMeta: {pending: null, haveBlobs: blockCache.blobs.size, expectedBlobs: blobKzgCommitments.length}, - }; - } else { - return { - blockInput: null, - blockInputMeta: { - pending: GossipedInputType.blob, - haveBlobs: blockCache.blobs.size, - expectedBlobs: blobKzgCommitments.length, - }, - }; - } - } else { - // will need to wait for the block to showup - return { - blockInput: null, - blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blockCache.blobs.size, expectedBlobs: null}, - }; - } - }, - preDeneb( config: ChainForkConfig, block: allForks.SignedBeaconBlock, @@ -188,6 +78,27 @@ export const getBlockInput = { blobsBytes, }; }, + + blobsPromise( + config: ChainForkConfig, + block: allForks.SignedBeaconBlock, + source: BlockSource, + blobsCache: BlobsCache, + blockBytes: Uint8Array | null, + availabilityPromise: Promise + ): BlockInput { + if (config.getForkSeq(block.message.slot) < ForkSeq.deneb) { + throw Error(`Pre Deneb block slot ${block.message.slot}`); + } + return { + type: BlockInputType.blobsPromise, + block, + source, + blobsCache, + blockBytes, + availabilityPromise, + }; + }, }; export enum AttestationImportOpt { diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index 72db1d801b48..94a42a39a6ae 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -5,7 +5,7 @@ import { isStateValidatorsNodesPopulated, DataAvailableStatus, } from "@lodestar/state-transition"; -import {bellatrix} from "@lodestar/types"; +import {bellatrix, deneb} from "@lodestar/types"; import {ForkName} from "@lodestar/params"; import {ProtoBlock, ExecutionStatus} from 
"@lodestar/fork-choice"; import {ChainForkConfig} from "@lodestar/config"; @@ -14,13 +14,14 @@ import {BlockError, BlockErrorCode} from "../errors/index.js"; import {BlockProcessOpts} from "../options.js"; import {RegenCaller} from "../regen/index.js"; import type {BeaconChain} from "../chain.js"; -import {BlockInput, ImportBlockOpts} from "./types.js"; +import {BlockInput, ImportBlockOpts, BlockInputType} from "./types.js"; import {POS_PANDA_MERGE_TRANSITION_BANNER} from "./utils/pandaMergeTransitionBanner.js"; import {CAPELLA_OWL_BANNER} from "./utils/ownBanner.js"; import {DENEB_BLOWFISH_BANNER} from "./utils/blowfishBanner.js"; import {verifyBlocksStateTransitionOnly} from "./verifyBlocksStateTransitionOnly.js"; import {verifyBlocksSignatures} from "./verifyBlocksSignatures.js"; import {verifyBlocksExecutionPayload, SegmentExecStatus} from "./verifyBlocksExecutionPayloads.js"; +import {verifyBlocksDataAvailability} from "./verifyBlocksDataAvailability.js"; import {writeBlockInputToDb} from "./writeBlockInputToDb.js"; /** @@ -38,12 +39,12 @@ export async function verifyBlocksInEpoch( this: BeaconChain, parentBlock: ProtoBlock, blocksInput: BlockInput[], - dataAvailabilityStatuses: DataAvailableStatus[], opts: BlockProcessOpts & ImportBlockOpts ): Promise<{ postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]; segmentExecStatus: SegmentExecStatus; + dataAvailabilityStatuses: DataAvailableStatus[]; }> { const blocks = blocksInput.map(({block}) => block); if (blocks.length === 0) { @@ -88,7 +89,12 @@ export async function verifyBlocksInEpoch( try { // batch all I/O operations to reduce overhead - const [segmentExecStatus, {postStates, proposerBalanceDeltas}] = await Promise.all([ + const [ + segmentExecStatus, + {dataAvailabilityStatuses, availableTime}, + {postStates, proposerBalanceDeltas, verifyStateTime}, + {verifySignaturesTime}, + ] = await Promise.all([ // Execution payloads opts.skipVerifyExecutionPayload !== true ? 
verifyBlocksExecutionPayload(this, parentBlock, blocks, preState0, abortController.signal, opts) @@ -98,12 +104,16 @@ export async function verifyBlocksInEpoch( mergeBlockFound: null, } as SegmentExecStatus), + // data availability for the blobs + verifyBlocksDataAvailability(this, blocksInput, opts), + // Run state transition only // TODO: Ensure it yields to allow flushing to workers and engine API verifyBlocksStateTransitionOnly( preState0, blocksInput, - dataAvailabilityStatuses, + // hack availability for state transition eval as availability is separately determined + blocks.map(() => DataAvailableStatus.available), this.logger, this.metrics, abortController.signal, @@ -113,7 +123,7 @@ export async function verifyBlocksInEpoch( // All signatures at once opts.skipVerifyBlockSignatures !== true ? verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts) - : Promise.resolve(), + : Promise.resolve({verifySignaturesTime: Date.now()}), // ideally we want to only persist blocks after verifying them however the reality is there are // rarely invalid blocks we'll batch all I/O operation here to reduce the overhead if there's @@ -151,7 +161,35 @@ export async function verifyBlocksInEpoch( } } - return {postStates, proposerBalanceDeltas, segmentExecStatus}; + if (segmentExecStatus.execAborted === null) { + const {executionStatuses, executionTime} = segmentExecStatus; + if ( + blocksInput.length === 1 && + // gossip blocks have seenTimestampSec + opts.seenTimestampSec !== undefined && + blocksInput[0].type !== BlockInputType.preDeneb && + executionStatuses[0] === ExecutionStatus.Valid + ) { + // Find the max time when the block was actually verified + const fullyVerifiedTime = Math.max(executionTime, verifyStateTime, verifySignaturesTime); + const recvTofullyVerifedTime = fullyVerifiedTime / 1000 - opts.seenTimestampSec; + this.metrics?.gossipBlock.receivedToFullyVerifiedTime.observe(recvTofullyVerifedTime); + + const 
verifiedToBlobsAvailabiltyTime = Math.max(availableTime - fullyVerifiedTime, 0) / 1000; + const numBlobs = (blocksInput[0].block as deneb.SignedBeaconBlock).message.body.blobKzgCommitments.length; + + this.metrics?.gossipBlock.verifiedToBlobsAvailabiltyTime.observe({numBlobs}, verifiedToBlobsAvailabiltyTime); + this.logger.verbose("Verified blockInput fully with blobs availability", { + slot: blocksInput[0].block.message.slot, + recvTofullyVerifedTime, + verifiedToBlobsAvailabiltyTime, + type: blocksInput[0].type, + numBlobs, + }); + } + } + + return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus}; } finally { abortController.abort(); } diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts new file mode 100644 index 000000000000..9c45469d56dd --- /dev/null +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts @@ -0,0 +1,126 @@ +import {computeTimeAtSlot, DataAvailableStatus} from "@lodestar/state-transition"; +import {ChainForkConfig} from "@lodestar/config"; +import {deneb, UintNum64} from "@lodestar/types"; +import {Logger} from "@lodestar/utils"; +import {BlockError, BlockErrorCode} from "../errors/index.js"; +import {validateBlobSidecars} from "../validation/blobSidecar.js"; +import {Metrics} from "../../metrics/metrics.js"; +import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation} from "./types.js"; + +// proposer boost is not available post 3 sec so try pulling using unknown block hash +// post 3 sec after throwing the availability error +const BLOB_AVAILABILITY_TIMEOUT = 3_000; + +/** + * Verifies some early cheap sanity checks on the block before running the full state transition. 
+ * + * - Parent is known to the fork-choice + * - Check skipped slots limit + * - check_block_relevancy() + * - Block not in the future + * - Not genesis block + * - Block's slot is < Infinity + * - Not finalized slot + * - Not already known + */ +export async function verifyBlocksDataAvailability( + chain: {config: ChainForkConfig; genesisTime: UintNum64; logger: Logger; metrics: Metrics | null}, + blocks: BlockInput[], + opts: ImportBlockOpts +): Promise<{dataAvailabilityStatuses: DataAvailableStatus[]; availableTime: number}> { + if (blocks.length === 0) { + throw Error("Empty partiallyVerifiedBlocks"); + } + + const dataAvailabilityStatuses: DataAvailableStatus[] = []; + const seenTime = opts.seenTimestampSec !== undefined ? opts.seenTimestampSec * 1000 : Date.now(); + + for (const blockInput of blocks) { + // Validate status of only not yet finalized blocks, we don't need yet to propogate the status + // as it is not used upstream anywhere + const dataAvailabilityStatus = await maybeValidateBlobs(chain, blockInput, opts); + dataAvailabilityStatuses.push(dataAvailabilityStatus); + } + + const availableTime = blocks[blocks.length - 1].type === BlockInputType.blobsPromise ? 
Date.now() : seenTime; + if (blocks.length === 1 && opts.seenTimestampSec !== undefined && blocks[0].type !== BlockInputType.preDeneb) { + const recvToAvailableTime = availableTime / 1000 - opts.seenTimestampSec; + const numBlobs = (blocks[0].block as deneb.SignedBeaconBlock).message.body.blobKzgCommitments.length; + + chain.metrics?.gossipBlock.receivedToBlobsAvailabilityTime.observe({numBlobs}, recvToAvailableTime); + chain.logger.verbose("Verified blobs availability", { + slot: blocks[0].block.message.slot, + recvToAvailableTime, + type: blocks[0].type, + }); + } + + return {dataAvailabilityStatuses, availableTime}; +} + +async function maybeValidateBlobs( + chain: {config: ChainForkConfig; genesisTime: UintNum64}, + blockInput: BlockInput, + opts: ImportBlockOpts +): Promise { + switch (blockInput.type) { + case BlockInputType.preDeneb: + return DataAvailableStatus.preDeneb; + + case BlockInputType.postDeneb: + if (opts.validBlobSidecars === BlobSidecarValidation.Full) { + return DataAvailableStatus.available; + } + + // eslint-disable-next-line no-fallthrough + case BlockInputType.blobsPromise: { + // run full validation + const {block} = blockInput; + const blockSlot = block.message.slot; + + const blobsData = + blockInput.type === BlockInputType.postDeneb + ? 
blockInput + : await raceWithCutoff(chain, blockInput, blockInput.availabilityPromise); + const {blobs} = blobsData; + + const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body; + const beaconBlockRoot = chain.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); + + // if the blob siddecars have been individually verified then we can skip kzg proof check + // but other checks to match blobs with block data still need to be performed + const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; + validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); + + return DataAvailableStatus.available; + } + } +} + +/** + * Wait for blobs to become available with a cutoff time. If fails then throw DATA_UNAVAILABLE error + * which may try unknownblock/blobs fill (by root). + */ +async function raceWithCutoff( + chain: {config: ChainForkConfig; genesisTime: UintNum64}, + blockInput: BlockInput, + availabilityPromise: Promise +): Promise { + const {block} = blockInput; + const blockSlot = block.message.slot; + + const cutoffTime = Math.max( + computeTimeAtSlot(chain.config, blockSlot, chain.genesisTime) * 1000 + BLOB_AVAILABILITY_TIMEOUT - Date.now(), + 0 + ); + const cutoffTimeout = new Promise((_resolve, reject) => setTimeout(reject, cutoffTime)); + + try { + await Promise.race([availabilityPromise, cutoffTimeout]); + } catch (e) { + // throw unavailable so that the unknownblock/blobs can be triggered to pull the block + throw new BlockError(block, {code: BlockErrorCode.DATA_UNAVAILABLE}); + } + // we can only be here if availabilityPromise has resolved else an error will be thrown + return availabilityPromise; +} diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts index 7f4edd14c618..5dbe104c9541 100644 --- 
a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts @@ -5,7 +5,6 @@ import { isExecutionBlockBodyType, isMergeTransitionBlock as isMergeTransitionBlockFn, isExecutionEnabled, - kzgCommitmentToVersionedHash, } from "@lodestar/state-transition"; import {bellatrix, allForks, Slot, deneb} from "@lodestar/types"; import { @@ -24,6 +23,7 @@ import {ForkSeq, SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params"; import {IExecutionEngine} from "../../execution/engine/interface.js"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {IClock} from "../../util/clock.js"; +import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {BlockProcessOpts} from "../options.js"; import {ExecutionPayloadStatus} from "../../execution/engine/interface.js"; import {IEth1ForBlockProduction} from "../../eth1/index.js"; @@ -45,6 +45,7 @@ export type SegmentExecStatus = | { execAborted: null; executionStatuses: MaybeValidExecutionStatus[]; + executionTime: number; mergeBlockFound: bellatrix.BeaconBlock | null; } | {execAborted: ExecAbortType; invalidSegmentLVH?: LVHInvalidResponse; mergeBlockFound: null}; @@ -243,8 +244,9 @@ export async function verifyBlocksExecutionPayload( } } - if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { - const recvToVerifiedExecPayload = Date.now() / 1000 - opts.seenTimestampSec; + const executionTime = Date.now(); + if (blocks.length === 1 && opts.seenTimestampSec !== undefined && executionStatuses[0] === ExecutionStatus.Valid) { + const recvToVerifiedExecPayload = executionTime / 1000 - opts.seenTimestampSec; chain.metrics?.gossipBlock.receivedToExecutionPayloadVerification.observe(recvToVerifiedExecPayload); chain.logger.verbose("Verified execution payload", { slot: blocks[0].message.slot, @@ -255,6 +257,7 @@ export async function verifyBlocksExecutionPayload( return { execAborted: null, executionStatuses, + 
executionTime, mergeBlockFound, }; } diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts index 9fb7d04f1ed8..e62355a4889d 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts @@ -1,12 +1,11 @@ -import {computeStartSlotAtEpoch, DataAvailableStatus} from "@lodestar/state-transition"; +import {computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; -import {Slot, deneb} from "@lodestar/types"; +import {Slot} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; import {IClock} from "../../util/clock.js"; import {BlockError, BlockErrorCode} from "../errors/index.js"; -import {validateBlobSidecars} from "../validation/blobSidecar.js"; -import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation} from "./types.js"; +import {BlockInput, ImportBlockOpts} from "./types.js"; /** * Verifies some early cheap sanity checks on the block before running the full state transition. 
@@ -26,7 +25,6 @@ export function verifyBlocksSanityChecks( opts: ImportBlockOpts ): { relevantBlocks: BlockInput[]; - dataAvailabilityStatuses: DataAvailableStatus[]; parentSlots: Slot[]; parentBlock: ProtoBlock | null; } { @@ -35,7 +33,6 @@ export function verifyBlocksSanityChecks( } const relevantBlocks: BlockInput[] = []; - const dataAvailabilityStatuses: DataAvailableStatus[] = []; const parentSlots: Slot[] = []; let parentBlock: ProtoBlock | null = null; @@ -64,10 +61,6 @@ export function verifyBlocksSanityChecks( } } - // Validate status of only not yet finalized blocks, we don't need yet to propogate the status - // as it is not used upstream anywhere - const dataAvailabilityStatus = maybeValidateBlobs(chain.config, blockInput, opts); - let parentBlockSlot: Slot; if (relevantBlocks.length > 0) { @@ -105,7 +98,6 @@ export function verifyBlocksSanityChecks( // Block is relevant relevantBlocks.push(blockInput); - dataAvailabilityStatuses.push(dataAvailabilityStatus); parentSlots.push(parentBlockSlot); } @@ -115,35 +107,5 @@ export function verifyBlocksSanityChecks( throw Error(`Internal error, parentBlock should not be null for relevantBlocks=${relevantBlocks.length}`); } - return {relevantBlocks, dataAvailabilityStatuses, parentSlots, parentBlock}; -} - -function maybeValidateBlobs( - config: ChainForkConfig, - blockInput: BlockInput, - opts: ImportBlockOpts -): DataAvailableStatus { - switch (blockInput.type) { - case BlockInputType.postDeneb: { - if (opts.validBlobSidecars === BlobSidecarValidation.Full) { - return DataAvailableStatus.available; - } - - // run full validation - const {block, blobs} = blockInput; - const blockSlot = block.message.slot; - const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body; - const beaconBlockRoot = config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); - - // if the blob siddecars have been individually verified then we can skip kzg proof check - // but other checks to match blobs 
with block data still need to be performed - const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; - validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); - - return DataAvailableStatus.available; - } - - case BlockInputType.preDeneb: - return DataAvailableStatus.preDeneb; - } + return {relevantBlocks, parentSlots, parentBlock}; } diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts index fbbef969b696..14ad46a35c1e 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSignatures.ts @@ -20,7 +20,7 @@ export async function verifyBlocksSignatures( preState0: CachedBeaconStateAllForks, blocks: allForks.SignedBeaconBlock[], opts: ImportBlockOpts -): Promise { +): Promise<{verifySignaturesTime: number}> { const isValidPromises: Promise[] = []; // Verifies signatures after running state transition, so all SyncCommittee signed roots are known at this point. 
@@ -46,17 +46,20 @@ export async function verifyBlocksSignatures( } } - if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { - const recvToSigVer = Date.now() / 1000 - opts.seenTimestampSec; - metrics?.gossipBlock.receivedToSignaturesVerification.observe(recvToSigVer); - logger.verbose("Verified block signatures", {slot: blocks[0].message.slot, recvToSigVer}); - } - // `rejectFirstInvalidResolveAllValid()` returns on isValid result with its index const res = await rejectFirstInvalidResolveAllValid(isValidPromises); if (!res.allValid) { throw new BlockError(blocks[res.index], {code: BlockErrorCode.INVALID_SIGNATURE, state: preState0}); } + + const verifySignaturesTime = Date.now(); + if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { + const recvToSigVer = verifySignaturesTime / 1000 - opts.seenTimestampSec; + metrics?.gossipBlock.receivedToSignaturesVerification.observe(recvToSigVer); + logger.verbose("Verified block signatures", {slot: blocks[0].message.slot, recvToSigVer}); + } + + return {verifySignaturesTime}; } type AllValidRes = {allValid: true} | {allValid: false; index: number}; diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts index 2afc9543f847..7d15d4e4f6ce 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksStateTransitionOnly.ts @@ -3,6 +3,7 @@ import { stateTransition, ExecutionPayloadStatus, DataAvailableStatus, + StateHashTreeRootSource, } from "@lodestar/state-transition"; import {ErrorAborted, Logger, sleep} from "@lodestar/utils"; import {Metrics} from "../../metrics/index.js"; @@ -27,7 +28,7 @@ export async function verifyBlocksStateTransitionOnly( metrics: Metrics | null, signal: AbortSignal, opts: BlockProcessOpts & ImportBlockOpts -): Promise<{postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]}> 
{ +): Promise<{postStates: CachedBeaconStateAllForks[]; proposerBalanceDeltas: number[]; verifyStateTime: number}> { const postStates: CachedBeaconStateAllForks[] = []; const proposerBalanceDeltas: number[] = []; @@ -57,7 +58,9 @@ export async function verifyBlocksStateTransitionOnly( metrics ); - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer(); + const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.blockTransition, + }); const stateRoot = postState.hashTreeRoot(); hashTreeRootTimer?.(); @@ -90,12 +93,13 @@ export async function verifyBlocksStateTransitionOnly( } } + const verifyStateTime = Date.now(); if (blocks.length === 1 && opts.seenTimestampSec !== undefined) { const slot = blocks[0].block.message.slot; - const recvToTransition = Date.now() / 1000 - opts.seenTimestampSec; + const recvToTransition = verifyStateTime / 1000 - opts.seenTimestampSec; metrics?.gossipBlock.receivedToStateTransition.observe(recvToTransition); - logger.verbose("Transitioned gossip block", {slot, recvToTransition}); + logger.verbose("Verified block state transition", {slot, recvToTransition}); } - return {postStates, proposerBalanceDeltas}; + return {postStates, proposerBalanceDeltas, verifyStateTime}; } diff --git a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts index 0603ed7e7f7e..0b94d32b84ec 100644 --- a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts +++ b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts @@ -13,7 +13,7 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI const fnPromises: Promise[] = []; for (const blockInput of blocksInput) { - const {block, blockBytes, type} = blockInput; + const {block, blockBytes} = blockInput; const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message); const blockRootHex = toHex(blockRoot); if 
(blockBytes) { @@ -29,8 +29,13 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI root: blockRootHex, }); - if (type === BlockInputType.postDeneb) { - const {blobs: blobSidecars} = blockInput; + if (blockInput.type === BlockInputType.postDeneb || blockInput.type === BlockInputType.blobsPromise) { + const blobSidecars = + blockInput.type == BlockInputType.postDeneb + ? blockInput.blobs + : // At this point of import blobs are available and can be safely awaited + (await blockInput.availabilityPromise).blobs; + // NOTE: Old blobs are pruned on archive fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars})); this.logger.debug("Persisted blobSidecars to hot DB", { diff --git a/packages/beacon-node/src/chain/bls/index.ts b/packages/beacon-node/src/chain/bls/index.ts index 3ee72ac66cbd..f9898b13776b 100644 --- a/packages/beacon-node/src/chain/bls/index.ts +++ b/packages/beacon-node/src/chain/bls/index.ts @@ -1,4 +1,4 @@ export type {IBlsVerifier} from "./interface.js"; -export type {BlsMultiThreadWorkerPoolModules} from "./multithread/index.js"; +export type {BlsMultiThreadWorkerPoolModules, JobQueueItemType} from "./multithread/index.js"; export {BlsMultiThreadWorkerPool} from "./multithread/index.js"; export {BlsSingleThreadVerifier} from "./singleThread.js"; diff --git a/packages/beacon-node/src/chain/bls/multithread/index.ts b/packages/beacon-node/src/chain/bls/multithread/index.ts index 9b0006566253..235ec1536be7 100644 --- a/packages/beacon-node/src/chain/bls/multithread/index.ts +++ b/packages/beacon-node/src/chain/bls/multithread/index.ts @@ -41,6 +41,8 @@ export type BlsMultiThreadWorkerPoolOptions = { blsVerifyAllMultiThread?: boolean; }; +export type {JobQueueItemType}; + // 1 worker for the main thread const blsPoolSize = Math.max(defaultPoolSize - 1, 1); diff --git a/packages/beacon-node/src/chain/bls/multithread/jobItem.ts b/packages/beacon-node/src/chain/bls/multithread/jobItem.ts 
index 4ae05cdab913..8b5c63df2eeb 100644 --- a/packages/beacon-node/src/chain/bls/multithread/jobItem.ts +++ b/packages/beacon-node/src/chain/bls/multithread/jobItem.ts @@ -56,7 +56,7 @@ export function jobItemWorkReq(job: JobQueueItem, format: PointFormat, metrics: opts: job.opts, sets: job.sets.map((set) => ({ // this can throw, handled in the consumer code - publicKey: getAggregatedPubkey(set).toBytes(format), + publicKey: getAggregatedPubkey(set, metrics).toBytes(format), signature: set.signature, message: set.signingRoot, })), diff --git a/packages/beacon-node/src/chain/bls/utils.ts b/packages/beacon-node/src/chain/bls/utils.ts index 0b1010de27f6..4a3a027f31ac 100644 --- a/packages/beacon-node/src/chain/bls/utils.ts +++ b/packages/beacon-node/src/chain/bls/utils.ts @@ -1,14 +1,19 @@ import type {PublicKey} from "@chainsafe/bls/types"; import bls from "@chainsafe/bls"; import {ISignatureSet, SignatureSetType} from "@lodestar/state-transition"; +import {Metrics} from "../../metrics/metrics.js"; -export function getAggregatedPubkey(signatureSet: ISignatureSet): PublicKey { +export function getAggregatedPubkey(signatureSet: ISignatureSet, metrics: Metrics | null = null): PublicKey { switch (signatureSet.type) { case SignatureSetType.single: return signatureSet.pubkey; - case SignatureSetType.aggregate: - return bls.PublicKey.aggregate(signatureSet.pubkeys); + case SignatureSetType.aggregate: { + const timer = metrics?.blsThreadPool.pubkeysAggregationMainThreadDuration.startTimer(); + const pubkeys = bls.PublicKey.aggregate(signatureSet.pubkeys); + timer?.(); + return pubkeys; + } default: throw Error("Unknown signature set type"); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 3464aad8b673..5e38cf23f5de 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -1,5 +1,5 @@ import path from "node:path"; -import {CompositeTypeAny, fromHexString, toHexString, TreeView, 
Type} from "@chainsafe/ssz"; +import {CompositeTypeAny, fromHexString, TreeView, Type, toHexString} from "@chainsafe/ssz"; import { BeaconStateAllForks, CachedBeaconStateAllForks, @@ -26,6 +26,8 @@ import { deneb, Wei, bellatrix, + isBlindedBeaconBlock, + Gwei, } from "@lodestar/types"; import {CheckpointWithHex, ExecutionStatus, IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; import {ProcessShutdownCallback} from "@lodestar/validator"; @@ -77,6 +79,7 @@ import {BlockInput} from "./blocks/types.js"; import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js"; import {ShufflingCache} from "./shufflingCache.js"; import {StateContextCache} from "./stateCache/stateContextCache.js"; +import {SeenGossipBlockInput} from "./seenCache/index.js"; import {CheckpointStateCache} from "./stateCache/stateContextCheckpointsCache.js"; /** @@ -85,7 +88,6 @@ import {CheckpointStateCache} from "./stateCache/stateContextCheckpointsCache.js * allow some margin if the node overloads. */ const DEFAULT_MAX_CACHED_PRODUCED_ROOTS = 4; -const DEFAULT_MAX_CACHED_BLOB_SIDECARS = 4; export class BeaconChain implements IBeaconChain { readonly genesisTime: UintNum64; @@ -123,6 +125,7 @@ export class BeaconChain implements IBeaconChain { readonly seenSyncCommitteeMessages = new SeenSyncCommitteeMessages(); readonly seenContributionAndProof: SeenContributionAndProof; readonly seenAttestationDatas: SeenAttestationDatas; + readonly seenGossipBlockInput = new SeenGossipBlockInput(); // Seen cache for liveness checks readonly seenBlockAttesters = new SeenBlockAttesters(); @@ -134,8 +137,7 @@ export class BeaconChain implements IBeaconChain { readonly checkpointBalancesCache: CheckpointBalancesCache; readonly shufflingCache: ShufflingCache; /** Map keyed by executionPayload.blockHash of the block for those blobs */ - readonly producedBlobSidecarsCache = new Map(); - readonly producedBlindedBlobSidecarsCache = new Map(); + readonly producedContentsCache = new Map(); // Cache payload 
from the local execution so that produceBlindedBlock or produceBlockV3 and // send and get signed/published blinded versions which beacon can assemble into full before @@ -468,20 +470,22 @@ export class BeaconChain implements IBeaconChain { return data && {block: data, executionOptimistic: false}; } - produceBlock(blockAttributes: BlockAttributes): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei}> { + produceBlock( + blockAttributes: BlockAttributes + ): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}> { return this.produceBlockWrapper(BlockType.Full, blockAttributes); } produceBlindedBlock( blockAttributes: BlockAttributes - ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei}> { + ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}> { return this.produceBlockWrapper(BlockType.Blinded, blockAttributes); } async produceBlockWrapper( blockType: T, {randaoReveal, graffiti, slot, feeRecipient}: BlockAttributes - ): Promise<{block: AssembledBlockType; executionPayloadValue: Wei}> { + ): Promise<{block: AssembledBlockType; executionPayloadValue: Wei; consensusBlockValue: Gwei}> { const head = this.forkChoice.getHead(); const state = await this.regen.getBlockSlotState( head.blockRoot, @@ -522,7 +526,9 @@ export class BeaconChain implements IBeaconChain { stateRoot: ZERO_HASH, body, } as AssembledBlockType; - block.stateRoot = computeNewStateRoot(this.metrics, state, block); + + const {newStateRoot, proposerReward} = computeNewStateRoot(this.metrics, state, block); + block.stateRoot = newStateRoot; const blockRoot = blockType === BlockType.Full ? 
this.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block) @@ -546,35 +552,12 @@ export class BeaconChain implements IBeaconChain { // publishing the blinded block's full version if (blobs.type === BlobsResultType.produced) { // body is of full type here - const blockHash = blobs.blockHash; - const blobSidecars = blobs.blobSidecars.map((blobSidecar) => ({ - ...blobSidecar, - blockRoot, - slot, - blockParentRoot: parentBlockRoot, - proposerIndex, - })); - - this.producedBlobSidecarsCache.set(blockHash, blobSidecars); - this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size); - } else if (blobs.type === BlobsResultType.blinded) { - // body is of blinded type here - const blockHash = blobs.blockHash; - const blindedBlobSidecars = blobs.blobSidecars.map((blindedBlobSidecar) => ({ - ...blindedBlobSidecar, - blockRoot, - slot, - blockParentRoot: parentBlockRoot, - proposerIndex, - })); - - this.producedBlindedBlobSidecarsCache.set(blockHash, blindedBlobSidecars); - this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set( - this.producedBlindedBlobSidecarsCache.size - ); + const {blockHash, contents} = blobs; + this.producedContentsCache.set(blockHash, contents); + this.metrics?.blockProductionCaches.producedContentsCache.set(this.producedContentsCache.size); } - return {block, executionPayloadValue}; + return {block, executionPayloadValue, consensusBlockValue: proposerReward}; } /** @@ -587,14 +570,14 @@ export class BeaconChain implements IBeaconChain { * kzg_aggregated_proof=compute_proof_from_blobs(blobs), * ) */ - getBlobSidecars(beaconBlock: deneb.BeaconBlock): deneb.BlobSidecars { + getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents { const blockHash = toHex(beaconBlock.body.executionPayload.blockHash); - const blobSidecars = this.producedBlobSidecarsCache.get(blockHash); - if (!blobSidecars) { - throw Error(`No blobSidecars for executionPayload.blockHash ${blockHash}`); + const contents = 
this.producedContentsCache.get(blockHash); + if (!contents) { + throw Error(`No contents for executionPayload.blockHash ${blockHash}`); } - return blobSidecars; + return contents; } async processBlock(block: BlockInput, opts?: ImportBlockOpts): Promise { @@ -645,21 +628,32 @@ export class BeaconChain implements IBeaconChain { return this.reprocessController.waitForBlockOfAttestation(slot, root); } + persistBlock(data: allForks.BeaconBlock | allForks.BlindedBeaconBlock, suffix?: string): void { + const slot = data.slot; + if (isBlindedBeaconBlock(data)) { + const sszType = this.config.getBlindedForkTypes(slot).BeaconBlock; + void this.persistSszObject("BlindedBeaconBlock", sszType.serialize(data), sszType.hashTreeRoot(data), suffix); + } else { + const sszType = this.config.getForkTypes(slot).BeaconBlock; + void this.persistSszObject("BeaconBlock", sszType.serialize(data), sszType.hashTreeRoot(data), suffix); + } + } + persistInvalidSszValue(type: Type, sszObject: T, suffix?: string): void { if (this.opts.persistInvalidSszObjects) { - void this.persistInvalidSszObject(type.typeName, type.serialize(sszObject), type.hashTreeRoot(sszObject), suffix); + void this.persistSszObject(type.typeName, type.serialize(sszObject), type.hashTreeRoot(sszObject), suffix); } } persistInvalidSszBytes(typeName: string, sszBytes: Uint8Array, suffix?: string): void { if (this.opts.persistInvalidSszObjects) { - void this.persistInvalidSszObject(typeName, sszBytes, sszBytes, suffix); + void this.persistSszObject(typeName, sszBytes, sszBytes, suffix); } } persistInvalidSszView(view: TreeView, suffix?: string): void { if (this.opts.persistInvalidSszObjects) { - void this.persistInvalidSszObject(view.type.typeName, view.serialize(), view.hashTreeRoot(), suffix); + void this.persistSszObject(view.type.typeName, view.serialize(), view.hashTreeRoot(), suffix); } } @@ -795,16 +789,12 @@ export class BeaconChain implements IBeaconChain { return {state: blockState, stateId: "block_state_any_epoch", 
shouldWarn: true}; } - private async persistInvalidSszObject( + private async persistSszObject( typeName: string, bytes: Uint8Array, root: Uint8Array, suffix?: string ): Promise { - if (!this.opts.persistInvalidSszObjects) { - return; - } - const now = new Date(); // yyyy-MM-dd const dateStr = now.toISOString().split("T")[0]; @@ -869,19 +859,8 @@ export class BeaconChain implements IBeaconChain { this.metrics?.blockProductionCaches.producedBlindedBlockRoot.set(this.producedBlindedBlockRoot.size); if (this.config.getForkSeq(slot) >= ForkSeq.deneb) { - pruneSetToMax( - this.producedBlobSidecarsCache, - this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS - ); - this.metrics?.blockProductionCaches.producedBlobSidecarsCache.set(this.producedBlobSidecarsCache.size); - - pruneSetToMax( - this.producedBlindedBlobSidecarsCache, - this.opts.maxCachedBlobSidecars ?? DEFAULT_MAX_CACHED_BLOB_SIDECARS - ); - this.metrics?.blockProductionCaches.producedBlindedBlobSidecarsCache.set( - this.producedBlindedBlobSidecarsCache.size - ); + pruneSetToMax(this.producedContentsCache, this.opts.maxCachedProducedRoots ?? 
DEFAULT_MAX_CACHED_PRODUCED_ROOTS); + this.metrics?.blockProductionCaches.producedContentsCache.set(this.producedContentsCache.size); } const metrics = this.metrics; diff --git a/packages/beacon-node/src/chain/errors/blobSidecarError.ts b/packages/beacon-node/src/chain/errors/blobSidecarError.ts index e242cbcb11ba..f38aa883002c 100644 --- a/packages/beacon-node/src/chain/errors/blobSidecarError.ts +++ b/packages/beacon-node/src/chain/errors/blobSidecarError.ts @@ -21,6 +21,7 @@ export enum BlobSidecarErrorCode { PARENT_UNKNOWN = "BLOB_SIDECAR_ERROR_PARENT_UNKNOWN", NOT_LATER_THAN_PARENT = "BLOB_SIDECAR_ERROR_NOT_LATER_THAN_PARENT", PROPOSAL_SIGNATURE_INVALID = "BLOB_SIDECAR_ERROR_PROPOSAL_SIGNATURE_INVALID", + INCLUSION_PROOF_INVALID = "BLOB_SIDECAR_ERROR_INCLUSION_PROOF_INVALID", INCORRECT_PROPOSER = "BLOB_SIDECAR_ERROR_INCORRECT_PROPOSER", } @@ -37,6 +38,7 @@ export type BlobSidecarErrorType = | {code: BlobSidecarErrorCode.PARENT_UNKNOWN; parentRoot: RootHex} | {code: BlobSidecarErrorCode.NOT_LATER_THAN_PARENT; parentSlot: Slot; slot: Slot} | {code: BlobSidecarErrorCode.PROPOSAL_SIGNATURE_INVALID} + | {code: BlobSidecarErrorCode.INCLUSION_PROOF_INVALID; slot: Slot; blobIdx: number} | {code: BlobSidecarErrorCode.INCORRECT_PROPOSER; proposerIndex: ValidatorIndex}; export class BlobSidecarGossipError extends GossipActionError {} diff --git a/packages/beacon-node/src/chain/errors/blockError.ts b/packages/beacon-node/src/chain/errors/blockError.ts index ee06927a4fc1..6ab15275934e 100644 --- a/packages/beacon-node/src/chain/errors/blockError.ts +++ b/packages/beacon-node/src/chain/errors/blockError.ts @@ -63,6 +63,8 @@ export enum BlockErrorCode { /** The attestation head block is too far behind the attestation slot, causing many skip slots. 
This is deemed a DoS risk */ TOO_MANY_SKIPPED_SLOTS = "TOO_MANY_SKIPPED_SLOTS", + /** The blobs are unavailable */ + DATA_UNAVAILABLE = "BLOCK_ERROR_DATA_UNAVAILABLE", } type ExecutionErrorStatus = Exclude< @@ -103,7 +105,8 @@ export type BlockErrorType = | {code: BlockErrorCode.TOO_MUCH_GAS_USED; gasUsed: number; gasLimit: number} | {code: BlockErrorCode.SAME_PARENT_HASH; blockHash: RootHex} | {code: BlockErrorCode.TRANSACTIONS_TOO_BIG; size: number; max: number} - | {code: BlockErrorCode.EXECUTION_ENGINE_ERROR; execStatus: ExecutionErrorStatus; errorMessage: string}; + | {code: BlockErrorCode.EXECUTION_ENGINE_ERROR; execStatus: ExecutionErrorStatus; errorMessage: string} + | {code: BlockErrorCode.DATA_UNAVAILABLE}; export class BlockGossipError extends GossipActionError {} diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index 7fa60fd76ace..880a5e86071a 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ b/packages/beacon-node/src/chain/interface.ts @@ -1,5 +1,17 @@ import {CompositeTypeAny, TreeView, Type} from "@chainsafe/ssz"; -import {allForks, UintNum64, Root, phase0, Slot, RootHex, Epoch, ValidatorIndex, deneb, Wei} from "@lodestar/types"; +import { + allForks, + UintNum64, + Root, + phase0, + Slot, + RootHex, + Epoch, + ValidatorIndex, + deneb, + Wei, + Gwei, +} from "@lodestar/types"; import { BeaconStateAllForks, CachedBeaconStateAllForks, @@ -37,6 +49,7 @@ import {CheckpointBalancesCache} from "./balancesCache.js"; import {IChainOptions} from "./options.js"; import {AssembledBlockType, BlockAttributes, BlockType} from "./produceBlock/produceBlockBody.js"; import {SeenAttestationDatas} from "./seenCache/seenAttestationData.js"; +import {SeenGossipBlockInput} from "./seenCache/index.js"; import {ShufflingCache} from "./shufflingCache.js"; export {BlockType, type AssembledBlockType}; @@ -90,14 +103,14 @@ export interface IBeaconChain { readonly seenSyncCommitteeMessages: 
SeenSyncCommitteeMessages; readonly seenContributionAndProof: SeenContributionAndProof; readonly seenAttestationDatas: SeenAttestationDatas; + readonly seenGossipBlockInput: SeenGossipBlockInput; // Seen cache for liveness checks readonly seenBlockAttesters: SeenBlockAttesters; readonly beaconProposerCache: BeaconProposerCache; readonly checkpointBalancesCache: CheckpointBalancesCache; - readonly producedBlobSidecarsCache: Map; + readonly producedContentsCache: Map; readonly producedBlockRoot: Map; - readonly producedBlindedBlobSidecarsCache: Map; readonly shufflingCache: ShufflingCache; readonly producedBlindedBlockRoot: Set; readonly opts: IChainOptions; @@ -139,12 +152,14 @@ export interface IBeaconChain { */ getBlockByRoot(root: RootHex): Promise<{block: allForks.SignedBeaconBlock; executionOptimistic: boolean} | null>; - getBlobSidecars(beaconBlock: deneb.BeaconBlock): deneb.BlobSidecars; + getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents; - produceBlock(blockAttributes: BlockAttributes): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei}>; + produceBlock( + blockAttributes: BlockAttributes + ): Promise<{block: allForks.BeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}>; produceBlindedBlock( blockAttributes: BlockAttributes - ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei}>; + ): Promise<{block: allForks.BlindedBeaconBlock; executionPayloadValue: Wei; consensusBlockValue: Gwei}>; /** Process a block until complete */ processBlock(block: BlockInput, opts?: ImportBlockOpts): Promise; @@ -159,6 +174,7 @@ export interface IBeaconChain { updateBeaconProposerData(epoch: Epoch, proposers: ProposerPreparationData[]): Promise; + persistBlock(data: allForks.BeaconBlock | allForks.BlindedBeaconBlock, suffix?: string): void; persistInvalidSszValue(type: Type, sszObject: T | Uint8Array, suffix?: string): void; persistInvalidSszBytes(type: string, sszBytes: Uint8Array, suffix?: string): void; /** 
Persist bad items to persistInvalidSszObjectsDir dir, for example invalid state, attestations etc. */ diff --git a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts index f9911275b6ee..00309d322a11 100644 --- a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts +++ b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts @@ -166,10 +166,16 @@ export class AggregatedAttestationPool { } } - return attestationsByScore - .sort((a, b) => b.score - a.score) - .slice(0, MAX_ATTESTATIONS) - .map((attestation) => attestation.attestation); + const sortedAttestationsByScore = attestationsByScore.sort((a, b) => b.score - a.score); + const attestationsForBlock: phase0.Attestation[] = []; + for (const [i, attestationWithScore] of sortedAttestationsByScore.entries()) { + if (i >= MAX_ATTESTATIONS) { + break; + } + // attestations could be modified in this op pool, so we need to clone for block + attestationsForBlock.push(ssz.phase0.Attestation.clone(attestationWithScore.attestation)); + } + return attestationsForBlock; } /** diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts index cee8d0614c30..bb436319cd53 100644 --- a/packages/beacon-node/src/chain/opPools/opPool.ts +++ b/packages/beacon-node/src/chain/opPools/opPool.ts @@ -19,6 +19,7 @@ import {IBeaconDb} from "../../db/index.js"; import {SignedBLSToExecutionChangeVersioned} from "../../util/types.js"; import {BlockType} from "../interface.js"; import {Metrics} from "../../metrics/metrics.js"; +import {BlockProductionStep} from "../produceBlock/produceBlockBody.js"; import {isValidBlsToExecutionChangeForBlockInclusion} from "./utils.js"; type HexRoot = string; @@ -201,7 +202,7 @@ export class OpPool { } } endProposerSlashing?.({ - step: "proposerSlashing", + step: BlockProductionStep.proposerSlashing, }); const endAttesterSlashings = 
stepsMetrics?.startTimer(); @@ -235,7 +236,7 @@ export class OpPool { } } endAttesterSlashings?.({ - step: "attesterSlashings", + step: BlockProductionStep.attesterSlashings, }); const endVoluntaryExits = stepsMetrics?.startTimer(); @@ -256,7 +257,7 @@ export class OpPool { } } endVoluntaryExits?.({ - step: "voluntaryExits", + step: BlockProductionStep.voluntaryExits, }); const endBlsToExecutionChanges = stepsMetrics?.startTimer(); @@ -270,7 +271,7 @@ export class OpPool { } } endBlsToExecutionChanges?.({ - step: "blsToExecutionChanges", + step: BlockProductionStep.blsToExecutionChanges, }); return [attesterSlashings, proposerSlashings, voluntaryExits, blsToExecutionChanges]; diff --git a/packages/beacon-node/src/chain/options.ts b/packages/beacon-node/src/chain/options.ts index 518c73e072d7..cc7795ade0a1 100644 --- a/packages/beacon-node/src/chain/options.ts +++ b/packages/beacon-node/src/chain/options.ts @@ -14,6 +14,7 @@ export type IChainOptions = BlockProcessOpts & LightClientServerOpts & { blsVerifyAllMainThread?: boolean; blsVerifyAllMultiThread?: boolean; + persistProducedBlocks?: boolean; persistInvalidSszObjects?: boolean; persistInvalidSszObjectsDir?: string; skipCreateStateCacheIfAvailable?: boolean; diff --git a/packages/beacon-node/src/chain/prepareNextSlot.ts b/packages/beacon-node/src/chain/prepareNextSlot.ts index 43fac1d1b120..e2bffd5bc8c6 100644 --- a/packages/beacon-node/src/chain/prepareNextSlot.ts +++ b/packages/beacon-node/src/chain/prepareNextSlot.ts @@ -1,4 +1,9 @@ -import {computeEpochAtSlot, isExecutionStateType, computeTimeAtSlot} from "@lodestar/state-transition"; +import { + computeEpochAtSlot, + isExecutionStateType, + computeTimeAtSlot, + StateHashTreeRootSource, +} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {ForkSeq, SLOTS_PER_EPOCH, ForkExecution} from "@lodestar/params"; import {Slot} from "@lodestar/types"; @@ -104,6 +109,14 @@ export class PrepareNextSlotScheduler { 
RegenCaller.precomputeEpoch ); + // cache HashObjects for faster hashTreeRoot() later, especially for computeNewStateRoot() if we need to produce a block at slot 0 of epoch + // see https://github.com/ChainSafe/lodestar/issues/6194 + const hashTreeRootTimer = this.metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.prepareNextSlot, + }); + prepareState.hashTreeRoot(); + hashTreeRootTimer?.(); + // assuming there is no reorg, it caches the checkpoint state & helps avoid doing a full state transition in the next slot // + when gossip block comes, we need to validate and run state transition // + if next slot is a skipped slot, it'd help getting target checkpoint state faster to validate attestations diff --git a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts index bac501ed725c..ccc0595d0db6 100644 --- a/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts +++ b/packages/beacon-node/src/chain/produceBlock/computeNewStateRoot.ts @@ -2,9 +2,10 @@ import { CachedBeaconStateAllForks, DataAvailableStatus, ExecutionPayloadStatus, + StateHashTreeRootSource, stateTransition, } from "@lodestar/state-transition"; -import {allForks, Root} from "@lodestar/types"; +import {allForks, Gwei, Root} from "@lodestar/types"; import {ZERO_HASH} from "../../constants/index.js"; import {Metrics} from "../../metrics/index.js"; @@ -17,7 +18,7 @@ export function computeNewStateRoot( metrics: Metrics | null, state: CachedBeaconStateAllForks, block: allForks.FullOrBlindedBeaconBlock -): Root { +): {newStateRoot: Root; proposerReward: Gwei} { // Set signature to zero to re-use stateTransition() function which requires the SignedBeaconBlock type const blockEmptySig = {message: block, signature: ZERO_HASH} as allForks.FullOrBlindedSignedBeaconBlock; @@ -41,5 +42,14 @@ export function computeNewStateRoot( metrics ); - return postState.hashTreeRoot(); + const {attestations, 
syncAggregate, slashing} = postState.proposerRewards; + const proposerReward = BigInt(attestations + syncAggregate + slashing); + + const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.computeNewStateRoot, + }); + const newStateRoot = postState.hashTreeRoot(); + hashTreeRootTimer?.(); + + return {newStateRoot, proposerReward}; } diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index 1c522c54a93d..3c2bec223eca 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -35,17 +35,30 @@ import {PayloadId, IExecutionEngine, IExecutionBuilder, PayloadAttributes} from import {ZERO_HASH, ZERO_HASH_HEX} from "../../constants/index.js"; import {IEth1ForBlockProduction} from "../../eth1/index.js"; import {numToQuantity} from "../../eth1/provider/utils.js"; -import { - validateBlobsAndKzgCommitments, - validateBlindedBlobsAndKzgCommitments, -} from "./validateBlobsAndKzgCommitments.js"; +import {validateBlobsAndKzgCommitments} from "./validateBlobsAndKzgCommitments.js"; // Time to provide the EL to generate a payload from new payload id const PAYLOAD_GENERATION_TIME_MS = 500; -enum PayloadPreparationType { + +export enum PayloadPreparationType { Fresh = "Fresh", Cached = "Cached", Reorged = "Reorged", + Blinded = "Blinded", +} + +/** + * Block production steps tracked in metrics + */ +export enum BlockProductionStep { + proposerSlashing = "proposerSlashing", + attesterSlashings = "attesterSlashings", + voluntaryExits = "voluntaryExits", + blsToExecutionChanges = "blsToExecutionChanges", + attestations = "attestations", + eth1DataAndDeposits = "eth1DataAndDeposits", + syncAggregate = "syncAggregate", + executionPayload = "executionPayload", } export type BlockAttributes = { @@ -74,8 +87,8 @@ export enum BlobsResultType { export type BlobsResult 
= | {type: BlobsResultType.preDeneb} - | {type: BlobsResultType.produced; blobSidecars: deneb.BlobSidecars; blockHash: RootHex} - | {type: BlobsResultType.blinded; blobSidecars: deneb.BlindedBlobSidecars; blockHash: RootHex}; + | {type: BlobsResultType.produced; contents: deneb.Contents; blockHash: RootHex} + | {type: BlobsResultType.blinded}; export async function produceBlockBody( this: BeaconChain, @@ -134,13 +147,13 @@ export async function produceBlockBody( const endAttestations = stepsMetrics?.startTimer(); const attestations = this.aggregatedAttestationPool.getAttestationsForBlock(this.forkChoice, currentState); endAttestations?.({ - step: "attestations", + step: BlockProductionStep.attestations, }); const endEth1DataAndDeposits = stepsMetrics?.startTimer(); const {eth1Data, deposits} = await this.eth1.getEth1DataAndDeposits(currentState); endEth1DataAndDeposits?.({ - step: "eth1DataAndDeposits", + step: BlockProductionStep.eth1DataAndDeposits, }); const blockBody: phase0.BeaconBlockBody = { @@ -165,7 +178,7 @@ export async function produceBlockBody( (blockBody as altair.BeaconBlockBody).syncAggregate = syncAggregate; } endSyncAggregate?.({ - step: "syncAggregate", + step: BlockProductionStep.syncAggregate, }); Object.assign(logMeta, { @@ -221,7 +234,7 @@ export async function produceBlockBody( executionPayloadValue = builderRes.executionPayloadValue; const fetchedTime = Date.now() / 1000 - computeTimeAtSlot(this.config, blockSlot, this.genesisTime); - const prepType = "blinded"; + const prepType = PayloadPreparationType.Blinded; this.metrics?.blockPayload.payloadFetchedTime.observe({prepType}, fetchedTime); this.logger.verbose("Fetched execution payload header from builder", { slot: blockSlot, @@ -231,35 +244,14 @@ export async function produceBlockBody( }); if (ForkSeq[fork] >= ForkSeq.deneb) { - const {blindedBlobsBundle} = builderRes; - if (blindedBlobsBundle === undefined) { - throw Error(`Invalid builder getHeader response for fork=${fork}, missing 
blindedBlobsBundle`); - } - - // validate blindedBlobsBundle - if (this.opts.sanityCheckExecutionEngineBlobs) { - validateBlindedBlobsAndKzgCommitments(builderRes.header, blindedBlobsBundle); + const {blobKzgCommitments} = builderRes; + if (blobKzgCommitments === undefined) { + throw Error(`Invalid builder getHeader response for fork=${fork}, missing blobKzgCommitments`); } - (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blindedBlobsBundle.commitments; - const blockHash = toHex(builderRes.header.blockHash); - - const blobSidecars = Array.from({length: blindedBlobsBundle.blobRoots.length}, (_v, index) => { - const blobRoot = blindedBlobsBundle.blobRoots[index]; - const commitment = blindedBlobsBundle.commitments[index]; - const proof = blindedBlobsBundle.proofs[index]; - const blindedBlobSidecar = { - index, - blobRoot, - kzgProof: proof, - kzgCommitment: commitment, - }; - // Other fields will be injected after postState is calculated - return blindedBlobSidecar; - }) as deneb.BlindedBlobSidecars; - blobsResult = {type: BlobsResultType.blinded, blobSidecars, blockHash}; - - Object.assign(logMeta, {blobs: blindedBlobsBundle.commitments.length}); + (blockBody as deneb.BlindedBeaconBlockBody).blobKzgCommitments = blobKzgCommitments; + blobsResult = {type: BlobsResultType.blinded}; + Object.assign(logMeta, {blobs: blobKzgCommitments.length}); } else { blobsResult = {type: BlobsResultType.preDeneb}; } @@ -332,23 +324,10 @@ export async function produceBlockBody( (blockBody as deneb.BeaconBlockBody).blobKzgCommitments = blobsBundle.commitments; const blockHash = toHex(executionPayload.blockHash); + const contents = {kzgProofs: blobsBundle.proofs, blobs: blobsBundle.blobs}; + blobsResult = {type: BlobsResultType.produced, contents, blockHash}; - const blobSidecars = Array.from({length: blobsBundle.blobs.length}, (_v, index) => { - const blob = blobsBundle.blobs[index]; - const commitment = blobsBundle.commitments[index]; - const proof = 
blobsBundle.proofs[index]; - const blobSidecar = { - index, - blob, - kzgProof: proof, - kzgCommitment: commitment, - }; - // Other fields will be injected after postState is calculated - return blobSidecar; - }) as deneb.BlobSidecars; - blobsResult = {type: BlobsResultType.produced, blobSidecars, blockHash}; - - Object.assign(logMeta, {blobs: blobSidecars.length}); + Object.assign(logMeta, {blobs: blobsBundle.commitments.length}); } else { blobsResult = {type: BlobsResultType.preDeneb}; } @@ -380,7 +359,7 @@ export async function produceBlockBody( executionPayloadValue = BigInt(0); } endExecutionPayload?.({ - step: "executionPayload", + step: BlockProductionStep.executionPayload, }); if (ForkSeq[fork] >= ForkSeq.capella) { @@ -502,7 +481,7 @@ async function prepareExecutionPayloadHeader( ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blindedBlobsBundle?: deneb.BlindedBlobsBundle; + blobKzgCommitments?: deneb.BlobKzgCommitments; }> { if (!chain.executionBuilder) { throw Error("executionBuilder required"); diff --git a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts index 0d00d0c8bd72..54e90672d189 100644 --- a/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts +++ b/packages/beacon-node/src/chain/produceBlock/validateBlobsAndKzgCommitments.ts @@ -1,4 +1,4 @@ -import {allForks, deneb} from "@lodestar/types"; +import {allForks} from "@lodestar/types"; import {BlobsBundle} from "../../execution/index.js"; /** @@ -13,15 +13,3 @@ export function validateBlobsAndKzgCommitments(payload: allForks.ExecutionPayloa ); } } - -export function validateBlindedBlobsAndKzgCommitments( - payload: allForks.ExecutionPayloadHeader, - blindedBlobsBundle: deneb.BlindedBlobsBundle -): void { - // sanity-check that the KZG commitments match the blobs (as produced by the execution engine) - if 
(blindedBlobsBundle.blobRoots.length !== blindedBlobsBundle.commitments.length) { - throw Error( - `BlindedBlobs bundle blobs len ${blindedBlobsBundle.blobRoots.length} != commitments len ${blindedBlobsBundle.commitments.length}` - ); - } -} diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 5305502c8c05..dfda56cc1eea 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -221,7 +221,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { private jobQueueProcessor = async (regenRequest: RegenRequest): Promise => { const metricsLabels = { caller: regenRequest.args[regenRequest.args.length - 1] as RegenCaller, - entrypoint: regenRequest.key, + entrypoint: regenRequest.key as RegenFnName, }; let timer; try { diff --git a/packages/beacon-node/src/chain/reprocess.ts b/packages/beacon-node/src/chain/reprocess.ts index 3ab6056fb3af..4c91ef07ff69 100644 --- a/packages/beacon-node/src/chain/reprocess.ts +++ b/packages/beacon-node/src/chain/reprocess.ts @@ -11,7 +11,7 @@ export const REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC = 2; /** * Reprocess status for metrics */ -enum ReprocessStatus { +export enum ReprocessStatus { /** * There are too many attestations that have unknown block root. 
*/ @@ -140,7 +140,10 @@ export class ReprocessController { for (const awaitingPromise of awaitingPromisesByRoot.values()) { const {resolve, addedTimeMs} = awaitingPromise; resolve(false); - this.metrics?.reprocessApiAttestations.waitSecBeforeReject.set((now - addedTimeMs) / 1000); + this.metrics?.reprocessApiAttestations.waitSecBeforeReject.set( + {reason: ReprocessStatus.expired}, + (now - addedTimeMs) / 1000 + ); this.metrics?.reprocessApiAttestations.reject.inc({reason: ReprocessStatus.expired}); } diff --git a/packages/beacon-node/src/chain/seenCache/index.ts b/packages/beacon-node/src/chain/seenCache/index.ts index f354a37f93ee..250e6581c312 100644 --- a/packages/beacon-node/src/chain/seenCache/index.ts +++ b/packages/beacon-node/src/chain/seenCache/index.ts @@ -2,3 +2,4 @@ export {SeenAggregators, SeenAttesters} from "./seenAttesters.js"; export {SeenBlockProposers} from "./seenBlockProposers.js"; export {SeenSyncCommitteeMessages} from "./seenCommittee.js"; export {SeenContributionAndProof} from "./seenCommitteeContribution.js"; +export {SeenGossipBlockInput} from "./seenGossipBlockInput.js"; diff --git a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts index ded54a5b4a54..a19476497e9f 100644 --- a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts +++ b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts @@ -17,7 +17,7 @@ export type AttestationDataCacheEntry = { subnet: number; }; -enum RejectReason { +export enum RejectReason { // attestation data reaches MAX_CACHE_SIZE_PER_SLOT reached_limit = "reached_limit", // attestation data is too old diff --git a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts new file mode 100644 index 000000000000..8b767975c112 --- /dev/null +++ b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts @@ -0,0 +1,170 @@ +import 
{toHexString} from "@chainsafe/ssz"; +import {deneb, RootHex, ssz, allForks} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {pruneSetToMax} from "@lodestar/utils"; +import {BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; + +import { + BlockInput, + getBlockInput, + BlockSource, + BlockInputBlobs, + BlobsCache, + GossipedInputType, +} from "../blocks/types.js"; + +type GossipedBlockInput = + | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null} + | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null}; + +type BlockInputCacheType = { + block?: allForks.SignedBeaconBlock; + blockBytes?: Uint8Array | null; + blobsCache: BlobsCache; + // promise and its callback cached for delayed resolution + availabilityPromise: Promise; + resolveAvailability: (blobs: BlockInputBlobs) => void; +}; + +const MAX_GOSSIPINPUT_CACHE = 5; + +/** + * SeenGossipBlockInput tracks and caches the live blobs and blocks on the network to solve data availability + * for the blockInput. If no block has been seen yet for some already seen blobs, it responds will null, but + * on the first block or the consequent blobs it responds with blobs promise till all blobs become available. + * + * One can start processing block on blobs promise blockInput response and can await on the promise before + * fully importing the block. 
The blobs promise is gets resolved as soon as all blobs corresponding to that + * block are seen by SeenGossipBlockInput + */ +export class SeenGossipBlockInput { + private blockInputCache = new Map(); + + prune(): void { + pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE); + } + + getGossipBlockInput( + config: ChainForkConfig, + gossipedInput: GossipedBlockInput + ): + | { + blockInput: BlockInput; + blockInputMeta: {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number}; + } + | {blockInput: null; blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}} { + let blockHex; + let blockCache; + + if (gossipedInput.type === GossipedInputType.block) { + const {signedBlock, blockBytes} = gossipedInput; + + blockHex = toHexString( + config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message) + ); + blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(); + + blockCache.block = signedBlock; + blockCache.blockBytes = blockBytes; + } else { + const {blobSidecar, blobBytes} = gossipedInput; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); + blockHex = toHexString(blockRoot); + blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(); + + // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions + blockCache.blobsCache.set(blobSidecar.index, { + blobSidecar, + // easily splice out the unsigned message as blob is a fixed length type + blobBytes: blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? 
null, + }); + } + + if (!this.blockInputCache.has(blockHex)) { + this.blockInputCache.set(blockHex, blockCache); + } + const {block: signedBlock, blockBytes, blobsCache, availabilityPromise, resolveAvailability} = blockCache; + + if (signedBlock !== undefined) { + // block is available, check if all blobs have shown up + const {slot, body} = signedBlock.message; + const {blobKzgCommitments} = body as deneb.BeaconBlockBody; + const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + + if (blobKzgCommitments.length < blobsCache.size) { + throw Error( + `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` + ); + } + + if (blobKzgCommitments.length === blobsCache.size) { + const allBlobs = getBlockInputBlobs(blobsCache); + resolveAvailability(allBlobs); + const {blobs, blobsBytes} = allBlobs; + return { + blockInput: getBlockInput.postDeneb( + config, + signedBlock, + BlockSource.gossip, + blobs, + blockBytes ?? null, + blobsBytes + ), + blockInputMeta: {pending: null, haveBlobs: blobs.length, expectedBlobs: blobKzgCommitments.length}, + }; + } else { + return { + blockInput: getBlockInput.blobsPromise( + config, + signedBlock, + BlockSource.gossip, + blobsCache, + blockBytes ?? null, + availabilityPromise + ), + blockInputMeta: { + pending: GossipedInputType.blob, + haveBlobs: blobsCache.size, + expectedBlobs: blobKzgCommitments.length, + }, + }; + } + } else { + // will need to wait for the block to showup + return { + blockInput: null, + blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null}, + }; + } + } +} + +function getEmptyBlockInputCacheEntry(): BlockInputCacheType { + // Capture both the promise and its callbacks. 
+ // It is not spec'ed but in tests in Firefox and NodeJS the promise constructor is run immediately + let resolveAvailability: ((blobs: BlockInputBlobs) => void) | null = null; + const availabilityPromise = new Promise((resolveCB) => { + resolveAvailability = resolveCB; + }); + if (resolveAvailability === null) { + throw Error("Promise Constructor was not executed immediately"); + } + const blobsCache = new Map(); + return {availabilityPromise, resolveAvailability, blobsCache}; +} + +function getBlockInputBlobs(blobsCache: BlobsCache): BlockInputBlobs { + const blobs = []; + const blobsBytes = []; + + for (let index = 0; index < blobsCache.size; index++) { + const blobCache = blobsCache.get(index); + if (blobCache === undefined) { + throw Error(`Missing blobSidecar at index=${index}`); + } + const {blobSidecar, blobBytes} = blobCache; + blobs.push(blobSidecar); + blobsBytes.push(blobBytes); + } + return {blobs, blobsBytes}; +} diff --git a/packages/beacon-node/src/chain/shufflingCache.ts b/packages/beacon-node/src/chain/shufflingCache.ts index c8468f3b6db5..23177142d846 100644 --- a/packages/beacon-node/src/chain/shufflingCache.ts +++ b/packages/beacon-node/src/chain/shufflingCache.ts @@ -167,6 +167,23 @@ export class ShufflingCache { } } + /** + * Same to get() function but synchronous. 
+ */ + getSync(shufflingEpoch: Epoch, decisionRootHex: RootHex): EpochShuffling | null { + const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).get(decisionRootHex); + if (cacheItem === undefined) { + return null; + } + + if (isShufflingCacheItem(cacheItem)) { + return cacheItem.shuffling; + } + + // ignore promise + return null; + } + private add(shufflingEpoch: Epoch, decisionBlock: RootHex, cacheItem: CacheItem): void { this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).set(decisionBlock, cacheItem); pruneSetToMax(this.itemsByDecisionRootByEpoch, this.maxEpochs); diff --git a/packages/beacon-node/src/chain/stateCache/datastore/db.ts b/packages/beacon-node/src/chain/stateCache/datastore/db.ts new file mode 100644 index 000000000000..fef38a7f8dd2 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/datastore/db.ts @@ -0,0 +1,38 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {phase0, ssz} from "@lodestar/types"; +import {IBeaconDb} from "../../../db/interface.js"; +import {CPStateDatastore, DatastoreKey} from "./types.js"; + +/** + * Implementation of CPStateDatastore using db. 
+ */ +export class DbCPStateDatastore implements CPStateDatastore { + constructor(private readonly db: IBeaconDb) {} + + async write(cpKey: phase0.Checkpoint, state: CachedBeaconStateAllForks): Promise { + const serializedCheckpoint = checkpointToDatastoreKey(cpKey); + const stateBytes = state.serialize(); + await this.db.checkpointState.putBinary(serializedCheckpoint, stateBytes); + return serializedCheckpoint; + } + + async remove(serializedCheckpoint: DatastoreKey): Promise { + await this.db.checkpointState.delete(serializedCheckpoint); + } + + async read(serializedCheckpoint: DatastoreKey): Promise { + return this.db.checkpointState.getBinary(serializedCheckpoint); + } + + async readKeys(): Promise { + return this.db.checkpointState.keys(); + } +} + +export function datastoreKeyToCheckpoint(key: DatastoreKey): phase0.Checkpoint { + return ssz.phase0.Checkpoint.deserialize(key); +} + +export function checkpointToDatastoreKey(cp: phase0.Checkpoint): DatastoreKey { + return ssz.phase0.Checkpoint.serialize(cp); +} diff --git a/packages/beacon-node/src/chain/stateCache/datastore/index.ts b/packages/beacon-node/src/chain/stateCache/datastore/index.ts new file mode 100644 index 000000000000..c37de5292a38 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/datastore/index.ts @@ -0,0 +1,2 @@ +export * from "./types.js"; +export * from "./db.js"; diff --git a/packages/beacon-node/src/chain/stateCache/datastore/types.ts b/packages/beacon-node/src/chain/stateCache/datastore/types.ts new file mode 100644 index 000000000000..66ea67f93500 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/datastore/types.ts @@ -0,0 +1,13 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {phase0} from "@lodestar/types"; + +// With db implementation, persistedKey is serialized data of a checkpoint +export type DatastoreKey = Uint8Array; + +// Make this generic to support testing +export interface CPStateDatastore { + write: (cpKey: 
phase0.Checkpoint, state: CachedBeaconStateAllForks) => Promise; + remove: (key: DatastoreKey) => Promise; + read: (key: DatastoreKey) => Promise; + readKeys: () => Promise; +} diff --git a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts new file mode 100644 index 000000000000..854983101c04 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts @@ -0,0 +1,181 @@ +import {toHexString} from "@chainsafe/ssz"; +import {RootHex} from "@lodestar/types"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {routes} from "@lodestar/api"; +import {Metrics} from "../../metrics/index.js"; +import {LinkedList} from "../../util/array.js"; +import {MapTracker} from "./mapMetrics.js"; +import {BlockStateCache} from "./types.js"; + +export type FIFOBlockStateCacheOpts = { + maxBlockStates?: number; +}; + +/** + * Regen state if there's a reorg distance > 32 slots. + */ +export const DEFAULT_MAX_BLOCK_STATES = 32; + +/** + * New implementation of BlockStateCache that keeps the most recent n states consistently + * - Maintain a linked list (FIFO) with special handling for head state, which is always the first item in the list + * - Prune per add() instead of per checkpoint so it only keeps n historical states consistently, prune from tail + * - No need to prune per finalized checkpoint + * + * Given this block tree with Block 11 as head: + * ``` + Block 10 + | + +-----+-----+ + | | + Block 11 Block 12 + ^ | + | | + head Block 13 + * ``` + * The maintained key order would be: 11 -> 13 -> 12 -> 10, and state 10 will be pruned first. 
+ */ +export class FIFOBlockStateCache implements BlockStateCache { + /** + * Max number of states allowed in the cache + */ + readonly maxStates: number; + + private readonly cache: MapTracker; + /** + * Key order to implement FIFO cache + */ + private readonly keyOrder: LinkedList; + private readonly metrics: Metrics["stateCache"] | null | undefined; + + constructor(opts: FIFOBlockStateCacheOpts, {metrics}: {metrics?: Metrics | null}) { + this.maxStates = opts.maxBlockStates ?? DEFAULT_MAX_BLOCK_STATES; + this.cache = new MapTracker(metrics?.stateCache); + if (metrics) { + this.metrics = metrics.stateCache; + metrics.stateCache.size.addCollect(() => metrics.stateCache.size.set(this.cache.size)); + } + this.keyOrder = new LinkedList(); + } + + /** + * Set a state as head, happens when importing a block and head block is changed. + */ + setHeadState(item: CachedBeaconStateAllForks | null): void { + if (item !== null) { + this.add(item, true); + } + } + + /** + * Get a state from this cache given a state root hex. + */ + get(rootHex: RootHex): CachedBeaconStateAllForks | null { + this.metrics?.lookups.inc(); + const item = this.cache.get(rootHex); + if (!item) { + return null; + } + + this.metrics?.hits.inc(); + this.metrics?.stateClonedCount.observe(item.clonedCount); + + return item; + } + + /** + * Add a state to this cache. + * @param isHead if true, move it to the head of the list. Otherwise add to the 2nd position. + * In importBlock() steps, normally it'll call add() with isHead = false first. Then call setHeadState() to set the head. 
+ */ + add(item: CachedBeaconStateAllForks, isHead = false): void { + const key = toHexString(item.hashTreeRoot()); + if (this.cache.get(key) != null) { + if (!this.keyOrder.has(key)) { + throw Error(`State exists but key not found in keyOrder: ${key}`); + } + if (isHead) { + this.keyOrder.moveToHead(key); + } else { + this.keyOrder.moveToSecond(key); + } + // same size, no prune + return; + } + + // new state + this.metrics?.adds.inc(); + this.cache.set(key, item); + if (isHead) { + this.keyOrder.unshift(key); + } else { + // insert after head + const head = this.keyOrder.first(); + if (head == null) { + // should not happen, however handle just in case + this.keyOrder.unshift(key); + } else { + this.keyOrder.insertAfter(head, key); + } + } + this.prune(key); + } + + get size(): number { + return this.cache.size; + } + + /** + * Prune the cache from tail to keep the most recent n states consistently. + * The tail of the list is the oldest state, in case regen adds back the same state, + * it should stay next to head so that it won't be pruned right away. + * The FIFO cache helps with this. + */ + prune(lastAddedKey: string): void { + while (this.keyOrder.length > this.maxStates) { + const key = this.keyOrder.last(); + // it does not make sense to prune the last added state + // this only happens when max state is 1 in a short period of time + if (key === lastAddedKey) { + break; + } + if (!key) { + // should not happen + throw new Error("No key"); + } + this.keyOrder.pop(); + this.cache.delete(key); + } + } + + /** + * No need for this implementation + * This is only to conform to the old api + */ + deleteAllBeforeEpoch(): void {} + + /** + * ONLY FOR DEBUGGING PURPOSES. For lodestar debug API. + */ + clear(): void { + this.cache.clear(); + } + + /** ONLY FOR DEBUGGING PURPOSES. 
For lodestar debug API */ + dumpSummary(): routes.lodestar.StateCacheItem[] { + return Array.from(this.cache.entries()).map(([key, state]) => ({ + slot: state.slot, + root: toHexString(state.hashTreeRoot()), + reads: this.cache.readCount.get(key) ?? 0, + lastRead: this.cache.lastRead.get(key) ?? 0, + checkpointState: false, + })); + } + + /** + * For unit test only. + */ + dumpKeyOrder(): string[] { + return this.keyOrder.toArray(); + } +} diff --git a/packages/beacon-node/src/chain/stateCache/index.ts b/packages/beacon-node/src/chain/stateCache/index.ts index 69fb34a77e4c..b16d87c3fa0d 100644 --- a/packages/beacon-node/src/chain/stateCache/index.ts +++ b/packages/beacon-node/src/chain/stateCache/index.ts @@ -1,2 +1,3 @@ export * from "./stateContextCache.js"; export * from "./stateContextCheckpointsCache.js"; +export * from "./fifoBlockStateCache.js"; diff --git a/packages/beacon-node/src/chain/stateCache/mapMetrics.ts b/packages/beacon-node/src/chain/stateCache/mapMetrics.ts index eb52755bfc00..bb33323015d4 100644 --- a/packages/beacon-node/src/chain/stateCache/mapMetrics.ts +++ b/packages/beacon-node/src/chain/stateCache/mapMetrics.ts @@ -1,8 +1,8 @@ -import {IAvgMinMax} from "../../metrics/index.js"; +import {AvgMinMax} from "@lodestar/utils"; type MapTrackerMetrics = { - reads: IAvgMinMax; - secondsSinceLastRead: IAvgMinMax; + reads: AvgMinMax; + secondsSinceLastRead: AvgMinMax; }; export class MapTracker extends Map { diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts new file mode 100644 index 000000000000..8ad5c5098118 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -0,0 +1,645 @@ +import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {phase0, Epoch, RootHex} from "@lodestar/types"; +import {CachedBeaconStateAllForks, computeStartSlotAtEpoch, getBlockRootAtSlot} from 
"@lodestar/state-transition"; +import {Logger, MapDef} from "@lodestar/utils"; +import {routes} from "@lodestar/api"; +import {loadCachedBeaconState} from "@lodestar/state-transition"; +import {Metrics} from "../../metrics/index.js"; +import {IClock} from "../../util/clock.js"; +import {ShufflingCache} from "../shufflingCache.js"; +import {MapTracker} from "./mapMetrics.js"; +import {CheckpointHex, CheckpointStateCache, CacheItemType} from "./types.js"; +import {CPStateDatastore, DatastoreKey, datastoreKeyToCheckpoint} from "./datastore/index.js"; + +type GetHeadStateFn = () => CachedBeaconStateAllForks; + +type PersistentCheckpointStateCacheModules = { + metrics?: Metrics | null; + logger: Logger; + clock?: IClock | null; + shufflingCache: ShufflingCache; + datastore: CPStateDatastore; + getHeadState?: GetHeadStateFn; +}; + +type PersistentCheckpointStateCacheOpts = { + // Keep max n states in memory, persist the rest to disk + maxCPStateEpochsInMemory?: number; +}; + +/** checkpoint serialized as a string */ +type CacheKey = string; + +type InMemoryCacheItem = { + type: CacheItemType.inMemory; + state: CachedBeaconStateAllForks; + // if a cp state is reloaded from disk, it'll keep track of persistedKey to allow us to remove it from disk later + // it also helps not to persist it again + persistedKey?: DatastoreKey; +}; + +type PersistedCacheItem = { + type: CacheItemType.persisted; + value: DatastoreKey; +}; + +type CacheItem = InMemoryCacheItem | PersistedCacheItem; + +type LoadedStateBytesData = {persistedKey: DatastoreKey; stateBytes: Uint8Array}; + +/** + * Before n-historical states, lodestar keeps mostly 3 states in memory with 1 finalized state + * Since Jan 2024, lodestar stores the finalized state in disk and keeps up to 2 epochs in memory + */ +export const DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY = 2; + +/** + * An implementation of CheckpointStateCache that keep up to n epoch checkpoint states in memory and persist the rest to disk + * - If it's more 
than `maxEpochsInMemory` epochs old, it will persist n last epochs to disk based on the view of the block + * - Once a chain gets finalized we'll prune all states from memory and disk for epochs < finalizedEpoch + * - In get*() apis if shouldReload is true, it will reload from disk. The reload() api is expensive and should only be called in some important flows: + * - Get state for block processing + * - updateHeadState + * - as with any cache, the state could be evicted from memory at any time, so we should always check if the state is in memory or not + * - Each time we process a state, we only persist exactly 1 checkpoint state per epoch based on the view of block and prune all others. The persisted + * checkpoint state could be finalized and used later in archive task, it's also used to regen states. + * - When we process multiple states in the same epoch, we could persist different checkpoint states of the same epoch because each block could have its + * own view. See unit test of this file `packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts` for more details. + * + * The below diagram shows Previous Root Checkpoint State is persisted for epoch (n-2) and Current Root Checkpoint State is persisted for epoch (n-1) + * while at epoch (n) and (n+1) we have both of them in memory + * + * ╔════════════════════════════════════╗═══════════════╗ + * ║ persisted to db or fs ║ in memory ║ + * ║ reload if needed ║ ║ + * ║ -----------------------------------║---------------║ + * ║ epoch: (n-2) (n-1) ║ n (n+1) ║ + * ║ |-------|-------|----║--|-------|----║ + * ║ ^ ^ ║ ^ ^ ║ + * ║ ║ ^ ^ ║ + * ╚════════════════════════════════════╝═══════════════╝ + * + * The "in memory" checkpoint states are similar to the old implementation: we have both Previous Root Checkpoint State and Current Root Checkpoint State per epoch. 
+ * However in the "persisted to db or fs" part, we usually only persist 1 checkpoint state per epoch, the one that could potentially be justified/finalized later + * based on the view of blocks. + */ +export class PersistentCheckpointStateCache implements CheckpointStateCache { + private readonly cache: MapTracker; + /** Epoch -> Set */ + private readonly epochIndex = new MapDef>(() => new Set()); + private readonly metrics: Metrics["cpStateCache"] | null | undefined; + private readonly logger: Logger; + private readonly clock: IClock | null | undefined; + private preComputedCheckpoint: string | null = null; + private preComputedCheckpointHits: number | null = null; + private readonly maxEpochsInMemory: number; + private readonly datastore: CPStateDatastore; + private readonly shufflingCache: ShufflingCache; + private readonly getHeadState?: GetHeadStateFn; + + constructor( + {metrics, logger, clock, shufflingCache, datastore, getHeadState}: PersistentCheckpointStateCacheModules, + opts: PersistentCheckpointStateCacheOpts + ) { + this.cache = new MapTracker(metrics?.cpStateCache); + if (metrics) { + this.metrics = metrics.cpStateCache; + metrics.cpStateCache.size.addCollect(() => { + let persistCount = 0; + let inMemoryCount = 0; + const memoryEpochs = new Set(); + const persistentEpochs = new Set(); + for (const [key, cacheItem] of this.cache.entries()) { + const {epoch} = fromCacheKey(key); + if (isPersistedCacheItem(cacheItem)) { + persistCount++; + persistentEpochs.add(epoch); + } else { + inMemoryCount++; + memoryEpochs.add(epoch); + } + } + metrics.cpStateCache.size.set({type: CacheItemType.persisted}, persistCount); + metrics.cpStateCache.size.set({type: CacheItemType.inMemory}, inMemoryCount); + metrics.cpStateCache.epochSize.set({type: CacheItemType.persisted}, persistentEpochs.size); + metrics.cpStateCache.epochSize.set({type: CacheItemType.inMemory}, memoryEpochs.size); + }); + } + this.logger = logger; + this.clock = clock; + if 
(opts.maxCPStateEpochsInMemory !== undefined && opts.maxCPStateEpochsInMemory < 0) { + throw new Error("maxEpochsInMemory must be >= 0"); + } + this.maxEpochsInMemory = opts.maxCPStateEpochsInMemory ?? DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY; + // Specify different datastore for testing + this.datastore = datastore; + this.shufflingCache = shufflingCache; + this.getHeadState = getHeadState; + } + + /** + * Reload checkpoint state keys from the last run. + */ + async init(): Promise { + const persistedKeys = await this.datastore.readKeys(); + for (const persistedKey of persistedKeys) { + const cp = datastoreKeyToCheckpoint(persistedKey); + this.cache.set(toCacheKey(cp), {type: CacheItemType.persisted, value: persistedKey}); + this.epochIndex.getOrDefault(cp.epoch).add(toHexString(cp.root)); + } + this.logger.info("Loaded persisted checkpoint states from the last run", { + count: persistedKeys.length, + maxEpochsInMemory: this.maxEpochsInMemory, + }); + } + + /** + * Get a state from cache, it may reload from disk. + * This is an expensive api, should only be called in some important flows: + * - Validate a gossip block + * - Get block for processing + * - Regen head state + */ + async getOrReload(cp: CheckpointHex): Promise { + const stateOrStateBytesData = await this.getStateOrLoadDb(cp); + if (stateOrStateBytesData === null || isCachedBeaconState(stateOrStateBytesData)) { + return stateOrStateBytesData; + } + const {persistedKey, stateBytes} = stateOrStateBytesData; + const logMeta = {persistedKey: toHexString(persistedKey)}; + this.logger.debug("Reload: read state successful", logMeta); + this.metrics?.stateReloadSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); + const seedState = this.findSeedStateToReload(cp) ?? 
this.getHeadState?.(); + if (seedState == null) { + throw new Error("No seed state found for cp " + toCacheKey(cp)); + } + this.metrics?.stateReloadEpochDiff.observe(Math.abs(seedState.epochCtx.epoch - cp.epoch)); + this.logger.debug("Reload: found seed state", {...logMeta, seedSlot: seedState.slot}); + + try { + const timer = this.metrics?.stateReloadDuration.startTimer(); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, { + shufflingGetter: this.shufflingCache.getSync.bind(this.shufflingCache), + }); + newCachedState.commit(); + const stateRoot = toHexString(newCachedState.hashTreeRoot()); + timer?.(); + this.logger.debug("Reload: cached state load successful", { + ...logMeta, + stateSlot: newCachedState.slot, + stateRoot, + seedSlot: seedState.slot, + }); + + // only remove persisted state once we reload successfully + const cpKey = toCacheKey(cp); + this.cache.set(cpKey, {type: CacheItemType.inMemory, state: newCachedState, persistedKey}); + this.epochIndex.getOrDefault(cp.epoch).add(cp.rootHex); + // don't prune from memory here, call it at the last 1/3 of slot 0 of an epoch + return newCachedState; + } catch (e) { + this.logger.debug("Reload: error loading cached state", logMeta, e as Error); + return null; + } + } + + /** + * Return either state or state bytes loaded from db. + */ + async getStateOrBytes(cp: CheckpointHex): Promise { + const stateOrLoadedState = await this.getStateOrLoadDb(cp); + if (stateOrLoadedState === null || isCachedBeaconState(stateOrLoadedState)) { + return stateOrLoadedState; + } + return stateOrLoadedState.stateBytes; + } + + /** + * Return either state or state bytes with persisted key loaded from db. 
+ */ + async getStateOrLoadDb(cp: CheckpointHex): Promise { + const cpKey = toCacheKey(cp); + const inMemoryState = this.get(cpKey); + if (inMemoryState) { + return inMemoryState; + } + + const cacheItem = this.cache.get(cpKey); + if (cacheItem === undefined) { + return null; + } + + if (isInMemoryCacheItem(cacheItem)) { + // should not happen, in-memory state is handled above + throw new Error("Expected persistent key"); + } + + const persistedKey = cacheItem.value; + const dbReadTimer = this.metrics?.stateReloadDbReadTime.startTimer(); + const stateBytes = await this.datastore.read(persistedKey); + dbReadTimer?.(); + + if (stateBytes === null) { + return null; + } + return {persistedKey, stateBytes}; + } + + /** + * Similar to get() api without reloading from disk + */ + get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null { + this.metrics?.lookups.inc(); + const cpKey = typeof cpOrKey === "string" ? cpOrKey : toCacheKey(cpOrKey); + const cacheItem = this.cache.get(cpKey); + + if (cacheItem === undefined) { + return null; + } + + this.metrics?.hits.inc(); + + if (cpKey === this.preComputedCheckpoint) { + this.preComputedCheckpointHits = (this.preComputedCheckpointHits ?? 0) + 1; + } + + if (isInMemoryCacheItem(cacheItem)) { + const {state} = cacheItem; + this.metrics?.stateClonedCount.observe(state.clonedCount); + return state; + } + + return null; + } + + /** + * Add a state of a checkpoint to this cache, prune from memory if necessary. 
+ */ + add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void { + const cpHex = toCheckpointHex(cp); + const key = toCacheKey(cpHex); + const cacheItem = this.cache.get(key); + this.metrics?.adds.inc(); + if (cacheItem !== undefined && isPersistedCacheItem(cacheItem)) { + const persistedKey = cacheItem.value; + // was persisted to disk, set back to memory + this.cache.set(key, {type: CacheItemType.inMemory, state, persistedKey}); + this.logger.verbose("Added checkpoint state to memory but a persisted key existed", { + epoch: cp.epoch, + rootHex: cpHex.rootHex, + persistedKey: toHexString(persistedKey), + }); + } else { + this.cache.set(key, {type: CacheItemType.inMemory, state}); + this.logger.verbose("Added checkpoint state to memory", {epoch: cp.epoch, rootHex: cpHex.rootHex}); + } + this.epochIndex.getOrDefault(cp.epoch).add(cpHex.rootHex); + } + + /** + * Searches in-memory state for the latest cached state with a `root` without reload, starting with `epoch` and descending + */ + getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null { + // sort epochs in descending order, only consider epochs lte `epoch` + const epochs = Array.from(this.epochIndex.keys()) + .sort((a, b) => b - a) + .filter((e) => e <= maxEpoch); + for (const epoch of epochs) { + if (this.epochIndex.get(epoch)?.has(rootHex)) { + const inMemoryState = this.get({rootHex, epoch}); + if (inMemoryState) { + return inMemoryState; + } + } + } + return null; + } + + /** + * Searches state for the latest cached state with a `root`, reload if needed, starting with `epoch` and descending + * This is expensive api, should only be called in some important flows: + * - Validate a gossip block + * - Get block for processing + * - Regen head state + */ + async getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise { + // sort epochs in descending order, only consider epochs lte `epoch` + const epochs = Array.from(this.epochIndex.keys()) + .sort((a, b) => b - a) + 
.filter((e) => e <= maxEpoch); + for (const epoch of epochs) { + if (this.epochIndex.get(epoch)?.has(rootHex)) { + try { + const state = await this.getOrReload({rootHex, epoch}); + if (state) { + return state; + } + } catch (e) { + this.logger.debug("Error get or reload state", {epoch, rootHex}, e as Error); + } + } + } + return null; + } + + /** + * Update the precomputed checkpoint and return the number of hits for the + * previous one (if any). + */ + updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null { + const previousHits = this.preComputedCheckpointHits; + this.preComputedCheckpoint = toCacheKey({rootHex, epoch}); + this.preComputedCheckpointHits = 0; + return previousHits; + } + + /** + * This is just to conform to the old implementation + */ + prune(): void { + // do nothing + } + + /** + * Prune all checkpoint states before the provided finalized epoch. + */ + pruneFinalized(finalizedEpoch: Epoch): void { + for (const epoch of this.epochIndex.keys()) { + if (epoch < finalizedEpoch) { + this.deleteAllEpochItems(epoch).catch((e) => + this.logger.debug("Error delete all epoch items", {epoch, finalizedEpoch}, e as Error) + ); + } + } + } + + /** + * After processing a block, prune from memory based on the view of that block. + * This is likely to persist 1 state per epoch, at the last 1/3 of slot 0 of an epoch although it'll be called on every last 1/3 of slot.
+ * Given the following block b was processed with b2, b1, b0 are ancestors in epoch (n-2), (n-1), n respectively + * + * epoch: (n-2) (n-1) n (n+1) + * |-----------|-----------|-----------|-----------| + * ^ ^ ^ ^ + * | | | | + * block chain: b2---------->b1--------->b0-->b + * + * After processing block b, if maxEpochsInMemory is: + * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk + * - 1 then we'll persist {root: b2, epoch n-2} and {root: b1, epoch n-1} checkpoint state to disk + * - 0 then we'll persist {root: b2, epoch n-2} and {root: b1, epoch n-1} and {root: b0, epoch n} checkpoint state to disk + * - if any old epochs checkpoint states are persisted, no need to do it again + * + * Note that for each epoch there could be multiple checkpoint states, usually 2, one for Previous Root Checkpoint State and one for Current Root Checkpoint State. + * We normally only persist 1 checkpoint state per epoch, the one that could potentially be justified/finalized later based on the view of the block. + * Other checkpoint states are pruned from memory. + * + * This design also covers the reorg scenario. Given block c in the same epoch n where c.slot > b.slot, c is not descendant of b, and c is built on top of c0 + * instead of b0 (epoch (n - 1)) + * + * epoch: (n-2) (n-1) n (n+1) + * |-----------|-----------|-----------|-----------| + * ^ ^ ^ ^ ^ ^ + * | | | | | | + * block chain: b2---------->b1----->c0->b0-->b | + * ║ | + * ╚═══════════>c (reorg) + * + * After processing block c, if maxEpochsInMemory is: + * - 0 then we'll persist {root: c0, epoch: n} checkpoint state to disk. Note that regen should populate {root: c0, epoch: n} checkpoint state before. 
+ * + * epoch: (n-1) n (n+1) + * |-------------------------------------------------------------|-------------------------------------------------------------| + * ^ ^ ^ ^ + * _______ | | | | + * | | | | | | + * | db |====== reload ======> {root: b1, epoch: n-1} cp state ======> c0 block state ======> {root: c0, epoch: n} cp state =====> c block state + * |_______| + * + * + * + * - 1 then we'll persist {root: b1, epoch n-1} checkpoint state to disk. Note that at epoch n there is both {root: b0, epoch: n} and {root: c0, epoch: n} checkpoint states in memory + * - 2 then we'll persist {root: b2, epoch n-2} checkpoint state to disk, there are also 2 checkpoint states in memory at epoch n, same to the above (maxEpochsInMemory=1) + * + * As of Nov 2023, it takes 1.3s to 1.5s to persist a state on holesky on fast server. TODO: + * - improve state serialization time + * - or research how to only store diff against the finalized state + */ + async processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise { + let persistCount = 0; + // it's important to sort the epochs in ascending order, in case of big reorg we always want to keep the most recent checkpoint states + const sortedEpochs = Array.from(this.epochIndex.keys()).sort((a, b) => a - b); + if (sortedEpochs.length <= this.maxEpochsInMemory) { + return 0; + } + + for (const lowestEpoch of sortedEpochs.slice(0, sortedEpochs.length - this.maxEpochsInMemory)) { + const epochBoundarySlot = computeStartSlotAtEpoch(lowestEpoch); + const epochBoundaryRoot = + epochBoundarySlot === state.slot ? fromHexString(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot); + const epochBoundaryHex = toHexString(epochBoundaryRoot); + + // for each epoch, usually there are 2 rootHex respective to the 2 checkpoint states: Previous Root Checkpoint State and Current Root Checkpoint State + for (const rootHex of this.epochIndex.get(lowestEpoch) ?? 
[]) { + const cpKey = toCacheKey({epoch: lowestEpoch, rootHex}); + const cacheItem = this.cache.get(cpKey); + + if (cacheItem !== undefined && isInMemoryCacheItem(cacheItem)) { + // this is state in memory, we don't care if the checkpoint state is already persisted + let {persistedKey} = cacheItem; + const {state} = cacheItem; + const logMeta = { + stateSlot: state.slot, + rootHex, + epochBoundaryHex, + persistedKey: persistedKey ? toHexString(persistedKey) : "", + }; + + if (rootHex === epochBoundaryHex) { + if (persistedKey) { + // no need to persist + this.logger.verbose("Pruned checkpoint state from memory but no need to persist", logMeta); + } else { + // persist and do not update epochIndex + this.metrics?.statePersistSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); + const timer = this.metrics?.statePersistDuration.startTimer(); + const cpPersist = {epoch: lowestEpoch, root: epochBoundaryRoot}; + persistedKey = await this.datastore.write(cpPersist, state); + timer?.(); + persistCount++; + this.logger.verbose("Pruned checkpoint state from memory and persisted to disk", { + ...logMeta, + persistedKey: toHexString(persistedKey), + }); + } + // overwrite cpKey, this means the state is deleted from memory + this.cache.set(cpKey, {type: CacheItemType.persisted, value: persistedKey}); + } else { + if (persistedKey) { + // persisted file will be eventually deleted by the archive task + // this also means the state is deleted from memory + this.cache.set(cpKey, {type: CacheItemType.persisted, value: persistedKey}); + // do not update epochIndex + } else { + // delete the state from memory + this.cache.delete(cpKey); + this.epochIndex.get(lowestEpoch)?.delete(rootHex); + } + this.metrics?.statePruneFromMemoryCount.inc(); + this.logger.verbose("Pruned checkpoint state from memory", logMeta); + } + } + } + } + + return persistCount; + } + + /** + * Find a seed state to reload the state of provided checkpoint. 
Based on the design of n-historical state: + * + * ╔════════════════════════════════════╗═══════════════╗ + * ║ persisted to db or fs ║ in memory ║ + * ║ reload if needed ║ ║ + * ║ -----------------------------------║---------------║ + * ║ epoch: (n-2) (n-1) ║ n (n+1) ║ + * ║ |-------|-------|----║--|-------|----║ + * ║ ^ ^ ║ ^ ^ ║ + * ║ ║ ^ ^ ║ + * ╚════════════════════════════════════╝═══════════════╝ + * + * we always reload an epoch in the past. We'll start with epoch n then (n+1) prioritizing ones with the same view of `reloadedCp`. + * + * This could return null and we should get head state in that case. + */ + findSeedStateToReload(reloadedCp: CheckpointHex): CachedBeaconStateAllForks | null { + const maxEpoch = Math.max(...Array.from(this.epochIndex.keys())); + const reloadedCpSlot = computeStartSlotAtEpoch(reloadedCp.epoch); + let firstState: CachedBeaconStateAllForks | null = null; + // no need to check epochs before `maxEpoch - this.maxEpochsInMemory + 1` because they are all persisted + for (let epoch = maxEpoch - this.maxEpochsInMemory + 1; epoch <= maxEpoch; epoch++) { + // if there's at least 1 state in memory in an epoch, just return the 1st one + if (firstState !== null) { + return firstState; + } + + for (const rootHex of this.epochIndex.get(epoch) || []) { + const cpKey = toCacheKey({rootHex, epoch}); + const cacheItem = this.cache.get(cpKey); + if (cacheItem === undefined) { + // should not happen + continue; + } + if (isInMemoryCacheItem(cacheItem)) { + const {state} = cacheItem; + if (firstState === null) { + firstState = state; + } + + // amongst states of the same epoch, choose the one with the same view of reloadedCp + if ( + reloadedCpSlot < state.slot && + toHexString(getBlockRootAtSlot(state, reloadedCpSlot)) === reloadedCp.rootHex + ) { + return state; + } + } + } + } + + return firstState; + } + + clear(): void { + this.cache.clear(); + this.epochIndex.clear(); + } + + /** ONLY FOR DEBUGGING PURPOSES.
For lodestar debug API */ + dumpSummary(): routes.lodestar.StateCacheItem[] { + return Array.from(this.cache.keys()).map((key) => { + const cp = fromCacheKey(key); + // TODO: add checkpoint key and persistent key to the summary + return { + slot: computeStartSlotAtEpoch(cp.epoch), + root: cp.rootHex, + reads: this.cache.readCount.get(key) ?? 0, + lastRead: this.cache.lastRead.get(key) ?? 0, + checkpointState: true, + }; + }); + } + + /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */ + dumpCheckpointKeys(): string[] { + return Array.from(this.cache.keys()); + } + + /** + * Delete all items of an epoch from disk and memory + */ + private async deleteAllEpochItems(epoch: Epoch): Promise { + let persistCount = 0; + const rootHexes = this.epochIndex.get(epoch) || []; + for (const rootHex of rootHexes) { + const key = toCacheKey({rootHex, epoch}); + const cacheItem = this.cache.get(key); + + if (cacheItem) { + const persistedKey = isPersistedCacheItem(cacheItem) ? cacheItem.value : cacheItem.persistedKey; + if (persistedKey) { + await this.datastore.remove(persistedKey); + persistCount++; + this.metrics?.persistedStateRemoveCount.inc(); + } + } + this.cache.delete(key); + } + this.epochIndex.delete(epoch); + this.logger.verbose("Pruned finalized checkpoints states for epoch", { + epoch, + persistCount, + rootHexes: Array.from(rootHexes).join(","), + }); + } +} + +function toCheckpointHex(checkpoint: phase0.Checkpoint): CheckpointHex { + return { + epoch: checkpoint.epoch, + rootHex: toHexString(checkpoint.root), + }; +} + +function toCacheKey(cp: CheckpointHex | phase0.Checkpoint): CacheKey { + if (isCheckpointHex(cp)) { + return `${cp.rootHex}_${cp.epoch}`; + } + return `${toHexString(cp.root)}_${cp.epoch}`; +} + +function fromCacheKey(key: CacheKey): CheckpointHex { + const [rootHex, epoch] = key.split("_"); + return { + rootHex, + epoch: Number(epoch), + }; +} + +function isCachedBeaconState( + stateOrBytes: CachedBeaconStateAllForks | LoadedStateBytesData 
+): stateOrBytes is CachedBeaconStateAllForks { + return (stateOrBytes as CachedBeaconStateAllForks).slot !== undefined; +} + +function isInMemoryCacheItem(cacheItem: CacheItem): cacheItem is InMemoryCacheItem { + return cacheItem.type === CacheItemType.inMemory; +} + +function isPersistedCacheItem(cacheItem: CacheItem): cacheItem is PersistedCacheItem { + return cacheItem.type === CacheItemType.persisted; +} + +function isCheckpointHex(cp: CheckpointHex | phase0.Checkpoint): cp is CheckpointHex { + return (cp as CheckpointHex).rootHex !== undefined; +} diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts index 44523abf799c..3a04c4f4a258 100644 --- a/packages/beacon-node/src/chain/stateCache/stateContextCache.ts +++ b/packages/beacon-node/src/chain/stateCache/stateContextCache.ts @@ -4,15 +4,16 @@ import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {MapTracker} from "./mapMetrics.js"; +import {BlockStateCache} from "./types.js"; const MAX_STATES = 3 * 32; /** - * In memory cache of CachedBeaconState - * - * Similar API to Repository + * Old implementation of StateCache + * - Prune per checkpoint so number of states ranges from 96 to 128 + * - Keep a separate head state to make sure it is always available */ -export class StateContextCache { +export class StateContextCache implements BlockStateCache { /** * Max number of states allowed in the cache */ diff --git a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts index 0cb48f0e2ded..a177db9b7c87 100644 --- a/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/stateContextCheckpointsCache.ts @@ -5,6 +5,7 @@ import {MapDef} from "@lodestar/utils"; import 
{routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {MapTracker} from "./mapMetrics.js"; +import {CheckpointStateCache as CheckpointStateCacheInterface, CacheItemType} from "./types.js"; export type CheckpointHex = {epoch: Epoch; rootHex: RootHex}; const MAX_EPOCHS = 10; @@ -14,8 +15,9 @@ const MAX_EPOCHS = 10; * belonging to checkpoint * * Similar API to Repository + * TODO: rename to MemoryCheckpointStateCache in the next PR of n-historical states */ -export class CheckpointStateCache { +export class CheckpointStateCache implements CheckpointStateCacheInterface { private readonly cache: MapTracker; /** Epoch -> Set */ private readonly epochIndex = new MapDef>(() => new Set()); @@ -27,11 +29,32 @@ export class CheckpointStateCache { this.cache = new MapTracker(metrics?.cpStateCache); if (metrics) { this.metrics = metrics.cpStateCache; - metrics.cpStateCache.size.addCollect(() => metrics.cpStateCache.size.set(this.cache.size)); - metrics.cpStateCache.epochSize.addCollect(() => metrics.cpStateCache.epochSize.set(this.epochIndex.size)); + metrics.cpStateCache.size.addCollect(() => + metrics.cpStateCache.size.set({type: CacheItemType.inMemory}, this.cache.size) + ); + metrics.cpStateCache.epochSize.addCollect(() => + metrics.cpStateCache.epochSize.set({type: CacheItemType.inMemory}, this.epochIndex.size) + ); } } + async getOrReload(cp: CheckpointHex): Promise { + return this.get(cp); + } + + async getStateOrBytes(cp: CheckpointHex): Promise { + return this.get(cp); + } + + async getOrReloadLatest(rootHex: string, maxEpoch: number): Promise { + return this.getLatest(rootHex, maxEpoch); + } + + async processState(): Promise { + // do nothing, this class does not support pruning + return 0; + } + get(cp: CheckpointHex): CachedBeaconStateAllForks | null { this.metrics?.lookups.inc(); const cpKey = toCheckpointKey(cp); diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts new
file mode 100644 index 000000000000..5867d7d356c1 --- /dev/null +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -0,0 +1,73 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {Epoch, RootHex, phase0} from "@lodestar/types"; +import {routes} from "@lodestar/api"; + +export type CheckpointHex = {epoch: Epoch; rootHex: RootHex}; + +/** + * Lodestar currently keeps two state caches around. + * + * 1. BlockStateCache is keyed by state root, and intended to keep extremely recent states around (eg: post states from the latest blocks) + * These states are most likely to be useful for state transition of new blocks. + * + * 2. CheckpointStateCache is keyed by checkpoint, and intended to keep states which have just undergone an epoch transition. + * These states are useful for gossip verification and for avoiding an epoch transition during state transition of first-in-epoch blocks + */ + +/** + * Store up to n recent block states. + * + * The cache key is state root + */ +export interface BlockStateCache { + get(rootHex: RootHex): CachedBeaconStateAllForks | null; + add(item: CachedBeaconStateAllForks): void; + setHeadState(item: CachedBeaconStateAllForks | null): void; + clear(): void; + size: number; + prune(headStateRootHex: RootHex): void; + deleteAllBeforeEpoch(finalizedEpoch: Epoch): void; + dumpSummary(): routes.lodestar.StateCacheItem[]; +} + +/** + * Store checkpoint states to preserve epoch transition, this helps lodestar run exactly 1 epoch transition per epoch in normal network conditions. 
+ * + * There are 2 types of checkpoint states: + * + * - Previous Root Checkpoint State: where root is from previous epoch, this is added when we prepare for next slot, + * or to validate gossip block + * ``` + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * ``` + * + * - Current Root Checkpoint State: this is added when we process block slot 0 of epoch n, note that this block could + * be skipped so we don't always have this checkpoint state + * ``` + * epoch: (n-2) (n-1) n (n+1) + * |-------|-------|-------|-------| + * root ---------------------^ + * ``` + */ +export interface CheckpointStateCache { + init?: () => Promise; + getOrReload(cp: CheckpointHex): Promise; + getStateOrBytes(cp: CheckpointHex): Promise; + get(cpOrKey: CheckpointHex | string): CachedBeaconStateAllForks | null; + add(cp: phase0.Checkpoint, state: CachedBeaconStateAllForks): void; + getLatest(rootHex: RootHex, maxEpoch: Epoch): CachedBeaconStateAllForks | null; + getOrReloadLatest(rootHex: RootHex, maxEpoch: Epoch): Promise; + updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; + prune(finalizedEpoch: Epoch, justifiedEpoch: Epoch): void; + pruneFinalized(finalizedEpoch: Epoch): void; + processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise; + clear(): void; + dumpSummary(): routes.lodestar.StateCacheItem[]; +} + +export enum CacheItemType { + persisted = "persisted", + inMemory = "in-memory", +} diff --git a/packages/beacon-node/src/chain/validation/attestation.ts b/packages/beacon-node/src/chain/validation/attestation.ts index 31e105911ab4..eae171631025 100644 --- a/packages/beacon-node/src/chain/validation/attestation.ts +++ b/packages/beacon-node/src/chain/validation/attestation.ts @@ -541,7 +541,7 @@ export function verifyHeadBlockAndTargetRoot( targetRoot: Root, attestationSlot: Slot, attestationEpoch: Epoch, - caller: string, + caller: RegenCaller, maxSkipSlots?: number ): 
ProtoBlock { const headBlock = verifyHeadBlockIsKnown(chain, beaconBlockRoot); diff --git a/packages/beacon-node/src/chain/validation/blobSidecar.ts b/packages/beacon-node/src/chain/validation/blobSidecar.ts index b5aab323c269..f1ea7bfa95c8 100644 --- a/packages/beacon-node/src/chain/validation/blobSidecar.ts +++ b/packages/beacon-node/src/chain/validation/blobSidecar.ts @@ -1,7 +1,7 @@ -import {ChainForkConfig} from "@lodestar/config"; -import {deneb, Root, Slot} from "@lodestar/types"; -import {toHex} from "@lodestar/utils"; -import {getBlobProposerSignatureSet, computeStartSlotAtEpoch} from "@lodestar/state-transition"; +import {deneb, Root, Slot, ssz} from "@lodestar/types"; +import {toHex, verifyMerkleBranch} from "@lodestar/utils"; +import {computeStartSlotAtEpoch, getBlockHeaderProposerSignatureSet} from "@lodestar/state-transition"; +import {KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, KZG_COMMITMENT_SUBTREE_INDEX0} from "@lodestar/params"; import {BlobSidecarGossipError, BlobSidecarErrorCode} from "../errors/blobSidecarError.js"; import {GossipAction} from "../errors/gossipValidation.js"; @@ -11,13 +11,11 @@ import {IBeaconChain} from "../interface.js"; import {RegenCaller} from "../regen/index.js"; export async function validateGossipBlobSidecar( - config: ChainForkConfig, chain: IBeaconChain, - signedBlob: deneb.SignedBlobSidecar, + blobSidecar: deneb.BlobSidecar, gossipIndex: number ): Promise { - const blobSidecar = signedBlob.message; - const blobSlot = blobSidecar.slot; + const blobSlot = blobSidecar.signedBlockHeader.message.slot; // [REJECT] The sidecar is for the correct topic -- i.e. sidecar.index matches the topic {index}. if (blobSidecar.index !== gossipIndex) { @@ -58,9 +56,10 @@ export async function validateGossipBlobSidecar( // reboot if the `observed_block_producers` cache is empty. In that case, without this // check, we will load the parent and state from disk only to find out later that we // already know this block. 
- const blockRoot = toHex(blobSidecar.blockRoot); - if (chain.forkChoice.getBlockHex(blockRoot) !== null) { - throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockRoot}); + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); + const blockHex = toHex(blockRoot); + if (chain.forkChoice.getBlockHex(blockHex) !== null) { + throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockHex}); } // TODO: freetheblobs - check for badblock @@ -69,7 +68,7 @@ export async function validateGossipBlobSidecar( // _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both // gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is // retrieved). - const parentRoot = toHex(blobSidecar.blockParentRoot); + const parentRoot = toHex(blobSidecar.signedBlockHeader.message.parentRoot); const parentBlock = chain.forkChoice.getBlockHex(parentRoot); if (parentBlock === null) { // If fork choice does *not* consider the parent to be a descendant of the finalized block, @@ -97,18 +96,16 @@ export async function validateGossipBlobSidecar( // getBlockSlotState also checks for whether the current finalized checkpoint is an ancestor of the block. // As a result, we throw an IGNORE (whereas the spec says we should REJECT for this scenario). // this is something we should change this in the future to make the code airtight to the spec. 
- // _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both - // gossip and non-gossip sources) // _[REJECT]_ The blob's block's parent (defined by `sidecar.block_parent_root`) passes validation - // The above validation will happen while importing + // [IGNORE] The block's parent (defined by block.parent_root) has been seen (via both gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is retrieved). + // [REJECT] The block's parent (defined by block.parent_root) passes validation. const blockState = await chain.regen - .getBlockSlotState(parentRoot, blobSlot, {dontTransferCache: true}, RegenCaller.validateGossipBlob) + .getBlockSlotState(parentRoot, blobSlot, {dontTransferCache: true}, RegenCaller.validateGossipBlock) .catch(() => { throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.PARENT_UNKNOWN, parentRoot}); }); - // _[REJECT]_ The proposer signature, `signed_blob_sidecar.signature`, is valid with respect to the - // `sidecar.proposer_index` pubkey. - const signatureSet = getBlobProposerSignatureSet(blockState, signedBlob); + // [REJECT] The proposer signature, signed_beacon_block.signature, is valid with respect to the proposer_index pubkey. 
+ const signatureSet = getBlockHeaderProposerSignatureSet(blockState, blobSidecar.signedBlockHeader); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { throw new BlobSidecarGossipError(GossipAction.REJECT, { @@ -116,6 +113,15 @@ export async function validateGossipBlobSidecar( }); } + // verify if the blob inclusion proof is correct + if (!validateInclusionProof(blobSidecar)) { + throw new BlobSidecarGossipError(GossipAction.REJECT, { + code: BlobSidecarErrorCode.INCLUSION_PROOF_INVALID, + slot: blobSidecar.signedBlockHeader.message.slot, + blobIdx: blobSidecar.index, + }); + } + // _[IGNORE]_ The sidecar is the only sidecar with valid signature received for the tuple // `(sidecar.block_root, sidecar.index)` // @@ -127,7 +133,7 @@ export async function validateGossipBlobSidecar( // If the `proposer_index` cannot immediately be verified against the expected shuffling, the sidecar // MAY be queued for later processing while proposers for the block's branch are calculated -- in such // a case _do not_ `REJECT`, instead `IGNORE` this message. 
- const proposerIndex = blobSidecar.proposerIndex; + const proposerIndex = blobSidecar.signedBlockHeader.message.proposerIndex; if (blockState.epochCtx.getBeaconProposer(blobSlot) !== proposerIndex) { throw new BlobSidecarGossipError(GossipAction.REJECT, { code: BlobSidecarErrorCode.INCORRECT_PROPOSER, @@ -168,16 +174,18 @@ export function validateBlobSidecars( const proofs = []; for (let index = 0; index < blobSidecars.length; index++) { const blobSidecar = blobSidecars[index]; + const blobBlockHeader = blobSidecar.signedBlockHeader.message; + const blobBlockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobBlockHeader); if ( - blobSidecar.slot !== blockSlot || - !byteArrayEquals(blobSidecar.blockRoot, blockRoot) || + blobBlockHeader.slot !== blockSlot || + !byteArrayEquals(blobBlockRoot, blockRoot) || blobSidecar.index !== index || !byteArrayEquals(expectedKzgCommitments[index], blobSidecar.kzgCommitment) ) { throw new Error( - `Invalid blob with slot=${blobSidecar.slot} blockRoot=${toHex(blockRoot)} index=${ + `Invalid blob with slot=${blobBlockHeader.slot} blobBlockRoot=${toHex(blobBlockRoot)} index=${ blobSidecar.index - } for the block root=${toHex(blockRoot)} slot=${blockSlot} index=${index}` + } for the block blockRoot=${toHex(blockRoot)} slot=${blockSlot} index=${index}` ); } blobs.push(blobSidecar.blob); @@ -207,3 +215,13 @@ function validateBlobsAndProofs( throw Error("Invalid verifyBlobKzgProofBatch"); } } + +function validateInclusionProof(blobSidecar: deneb.BlobSidecar): boolean { + return verifyMerkleBranch( + ssz.deneb.KZGCommitment.hashTreeRoot(blobSidecar.kzgCommitment), + blobSidecar.kzgCommitmentInclusionProof, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, + KZG_COMMITMENT_SUBTREE_INDEX0 + blobSidecar.index, + blobSidecar.signedBlockHeader.message.bodyRoot + ); +} diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 58b99f2a37e0..07cc47fa54d8 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ 
b/packages/beacon-node/src/db/beacon.ts @@ -21,6 +21,7 @@ import { BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; +import {CheckpointStateRepository} from "./repositories/checkpointState.js"; export type BeaconDbModules = { config: ChainForkConfig; @@ -35,6 +36,7 @@ export class BeaconDb implements IBeaconDb { blobSidecarsArchive: BlobSidecarsArchiveRepository; stateArchive: StateArchiveRepository; + checkpointState: CheckpointStateRepository; voluntaryExit: VoluntaryExitRepository; proposerSlashing: ProposerSlashingRepository; @@ -67,6 +69,7 @@ export class BeaconDb implements IBeaconDb { this.blobSidecarsArchive = new BlobSidecarsArchiveRepository(config, db); this.stateArchive = new StateArchiveRepository(config, db); + this.checkpointState = new CheckpointStateRepository(config, db); this.voluntaryExit = new VoluntaryExitRepository(config, db); this.blsToExecutionChange = new BLSToExecutionChangeRepository(config, db); this.proposerSlashing = new ProposerSlashingRepository(config, db); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 1a3abfa33623..9dffd0608d52 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -28,6 +28,8 @@ export enum Bucket { phase0_proposerSlashing = 14, // ValidatorIndex -> ProposerSlashing phase0_attesterSlashing = 15, // Root -> AttesterSlashing capella_blsToExecutionChange = 16, // ValidatorIndex -> SignedBLSToExecutionChange + // checkpoint states + allForks_checkpointState = 17, // Root -> allForks.BeaconState // allForks_pendingBlock = 25, // Root -> SignedBeaconBlock // DEPRECATED on v0.30.0 phase0_depositEvent = 19, // depositIndex -> DepositEvent diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index 58bf25c57aa7..6ffb8992f635 100644 --- a/packages/beacon-node/src/db/interface.ts +++ 
b/packages/beacon-node/src/db/interface.ts @@ -19,6 +19,7 @@ import { BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; +import {CheckpointStateRepository} from "./repositories/checkpointState.js"; /** * The DB service manages the data layer of the beacon chain @@ -36,6 +37,8 @@ export interface IBeaconDb { // finalized states stateArchive: StateArchiveRepository; + // checkpoint states + checkpointState: CheckpointStateRepository; // op pool voluntaryExit: VoluntaryExitRepository; diff --git a/packages/beacon-node/src/db/repositories/blobSidecars.ts b/packages/beacon-node/src/db/repositories/blobSidecars.ts index 576a03df9e61..e5750ed31b58 100644 --- a/packages/beacon-node/src/db/repositories/blobSidecars.ts +++ b/packages/beacon-node/src/db/repositories/blobSidecars.ts @@ -2,6 +2,7 @@ import {ValueOf, ContainerType} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {Db, Repository} from "@lodestar/db"; import {ssz} from "@lodestar/types"; + import {Bucket, getBucketNameByValue} from "../buckets.js"; export const blobSidecarsWrapperSsz = new ContainerType( @@ -14,10 +15,7 @@ export const blobSidecarsWrapperSsz = new ContainerType( ); export type BlobSidecarsWrapper = ValueOf; - export const BLOB_SIDECARS_IN_WRAPPER_INDEX = 44; -// ssz.deneb.BlobSidecars.elementType.fixedSize; -export const BLOBSIDECAR_FIXED_SIZE = 131256; /** * blobSidecarsWrapper by block root (= hash_tree_root(SignedBeaconBlock.message)) diff --git a/packages/beacon-node/src/db/repositories/checkpointState.ts b/packages/beacon-node/src/db/repositories/checkpointState.ts new file mode 100644 index 000000000000..8848f4d26d3a --- /dev/null +++ b/packages/beacon-node/src/db/repositories/checkpointState.ts @@ -0,0 +1,31 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {BeaconStateAllForks} from 
"@lodestar/state-transition"; +import {ssz} from "@lodestar/types"; +import {Bucket, getBucketNameByValue} from "../buckets.js"; + +/** + * Store temporary checkpoint states. + * We should only put/get binary data from this repository, consumer will load it into an existing state ViewDU object. + */ +export class CheckpointStateRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + // Pick some type but won't be used. Casted to any because no type can match `BeaconStateAllForks` + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + const type = ssz.phase0.BeaconState as any; + const bucket = Bucket.allForks_checkpointState; + super(config, db, bucket, type, getBucketNameByValue(bucket)); + } + + getId(): Uint8Array { + throw Error("CheckpointStateRepository does not work with value"); + } + + encodeValue(): Uint8Array { + throw Error("CheckpointStateRepository does not work with value"); + } + + decodeValue(): BeaconStateAllForks { + throw Error("CheckpointStateRepository does not work with value"); + } +} diff --git a/packages/beacon-node/src/eth1/provider/eth1Provider.ts b/packages/beacon-node/src/eth1/provider/eth1Provider.ts index 2d1feeb8d1e7..d594c74a3abc 100644 --- a/packages/beacon-node/src/eth1/provider/eth1Provider.ts +++ b/packages/beacon-node/src/eth1/provider/eth1Provider.ts @@ -1,7 +1,7 @@ import {toHexString} from "@chainsafe/ssz"; import {phase0} from "@lodestar/types"; import {ChainConfig} from "@lodestar/config"; -import {fromHex, isErrorAborted, createElapsedTimeTracker} from "@lodestar/utils"; +import {fromHex, isErrorAborted, createElapsedTimeTracker, toSafePrintableUrl} from "@lodestar/utils"; import {Logger} from "@lodestar/logger"; import {FetchError, isFetchError} from "@lodestar/api"; @@ -75,7 +75,6 @@ export class Eth1Provider implements IEth1Provider { this.depositContractAddress = toHexString(config.DEPOSIT_CONTRACT_ADDRESS); const providerUrls = 
opts.providerUrls ?? DEFAULT_PROVIDER_URLS; - this.logger?.info("Eth1 provider", {urls: providerUrls.toString()}); this.rpc = new JsonRpcHttpClient(providerUrls, { signal, // Don't fallback with is truncated error. Throw early and let the retry on this class handle it @@ -85,6 +84,7 @@ export class Eth1Provider implements IEth1Provider { jwtVersion: opts.jwtVersion, metrics: metrics, }); + this.logger?.info("Eth1 provider", {urls: providerUrls.map(toSafePrintableUrl).toString()}); this.rpc.emitter.on(JsonRpcHttpClientEvent.RESPONSE, () => { const oldState = this.state; diff --git a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts b/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts index 3a1b4ddb0ce1..faa4e310e10a 100644 --- a/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts +++ b/packages/beacon-node/src/eth1/provider/jsonRpcHttpClient.ts @@ -1,8 +1,7 @@ import {EventEmitter} from "events"; import StrictEventEmitter from "strict-event-emitter-types"; import {fetch} from "@lodestar/api"; -import {ErrorAborted, TimeoutError, isValidHttpUrl, retry} from "@lodestar/utils"; -import {IGauge, IHistogram} from "../../metrics/interface.js"; +import {ErrorAborted, Gauge, Histogram, TimeoutError, isValidHttpUrl, retry} from "@lodestar/utils"; import {IJson, RpcPayload} from "../interface.js"; import {JwtClaim, encodeJwtToken} from "./jwt.js"; @@ -58,13 +57,13 @@ export type ReqOpts = { }; export type JsonRpcHttpClientMetrics = { - requestTime: IHistogram<"routeId">; - streamTime: IHistogram<"routeId">; - requestErrors: IGauge<"routeId">; - requestUsedFallbackUrl: IGauge<"routeId">; - activeRequests: IGauge<"routeId">; - configUrlsCount: IGauge; - retryCount: IGauge<"routeId">; + requestTime: Histogram<{routeId: string}>; + streamTime: Histogram<{routeId: string}>; + requestErrors: Gauge<{routeId: string}>; + requestUsedFallbackUrl: Gauge<{routeId: string}>; + activeRequests: Gauge<{routeId: string}>; + configUrlsCount: Gauge; + retryCount: 
Gauge<{routeId: string}>; }; export interface IJsonRpcHttpClient { diff --git a/packages/beacon-node/src/execution/builder/http.ts b/packages/beacon-node/src/execution/builder/http.ts index 43710bca83e1..c47e8471f199 100644 --- a/packages/beacon-node/src/execution/builder/http.ts +++ b/packages/beacon-node/src/execution/builder/http.ts @@ -1,22 +1,18 @@ import {byteArrayEquals, toHexString} from "@chainsafe/ssz"; import {allForks, bellatrix, Slot, Root, BLSPubkey, ssz, deneb, Wei} from "@lodestar/types"; -import { - parseSignedBlindedBlockOrContents, - parseExecutionPayloadAndBlobsBundle, - reconstructFullBlockOrContents, -} from "@lodestar/state-transition"; +import {parseExecutionPayloadAndBlobsBundle, reconstructFullBlockOrContents} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {Logger} from "@lodestar/logger"; import {getClient, Api as BuilderApi} from "@lodestar/api/builder"; import {SLOTS_PER_EPOCH, ForkExecution} from "@lodestar/params"; - +import {toSafePrintableUrl} from "@lodestar/utils"; import {ApiError} from "@lodestar/api"; import {Metrics} from "../../metrics/metrics.js"; import {IExecutionBuilder} from "./interface.js"; export type ExecutionBuilderHttpOpts = { enabled: boolean; - urls: string[]; + url: string; timeout?: number; faultInspectionWindow?: number; allowedFaults?: number; @@ -29,7 +25,7 @@ export type ExecutionBuilderHttpOpts = { export const defaultExecutionBuilderHttpOpts: ExecutionBuilderHttpOpts = { enabled: false, - urls: ["http://localhost:8661"], + url: "http://localhost:8661", timeout: 12000, }; @@ -48,9 +44,8 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { metrics: Metrics | null = null, logger?: Logger ) { - const baseUrl = opts.urls[0]; + const baseUrl = opts.url; if (!baseUrl) throw Error("No Url provided for executionBuilder"); - logger?.info("External builder", {urls: opts.urls.toString()}); this.api = getClient( { baseUrl, @@ -59,6 +54,7 @@ export class 
ExecutionBuilderHttp implements IExecutionBuilder { }, {config, metrics: metrics?.builderHttpClient} ); + logger?.info("External builder", {url: toSafePrintableUrl(baseUrl)}); this.config = config; this.issueLocalFcUWithFeeRecipient = opts.issueLocalFcUWithFeeRecipient; @@ -110,26 +106,23 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blindedBlobsBundle?: deneb.BlindedBlobsBundle; + blobKzgCommitments?: deneb.BlobKzgCommitments; }> { const res = await this.api.getHeader(slot, parentHash, proposerPubKey); ApiError.assert(res, "execution.builder.getheader"); const {header, value: executionPayloadValue} = res.response.data.message; - const {blindedBlobsBundle} = res.response.data.message as deneb.BuilderBid; - return {header, executionPayloadValue, blindedBlobsBundle}; + const {blobKzgCommitments} = res.response.data.message as deneb.BuilderBid; + return {header, executionPayloadValue, blobKzgCommitments}; } async submitBlindedBlock( - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents + signedBlindedBlock: allForks.SignedBlindedBeaconBlock ): Promise { - const res = await this.api.submitBlindedBlock(signedBlindedBlockOrContents); + const res = await this.api.submitBlindedBlock(signedBlindedBlock); ApiError.assert(res, "execution.builder.submitBlindedBlock"); const {data} = res.response; const {executionPayload, blobsBundle} = parseExecutionPayloadAndBlobsBundle(data); - const {signedBlindedBlock, signedBlindedBlobSidecars} = - parseSignedBlindedBlockOrContents(signedBlindedBlockOrContents); - // some validations for execution payload const expectedTransactionsRoot = signedBlindedBlock.message.body.executionPayloadHeader.transactionsRoot; const actualTransactionsRoot = ssz.bellatrix.Transactions.hashTreeRoot(executionPayload.transactions); @@ -141,7 +134,7 @@ export class ExecutionBuilderHttp implements IExecutionBuilder { ); } - const blobs 
= blobsBundle ? blobsBundle.blobs : null; - return reconstructFullBlockOrContents({signedBlindedBlock, signedBlindedBlobSidecars}, {executionPayload, blobs}); + const contents = blobsBundle ? {blobs: blobsBundle.blobs, kzgProofs: blobsBundle.proofs} : null; + return reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents}); } } diff --git a/packages/beacon-node/src/execution/builder/interface.ts b/packages/beacon-node/src/execution/builder/interface.ts index e9a2cabb69ef..8754a3616610 100644 --- a/packages/beacon-node/src/execution/builder/interface.ts +++ b/packages/beacon-node/src/execution/builder/interface.ts @@ -25,9 +25,7 @@ export interface IExecutionBuilder { ): Promise<{ header: allForks.ExecutionPayloadHeader; executionPayloadValue: Wei; - blindedBlobsBundle?: deneb.BlindedBlobsBundle; + blobKzgCommitments?: deneb.BlobKzgCommitments; }>; - submitBlindedBlock( - signedBlock: allForks.SignedBlindedBeaconBlockOrContents - ): Promise; + submitBlindedBlock(signedBlock: allForks.SignedBlindedBeaconBlock): Promise; } diff --git a/packages/beacon-node/src/execution/engine/index.ts b/packages/beacon-node/src/execution/engine/index.ts index 1692ea61cf92..2d92a439c86d 100644 --- a/packages/beacon-node/src/execution/engine/index.ts +++ b/packages/beacon-node/src/execution/engine/index.ts @@ -1,4 +1,4 @@ -import {fromHex} from "@lodestar/utils"; +import {fromHex, toSafePrintableUrl} from "@lodestar/utils"; import {JsonRpcHttpClient} from "../../eth1/provider/jsonRpcHttpClient.js"; import {IExecutionEngine} from "./interface.js"; import {ExecutionEngineDisabled} from "./disabled.js"; @@ -31,7 +31,6 @@ export function getExecutionEngineHttp( opts: ExecutionEngineHttpOpts, modules: ExecutionEngineModules ): IExecutionEngine { - modules.logger.info("Execution client", {urls: opts.urls.toString()}); const rpc = new JsonRpcHttpClient(opts.urls, { ...opts, signal: modules.signal, @@ -40,6 +39,7 @@ export function getExecutionEngineHttp( jwtId: 
opts.jwtId, jwtVersion: opts.jwtVersion, }); + modules.logger.info("Execution client", {urls: opts.urls.map(toSafePrintableUrl).toString()}); return new ExecutionEngineHttp(rpc, modules); } diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts index 83a5ea3a7ed6..5779713435a5 100644 --- a/packages/beacon-node/src/execution/engine/mock.ts +++ b/packages/beacon-node/src/execution/engine/mock.ts @@ -1,5 +1,4 @@ import crypto from "node:crypto"; -import {kzgCommitmentToVersionedHash} from "@lodestar/state-transition"; import {bellatrix, deneb, RootHex, ssz} from "@lodestar/types"; import {fromHex, toHex} from "@lodestar/utils"; import { @@ -12,6 +11,7 @@ import { } from "@lodestar/params"; import {ZERO_HASH_HEX} from "../../constants/index.js"; import {ckzg} from "../../util/kzg.js"; +import {kzgCommitmentToVersionedHash} from "../../util/blobs.js"; import {quantityToNum} from "../../eth1/provider/utils.js"; import { EngineApiRpcParamTypes, diff --git a/packages/beacon-node/src/metrics/index.ts b/packages/beacon-node/src/metrics/index.ts index fb2781333d66..a56591a04090 100644 --- a/packages/beacon-node/src/metrics/index.ts +++ b/packages/beacon-node/src/metrics/index.ts @@ -1,5 +1,4 @@ export * from "./metrics.js"; export * from "./server/index.js"; -export * from "./interface.js"; export * from "./nodeJsMetrics.js"; export {RegistryMetricCreator} from "./utils/registryMetricCreator.js"; diff --git a/packages/beacon-node/src/metrics/interface.ts b/packages/beacon-node/src/metrics/interface.ts deleted file mode 100644 index 2e2a267ca13c..000000000000 --- a/packages/beacon-node/src/metrics/interface.ts +++ /dev/null @@ -1,14 +0,0 @@ -import {Gauge, Histogram} from "prom-client"; - -type CollectFn = (metric: IGauge) => void; - -export type IGauge = Pick, "inc" | "dec" | "set"> & { - addCollect: (collectFn: CollectFn) => void; -}; - -export type IHistogram = Pick, "observe" | "startTimer">; - -export type 
IAvgMinMax = { - addGetValuesFn(getValuesFn: () => number[]): void; - set(values: number[]): void; -}; diff --git a/packages/beacon-node/src/metrics/metrics/beacon.ts b/packages/beacon-node/src/metrics/metrics/beacon.ts index 8d9094f19a25..9366174ef6c6 100644 --- a/packages/beacon-node/src/metrics/metrics/beacon.ts +++ b/packages/beacon-node/src/metrics/metrics/beacon.ts @@ -1,4 +1,6 @@ +import {ProducedBlockSource} from "@lodestar/types"; import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; +import {BlockProductionStep, PayloadPreparationType} from "../../chain/produceBlock/index.js"; export type BeaconMetrics = ReturnType; @@ -46,7 +48,7 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { // Additional Metrics // TODO: Implement - currentValidators: register.gauge<"status">({ + currentValidators: register.gauge<{status: string}>({ name: "beacon_current_validators", labelNames: ["status"], help: "number of validators in current epoch", @@ -115,55 +117,35 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { buckets: [1, 2, 3, 5, 7, 10, 20, 30, 50, 100], }), - blockProductionTime: register.histogram<"source">({ + blockProductionTime: register.histogram<{source: ProducedBlockSource}>({ name: "beacon_block_production_seconds", help: "Full runtime of block production", buckets: [0.1, 1, 2, 4, 10], labelNames: ["source"], }), - executionBlockProductionTimeSteps: register.histogram<"step">({ + executionBlockProductionTimeSteps: register.histogram<{step: BlockProductionStep}>({ name: "beacon_block_production_execution_steps_seconds", help: "Detailed steps runtime of execution block production", buckets: [0.01, 0.1, 0.2, 0.5, 1], - /** - * - proposerSlashing - * - attesterSlashings - * - voluntaryExits - * - blsToExecutionChanges - * - attestations - * - eth1DataAndDeposits - * - syncAggregate - * - executionPayload - */ labelNames: ["step"], }), - builderBlockProductionTimeSteps: register.histogram<"step">({ + 
builderBlockProductionTimeSteps: register.histogram<{step: BlockProductionStep}>({ name: "beacon_block_production_builder_steps_seconds", help: "Detailed steps runtime of builder block production", buckets: [0.01, 0.1, 0.2, 0.5, 1], - /** - * - proposerSlashing - * - attesterSlashings - * - voluntaryExits - * - blsToExecutionChanges - * - attestations - * - eth1DataAndDeposits - * - syncAggregate - * - executionPayload - */ labelNames: ["step"], }), - blockProductionRequests: register.gauge<"source">({ + blockProductionRequests: register.gauge<{source: ProducedBlockSource}>({ name: "beacon_block_production_requests_total", help: "Count of all block production requests", labelNames: ["source"], }), - blockProductionSuccess: register.gauge<"source">({ + blockProductionSuccess: register.gauge<{source: ProducedBlockSource}>({ name: "beacon_block_production_successes_total", help: "Count of blocks successfully produced", labelNames: ["source"], }), - blockProductionNumAggregated: register.histogram<"source">({ + blockProductionNumAggregated: register.histogram<{source: ProducedBlockSource}>({ name: "beacon_block_production_num_aggregated_total", help: "Count of all aggregated attestations in our produced block", buckets: [32, 64, 96, 128], @@ -173,34 +155,30 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { blockProductionCaches: { producedBlockRoot: register.gauge({ name: "beacon_blockroot_produced_cache_total", - help: "Count of cached produded block roots", + help: "Count of cached produced block roots", }), producedBlindedBlockRoot: register.gauge({ name: "beacon_blinded_blockroot_produced_cache_total", - help: "Count of cached produded blinded block roots", + help: "Count of cached produced blinded block roots", }), - producedBlobSidecarsCache: register.gauge({ - name: "beacon_blobsidecars_produced_cache_total", - help: "Count of cached produced blob sidecars", - }), - producedBlindedBlobSidecarsCache: register.gauge({ - name: 
"beacon_blinded_blobsidecars_produced_cache_total", - help: "Count of cached produced blinded blob sidecars", + producedContentsCache: register.gauge({ + name: "beacon_contents_produced_cache_total", + help: "Count of cached produced blob contents", }), }, blockPayload: { payloadAdvancePrepTime: register.histogram({ name: "beacon_block_payload_prepare_time", - help: "Time for perparing payload in advance", + help: "Time for preparing payload in advance", buckets: [0.1, 1, 3, 5, 10], }), - payloadFetchedTime: register.histogram<"prepType">({ + payloadFetchedTime: register.histogram<{prepType: PayloadPreparationType}>({ name: "beacon_block_payload_fetched_time", help: "Time to fetch the payload from EL", labelNames: ["prepType"], }), - emptyPayloads: register.gauge<"prepType">({ + emptyPayloads: register.gauge<{prepType: PayloadPreparationType}>({ name: "beacon_block_payload_empty_total", help: "Count of payload with empty transactions", labelNames: ["prepType"], diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index a68fdae0551f..ea2251b3dce5 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -1,6 +1,22 @@ +import {EpochTransitionStep, StateCloneSource, StateHashTreeRootSource} from "@lodestar/state-transition"; import {allForks} from "@lodestar/types"; -import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; +import {BlockSource} from "../../chain/blocks/types.js"; +import {JobQueueItemType} from "../../chain/bls/index.js"; +import {BlockErrorCode} from "../../chain/errors/index.js"; +import {InsertOutcome} from "../../chain/opPools/types.js"; +import {RegenCaller, RegenFnName} from "../../chain/regen/interface.js"; +import {ReprocessStatus} from "../../chain/reprocess.js"; +import {RejectReason} from "../../chain/seenCache/seenAttestationData.js"; +import {ExecutionPayloadStatus} from 
"../../execution/index.js"; +import {GossipType} from "../../network/index.js"; +import {CannotAcceptWorkReason, ReprocessRejectReason} from "../../network/processor/index.js"; +import {BackfillSyncMethod} from "../../sync/backfill/backfill.js"; +import {PendingBlockType} from "../../sync/interface.js"; +import {PeerSyncType, RangeSyncType} from "../../sync/utils/remoteSyncType.js"; import {LodestarMetadata} from "../options.js"; +import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; +import {OpSource} from "../validatorMonitor.js"; +import {CacheItemType} from "../../chain/stateCache/types.js"; export type LodestarMetrics = ReturnType; @@ -14,7 +30,7 @@ export function createLodestarMetrics( anchorState?: Pick ) { if (metadata) { - register.static({ + register.static({ name: "lodestar_version", help: "Lodestar version", value: metadata, @@ -33,34 +49,34 @@ export function createLodestarMetrics( return { gossipValidationQueue: { - length: register.gauge<"topic">({ + length: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_length", help: "Count of total gossip validation queue length", labelNames: ["topic"], }), - keySize: register.gauge<"topic">({ + keySize: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_key_size", help: "Count of total gossip validation queue key size", labelNames: ["topic"], }), - droppedJobs: register.gauge<"topic">({ + droppedJobs: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_dropped_jobs_total", help: "Count of total gossip validation queue dropped jobs", labelNames: ["topic"], }), - jobTime: register.histogram<"topic">({ + jobTime: register.histogram<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_job_time_seconds", help: "Time to process gossip validation queue job in seconds", labelNames: ["topic"], buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], }), - jobWaitTime: register.histogram<"topic">({ + 
jobWaitTime: register.histogram<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_job_wait_time_seconds", help: "Time from job added to the queue to starting the job in seconds", labelNames: ["topic"], buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], }), - concurrency: register.gauge<"topic">({ + concurrency: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_queue_concurrency", help: "Current count of jobs being run on network processor for topic", labelNames: ["topic"], @@ -79,22 +95,22 @@ export function createLodestarMetrics( }, networkProcessor: { - gossipValidationAccept: register.gauge<"topic">({ + gossipValidationAccept: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_accept_total", help: "Count of total gossip validation accept", labelNames: ["topic"], }), - gossipValidationIgnore: register.gauge<"topic">({ + gossipValidationIgnore: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_ignore_total", help: "Count of total gossip validation ignore", labelNames: ["topic"], }), - gossipValidationReject: register.gauge<"topic">({ + gossipValidationReject: register.gauge<{topic: GossipType}>({ name: "lodestar_gossip_validation_reject_total", help: "Count of total gossip validation reject", labelNames: ["topic"], }), - gossipValidationError: register.gauge<"topic" | "error">({ + gossipValidationError: register.gauge<{topic: GossipType; error: string}>({ name: "lodestar_gossip_validation_error_total", help: "Count of total gossip validation errors detailed", labelNames: ["topic", "error"], @@ -108,7 +124,7 @@ export function createLodestarMetrics( help: "Total calls to network processor execute work fn", buckets: [0, 1, 5, 128], }), - canNotAcceptWork: register.gauge<"reason">({ + canNotAcceptWork: register.gauge<{reason: CannotAcceptWorkReason}>({ name: "lodestar_network_processor_can_not_accept_work_total", help: "Total times network processor can not accept work on 
executeWork", labelNames: ["reason"], @@ -121,7 +137,7 @@ export function createLodestarMetrics( help: "Current count of pending items in reqRespBridgeReqCaller data structure", }), }, - networkWorkerWireEventsOnMainThreadLatency: register.histogram<"eventName">({ + networkWorkerWireEventsOnMainThreadLatency: register.histogram<{eventName: string}>({ name: "lodestar_network_worker_wire_events_on_main_thread_latency_seconds", help: "Latency in seconds to transmit network events to main thread across worker port", labelNames: ["eventName"], @@ -206,19 +222,19 @@ export function createLodestarMetrics( }, apiRest: { - responseTime: register.histogram<"operationId">({ + responseTime: register.histogram<{operationId: string}>({ name: "lodestar_api_rest_response_time_seconds", help: "REST API time to fulfill a request by operationId", labelNames: ["operationId"], // Request times range between 1ms to 100ms in normal conditions. Can get to 1-5 seconds if overloaded buckets: [0.01, 0.1, 1], }), - requests: register.gauge<"operationId">({ + requests: register.gauge<{operationId: string}>({ name: "lodestar_api_rest_requests_total", help: "REST API total count requests by operationId", labelNames: ["operationId"], }), - errors: register.gauge<"operationId">({ + errors: register.gauge<{operationId: string}>({ name: "lodestar_api_rest_errors_total", help: "REST API total count of errors by operationId", labelNames: ["operationId"], @@ -286,6 +302,12 @@ export function createLodestarMetrics( help: "Time to call commit after process a single epoch transition in seconds", buckets: [0.01, 0.05, 0.1, 0.2, 0.5, 0.75, 1], }), + epochTransitionStepTime: register.histogram<{step: EpochTransitionStep}>({ + name: "lodestar_stfn_epoch_transition_step_seconds", + help: "Time to call each step of epoch transition in seconds", + labelNames: ["step"], + buckets: [0.01, 0.05, 0.1, 0.2, 0.5, 0.75, 1], + }), processBlockTime: register.histogram({ name: "lodestar_stfn_process_block_seconds", help: 
"Time to process a single block in seconds", @@ -298,27 +320,28 @@ export function createLodestarMetrics( help: "Time to call commit after process a single block in seconds", buckets: [0.005, 0.01, 0.02, 0.05, 0.1, 1], }), - stateHashTreeRootTime: register.histogram({ + stateHashTreeRootTime: register.histogram<{source: StateHashTreeRootSource}>({ name: "lodestar_stfn_hash_tree_root_seconds", help: "Time to compute the hash tree root of a post state in seconds", - buckets: [0.005, 0.01, 0.02, 0.05, 0.1, 1], + buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5], + labelNames: ["source"], }), - preStateBalancesNodesPopulatedMiss: register.gauge<"source">({ + preStateBalancesNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_balances_nodes_populated_miss_total", help: "Total count state.balances nodesPopulated is false on stfn", labelNames: ["source"], }), - preStateBalancesNodesPopulatedHit: register.gauge<"source">({ + preStateBalancesNodesPopulatedHit: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_balances_nodes_populated_hit_total", help: "Total count state.balances nodesPopulated is true on stfn", labelNames: ["source"], }), - preStateValidatorsNodesPopulatedMiss: register.gauge<"source">({ + preStateValidatorsNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_validators_nodes_populated_miss_total", help: "Total count state.validators nodesPopulated is false on stfn", labelNames: ["source"], }), - preStateValidatorsNodesPopulatedHit: register.gauge<"source">({ + preStateValidatorsNodesPopulatedHit: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_validators_nodes_populated_hit_total", help: "Total count state.validators nodesPopulated is true on stfn", labelNames: ["source"], @@ -355,7 +378,7 @@ export function createLodestarMetrics( }, blsThreadPool: { - jobsWorkerTime: register.gauge<"workerId">({ + jobsWorkerTime: register.gauge<{workerId: number}>({ name: 
"lodestar_bls_thread_pool_time_seconds_sum", help: "Total time spent verifying signature sets measured on the worker", labelNames: ["workerId"], @@ -364,7 +387,7 @@ export function createLodestarMetrics( name: "lodestar_bls_thread_pool_success_jobs_signature_sets_count", help: "Count of total verified signature sets", }), - errorAggregateSignatureSetsCount: register.gauge<"type">({ + errorAggregateSignatureSetsCount: register.gauge<{type: JobQueueItemType}>({ name: "lodestar_bls_thread_pool_error_aggregate_signature_sets_count", help: "Count of error when aggregating pubkeys or signatures", labelNames: ["type"], @@ -390,12 +413,12 @@ export function createLodestarMetrics( name: "lodestar_bls_thread_pool_job_groups_started_total", help: "Count of total jobs groups started in bls thread pool, job groups include +1 jobs", }), - totalJobsStarted: register.gauge<"type">({ + totalJobsStarted: register.gauge<{type: JobQueueItemType}>({ name: "lodestar_bls_thread_pool_jobs_started_total", help: "Count of total jobs started in bls thread pool, jobs include +1 signature sets", labelNames: ["type"], }), - totalSigSetsStarted: register.gauge<"type">({ + totalSigSetsStarted: register.gauge<{type: JobQueueItemType}>({ name: "lodestar_bls_thread_pool_sig_sets_started_total", help: "Count of total signature sets started in bls thread pool, sig sets include 1 pk, msg, sig", labelNames: ["type"], @@ -453,9 +476,15 @@ export function createLodestarMetrics( name: "lodestar_bls_thread_pool_batchable_sig_sets_total", help: "Count of total batchable signature sets", }), - signatureDeserializationMainThreadDuration: register.gauge({ + signatureDeserializationMainThreadDuration: register.histogram({ name: "lodestar_bls_thread_pool_signature_deserialization_main_thread_time_seconds", help: "Total time spent deserializing signatures on main thread", + buckets: [0.001, 0.005, 0.01, 0.1], + }), + pubkeysAggregationMainThreadDuration: register.histogram({ + name: 
"lodestar_bls_thread_pool_pubkeys_aggregation_main_thread_time_seconds", + help: "Total time spent aggregating pubkeys on main thread", + buckets: [0.001, 0.005, 0.01, 0.1], }), }, @@ -480,29 +509,29 @@ export function createLodestarMetrics( name: "lodestar_sync_status", help: "Range sync status: [Stalled, SyncingFinalized, SyncingHead, Synced]", }), - syncPeersBySyncType: register.gauge<"syncType">({ + syncPeersBySyncType: register.gauge<{syncType: PeerSyncType}>({ name: "lodestar_sync_range_sync_peers", help: "Count of peers by sync type [FullySynced, Advanced, Behind]", labelNames: ["syncType"], }), - syncSwitchGossipSubscriptions: register.gauge<"action">({ + syncSwitchGossipSubscriptions: register.gauge<{action: string}>({ name: "lodestar_sync_switch_gossip_subscriptions", help: "Sync switched gossip subscriptions on/off", labelNames: ["action"], }), syncRange: { - syncChainsEvents: register.gauge<"syncType" | "event">({ + syncChainsEvents: register.gauge<{syncType: RangeSyncType; event: string}>({ name: "lodestar_sync_chains_events_total", help: "Total number of sync chains events events, labeled by syncType", labelNames: ["syncType", "event"], }), - syncChains: register.gauge<"syncType">({ + syncChains: register.gauge<{syncType: RangeSyncType}>({ name: "lodestar_sync_chains_count", help: "Count of sync chains by syncType", labelNames: ["syncType"], }), - syncChainsPeers: register.histogram<"syncType">({ + syncChainsPeers: register.histogram<{syncType: RangeSyncType}>({ name: "lodestar_sync_chains_peer_count_by_type", help: "Count of sync chain peers by syncType", labelNames: ["syncType"], @@ -515,12 +544,12 @@ export function createLodestarMetrics( }, syncUnknownBlock: { - switchNetworkSubscriptions: register.gauge<"action">({ + switchNetworkSubscriptions: register.gauge<{action: string}>({ name: "lodestar_sync_unknown_block_network_subscriptions_count", help: "Switch network subscriptions on/off", labelNames: ["action"], }), - requests: 
register.gauge<"type">({ + requests: register.gauge<{type: PendingBlockType}>({ name: "lodestar_sync_unknown_block_requests_total", help: "Total number of unknown block events or requests", labelNames: ["type"], @@ -574,43 +603,43 @@ export function createLodestarMetrics( // Gossip attestation gossipAttestation: { - useHeadBlockState: register.gauge<"caller">({ + useHeadBlockState: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_use_head_block_state_count", help: "Count of gossip attestation verification using head block state", labelNames: ["caller"], }), - useHeadBlockStateDialedToTargetEpoch: register.gauge<"caller">({ + useHeadBlockStateDialedToTargetEpoch: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_use_head_block_state_dialed_to_target_epoch_count", help: "Count of gossip attestation verification using head block state and dialed to target epoch", labelNames: ["caller"], }), - headSlotToAttestationSlot: register.histogram<"caller">({ + headSlotToAttestationSlot: register.histogram<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_head_slot_to_attestation_slot", help: "Slot distance between attestation slot and head slot", labelNames: ["caller"], buckets: [0, 1, 2, 4, 8, 16, 32, 64], }), - shufflingCacheHit: register.gauge<"caller">({ + shufflingCacheHit: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_hit_count", help: "Count of gossip attestation verification shuffling cache hit", labelNames: ["caller"], }), - shufflingCacheMiss: register.gauge<"caller">({ + shufflingCacheMiss: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_miss_count", help: "Count of gossip attestation verification shuffling cache miss", labelNames: ["caller"], }), - shufflingCacheRegenHit: register.gauge<"caller">({ + shufflingCacheRegenHit: register.gauge<{caller: RegenCaller}>({ name: 
"lodestar_gossip_attestation_shuffling_cache_regen_hit_count", help: "Count of gossip attestation verification shuffling cache regen hit", labelNames: ["caller"], }), - shufflingCacheRegenMiss: register.gauge<"caller">({ + shufflingCacheRegenMiss: register.gauge<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_shuffling_cache_regen_miss_count", help: "Count of gossip attestation verification shuffling cache regen miss", labelNames: ["caller"], }), - attestationSlotToClockSlot: register.histogram<"caller">({ + attestationSlotToClockSlot: register.histogram<{caller: RegenCaller}>({ name: "lodestar_gossip_attestation_attestation_slot_to_clock_slot", help: "Slot distance between clock slot and attestation slot", labelNames: ["caller"], @@ -642,29 +671,46 @@ export function createLodestarMetrics( receivedToGossipValidate: register.histogram({ name: "lodestar_gossip_block_received_to_gossip_validate", help: "Time elapsed between block received and block validated", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), receivedToStateTransition: register.histogram({ name: "lodestar_gossip_block_received_to_state_transition", help: "Time elapsed between block received and block state transition", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), receivedToSignaturesVerification: register.histogram({ name: "lodestar_gossip_block_received_to_signatures_verification", help: "Time elapsed between block received and block signatures verification", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), receivedToExecutionPayloadVerification: register.histogram({ name: "lodestar_gossip_block_received_to_execution_payload_verification", help: "Time elapsed between block received and execution payload verification", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + 
buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + }), + receivedToBlobsAvailabilityTime: register.histogram<{numBlobs: number}>({ + name: "lodestar_gossip_block_received_to_blobs_availability_time", + help: "Time elapsed between block received and blobs became available", + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + labelNames: ["numBlobs"], + }), + receivedToFullyVerifiedTime: register.histogram({ + name: "lodestar_gossip_block_received_to_fully_verified_time", + help: "Time elapsed between block received and fully verified state, signatures and payload", + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + }), + verifiedToBlobsAvailabiltyTime: register.histogram<{numBlobs: number}>({ + name: "lodestar_gossip_block_verified_to_blobs_availability_time", + help: "Time elapsed between block verified and blobs became available", + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], + labelNames: ["numBlobs"], }), receivedToBlockImport: register.histogram({ name: "lodestar_gossip_block_received_to_block_import", help: "Time elapsed between block received and block import", - buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5, 2, 4], + buckets: [0.05, 0.1, 0.3, 0.5, 0.7, 1, 1.3, 1.6, 2, 2.5, 3, 3.5, 4], }), - processBlockErrors: register.gauge<"error">({ + processBlockErrors: register.gauge<{error: BlockErrorCode | "NOT_BLOCK_ERROR"}>({ name: "lodestar_gossip_block_process_block_errors", help: "Count of errors, by error type, while processing blocks", labelNames: ["error"], @@ -695,13 +741,13 @@ export function createLodestarMetrics( name: "lodestar_import_block_set_head_after_first_interval_total", help: "Total times an imported block is set as head after the first slot interval", }), - bySource: register.gauge<"source">({ + bySource: register.gauge<{source: BlockSource}>({ name: "lodestar_import_block_by_source_total", help: "Total number of imported blocks by source", labelNames: ["source"], }), 
}, - engineNotifyNewPayloadResult: register.gauge<"result">({ + engineNotifyNewPayloadResult: register.gauge<{result: ExecutionPayloadStatus}>({ name: "lodestar_execution_engine_notify_new_payload_result_total", help: "The total result of calling notifyNewPayload execution engine api", labelNames: ["result"], @@ -715,7 +761,7 @@ export function createLodestarMetrics( name: "lodestar_backfill_prev_fin_or_ws_slot", help: "Slot of previous finalized or wsCheckpoint block to be validated", }), - totalBlocks: register.gauge<"method">({ + totalBlocks: register.gauge<{method: BackfillSyncMethod}>({ name: "lodestar_backfill_sync_blocks_total", help: "Total amount of backfilled blocks", labelNames: ["method"], @@ -746,7 +792,7 @@ export function createLodestarMetrics( name: "lodestar_oppool_attestation_pool_size", help: "Current size of the AttestationPool = total attestations unique by data and slot", }), - attestationPoolInsertOutcome: register.counter<"insertOutcome">({ + attestationPoolInsertOutcome: register.counter<{insertOutcome: InsertOutcome}>({ name: "lodestar_attestation_pool_insert_outcome_total", help: "Total number of InsertOutcome as a result of adding an attestation in a pool", labelNames: ["insertOutcome"], @@ -771,7 +817,7 @@ export function createLodestarMetrics( name: "lodestar_oppool_sync_committee_message_pool_size", help: "Current size of the SyncCommitteeMessagePool unique by slot subnet and block root", }), - syncCommitteeMessagePoolInsertOutcome: register.counter<"insertOutcome">({ + syncCommitteeMessagePoolInsertOutcome: register.counter<{insertOutcome: InsertOutcome}>({ name: "lodestar_oppool_sync_committee_message_insert_outcome_total", help: "Total number of InsertOutcome as a result of adding a SyncCommitteeMessage to pool", labelNames: ["insertOutcome"], @@ -797,7 +843,7 @@ export function createLodestarMetrics( // Validator Monitor Metrics (per-epoch summaries) // Only track prevEpochOnChainBalance per index - prevEpochOnChainBalance: 
register.gauge<"index">({ + prevEpochOnChainBalance: register.gauge<{index: number}>({ name: "validator_monitor_prev_epoch_on_chain_balance", help: "Balance of validator after an epoch", labelNames: ["index"], @@ -906,12 +952,12 @@ export function createLodestarMetrics( help: "The count of times a sync signature was seen inside an aggregate", buckets: [0, 1, 2, 3, 5, 10], }), - prevEpochAttestationSummary: register.gauge<"summary">({ + prevEpochAttestationSummary: register.gauge<{summary: string}>({ name: "validator_monitor_prev_epoch_attestation_summary", help: "Best guess of the node of the result of previous epoch validators attestation actions and causality", labelNames: ["summary"], }), - prevEpochBlockProposalSummary: register.gauge<"summary">({ + prevEpochBlockProposalSummary: register.gauge<{summary: string}>({ name: "validator_monitor_prev_epoch_block_proposal_summary", help: "Best guess of the node of the result of previous epoch validators block proposal actions and causality", labelNames: ["summary"], @@ -919,12 +965,12 @@ export function createLodestarMetrics( // Validator Monitor Metrics (real-time) - unaggregatedAttestationTotal: register.gauge<"src">({ + unaggregatedAttestationTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_unaggregated_attestation_total", help: "Number of unaggregated attestations seen", labelNames: ["src"], }), - unaggregatedAttestationDelaySeconds: register.histogram<"src">({ + unaggregatedAttestationDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_unaggregated_attestation_delay_seconds", help: "The delay between when the validator should send the attestation and when it was received", labelNames: ["src"], @@ -938,23 +984,23 @@ export function createLodestarMetrics( // refine if we want more reasonable values buckets: [0, 10, 20, 30], }), - aggregatedAttestationTotal: register.gauge<"src">({ + aggregatedAttestationTotal: register.gauge<{src: OpSource}>({ name: 
"validator_monitor_aggregated_attestation_total", help: "Number of aggregated attestations seen", labelNames: ["src"], }), - aggregatedAttestationDelaySeconds: register.histogram<"src">({ + aggregatedAttestationDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_aggregated_attestation_delay_seconds", help: "The delay between then the validator should send the aggregate and when it was received", labelNames: ["src"], buckets: [0.1, 0.25, 0.5, 1, 2, 5, 10], }), - attestationInAggregateTotal: register.gauge<"src">({ + attestationInAggregateTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_attestation_in_aggregate_total", help: "Number of times an attestation has been seen in an aggregate", labelNames: ["src"], }), - attestationInAggregateDelaySeconds: register.histogram<"src">({ + attestationInAggregateDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_attestation_in_aggregate_delay_seconds", help: "The delay between when the validator should send the aggregate and when it was received", labelNames: ["src"], @@ -978,12 +1024,12 @@ export function createLodestarMetrics( name: "validator_monitor_sync_signature_in_aggregate_total", help: "Number of times a sync signature has been seen in an aggregate", }), - beaconBlockTotal: register.gauge<"src">({ + beaconBlockTotal: register.gauge<{src: OpSource}>({ name: "validator_monitor_beacon_block_total", help: "Total number of beacon blocks seen", labelNames: ["src"], }), - beaconBlockDelaySeconds: register.histogram<"src">({ + beaconBlockDelaySeconds: register.histogram<{src: OpSource}>({ name: "validator_monitor_beacon_block_delay_seconds", help: "The delay between when the validator should send the block and when it was received", labelNames: ["src"], @@ -1053,13 +1099,15 @@ export function createLodestarMetrics( name: "lodestar_cp_state_cache_adds_total", help: "Total number of items added in checkpoint state cache", }), - size: register.gauge({ + size: 
register.gauge<{type: CacheItemType}>({ name: "lodestar_cp_state_cache_size", help: "Checkpoint state cache size", + labelNames: ["type"], }), - epochSize: register.gauge({ + epochSize: register.gauge<{type: CacheItemType}>({ name: "lodestar_cp_state_epoch_size", help: "Checkpoint state cache size", + labelNames: ["type"], }), reads: register.avgMinMax({ name: "lodestar_cp_state_epoch_reads", @@ -1074,6 +1122,44 @@ export function createLodestarMetrics( help: "Histogram of cloned count per state every time state.clone() is called", buckets: [1, 2, 5, 10, 50, 250], }), + statePersistDuration: register.histogram({ + name: "lodestar_cp_state_cache_state_persist_seconds", + help: "Histogram of time to persist state to db", + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + statePruneFromMemoryCount: register.gauge({ + name: "lodestar_cp_state_cache_state_prune_from_memory_count", + help: "Total number of states pruned from memory", + }), + statePersistSecFromSlot: register.histogram({ + name: "lodestar_cp_state_cache_state_persist_seconds_from_slot", + help: "Histogram of time to persist state to db since the clock slot", + buckets: [0, 2, 4, 6, 8, 10, 12], + }), + stateReloadDuration: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_seconds", + help: "Histogram of time to load state from db", + buckets: [0, 2, 4, 6, 8, 10, 12], + }), + stateReloadEpochDiff: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_epoch_diff", + help: "Histogram of epoch difference between seed state epoch and loaded state epoch", + buckets: [0, 1, 2, 4, 8, 16, 32], + }), + stateReloadSecFromSlot: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_seconds_from_slot", + help: "Histogram of time to load state from db since the clock slot", + buckets: [0, 2, 4, 6, 8, 10, 12], + }), + stateReloadDbReadTime: register.histogram({ + name: "lodestar_cp_state_cache_state_reload_db_read_seconds", + help: "Histogram of time to load state bytes from db", + 
buckets: [0.01, 0.05, 0.1, 0.2, 0.5], + }), + persistedStateRemoveCount: register.gauge({ + name: "lodestar_cp_state_cache_persisted_state_remove_count", + help: "Total number of persisted states removed", + }), }, balancesCache: { @@ -1085,7 +1171,7 @@ export function createLodestarMetrics( name: "lodestar_balances_cache_misses_total", help: "Total number of balances cache misses", }), - closestStateResult: register.counter<"stateId">({ + closestStateResult: register.counter<{stateId: string}>({ name: "lodestar_balances_cache_closest_state_result_total", help: "Total number of stateIds returned as closest justified balances state by id", labelNames: ["stateId"], @@ -1163,7 +1249,7 @@ export function createLodestarMetrics( name: "lodestar_seen_cache_attestation_data_miss_total", help: "Total number of attestation data miss in SeenAttestationData", }), - reject: register.gauge<"reason">({ + reject: register.gauge<{reason: RejectReason}>({ name: "lodestar_seen_cache_attestation_data_reject_total", help: "Total number of attestation data rejected in SeenAttestationData", labelNames: ["reason"], @@ -1171,23 +1257,23 @@ export function createLodestarMetrics( }, }, - regenFnCallTotal: register.gauge<"entrypoint" | "caller">({ + regenFnCallTotal: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_call_total", help: "Total number of calls for regen functions", labelNames: ["entrypoint", "caller"], }), - regenFnQueuedTotal: register.gauge<"entrypoint" | "caller">({ + regenFnQueuedTotal: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_queued_total", help: "Total number of calls queued for regen functions", labelNames: ["entrypoint", "caller"], }), - regenFnCallDuration: register.histogram<"entrypoint" | "caller">({ + regenFnCallDuration: register.histogram<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_call_duration", help: "regen function duration", labelNames: 
["entrypoint", "caller"], buckets: [0.1, 1, 10, 100], }), - regenFnTotalErrors: register.gauge<"entrypoint" | "caller">({ + regenFnTotalErrors: register.gauge<{entrypoint: RegenFnName; caller: RegenCaller}>({ name: "lodestar_regen_fn_errors_total", help: "regen function total errors", labelNames: ["entrypoint", "caller"], @@ -1199,7 +1285,7 @@ export function createLodestarMetrics( // Precompute next epoch transition precomputeNextEpochTransition: { - count: register.counter<"result">({ + count: register.counter<{result: string}>({ name: "lodestar_precompute_next_epoch_transition_result_total", labelNames: ["result"], help: "Total number of precomputeNextEpochTransition runs by result", @@ -1228,14 +1314,15 @@ export function createLodestarMetrics( name: "lodestar_reprocess_attestations_wait_time_resolve_seconds", help: "Time to wait for unknown block in seconds", }), - reject: register.gauge<"reason">({ + reject: register.gauge<{reason: ReprocessStatus}>({ name: "lodestar_reprocess_attestations_reject_total", help: "Total number of attestations are rejected to reprocess", labelNames: ["reason"], }), - waitSecBeforeReject: register.gauge<"reason">({ + waitSecBeforeReject: register.gauge<{reason: ReprocessStatus}>({ name: "lodestar_reprocess_attestations_wait_time_reject_seconds", help: "Time to wait for unknown block before being rejected", + labelNames: ["reason"], }), }, @@ -1257,24 +1344,25 @@ export function createLodestarMetrics( name: "lodestar_reprocess_gossip_attestations_wait_time_resolve_seconds", help: "Time to wait for unknown block in seconds", }), - reject: register.gauge<"reason">({ + reject: register.gauge<{reason: ReprocessRejectReason}>({ name: "lodestar_reprocess_gossip_attestations_reject_total", help: "Total number of attestations are rejected to reprocess", labelNames: ["reason"], }), - waitSecBeforeReject: register.gauge<"reason">({ + waitSecBeforeReject: register.gauge<{reason: ReprocessRejectReason}>({ name: 
"lodestar_reprocess_gossip_attestations_wait_time_reject_seconds", help: "Time to wait for unknown block before being rejected", + labelNames: ["reason"], }), }, lightclientServer: { - onSyncAggregate: register.gauge<"event">({ + onSyncAggregate: register.gauge<{event: string}>({ name: "lodestar_lightclient_server_on_sync_aggregate_event_total", help: "Total number of relevant events onSyncAggregate fn", labelNames: ["event"], }), - highestSlot: register.gauge<"item">({ + highestSlot: register.gauge<{item: string}>({ name: "lodestar_lightclient_server_highest_slot", help: "Current highest slot of items stored by LightclientServer", labelNames: ["item"], @@ -1385,7 +1473,11 @@ export function createLodestarMetrics( }), // Merge details - eth1MergeBlockDetails: register.gauge<"terminalBlockHash" | "terminalBlockNumber" | "terminalBlockTD">({ + eth1MergeBlockDetails: register.gauge<{ + terminalBlockHash: string; + terminalBlockNumber: string; + terminalBlockTD: string; + }>({ name: "lodestar_eth1_merge_block_details", help: "If found then 1 with terminal block details", labelNames: ["terminalBlockHash", "terminalBlockNumber", "terminalBlockTD"], @@ -1393,36 +1485,36 @@ export function createLodestarMetrics( }, eth1HttpClient: { - requestTime: register.histogram<"routeId">({ + requestTime: register.histogram<{routeId: string}>({ name: "lodestar_eth1_http_client_request_time_seconds", help: "eth1 JsonHttpClient - histogram or roundtrip request times", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - streamTime: register.histogram<"routeId">({ + streamTime: register.histogram<{routeId: string}>({ name: "lodestar_eth1_http_client_stream_time_seconds", help: "eth1 JsonHttpClient - streaming time by routeId", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - requestErrors: register.gauge<"routeId">({ + requestErrors: 
register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_request_errors_total", help: "eth1 JsonHttpClient - total count of request errors", labelNames: ["routeId"], }), - retryCount: register.gauge<"routeId">({ + retryCount: register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_request_retries_total", help: "eth1 JsonHttpClient - total count of request retries", labelNames: ["routeId"], }), - requestUsedFallbackUrl: register.gauge({ + requestUsedFallbackUrl: register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_request_used_fallback_url_total", help: "eth1 JsonHttpClient - total count of requests on fallback url(s)", labelNames: ["routeId"], }), - activeRequests: register.gauge({ + activeRequests: register.gauge<{routeId: string}>({ name: "lodestar_eth1_http_client_active_requests", help: "eth1 JsonHttpClient - current count of active requests", labelNames: ["routeId"], @@ -1434,36 +1526,36 @@ export function createLodestarMetrics( }, executionEnginerHttpClient: { - requestTime: register.histogram<"routeId">({ + requestTime: register.histogram<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_time_seconds", help: "ExecutionEngineHttp client - histogram or roundtrip request times", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - streamTime: register.histogram<"routeId">({ + streamTime: register.histogram<{routeId: string}>({ name: "lodestar_execution_engine_http_client_stream_time_seconds", help: "ExecutionEngineHttp client - streaming time by routeId", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - requestErrors: register.gauge<"routeId">({ + requestErrors: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_errors_total", help: "ExecutionEngineHttp client - total count of request errors", labelNames: 
["routeId"], }), - retryCount: register.gauge<"routeId">({ + retryCount: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_retries_total", help: "ExecutionEngineHttp client - total count of request retries", labelNames: ["routeId"], }), - requestUsedFallbackUrl: register.gauge({ + requestUsedFallbackUrl: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_request_used_fallback_url_total", help: "ExecutionEngineHttp client - total count of requests on fallback url(s)", labelNames: ["routeId"], }), - activeRequests: register.gauge({ + activeRequests: register.gauge<{routeId: string}>({ name: "lodestar_execution_engine_http_client_active_requests", help: "ExecutionEngineHttp client - current count of active requests", labelNames: ["routeId"], @@ -1475,32 +1567,32 @@ export function createLodestarMetrics( }, builderHttpClient: { - requestTime: register.histogram<"routeId">({ + requestTime: register.histogram<{routeId: string}>({ name: "lodestar_builder_http_client_request_time_seconds", help: "Histogram of builder http client request time by routeId", labelNames: ["routeId"], // Expected times are ~ 50-500ms, but in an overload NodeJS they can be greater buckets: [0.01, 0.1, 1, 5], }), - streamTime: register.histogram<"routeId">({ + streamTime: register.histogram<{routeId: string}>({ name: "lodestar_builder_http_client_stream_time_seconds", help: "Builder api - streaming time by routeId", labelNames: ["routeId"], // Provide max resolution on problematic values around 1 second buckets: [0.1, 0.5, 1, 2, 5, 15], }), - requestErrors: register.gauge<"routeId">({ + requestErrors: register.gauge<{routeId: string}>({ name: "lodestar_builder_http_client_request_errors_total", help: "Total count of errors on builder http client requests by routeId", labelNames: ["routeId"], }), - requestToFallbacks: register.gauge<"routeId">({ + requestToFallbacks: register.gauge<{routeId: string}>({ name: 
"lodestar_builder_http_client_request_to_fallbacks_total", help: "Total count of requests to fallback URLs on builder http API by routeId", labelNames: ["routeId"], }), - urlsScore: register.gauge<"urlIndex">({ + urlsScore: register.gauge<{urlIndex: number}>({ name: "lodestar_builder_http_client_urls_score", help: "Current score of builder http URLs by url index", labelNames: ["urlIndex"], @@ -1508,22 +1600,22 @@ export function createLodestarMetrics( }, db: { - dbReadReq: register.gauge<"bucket">({ + dbReadReq: register.gauge<{bucket: string}>({ name: "lodestar_db_read_req_total", help: "Total count of db read requests, may read 0 or more items", labelNames: ["bucket"], }), - dbReadItems: register.gauge<"bucket">({ + dbReadItems: register.gauge<{bucket: string}>({ name: "lodestar_db_read_items_total", help: "Total count of db read items, item = key | value | entry", labelNames: ["bucket"], }), - dbWriteReq: register.gauge<"bucket">({ + dbWriteReq: register.gauge<{bucket: string}>({ name: "lodestar_db_write_req_total", help: "Total count of db write requests, may write 0 or more items", labelNames: ["bucket"], }), - dbWriteItems: register.gauge<"bucket">({ + dbWriteItems: register.gauge<{bucket: string}>({ name: "lodestar_db_write_items_total", help: "Total count of db write items", labelNames: ["bucket"], diff --git a/packages/beacon-node/src/metrics/server/http.ts b/packages/beacon-node/src/metrics/server/http.ts index b699471e07d5..d8fbb289e951 100644 --- a/packages/beacon-node/src/metrics/server/http.ts +++ b/packages/beacon-node/src/metrics/server/http.ts @@ -15,6 +15,11 @@ export type HttpMetricsServer = { close(): Promise; }; +enum RequestStatus { + success = "success", + error = "error", +} + export async function getHttpMetricsServer( opts: HttpMetricsServerOpts, { @@ -26,7 +31,7 @@ export async function getHttpMetricsServer( // New registry to metric the metrics. 
Using the same registry would deadlock the .metrics promise const httpServerRegister = new RegistryMetricCreator(); - const scrapeTimeMetric = httpServerRegister.histogram<"status">({ + const scrapeTimeMetric = httpServerRegister.histogram<{status: RequestStatus}>({ name: "lodestar_metrics_scrape_seconds", help: "Lodestar metrics server async time to scrape metrics", labelNames: ["status"], @@ -40,7 +45,7 @@ export async function getHttpMetricsServer( if (req.method === "GET" && req.url && req.url.includes("/metrics")) { const timer = scrapeTimeMetric.startTimer(); const metricsRes = await Promise.all([wrapError(register.metrics()), getOtherMetrics()]); - timer({status: metricsRes[0].err ? "error" : "success"}); + timer({status: metricsRes[0].err ? RequestStatus.error : RequestStatus.success}); // Ensure we only writeHead once if (metricsRes[0].err) { diff --git a/packages/beacon-node/src/metrics/utils/avgMinMax.ts b/packages/beacon-node/src/metrics/utils/avgMinMax.ts index 43f51c821790..709c83ee38d6 100644 --- a/packages/beacon-node/src/metrics/utils/avgMinMax.ts +++ b/packages/beacon-node/src/metrics/utils/avgMinMax.ts @@ -1,21 +1,21 @@ import {GaugeConfiguration} from "prom-client"; +import {AvgMinMax as IAvgMinMax, LabelKeys, LabelsGeneric} from "@lodestar/utils"; import {GaugeExtra} from "./gauge.js"; type GetValuesFn = () => number[]; -type Labels = Partial>; /** * Special non-standard "Histogram" that captures the avg, min and max of values */ -export class AvgMinMax { - private readonly sum: GaugeExtra; - private readonly avg: GaugeExtra; - private readonly min: GaugeExtra; - private readonly max: GaugeExtra; +export class AvgMinMax implements IAvgMinMax { + private readonly sum: GaugeExtra; + private readonly avg: GaugeExtra; + private readonly min: GaugeExtra; + private readonly max: GaugeExtra; private getValuesFn: GetValuesFn | null = null; - constructor(configuration: GaugeConfiguration) { + constructor(configuration: GaugeConfiguration>) { this.sum = 
new GaugeExtra({...configuration, name: `${configuration.name}_sum`}); this.avg = new GaugeExtra({...configuration, name: `${configuration.name}_avg`}); this.min = new GaugeExtra({...configuration, name: `${configuration.name}_min`}); @@ -33,8 +33,8 @@ export class AvgMinMax { } set(values: number[]): void; - set(labels: Labels, values: number[]): void; - set(arg1?: Labels | number[], arg2?: number[]): void { + set(labels: Labels, values: number[]): void; + set(arg1?: Labels | number[], arg2?: number[]): void { if (arg2 === undefined) { const values = arg1 as number[]; const {sum, avg, min, max} = getStats(values); @@ -44,7 +44,7 @@ export class AvgMinMax { this.max.set(max); } else { const values = (arg2 !== undefined ? arg2 : arg1) as number[]; - const labels = arg1 as Labels; + const labels = arg1 as Labels; const {sum, avg, min, max} = getStats(values); this.sum.set(labels, sum); this.avg.set(labels, avg); diff --git a/packages/beacon-node/src/metrics/utils/gauge.ts b/packages/beacon-node/src/metrics/utils/gauge.ts index fb95fe25d24d..1f527adfcb64 100644 --- a/packages/beacon-node/src/metrics/utils/gauge.ts +++ b/packages/beacon-node/src/metrics/utils/gauge.ts @@ -1,29 +1,16 @@ -import {Gauge, GaugeConfiguration} from "prom-client"; -import {IGauge} from "../interface.js"; - -type CollectFn = (metric: IGauge) => void; -type Labels = Partial>; +import {Gauge} from "prom-client"; +import {CollectFn, Gauge as IGauge, LabelKeys, LabelsGeneric} from "@lodestar/utils"; /** - * Extends the prom-client Gauge with extra features: - * - Add multiple collect functions after instantiation - * - Create child gauges with fixed labels + * Extends the prom-client Gauge to be able to add multiple collect functions after instantiation */ -export class GaugeExtra extends Gauge implements IGauge { - private collectFns: CollectFn[] = []; - - constructor(configuration: GaugeConfiguration) { - super(configuration); - } +export class GaugeExtra extends Gauge> implements IGauge { + 
private collectFns: CollectFn[] = []; - addCollect(collectFn: CollectFn): void { + addCollect(collectFn: CollectFn): void { this.collectFns.push(collectFn); } - child(labels: Labels): GaugeChild { - return new GaugeChild(labels, this); - } - /** * @override Metric.collect */ @@ -33,48 +20,3 @@ export class GaugeExtra extends Gauge implements IGauge { } } } - -export class GaugeChild implements IGauge { - gauge: GaugeExtra; - labelsParent: Labels; - constructor(labelsParent: Labels, gauge: GaugeExtra) { - this.gauge = gauge; - this.labelsParent = labelsParent; - } - - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `inc(value: number, labels?: Labels)`, this would be simpler - inc(value?: number): void; - inc(labels: Labels, value?: number): void; - inc(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.gauge.inc({...this.labelsParent, ...arg1}, arg2 ?? 1); - } else { - this.gauge.inc(this.labelsParent, arg1 ?? 1); - } - } - - dec(value?: number): void; - dec(labels: Labels, value?: number): void; - dec(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.gauge.dec({...this.labelsParent, ...arg1}, arg2 ?? 1); - } else { - this.gauge.dec(this.labelsParent, arg1 ?? 1); - } - } - - set(value: number): void; - set(labels: Labels, value: number): void; - set(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.gauge.set({...this.labelsParent, ...arg1}, arg2 ?? 0); - } else { - this.gauge.set(this.labelsParent, arg1 ?? 
0); - } - } - - addCollect(collectFn: CollectFn): void { - this.gauge.addCollect(() => collectFn(this)); - } -} diff --git a/packages/beacon-node/src/metrics/utils/histogram.ts b/packages/beacon-node/src/metrics/utils/histogram.ts deleted file mode 100644 index 4490929629f2..000000000000 --- a/packages/beacon-node/src/metrics/utils/histogram.ts +++ /dev/null @@ -1,48 +0,0 @@ -import {Histogram, HistogramConfiguration} from "prom-client"; -import {IHistogram} from "../interface.js"; - -type Labels = Partial>; - -/** - * Extends the prom-client Histogram with extra features: - * - Add multiple collect functions after instantiation - * - Create child histograms with fixed labels - */ -export class HistogramExtra extends Histogram implements IHistogram { - constructor(configuration: HistogramConfiguration) { - super(configuration); - } - - child(labels: Labels): HistogramChild { - return new HistogramChild(labels, this); - } -} - -export class HistogramChild implements IHistogram { - histogram: HistogramExtra; - labelsParent: Labels; - constructor(labelsParent: Labels, histogram: HistogramExtra) { - this.histogram = histogram; - this.labelsParent = labelsParent; - } - - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `observe(value: number, labels?: Labels)`, this would be simpler - observe(value?: number): void; - observe(labels: Labels, value?: number): void; - observe(arg1?: Labels | number, arg2?: number): void { - if (typeof arg1 === "object") { - this.histogram.observe({...this.labelsParent, ...arg1}, arg2 ?? 0); - } else { - this.histogram.observe(this.labelsParent, arg1 ?? 
0); - } - } - - startTimer(arg1?: Labels): (labels?: Labels) => number { - if (typeof arg1 === "object") { - return this.histogram.startTimer({...this.labelsParent, ...arg1}); - } else { - return this.histogram.startTimer(this.labelsParent); - } - } -} diff --git a/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts b/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts index 8864eb2c74c4..adec6f984702 100644 --- a/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts +++ b/packages/beacon-node/src/metrics/utils/registryMetricCreator.ts @@ -1,33 +1,41 @@ -import {Gauge, GaugeConfiguration, Registry, HistogramConfiguration, CounterConfiguration, Counter} from "prom-client"; +import {Gauge, Registry, Counter, Histogram} from "prom-client"; +import { + AvgMinMaxConfig, + CounterConfig, + GaugeConfig, + HistogramConfig, + AvgMinMax as IAvgMinMax, + Counter as ICounter, + GaugeExtra as IGaugeExtra, + Histogram as IHistogram, + LabelKeys, + LabelsGeneric, + MetricsRegisterCustom, + NoLabels, + StaticConfig, +} from "@lodestar/utils"; import {AvgMinMax} from "./avgMinMax.js"; import {GaugeExtra} from "./gauge.js"; -import {HistogramExtra} from "./histogram.js"; -type StaticConfiguration = { - name: GaugeConfiguration["name"]; - help: GaugeConfiguration["help"]; - value: Record; -}; - -export class RegistryMetricCreator extends Registry { - gauge(configuration: GaugeConfiguration): GaugeExtra { - return new GaugeExtra({...configuration, registers: [this]}); +export class RegistryMetricCreator extends Registry implements MetricsRegisterCustom { + gauge(configuration: GaugeConfig): IGaugeExtra { + return new GaugeExtra({...configuration, registers: [this]}); } - histogram(configuration: HistogramConfiguration): HistogramExtra { - return new HistogramExtra({...configuration, registers: [this]}); + histogram(configuration: HistogramConfig): IHistogram { + return new Histogram>({...configuration, registers: [this]}); } - avgMinMax(configuration: 
GaugeConfiguration): AvgMinMax { - return new AvgMinMax({...configuration, registers: [this]}); + avgMinMax(configuration: AvgMinMaxConfig): IAvgMinMax { + return new AvgMinMax({...configuration, registers: [this]}); } /** Static metric to send string-based data such as versions, config params, etc */ - static({name, help, value}: StaticConfiguration): void { + static({name, help, value}: StaticConfig): void { new Gauge({name, help, labelNames: Object.keys(value), registers: [this]}).set(value, 1); } - counter(configuration: CounterConfiguration): Counter { - return new Counter({...configuration, registers: [this]}); + counter(configuration: CounterConfig): ICounter { + return new Counter>({...configuration, registers: [this]}); } } diff --git a/packages/beacon-node/src/monitoring/service.ts b/packages/beacon-node/src/monitoring/service.ts index f50f992ebe1f..9581c5f11c92 100644 --- a/packages/beacon-node/src/monitoring/service.ts +++ b/packages/beacon-node/src/monitoring/service.ts @@ -1,8 +1,7 @@ import {Registry} from "prom-client"; import {fetch} from "@lodestar/api"; -import {ErrorAborted, Logger, TimeoutError} from "@lodestar/utils"; +import {ErrorAborted, Histogram, Logger, TimeoutError} from "@lodestar/utils"; import {RegistryMetricCreator} from "../metrics/index.js"; -import {HistogramExtra} from "../metrics/utils/histogram.js"; import {defaultMonitoringOptions, MonitoringOptions} from "./options.js"; import {createClientStats} from "./clientStats.js"; import {ClientStats} from "./types.js"; @@ -25,6 +24,11 @@ enum Status { Closed = "closed", } +enum SendDataStatus { + Success = "success", + Error = "error", +} + export type Client = "beacon" | "validator"; /** @@ -38,8 +42,8 @@ export class MonitoringService { private readonly register: Registry; private readonly logger: Logger; - private readonly collectDataMetric: HistogramExtra; - private readonly sendDataMetric: HistogramExtra<"status">; + private readonly collectDataMetric: Histogram; + private 
readonly sendDataMetric: Histogram<{status: SendDataStatus}>; private status = Status.Started; private initialDelayTimeout?: NodeJS.Timeout; @@ -193,7 +197,7 @@ export class MonitoringService { throw e; } } finally { - timer({status: res?.ok ? "success" : "error"}); + timer({status: res?.ok ? SendDataStatus.Success : SendDataStatus.Error}); clearTimeout(timeout); } } diff --git a/packages/beacon-node/src/network/core/metrics.ts b/packages/beacon-node/src/network/core/metrics.ts index 4f416ad4fba2..0137ce1f0540 100644 --- a/packages/beacon-node/src/network/core/metrics.ts +++ b/packages/beacon-node/src/network/core/metrics.ts @@ -1,4 +1,8 @@ import {RegistryMetricCreator} from "../../metrics/utils/registryMetricCreator.js"; +import {SubnetType} from "../metadata.js"; +import {DiscoveredPeerStatus} from "../peers/discover.js"; +import {SubnetSource} from "../subnets/attnetsService.js"; +import {DLLSubnetSource} from "../subnets/dllAttnetsService.js"; export type NetworkCoreMetrics = ReturnType; @@ -13,12 +17,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "libp2p_peers", help: "number of connected peers", }), - peersByDirection: register.gauge<"direction">({ + peersByDirection: register.gauge<{direction: string}>({ name: "lodestar_peers_by_direction_count", help: "number of peers, labeled by direction", labelNames: ["direction"], }), - peersByClient: register.gauge<"client">({ + peersByClient: register.gauge<{client: string}>({ name: "lodestar_peers_by_client_count", help: "number of peers, labeled by client", labelNames: ["client"], @@ -28,14 +32,14 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { help: "Histogram of current count of long lived attnets of connected peers", buckets: [0, 4, 16, 32, 64], }), - peerScoreByClient: register.histogram<"client">({ + peerScoreByClient: register.histogram<{client: string}>({ name: "lodestar_app_peer_score", help: "Current peer score at lodestar app side", 
// Min score = -100, max score = 100, disconnect = -20, ban = -50 buckets: [-100, -50, -20, 0, 25], labelNames: ["client"], }), - peerGossipScoreByClient: register.histogram<"client">({ + peerGossipScoreByClient: register.histogram<{client: string}>({ name: "lodestar_gossip_score_by_client", help: "Gossip peer score by client", labelNames: ["client"], @@ -53,27 +57,27 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_peers_sync_count", help: "Current count of peers useful for sync", }), - peerConnectedEvent: register.gauge<"direction" | "status">({ + peerConnectedEvent: register.gauge<{direction: string; status: string}>({ name: "lodestar_peer_connected_total", help: "Total number of peer:connected event, labeled by direction", labelNames: ["direction", "status"], }), - peerDisconnectedEvent: register.gauge<"direction">({ + peerDisconnectedEvent: register.gauge<{direction: string}>({ name: "lodestar_peer_disconnected_total", help: "Total number of peer:disconnected event, labeled by direction", labelNames: ["direction"], }), - peerGoodbyeReceived: register.gauge<"reason">({ + peerGoodbyeReceived: register.gauge<{reason: string}>({ name: "lodestar_peer_goodbye_received_total", help: "Total number of goodbye received, labeled by reason", labelNames: ["reason"], }), - peerLongConnectionDisconnect: register.gauge<"reason">({ + peerLongConnectionDisconnect: register.gauge<{reason: string}>({ name: "lodestar_peer_long_connection_disconnect_total", help: "For peers with long connection, track disconnect reason", labelNames: ["reason"], }), - peerGoodbyeSent: register.gauge<"reason">({ + peerGoodbyeSent: register.gauge<{reason: string}>({ name: "lodestar_peer_goodbye_sent_total", help: "Total number of goodbye sent, labeled by reason", labelNames: ["reason"], @@ -82,22 +86,22 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_peers_requested_total_to_connect", help: "Prioritization 
results total peers count requested to connect", }), - peersRequestedToDisconnect: register.gauge<"reason">({ + peersRequestedToDisconnect: register.gauge<{reason: string}>({ name: "lodestar_peers_requested_total_to_disconnect", help: "Prioritization results total peers count requested to disconnect", labelNames: ["reason"], }), - peersRequestedSubnetsToQuery: register.gauge<"type">({ + peersRequestedSubnetsToQuery: register.gauge<{type: SubnetType}>({ name: "lodestar_peers_requested_total_subnets_to_query", help: "Prioritization results total subnets to query and discover peers in", labelNames: ["type"], }), - peersRequestedSubnetsPeerCount: register.gauge<"type">({ + peersRequestedSubnetsPeerCount: register.gauge<{type: SubnetType}>({ name: "lodestar_peers_requested_total_subnets_peers_count", help: "Prioritization results total peers in subnets to query and discover peers in", labelNames: ["type"], }), - peersReportPeerCount: register.gauge<"reason">({ + peersReportPeerCount: register.gauge<{reason: string}>({ name: "lodestar_peers_report_peer_count", help: "network.reportPeer count by reason", labelNames: ["reason"], @@ -115,12 +119,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_peers_to_connect", help: "Current peers to connect count from discoverPeers requests", }), - subnetPeersToConnect: register.gauge<"type">({ + subnetPeersToConnect: register.gauge<{type: SubnetType}>({ name: "lodestar_discovery_subnet_peers_to_connect", help: "Current peers to connect count from discoverPeers requests", labelNames: ["type"], }), - subnetsToConnect: register.gauge<"type">({ + subnetsToConnect: register.gauge<{type: SubnetType}>({ name: "lodestar_discovery_subnets_to_connect", help: "Current subnets to connect count from discoverPeers requests", labelNames: ["type"], @@ -129,7 +133,7 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_cached_enrs_size", help: 
"Current size of the cachedENRs Set", }), - findNodeQueryRequests: register.gauge<"action">({ + findNodeQueryRequests: register.gauge<{action: string}>({ name: "lodestar_discovery_find_node_query_requests_total", help: "Total count of find node queries started", labelNames: ["action"], @@ -143,7 +147,7 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_find_node_query_enrs_total", help: "Total count of found ENRs in queries", }), - discoveredStatus: register.gauge<"status">({ + discoveredStatus: register.gauge<{status: DiscoveredPeerStatus}>({ name: "lodestar_discovery_discovered_status_total_count", help: "Total count of status results of PeerDiscovery.onDiscovered() function", labelNames: ["status"], @@ -152,7 +156,7 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_discovery_total_dial_attempts", help: "Total dial attempts by peer discovery", }), - dialTime: register.histogram<"status">({ + dialTime: register.histogram<{status: string}>({ name: "lodestar_discovery_dial_time_seconds", help: "Time to dial peers in seconds", labelNames: ["status"], @@ -161,62 +165,13 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { }, reqResp: { - rateLimitErrors: register.gauge<"method">({ + rateLimitErrors: register.gauge<{method: string}>({ name: "beacon_reqresp_rate_limiter_errors_total", help: "Count rate limiter errors", labelNames: ["method"], }), }, - gossipValidationAccept: register.gauge<"topic">({ - name: "lodestar_gossip_validation_accept_total", - help: "Count of total gossip validation accept", - labelNames: ["topic"], - }), - gossipValidationIgnore: register.gauge<"topic">({ - name: "lodestar_gossip_validation_ignore_total", - help: "Count of total gossip validation ignore", - labelNames: ["topic"], - }), - gossipValidationReject: register.gauge<"topic">({ - name: "lodestar_gossip_validation_reject_total", - help: "Count of total gossip 
validation reject", - labelNames: ["topic"], - }), - gossipValidationError: register.gauge<"topic" | "error">({ - name: "lodestar_gossip_validation_error_total", - help: "Count of total gossip validation errors detailed", - labelNames: ["topic", "error"], - }), - - gossipValidationQueueLength: register.gauge<"topic">({ - name: "lodestar_gossip_validation_queue_length", - help: "Count of total gossip validation queue length", - labelNames: ["topic"], - }), - gossipValidationQueueDroppedJobs: register.gauge<"topic">({ - name: "lodestar_gossip_validation_queue_dropped_jobs_total", - help: "Count of total gossip validation queue dropped jobs", - labelNames: ["topic"], - }), - gossipValidationQueueJobTime: register.histogram<"topic">({ - name: "lodestar_gossip_validation_queue_job_time_seconds", - help: "Time to process gossip validation queue job in seconds", - labelNames: ["topic"], - buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], - }), - gossipValidationQueueJobWaitTime: register.histogram<"topic">({ - name: "lodestar_gossip_validation_queue_job_wait_time_seconds", - help: "Time from job added to the queue to starting the job in seconds", - labelNames: ["topic"], - buckets: [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10], - }), - gossipValidationQueueConcurrency: register.gauge<"topic">({ - name: "lodestar_gossip_validation_queue_concurrency", - help: "Current count of jobs being run on network processor for topic", - labelNames: ["topic"], - }), - discv5: { decodeEnrAttemptCount: register.counter({ name: "lodestar_discv5_decode_enr_attempt_count", @@ -237,14 +192,14 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_attnets_service_committee_subscriptions_total", help: "Count of committee subscriptions", }), - subscriptionsCommitteeMeshPeers: register.histogram<"subnet">({ + subscriptionsCommitteeMeshPeers: register.histogram<{subnet: number}>({ name: "lodestar_attnets_service_committee_subscriptions_mesh_peers", 
help: "Histogram of mesh peers per committee subscription", labelNames: ["subnet"], // Dlow = 6, D = 8, DHi = 12 plus 2 more buckets buckets: [0, 4, 6, 8, 12], }), - subscriptionsCommitteeTimeToStableMesh: register.histogram<"subnet">({ + subscriptionsCommitteeTimeToStableMesh: register.histogram<{subnet: number}>({ name: "lodestar_attnets_service_committee_subscriptions_time_to_stable_mesh_seconds", help: "Histogram of time until committee subscription is considered healthy (>= 6 mesh peers)", labelNames: ["subnet"], @@ -259,12 +214,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_attnets_service_long_lived_subscriptions_total", help: "Count of long lived subscriptions", }), - subscribeSubnets: register.gauge<"subnet" | "src">({ + subscribeSubnets: register.gauge<{subnet: number; src: SubnetSource | DLLSubnetSource}>({ name: "lodestar_attnets_service_subscribe_subnets_total", help: "Count of subscribe_subnets calls", labelNames: ["subnet", "src"], }), - unsubscribeSubnets: register.gauge<"subnet" | "src">({ + unsubscribeSubnets: register.gauge<{subnet: number; src: SubnetSource | DLLSubnetSource}>({ name: "lodestar_attnets_service_unsubscribe_subnets_total", help: "Count of unsubscribe_subnets calls", labelNames: ["subnet", "src"], @@ -280,12 +235,12 @@ export function createNetworkCoreMetrics(register: RegistryMetricCreator) { name: "lodestar_syncnets_service_committee_subscriptions_total", help: "Count of syncnet committee subscriptions", }), - subscribeSubnets: register.gauge<"subnet">({ + subscribeSubnets: register.gauge<{subnet: number}>({ name: "lodestar_syncnets_service_subscribe_subnets_total", help: "Count of syncnet subscribe_subnets calls", labelNames: ["subnet"], }), - unsubscribeSubnets: register.gauge<"subnet">({ + unsubscribeSubnets: register.gauge<{subnet: number}>({ name: "lodestar_syncnets_service_unsubscribe_subnets_total", help: "Count of syncnet unsubscribe_subnets calls", labelNames: 
["subnet"], @@ -303,7 +258,7 @@ export function getNetworkCoreWorkerMetrics(register: RegistryMetricCreator) { name: "lodestar_network_worker_reqresp_bridge_caller_pending_count", help: "Current count of pending elements in respBridgeCaller", }), - networkWorkerWireEventsOnWorkerThreadLatency: register.histogram<"eventName">({ + networkWorkerWireEventsOnWorkerThreadLatency: register.histogram<{eventName: string}>({ name: "lodestar_network_worker_wire_events_on_worker_thread_latency_seconds", help: "Latency in seconds to transmit network events to worker thread across parent port", labelNames: ["eventName"], diff --git a/packages/beacon-node/src/network/discv5/worker.ts b/packages/beacon-node/src/network/discv5/worker.ts index 1b50ee86aa29..e09b063d13d1 100644 --- a/packages/beacon-node/src/network/discv5/worker.ts +++ b/packages/beacon-node/src/network/discv5/worker.ts @@ -3,12 +3,20 @@ import path from "node:path"; import fs from "node:fs"; import {createFromProtobuf} from "@libp2p/peer-id-factory"; import {Multiaddr, multiaddr} from "@multiformats/multiaddr"; -import {Gauge} from "prom-client"; import {expose} from "@chainsafe/threads/worker"; import {Observable, Subject} from "@chainsafe/threads/observable"; -import {createKeypairFromPeerId, Discv5, ENR, ENRData, SignableENR, SignableENRData} from "@chainsafe/discv5"; +import { + createKeypairFromPeerId, + Discv5, + ENR, + ENRData, + IDiscv5CreateOptions, + SignableENR, + SignableENRData, +} from "@chainsafe/discv5"; import {createBeaconConfig} from "@lodestar/config"; import {getNodeLogger} from "@lodestar/logger/node"; +import {Gauge} from "@lodestar/utils"; import {RegistryMetricCreator} from "../../metrics/index.js"; import {collectNodeJSMetrics} from "../../metrics/nodeJsMetrics.js"; import {profileNodeJS, writeHeapSnapshot} from "../../util/profile.js"; @@ -28,14 +36,14 @@ const logger = getNodeLogger(workerData.loggerOpts); // Set up metrics, nodejs and discv5-specific let metricsRegistry: 
RegistryMetricCreator | undefined; -let enrRelevanceMetric: Gauge<"status"> | undefined; +let enrRelevanceMetric: Gauge<{status: string}> | undefined; let closeMetrics: () => void | undefined; if (workerData.metrics) { metricsRegistry = new RegistryMetricCreator(); closeMetrics = collectNodeJSMetrics(metricsRegistry, "discv5_worker_"); // add enr relevance metric - enrRelevanceMetric = metricsRegistry.gauge<"status">({ + enrRelevanceMetric = metricsRegistry.gauge<{status: string}>({ name: "lodestar_discv5_discovered_status_total_count", help: "Total count of status results of enrRelevance() function", labelNames: ["status"], @@ -56,7 +64,7 @@ const discv5 = Discv5.create({ ip6: workerData.bindAddrs.ip6 ? multiaddr(workerData.bindAddrs.ip6) : undefined, }, config: workerData.config, - metricsRegistry, + metricsRegistry: metricsRegistry as IDiscv5CreateOptions["metricsRegistry"], }); // Load boot enrs diff --git a/packages/beacon-node/src/network/gossip/interface.ts b/packages/beacon-node/src/network/gossip/interface.ts index 8e9013487a06..600f96193296 100644 --- a/packages/beacon-node/src/network/gossip/interface.ts +++ b/packages/beacon-node/src/network/gossip/interface.ts @@ -70,7 +70,7 @@ export type SSZTypeOfGossipTopic = T extends {type: infer export type GossipTypeMap = { [GossipType.beacon_block]: allForks.SignedBeaconBlock; - [GossipType.blob_sidecar]: deneb.SignedBlobSidecar; + [GossipType.blob_sidecar]: deneb.BlobSidecar; [GossipType.beacon_aggregate_and_proof]: phase0.SignedAggregateAndProof; [GossipType.beacon_attestation]: phase0.Attestation; [GossipType.voluntary_exit]: phase0.SignedVoluntaryExit; @@ -85,7 +85,7 @@ export type GossipTypeMap = { export type GossipFnByType = { [GossipType.beacon_block]: (signedBlock: allForks.SignedBeaconBlock) => Promise | void; - [GossipType.blob_sidecar]: (signedBlobSidecar: deneb.SignedBlobSidecar) => Promise | void; + [GossipType.blob_sidecar]: (blobSidecar: deneb.BlobSidecar) => Promise | void; 
[GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: phase0.SignedAggregateAndProof) => Promise | void; [GossipType.beacon_attestation]: (attestation: phase0.Attestation) => Promise | void; [GossipType.voluntary_exit]: (voluntaryExit: phase0.SignedVoluntaryExit) => Promise | void; diff --git a/packages/beacon-node/src/network/gossip/metrics.ts b/packages/beacon-node/src/network/gossip/metrics.ts index 3711669edddf..c2b5d0b32338 100644 --- a/packages/beacon-node/src/network/gossip/metrics.ts +++ b/packages/beacon-node/src/network/gossip/metrics.ts @@ -1,4 +1,6 @@ +import {ForkName} from "@lodestar/params"; import {RegistryMetricCreator} from "../../metrics/index.js"; +import {GossipType} from "./interface.js"; export type Eth2GossipsubMetrics = ReturnType; @@ -6,12 +8,12 @@ export type Eth2GossipsubMetrics = ReturnType export function createEth2GossipsubMetrics(register: RegistryMetricCreator) { return { gossipPeer: { - scoreByThreshold: register.gauge<"threshold">({ + scoreByThreshold: register.gauge<{threshold: string}>({ name: "lodestar_gossip_peer_score_by_threshold_count", help: "Gossip peer score by threshold", labelNames: ["threshold"], }), - meshPeersByClient: register.gauge<"client">({ + meshPeersByClient: register.gauge<{client: string}>({ name: "lodestar_gossip_mesh_peers_by_client_count", help: "number of mesh peers, labeled by client", labelNames: ["client"], @@ -22,34 +24,34 @@ export function createEth2GossipsubMetrics(register: RegistryMetricCreator) { }), }, gossipMesh: { - peersByType: register.gauge<"type" | "fork">({ + peersByType: register.gauge<{type: GossipType; fork: ForkName}>({ name: "lodestar_gossip_mesh_peers_by_type_count", help: "Number of connected mesh peers per gossip type", labelNames: ["type", "fork"], }), - peersByBeaconAttestationSubnet: register.gauge<"subnet" | "fork">({ + peersByBeaconAttestationSubnet: register.gauge<{subnet: string; fork: ForkName}>({ name: 
"lodestar_gossip_mesh_peers_by_beacon_attestation_subnet_count", help: "Number of connected mesh peers per beacon attestation subnet", labelNames: ["subnet", "fork"], }), - peersBySyncCommitteeSubnet: register.gauge<"subnet" | "fork">({ + peersBySyncCommitteeSubnet: register.gauge<{subnet: number; fork: ForkName}>({ name: "lodestar_gossip_mesh_peers_by_sync_committee_subnet_count", help: "Number of connected mesh peers per sync committee subnet", labelNames: ["subnet", "fork"], }), }, gossipTopic: { - peersByType: register.gauge<"type" | "fork">({ + peersByType: register.gauge<{type: GossipType; fork: ForkName}>({ name: "lodestar_gossip_topic_peers_by_type_count", help: "Number of connected topic peers per gossip type", labelNames: ["type", "fork"], }), - peersByBeaconAttestationSubnet: register.gauge<"subnet" | "fork">({ + peersByBeaconAttestationSubnet: register.gauge<{subnet: string; fork: ForkName}>({ name: "lodestar_gossip_topic_peers_by_beacon_attestation_subnet_count", help: "Number of connected topic peers per beacon attestation subnet", labelNames: ["subnet", "fork"], }), - peersBySyncCommitteeSubnet: register.gauge<"subnet" | "fork">({ + peersBySyncCommitteeSubnet: register.gauge<{subnet: number; fork: ForkName}>({ name: "lodestar_gossip_topic_peers_by_sync_committee_subnet_count", help: "Number of connected topic peers per sync committee subnet", labelNames: ["subnet", "fork"], diff --git a/packages/beacon-node/src/network/gossip/topic.ts b/packages/beacon-node/src/network/gossip/topic.ts index de1a571c3330..c5cd68ffa1de 100644 --- a/packages/beacon-node/src/network/gossip/topic.ts +++ b/packages/beacon-node/src/network/gossip/topic.ts @@ -85,7 +85,7 @@ export function getGossipSSZType(topic: GossipTopic) { // beacon_block is updated in altair to support the updated SignedBeaconBlock type return ssz[topic.fork].SignedBeaconBlock; case GossipType.blob_sidecar: - return ssz.deneb.SignedBlobSidecar; + return ssz.deneb.BlobSidecar; case 
GossipType.beacon_aggregate_and_proof: return ssz.phase0.SignedAggregateAndProof; case GossipType.beacon_attestation: diff --git a/packages/beacon-node/src/network/interface.ts b/packages/beacon-node/src/network/interface.ts index 047263d15022..9531c8529acf 100644 --- a/packages/beacon-node/src/network/interface.ts +++ b/packages/beacon-node/src/network/interface.ts @@ -44,7 +44,7 @@ export interface INetwork extends INetworkCorePublic { // Gossip publishBeaconBlock(signedBlock: allForks.SignedBeaconBlock): Promise; - publishBlobSidecar(signedBlobSidecar: deneb.SignedBlobSidecar): Promise; + publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise; publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise; publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise; publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise; diff --git a/packages/beacon-node/src/network/network.ts b/packages/beacon-node/src/network/network.ts index d2571a2a92e0..200bd4fd3a8d 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -288,14 +288,14 @@ export class Network implements INetwork { }); } - async publishBlobSidecar(signedBlobSidecar: deneb.SignedBlobSidecar): Promise { - const fork = this.config.getForkName(signedBlobSidecar.message.slot); - const index = signedBlobSidecar.message.index; - return this.publishGossip( - {type: GossipType.blob_sidecar, fork, index}, - signedBlobSidecar, - {ignoreDuplicatePublishError: true} - ); + async publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise { + const slot = blobSidecar.signedBlockHeader.message.slot; + const fork = this.config.getForkName(slot); + const index = blobSidecar.index; + + return this.publishGossip({type: GossipType.blob_sidecar, fork, index}, blobSidecar, { + ignoreDuplicatePublishError: true, + }); } async publishBeaconAggregateAndProof(aggregateAndProof: 
phase0.SignedAggregateAndProof): Promise { diff --git a/packages/beacon-node/src/network/peers/discover.ts b/packages/beacon-node/src/network/peers/discover.ts index 2090e8bedab6..2805f67b4763 100644 --- a/packages/beacon-node/src/network/peers/discover.ts +++ b/packages/beacon-node/src/network/peers/discover.ts @@ -43,7 +43,7 @@ enum QueryStatusCode { } type QueryStatus = {code: QueryStatusCode.NotActive} | {code: QueryStatusCode.Active; count: number}; -enum DiscoveredPeerStatus { +export enum DiscoveredPeerStatus { bad_score = "bad_score", already_connected = "already_connected", already_dialing = "already_dialing", diff --git a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts index 24fcfaae6cbc..d31cb3e2d7f9 100644 --- a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts +++ b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts @@ -4,7 +4,7 @@ import { getBlockRootFromSignedAggregateAndProofSerialized, getSlotFromAttestationSerialized, getSlotFromSignedAggregateAndProofSerialized, - getSlotFromSignedBlobSidecarSerialized, + getSlotFromBlobSidecarSerialized, getSlotFromSignedBeaconBlockSerialized, } from "../../util/sszBytes.js"; import {GossipType} from "../gossip/index.js"; @@ -43,7 +43,7 @@ export function createExtractBlockSlotRootFns(): ExtractSlotRootFns { return {slot}; }, [GossipType.blob_sidecar]: (data: Uint8Array): SlotOptionalRoot | null => { - const slot = getSlotFromSignedBlobSidecarSerialized(data); + const slot = getSlotFromBlobSidecarSerialized(data); if (slot === null) { return null; diff --git a/packages/beacon-node/src/network/processor/gossipHandlers.ts b/packages/beacon-node/src/network/processor/gossipHandlers.ts index 2e9ab3bb5a11..d31183828b85 100644 --- a/packages/beacon-node/src/network/processor/gossipHandlers.ts +++ b/packages/beacon-node/src/network/processor/gossipHandlers.ts @@ -116,7 +116,7 @@ function 
getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler fork: ForkName, peerIdStr: string, seenTimestampSec: number - ): Promise { + ): Promise { const slot = signedBlock.message.slot; const forkTypes = config.getForkTypes(slot); const blockHex = prettyBytes(forkTypes.BeaconBlock.hashTreeRoot(signedBlock.message)); @@ -126,13 +126,21 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler let blockInput; let blockInputMeta; if (config.getForkSeq(signedBlock.message.slot) >= ForkSeq.deneb) { - const blockInputRes = getBlockInput.getGossipBlockInput(config, { + const blockInputRes = chain.seenGossipBlockInput.getGossipBlockInput(config, { type: GossipedInputType.block, signedBlock, blockBytes, }); + blockInput = blockInputRes.blockInput; blockInputMeta = blockInputRes.blockInputMeta; + + // blockInput can't be returned null, improve by enforcing via return types + if (blockInput === null) { + throw Error( + `Invalid null blockInput returned by getGossipBlockInput for type=${GossipedInputType.block} blockHex=${blockHex} slot=${slot}` + ); + } } else { blockInput = getBlockInput.preDeneb(config, signedBlock, BlockSource.gossip, blockBytes); blockInputMeta = {}; @@ -170,20 +178,23 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } async function validateBeaconBlob( - signedBlob: deneb.SignedBlobSidecar, + blobSidecar: deneb.BlobSidecar, blobBytes: Uint8Array, gossipIndex: number, peerIdStr: string, seenTimestampSec: number ): Promise { - const slot = signedBlob.message.slot; - const blockHex = prettyBytes(signedBlob.message.blockRoot); + const blobBlockHeader = blobSidecar.signedBlockHeader.message; + const slot = blobBlockHeader.slot; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobBlockHeader); + const blockHex = prettyBytes(blockRoot); + const delaySec = chain.clock.secFromSlot(slot, seenTimestampSec); const recvToVal = Date.now() / 1000 - seenTimestampSec; - const 
{blockInput, blockInputMeta} = getBlockInput.getGossipBlockInput(config, { + const {blockInput, blockInputMeta} = chain.seenGossipBlockInput.getGossipBlockInput(config, { type: GossipedInputType.blob, - signedBlob, + blobSidecar, blobBytes, }); @@ -200,7 +211,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler }); try { - await validateGossipBlobSidecar(config, chain, signedBlob, gossipIndex); + await validateGossipBlobSidecar(chain, blobSidecar, gossipIndex); return blockInput; } catch (e) { if (e instanceof BlobSidecarGossipError) { @@ -211,7 +222,11 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } if (e.action === GossipAction.REJECT) { - chain.persistInvalidSszValue(ssz.deneb.SignedBlobSidecar, signedBlob, `gossip_reject_slot_${slot}`); + chain.persistInvalidSszValue( + ssz.deneb.BlobSidecar, + blobSidecar, + `gossip_reject_slot_${slot}_index_${blobSidecar.index}` + ); } } @@ -250,10 +265,20 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler // Returns the delay between the start of `block.slot` and `current time` const delaySec = chain.clock.secFromSlot(signedBlock.message.slot); metrics?.gossipBlock.elapsedTimeTillProcessed.observe(delaySec); + chain.seenGossipBlockInput.prune(); }) .catch((e) => { if (e instanceof BlockError) { switch (e.type.code) { + case BlockErrorCode.DATA_UNAVAILABLE: { + // TODO: create a newevent unknownBlobs and only pull blobs + const slot = signedBlock.message.slot; + const forkTypes = config.getForkTypes(slot); + const rootHex = toHexString(forkTypes.BeaconBlock.hashTreeRoot(signedBlock.message)); + + events.emit(NetworkEvent.unknownBlock, {rootHex, peer: peerIdStr}); + break; + } // ALREADY_KNOWN should not happen with ignoreIfKnown=true above // PARENT_UNKNOWN should not happen, we handled this in validateBeaconBlock() function above case BlockErrorCode.ALREADY_KNOWN: @@ -268,6 +293,7 @@ function getDefaultHandlers(modules: 
ValidatorFnsModules, options: GossipHandler } metrics?.gossipBlock.processBlockErrors.inc({error: e instanceof BlockError ? e.type.code : "NOT_BLOCK_ERROR"}); logger.error("Error receiving block", {slot: signedBlock.message.slot, peer: peerIdStr}, e as Error); + chain.seenGossipBlockInput.prune(); }); } @@ -288,15 +314,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler peerIdStr, seenTimestampSec ); - if (blockInput !== null) { - handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); - } else { - // TODO DENEB: - // - // If block + blobs not fully received in the slot within some deadline, we should trigger block/blob - // pull using req/resp by root pre-emptively even though it will be trigged on seeing any block/blob - // gossip on next slot via missing parent checks - } + handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); }, [GossipType.blob_sidecar]: async ({ @@ -306,13 +324,25 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler seenTimestampSec, }: GossipHandlerParamGeneric) => { const {serializedData} = gossipData; - const signedBlob = sszDeserialize(topic, serializedData); - if (config.getForkSeq(signedBlob.message.slot) < ForkSeq.deneb) { + const blobSidecar = sszDeserialize(topic, serializedData); + if (config.getForkSeq(blobSidecar.signedBlockHeader.message.slot) < ForkSeq.deneb) { throw new GossipActionError(GossipAction.REJECT, {code: "PRE_DENEB_BLOCK"}); } - const blockInput = await validateBeaconBlob(signedBlob, serializedData, topic.index, peerIdStr, seenTimestampSec); + const blockInput = await validateBeaconBlob( + blobSidecar, + serializedData, + topic.index, + peerIdStr, + seenTimestampSec + ); if (blockInput !== null) { - handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); + // TODO DENEB: + // + // With blobsPromise the block import would have been attempted with the receipt of the block gossip + // and should have resolved the availability 
promise, however we could track if the block processing + // was halted and requeue it + // + // handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); } else { // TODO DENEB: // diff --git a/packages/beacon-node/src/network/processor/index.ts b/packages/beacon-node/src/network/processor/index.ts index 1d1fd82a4522..3d067c626f76 100644 --- a/packages/beacon-node/src/network/processor/index.ts +++ b/packages/beacon-node/src/network/processor/index.ts @@ -93,7 +93,7 @@ const PROCESS_UNKNOWN_BLOCK_GOSSIP_OBJECTS_YIELD_EVERY_MS = 50; /** * Reprocess reject reason for metrics */ -enum ReprocessRejectReason { +export enum ReprocessRejectReason { /** * There are too many attestations that have unknown block root. */ @@ -107,9 +107,9 @@ enum ReprocessRejectReason { /** * Cannot accept work reason for metrics */ -enum CannotAcceptWorkReason { +export enum CannotAcceptWorkReason { /** - * Validating or procesing gossip block at current slot. + * Validating or processing gossip block at current slot. */ processingCurrentSlotBlock = "processing_current_slot_block", /** @@ -344,7 +344,10 @@ export class NetworkProcessor { for (const gossipMessages of gossipMessagesByRoot.values()) { for (const message of gossipMessages) { this.metrics?.reprocessGossipAttestations.reject.inc({reason: ReprocessRejectReason.expired}); - this.metrics?.reprocessGossipAttestations.waitSecBeforeReject.set(nowSec - message.seenTimestampSec); + this.metrics?.reprocessGossipAttestations.waitSecBeforeReject.set( + {reason: ReprocessRejectReason.expired}, + nowSec - message.seenTimestampSec + ); // TODO: Should report the dropped job to gossip? 
It will be eventually pruned from the mcache } } diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts index 10e7071f4fdb..41d3e901c41d 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts @@ -78,7 +78,9 @@ export function matchBlockWithBlobs( const blobSidecars: deneb.BlobSidecar[] = []; let blobSidecar: deneb.BlobSidecar; - while ((blobSidecar = allBlobSidecars[blobSideCarIndex])?.slot === block.data.message.slot) { + while ( + (blobSidecar = allBlobSidecars[blobSideCarIndex])?.signedBlockHeader.message.slot === block.data.message.slot + ) { blobSidecars.push(blobSidecar); lastMatchedSlot = block.data.message.slot; blobSideCarIndex++; @@ -111,14 +113,14 @@ export function matchBlockWithBlobs( if ( allBlobSidecars[blobSideCarIndex] !== undefined && // If there are no blobs, the blobs request can give 1 block outside the requested range - allBlobSidecars[blobSideCarIndex].slot <= endSlot + allBlobSidecars[blobSideCarIndex].signedBlockHeader.message.slot <= endSlot ) { throw Error( `Unmatched blobSidecars, blocks=${allBlocks.length}, blobs=${ allBlobSidecars.length } lastMatchedSlot=${lastMatchedSlot}, pending blobSidecars slots=${allBlobSidecars .slice(blobSideCarIndex) - .map((blb) => blb.slot) + .map((blb) => blb.signedBlockHeader.message.slot) .join(",")}` ); } diff --git a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts index 2cd852492220..e3655cd90c6f 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRange.ts @@ -1,10 +1,10 @@ -import {GENESIS_SLOT, MAX_REQUEST_BLOCKS_DENEB} from "@lodestar/params"; +import {GENESIS_SLOT, 
MAX_REQUEST_BLOCKS_DENEB, BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; import {deneb, Slot} from "@lodestar/types"; import {fromHex} from "@lodestar/utils"; import {IBeaconChain} from "../../../chain/index.js"; import {IBeaconDb} from "../../../db/index.js"; -import {BLOB_SIDECARS_IN_WRAPPER_INDEX, BLOBSIDECAR_FIXED_SIZE} from "../../../db/repositories/blobSidecars.js"; +import {BLOB_SIDECARS_IN_WRAPPER_INDEX} from "../../../db/repositories/blobSidecars.js"; export async function* onBlobSidecarsByRange( request: deneb.BlobSidecarsByRangeRequest, diff --git a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts index 3bb162d019e3..6aa16a0c2629 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts @@ -1,9 +1,10 @@ import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; +import {BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; import {deneb, RootHex} from "@lodestar/types"; import {toHex, fromHex} from "@lodestar/utils"; import {IBeaconChain} from "../../../chain/index.js"; import {IBeaconDb} from "../../../db/index.js"; -import {BLOB_SIDECARS_IN_WRAPPER_INDEX, BLOBSIDECAR_FIXED_SIZE} from "../../../db/repositories/blobSidecars.js"; +import {BLOB_SIDECARS_IN_WRAPPER_INDEX} from "../../../db/repositories/blobSidecars.js"; export async function* onBlobSidecarsByRoot( requestBody: deneb.BlobSidecarsByRootRequest, diff --git a/packages/beacon-node/src/network/subnets/attnetsService.ts b/packages/beacon-node/src/network/subnets/attnetsService.ts index d76e56677ac6..7eabc2e4114c 100644 --- a/packages/beacon-node/src/network/subnets/attnetsService.ts +++ b/packages/beacon-node/src/network/subnets/attnetsService.ts @@ -34,7 +34,7 @@ const LAST_SEEN_VALIDATOR_TIMEOUT = 150; 
const gossipType = GossipType.beacon_attestation; -enum SubnetSource { +export enum SubnetSource { committee = "committee", random = "random", } diff --git a/packages/beacon-node/src/network/subnets/dllAttnetsService.ts b/packages/beacon-node/src/network/subnets/dllAttnetsService.ts index f7ae0e8d09c2..7236695cb11a 100644 --- a/packages/beacon-node/src/network/subnets/dllAttnetsService.ts +++ b/packages/beacon-node/src/network/subnets/dllAttnetsService.ts @@ -20,7 +20,7 @@ import {computeSubscribedSubnet} from "./util.js"; const gossipType = GossipType.beacon_attestation; -enum SubnetSource { +export enum DLLSubnetSource { committee = "committee", longLived = "long_lived", } @@ -179,7 +179,7 @@ export class DLLAttnetsService implements IAttnetsService { if (dutiedSlot === clockSlot + this.opts.slotsToSubscribeBeforeAggregatorDuty) { // Trigger gossip subscription first, in batch if (dutiedInfo.size > 0) { - this.subscribeToSubnets(Array.from(dutiedInfo.keys()), SubnetSource.committee); + this.subscribeToSubnets(Array.from(dutiedInfo.keys()), DLLSubnetSource.committee); } // Then, register the subscriptions for (const subnet of dutiedInfo.keys()) { @@ -276,7 +276,7 @@ export class DLLAttnetsService implements IAttnetsService { } // First, tell gossip to subscribe to the subnets if not connected already - this.subscribeToSubnets(newSubnets, SubnetSource.longLived); + this.subscribeToSubnets(newSubnets, DLLSubnetSource.longLived); // then update longLivedSubscriptions for (const subnet of toRemoveSubnets) { @@ -289,7 +289,7 @@ export class DLLAttnetsService implements IAttnetsService { } // Only tell gossip to unsubsribe last, longLivedSubscriptions has the latest state - this.unsubscribeSubnets(toRemoveSubnets, this.clock.currentSlot, SubnetSource.longLived); + this.unsubscribeSubnets(toRemoveSubnets, this.clock.currentSlot, DLLSubnetSource.longLived); this.updateMetadata(); } @@ -300,7 +300,7 @@ export class DLLAttnetsService implements IAttnetsService { private 
unsubscribeExpiredCommitteeSubnets(slot: Slot): void { const expired = this.shortLivedSubscriptions.getExpired(slot); if (expired.length > 0) { - this.unsubscribeSubnets(expired, slot, SubnetSource.committee); + this.unsubscribeSubnets(expired, slot, DLLSubnetSource.committee); } } @@ -333,7 +333,7 @@ export class DLLAttnetsService implements IAttnetsService { * Trigger a gossip subcription only if not already subscribed * shortLivedSubscriptions or longLivedSubscriptions should be updated right AFTER this called **/ - private subscribeToSubnets(subnets: number[], src: SubnetSource): void { + private subscribeToSubnets(subnets: number[], src: DLLSubnetSource): void { const forks = getActiveForks(this.config, this.clock.currentEpoch); for (const subnet of subnets) { if (!this.shortLivedSubscriptions.has(subnet) && !this.longLivedSubscriptions.has(subnet)) { @@ -349,7 +349,7 @@ export class DLLAttnetsService implements IAttnetsService { * Trigger a gossip un-subscription only if no-one is still subscribed * If unsubscribe long lived subnets, longLivedSubscriptions should be updated right BEFORE this called **/ - private unsubscribeSubnets(subnets: number[], slot: Slot, src: SubnetSource): void { + private unsubscribeSubnets(subnets: number[], slot: Slot, src: DLLSubnetSource): void { // No need to unsubscribeTopic(). 
Return early to prevent repetitive extra work if (this.opts.subscribeAllSubnets) return; diff --git a/packages/beacon-node/src/util/array.ts b/packages/beacon-node/src/util/array.ts index 72f81fbee72b..a154ee1bbf34 100644 --- a/packages/beacon-node/src/util/array.ts +++ b/packages/beacon-node/src/util/array.ts @@ -45,6 +45,9 @@ export class LinkedList { return this._length; } + /** + * Add to the end of the list + */ push(data: T): void { if (this._length === 0) { this.tail = this.head = new Node(data); @@ -64,6 +67,9 @@ export class LinkedList { this._length++; } + /** + * Add to the beginning of the list + */ unshift(data: T): void { if (this._length === 0) { this.tail = this.head = new Node(data); @@ -83,6 +89,25 @@ export class LinkedList { this._length++; } + insertAfter(after: T, data: T): void { + const node = this.findNode(after); + if (!node) { + return; + } + + if (node === this.tail) { + this.push(data); + return; + } + + const newNode = new Node(data); + newNode.next = node.next; + newNode.prev = node; + node.next = newNode; + if (newNode.next) newNode.next.prev = newNode; + this._length++; + } + pop(): T | null { const oldTail = this.tail; if (!oldTail) return null; @@ -173,6 +198,48 @@ export class LinkedList { return false; } + /** + * Move an existing item to the head of the list. + * If the item is not found, do nothing. + */ + moveToHead(item: T): void { + // if this is head, do nothing + if (this.head?.data === item) { + return; + } + + const found = this.deleteFirst(item); + if (found) { + this.unshift(item); + } + } + + /** + * Move an existing item to the second position of the list. + * If the item is not found, do nothing. 
+ */ + moveToSecond(item: T): void { + // if this is head or second, do nothing + if (this.head?.data === item || this.head?.next?.data === item) { + return; + } + + const found = this.deleteFirst(item); + if (found) { + if (this.head?.next) { + const oldSecond = this.head.next; + const newSecond = new Node(item); + this.head.next = newSecond; + newSecond.next = oldSecond; + newSecond.prev = this.head; + oldSecond.prev = newSecond; + } else { + // only 1 item in the list + this.push(item); + } + } + } + next(): IteratorResult { if (!this.pointer) { return {done: true, value: undefined}; @@ -222,4 +289,23 @@ export class LinkedList { return arr; } + + /** + * Check if the item is in the list. + * @returns + */ + has(item: T): boolean { + return this.findNode(item) !== null; + } + + private findNode(item: T): Node | null { + let node = this.head; + while (node) { + if (node.data === item) { + return node; + } + node = node.next; + } + return null; + } } diff --git a/packages/beacon-node/src/util/blobs.ts b/packages/beacon-node/src/util/blobs.ts new file mode 100644 index 000000000000..bbad27f684ed --- /dev/null +++ b/packages/beacon-node/src/util/blobs.ts @@ -0,0 +1,48 @@ +import SHA256 from "@chainsafe/as-sha256"; +import {Tree} from "@chainsafe/persistent-merkle-tree"; +import {VERSIONED_HASH_VERSION_KZG, KZG_COMMITMENT_GINDEX0, ForkName} from "@lodestar/params"; +import {deneb, ssz, allForks} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {signedBlockToSignedHeader} from "@lodestar/state-transition"; + +type VersionHash = Uint8Array; + +export function kzgCommitmentToVersionedHash(kzgCommitment: deneb.KZGCommitment): VersionHash { + const hash = SHA256.digest(kzgCommitment); + // Equivalent to `VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]` + hash[0] = VERSIONED_HASH_VERSION_KZG; + return hash; +} + +export function computeInclusionProof( + fork: ForkName, + body: allForks.BeaconBlockBody, + index: number +): 
deneb.KzgCommitmentInclusionProof { + const bodyView = (ssz[fork].BeaconBlockBody as allForks.AllForksSSZTypes["BeaconBlockBody"]).toView(body); + const commitmentGindex = KZG_COMMITMENT_GINDEX0 + index; + return new Tree(bodyView.node).getSingleProof(BigInt(commitmentGindex)); +} + +export function computeBlobSidecars( + config: ChainForkConfig, + signedBlock: allForks.SignedBeaconBlock, + contents: deneb.Contents & {kzgCommitmentInclusionProofs?: deneb.KzgCommitmentInclusionProof[]} +): deneb.BlobSidecars { + const blobKzgCommitments = (signedBlock as deneb.SignedBeaconBlock).message.body.blobKzgCommitments; + if (blobKzgCommitments === undefined) { + throw Error("Invalid block with missing blobKzgCommitments for computeBlobSidecars"); + } + + const signedBlockHeader = signedBlockToSignedHeader(config, signedBlock); + const fork = config.getForkName(signedBlockHeader.message.slot); + + return blobKzgCommitments.map((kzgCommitment, index) => { + const blob = contents.blobs[index]; + const kzgProof = contents.kzgProofs[index]; + const kzgCommitmentInclusionProof = + contents.kzgCommitmentInclusionProofs?.[index] ?? 
computeInclusionProof(fork, signedBlock.message.body, index); + + return {index, blob, kzgCommitment, kzgProof, signedBlockHeader, kzgCommitmentInclusionProof}; + }); +} diff --git a/packages/beacon-node/src/util/queue/options.ts b/packages/beacon-node/src/util/queue/options.ts index c3846cd8be1f..e55d413088e3 100644 --- a/packages/beacon-node/src/util/queue/options.ts +++ b/packages/beacon-node/src/util/queue/options.ts @@ -1,4 +1,4 @@ -import {IGauge, IHistogram} from "../../metrics/index.js"; +import {Gauge, GaugeExtra, Histogram} from "@lodestar/utils"; export enum QueueType { FIFO = "FIFO", @@ -19,12 +19,12 @@ export type JobQueueOpts = { }; export type QueueMetrics = { - length: IGauge; - droppedJobs: IGauge; + length: GaugeExtra; + droppedJobs: Gauge; /** Compute async utilization rate with `rate(metrics_name[1m])` */ - jobTime: IHistogram; - jobWaitTime: IHistogram; - concurrency: IGauge; + jobTime: Histogram; + jobWaitTime: Histogram; + concurrency: Gauge; }; export const defaultQueueOpts: Required< diff --git a/packages/beacon-node/src/util/sszBytes.ts b/packages/beacon-node/src/util/sszBytes.ts index 0c258df35041..cd12c4bd9c18 100644 --- a/packages/beacon-node/src/util/sszBytes.ts +++ b/packages/beacon-node/src/util/sszBytes.ts @@ -1,6 +1,7 @@ import {BitArray, deserializeUint8ArrayBitListFromBytes} from "@chainsafe/ssz"; import {BLSSignature, RootHex, Slot} from "@lodestar/types"; import {toHex} from "@lodestar/utils"; +import {BYTES_PER_FIELD_ELEMENT, FIELD_ELEMENTS_PER_BLOB} from "@lodestar/params"; export type BlockRootHex = RootHex; export type AttDataBase64 = string; @@ -180,23 +181,18 @@ export function getSlotFromSignedBeaconBlockSerialized(data: Uint8Array): Slot | } /** - * 4 + 96 = 100 - * ``` - * class SignedBlobSidecar(Container): - * message: BlobSidecar [fixed] - * signature: BLSSignature [fixed] - * * class BlobSidecar(Container): - * blockRoot: Root [fixed - 32 bytes ], - * index: BlobIndex [fixed - 8 bytes ], - * slot: Slot [fixed - 8 
bytes] - * ... - * ``` + * index: BlobIndex [fixed - 8 bytes ], + * blob: Blob, BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB + * kzgCommitment: Bytes48, + * kzgProof: Bytes48, + * signedBlockHeader: + * slot: 8 bytes */ -const SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR = 32 + 8; +const SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR = 8 + BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_BLOB + 48 + 48; -export function getSlotFromSignedBlobSidecarSerialized(data: Uint8Array): Slot | null { +export function getSlotFromBlobSidecarSerialized(data: Uint8Array): Slot | null { if (data.length < SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR + SLOT_SIZE) { return null; } diff --git a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts index 21b70c69a425..923436d6f96d 100644 --- a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts +++ b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts @@ -7,11 +7,14 @@ import {defaultOptions as defaultValidatorOptions} from "@lodestar/validator"; // eslint-disable-next-line import/no-relative-packages import {rangeSyncTest} from "../../../../state-transition/test/perf/params.js"; import { - beforeValue, getNetworkCachedState, getNetworkCachedBlock, // eslint-disable-next-line import/no-relative-packages -} from "../../../../state-transition/test/utils/index.js"; +} from "../../../../state-transition/test/utils/testFileCache.js"; +import { + beforeValue, + // eslint-disable-next-line import/no-relative-packages +} from "../../../../state-transition/test/utils/beforeValueMocha.js"; import {BeaconChain} from "../../../src/chain/index.js"; import {ExecutionEngineDisabled} from "../../../src/execution/engine/index.js"; import {Eth1ForBlockProductionDisabled} from "../../../src/eth1/index.js"; diff --git a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh 
b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh index dee850740370..fbf9dcaaf929 100755 --- a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh +++ b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/post-merge.sh @@ -5,4 +5,4 @@ currentDir=$(pwd) . $scriptDir/common-setup.sh -docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --dataDir /data/ethereumjs --gethGenesis /data/genesis.json --rpc --rpcEngine --jwt-secret /data/jwtsecret --logLevel debug --isSingleNode +docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --dataDir /data/ethereumjs --gethGenesis /data/genesis.json --rpc --rpcEngineAddr 0.0.0.0 --rpcAddr 0.0.0.0 --rpcEngine --jwt-secret /data/jwtsecret --logLevel debug --isSingleNode diff --git a/packages/beacon-node/test/sim/mergemock.test.ts b/packages/beacon-node/test/sim/mergemock.test.ts index 0761005714bd..d9492723599d 100644 --- a/packages/beacon-node/test/sim/mergemock.test.ts +++ b/packages/beacon-node/test/sim/mergemock.test.ts @@ -157,7 +157,7 @@ describe("executionEngine / ExecutionEngineHttp", function () { eth1: {enabled: false, providerUrls: [engineRpcUrl], jwtSecretHex}, executionEngine: {urls: [engineRpcUrl], jwtSecretHex}, executionBuilder: { - urls: [ethRpcUrl], + url: ethRpcUrl, enabled: true, issueLocalFcUWithFeeRecipient: feeRecipientMevBoost, allowedFaults: 16, diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index 0ab7b3b363b5..47d72c1226e1 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -1,7 +1,7 @@ import path from "node:path"; import {expect} from "chai"; import {toHexString} from "@chainsafe/ssz"; -import 
{BeaconStateAllForks, isExecutionStateType} from "@lodestar/state-transition"; +import {BeaconStateAllForks, isExecutionStateType, signedBlockToSignedHeader} from "@lodestar/state-transition"; import {InputType} from "@lodestar/spec-test-util"; import {CheckpointWithHex, ForkChoice} from "@lodestar/fork-choice"; import {phase0, allForks, bellatrix, ssz, RootHex, deneb} from "@lodestar/types"; @@ -10,6 +10,7 @@ import {createBeaconConfig} from "@lodestar/config"; import {ACTIVE_PRESET, ForkSeq, isForkBlobs} from "@lodestar/params"; import {BeaconChain} from "../../../src/chain/index.js"; import {ClockEvent} from "../../../src/util/clock.js"; +import {computeInclusionProof} from "../../../src/util/blobs.js"; import {createCachedBeaconStateTest} from "../../utils/cachedBeaconState.js"; import {testLogger} from "../../utils/logger.js"; import {getConfig} from "../../utils/config.js"; @@ -195,20 +196,14 @@ const forkChoiceTest = throw Error("Invalid blobs or proofs lengths"); } - const blockRoot = config - .getForkTypes(signedBlock.message.slot) - .BeaconBlock.hashTreeRoot(signedBlock.message); const blobSidecars: deneb.BlobSidecars = blobs.map((blob, index) => { return { - blockRoot, index, - slot, blob, - // proofs isn't undefined here but typescript(check types) can't figure it out - kzgProof: (proofs ?? [])[index], kzgCommitment: commitments[index], - blockParentRoot: signedBlock.message.parentRoot, - proposerIndex: signedBlock.message.proposerIndex, + kzgProof: (proofs ?? 
[])[index], + signedBlockHeader: signedBlockToSignedHeader(config, signedBlock), + kzgCommitmentInclusionProof: computeInclusionProof(fork, signedBlock.message.body, index), }; }); diff --git a/packages/beacon-node/test/spec/specTestVersioning.ts b/packages/beacon-node/test/spec/specTestVersioning.ts index 3f1aad878e65..20125520321d 100644 --- a/packages/beacon-node/test/spec/specTestVersioning.ts +++ b/packages/beacon-node/test/spec/specTestVersioning.ts @@ -15,7 +15,7 @@ import {DownloadTestsOptions} from "@lodestar/spec-test-util"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); export const ethereumConsensusSpecsTests: DownloadTestsOptions = { - specVersion: "v1.4.0-beta.2-hotfix", + specVersion: "v1.4.0-beta.5", // Target directory is the host package root: 'packages/*/spec-tests' outputDir: path.join(__dirname, "../../spec-tests"), specTestsRepoUrl: "https://github.com/ethereum/consensus-spec-tests", diff --git a/packages/beacon-node/test/spec/utils/specTestIterator.ts b/packages/beacon-node/test/spec/utils/specTestIterator.ts index a9310d53ac81..084d3d00fd48 100644 --- a/packages/beacon-node/test/spec/utils/specTestIterator.ts +++ b/packages/beacon-node/test/spec/utils/specTestIterator.ts @@ -64,6 +64,7 @@ export const defaultSkipOpts: SkipOpts = { "capella/light_client/single_merkle_proof/BeaconBlockBody", "deneb/light_client/single_merkle_proof/BeaconBlockBody", ], + skippedRunners: ["merkle_proof"], }; /** diff --git a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts index febb027303b7..3e96f3b932c8 100644 --- a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts +++ b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV2.test.ts @@ -72,6 +72,7 @@ describe("api/validator - produceBlockV2", function () { const fullBlock = ssz.bellatrix.BeaconBlock.defaultValue(); const executionPayloadValue = 
ssz.Wei.defaultValue(); + const consensusBlockValue = ssz.Gwei.defaultValue(); const currentSlot = 100000; vi.spyOn(server.chainStub.clock, "currentSlot", "get").mockReturnValue(currentSlot); @@ -84,7 +85,7 @@ describe("api/validator - produceBlockV2", function () { const feeRecipient = "0xcccccccccccccccccccccccccccccccccccccccc"; const api = getValidatorApi(modules); - server.chainStub.produceBlock.mockResolvedValue({block: fullBlock, executionPayloadValue}); + server.chainStub.produceBlock.mockResolvedValue({block: fullBlock, executionPayloadValue, consensusBlockValue}); // check if expectedFeeRecipient is passed to produceBlock await api.produceBlockV2(slot, randaoReveal, graffiti, {feeRecipient}); diff --git a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts index 0835777dd7ec..83e1e7887510 100644 --- a/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts +++ b/packages/beacon-node/test/unit/api/impl/validator/produceBlockV3.test.ts @@ -41,93 +41,96 @@ describe("api/validator - produceBlockV3", function () { vi.clearAllMocks(); }); - const testCases: [routes.validator.BuilderSelection, number | null, number | null, string][] = [ - [routes.validator.BuilderSelection.MaxProfit, 1, 0, "builder"], - [routes.validator.BuilderSelection.MaxProfit, 1, 2, "engine"], - [routes.validator.BuilderSelection.MaxProfit, null, 0, "engine"], - [routes.validator.BuilderSelection.MaxProfit, 0, null, "builder"], - - [routes.validator.BuilderSelection.BuilderAlways, 1, 2, "builder"], - [routes.validator.BuilderSelection.BuilderAlways, 1, 0, "builder"], - [routes.validator.BuilderSelection.BuilderAlways, null, 0, "engine"], - [routes.validator.BuilderSelection.BuilderAlways, 0, null, "builder"], - - [routes.validator.BuilderSelection.BuilderOnly, 0, 2, "builder"], - [routes.validator.BuilderSelection.ExecutionOnly, 2, 0, "execution"], + const testCases: 
[routes.validator.BuilderSelection, number | null, number | null, number, string][] = [ + [routes.validator.BuilderSelection.MaxProfit, 1, 0, 0, "builder"], + [routes.validator.BuilderSelection.MaxProfit, 1, 2, 1, "engine"], + [routes.validator.BuilderSelection.MaxProfit, null, 0, 0, "engine"], + [routes.validator.BuilderSelection.MaxProfit, 0, null, 1, "builder"], + + [routes.validator.BuilderSelection.BuilderAlways, 1, 2, 0, "builder"], + [routes.validator.BuilderSelection.BuilderAlways, 1, 0, 1, "builder"], + [routes.validator.BuilderSelection.BuilderAlways, null, 0, 0, "engine"], + [routes.validator.BuilderSelection.BuilderAlways, 0, null, 1, "builder"], + + [routes.validator.BuilderSelection.BuilderOnly, 0, 2, 0, "builder"], + [routes.validator.BuilderSelection.ExecutionOnly, 2, 0, 1, "execution"], ]; - testCases.forEach(([builderSelection, builderPayloadValue, enginePayloadValue, finalSelection]) => { - it(`produceBlockV3 - ${finalSelection} produces block`, async () => { - syncStub = server.syncStub; - modules = { - chain: server.chainStub, - config, - db: server.dbStub, - logger, - network: server.networkStub, - sync: syncStub, - metrics: null, - }; - - const fullBlock = ssz.bellatrix.BeaconBlock.defaultValue(); - const blindedBlock = ssz.bellatrix.BlindedBeaconBlock.defaultValue(); - - const slot = 1 * SLOTS_PER_EPOCH; - const randaoReveal = fullBlock.body.randaoReveal; - const graffiti = "a".repeat(32); - const feeRecipient = "0xccccccccccccccccccccccccccccccccccccccaa"; - const currentSlot = 1 * SLOTS_PER_EPOCH; - - vi.spyOn(server.chainStub.clock, "currentSlot", "get").mockReturnValue(currentSlot); - vi.spyOn(syncStub, "state", "get").mockReturnValue(SyncState.Synced); - - const api = getValidatorApi(modules); - - if (enginePayloadValue !== null) { - chainStub.produceBlock.mockResolvedValue({ - block: fullBlock, - executionPayloadValue: BigInt(enginePayloadValue), - }); - } else { - chainStub.produceBlock.mockRejectedValue(Error("not produced")); - } - 
- if (builderPayloadValue !== null) { - chainStub.produceBlindedBlock.mockResolvedValue({ - block: blindedBlock, - executionPayloadValue: BigInt(builderPayloadValue), - }); - } else { - chainStub.produceBlindedBlock.mockRejectedValue(Error("not produced")); - } - - const _skipRandaoVerification = false; - const produceBlockOpts = { - strictFeeRecipientCheck: false, - builderSelection, - feeRecipient, - }; - - const block = await api.produceBlockV3(slot, randaoReveal, graffiti, _skipRandaoVerification, produceBlockOpts); - - const expectedBlock = finalSelection === "builder" ? blindedBlock : fullBlock; - const expectedExecution = finalSelection === "builder" ? true : false; - - expect(block.data).toEqual(expectedBlock); - expect(block.executionPayloadBlinded).toEqual(expectedExecution); - - // check call counts - if (builderSelection === routes.validator.BuilderSelection.ExecutionOnly) { - expect(chainStub.produceBlindedBlock).toBeCalledTimes(0); - } else { - expect(chainStub.produceBlindedBlock).toBeCalledTimes(1); - } - - if (builderSelection === routes.validator.BuilderSelection.BuilderOnly) { - expect(chainStub.produceBlock).toBeCalledTimes(0); - } else { - expect(chainStub.produceBlock).toBeCalledTimes(1); - } - }); - }); + testCases.forEach( + ([builderSelection, builderPayloadValue, enginePayloadValue, consensusBlockValue, finalSelection]) => { + it(`produceBlockV3 - ${finalSelection} produces block`, async () => { + syncStub = server.syncStub; + modules = { + chain: server.chainStub, + config, + db: server.dbStub, + logger, + network: server.networkStub, + sync: syncStub, + metrics: null, + }; + + const fullBlock = ssz.bellatrix.BeaconBlock.defaultValue(); + const blindedBlock = ssz.bellatrix.BlindedBeaconBlock.defaultValue(); + + const slot = 1 * SLOTS_PER_EPOCH; + const randaoReveal = fullBlock.body.randaoReveal; + const graffiti = "a".repeat(32); + const feeRecipient = "0xccccccccccccccccccccccccccccccccccccccaa"; + const currentSlot = 1 * 
SLOTS_PER_EPOCH; + + vi.spyOn(server.chainStub.clock, "currentSlot", "get").mockReturnValue(currentSlot); + vi.spyOn(syncStub, "state", "get").mockReturnValue(SyncState.Synced); + + const api = getValidatorApi(modules); + + if (enginePayloadValue !== null) { + chainStub.produceBlock.mockResolvedValue({ + block: fullBlock, + executionPayloadValue: BigInt(enginePayloadValue), + consensusBlockValue: BigInt(consensusBlockValue), + }); + } else { + chainStub.produceBlock.mockRejectedValue(Error("not produced")); + } + + if (builderPayloadValue !== null) { + chainStub.produceBlindedBlock.mockResolvedValue({ + block: blindedBlock, + executionPayloadValue: BigInt(builderPayloadValue), + consensusBlockValue: BigInt(consensusBlockValue), + }); + } else { + chainStub.produceBlindedBlock.mockRejectedValue(Error("not produced")); + } + const _skipRandaoVerification = false; + const produceBlockOpts = { + strictFeeRecipientCheck: false, + builderSelection, + feeRecipient, + }; + + const block = await api.produceBlockV3(slot, randaoReveal, graffiti, _skipRandaoVerification, produceBlockOpts); + + const expectedBlock = finalSelection === "builder" ? blindedBlock : fullBlock; + const expectedExecution = finalSelection === "builder" ? 
true : false; + + expect(block.data).toEqual(expectedBlock); + expect(block.executionPayloadBlinded).toEqual(expectedExecution); + + // check call counts + if (builderSelection === routes.validator.BuilderSelection.ExecutionOnly) { + expect(chainStub.produceBlindedBlock).toBeCalledTimes(0); + } else { + expect(chainStub.produceBlindedBlock).toBeCalledTimes(1); + } + + if (builderSelection === routes.validator.BuilderSelection.BuilderOnly) { + expect(chainStub.produceBlock).toBeCalledTimes(0); + } else { + expect(chainStub.produceBlock).toBeCalledTimes(1); + } + }); + } + ); }); diff --git a/packages/beacon-node/test/unit/chain/opPools/syncCommittee.test.ts b/packages/beacon-node/test/unit/chain/opPools/syncCommittee.test.ts index 54a2e5102d78..3cb5d496bf9b 100644 --- a/packages/beacon-node/test/unit/chain/opPools/syncCommittee.test.ts +++ b/packages/beacon-node/test/unit/chain/opPools/syncCommittee.test.ts @@ -38,7 +38,7 @@ describe("chain / opPools / SyncCommitteeMessagePool", function () { vi.clearAllMocks(); }); - it("should preaggregate SyncCommitteeContribution", () => { + it("should propagate SyncCommitteeContribution", () => { clockStub.secFromSlot.mockReturnValue(0); let contribution = cache.getContribution(subcommitteeIndex, syncCommittee.slot, syncCommittee.beaconBlockRoot); expect(contribution).not.toBeNull(); diff --git a/packages/beacon-node/test/unit/chain/prepareNextSlot.test.ts b/packages/beacon-node/test/unit/chain/prepareNextSlot.test.ts index 4decbc1b749c..f4fa68609015 100644 --- a/packages/beacon-node/test/unit/chain/prepareNextSlot.test.ts +++ b/packages/beacon-node/test/unit/chain/prepareNextSlot.test.ts @@ -1,4 +1,4 @@ -import {describe, it, expect, beforeEach, afterEach, vi, SpyInstance, Mock} from "vitest"; +import {describe, it, expect, beforeEach, afterEach, vi, Mock, MockInstance} from "vitest"; import {config} from "@lodestar/config/default"; import {ForkName, SLOTS_PER_EPOCH} from "@lodestar/params"; import {routes} from 
"@lodestar/api"; @@ -20,7 +20,7 @@ describe("PrepareNextSlot scheduler", () => { let regenStub: MockedBeaconChain["regen"]; let loggerStub: MockedLogger; let beaconProposerCacheStub: MockedBeaconChain["beaconProposerCache"]; - let getForkStub: SpyInstance<[number], ForkName>; + let getForkStub: MockInstance<[number], ForkName>; let updateBuilderStatus: MockedBeaconChain["updateBuilderStatus"]; let executionEngineStub: MockedBeaconChain["executionEngine"]; const emitPayloadAttributes = true; diff --git a/packages/beacon-node/test/unit/chain/seenCache/seenGossipBlockInput.test.ts b/packages/beacon-node/test/unit/chain/seenCache/seenGossipBlockInput.test.ts new file mode 100644 index 000000000000..c389e1b81e70 --- /dev/null +++ b/packages/beacon-node/test/unit/chain/seenCache/seenGossipBlockInput.test.ts @@ -0,0 +1,165 @@ +import {describe, it, expect} from "vitest"; +import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {ssz} from "@lodestar/types"; + +import {SeenGossipBlockInput} from "../../../../src/chain/seenCache/seenGossipBlockInput.js"; +import {BlockInputType, GossipedInputType} from "../../../../src/chain/blocks/types.js"; + +/* eslint-disable @typescript-eslint/naming-convention */ +describe("SeenGossipBlockInput", () => { + const chainConfig = createChainForkConfig({ + ...defaultChainConfig, + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + }); + const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); + const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); + const seenGossipBlockInput = new SeenGossipBlockInput(); + + // array of numBlobs, events where events are array of + // [block|blob11|blob2, pd | bp | null | error string reflecting the expected result] + const testCases: [string, number, [string, string | null][]][] = [ + ["no blobs", 0, [["block", "pd"]]], + [ + "1 blob, block first", + 1, + [ + ["block", "bp"], + ["blob0", "pd"], + ], + ], + [ + "1 blob, 
blob first", + 1, + [ + ["blob0", null], + ["block", "pd"], + ], + ], + [ + "6 blobs, block first", + 6, + [ + ["block", "bp"], + ["blob1", "bp"], + ["blob0", "bp"], + ["blob5", "bp"], + ["blob4", "bp"], + ["blob2", "bp"], + ["blob3", "pd"], + ], + ], + [ + "4 blobs, block in mid", + 4, + [ + ["blob1", null], + ["blob3", null], + ["block", "bp"], + ["blob0", "bp"], + ["blob2", "pd"], + ], + ], + [ + "3 blobs, block in end", + 3, + [ + ["blob1", null], + ["blob0", null], + ["blob2", null], + ["block", "pd"], + ], + ], + ]; + + // lets start from a random slot to build cases + let slot = 7456; + for (const testCase of testCases) { + const [testName, numBlobs, events] = testCase; + + it(`${testName}`, () => { + const signedBlock = ssz.deneb.SignedBeaconBlock.defaultValue(); + // assign slot and increment for the next block so as to keep these block testcases distinguished + // in the cache + signedBlock.message.slot = slot++; + signedBlock.message.body.blobKzgCommitments = Array.from({length: numBlobs}, () => + ssz.deneb.KZGCommitment.defaultValue() + ); + + // create a dummy signed block header with matching body root + const bodyRoot = ssz.deneb.BeaconBlockBody.hashTreeRoot(signedBlock.message.body); + const signedBlockHeader = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + signedBlockHeader.message.slot = signedBlock.message.slot; + signedBlockHeader.message.bodyRoot = bodyRoot; + + const blobSidecars = Array.from({length: numBlobs}, (_val, index) => { + const message = {...ssz.deneb.BlobSidecar.defaultValue(), signedBlockHeader, index}; + return message; + }); + + for (const testEvent of events) { + const [inputEvent, expectedRes] = testEvent; + const eventType = inputEvent.includes("block") ? 
GossipedInputType.block : GossipedInputType.blob; + const expectedResponseType = parseResponseType(expectedRes); + + try { + if (eventType === GossipedInputType.block) { + const blockInputRes = seenGossipBlockInput.getGossipBlockInput(config, { + type: GossipedInputType.block, + signedBlock, + blockBytes: null, + }); + + if (expectedResponseType instanceof Error) { + expect.fail(`expected to fail with error: ${expectedResponseType.message}`); + } else if (expectedResponseType === null) { + expect(blockInputRes).toBeNull; + } else { + expect(blockInputRes.blockInput?.type).to.be.equal(expectedResponseType); + } + } else { + const index = parseInt(inputEvent.split("blob")[1] ?? "0"); + const blobSidecar = blobSidecars[index]; + expect(blobSidecar).not.equal(undefined); + + const blockInputRes = seenGossipBlockInput.getGossipBlockInput(config, { + type: GossipedInputType.blob, + blobSidecar, + blobBytes: null, + }); + + if (expectedResponseType instanceof Error) { + expect.fail(`expected to fail with error: ${expectedResponseType.message}`); + } else if (expectedResponseType === null) { + expect(blockInputRes).toBeNull; + } else { + expect(blockInputRes.blockInput?.type).to.equal(expectedResponseType); + } + } + } catch (e) { + if (!(e as Error).message.includes("expected to fail with error")) { + if (!(expectedResponseType instanceof Error)) { + expect.fail( + `expected not to fail with response=${expectedResponseType} but errored: ${(e as Error).message}` + ); + } + } + } + } + }); + } +}); + +function parseResponseType(expectedRes: string | null): BlockInputType | null | Error { + switch (expectedRes) { + case null: + return null; + case "pd": + return BlockInputType.postDeneb; + case "bp": + return BlockInputType.blobsPromise; + default: + return Error(expectedRes); + } +} diff --git a/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts new file mode 100644 index 
000000000000..62f2bff13d19 --- /dev/null +++ b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts @@ -0,0 +1,120 @@ +import {describe, it, expect, beforeEach} from "vitest"; +import {toHexString} from "@chainsafe/ssz"; +import {EpochShuffling} from "@lodestar/state-transition"; +import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition/src/types.js"; +import {FIFOBlockStateCache} from "../../../../src/chain/stateCache/index.js"; +import {generateCachedState} from "../../../utils/state.js"; + +describe("FIFOBlockStateCache", function () { + let cache: FIFOBlockStateCache; + const shuffling: EpochShuffling = { + epoch: 0, + activeIndices: [], + shuffling: [], + committees: [], + committeesPerSlot: 1, + }; + + const state1 = generateCachedState({slot: 0}); + const key1 = toHexString(state1.hashTreeRoot()); + state1.epochCtx.currentShuffling = {...shuffling, epoch: 0}; + + const state2 = generateCachedState({slot: 1 * SLOTS_PER_EPOCH}); + const key2 = toHexString(state2.hashTreeRoot()); + state2.epochCtx.currentShuffling = {...shuffling, epoch: 1}; + + const state3 = generateCachedState({slot: 2 * SLOTS_PER_EPOCH}); + const key3 = toHexString(state3.hashTreeRoot()); + state3.epochCtx.currentShuffling = {...shuffling, epoch: 2}; + + beforeEach(function () { + // max 2 items + cache = new FIFOBlockStateCache({maxBlockStates: 2}, {}); + cache.add(state1); + cache.add(state2); + }); + + const testCases: { + name: string; + headState: CachedBeaconStateAllForks; + addAsHeadArr: boolean[]; + keptStates: string[]; + prunedState: string; + }[] = [ + { + name: "add as head, prune key1", + headState: state2, + addAsHeadArr: [true], + keptStates: [key3, key2], + prunedState: key1, + }, + { + name: "add, prune key1", + headState: state2, + addAsHeadArr: [false], + keptStates: [key2, key3], + prunedState: key1, + }, + { + name: "add as head, prune key2", + headState: state1, + addAsHeadArr: [true], 
+ keptStates: [key3, key1], + prunedState: key2, + }, + { + name: "add, prune key2", + headState: state1, + addAsHeadArr: [false], + keptStates: [key1, key3], + prunedState: key2, + }, + // same flow to importBlock + { + name: "add then set as head, prune key1", + headState: state2, + addAsHeadArr: [false, true], + keptStates: [key3, key2], + prunedState: key1, + }, + { + name: "add then set as head, prune key2", + headState: state1, + addAsHeadArr: [false, true], + keptStates: [key3, key1], + prunedState: key2, + }, + ]; + + for (const {name, headState, addAsHeadArr, keptStates, prunedState} of testCases) { + it(name, () => { + // move to head this state + cache.setHeadState(headState); + expect(cache.size).to.be.equal(2, "Size must be same as initial 2"); + for (const addAsHead of addAsHeadArr) { + cache.add(state3, addAsHead); + } + expect(cache.size).to.be.equal(2, "Size should reduce to initial 2 after pruning"); + expect(cache.dumpKeyOrder()).toEqual(keptStates); + expect(cache.get(prunedState)).toBeNull(); + for (const key of keptStates) { + expect(cache.get(key), `must have key ${key}`).to.be.not.null; + } + }); + } + + it("Should not prune newly added state", () => { + cache = new FIFOBlockStateCache({maxBlockStates: 1}, {}); + cache.setHeadState(state1); + // Size must be same as initial 1 + expect(cache.size).toEqual(1); + cache.add(state2); + // Should not delete newly added state + expect(cache.size).toEqual(2); + cache.add(state3); + // Should delete 1 state + expect(cache.size).toEqual(2); + expect(cache.dumpKeyOrder()).toEqual([key1, key3]); + }); +}); diff --git a/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts new file mode 100644 index 000000000000..83a2dddd65dd --- /dev/null +++ b/packages/beacon-node/test/unit/chain/stateCache/persistentCheckpointsCache.test.ts @@ -0,0 +1,954 @@ +import {describe, it, expect, beforeAll, 
beforeEach} from "vitest"; +import {SLOTS_PER_EPOCH, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; +import {CachedBeaconStateAllForks, computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; +import {RootHex, phase0} from "@lodestar/types"; +import {mapValues, toHexString} from "@lodestar/utils"; +import {PersistentCheckpointStateCache} from "../../../../src/chain/stateCache/persistentCheckpointsCache.js"; +import {checkpointToDatastoreKey} from "../../../../src/chain/stateCache/datastore/index.js"; +import {generateCachedState} from "../../../utils/state.js"; +import {ShufflingCache} from "../../../../src/chain/shufflingCache.js"; +import {testLogger} from "../../../utils/logger.js"; +import {getTestDatastore} from "../../../utils/chain/stateCache/datastore.js"; +import {CheckpointHex} from "../../../../src/chain/stateCache/types.js"; +import {toCheckpointHex} from "../../../../src/chain/index.js"; + +describe("PersistentCheckpointStateCache", function () { + let root0a: Buffer, root0b: Buffer, root1: Buffer, root2: Buffer; + let cp0a: phase0.Checkpoint, cp0b: phase0.Checkpoint, cp1: phase0.Checkpoint, cp2: phase0.Checkpoint; + let cp0aHex: CheckpointHex, cp0bHex: CheckpointHex, cp1Hex: CheckpointHex, cp2Hex: CheckpointHex; + let persistent0bKey: RootHex; + const startSlotEpoch20 = computeStartSlotAtEpoch(20); + const startSlotEpoch21 = computeStartSlotAtEpoch(21); + const startSlotEpoch22 = computeStartSlotAtEpoch(22); + let cache: PersistentCheckpointStateCache; + let fileApisBuffer: Map; + let states: Record<"cp0a" | "cp0b" | "cp1" | "cp2", CachedBeaconStateAllForks>; + let stateBytes: Record<"cp0a" | "cp0b" | "cp1" | "cp2", Uint8Array>; + + beforeAll(() => { + root0a = Buffer.alloc(32); + root0b = Buffer.alloc(32, 1); + root1 = Buffer.alloc(32, 2); + root2 = Buffer.alloc(32, 3); + root0b[31] = 1; + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // 
|0b--------root1--------root2 + // | + // 0a + // root0a is of the last slot of epoch 19 + cp0a = {epoch: 20, root: root0a}; + // root0b is of the first slot of epoch 20 + cp0b = {epoch: 20, root: root0b}; + cp1 = {epoch: 21, root: root1}; + cp2 = {epoch: 22, root: root2}; + [cp0aHex, cp0bHex, cp1Hex, cp2Hex] = [cp0a, cp0b, cp1, cp2].map((cp) => toCheckpointHex(cp)); + persistent0bKey = toHexString(checkpointToDatastoreKey(cp0b)); + const allStates = [cp0a, cp0b, cp1, cp2] + .map((cp) => generateCachedState({slot: cp.epoch * SLOTS_PER_EPOCH})) + .map((state, i) => { + const stateEpoch = computeEpochAtSlot(state.slot); + if (stateEpoch === 20 && i === 0) { + // cp0a + state.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + state.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0a); + return state; + } + + // other states based on cp0b + state.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + state.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0b); + + if (stateEpoch >= 21) { + state.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root1); + } + if (stateEpoch >= 22) { + state.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root2); + } + return state; + }); + + states = { + // Previous Root Checkpoint State of epoch 20 + cp0a: allStates[0], + // Current Root Checkpoint State of epoch 20 + cp0b: allStates[1], + // Current Root Checkpoint State of epoch 21 + cp1: allStates[2], + // Current Root Checkpoint State of epoch 22 + cp2: allStates[3], + }; + stateBytes = mapValues(states, (state) => state.serialize()); + }); + + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + 
cache.add(cp1, states["cp1"]); + }); + + it("getLatest", () => { + // cp0 + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch)?.hashTreeRoot()).toEqual(states["cp0a"].hashTreeRoot()); + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch + 1)?.hashTreeRoot()).toEqual(states["cp0a"].hashTreeRoot()); + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch - 1)?.hashTreeRoot()).to.be.undefined; + + // cp1 + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch + 1)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(cache.getLatest(cp1Hex.rootHex, cp1.epoch - 1)?.hashTreeRoot()).to.be.undefined; + + // cp2 + expect(cache.getLatest(cp2Hex.rootHex, cp2.epoch)?.hashTreeRoot()).to.be.undefined; + }); + + it("getOrReloadLatest", async () => { + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + + // cp0b is persisted + expect(fileApisBuffer.size).toEqual(1); + expect(Array.from(fileApisBuffer.keys())).toEqual([persistent0bKey]); + + // getLatest() does not reload from disk + expect(cache.getLatest(cp0aHex.rootHex, cp0a.epoch)).to.be.null; + expect(cache.getLatest(cp0bHex.rootHex, cp0b.epoch)).to.be.null; + + // cp0a has the root from previous epoch so we only prune it from db + expect(await cache.getOrReloadLatest(cp0aHex.rootHex, cp0a.epoch)).to.be.null; + // but getOrReloadLatest() does for cp0b + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch))?.serialize()).toEqual(stateBytes["cp0b"]); + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch + 1))?.serialize()).toEqual(stateBytes["cp0b"]); + expect((await cache.getOrReloadLatest(cp0bHex.rootHex, cp0b.epoch - 1))?.serialize()).to.be.undefined; + }); + + it("pruneFinalized and getStateOrBytes", async function () { + cache.add(cp2, states["cp2"]); + expect(await 
cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + // cp0 is persisted + expect(fileApisBuffer.size).toEqual(1); + expect(Array.from(fileApisBuffer.keys())).toEqual([persistent0bKey]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // cp1 is in memory + expect(cache.get(cp1Hex)).to.be.not.null; + // cp2 is in memory + expect(cache.get(cp2Hex)).to.be.not.null; + // finalize epoch cp2 + cache.pruneFinalized(cp2.epoch); + expect(fileApisBuffer.size).toEqual(0); + expect(cache.get(cp1Hex)).to.be.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(await cache.getStateOrBytes(cp0bHex)).to.be.null; + }); + + describe("findSeedStateToReload", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + cache.add(cp1, states["cp1"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1--------root2 + // | + // 0a + it("single state at lowest memory epoch", async function () { + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2 | + // | | + // 0a------------------------------root3 + // ^ ^ + // cp1a={0a, 21} {0a, 22}=cp2a + it("multiple states at lowest memory epoch", async 
function () { + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = states["cp0a"].clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + cp1aState.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.commit(); + cache.add(cp1a, cp1aState); + + const cp2a = {epoch: 22, root: root0a}; + const cp2aState = cp1aState.clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + cp2aState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp2aState.commit(); + cache.add(cp2a, cp2aState); + + const root3 = Buffer.alloc(32, 100); + const state3 = cp2aState.clone(); + state3.slot = 22 * SLOTS_PER_EPOCH + 3; + state3.commit(); + await cache.processState(toHexString(root3), state3); + + // state of {0a, 21} is choosen because it was built from cp0a + expect(cache.findSeedStateToReload(cp0aHex)?.hashTreeRoot()).toEqual(cp1aState.hashTreeRoot()); + // cp1 is choosen for 0b because it was built from cp0b + expect(cache.findSeedStateToReload(cp0bHex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + const randomRoot = Buffer.alloc(32, 101); + // for other random root it'll pick the first state of epoch 21 which is states["cp1"] + expect(cache.findSeedStateToReload({epoch: 20, rootHex: toHexString(randomRoot)})?.hashTreeRoot()).toEqual( + states["cp1"].hashTreeRoot() + ); + }); + }); + + describe("processState, maxEpochsInMemory = 2", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 2} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + cache.add(cp1, states["cp1"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // 
|| | | | + // |0b--------root1--------root2-----root3 + // | + // 0a + it("no reorg", async function () { + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot3 = states["cp2"].clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + const root3 = Buffer.alloc(32, 100); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // epoch 22 has 1 checkpoint state + expect(cache.get(cp2Hex)).to.be.not.null; + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ ^ + // || | | | | + // |0b--------root1--------root2-root3 | + // | | + // 0a |---------root4 + it("reorg in same epoch", async function () { + // mostly the same to the above test + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + expect(cache.get(cp2Hex)?.hashTreeRoot()).toEqual(states["cp2"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot3 = states["cp2"].clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + const root3 = Buffer.alloc(32, 100); + // process state 
of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot4 = states["cp2"].clone(); + blockStateRoot4.slot = 22 * SLOTS_PER_EPOCH + 4; + const root4 = Buffer.alloc(32, 101); + // process state of root4 + await cache.processState(toHexString(root4), blockStateRoot4); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // epoch 22 has 1 checkpoint state + expect(cache.get(cp2Hex)).to.be.not.null; + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^ ^ ^ ^ ^ + // | | | | | + // 0b---------root1-----|-root2 | + // | | + // |------root3 + // 1a ^ + // | + // {1a, 22}=cp2a + it("reorg 1 epoch", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // regen generates cp2a + const root1a = Buffer.alloc(32, 100); + const cp2a = {epoch: 22, root: root1a}; + const cp2aState = states["cp1"].clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + // assuming reorg block is at slot 5 of epoch 21 + cp2aState.blockRoots.set((startSlotEpoch21 + 5) % SLOTS_PER_HISTORICAL_ROOT, root1a); + cp2aState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root1a); + cache.add(cp2a, cp2aState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2aState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + + const root3 = Buffer.alloc(32, 101); + // 
process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // epoch 22 has 2 checkpoint states + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2a))).to.be.not.null; + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^ ^ ^ ^ ^ + // | | | | | + // 0b--------|root1-------root2 | + // | | + // |-----------------root3 + // 0a ^ ^ + // | | + // cp1a={0a, 21} {0a, 22}=cp2a + it("reorg 2 epochs", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // reload cp0b from disk + expect((await cache.getOrReload(toCheckpointHex(cp0b)))?.serialize()).toStrictEqual(stateBytes["cp0b"]); + + // regen generates cp1a + const root0a = Buffer.alloc(32, 100); + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = states["cp0b"].clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + // assuming reorg block is at slot 5 of epoch 20 + cp1aState.blockRoots.set((startSlotEpoch20 + 5) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp1a, cp1aState); + + // regen generates cp2a + const cp2a = {epoch: 22, root: root0a}; + const cp2aState = cp1aState.clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + cp2aState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp2a, cp2aState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2aState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + + const root3 = Buffer.alloc(32, 101); + // process state of root3 + await 
cache.processState(toHexString(root3), blockStateRoot3); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // epoch 21 and 22 have 2 checkpoint states + expect(cache.get(cp1Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2a))).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2 | + // |/ | + // 0a---------------------------root3 + // ^ ^ + // | | + // cp1a={0a, 21} {0a, 22}=cp2a + it("reorg 3 epochs, persist cp 0a", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // cp0a was pruned from memory and not in disc + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // regen needs to regen cp0a + cache.add(cp0a, states["cp0a"]); + + // regen generates cp1a + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = generateCachedState({slot: 21 * SLOTS_PER_EPOCH}); + cp1aState.blockRoots.set((startSlotEpoch20 - 1) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.blockRoots.set(startSlotEpoch20 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp1a, cp1aState); + + // regen generates cp2a + const cp2a = {epoch: 22, root: root0a}; + const cp2aState = cp1aState.clone(); + cp2aState.slot = 22 * SLOTS_PER_EPOCH; + cp2aState.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root0a); + cache.add(cp2a, cp2aState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2aState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + 
blockStateRoot3.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0a); + + // regen populates cache when producing blockStateRoot3 + + const root3 = Buffer.alloc(32, 100); + // process state of root3 + expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + // epoch 21 and 22 have 2 checkpoint states + expect(cache.get(cp1Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2a))).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--------root2 | + // || | + // ||---------------------------root3 + // 0a ^ ^ + // | | + // cp1b={0b, 21} {0b, 22}=cp2b + it("reorg 3 epochs, prune but no persist", async function () { + // process root2 state + cache.add(cp2, states["cp2"]); + expect(await cache.processState(toHexString(cp2.root), states["cp2"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + // cp0a was pruned from memory and not in disc + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // regen needs to reload cp0b + cache.add(cp0b, states["cp0b"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + + // regen generates cp1b + const cp1b = {epoch: 21, root: root0b}; + const cp1bState = states["cp0b"].clone(); + cp1bState.slot = 21 * SLOTS_PER_EPOCH; + cp1bState.blockRoots.set(startSlotEpoch21 % SLOTS_PER_HISTORICAL_ROOT, root0b); + cache.add(cp1b, cp1bState); + + // regen generates cp2b + const cp2b = {epoch: 22, root: root0b}; + const cp2bState = cp1bState.clone(); + cp2bState.slot = 22 * SLOTS_PER_EPOCH; + 
cp2bState.blockRoots.set(startSlotEpoch22 % SLOTS_PER_HISTORICAL_ROOT, root0b); + cache.add(cp2b, cp2bState); + + // block state of root3 in epoch 22 is built on cp2a + const blockStateRoot3 = cp2bState.clone(); + blockStateRoot3.slot = 22 * SLOTS_PER_EPOCH + 3; + const root3 = Buffer.alloc(32, 100); + // process state of root3, nothing is persisted + expect(await cache.processState(toHexString(root3), blockStateRoot3)).toEqual(0); + // but state of cp0b is pruned from memory + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + // epoch 21 and 22 have 2 checkpoint states + expect(cache.get(cp1Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1b))).to.be.not.null; + expect(cache.get(cp2Hex)).to.be.not.null; + expect(cache.get(toCheckpointHex(cp2b))).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + }); + }); + + describe("processState, maxEpochsInMemory = 1", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 1} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1--root2 + // | + // 0a + it("no reorg", async () => { + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot2 = states["cp1"].clone(); 
+ blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2 + await cache.processState(toHexString(root2), blockStateRoot2); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b--------root1--root2 | + // | |---------root3 + // 0a + it("reorg in same epoch", async () => { + // almost the same to "no reorg" test + expect(fileApisBuffer.size).toEqual(0); + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot2 = states["cp1"].clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2 + await cache.processState(toHexString(root2), blockStateRoot2); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + + const blockStateRoot3 = states["cp1"].clone(); + blockStateRoot3.slot = 21 * SLOTS_PER_EPOCH + 4; + const root3 = Buffer.alloc(32, 101); + // process state of root3 + await cache.processState(toHexString(root3), blockStateRoot3); + + // epoch 21 has 1 checkpoint state + expect(cache.get(cp1Hex)).to.be.not.null; + // epoch 20 has 0 checkpoint 
state + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // but cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // while cp0aHex is not + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b----1a--root1 | + // | |----|-------root2 + // 0a | + // cp1a={1a, 21} + it("reorg 1 epoch, no persist 1a", async () => { + // root 1a + expect(fileApisBuffer.size).toEqual(0); + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); + expect(fileApisBuffer.size).toEqual(0); + await assertPersistedCheckpointState([], []); + + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + + // root2, regen cp1a + const cp1aState = state1a.clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + const cp1a = {epoch: 21, root: root1a}; + cache.add(cp1a, cp1aState); + const blockStateRoot2 = cp1aState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2 + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // keep these 2 cp states at epoch 21 + 
expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1 | + // ||-----------|-------root2 + // 0a {21, 1b}=cp1b + it("reorg 1 epoch, no persist 0b", async () => { + expect(fileApisBuffer.size).toEqual(0); + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + + // simulate regen + cache.add(cp0b, states["cp0b"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + // root2, regen cp0b + const cp1bState = states["cp0b"].clone(); + cp1bState.slot = 21 * SLOTS_PER_EPOCH; + const cp1b = {epoch: 21, root: root0b}; + cache.add(cp1b, cp1bState); + const blockStateRoot2 = cp1bState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2, nothing is persisted + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(0); + + // but cp0b in-memory state is pruned + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1b))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ ^ + // || | | | + // |0b-----|--root1 | + // | | | | + // 0a-----1a----|-------root2 + // | + // cp1a={1a, 21} + it("reorg 1 epoch, persist one more checkpoint state", async () => { + // root 1a + expect(fileApisBuffer.size).toEqual(0); + 
const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0a"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH - 1; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(0); + expect(fileApisBuffer.size).toEqual(0); + // at epoch 20, there should be 2 cps in memory + expect(cache.get(cp0aHex)).to.be.not.null; + expect(cache.get(cp0bHex)).to.be.not.null; + await assertPersistedCheckpointState([], []); + + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // 2 checkpoint states at epoch 20 are pruned + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // only cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // root2, regen cp0a + cache.add(cp0a, states["cp0a"]); + const cp1aState = state1a.clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + const cp1a = {epoch: 21, root: root1a}; + cache.add(cp1a, cp1aState); + const blockStateRoot2 = cp1aState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2, persist cp0a + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + 
expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1 | + // | | | + // 0a-----------|-------root2 + // | + // cp1a={0a, 21} + it("reorg 2 epochs", async () => { + // cp1 + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + expect(fileApisBuffer.size).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // 2 checkpoint states at epoch 20 are pruned + expect(cache.get(cp0aHex)).to.be.null; + expect(cache.get(cp0bHex)).to.be.null; + // only cp0bHex is persisted + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + + // root2, regen cp0a + cache.add(cp0a, states["cp0a"]); + const cp1aState = states["cp0a"].clone(); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + const cp1a = {epoch: 21, root: root0a}; + cache.add(cp1a, cp1aState); + const blockStateRoot2 = cp1aState.clone(); + blockStateRoot2.slot = 21 * SLOTS_PER_EPOCH + 3; + const root2 = Buffer.alloc(32, 100); + // process state of root2, persist cp0a + expect(await cache.processState(toHexString(root2), blockStateRoot2)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + expect(cache.get(cp1Hex)?.hashTreeRoot()).toEqual(states["cp1"].hashTreeRoot()); + // keep these 2 cp states at epoch 21 + expect(cache.get(toCheckpointHex(cp1a))).to.be.not.null; + expect(cache.get(toCheckpointHex(cp1))).to.be.not.null; + }); + + describe("processState, maxEpochsInMemory = 0", () => { + beforeEach(() => { + fileApisBuffer = new Map(); + const datastore = getTestDatastore(fileApisBuffer); + cache = new 
PersistentCheckpointStateCache( + {datastore, logger: testLogger(), shufflingCache: new ShufflingCache()}, + {maxCPStateEpochsInMemory: 0} + ); + cache.add(cp0a, states["cp0a"]); + cache.add(cp0b, states["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ + // || | + // |0b --root1a + // | + // 0a + it("no reorg", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + + // nothing change + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b --root1a| + // | \ | + // 0a \------root1b + it("reorg in same epoch", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + + // nothing change + expect(await 
cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + // simulate reload cp1b + cache.add(cp0b, states["cp0b"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(states["cp0b"]); + const root1b = Buffer.alloc(32, 101); + const state1b = states["cp0b"].clone(); + state1b.slot = state1a.slot + 1; + state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); + // but no need to persist cp1b + expect(await cache.processState(toHexString(root1b), state1b)).toEqual(0); + // although states["cp0b"] is pruned + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b --root1a| + // | | + // 0a---------root1b + it("reorg 1 epoch", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1a = Buffer.alloc(32, 100); + const state1a = states["cp0b"].clone(); + state1a.slot = 20 * SLOTS_PER_EPOCH + SLOTS_PER_EPOCH + 3; + state1a.blockRoots.set(state1a.slot % SLOTS_PER_HISTORICAL_ROOT, root1a); + expect(await cache.processState(toHexString(root1a), state1a)).toEqual(0); + + // nothing change + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + const root1b = Buffer.alloc(32, 101); + const state1b = states["cp0a"].clone(); + state1b.slot = state1a.slot + 1; + state1b.blockRoots.set(state1b.slot % SLOTS_PER_HISTORICAL_ROOT, root1b); + // regen should reload cp0a from disk + cache.add(cp0a, states["cp0a"]); + expect(await 
cache.processState(toHexString(root1b), state1b)).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp0a], [stateBytes["cp0b"], stateBytes["cp0a"]]); + + // both cp0a and cp0b are persisted + expect(await cache.getStateOrBytes(cp0aHex)).toEqual(stateBytes["cp0a"]); + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + }); + + // epoch: 19 20 21 22 23 + // |-----------|-----------|-----------|-----------| + // ^^ ^ ^ + // || | | + // |0b--------root1 | + // | | + // 0a-----------------root2 + // ^ + // {0a, 21}=cp1a + it("reorg 2 epochs", async () => { + expect(await cache.processState(toHexString(root0b), states["cp0b"])).toEqual(1); + await assertPersistedCheckpointState([cp0b], [stateBytes["cp0b"]]); + expect(await cache.getStateOrBytes(cp0aHex)).to.be.null; + expect(await cache.getStateOrBytes(cp0bHex)).toEqual(stateBytes["cp0b"]); + + cache.add(cp1, states["cp1"]); + expect(await cache.processState(toHexString(cp1.root), states["cp1"])).toEqual(1); + await assertPersistedCheckpointState([cp0b, cp1], [stateBytes["cp0b"], stateBytes["cp1"]]); + + // regen should populate cp0a and cp1a checkpoint states + cache.add(cp0a, states["cp0a"]); + const cp1a = {epoch: 21, root: root0a}; + const cp1aState = states["cp0a"].clone(); + cp1aState.blockRoots.set((20 * SLOTS_PER_EPOCH) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.blockRoots.set((21 * SLOTS_PER_EPOCH) % SLOTS_PER_HISTORICAL_ROOT, root0a); + cp1aState.slot = 21 * SLOTS_PER_EPOCH; + cache.add(cp1a, cp1aState); + + const root2 = Buffer.alloc(32, 100); + const state2 = cp1aState.clone(); + state2.slot = 21 * SLOTS_PER_EPOCH + 3; + state2.blockRoots.set(state2.slot % SLOTS_PER_HISTORICAL_ROOT, root2); + expect(await cache.processState(toHexString(root2), state2)).toEqual(2); + // expect 4 cp states are persisted + await assertPersistedCheckpointState( + [cp0b, cp1, cp0a, cp1a], + [stateBytes["cp0b"], stateBytes["cp1"], stateBytes["cp0a"], cp1aState.serialize()] + ); + }); + }); + 
}); + + async function assertPersistedCheckpointState(cps: phase0.Checkpoint[], stateBytesArr: Uint8Array[]): Promise { + const persistedKeys = cps.map((cp) => toHexString(checkpointToDatastoreKey(cp))); + expect(Array.from(fileApisBuffer.keys())).toStrictEqual(persistedKeys); + for (const [i, persistedKey] of persistedKeys.entries()) { + expect(fileApisBuffer.get(persistedKey)).toStrictEqual(stateBytesArr[i]); + } + for (const [i, cp] of cps.entries()) { + const cpHex = toCheckpointHex(cp); + expect(await cache.getStateOrBytes(cpHex)).toStrictEqual(stateBytesArr[i]); + // simple get() does not reload from disk + expect(cache.get(cpHex)).to.be.null; + } + } +}); diff --git a/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts b/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts index b195e16d5bd0..37e4b84a3d68 100644 --- a/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts +++ b/packages/beacon-node/test/unit/eth1/eth1DepositDataTracker.test.ts @@ -1,4 +1,4 @@ -import {describe, it, expect, beforeEach, afterEach, vi, SpyInstance} from "vitest"; +import {describe, it, expect, beforeEach, afterEach, vi, MockInstance} from "vitest"; import {config} from "@lodestar/config/default"; import {TimeoutError} from "@lodestar/utils"; import {Eth1DepositDataTracker} from "../../../src/eth1/eth1DepositDataTracker.js"; @@ -17,8 +17,8 @@ describe("Eth1DepositDataTracker", function () { const eth1Provider = new Eth1Provider(config, opts, signal, null); let db: BeaconDb; let eth1DepositDataTracker: Eth1DepositDataTracker; - let getBlocksByNumberStub: SpyInstance; - let getDepositEventsStub: SpyInstance; + let getBlocksByNumberStub: MockInstance; + let getDepositEventsStub: MockInstance; beforeEach(() => { db = getMockedBeaconDb(); diff --git a/packages/beacon-node/test/unit/monitoring/properties.test.ts b/packages/beacon-node/test/unit/monitoring/properties.test.ts index 639161eefc9e..1a2e2c58377a 100644 --- 
a/packages/beacon-node/test/unit/monitoring/properties.test.ts +++ b/packages/beacon-node/test/unit/monitoring/properties.test.ts @@ -91,7 +91,11 @@ describe("monitoring / properties", () => { const labelValue = "test_label_value"; const metricValue = 10; - const metric = metrics.register.gauge({name: metricName, help: "withLabel test", labelNames: [labelName]}); + const metric = metrics.register.gauge<{[labelName]: string}>({ + name: metricName, + help: "withLabel test", + labelNames: [labelName], + }); metric.set({[labelName]: "different_value"}, metricValue + 1); metric.set({[labelName]: labelValue}, metricValue); diff --git a/packages/beacon-node/test/unit/monitoring/service.test.ts b/packages/beacon-node/test/unit/monitoring/service.test.ts index 9c1f8b89bae4..068f35634f81 100644 --- a/packages/beacon-node/test/unit/monitoring/service.test.ts +++ b/packages/beacon-node/test/unit/monitoring/service.test.ts @@ -1,7 +1,7 @@ -import {describe, it, expect, beforeEach, beforeAll, afterAll, vi, afterEach, SpyInstance} from "vitest"; +import {describe, it, expect, beforeEach, beforeAll, afterAll, vi, afterEach, MockInstance} from "vitest"; +import {Histogram} from "prom-client"; import {ErrorAborted, TimeoutError} from "@lodestar/utils"; import {RegistryMetricCreator} from "../../../src/index.js"; -import {HistogramExtra} from "../../../src/metrics/utils/histogram.js"; import {MonitoringService} from "../../../src/monitoring/service.js"; import {MonitoringOptions} from "../../../src/monitoring/options.js"; import {sleep} from "../../utils/sleep.js"; @@ -41,8 +41,8 @@ describe("monitoring / service", () => { it("should register metrics for collecting and sending data", () => { service = new MonitoringService("beacon", {endpoint}, {register, logger}); - expect(register.getSingleMetric("lodestar_monitoring_collect_data_seconds")).toBeInstanceOf(HistogramExtra); - expect(register.getSingleMetric("lodestar_monitoring_send_data_seconds")).toBeInstanceOf(HistogramExtra); + 
expect(register.getSingleMetric("lodestar_monitoring_collect_data_seconds")).toBeInstanceOf(Histogram); + expect(register.getSingleMetric("lodestar_monitoring_send_data_seconds")).toBeInstanceOf(Histogram); }); it("should log a warning message if insecure monitoring endpoint is provided ", () => { @@ -115,7 +115,7 @@ describe("monitoring / service", () => { }); describe("MonitoringService - close", () => { - let clearTimeout: SpyInstance; + let clearTimeout: MockInstance; beforeAll(() => { clearTimeout = vi.spyOn(global, "clearTimeout"); diff --git a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts index 189327a6a5ab..56fb64104744 100644 --- a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts +++ b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts @@ -30,16 +30,21 @@ describe("beaconBlocksMaybeBlobsByRange", () => { rangeRequest.count = 1; const block1 = ssz.deneb.SignedBeaconBlock.defaultValue(); + const blockheader1 = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + blockheader1.message.slot = 1; block1.message.slot = 1; block1.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); const blobSidecar1 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar1.slot = 1; + blobSidecar1.signedBlockHeader = blockheader1; const block2 = ssz.deneb.SignedBeaconBlock.defaultValue(); block2.message.slot = 2; + const blockheader2 = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + blockheader2.message.slot = 2; + block2.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); const blobSidecar2 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar2.slot = 2; + blobSidecar2.signedBlockHeader = blockheader2; const block3 = ssz.deneb.SignedBeaconBlock.defaultValue(); block3.message.slot = 3; @@ -47,13 +52,18 @@ describe("beaconBlocksMaybeBlobsByRange", () => { const block4 
= ssz.deneb.SignedBeaconBlock.defaultValue(); block4.message.slot = 4; + const blockheader4 = ssz.phase0.SignedBeaconBlockHeader.defaultValue(); + blockheader4.message.slot = 4; + // two blobsidecars block4.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); block4.message.body.blobKzgCommitments.push(ssz.deneb.KZGCommitment.defaultValue()); const blobSidecar41 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar41.slot = 4; + + blobSidecar41.signedBlockHeader = blockheader4; + const blobSidecar42 = ssz.deneb.BlobSidecar.defaultValue(); - blobSidecar42.slot = 4; + blobSidecar42.signedBlockHeader = blockheader4; blobSidecar42.index = 1; // Array of testcases which are array of matched blocks with/without (if empty) sidecars diff --git a/packages/beacon-node/test/unit/util/array.test.ts b/packages/beacon-node/test/unit/util/array.test.ts index 5ca275d5a278..d505d27c2e9f 100644 --- a/packages/beacon-node/test/unit/util/array.test.ts +++ b/packages/beacon-node/test/unit/util/array.test.ts @@ -102,6 +102,72 @@ describe("LinkedList", () => { expect(list.last()).toBe(98); }); + describe("moveToHead", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("item is head", () => { + list.moveToHead(1); + expect(list.toArray()).toEqual([1, 2, 3]); + expect(list.first()).toBe(1); + }); + + it("item is middle", () => { + list.moveToHead(2); + expect(list.toArray()).toEqual([2, 1, 3]); + expect(list.first()).toBe(2); + }); + + it("item is tail", () => { + list.moveToHead(3); + expect(list.toArray()).toEqual([3, 1, 2]); + expect(list.first()).toBe(3); + }); + }); + + describe("moveToSecond", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + list.push(4); + }); + + it("item is head", () => { + list.moveToSecond(1); + expect(list.toArray()).toEqual([1, 2, 3, 4]); + 
expect(list.first()).toBe(1); + }); + + it("item is second", () => { + list.moveToSecond(2); + expect(list.toArray()).toEqual([1, 2, 3, 4]); + expect(list.first()).toBe(1); + }); + + it("item is third", () => { + list.moveToSecond(3); + expect(list.toArray()).toEqual([1, 3, 2, 4]); + expect(list.first()).toBe(1); + }); + + it("item is tail", () => { + list.moveToSecond(4); + expect(list.toArray()).toEqual([1, 4, 2, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + }); + it("values", () => { expect(Array.from(list.values())).toEqual([]); const count = 100; @@ -165,6 +231,46 @@ describe("LinkedList", () => { }); }); + describe("insertAfter", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("insert after 0", () => { + // should do nothing + list.insertAfter(0, 4); + expect(list.toArray()).toEqual([1, 2, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + + it("insert after 1", () => { + list.insertAfter(1, 4); + expect(list.toArray()).toEqual([1, 4, 2, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + + it("insert after 2", () => { + list.insertAfter(2, 4); + expect(list.toArray()).toEqual([1, 2, 4, 3]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(3); + }); + + it("insert after 3", () => { + list.insertAfter(3, 4); + expect(list.toArray()).toEqual([1, 2, 3, 4]); + expect(list.first()).toBe(1); + expect(list.last()).toBe(4); + }); + }); + it("toArray", () => { expect(list.toArray()).toEqual([]); @@ -205,4 +311,22 @@ describe("LinkedList", () => { }); } }); + + describe("has", () => { + let list: LinkedList; + + beforeEach(() => { + list = new LinkedList(); + list.push(1); + list.push(2); + list.push(3); + }); + + it("should return true if the item is in the list", () => { + expect(list.has(1)).toBe(true); + expect(list.has(2)).toBe(true); + expect(list.has(3)).toBe(true); + 
expect(list.has(4)).toBe(false); + }); + }); }); diff --git a/packages/beacon-node/test/unit/util/kzg.test.ts b/packages/beacon-node/test/unit/util/kzg.test.ts index 5bcaf1071cf6..cfe35e8fc76e 100644 --- a/packages/beacon-node/test/unit/util/kzg.test.ts +++ b/packages/beacon-node/test/unit/util/kzg.test.ts @@ -1,7 +1,8 @@ import {describe, it, expect, afterEach, beforeAll} from "vitest"; import {bellatrix, deneb, ssz} from "@lodestar/types"; import {BYTES_PER_FIELD_ELEMENT, BLOB_TX_TYPE} from "@lodestar/params"; -import {kzgCommitmentToVersionedHash} from "@lodestar/state-transition"; +import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {computeBlobSidecars, kzgCommitmentToVersionedHash} from "../../../src/util/blobs.js"; import {loadEthereumTrustedSetup, initCKZG, ckzg, FIELD_ELEMENTS_PER_BLOB_MAINNET} from "../../../src/util/kzg.js"; import {validateBlobSidecars, validateGossipBlobSidecar} from "../../../src/chain/validation/blobSidecar.js"; import {getMockedBeaconChain} from "../../__mocks__/mockedBeaconChain.js"; @@ -30,8 +31,18 @@ describe("C-KZG", async () => { expect(ckzg.verifyBlobKzgProofBatch(blobs, commitments, proofs)).toBe(true); }); + /* eslint-disable @typescript-eslint/naming-convention */ it("BlobSidecars", async () => { - const chain = getMockedBeaconChain(); + const chainConfig = createChainForkConfig({ + ...defaultChainConfig, + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + }); + const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); + const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); + + const chain = getMockedBeaconChain({config}); afterEachCallbacks.push(() => chain.close()); const slot = 0; @@ -45,34 +56,17 @@ describe("C-KZG", async () => { signedBeaconBlock.message.body.blobKzgCommitments.push(kzgCommitment); } const blockRoot = ssz.deneb.BeaconBlock.hashTreeRoot(signedBeaconBlock.message); + const kzgProofs = blobs.map((blob, index) => 
ckzg.computeBlobKzgProof(blob, kzgCommitments[index])); + const blobSidecars: deneb.BlobSidecars = computeBlobSidecars(chain.config, signedBeaconBlock, {blobs, kzgProofs}); - const blobSidecars: deneb.BlobSidecars = blobs.map((blob, index) => { - return { - blockRoot, - index, - slot, - blob, - kzgProof: ckzg.computeBlobKzgProof(blob, kzgCommitments[index]), - kzgCommitment: kzgCommitments[index], - blockParentRoot: Buffer.alloc(32), - proposerIndex: 0, - }; - }); - - const signedBlobSidecars: deneb.SignedBlobSidecar[] = blobSidecars.map((blobSidecar) => { - const signedBlobSidecar = ssz.deneb.SignedBlobSidecar.defaultValue(); - signedBlobSidecar.message = blobSidecar; - return signedBlobSidecar; - }); - - expect(signedBlobSidecars.length).toBe(2); + expect(blobSidecars.length).toBe(2); // Full validation validateBlobSidecars(slot, blockRoot, kzgCommitments, blobSidecars); - signedBlobSidecars.forEach(async (signedBlobSidecar) => { + blobSidecars.forEach(async (blobSidecar) => { try { - await validateGossipBlobSidecar(chain.config, chain, signedBlobSidecar, signedBlobSidecar.message.index); + await validateGossipBlobSidecar(chain, blobSidecar, blobSidecar.index); } catch (error) { // We expect some error from here // console.log(error); diff --git a/packages/beacon-node/test/unit/util/sszBytes.test.ts b/packages/beacon-node/test/unit/util/sszBytes.test.ts index 2ffaa98e6cfe..bb5fc67a7ce6 100644 --- a/packages/beacon-node/test/unit/util/sszBytes.test.ts +++ b/packages/beacon-node/test/unit/util/sszBytes.test.ts @@ -11,7 +11,7 @@ import { getSlotFromSignedAggregateAndProofSerialized, getSignatureFromAttestationSerialized, getSlotFromSignedBeaconBlockSerialized, - getSlotFromSignedBlobSidecarSerialized, + getSlotFromBlobSidecarSerialized, } from "../../../src/util/sszBytes.js"; describe("attestation SSZ serialized picking", () => { @@ -146,20 +146,20 @@ describe("signedBeaconBlock SSZ serialized picking", () => { }); }); -describe("signedBlobSidecar SSZ serialized 
picking", () => { - const testCases = [ssz.deneb.SignedBlobSidecar.defaultValue(), signedBlobSidecarFromValues(1_000_000)]; +describe("BlobSidecar SSZ serialized picking", () => { + const testCases = [ssz.deneb.BlobSidecar.defaultValue(), blobSidecarFromValues(1_000_000)]; - for (const [i, signedBlobSidecar] of testCases.entries()) { - const bytes = ssz.deneb.SignedBlobSidecar.serialize(signedBlobSidecar); - it(`signedBlobSidecar ${i}`, () => { - expect(getSlotFromSignedBlobSidecarSerialized(bytes)).toBe(signedBlobSidecar.message.slot); + for (const [i, blobSidecar] of testCases.entries()) { + const bytes = ssz.deneb.BlobSidecar.serialize(blobSidecar); + it(`blobSidecar ${i}`, () => { + expect(getSlotFromBlobSidecarSerialized(bytes)).toBe(blobSidecar.signedBlockHeader.message.slot); }); } - it("signedBlobSidecar - invalid data", () => { + it("blobSidecar - invalid data", () => { const invalidSlotDataSizes = [0, 20, 38]; for (const size of invalidSlotDataSizes) { - expect(getSlotFromSignedBlobSidecarSerialized(Buffer.alloc(size))).toBeNull(); + expect(getSlotFromBlobSidecarSerialized(Buffer.alloc(size))).toBeNull(); } }); }); @@ -198,8 +198,8 @@ function signedBeaconBlockFromValues(slot: Slot): phase0.SignedBeaconBlock { return signedBeaconBlock; } -function signedBlobSidecarFromValues(slot: Slot): deneb.SignedBlobSidecar { - const signedBlobSidecar = ssz.deneb.SignedBlobSidecar.defaultValue(); - signedBlobSidecar.message.slot = slot; - return signedBlobSidecar; +function blobSidecarFromValues(slot: Slot): deneb.BlobSidecar { + const blobSidecar = ssz.deneb.BlobSidecar.defaultValue(); + blobSidecar.signedBlockHeader.message.slot = slot; + return blobSidecar; } diff --git a/packages/beacon-node/test/utils/chain/stateCache/datastore.ts b/packages/beacon-node/test/utils/chain/stateCache/datastore.ts new file mode 100644 index 000000000000..8a944f4c2d88 --- /dev/null +++ b/packages/beacon-node/test/utils/chain/stateCache/datastore.ts @@ -0,0 +1,26 @@ +import 
{fromHexString, toHexString} from "@chainsafe/ssz"; +import {CPStateDatastore, checkpointToDatastoreKey} from "../../../../src/chain/stateCache/datastore/index.js"; + +export function getTestDatastore(fileApisBuffer: Map): CPStateDatastore { + const datastore: CPStateDatastore = { + write: (cp, state) => { + const persistentKey = checkpointToDatastoreKey(cp); + const stringKey = toHexString(persistentKey); + if (!fileApisBuffer.has(stringKey)) { + fileApisBuffer.set(stringKey, state.serialize()); + } + return Promise.resolve(persistentKey); + }, + remove: (persistentKey) => { + const stringKey = toHexString(persistentKey); + if (fileApisBuffer.has(stringKey)) { + fileApisBuffer.delete(stringKey); + } + return Promise.resolve(); + }, + read: (persistentKey) => Promise.resolve(fileApisBuffer.get(toHexString(persistentKey)) ?? null), + readKeys: () => Promise.resolve(Array.from(fileApisBuffer.keys()).map((key) => fromHexString(key))), + }; + + return datastore; +} diff --git a/packages/beacon-node/test/utils/mocks/db.ts b/packages/beacon-node/test/utils/mocks/db.ts index 731091bc8e6e..16d7b32a1bcc 100644 --- a/packages/beacon-node/test/utils/mocks/db.ts +++ b/packages/beacon-node/test/utils/mocks/db.ts @@ -1,4 +1,5 @@ import {IBeaconDb} from "../../../src/db/index.js"; +import {CheckpointStateRepository} from "../../../src/db/repositories/checkpointState.js"; import { AttesterSlashingRepository, BlockArchiveRepository, @@ -38,6 +39,7 @@ export function getStubbedBeaconDb(): IBeaconDb { // finalized states stateArchive: createStubInstance(StateArchiveRepository), + checkpointState: createStubInstance(CheckpointStateRepository), // op pool voluntaryExit: createStubInstance(VoluntaryExitRepository), diff --git a/packages/cli/.mocharc.yaml b/packages/cli/.mocharc.yaml deleted file mode 100644 index b923bc39eb4c..000000000000 --- a/packages/cli/.mocharc.yaml +++ /dev/null @@ -1,7 +0,0 @@ -exit: true -extension: ["ts"] -colors: true -require: - - ./test/setup.ts 
-node-option: - - "loader=ts-node/esm" diff --git a/packages/cli/.nycrc.json b/packages/cli/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/cli/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/cli/docsgen/changeCase.ts b/packages/cli/docsgen/changeCase.ts new file mode 100644 index 000000000000..096d26e61833 --- /dev/null +++ b/packages/cli/docsgen/changeCase.ts @@ -0,0 +1,29 @@ +const wordPattern = new RegExp(["[A-Z][a-z]+", "[A-Z]+(?=[A-Z][a-z])", "[A-Z]+", "[a-z]+", "[0-9]+"].join("|"), "g"); +function splitString(str: string): string[] { + const normalized = str + // sanitize characters that cannot be included + .replace(/[!@#$%^&*]/g, "-") + // normalize separators to '-' + .replace(/[._/\s\\]/g, "-") + .split("-"); + return normalized.map((seg) => seg.match(wordPattern) || []).flat(); +} +function capitalizeFirstLetter(segment: string): string { + return segment[0].toUpperCase() + segment.slice(1); +} +function lowercaseFirstLetter(segment: string): string { + return segment[0].toLowerCase() + segment.slice(1); +} +function toKebab(str: string): string { + return splitString(str).join("-").toLowerCase(); +} +function toPascal(str: string): string { + return splitString(str).map(capitalizeFirstLetter).join(""); +} +function toCamel(str: string): string { + return lowercaseFirstLetter(toPascal(str)); +} +function toEnv(str: string): string { + return splitString(str).join("_").toUpperCase(); +} +export {capitalizeFirstLetter, toKebab, toCamel, toPascal, toEnv}; diff --git a/packages/cli/docsgen/index.ts b/packages/cli/docsgen/index.ts index 5e0a3364f73d..524f70a51c5b 100644 --- a/packages/cli/docsgen/index.ts +++ b/packages/cli/docsgen/index.ts @@ -1,103 +1,25 @@ import fs from "node:fs"; import path from "node:path"; -import {Options} from "yargs"; -import omit from "lodash/omit.js"; import {cmds} from "../src/cmds/index.js"; -import {CliCommand} from 
"../src/util/index.js"; import {globalOptions} from "../src/options/index.js"; -import {beaconOptions} from "../src/cmds/beacon/options.js"; -import {renderMarkdownSections, toMarkdownTable, MarkdownSection} from "./markdown.js"; +import {renderCommandPage} from "./markdown.js"; // Script to generate a reference of all CLI commands and options // Outputs a markdown format ready to be consumed by mkdocs // // Usage: -// ts-node docsgen docs/cli.md +// ts-node packages/cli/docsgen // -// After generation the resulting .md should be mv to the path expected +// After generation the resulting .md files, they are written to the path expected // by the mkdocs index and other existing paths in the documentation -const docsMarkdownPath = process.argv[2]; -if (!docsMarkdownPath) throw Error("Run script with output path: 'ts-node docsgen docs/cli.md'"); +const dirname = path.dirname(new URL(import.meta.url).pathname); +const LODESTAR_COMMAND = "./lodestar"; +const DOCS_PAGES_FOLDER = path.join(dirname, "..", "..", "..", "docs", "pages"); -const docsString = renderMarkdownSections([ - { - title: "Command Line Reference", - body: "This reference describes the syntax of the Lodestar CLI commands and their options.", - subsections: [ - { - title: "Global Options", - body: getOptionsTable(globalOptions), - }, - ...cmds.map((cmd) => cmdToMarkdownSection(cmd)), - ], - }, -]); - -fs.mkdirSync(path.parse(docsMarkdownPath).dir, {recursive: true}); -fs.writeFileSync(docsMarkdownPath, docsString); - -/** - * Parse an CliCommand type recursively and output a MarkdownSection - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function cmdToMarkdownSection(cmd: CliCommand, parentCommand?: string): MarkdownSection { - const commandJson = [parentCommand, cmd.command.replace("", "")].filter(Boolean).join(" "); - const body = [cmd.describe]; - - if (cmd.examples) { - body.push("**Examples**"); - for (const example of cmd.examples) { - if 
(example.command.startsWith("lodestar")) example.command = `lodestar ${example.command}`; - body.push(example.description); - body.push(`\`\`\` \n${example.command}\n\`\`\``); - } - } - - if (cmd.options) { - body.push("**Options**"); - - if (cmd.subcommands) { - body.push("The options below apply to all sub-commands."); - } - - // De-duplicate beaconOptions. If all beaconOptions exists in this command, skip them - if ( - cmds.some((c) => c.command === "beacon") && - commandJson !== "beacon" && - Object.keys(beaconOptions).every((key) => cmd.options?.[key]) - ) { - cmd.options = omit(cmd.options, Object.keys(beaconOptions)); - body.push(`Cmd \`${commandJson}\` has all the options from the [\`beacon\` cmd](#beacon).`); - } - - body.push(getOptionsTable(cmd.options)); - } - return { - title: `\`${commandJson}\``, - body, - subsections: (cmd.subcommands || []).map((subcmd) => cmdToMarkdownSection(subcmd, commandJson)), - }; -} - -/** - * Render a Yargs options dictionary to a markdown table - */ -function getOptionsTable(options: Record, {showHidden}: {showHidden?: boolean} = {}): string { - const visibleOptions = Object.entries(options).filter(([, opt]) => showHidden || !opt.hidden); - - if (visibleOptions.length === 0) { - return ""; - } - - /* eslint-disable @typescript-eslint/naming-convention */ - return toMarkdownTable( - visibleOptions.map(([key, opt]) => ({ - Option: `\`--${key}\``, - Type: opt.type ?? "", - Description: opt.description ?? "", - Default: String(opt.defaultDescription || opt.default || ""), - })), - ["Option", "Type", "Description", "Default"] - ); +for (const cmd of cmds) { + const docstring = renderCommandPage(cmd, globalOptions, LODESTAR_COMMAND); + const folder = path.join(DOCS_PAGES_FOLDER, cmd.docsFolder ?? 
""); + if (!fs.existsSync(folder)) fs.mkdirSync(folder, {recursive: true}); + fs.writeFileSync(path.join(folder, `${cmd.command}-cli.md`), docstring); } diff --git a/packages/cli/docsgen/markdown.ts b/packages/cli/docsgen/markdown.ts index 80952f367c73..c05c7ad8c90f 100644 --- a/packages/cli/docsgen/markdown.ts +++ b/packages/cli/docsgen/markdown.ts @@ -1,41 +1,273 @@ -export type MarkdownSection = { - title: string; - body: string | string[]; - subsections?: MarkdownSection[]; -}; +import {CliOptionDefinition, CliCommand, CliExample, CliCommandOptions} from "../src/util/index.js"; +import {toKebab} from "./changeCase.js"; + +const DEFAULT_SEPARATOR = "\n\n"; +const LINE_BREAK = "\n\n
"; + +function renderExampleBody(example: CliExample, lodestarCommand?: string): string { + const cliExample = [ + `\`\`\` +${lodestarCommand ? `${lodestarCommand} ` : ""}${example.command} +\`\`\``, + ]; + + if (example.description) { + cliExample.unshift(example.description); + } + + return cliExample.join(DEFAULT_SEPARATOR); +} /** - * Render MarkdownSection recursively tracking its level depth + * Renders a single example like shown below. Title and description are optional. + * ------------------- + * #### Basic `validator` command example + * + * Run one validator client with all the keystores available in the directory .goerli/keystores + * + * ``` + * validator --network goerli + * ``` + * ------------------- */ -export function renderMarkdownSections(sections: MarkdownSection[], level = 1): string { - return sections - .map((section) => { - const parts = section.title ? [`${"\n" + "#".repeat(level)} ${section.title}`] : [""]; - if (section.body) { - parts.push(Array.isArray(section.body) ? section.body.join("\n\n") : section.body); - } - if (section.subsections) { - parts.push(renderMarkdownSections(section.subsections, level + 1)); - } - return parts.join(section.title ? "\n" : ""); - }) - .join("\n"); +function renderCommandExample(example: CliExample, lodestarCommand?: string): string { + const title = example.title ? 
`#### ${example.title}${DEFAULT_SEPARATOR}` : ""; + return title.concat(renderExampleBody(example, lodestarCommand)); } /** - * Render an array of objects as a markdown table + * Renders a example section like shown below + * ------------------- + * ## Examples + * + * #### Basic `validator` command example + * + * Run one validator client with all the keystores available in the directory .goerli/keystores + * + * ``` + * validator --network goerli + * ``` + * + * #### Advanced `validator` command example + * + * Run one validator client with all the keystores available in the directory .goerli/keystores + * using an rcConfig file for configuration + * + * ``` + * validator --rcConfig validator-dir/validator.rcconfig.yaml + * ``` + * ------------------- */ -export function toMarkdownTable(rows: T[], headers: (keyof T)[]): string { - return [ - toMarkdownTableRow(headers as string[]), - toMarkdownTableRow(headers.map(() => "---")), - ...rows.map((row) => toMarkdownTableRow(headers.map((key) => row[key]))), - ].join("\n"); +function renderExamplesSection(examples: CliExample[], sectionTitle?: string, lodestarCommand?: string): string { + const exampleSection = [sectionTitle]; + for (const example of examples) { + exampleSection.push(renderCommandExample(example, lodestarCommand)); + } + return exampleSection.filter(Boolean).join(DEFAULT_SEPARATOR); } /** - * Render an array of items as a markdown table row + * Renders a single cli option like shown below + * ------------------- + * #### `--logLevel` + * + * Logging verbosity level for emitting logs to terminal + * + * type: string + * default: info + * choices: "error", "warn", "info", "verbose", "debug" + * example: Set log level to debug + * + * ``` + * validator --logLevel debug + * ``` + * ------------------- */ -export function toMarkdownTableRow(row: string[]): string { - return `| ${row.join(" | ")} |`; +function renderOption(optionName: string, option: CliOptionDefinition): string | undefined { + if 
(option.hidden) return; + + const commandOption = [`#### \`--${optionName}\``]; + if (option.description) commandOption.push(`description: ${option.description}`); + + if (option.demandOption === true) { + commandOption.push("required: true"); + } + + if (option.type === "array") { + commandOption.push("type: `string[]`"); + } else if (option.type) { + commandOption.push(`type: \`${option.type}\``); + } + + if (option.choices) { + commandOption.push(`choices: ${option.choices.map((c) => `"${c}"`).join(", ")}`); + } + + let defaultValue = String(option.defaultDescription || option.default || ""); + if (defaultValue) { + if (option.type === "string" || option.string) { + defaultValue = `"${defaultValue}"`; + } + if (option.type === "array") { + // eslint-disable-next-line quotes + if (!defaultValue.includes(`"`)) { + defaultValue = `"${defaultValue}"`; + } + defaultValue = `[ ${defaultValue} ]`; + } + commandOption.push(`default: \`${defaultValue}\``); + } + + if (option.example) { + commandOption.push(`example: ${renderExampleBody(option.example)}`); + } + + return commandOption.join(DEFAULT_SEPARATOR).concat(LINE_BREAK); +} + +function renderOptions(options: CliCommandOptions>, title: string, description?: string): string { + const optionsSection = [title, description]; + for (const [name, option] of Object.entries(options)) { + const optionString = renderOption(name, option as CliOptionDefinition); + // Skip hidden options + if (optionString) { + optionsSection.push(optionString); + } + } + return optionsSection.filter(Boolean).join(DEFAULT_SEPARATOR); +} + +interface SubCommandDefinition { + command: string; + description?: string; + options?: CliCommandOptions>; + examples?: CliExample[]; +} + +function renderSubCommandsList(command: string, subCommands: SubCommandDefinition[]): string { + const list = [ + `## Available Sub-Commands + +The following sub-commands are available with the \`${command}\` command:`, + ]; + + for (const sub of subCommands) { + 
list.push(`- [${sub.command}](#${toKebab(sub.command)})`); + } + + return list.join(DEFAULT_SEPARATOR); +} + +/** + * ## `validator slashing-protection import` + * + * Import an interchange file from another client + * + * #### `validator slashing-protection import` Options + * + * `--file` + * + * The slashing protection interchange file to import (.json). + * + * type: string + * required: true + * + * #### Sub-Command Examples + * + * Import an interchange file to the slashing protection DB + * + * ``` + * ./lodestar validator slashing-protection import --network goerli --file interchange.json + * ``` + */ +function renderSubCommand(sub: SubCommandDefinition, lodestarCommand?: string): string { + const subCommand = [`## \`${sub.command}\``]; + + if (sub.description) { + subCommand.push(sub.description); + } + + if (sub.examples) { + subCommand.push(renderExamplesSection(sub.examples, `### \`${sub.command}\` Examples`, lodestarCommand)); + } + + if (sub.options) { + subCommand.push( + renderOptions( + sub.options, + `### \`${sub.command}\` Options`, + "_Supports all parent command options plus the following:_\n\n
" + ) + ); + } + + return subCommand.join(DEFAULT_SEPARATOR); +} + +function getSubCommands(rootCommand: string, sub: CliCommand): SubCommandDefinition[] { + const subCommands = [] as SubCommandDefinition[]; + + if (sub.command.includes("")) { + // If subcommand is a nested subcommand recursively render each of its subcommands by + // merging its props with its nested children but do not render the subcommand itself + for (const subSub of sub.subcommands ?? []) { + subCommands.push( + ...getSubCommands(rootCommand, { + ...subSub, + command: sub.command.replace("", subSub.command), + options: { + ...(sub.options ?? {}), + ...(subSub.options ?? {}), + }, + examples: sub.examples?.concat(subSub.examples ?? []), + }) + ); + } + } else { + // If subcommand is not nested build actual markdown + subCommands.push({ + command: `${rootCommand} ${sub.command}`, + description: sub.describe, + options: sub.options, + examples: sub.examples, + }); + + // render any sub-subcommands + if (sub.subcommands) { + for (const subSub of sub.subcommands) { + subCommands.push(...getSubCommands(`${rootCommand} ${sub.command}`, subSub)); + } + } + } + + return subCommands; +} + +export function renderCommandPage( + cmd: CliCommand, + globalOptions: CliCommandOptions>, + lodestarCommand?: string +): string { + const page = [`# \`${cmd.command}\` CLI Command`, cmd.describe]; + + const subCommands = (cmd.subcommands ?? 
[]).map((sub) => getSubCommands(cmd.command, sub)).flat(); + if (subCommands.length > 0) { + page.push(renderSubCommandsList(cmd.command, subCommands)); + } + + if (cmd.examples) { + page.push(renderExamplesSection(cmd.examples, "## Examples", lodestarCommand)); + } + + if (cmd.options) { + page.push(renderOptions({...globalOptions, ...cmd.options}, `## \`${cmd.command}\` Options`)); + } + + if (subCommands.length > 0) { + for (const sub of subCommands) { + page.push(renderSubCommand(sub, lodestarCommand)); + } + } + + return page.join(LINE_BREAK.concat(DEFAULT_SEPARATOR)); } diff --git a/packages/cli/package.json b/packages/cli/package.json index 4089b1c2d5ed..d566258e7f5d 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@chainsafe/lodestar", - "version": "1.12.0", + "version": "1.13.0", "description": "Command line interface for lodestar", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -24,15 +24,15 @@ "build": "tsc -p tsconfig.build.json && yarn write-git-data", "build:release": "yarn clean && yarn run build", "build:watch": "tsc -p tsconfig.build.json --watch", - "build:refdocs": "node --loader ts-node/esm ./docsgen/index.ts docs/cli.md", + "build:docs": "node --loader ts-node/esm ./docsgen/index.ts", "write-git-data": "node lib/util/gitData/writeGitData.js", "check-build": "node -e \"(async function() { await import('./lib/index.js') })()\" lodestar --help", "check-types": "tsc", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", - "test:e2e": "LODESTAR_PRESET=minimal mocha --timeout 30000 'test/e2e/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:e2e": "vitest --run --poolOptions.threads.singleThread true --dir test/e2e/", "test:sim:multifork": "LODESTAR_PRESET=minimal node --loader ts-node/esm 
test/sim/multi_fork.test.ts", "test:sim:mixedclient": "LODESTAR_PRESET=minimal node --loader ts-node/esm test/sim/mixed_client.test.ts", "test:sim:endpoints": "LODESTAR_PRESET=minimal node --loader ts-node/esm test/sim/endpoints.test.ts", @@ -65,17 +65,17 @@ "@libp2p/crypto": "^2.0.4", "@libp2p/peer-id": "^3.0.2", "@libp2p/peer-id-factory": "^3.0.4", - "@lodestar/api": "^1.12.0", - "@lodestar/beacon-node": "^1.12.0", - "@lodestar/config": "^1.12.0", - "@lodestar/db": "^1.12.0", - "@lodestar/light-client": "^1.12.0", - "@lodestar/logger": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/state-transition": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", - "@lodestar/validator": "^1.12.0", + "@lodestar/api": "^1.13.0", + "@lodestar/beacon-node": "^1.13.0", + "@lodestar/config": "^1.13.0", + "@lodestar/db": "^1.13.0", + "@lodestar/light-client": "^1.13.0", + "@lodestar/logger": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/state-transition": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", + "@lodestar/validator": "^1.13.0", "@multiformats/multiaddr": "^12.1.3", "@types/lockfile": "^1.0.2", "bip39": "^3.1.0", @@ -88,7 +88,7 @@ "js-yaml": "^4.1.0", "lockfile": "^1.0.4", "lodash": "^4.17.21", - "prom-client": "^14.2.0", + "prom-client": "^15.1.0", "rimraf": "^4.4.1", "source-map-support": "^0.5.21", "uint8arrays": "^4.0.3", @@ -96,7 +96,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.12.0", + "@lodestar/test-utils": "^1.13.0", "@types/debug": "^4.1.7", "@types/expand-tilde": "^2.0.0", "@types/got": "^9.6.12", diff --git a/packages/cli/src/cmds/beacon/index.ts b/packages/cli/src/cmds/beacon/index.ts index 0b2e431aec04..38d1d4cad221 100644 --- a/packages/cli/src/cmds/beacon/index.ts +++ b/packages/cli/src/cmds/beacon/index.ts @@ -6,6 +6,7 @@ import {beaconHandler} from "./handler.js"; export const beacon: CliCommand = { command: "beacon", describe: "Run a beacon chain 
node", + docsFolder: "beacon-management", examples: [ { command: "beacon --network goerli", diff --git a/packages/cli/src/cmds/beacon/options.ts b/packages/cli/src/cmds/beacon/options.ts index 3947e2ba17d0..c9918b5d2e41 100644 --- a/packages/cli/src/cmds/beacon/options.ts +++ b/packages/cli/src/cmds/beacon/options.ts @@ -1,7 +1,6 @@ -import {Options} from "yargs"; import {beaconNodeOptions, paramsOptions, BeaconNodeArgs} from "../../options/index.js"; import {LogArgs, logOptions} from "../../options/logOptions.js"; -import {CliCommandOptions} from "../../util/index.js"; +import {CliCommandOptions, CliOptionDefinition} from "../../util/index.js"; import {defaultBeaconPaths, BeaconPaths} from "./paths.js"; type BeaconExtraArgs = { @@ -144,7 +143,7 @@ type ENRArgs = { nat?: boolean; }; -const enrOptions: Record = { +const enrOptions: Record = { "enr.ip": { description: "Override ENR IP entry", type: "string", @@ -184,7 +183,7 @@ const enrOptions: Record = { export type BeaconArgs = BeaconExtraArgs & LogArgs & BeaconPaths & BeaconNodeArgs & ENRArgs; -export const beaconOptions: {[k: string]: Options} = { +export const beaconOptions: {[k: string]: CliOptionDefinition} = { ...beaconExtraOptions, ...logOptions, ...beaconNodeOptions, diff --git a/packages/cli/src/cmds/bootnode/handler.ts b/packages/cli/src/cmds/bootnode/handler.ts index be639eb1bf4b..7bf9169cfdc6 100644 --- a/packages/cli/src/cmds/bootnode/handler.ts +++ b/packages/cli/src/cmds/bootnode/handler.ts @@ -1,6 +1,6 @@ import path from "node:path"; import {Multiaddr, multiaddr} from "@multiformats/multiaddr"; -import {Discv5, ENR} from "@chainsafe/discv5"; +import {Discv5, ENR, IDiscv5CreateOptions} from "@chainsafe/discv5"; import {ErrorAborted} from "@lodestar/utils"; import {HttpMetricsServer, RegistryMetricCreator, getHttpMetricsServer} from "@lodestar/beacon-node"; @@ -58,7 +58,7 @@ export async function bootnodeHandler(args: BootnodeArgs & GlobalArgs): Promise< ip6: bindAddrs.ip6 ? 
multiaddr(bindAddrs.ip6) : undefined, }, config: {enrUpdate: !enr.ip && !enr.ip6}, - metricsRegistry, + metricsRegistry: metricsRegistry as IDiscv5CreateOptions["metricsRegistry"], }); // If there are any bootnodes, add them to the routing table diff --git a/packages/cli/src/cmds/bootnode/index.ts b/packages/cli/src/cmds/bootnode/index.ts index c9a7db71eadc..4030c4a73b0f 100644 --- a/packages/cli/src/cmds/bootnode/index.ts +++ b/packages/cli/src/cmds/bootnode/index.ts @@ -7,6 +7,7 @@ export const bootnode: CliCommand = { command: "bootnode", describe: "Run a discv5 bootnode. This will NOT perform any beacon node functions, rather, it will run a discv5 service that allows nodes on the network to discover one another.", + docsFolder: "bootnode", options: bootnodeOptions as CliCommandOptions, handler: bootnodeHandler, }; diff --git a/packages/cli/src/cmds/bootnode/options.ts b/packages/cli/src/cmds/bootnode/options.ts index 622d7b2d506a..ab92ec00e155 100644 --- a/packages/cli/src/cmds/bootnode/options.ts +++ b/packages/cli/src/cmds/bootnode/options.ts @@ -1,6 +1,5 @@ -import {Options} from "yargs"; import {LogArgs, logOptions} from "../../options/logOptions.js"; -import {CliCommandOptions} from "../../util/index.js"; +import {CliOptionDefinition, CliCommandOptions} from "../../util/index.js"; import {MetricsArgs, options as metricsOptions} from "../../options/beaconNodeOptions/metrics.js"; import {defaultListenAddress, defaultP2pPort, defaultP2pPort6} from "../../options/beaconNodeOptions/network.js"; @@ -102,7 +101,7 @@ export const bootnodeExtraOptions: CliCommandOptions = { export type BootnodeArgs = BootnodeExtraArgs & LogArgs & MetricsArgs; -export const bootnodeOptions: {[k: string]: Options} = { +export const bootnodeOptions: {[k: string]: CliOptionDefinition} = { ...bootnodeExtraOptions, ...logOptions, ...metricsOptions, diff --git a/packages/cli/src/cmds/dev/index.ts b/packages/cli/src/cmds/dev/index.ts index 728e80b6ce28..d213c8b3218d 100644 --- 
a/packages/cli/src/cmds/dev/index.ts +++ b/packages/cli/src/cmds/dev/index.ts @@ -6,6 +6,7 @@ import {devHandler} from "./handler.js"; export const dev: CliCommand = { command: "dev", describe: "Quickly bootstrap a beacon node and multiple validators. Use for development and testing", + docsFolder: "contribution", examples: [ { command: "dev --genesisValidators 8 --reset", diff --git a/packages/cli/src/cmds/dev/options.ts b/packages/cli/src/cmds/dev/options.ts index ae3737646e4f..4665fe529776 100644 --- a/packages/cli/src/cmds/dev/options.ts +++ b/packages/cli/src/cmds/dev/options.ts @@ -1,5 +1,4 @@ -import {Options} from "yargs"; -import {CliCommandOptions} from "../../util/index.js"; +import {CliCommandOptions, CliOptionDefinition} from "../../util/index.js"; import {beaconOptions, BeaconArgs} from "../beacon/options.js"; import {NetworkName} from "../../networks/index.js"; import {beaconNodeOptions, globalOptions} from "../../options/index.js"; @@ -63,7 +62,7 @@ const devOwnOptions: CliCommandOptions = { * - and have api enabled by default (as it's used by validator) * Note: use beaconNodeOptions and globalOptions to make sure option key is correct */ -const externalOptionsOverrides: Partial> = { +const externalOptionsOverrides: Partial> = { // Custom paths different than regular beacon, validator paths // network="dev" will store all data in separate dir than other networks network: { diff --git a/packages/cli/src/cmds/lightclient/index.ts b/packages/cli/src/cmds/lightclient/index.ts index 6d2a8f1ecb4f..1fceb3823154 100644 --- a/packages/cli/src/cmds/lightclient/index.ts +++ b/packages/cli/src/cmds/lightclient/index.ts @@ -6,6 +6,7 @@ import {lightclientHandler} from "./handler.js"; export const lightclient: CliCommand = { command: "lightclient", describe: "Run lightclient", + docsFolder: "lightclient-prover", examples: [ { command: "lightclient --network goerli", diff --git a/packages/cli/src/cmds/validator/handler.ts 
b/packages/cli/src/cmds/validator/handler.ts index fe14cedbcca1..69e4610bff0e 100644 --- a/packages/cli/src/cmds/validator/handler.ts +++ b/packages/cli/src/cmds/validator/handler.ts @@ -9,7 +9,7 @@ import { defaultOptions, } from "@lodestar/validator"; import {routes} from "@lodestar/api"; -import {getMetrics, MetricsRegister} from "@lodestar/validator"; +import {getMetrics} from "@lodestar/validator"; import { RegistryMetricCreator, collectNodeJSMetrics, @@ -112,7 +112,7 @@ export async function validatorHandler(args: IValidatorCliArgs & GlobalArgs): Pr // Send version and network data for static registries const register = args["metrics"] || args["monitoring.endpoint"] ? new RegistryMetricCreator() : null; - const metrics = register && getMetrics(register as unknown as MetricsRegister, {version, commit, network}); + const metrics = register && getMetrics(register, {version, commit, network}); // Start metrics server if metrics are enabled. // Collect NodeJS metrics defined in the Lodestar repo @@ -170,6 +170,7 @@ export async function validatorHandler(args: IValidatorCliArgs & GlobalArgs): Pr distributed: args.distributed, useProduceBlockV3: args.useProduceBlockV3, broadcastValidation: parseBroadcastValidation(args.broadcastValidation), + blindedLocal: args.blindedLocal, }, metrics ); diff --git a/packages/cli/src/cmds/validator/index.ts b/packages/cli/src/cmds/validator/index.ts index 46d7f2327452..49c7211c740d 100644 --- a/packages/cli/src/cmds/validator/index.ts +++ b/packages/cli/src/cmds/validator/index.ts @@ -12,9 +12,11 @@ import {validatorHandler} from "./handler.js"; export const validator: CliCommand = { command: "validator", describe: "Run one or multiple validator clients", + docsFolder: "validator-management", examples: [ { command: "validator --network goerli", + title: "Base `validator` command", description: "Run one validator client with all the keystores available in the directory" + ` ${getAccountPaths({dataDir: ".goerli"}, 
"goerli").keystoresDir}`, diff --git a/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts b/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts index 90b502d79ac3..169ddb74ffda 100644 --- a/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts +++ b/packages/cli/src/cmds/validator/keymanager/decryptKeystores/threadPool.ts @@ -1,7 +1,12 @@ +import path from "node:path"; import {spawn, Pool, Worker, ModuleThread, QueuedTask} from "@chainsafe/threads"; import {DecryptKeystoreArgs, DecryptKeystoreWorkerAPI} from "./types.js"; import {maxPoolSize} from "./poolSize.js"; +// Worker constructor consider the path relative to the current working directory +const workerDir = + process.env.NODE_ENV === "test" ? "../../../../../lib/cmds/validator/keymanager/decryptKeystores" : "./"; + /** * Thread pool to decrypt keystores */ @@ -16,7 +21,7 @@ export class DecryptKeystoresThreadPool { ) { this.pool = Pool( () => - spawn(new Worker("./worker.js"), { + spawn(new Worker(path.join(workerDir, "worker.js")), { // The number below is big enough to almost disable the timeout // which helps during tests run on unpredictably slow hosts timeout: 5 * 60 * 1000, diff --git a/packages/cli/src/cmds/validator/options.ts b/packages/cli/src/cmds/validator/options.ts index 25400ecd16d5..41069cfbdd34 100644 --- a/packages/cli/src/cmds/validator/options.ts +++ b/packages/cli/src/cmds/validator/options.ts @@ -48,6 +48,7 @@ export type IValidatorCliArgs = AccountValidatorArgs & useProduceBlockV3?: boolean; broadcastValidation?: string; + blindedLocal?: boolean; importKeystores?: string[]; importKeystoresPassword?: string; @@ -241,7 +242,7 @@ export const validatorOptions: CliCommandOptions = { "builder.selection": { type: "string", description: "Builder block selection strategy `maxprofit`, `builderalways`, `builderonly` or `executiononly`", - defaultDescription: `\`${defaultOptions.builderSelection}\``, + defaultDescription: 
`${defaultOptions.builderSelection}`, group: "builder", }, @@ -257,6 +258,12 @@ export const validatorOptions: CliCommandOptions = { defaultDescription: `${defaultOptions.broadcastValidation}`, }, + blindedLocal: { + type: "string", + description: "Request fetching local block in blinded format for produceBlockV3", + defaultDescription: `${defaultOptions.blindedLocal}`, + }, + importKeystores: { alias: ["keystore"], // Backwards compatibility with old `validator import` cmdx description: "Path(s) to a directory or single file path to validator keystores, i.e. Launchpad validators", @@ -267,7 +274,7 @@ export const validatorOptions: CliCommandOptions = { importKeystoresPassword: { alias: ["passphraseFile"], // Backwards compatibility with old `validator import` cmd description: "Path to a file with password to decrypt all keystores from `importKeystores` option", - defaultDescription: "`./password.txt`", + defaultDescription: "./password.txt", type: "string", }, diff --git a/packages/cli/src/cmds/validator/signers/logSigners.ts b/packages/cli/src/cmds/validator/signers/logSigners.ts index 85d17ad323ae..85b7922cca15 100644 --- a/packages/cli/src/cmds/validator/signers/logSigners.ts +++ b/packages/cli/src/cmds/validator/signers/logSigners.ts @@ -1,5 +1,5 @@ import {Signer, SignerLocal, SignerRemote, SignerType} from "@lodestar/validator"; -import {LogLevel, Logger} from "@lodestar/utils"; +import {LogLevel, Logger, toSafePrintableUrl} from "@lodestar/utils"; /** * Log each pubkeys for auditing out keys are loaded from the logs @@ -27,7 +27,7 @@ export function logSigners(logger: Pick, signers: Signer[ } for (const {url, pubkeys} of groupExternalSignersByUrl(remoteSigners)) { - logger.info(`External signers on URL: ${url}`); + logger.info(`External signers on URL: ${toSafePrintableUrl(url)}`); for (const pubkey of pubkeys) { logger.info(pubkey); } diff --git a/packages/cli/src/options/beaconNodeOptions/builder.ts b/packages/cli/src/options/beaconNodeOptions/builder.ts 
index 7313d836a92c..96388ddfe2dd 100644 --- a/packages/cli/src/options/beaconNodeOptions/builder.ts +++ b/packages/cli/src/options/beaconNodeOptions/builder.ts @@ -1,18 +1,24 @@ import {defaultExecutionBuilderHttpOpts, IBeaconNodeOptions} from "@lodestar/beacon-node"; -import {CliCommandOptions} from "../../util/index.js"; +import {CliCommandOptions, YargsError} from "../../util/index.js"; export type ExecutionBuilderArgs = { builder: boolean; - "builder.urls"?: string[]; + "builder.url"?: string; "builder.timeout"?: number; "builder.faultInspectionWindow"?: number; "builder.allowedFaults"?: number; }; export function parseArgs(args: ExecutionBuilderArgs): IBeaconNodeOptions["executionBuilder"] { + if (Array.isArray(args["builder.url"]) || args["builder.url"]?.includes(",http")) { + throw new YargsError( + "Lodestar only supports a single builder URL. External tooling like mev-boost can be used to connect to multiple builder/relays" + ); + } + return { enabled: args["builder"], - urls: args["builder.urls"] ?? defaultExecutionBuilderHttpOpts.urls, + url: args["builder.url"] ?? 
defaultExecutionBuilderHttpOpts.url, timeout: args["builder.timeout"], faultInspectionWindow: args["builder.faultInspectionWindow"], allowedFaults: args["builder.allowedFaults"], @@ -27,14 +33,11 @@ export const options: CliCommandOptions = { group: "builder", }, - "builder.urls": { - description: "Urls hosting the builder API", - defaultDescription: defaultExecutionBuilderHttpOpts.urls.join(","), - type: "array", - string: true, - coerce: (urls: string[]): string[] => - // Parse ["url1,url2"] to ["url1", "url2"] - urls.map((item) => item.split(",")).flat(1), + "builder.url": { + alias: ["builder.urls"], + description: "Url hosting the builder API", + defaultDescription: defaultExecutionBuilderHttpOpts.url, + type: "string", group: "builder", }, diff --git a/packages/cli/src/options/beaconNodeOptions/chain.ts b/packages/cli/src/options/beaconNodeOptions/chain.ts index ce37135f9689..0c9280f5330c 100644 --- a/packages/cli/src/options/beaconNodeOptions/chain.ts +++ b/packages/cli/src/options/beaconNodeOptions/chain.ts @@ -7,6 +7,7 @@ export type ChainArgs = { "chain.blsVerifyAllMultiThread"?: boolean; "chain.blsVerifyAllMainThread"?: boolean; "chain.disableBlsBatchVerify"?: boolean; + "chain.persistProducedBlocks"?: boolean; "chain.persistInvalidSszObjects"?: boolean; // No need to define chain.persistInvalidSszObjects as part of ChainArgs // as this is defined as part of BeaconPaths @@ -33,6 +34,7 @@ export function parseArgs(args: ChainArgs): IBeaconNodeOptions["chain"] { blsVerifyAllMultiThread: args["chain.blsVerifyAllMultiThread"], blsVerifyAllMainThread: args["chain.blsVerifyAllMainThread"], disableBlsBatchVerify: args["chain.disableBlsBatchVerify"], + persistProducedBlocks: args["chain.persistProducedBlocks"], persistInvalidSszObjects: args["chain.persistInvalidSszObjects"], // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any persistInvalidSszObjectsDir: undefined as any, @@ -96,6 +98,13 @@ Will double 
processing times. Use only for debugging purposes.", group: "chain", }, + "chain.persistProducedBlocks": { + hidden: true, + type: "boolean", + description: "Persist produced blocks or not for debugging purpose", + group: "chain", + }, + "chain.persistInvalidSszObjects": { hidden: true, type: "boolean", diff --git a/packages/cli/src/options/paramsOptions.ts b/packages/cli/src/options/paramsOptions.ts index 49ddc1b563f5..643fb991bc61 100644 --- a/packages/cli/src/options/paramsOptions.ts +++ b/packages/cli/src/options/paramsOptions.ts @@ -1,7 +1,6 @@ -import {Options} from "yargs"; import {ChainConfig, chainConfigTypes} from "@lodestar/config"; import {IBeaconParamsUnparsed} from "../config/types.js"; -import {ObjectKeys, CliCommandOptions} from "../util/index.js"; +import {ObjectKeys, CliCommandOptions, CliOptionDefinition} from "../util/index.js"; // No options are statically declared // If an arbitrary key notation is used, it removes type safety on most of this CLI arg parsing code. @@ -25,7 +24,7 @@ export function parseBeaconParamsArgs(args: Record): IB } const paramsOptionsByName = ObjectKeys(chainConfigTypes).reduce( - (options: Record, key): Record => ({ + (options: Record, key): Record => ({ ...options, [getArgKey(key)]: { hidden: true, diff --git a/packages/cli/src/util/command.ts b/packages/cli/src/util/command.ts index 32d7b24e02bf..0dd2fd82bc9f 100644 --- a/packages/cli/src/util/command.ts +++ b/packages/cli/src/util/command.ts @@ -1,17 +1,32 @@ import {Options, Argv} from "yargs"; +export interface CliExample { + command: string; + title?: string; + description?: string; +} + +export interface CliOptionDefinition extends Options { + example?: Omit; +} + export type CliCommandOptions = Required<{ [K in keyof OwnArgs]: undefined extends OwnArgs[K] - ? Options + ? 
CliOptionDefinition : // If arg cannot be undefined it must specify a default value - Options & Required>; + CliOptionDefinition & Required>; }>; // eslint-disable-next-line @typescript-eslint/no-explicit-any export interface CliCommand, ParentArgs = Record, R = any> { command: string; describe: string; - examples?: {command: string; description: string}[]; + /** + * The folder in docs/pages that the cli.md should be placed in. If not provided no + * cli flags page will be generated for the command + */ + docsFolder?: string; + examples?: CliExample[]; options?: CliCommandOptions; // 1st arg: any = free own sub command options // 2nd arg: subcommand parent options is = to this command options + parent options @@ -37,7 +52,7 @@ export function registerCommandToYargs(yargs: Argv, cliCommand: CliCommand { const restPort = 9596; @@ -25,7 +26,7 @@ describe("bLSToExecutionChange cmd", function () { // Speed up test to make genesis happen faster "--params.SECONDS_PER_SLOT=2", ], - {pipeStdioToParent: false, logPrefix: "dev"} + {pipeStdioToParent: true, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits diff --git a/packages/cli/test/e2e/importFromFsDirect.test.ts b/packages/cli/test/e2e/importFromFsDirect.test.ts index 9d64421c97af..df53e0f973bb 100644 --- a/packages/cli/test/e2e/importFromFsDirect.test.ts +++ b/packages/cli/test/e2e/importFromFsDirect.test.ts @@ -1,21 +1,20 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; describe("import from fs same cmd as validate", function () { - const testContext = 
getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-and-validate-test"); const importFromDir = path.join(dataDir, "eth2.0_deposit_out"); const passphraseFilepath = path.join(importFromDir, "password.text"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); rimraf.sync(importFromDir); }); @@ -25,7 +24,7 @@ describe("import from fs same cmd as validate", function () { const pubkeys = cachedPubkeysHex.slice(0, keyCount); const secretKeys = cachedSeckeysHex.slice(0, keyCount); - before("write keystores to disk", async () => { + beforeAll(async () => { // Produce and encrypt keystores const keystoresStr = await getKeystoresStr(passphrase, secretKeys); @@ -41,7 +40,6 @@ describe("import from fs same cmd as validate", function () { const {keymanagerClient} = await startValidatorWithKeyManager([], { dataDir, logPrefix: "case-1", - testContext, }); await expectKeys(keymanagerClient, [], "Wrong listKeys response data"); @@ -51,7 +49,7 @@ describe("import from fs same cmd as validate", function () { it("run 'validator' check keys are loaded", async () => { const {keymanagerClient} = await startValidatorWithKeyManager( [`--importKeystores=${importFromDir}`, `--importKeystoresPassword=${passphraseFilepath}`], - {dataDir, logPrefix: "case-2", testContext} + {dataDir, logPrefix: "case-2"} ); await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); diff --git a/packages/cli/test/e2e/importFromFsPreStep.test.ts b/packages/cli/test/e2e/importFromFsPreStep.test.ts index efbe7a6b35e4..ae9ac3321a05 100644 --- a/packages/cli/test/e2e/importFromFsPreStep.test.ts +++ b/packages/cli/test/e2e/importFromFsPreStep.test.ts @@ -1,8 +1,7 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, expect, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; -import {getMochaContext} from 
"@lodestar/test-utils/mocha"; import {execCliCommand} from "@lodestar/test-utils"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; @@ -10,14 +9,13 @@ import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; describe("import from fs then validate", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-then-validate-test"); const importFromDir = path.join(dataDir, "eth2.0_deposit_out"); const passphraseFilepath = path.join(importFromDir, "password.text"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); rimraf.sync(importFromDir); }); @@ -45,7 +43,7 @@ describe("import from fs then validate", function () { ]); for (let i = 0; i < keyCount; i++) { - expect(stdout).includes(pubkeys[i], `stdout should include imported pubkey[${i}]`); + expect(stdout).toContain(pubkeys[i]); } }); @@ -56,12 +54,12 @@ describe("import from fs then validate", function () { const stdout = await execCliCommand("packages/cli/bin/lodestar.js", ["validator list", `--dataDir ${dataDir}`]); for (let i = 0; i < keyCount; i++) { - expect(stdout).includes(pubkeys[i], `stdout should include imported pubkey[${i}]`); + expect(stdout).toContain(pubkeys[i]); } }); it("run 'validator' check keys are loaded", async function () { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys response data"); }); diff --git a/packages/cli/test/e2e/importKeystoresFromApi.test.ts b/packages/cli/test/e2e/importKeystoresFromApi.test.ts index dcd0f38b2182..bb91d467b86a 100644 --- a/packages/cli/test/e2e/importKeystoresFromApi.test.ts +++ 
b/packages/cli/test/e2e/importKeystoresFromApi.test.ts @@ -1,12 +1,11 @@ import path from "node:path"; +import {describe, it, expect, beforeAll, vi, afterAll, beforeEach, afterEach} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {DeletionStatus, getClient, ImportStatus} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {Interchange} from "@lodestar/validator"; import {ApiError, HttpStatusCode} from "@lodestar/api"; import {bufferStderr, spawnCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; @@ -14,12 +13,11 @@ import {expectDeepEquals} from "../utils/runUtils.js"; import {expectKeys, startValidatorWithKeyManager} from "../utils/validator.js"; describe("import keystores from api", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-keystores-test"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); @@ -57,7 +55,7 @@ describe("import keystores from api", function () { const slashingProtectionStr = JSON.stringify(slashingProtection); it("run 'validator' and import remote keys from API", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Produce and encrypt keystores const keystoresStr = await getKeystoresStr(passphrase, secretKeys); @@ -88,6 +86,7 @@ describe("import keystores from api", function () { // Attempt to run a second process and expect the keystore lock to throw const validator = await spawnCliCommand("packages/cli/bin/lodestar.js", ["validator", "--dataDir", dataDir], { logPrefix: 
"vc-2", + testContext: {beforeEach, afterEach, afterAll}, }); await new Promise((resolve, reject) => { @@ -112,7 +111,7 @@ describe("import keystores from api", function () { }); it("run 'validator' check keys are loaded + delete", async function () { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Check that keys imported in previous it() are still there await expectKeys(keymanagerClient, pubkeys, "Wrong listKeys before deleting"); @@ -131,17 +130,17 @@ describe("import keystores from api", function () { }); it("different process check no keys are loaded", async function () { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // After deleting there should be no keys await expectKeys(keymanagerClient, [], "Wrong listKeys"); }); it("reject calls without bearerToken", async function () { - await startValidatorWithKeyManager([], {dataDir, testContext}); + await startValidatorWithKeyManager([], {dataDir}); const keymanagerClientNoAuth = getClient({baseUrl: "http://localhost:38011", bearerToken: undefined}, {config}); const res = await keymanagerClientNoAuth.listRemoteKeys(); - expect(res.ok).to.be.false; - expect(res.error?.code).to.be.eql(HttpStatusCode.UNAUTHORIZED); + expect(res.ok).toBe(false); + expect(res.error?.code).toEqual(HttpStatusCode.UNAUTHORIZED); }); }); diff --git a/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts b/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts index 7f36a6876fd0..fd2193060ddd 100644 --- a/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts +++ b/packages/cli/test/e2e/importRemoteKeysFromApi.test.ts @@ -1,10 +1,9 @@ import path from "node:path"; +import {describe, it, expect, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import 
{Api, DeleteRemoteKeyStatus, getClient, ImportRemoteKeyStatus} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {ApiError, HttpStatusCode} from "@lodestar/api"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex} from "../utils/cachedKeys.js"; import {expectDeepEquals} from "../utils/runUtils.js"; @@ -23,12 +22,11 @@ async function expectKeys(keymanagerClient: Api, expectedPubkeys: string[], mess } describe("import remoteKeys from api", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); const dataDir = path.join(testFilesDir, "import-remoteKeys-test"); - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); @@ -36,7 +34,7 @@ describe("import remoteKeys from api", function () { const pubkeysToAdd = [cachedPubkeysHex[0], cachedPubkeysHex[1]]; it("run 'validator' and import remote keys from API", async () => { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Wrap in retry since the API may not be listening yet await expectKeys(keymanagerClient, [], "Wrong listRemoteKeys before importing"); @@ -64,7 +62,7 @@ describe("import remoteKeys from api", function () { }); it("run 'validator' check keys are loaded + delete", async function () { - const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir, testContext}); + const {keymanagerClient} = await startValidatorWithKeyManager([], {dataDir}); // Check that keys imported in previous it() are still there await expectKeys(keymanagerClient, pubkeysToAdd, "Wrong listRemoteKeys before deleting"); @@ -82,11 +80,11 @@ describe("import remoteKeys from api", function () { }); it("reject calls without bearerToken", async function () { - await startValidatorWithKeyManager([], {dataDir, 
testContext}); + await startValidatorWithKeyManager([], {dataDir}); const keymanagerUrl = "http://localhost:38011"; const keymanagerClientNoAuth = getClient({baseUrl: keymanagerUrl, bearerToken: undefined}, {config}); const res = await keymanagerClientNoAuth.listRemoteKeys(); - expect(res.ok).to.be.false; - expect(res.error?.code).to.be.eql(HttpStatusCode.UNAUTHORIZED); + expect(res.ok).toBe(false); + expect(res.error?.code).toEqual(HttpStatusCode.UNAUTHORIZED); }); }); diff --git a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts index 9d6eeafedfd3..eff3a488c898 100644 --- a/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts +++ b/packages/cli/test/e2e/propserConfigfromKeymanager.test.ts @@ -1,8 +1,8 @@ import path from "node:path"; +import {describe, it, beforeAll, vi} from "vitest"; import {rimraf} from "rimraf"; import {Interchange} from "@lodestar/validator"; import {ApiError} from "@lodestar/api"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {getKeystoresStr} from "@lodestar/test-utils"; import {testFilesDir} from "../utils.js"; import {cachedPubkeysHex, cachedSeckeysHex} from "../utils/cachedKeys.js"; @@ -10,8 +10,8 @@ import {expectDeepEquals} from "../utils/runUtils.js"; import {startValidatorWithKeyManager} from "../utils/validator.js"; describe("import keystores from api, test DefaultProposerConfig", function () { - this.timeout("30s"); - const testContext = getMochaContext(this); + vi.setConfig({testTimeout: 30_000}); + const dataDir = path.join(testFilesDir, "proposer-config-test"); const defaultOptions = { @@ -26,7 +26,7 @@ describe("import keystores from api, test DefaultProposerConfig", function () { graffiti: "bbbb", }; - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); @@ -51,7 +51,6 @@ describe("import keystores from api, test DefaultProposerConfig", function () { it("1 . 
run 'validator' import keys from API, getdefaultfeeRecipient", async () => { const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { dataDir, - testContext, }); // Produce and encrypt keystores // Import test keys @@ -122,7 +121,6 @@ describe("import keystores from api, test DefaultProposerConfig", function () { it("2 . run 'validator' Check last feeRecipient and gasLimit persists", async () => { const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { dataDir, - testContext, }); // next time check edited feeRecipient persists @@ -185,7 +183,6 @@ describe("import keystores from api, test DefaultProposerConfig", function () { it("3 . run 'validator' FeeRecipient and GasLimit should be default after delete", async () => { const {keymanagerClient} = await startValidatorWithKeyManager([`--graffiti ${defaultOptions.graffiti}`], { dataDir, - testContext, }); const feeRecipient0 = await keymanagerClient.listFeeRecipient(pubkeys[0]); diff --git a/packages/cli/test/e2e/runDevCmd.test.ts b/packages/cli/test/e2e/runDevCmd.test.ts index 69c8989f1788..c7f51b45045e 100644 --- a/packages/cli/test/e2e/runDevCmd.test.ts +++ b/packages/cli/test/e2e/runDevCmd.test.ts @@ -1,12 +1,11 @@ +import {describe, it, vi, beforeEach, afterEach, afterAll} from "vitest"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {retry} from "@lodestar/utils"; import {spawnCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; describe("Run dev command", function () { - const testContext = getMochaContext(this); - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); it("Run dev command with no --dataDir until beacon api is listening", async () => { const beaconPort = 39011; @@ -14,7 +13,7 @@ describe("Run dev command", function () { const devProc = await spawnCliCommand( 
"packages/cli/bin/lodestar.js", ["dev", "--reset", "--startValidators=0..7", `--rest.port=${beaconPort}`], - {pipeStdioToParent: true, logPrefix: "dev", testContext} + {pipeStdioToParent: true, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits diff --git a/packages/cli/test/e2e/validatorList.test.ts b/packages/cli/test/e2e/validatorList.test.ts index ba2102f07fee..b6fe4da5faeb 100644 --- a/packages/cli/test/e2e/validatorList.test.ts +++ b/packages/cli/test/e2e/validatorList.test.ts @@ -1,25 +1,35 @@ /* eslint-disable no-console */ import fs from "node:fs"; import path from "node:path"; +import {describe, it, beforeAll, vi, expect, afterEach, beforeEach} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {Keystore} from "@chainsafe/bls-keystore"; import {fromHex} from "@lodestar/utils"; import {runCliCommand} from "@lodestar/test-utils"; -import {stubLogger} from "@lodestar/test-utils/sinon"; import {testFilesDir} from "../utils.js"; import {getLodestarCli} from "../../src/cli.js"; describe("cmds / validator", function () { - this.timeout("30s"); - stubLogger(this, console); + vi.setConfig({testTimeout: 30_000}); + const lodestar = getLodestarCli(); const dataDir = testFilesDir; - before("Clean dataDir", () => { + beforeAll(() => { rimraf.sync(dataDir); }); + beforeEach(() => { + vi.spyOn(console, "log"); + vi.spyOn(console, "info"); + vi.spyOn(console, "error"); + vi.spyOn(console, "debug"); + }); + + afterEach(() => { + vi.resetAllMocks(); + }); + /** Generated from const sk = bls.SecretKey.fromKeygen(Buffer.alloc(32, 0xaa)); */ const skHex = "0x0e5bd52621b6a8956086dcf0ecc89f0cdca56cebb2a8516c2d4252a9867fc551"; const pkHex = "0x8be678633e927aa0435addad5dcd5283fef6110d91362519cd6d43e61f6c017d724fa579cc4b2972134e050b6ba120c0"; @@ -41,7 +51,7 @@ describe("cmds / validator", function () { `--passphraseFile ${passphraseFilepath}`, ]); - expect(console.log).be.calledWith(`Imported keystore 
${pkHex} ${keystoreFilepath}`); + expect(console.log).toHaveBeenCalledWith(`Imported keystore ${pkHex} ${keystoreFilepath}`); }); it("should list validators", async function () { @@ -50,7 +60,7 @@ describe("cmds / validator", function () { await runCliCommand(lodestar, ["validator list", `--dataDir ${dataDir}`], {timeoutMs: 5000}); - expect(console.info).calledWith("1 local keystores"); - expect(console.info).calledWith(pkHex); + expect(console.info).toHaveBeenCalledWith("1 local keystores"); + expect(console.info).toHaveBeenCalledWith(pkHex); }); }); diff --git a/packages/cli/test/e2e/voluntaryExit.test.ts b/packages/cli/test/e2e/voluntaryExit.test.ts index b3a539473581..89841fb7c3e4 100644 --- a/packages/cli/test/e2e/voluntaryExit.test.ts +++ b/packages/cli/test/e2e/voluntaryExit.test.ts @@ -1,15 +1,14 @@ import path from "node:path"; +import {afterAll, describe, it, vi, beforeEach, afterEach} from "vitest"; import {retry} from "@lodestar/utils"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; import {interopSecretKey} from "@lodestar/state-transition"; import {spawnCliCommand, execCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; describe("voluntaryExit cmd", function () { - const testContext = getMochaContext(this); - this.timeout("60s"); + vi.setConfig({testTimeout: 60_000}); it("Perform a voluntary exit", async () => { const restPort = 9596; @@ -29,7 +28,7 @@ describe("voluntaryExit cmd", function () { // Allow voluntary exists to be valid immediately "--params.SHARD_COMMITTEE_PERIOD=0", ], - {pipeStdioToParent: false, logPrefix: "dev", testContext} + {pipeStdioToParent: true, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits diff --git a/packages/cli/test/e2e/voluntaryExitFromApi.test.ts b/packages/cli/test/e2e/voluntaryExitFromApi.test.ts index 
a06cc2025af3..ed4439d36ab2 100644 --- a/packages/cli/test/e2e/voluntaryExitFromApi.test.ts +++ b/packages/cli/test/e2e/voluntaryExitFromApi.test.ts @@ -1,17 +1,15 @@ import path from "node:path"; -import {expect} from "chai"; +import {describe, it, vi, expect, afterAll, beforeEach, afterEach} from "vitest"; import {ApiError, getClient} from "@lodestar/api"; import {getClient as getKeymanagerClient} from "@lodestar/api/keymanager"; import {config} from "@lodestar/config/default"; import {interopSecretKey} from "@lodestar/state-transition"; import {spawnCliCommand} from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {retry} from "@lodestar/utils"; import {testFilesDir} from "../utils.js"; describe("voluntary exit from api", function () { - const testContext = getMochaContext(this); - this.timeout("60s"); + vi.setConfig({testTimeout: 60_000}); it("Perform a voluntary exit", async () => { // Start dev node with keymanager @@ -39,7 +37,7 @@ describe("voluntary exit from api", function () { // Disable bearer token auth to simplify testing "--keymanager.authEnabled=false", ], - {pipeStdioToParent: false, logPrefix: "dev", testContext} + {pipeStdioToParent: false, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit early if process exits @@ -71,10 +69,10 @@ describe("voluntary exit from api", function () { ApiError.assert(res); const signedVoluntaryExit = res.response.data; - expect(signedVoluntaryExit.message.epoch).to.equal(exitEpoch); - expect(signedVoluntaryExit.message.validatorIndex).to.equal(indexToExit); + expect(signedVoluntaryExit.message.epoch).toBe(exitEpoch); + expect(signedVoluntaryExit.message.validatorIndex).toBe(indexToExit); // Signature will be verified when submitting to beacon node - expect(signedVoluntaryExit.signature).to.not.be.undefined; + expect(signedVoluntaryExit.signature).toBeDefined(); // 2. 
submit signed voluntary exit message to beacon node ApiError.assert(await beaconClient.submitPoolVoluntaryExit(signedVoluntaryExit)); diff --git a/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts b/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts index 4c6fdab11f2a..b2f902c0e6dd 100644 --- a/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts +++ b/packages/cli/test/e2e/voluntaryExitRemoteSigner.test.ts @@ -1,4 +1,5 @@ import path from "node:path"; +import {describe, it, beforeAll, afterAll, beforeEach, afterEach, vi} from "vitest"; import {retry} from "@lodestar/utils"; import {ApiError, getClient} from "@lodestar/api"; import {config} from "@lodestar/config/default"; @@ -10,15 +11,14 @@ import { StartedExternalSigner, getKeystoresStr, } from "@lodestar/test-utils"; -import {getMochaContext} from "@lodestar/test-utils/mocha"; import {testFilesDir} from "../utils.js"; describe("voluntaryExit using remote signer", function () { - this.timeout("30s"); + vi.setConfig({testTimeout: 30_000}); let externalSigner: StartedExternalSigner; - before("start external signer container", async () => { + beforeAll(async () => { const password = "password"; externalSigner = await startExternalSigner({ keystoreStrings: await getKeystoresStr( @@ -29,13 +29,11 @@ describe("voluntaryExit using remote signer", function () { }); }); - after("stop external signer container", async () => { + afterAll(async () => { await externalSigner.container.stop(); }); it("Perform a voluntary exit", async () => { - const testContext = getMochaContext(this); - const restPort = 9596; const devBnProc = await spawnCliCommand( "packages/cli/bin/lodestar.js", @@ -52,7 +50,7 @@ describe("voluntaryExit using remote signer", function () { // Allow voluntary exists to be valid immediately "--params.SHARD_COMMITTEE_PERIOD=0", ], - {pipeStdioToParent: false, logPrefix: "dev", testContext} + {pipeStdioToParent: false, logPrefix: "dev", testContext: {beforeEach, afterEach, afterAll}} ); // Exit 
early if process exits diff --git a/packages/cli/test/globalSetup.ts b/packages/cli/test/globalSetup.ts new file mode 100644 index 000000000000..02074bb24d11 --- /dev/null +++ b/packages/cli/test/globalSetup.ts @@ -0,0 +1,4 @@ +export async function setup(): Promise { + process.env.NODE_ENV = "test"; +} +export async function teardown(): Promise {} diff --git a/packages/cli/test/setup.ts b/packages/cli/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/cli/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/cli/test/sim/multi_fork.test.ts b/packages/cli/test/sim/multi_fork.test.ts index 1888195a4745..734ae5c5a380 100644 --- a/packages/cli/test/sim/multi_fork.test.ts +++ b/packages/cli/test/sim/multi_fork.test.ts @@ -2,6 +2,7 @@ import path from "node:path"; import {sleep, toHex, toHexString} from "@lodestar/utils"; import {ApiError} from "@lodestar/api"; +import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {CLIQUE_SEALING_PERIOD, SIM_TESTS_SECONDS_PER_SLOT} from "../utils/simulation/constants.js"; import {AssertionMatch, BeaconClient, ExecutionClient, ValidatorClient} from "../utils/simulation/interfaces.js"; import {SimulationEnvironment} from "../utils/simulation/SimulationEnvironment.js"; @@ -64,10 +65,11 @@ const env = await SimulationEnvironment.initWithDefaults( validator: { type: ValidatorClient.Lodestar, options: { + // this will cause race in beacon but since builder is not attached will + // return with engine full block and publish via publishBlockV2 clientOptions: { useProduceBlockV3: true, - // default builder selection will cause a race try in beacon even if builder is not set - // but not to worry, execution block will be selected as fallback anyway + "builder.selection": "maxprofit", }, }, }, @@ -81,12 +83,12 @@ const env = await 
SimulationEnvironment.initWithDefaults( validator: { type: ValidatorClient.Lodestar, options: { + // this will make the beacon respond with blinded version of the local block as no + // builder is attached to beacon, and publish via publishBlindedBlockV2 clientOptions: { - useProduceBlockV3: false, - // default builder selection of max profit will make it use produceBlindedBlock - // but not to worry, execution block will be selected as fallback anyway - // but returned in blinded format for validator to use publish blinded block - // which assembles block beacon side from local cache before publishing + useProduceBlockV3: true, + "builder.selection": "maxprofit", + blindedLocal: true, }, }, }, @@ -100,9 +102,9 @@ const env = await SimulationEnvironment.initWithDefaults( validator: { type: ValidatorClient.Lodestar, options: { + // this builder selection will make it use produceBlockV2 and respond with full block clientOptions: { useProduceBlockV3: false, - // this builder selection will make it use produceBlockV2 "builder.selection": "executiononly", }, }, @@ -110,7 +112,24 @@ const env = await SimulationEnvironment.initWithDefaults( execution: ExecutionClient.Nethermind, keysCount: 32, }, - {id: "node-4", beacon: BeaconClient.Lighthouse, execution: ExecutionClient.Geth, keysCount: 32}, + { + id: "node-4", + beacon: BeaconClient.Lodestar, + validator: { + type: ValidatorClient.Lodestar, + options: { + // this builder selection will make it use produceBlindedBlockV2 and respond with blinded version + // of local block and subsequent publishing via publishBlindedBlock + clientOptions: { + useProduceBlockV3: false, + "builder.selection": "maxprofit", + }, + }, + }, + execution: ExecutionClient.Nethermind, + keysCount: 32, + }, + {id: "node-5", beacon: BeaconClient.Lighthouse, execution: ExecutionClient.Geth, keysCount: 32}, ] ); @@ -213,7 +232,8 @@ const unknownBlockSync = await env.createNodePair({ // unknown block sync can work only if the gap is maximum 
`slotImportTolerance * 2` // default value for slotImportTolerance is one epoch, so if gap is more than 2 epoch // unknown block sync will not work. So why we have to increase it for tests. - "sync.slotImportTolerance": headForUnknownBlockSync.response.data.message.slot / 2 + 2, + // Adding SLOTS_PER_EPOCH will cover the case if the node starts on the last slot of epoch + "sync.slotImportTolerance": headForUnknownBlockSync.response.data.message.slot / 2 + SLOTS_PER_EPOCH, }, }, }, diff --git a/packages/cli/test/unit/cmds/beacon.test.ts b/packages/cli/test/unit/cmds/beacon.test.ts index 08367ad01309..7d111b3362a8 100644 --- a/packages/cli/test/unit/cmds/beacon.test.ts +++ b/packages/cli/test/unit/cmds/beacon.test.ts @@ -1,6 +1,6 @@ import path from "node:path"; import fs from "node:fs"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createFromJSON, createSecp256k1PeerId} from "@libp2p/peer-id-factory"; import {multiaddr} from "@multiformats/multiaddr"; import {createKeypairFromPeerId, ENR, SignableENR} from "@chainsafe/discv5"; @@ -33,8 +33,8 @@ describe("cmds / beacon / args handler", () => { }); const bootEnrs = options.network.discv5?.bootEnrs ?? 
[]; - expect(bootEnrs.includes(enr1)).to.be.true; - expect(bootEnrs.includes(enr2)).to.be.true; + expect(bootEnrs.includes(enr1)).toBe(true); + expect(bootEnrs.includes(enr2)).toBe(true); }); it("Over-write ENR fields", async () => { @@ -50,15 +50,15 @@ describe("cmds / beacon / args handler", () => { const enr = ENR.decodeTxt(options.network.discv5?.enr as string); - expect(enr.ip).to.equal(enrIp, "wrong enr.ip"); - expect(enr.tcp).to.equal(enrTcp, "wrong enr.tcp"); + expect(enr.ip).toBe(enrIp); + expect(enr.tcp).toBe(enrTcp); }); it("Create different PeerId every run", async () => { const {peerId: peerId1} = await runBeaconHandlerInit({}); const {peerId: peerId2} = await runBeaconHandlerInit({}); - expect(peerId1.toString()).not.equal(peerId2.toString(), "peer ids must be different"); + expect(peerId1.toString()).not.toBe(peerId2.toString()); }); it("Re-use existing peer", async () => { @@ -74,7 +74,7 @@ describe("cmds / beacon / args handler", () => { persistNetworkIdentity: true, }); - expect(peerId.toString()).equal(prevPeerId.toString(), "peer must be equal to persisted"); + expect(peerId.toString()).toBe(prevPeerId.toString()); }); it("Set known deposit contract", async () => { @@ -83,7 +83,7 @@ describe("cmds / beacon / args handler", () => { }); // Okay to hardcode, since this value will never change - expect(options.eth1.depositContractDeployBlock).equal(11052984, "Wrong mainnet eth1.depositContractDeployBlock"); + expect(options.eth1.depositContractDeployBlock).toBe(11052984); }); it("Apply custom network name from config file", async () => { @@ -99,19 +99,19 @@ describe("cmds / beacon / args handler", () => { }); // Okay to hardcode, since this value will never change - expect(network).equal(networkName, "Wrong network name"); + expect(network).toBe(networkName); }); }); describe("Test isLocalMultiAddr", () => { it("should return true for 127.0.0.1", () => { const multi0 = multiaddr("/ip4/127.0.0.1/udp/30303"); - 
expect(isLocalMultiAddr(multi0)).to.equal(true); + expect(isLocalMultiAddr(multi0)).toBe(true); }); it("should return false for 0.0.0.0", () => { const multi0 = multiaddr("/ip4/0.0.0.0/udp/30303"); - expect(isLocalMultiAddr(multi0)).to.equal(false); + expect(isLocalMultiAddr(multi0)).toBe(false); }); }); @@ -128,7 +128,7 @@ describe("initPeerIdAndEnr", () => { testLogger() ); - expect(peerId1.toString()).not.equal(peerId2.toString(), "peer ids must be different"); + expect(peerId1.toString()).not.toBe(peerId2.toString()); }); it("should reuse peer id, persistNetworkIdentity=true", async () => { @@ -143,7 +143,7 @@ describe("initPeerIdAndEnr", () => { testLogger() ); - expect(peerId1.toString()).to.equal(peerId2.toString(), "peer ids must be equal"); + expect(peerId1.toString()).toBe(peerId2.toString()); }); it("should overwrite invalid peer id", async () => { @@ -157,8 +157,8 @@ describe("initPeerIdAndEnr", () => { ); const filePeerId = await createFromJSON(JSON.parse(fs.readFileSync(peerIdFile, "utf-8"))); - expect(peerId1Str).not.equal(peerId2.toString(), "peer ids must be different"); - expect(filePeerId.toString()).to.equal(peerId2.toString(), "peer ids must be equal"); + expect(peerId1Str).not.toBe(peerId2.toString()); + expect(filePeerId.toString()).toBe(peerId2.toString()); }); it("should overwrite invalid enr", async () => { @@ -170,7 +170,7 @@ describe("initPeerIdAndEnr", () => { const validEnr = fs.readFileSync(enrFilePath, "utf-8"); - expect(validEnr).not.equal(invalidEnr, "enrs must be different"); + expect(validEnr).not.toBe(invalidEnr); }); it("should overwrite enr that doesn't match peer id", async () => { @@ -182,7 +182,7 @@ describe("initPeerIdAndEnr", () => { const {enr} = await initPeerIdAndEnr({persistNetworkIdentity: true} as BeaconArgs, testFilesDir, testLogger()); - expect(enr.nodeId).not.equal(otherEnr, "enrs must be different"); + expect(enr.nodeId).not.toBe(otherEnr); }); }); diff --git 
a/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts b/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts index 4bdfedf64b95..a207e0c0f59d 100644 --- a/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts +++ b/packages/cli/test/unit/cmds/initPeerIdAndEnr.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import tmp from "tmp"; -import {expect} from "chai"; import {initPeerIdAndEnr} from "../../../src/cmds/beacon/initPeerIdAndEnr.js"; import {BeaconArgs} from "../../../src/cmds/beacon/options.js"; import {testLogger} from "../../utils.js"; @@ -23,12 +23,11 @@ describe("initPeerIdAndEnr", () => { testLogger(), true ); - expect((await enr.peerId()).toString(), "enr peer id doesn't equal the returned peer id").to.equal( - peerId.toString() - ); - expect(enr.seq).to.equal(BigInt(1)); - expect(enr.tcp).to.equal(undefined); - expect(enr.tcp6).to.equal(undefined); + // "enr peer id doesn't equal the returned peer id" + expect((await enr.peerId()).toString()).toBe(peerId.toString()); + expect(enr.seq).toBe(BigInt(1)); + expect(enr.tcp).toBeUndefined(); + expect(enr.tcp6).toBeUndefined(); }); it("second time should use ths existing enr and peer id", async () => { @@ -46,7 +45,7 @@ describe("initPeerIdAndEnr", () => { true ); - expect(run1.peerId.toString()).to.equal(run2.peerId.toString()); - expect(run1.enr.encodeTxt()).to.equal(run2.enr.encodeTxt()); + expect(run1.peerId.toString()).toBe(run2.peerId.toString()); + expect(run1.enr.encodeTxt()).toBe(run2.enr.encodeTxt()); }); }); diff --git a/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts b/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts index 59113b273435..ee0fedf301b6 100644 --- a/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts +++ b/packages/cli/test/unit/cmds/validator/keymanager/keystoreCache.test.ts @@ -1,9 +1,7 @@ import fs from "node:fs"; import {randomBytes} from 
"node:crypto"; +import {describe, it, expect, beforeEach} from "vitest"; import tmp from "tmp"; -import {expect} from "chai"; -import chainAsPromised from "chai-as-promised"; -import chai from "chai"; import {Keystore} from "@chainsafe/bls-keystore"; import bls from "@chainsafe/bls"; import {interopSecretKey} from "@lodestar/state-transition"; @@ -11,8 +9,6 @@ import {SignerLocal, SignerType} from "@lodestar/validator"; import {loadKeystoreCache, writeKeystoreCache} from "../../../../../src/cmds/validator/keymanager/keystoreCache.js"; import {LocalKeystoreDefinition} from "../../../../../src/cmds/validator/keymanager/interface.js"; -chai.use(chainAsPromised); - const numberOfSigners = 10; describe("keystoreCache", () => { @@ -23,7 +19,6 @@ describe("keystoreCache", () => { let keystoreCacheFile: string; beforeEach(async function setup() { - this.timeout(50000); definitions = []; signers = []; secretKeys = []; @@ -55,16 +50,16 @@ describe("keystoreCache", () => { passwords.push(password); secretKeys.push(secretKey.toBytes()); } - }); + }, 50000); describe("writeKeystoreCache", () => { it("should write a valid keystore cache file", async () => { - await expect(writeKeystoreCache(keystoreCacheFile, signers, passwords)).to.fulfilled; - expect(fs.existsSync(keystoreCacheFile)).to.be.true; + await expect(writeKeystoreCache(keystoreCacheFile, signers, passwords)).resolves.toBeUndefined(); + expect(fs.existsSync(keystoreCacheFile)).toBe(true); }); it("should throw error if password length are not same as signers", async () => { - await expect(writeKeystoreCache(keystoreCacheFile, signers, [passwords[0]])).to.rejectedWith( + await expect(writeKeystoreCache(keystoreCacheFile, signers, [passwords[0]])).rejects.toThrow( `Number of signers and passwords must be equal. 
signers=${numberOfSigners}, passwords=1` ); }); @@ -75,14 +70,14 @@ describe("keystoreCache", () => { await writeKeystoreCache(keystoreCacheFile, signers, passwords); const result = await loadKeystoreCache(keystoreCacheFile, definitions); - expect(result.map((r) => r.secretKey.toBytes())).to.eql(secretKeys); + expect(result.map((r) => r.secretKey.toBytes())).toEqual(secretKeys); }); it("should raise error for mismatch public key", async () => { await writeKeystoreCache(keystoreCacheFile, signers, passwords); definitions[0].keystorePath = definitions[1].keystorePath; - await expect(loadKeystoreCache(keystoreCacheFile, definitions)).to.rejected; + await expect(loadKeystoreCache(keystoreCacheFile, definitions)).rejects.toBeDefined(); }); }); }); diff --git a/packages/cli/test/unit/config/beaconNodeOptions.test.ts b/packages/cli/test/unit/config/beaconNodeOptions.test.ts index 4e1a44102bf7..d35cf06ad1a4 100644 --- a/packages/cli/test/unit/config/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/config/beaconNodeOptions.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {defaultOptions} from "@lodestar/beacon-node"; import {BeaconNodeOptions} from "../../../src/config/index.js"; @@ -8,7 +8,7 @@ describe("config / beaconNodeOptions", () => { // Asserts only part of the data structure to avoid unnecesary duplicate code const optionsPartial = beaconNodeOptions.getWithDefaults(); - expect(optionsPartial?.api?.rest?.port).to.equal(defaultOptions.api.rest.port, "default api.rest.port not applied"); + expect(optionsPartial?.api?.rest?.port).toBe(defaultOptions.api.rest.port); }); it("Should return added partial options", () => { @@ -19,6 +19,6 @@ describe("config / beaconNodeOptions", () => { beaconNodeOptions.set(editedPartialOptions); const optionsPartial = beaconNodeOptions.get(); - expect(optionsPartial).to.deep.equal(editedPartialOptions); + expect(optionsPartial).toEqual(editedPartialOptions); }); }); diff 
--git a/packages/cli/test/unit/config/beaconParams.test.ts b/packages/cli/test/unit/config/beaconParams.test.ts index 6a953c584c73..4fedaf788601 100644 --- a/packages/cli/test/unit/config/beaconParams.test.ts +++ b/packages/cli/test/unit/config/beaconParams.test.ts @@ -1,5 +1,5 @@ import fs from "node:fs"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll} from "vitest"; import yaml from "js-yaml"; import {toHexString} from "@chainsafe/ssz"; import {getTestdirPath} from "../../utils.js"; @@ -59,19 +59,18 @@ describe("config / beaconParams", () => { }, ]; - before("Write config file", () => { + beforeAll(() => { // eslint-disable-next-line @typescript-eslint/naming-convention fs.writeFileSync(paramsFilepath, yaml.dump({GENESIS_FORK_VERSION: GENESIS_FORK_VERSION_FILE})); }); - after("Remove config file", () => { + afterAll(() => { if (fs.existsSync(paramsFilepath)) fs.unlinkSync(paramsFilepath); }); - for (const {id, kwargs, GENESIS_FORK_VERSION} of testCases) { - it(id, () => { - const params = getBeaconParams(kwargs); - expect(toHexString(params.GENESIS_FORK_VERSION)).to.equal(GENESIS_FORK_VERSION); - }); - } + // eslint-disable-next-line @typescript-eslint/naming-convention + it.each(testCases)("$id", ({kwargs, GENESIS_FORK_VERSION}) => { + const params = getBeaconParams(kwargs); + expect(toHexString(params.GENESIS_FORK_VERSION)).toBe(GENESIS_FORK_VERSION); + }); }); diff --git a/packages/cli/test/unit/config/peerId.test.ts b/packages/cli/test/unit/config/peerId.test.ts index 618c97a99062..c0cdc8cff1a9 100644 --- a/packages/cli/test/unit/config/peerId.test.ts +++ b/packages/cli/test/unit/config/peerId.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createSecp256k1PeerId} from "@libp2p/peer-id-factory"; import {getTestdirPath} from "../../utils.js"; import {writePeerId, readPeerId} from "../../../src/config/index.js"; @@ -11,6 +11,6 @@ describe("config / peerId", () => { 
writePeerId(peerIdFilepath, peerId); const peerIdRead = await readPeerId(peerIdFilepath); - expect(peerIdRead.toString()).to.equal(peerId.toString()); + expect(peerIdRead.toString()).toBe(peerId.toString()); }); }); diff --git a/packages/cli/test/unit/db.test.ts b/packages/cli/test/unit/db.test.ts index f951b3e6923b..1e19e514e9e5 100644 --- a/packages/cli/test/unit/db.test.ts +++ b/packages/cli/test/unit/db.test.ts @@ -1,3 +1,4 @@ +import {describe, it} from "vitest"; // eslint-disable-next-line import/no-relative-packages import {Bucket as BeaconBucket} from "../../../beacon-node/src/db/buckets.js"; // eslint-disable-next-line import/no-relative-packages diff --git a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index f3a887ffea30..8a9ccff5a917 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -1,5 +1,5 @@ import fs from "node:fs"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {IBeaconNodeOptions} from "@lodestar/beacon-node"; import {RecursivePartial} from "@lodestar/utils"; import {parseBeaconNodeArgs, BeaconNodeArgs} from "../../../src/options/beaconNodeOptions/index.js"; @@ -21,6 +21,7 @@ describe("options / beaconNodeOptions", () => { "chain.blsVerifyAllMultiThread": true, "chain.blsVerifyAllMainThread": true, "chain.disableBlsBatchVerify": true, + "chain.persistProducedBlocks": true, "chain.persistInvalidSszObjects": true, "chain.proposerBoostEnabled": false, "chain.disableImportExecutionFcU": false, @@ -52,7 +53,7 @@ describe("options / beaconNodeOptions", () => { "execution.retryAttempts": 1, builder: false, - "builder.urls": ["http://localhost:8661"], + "builder.url": "http://localhost:8661", "builder.timeout": 12000, "builder.faultInspectionWindow": 32, "builder.allowedFaults": 16, @@ -122,6 +123,7 @@ describe("options / beaconNodeOptions", () => { 
blsVerifyAllMultiThread: true, blsVerifyAllMainThread: true, disableBlsBatchVerify: true, + persistProducedBlocks: true, persistInvalidSszObjects: true, proposerBoostEnabled: false, disableImportExecutionFcU: false, @@ -155,7 +157,7 @@ describe("options / beaconNodeOptions", () => { }, executionBuilder: { enabled: false, - urls: ["http://localhost:8661"], + url: "http://localhost:8661", timeout: 12000, faultInspectionWindow: 32, allowedFaults: 16, @@ -214,7 +216,7 @@ describe("options / beaconNodeOptions", () => { }; const options = parseBeaconNodeArgs(beaconNodeArgsPartial); - expect(options).to.deep.equal(expectedOptions); + expect(options).toEqual(expectedOptions); }); it("Should use execution endpoint & jwt for eth1", () => { @@ -238,6 +240,6 @@ describe("options / beaconNodeOptions", () => { }; const options = parseBeaconNodeArgs(beaconNodeArgsPartial); - expect(options.eth1).to.deep.equal(expectedOptions.eth1); + expect(options.eth1).toEqual(expectedOptions.eth1); }); }); diff --git a/packages/cli/test/unit/options/paramsOptions.test.ts b/packages/cli/test/unit/options/paramsOptions.test.ts index d8a2982b1976..a08c70008612 100644 --- a/packages/cli/test/unit/options/paramsOptions.test.ts +++ b/packages/cli/test/unit/options/paramsOptions.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseBeaconParamsArgs} from "../../../src/options/paramsOptions.js"; import {IBeaconParamsUnparsed} from "../../../src/config/types.js"; @@ -18,6 +18,6 @@ describe("options / paramsOptions", () => { }; const beaconParams = parseBeaconParamsArgs(beaconParamsArgs); - expect(beaconParams).to.deep.equal(expectedBeaconParams); + expect(beaconParams).toEqual(expectedBeaconParams); }); }); diff --git a/packages/cli/test/unit/paths/globalPaths.test.ts b/packages/cli/test/unit/paths/globalPaths.test.ts index 0f394dc50b1a..98de8fd87a79 100644 --- a/packages/cli/test/unit/paths/globalPaths.test.ts +++ 
b/packages/cli/test/unit/paths/globalPaths.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {getGlobalPaths} from "../../../src/paths/global.js"; describe("paths / global", () => { @@ -36,7 +36,7 @@ describe("paths / global", () => { for (const {id, args, globalPaths} of testCases) { it(id, () => { - expect(getGlobalPaths(args, args.network ?? network)).to.deep.equal(globalPaths); + expect(getGlobalPaths(args, args.network ?? network)).toEqual(globalPaths); }); } }); diff --git a/packages/cli/test/unit/util/extractJwtHexSecret.test.ts b/packages/cli/test/unit/util/extractJwtHexSecret.test.ts index 37996c2b625f..bb7032390208 100644 --- a/packages/cli/test/unit/util/extractJwtHexSecret.test.ts +++ b/packages/cli/test/unit/util/extractJwtHexSecret.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {extractJwtHexSecret} from "../../../src/util/index.js"; describe("parseJwtHexSecret", () => { @@ -18,7 +18,7 @@ describe("parseJwtHexSecret", () => { ]; for (const {raw, parsed} of testCases) { it(`parse ${raw}`, () => { - expect(parsed).to.be.equal(extractJwtHexSecret(raw)); + expect(parsed).toBe(extractJwtHexSecret(raw)); }); } }); @@ -30,7 +30,7 @@ describe("invalid jwtHexSecret", () => { ]; for (const {raw, error} of testCases) { it(`should error on ${error}: ${raw}`, () => { - expect(() => extractJwtHexSecret(raw)).to.throw(); + expect(() => extractJwtHexSecret(raw)).toThrow(); }); } }); diff --git a/packages/cli/test/unit/util/format.test.ts b/packages/cli/test/unit/util/format.test.ts index 9c51a2af8a4c..c06259cc1842 100644 --- a/packages/cli/test/unit/util/format.test.ts +++ b/packages/cli/test/unit/util/format.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {isValidatePubkeyHex, parseRange} from "../../../src/util/index.js"; describe("util / format / parseRange", () => { @@ -10,7 +10,7 @@ 
describe("util / format / parseRange", () => { for (const {range, res} of testCases) { it(range, () => { - expect(parseRange(range)).to.deep.equal(res); + expect(parseRange(range)).toEqual(res); }); } }); @@ -27,7 +27,7 @@ describe("util / format / isValidatePubkeyHex", () => { for (const [pubkeyHex, isValid] of Object.entries(testCases)) { it(pubkeyHex, () => { - expect(isValidatePubkeyHex(pubkeyHex)).equals(isValid); + expect(isValidatePubkeyHex(pubkeyHex)).toBe(isValid); }); } }); diff --git a/packages/cli/test/unit/util/gitData.test.ts b/packages/cli/test/unit/util/gitData.test.ts index 18bd55118884..206dd070b545 100644 --- a/packages/cli/test/unit/util/gitData.test.ts +++ b/packages/cli/test/unit/util/gitData.test.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {findUpSync} from "find-up"; import {gitDataPath, readGitDataFile} from "../../../src/util/gitData/gitDataPath.js"; import {getGitData} from "../../../src/util/index.js"; @@ -20,7 +20,7 @@ describe("util / gitData", function () { it("gitData file must exist", () => { const gitData = readGitDataFile(); - expect(gitData).to.deep.equal(getGitData(), "Wrong git-data.json contents"); + expect(gitData).toEqual(getGitData()); }); it("gitData path must be included in the package.json", () => { @@ -32,6 +32,6 @@ describe("util / gitData", function () { const pkgJson = JSON.parse(fs.readFileSync(pkgJsonPath, "utf8")) as {files: string[]}; const gitDataPathFromPkgJson = path.relative(path.dirname(pkgJsonPath), gitDataPath); - expect(pkgJson.files).to.include(gitDataPathFromPkgJson, "package.json .files does not include gitData path"); + expect(pkgJson.files).toContain(gitDataPathFromPkgJson); }); }); diff --git a/packages/cli/test/unit/util/logger.test.ts b/packages/cli/test/unit/util/logger.test.ts index 5e792e9750d4..bddc86f2a483 100644 --- 
a/packages/cli/test/unit/util/logger.test.ts +++ b/packages/cli/test/unit/util/logger.test.ts @@ -1,18 +1,17 @@ -import {expect} from "chai"; -import sinon from "sinon"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {shouldDeleteLogFile} from "../../../src/util/logger.js"; describe("shouldDeleteLogFile", function () { const prefix = "beacon"; const extension = "log"; - const sandbox = sinon.createSandbox(); beforeEach(() => { - sandbox.useFakeTimers(new Date("2023-01-01")); + vi.useFakeTimers({now: new Date("2023-01-01")}); }); afterEach(() => { - sandbox.restore(); + vi.useRealTimers(); + vi.clearAllTimers(); }); const tcs: {logFile: string; maxFiles: number; now: number; result: boolean}[] = [ // missing .log @@ -55,7 +54,7 @@ describe("shouldDeleteLogFile", function () { it(`should ${ result ? "" : "not" } delete ${logFile}, maxFiles ${maxFiles}, today ${new Date().toUTCString()}`, () => { - expect(shouldDeleteLogFile(prefix, extension, logFile, maxFiles)).to.be.equal(result); + expect(shouldDeleteLogFile(prefix, extension, logFile, maxFiles)).toBe(result); }); } }); diff --git a/packages/cli/test/unit/util/parseBootnodesFile.test.ts b/packages/cli/test/unit/util/parseBootnodesFile.test.ts index db1a90fbf1c5..07338192bc68 100644 --- a/packages/cli/test/unit/util/parseBootnodesFile.test.ts +++ b/packages/cli/test/unit/util/parseBootnodesFile.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseBootnodesFile} from "../../../src/util/index.js"; describe("config / bootnodes / parsing", () => { @@ -18,7 +18,7 @@ describe("config / bootnodes / parsing", () => { { "enrs": [ - "enr:-cabfg", + "enr:-cabfg", "enr:-deadbeef" ] } @@ -124,9 +124,7 @@ enr:-LK4QKWrXTpV9T78hNG6s8AM6IO4XH9kFT91uZtFg1GcsJ6dKovDOr1jtAAFPnS2lvNltkOGA9k2 }, ]; - for (const {name, input, expected} of testCases) { - it(name, () => { - expect(parseBootnodesFile(input)).to.be.deep.equal(expected); - }); - } + 
it.each(testCases)("$name", ({input, expected}) => { + expect(parseBootnodesFile(input)).toEqual(expected); + }); }); diff --git a/packages/cli/test/unit/util/progress.test.ts b/packages/cli/test/unit/util/progress.test.ts index b62b54dbdd87..d04d959d7422 100644 --- a/packages/cli/test/unit/util/progress.test.ts +++ b/packages/cli/test/unit/util/progress.test.ts @@ -1,102 +1,100 @@ -import {expect} from "chai"; -import sinon from "sinon"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {showProgress} from "../../../src/util/progress.js"; describe("progress", () => { - const sandbox = sinon.createSandbox(); - describe("showProgress", () => { beforeEach(() => { - sandbox.useFakeTimers(); + vi.useFakeTimers(); }); afterEach(() => { - sandbox.restore(); + vi.useRealTimers(); + vi.clearAllTimers(); }); it("should call progress with correct frequency", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; showProgress({total: 10, signal: new AbortController().signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs * 4); + vi.advanceTimersByTime(frequencyMs * 4); - expect(progress.callCount).to.be.equal(4); + expect(progress).toBeCalledTimes(4); }); it("should call progress with correct values", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 8; const needle = showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); needle(1); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); needle(3); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); - expect(progress).to.be.calledThrice; - expect(progress.firstCall.args[0]).to.eql({total, current: 0, ratePerSec: 0, percentage: 0}); - expect(progress.secondCall.args[0]).to.eql({total, current: 2, ratePerSec: 40, percentage: 25}); - 
expect(progress.thirdCall.args[0]).to.eql({total, current: 4, ratePerSec: 40, percentage: 50}); + expect(progress).toHaveBeenCalledTimes(3); + expect(progress).nthCalledWith(1, {total, current: 0, ratePerSec: 0, percentage: 0}); + expect(progress).nthCalledWith(2, {total, current: 2, ratePerSec: 40, percentage: 25}); + expect(progress).nthCalledWith(3, {total, current: 4, ratePerSec: 40, percentage: 50}); }); it("should call progress with correct values when reach total", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 8; const needle = showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); needle(1); - sandbox.clock.tick(frequencyMs); + vi.advanceTimersByTime(frequencyMs); needle(7); // Once by timer and second time because of reaching total - expect(progress).to.be.calledTwice; + expect(progress).toHaveBeenCalledTimes(2); // ratePerSec is 0 (actually Infinity) because we reached total without moving the clock time - expect(progress.secondCall.args[0]).to.eql({total, current: total, ratePerSec: 0, percentage: 100}); + expect(progress).nthCalledWith(2, {total, current: total, ratePerSec: 0, percentage: 100}); }); it("should call progress with correct values directly reaches to total", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 8; const needle = showProgress({total, signal: new AbortController().signal, frequencyMs, progress}); needle(7); - expect(progress).to.be.calledOnce; - expect(progress.firstCall.args[0]).to.eql({total, current: total, ratePerSec: 0, percentage: 100}); + expect(progress).toHaveBeenCalledTimes(1); + expect(progress).nthCalledWith(1, {total, current: total, ratePerSec: 0, percentage: 100}); }); it("should not call progress when initiated with zero total", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const total = 0; showProgress({total, signal: 
new AbortController().signal, frequencyMs, progress}); - expect(progress).to.be.not.be.called; + expect(progress).not.toHaveBeenCalled(); }); it("should not call progress further when abort signal is called", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const controller = new AbortController(); showProgress({total: 10, signal: controller.signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); controller.abort(); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); - expect(progress.callCount).to.be.equal(2); + expect(progress).toBeCalledTimes(2); }); it("should not call progress further when total is reached", () => { - const progress = sandbox.spy(); + const progress = vi.fn(); const frequencyMs = 50; const needle = showProgress({total: 10, signal: new AbortController().signal, frequencyMs, progress}); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); needle(50); - sandbox.clock.tick(frequencyMs * 2); + vi.advanceTimersByTime(frequencyMs * 2); // 2 calls based on interval and 1 call based on reaching total - expect(progress.callCount).to.be.equal(2 + 1); + expect(progress).toBeCalledTimes(2 + 1); }); }); }); diff --git a/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts b/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts index a4c2cf9d05a4..d88f684902e0 100644 --- a/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts +++ b/packages/cli/test/unit/util/pruneOldFilesInDir.test.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {pruneOldFilesInDir} from "../../../src/util/index.js"; import {testFilesDir} from "../../utils.js"; @@ -25,14 +25,14 @@ describe("pruneOldFilesInDir", () => { pruneOldFilesInDir(dataDir, DAYS_TO_MS); const 
files = fs.readdirSync(dataDir); - expect(files).to.not.include(oldFile); + expect(files).toEqual(expect.not.arrayContaining([oldFile])); }); it("should not delete new files", () => { pruneOldFilesInDir(dataDir, DAYS_TO_MS); const files = fs.readdirSync(dataDir); - expect(files).to.include(newFile); + expect(files).toEqual(expect.arrayContaining([newFile])); }); it("should delete old files in nested directories", () => { @@ -43,7 +43,7 @@ describe("pruneOldFilesInDir", () => { pruneOldFilesInDir(dataDir, DAYS_TO_MS); - expect(fs.readdirSync(nestedDir)).to.be.empty; + expect(fs.readdirSync(nestedDir)).toHaveLength(0); }); it("should handle empty directories", () => { @@ -52,7 +52,7 @@ describe("pruneOldFilesInDir", () => { pruneOldFilesInDir(emptyDir, DAYS_TO_MS); - expect(fs.readdirSync(emptyDir)).to.be.empty; + expect(fs.readdirSync(emptyDir)).toHaveLength(0); }); function createFileWithAge(path: string, ageInDays: number): void { diff --git a/packages/cli/test/unit/util/stripOffNewlines.test.ts b/packages/cli/test/unit/util/stripOffNewlines.test.ts index 839625836fb5..3a5a5a1f3523 100644 --- a/packages/cli/test/unit/util/stripOffNewlines.test.ts +++ b/packages/cli/test/unit/util/stripOffNewlines.test.ts @@ -1,34 +1,34 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {stripOffNewlines} from "../../../src/util/index.js"; describe("stripOffNewlines", () => { it("should remove trailing newlines from a string", () => { - expect(stripOffNewlines("1231321\n")).to.equal("1231321"); - expect(stripOffNewlines("1231321\r")).to.equal("1231321"); - expect(stripOffNewlines("1231321\r\n")).to.equal("1231321"); - expect(stripOffNewlines("1231321\n\n\r")).to.equal("1231321"); - expect(stripOffNewlines("1231321\n\r\n")).to.equal("1231321"); - expect(stripOffNewlines("\n\r\n")).to.equal(""); + expect(stripOffNewlines("1231321\n")).toBe("1231321"); + expect(stripOffNewlines("1231321\r")).toBe("1231321"); + 
expect(stripOffNewlines("1231321\r\n")).toBe("1231321"); + expect(stripOffNewlines("1231321\n\n\r")).toBe("1231321"); + expect(stripOffNewlines("1231321\n\r\n")).toBe("1231321"); + expect(stripOffNewlines("\n\r\n")).toBe(""); }); it("should not remove pipe character(s) at the end of a string", () => { - expect(stripOffNewlines("1231321|")).to.equal("1231321|"); - expect(stripOffNewlines("1231321||")).to.equal("1231321||"); - expect(stripOffNewlines("1231321|||")).to.equal("1231321|||"); + expect(stripOffNewlines("1231321|")).toBe("1231321|"); + expect(stripOffNewlines("1231321||")).toBe("1231321||"); + expect(stripOffNewlines("1231321|||")).toBe("1231321|||"); }); it("should not remove newlines in the middle of a string", () => { - expect(stripOffNewlines("123\n1321\n\n\n")).to.equal("123\n1321"); + expect(stripOffNewlines("123\n1321\n\n\n")).toBe("123\n1321"); }); it("should not modify the string if there are no new lines", () => { - expect(stripOffNewlines("1231321")).to.equal("1231321"); - expect(stripOffNewlines("")).to.equal(""); + expect(stripOffNewlines("1231321")).toBe("1231321"); + expect(stripOffNewlines("")).toBe(""); }); it("should not mutate the original string", () => { const originalString = "123\n1321\n\n\n"; stripOffNewlines(originalString); - expect(originalString).to.equal("123\n1321\n\n\n"); + expect(originalString).toBe("123\n1321\n\n\n"); }); }); diff --git a/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts b/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts index dc3ce5dff5ad..f24b83ae43a6 100644 --- a/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts +++ b/packages/cli/test/unit/validator/decryptKeystoreDefinitions.test.ts @@ -1,15 +1,15 @@ import fs from "node:fs"; import path from "node:path"; +import {describe, it, expect, beforeEach, vi} from "vitest"; import {rimraf} from "rimraf"; -import {expect} from "chai"; import {getKeystoresStr} from "@lodestar/test-utils"; import 
{cachedSeckeysHex} from "../../utils/cachedKeys.js"; import {testFilesDir} from "../../utils.js"; import {decryptKeystoreDefinitions} from "../../../src/cmds/validator/keymanager/decryptKeystoreDefinitions.js"; import {LocalKeystoreDefinition} from "../../../src/cmds/validator/keymanager/interface.js"; -describe("decryptKeystoreDefinitions", function () { - this.timeout(100_000); +describe("decryptKeystoreDefinitions", () => { + vi.setConfig({testTimeout: 100_000}); const signal = new AbortController().signal; const dataDir = path.join(testFilesDir, "decrypt-keystores-test"); @@ -19,11 +19,9 @@ describe("decryptKeystoreDefinitions", function () { const keyCount = 2; const secretKeys = cachedSeckeysHex.slice(0, keyCount); - // Produce and encrypt keystores let definitions: LocalKeystoreDefinition[] = []; - beforeEach("Prepare dataDir", async () => { - // wipe out data dir and existing keystores + beforeEach(async () => { rimraf.sync(dataDir); rimraf.sync(importFromDir); @@ -31,7 +29,7 @@ describe("decryptKeystoreDefinitions", function () { const keystoresStr = await getKeystoresStr(password, secretKeys); definitions = []; - // write keystores to disk + for (let i = 0; i < keyCount; i++) { const keystorePath = path.join(importFromDir, `keystore_${i}.json`); fs.writeFileSync(keystorePath, keystoresStr[i]); @@ -39,13 +37,13 @@ describe("decryptKeystoreDefinitions", function () { } }); - context("with keystore cache", () => { + describe("with keystore cache", () => { const cacheFilePath = path.join(dataDir, "cache", "keystores.cache"); beforeEach(async () => { // create cache file to ensure keystores are loaded from cache during tests await decryptKeystoreDefinitions(definitions, {logger: console, cacheFilePath, signal}); - expect(fs.existsSync(cacheFilePath)).to.be.true; + expect(fs.existsSync(cacheFilePath)).toBe(true); // remove lockfiles created during cache file preparation rimraf.sync(path.join(importFromDir, "*.lock"), {glob: true}); @@ -54,17 +52,18 @@ 
describe("decryptKeystoreDefinitions", function () { testDecryptKeystoreDefinitions(cacheFilePath); }); - context("without keystore cache", () => { + describe("without keystore cache", () => { testDecryptKeystoreDefinitions(); }); function testDecryptKeystoreDefinitions(cacheFilePath?: string): void { it("decrypt keystores", async () => { const signers = await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath}); - expect(signers.length).to.equal(secretKeys.length); + expect(signers.length).toBe(secretKeys.length); for (const signer of signers) { const hexSecret = signer.secretKey.toHex(); - expect(secretKeys.includes(hexSecret), `secretKeys doesn't include ${hexSecret}`).to.be.true; + + expect(secretKeys.includes(hexSecret)).toBe(true); } }); @@ -76,14 +75,14 @@ describe("decryptKeystoreDefinitions", function () { await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath}); expect.fail("Second decrypt should fail due to failure to get lockfile"); } catch (e) { - expect((e as Error).message.startsWith("EEXIST: file already exists"), "Wrong error is thrown").to.be.true; + expect((e as Error).message.startsWith("EEXIST: file already exists")).toBe(true); } }); it("decrypt keystores if lockfiles already exist if ignoreLockFile=true", async () => { await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath}); - // lockfiles should exist after the first run + // lockfiles should exist after the first run await decryptKeystoreDefinitions(definitions, {logger: console, signal, cacheFilePath, ignoreLockFile: true}); }); } diff --git a/packages/cli/test/unit/validator/keys.test.ts b/packages/cli/test/unit/validator/keys.test.ts index 686915e27db4..c977c2242c33 100644 --- a/packages/cli/test/unit/validator/keys.test.ts +++ b/packages/cli/test/unit/validator/keys.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import path from "node:path"; -import {expect} from "chai"; +import {describe, it, 
expect, afterEach} from "vitest"; import {importKeystoreDefinitionsFromExternalDir} from "../../../src/cmds/validator/signers/importExternalKeystores.js"; describe("validator / signers / importKeystoreDefinitionsFromExternalDir", () => { @@ -27,10 +27,7 @@ describe("validator / signers / importKeystoreDefinitionsFromExternalDir", () => const password = "12345678"; const definitions = importKeystoreDefinitionsFromExternalDir({keystoresPath: [tmpDir], password}); - expect(definitions.map((def) => def.keystorePath).sort()).to.deep.equal( - toReadFilepaths.sort(), - "Wrong read keystore paths" - ); + expect(definitions.map((def) => def.keystorePath).sort()).toEqual(toReadFilepaths.sort()); }); function inTmp(filepath: string): string { diff --git a/packages/cli/test/unit/validator/options.test.ts b/packages/cli/test/unit/validator/options.test.ts index dbe961ed0f33..627ee8f59818 100644 --- a/packages/cli/test/unit/validator/options.test.ts +++ b/packages/cli/test/unit/validator/options.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseFeeRecipient} from "../../../src/util/index.js"; const feeRecipient = Buffer.from(Array.from({length: 20}, () => Math.round(Math.random() * 255))); @@ -8,7 +8,7 @@ describe("validator / parseFeeRecipient", () => { const testCases: string[] = [`0x${feeRecipientString}`, `0X${feeRecipientString}`]; for (const testCase of testCases) { it(`parse ${testCase}`, () => { - expect(`0x${feeRecipientString}`).to.be.deep.equal(parseFeeRecipient(testCase)); + expect(`0x${feeRecipientString}`).toEqual(parseFeeRecipient(testCase)); }); } }); @@ -22,7 +22,7 @@ describe("validator / invalid feeRecipient", () => { ]; for (const testCase of testCases) { it(`should error on ${testCase}`, () => { - expect(() => parseFeeRecipient(testCase)).to.throw(); + expect(() => parseFeeRecipient(testCase)).toThrow(); }); } }); diff --git a/packages/cli/test/unit/validator/parseProposerConfig.test.ts 
b/packages/cli/test/unit/validator/parseProposerConfig.test.ts index da459cf84c6e..fcb6933f035b 100644 --- a/packages/cli/test/unit/validator/parseProposerConfig.test.ts +++ b/packages/cli/test/unit/validator/parseProposerConfig.test.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ import path from "node:path"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {routes} from "@lodestar/api"; import {parseProposerConfig} from "../../../src/util/index.js"; @@ -42,12 +42,12 @@ const testValue = { describe("validator / valid Proposer", () => { it("parse Valid proposer", () => { - expect(parseProposerConfig(path.join(__dirname, "./proposerConfigs/validData.yaml"))).to.be.deep.equal(testValue); + expect(parseProposerConfig(path.join(__dirname, "./proposerConfigs/validData.yaml"))).toEqual(testValue); }); }); describe("validator / invalid Proposer", () => { it("should throw error", () => { - expect(() => parseProposerConfig(path.join(__dirname, "./proposerConfigs/invalidData.yaml"))).to.throw(); + expect(() => parseProposerConfig(path.join(__dirname, "./proposerConfigs/invalidData.yaml"))).toThrow(); }); }); diff --git a/packages/cli/test/utils/runUtils.ts b/packages/cli/test/utils/runUtils.ts index f6a9c311946b..8d2846d6e664 100644 --- a/packages/cli/test/utils/runUtils.ts +++ b/packages/cli/test/utils/runUtils.ts @@ -1,5 +1,5 @@ import fs from "node:fs"; -import {expect} from "chai"; +import {expect} from "vitest"; import {apiTokenFileName} from "../../src/cmds/validator/keymanager/server.js"; import {recursiveLookup} from "../../src/util/index.js"; @@ -17,12 +17,14 @@ export function findApiToken(dirpath: string): string { } export function expectDeepEquals(a: T, b: T, message: string): void { - expect(a).deep.equals(b, message); + expect(a).toEqualWithMessage(b, message); } /** * Similar to `expectDeepEquals` but only checks presence of all elements in array, 
irrespective of their order. */ export function expectDeepEqualsUnordered(a: T[], b: T[], message: string): void { - expect(a).to.have.deep.members(b, message); + expect(a).toEqualWithMessage(expect.arrayContaining(b), message); + expect(b).toEqualWithMessage(expect.arrayContaining(a), message); + expect(a).toHaveLength(b.length); } diff --git a/packages/cli/test/utils/simulation/validator_clients/lodestar.ts b/packages/cli/test/utils/simulation/validator_clients/lodestar.ts index a85347d780c5..7c0c9e3537b1 100644 --- a/packages/cli/test/utils/simulation/validator_clients/lodestar.ts +++ b/packages/cli/test/utils/simulation/validator_clients/lodestar.ts @@ -15,7 +15,7 @@ import {getNodePorts} from "../utils/ports.js"; export const generateLodestarValidatorNode: ValidatorNodeGenerator = (opts, runner) => { const {paths, id, keys, forkConfig, genesisTime, nodeIndex, beaconUrls, clientOptions} = opts; const {rootDir, keystoresDir, keystoresSecretFilePath, logFilePath} = paths; - const {useProduceBlockV3, "builder.selection": builderSelection} = clientOptions ?? {}; + const {useProduceBlockV3, "builder.selection": builderSelection, blindedLocal} = clientOptions ?? 
{}; const ports = getNodePorts(nodeIndex); const rcConfigPath = path.join(rootDir, "rc_config.json"); const paramsPath = path.join(rootDir, "params.json"); @@ -41,6 +41,7 @@ export const generateLodestarValidatorNode: ValidatorNodeGenerator {} +export async function teardown(): Promise {} diff --git a/packages/config/test/unit/index.test.ts b/packages/config/test/unit/index.test.ts index 35dabd5dda61..a6fca7ad643a 100644 --- a/packages/config/test/unit/index.test.ts +++ b/packages/config/test/unit/index.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {toHexString} from "@chainsafe/ssz"; import {ForkName} from "@lodestar/params"; import {config, chainConfig} from "../../src/default.js"; @@ -12,30 +12,27 @@ describe("forks", () => { const fork2 = forks[i + 1]; // Use less equal to be okay with both forks being at Infinity - expect(fork1.epoch).to.be.at.most( - fork2.epoch, - `Forks are not sorted ${fork1.name} ${fork1.epoch} -> ${fork2.name} ${fork2.epoch}` - ); + expect(fork1.epoch).toBeLessThanOrEqual(fork2.epoch); } }); it("Get phase0 fork for slot 0", () => { const fork = config.getForkName(0); - expect(fork).to.equal(ForkName.phase0); + expect(fork).toBe(ForkName.phase0); }); it("correct prev data", () => { for (let i = 1; i < config.forksAscendingEpochOrder.length; i++) { const fork = config.forksAscendingEpochOrder[i]; const prevFork = config.forksAscendingEpochOrder[i - 1]; - expect(toHexString(fork.prevVersion)).to.equal(toHexString(prevFork.version), `Wrong prevVersion ${fork.name}`); - expect(fork.prevForkName).to.equal(prevFork.name, `Wrong prevName ${fork.name}`); + expect(toHexString(fork.prevVersion)).toBe(toHexString(prevFork.version)); + expect(fork.prevForkName).toBe(prevFork.name); } }); it("correctly handle pre-genesis", () => { // eslint-disable-next-line @typescript-eslint/naming-convention const postMergeTestnet = createForkConfig({...chainConfig, ALTAIR_FORK_EPOCH: 0, 
BELLATRIX_FORK_EPOCH: 0}); - expect(postMergeTestnet.getForkName(-1)).to.equal(ForkName.bellatrix); + expect(postMergeTestnet.getForkName(-1)).toBe(ForkName.bellatrix); }); }); diff --git a/packages/config/test/unit/json.test.ts b/packages/config/test/unit/json.test.ts index e04b566cc091..96ffd050a088 100644 --- a/packages/config/test/unit/json.test.ts +++ b/packages/config/test/unit/json.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {chainConfigFromJson, chainConfigToJson} from "../../src/index.js"; import {chainConfig} from "../../src/default.js"; @@ -7,6 +7,6 @@ describe("chainConfig JSON", () => { const json = chainConfigToJson(chainConfig); const chainConfigRes = chainConfigFromJson(json); - expect(chainConfigRes).to.deep.equal(chainConfig); + expect(chainConfigRes).toEqual(chainConfig); }); }); diff --git a/packages/config/vitest.config.ts b/packages/config/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/config/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/db/package.json b/packages/db/package.json index 961dadf19ecf..5d1fef29b6f3 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/db", - "version": "1.12.0", + "version": "1.13.0", "description": "DB modules of Lodestar", "author": "ChainSafe Systems", "homepage": "https://github.com/ChainSafe/lodestar#readme", @@ -33,18 +33,18 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit", - "test:unit": "mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { 
"@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/config": "^1.13.0", + "@lodestar/utils": "^1.13.0", "@types/levelup": "^4.3.3", "it-all": "^3.0.2", "level": "^8.0.0" }, "devDependencies": { - "@lodestar/logger": "^1.12.0" + "@lodestar/logger": "^1.13.0" } } diff --git a/packages/db/src/controller/metrics.ts b/packages/db/src/controller/metrics.ts index b4b8a0bf0963..4827d6fb4515 100644 --- a/packages/db/src/controller/metrics.ts +++ b/packages/db/src/controller/metrics.ts @@ -1,26 +1,10 @@ +import {Counter, Gauge, Histogram} from "@lodestar/utils"; + export type LevelDbControllerMetrics = { - dbReadReq: Counter<"bucket">; - dbReadItems: Counter<"bucket">; - dbWriteReq: Counter<"bucket">; - dbWriteItems: Counter<"bucket">; + dbReadReq: Counter<{bucket: string}>; + dbReadItems: Counter<{bucket: string}>; + dbWriteReq: Counter<{bucket: string}>; + dbWriteItems: Counter<{bucket: string}>; dbSizeTotal: Gauge; dbApproximateSizeTime: Histogram; }; - -type Labels = Partial>; - -interface Counter { - inc(value?: number): void; - inc(labels: Labels, value?: number): void; - inc(arg1?: Labels | number, arg2?: number): void; -} - -interface Gauge { - set(value: number): void; - set(labels: Labels, value: number): void; - set(arg1?: Labels | number, arg2?: number): void; -} - -interface Histogram { - startTimer(): () => number; -} diff --git a/packages/db/test/globalSetup.ts b/packages/db/test/globalSetup.ts new file mode 100644 index 000000000000..02074bb24d11 --- /dev/null +++ b/packages/db/test/globalSetup.ts @@ -0,0 +1,4 @@ +export async function setup(): Promise { + process.env.NODE_ENV = "test"; +} +export async function teardown(): Promise {} diff --git a/packages/db/test/unit/controller/level.test.ts b/packages/db/test/unit/controller/level.test.ts index 768ef3a39006..33d5b9a86c2a 100644 --- a/packages/db/test/unit/controller/level.test.ts +++ b/packages/db/test/unit/controller/level.test.ts @@ -1,6 +1,6 @@ 
import {execSync} from "node:child_process"; import os from "node:os"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll} from "vitest"; import leveldown from "leveldown"; import all from "it-all"; import {getEnvLogger} from "@lodestar/logger/env"; @@ -10,11 +10,11 @@ describe("LevelDB controller", () => { const dbLocation = "./.__testdb"; let db: LevelDbController; - before(async () => { + beforeAll(async () => { db = await LevelDbController.create({name: dbLocation}, {metrics: null, logger: getEnvLogger()}); }); - after(async () => { + afterAll(async () => { await db.close(); await new Promise((resolve, reject) => { leveldown.destroy(dbLocation, (err) => { @@ -26,16 +26,16 @@ describe("LevelDB controller", () => { it("test get not found", async () => { const key = Buffer.from("not-existing-key"); - expect(await db.get(key)).to.equal(null); + expect(await db.get(key)).toBe(null); }); it("test put/get/delete", async () => { const key = Buffer.from("test"); const value = Buffer.from("some value"); await db.put(key, value); - expect(await db.get(key)).to.be.deep.equal(value); + expect(await db.get(key)).toEqual(value); await db.delete(key); - expect(await db.get(key)).to.equal(null); + expect(await db.get(key)).toBe(null); }); it("test batchPut", async () => { @@ -51,8 +51,8 @@ describe("LevelDB controller", () => { value: Buffer.from("value"), }, ]); - expect(await db.get(k1)).to.not.be.null; - expect(await db.get(k2)).to.not.be.null; + expect(await db.get(k1)).not.toBeNull(); + expect(await db.get(k2)).not.toBeNull(); }); it("test batch delete", async () => { @@ -69,9 +69,9 @@ describe("LevelDB controller", () => { value: Buffer.from("value"), }, ]); - expect((await db.entries()).length).to.equal(2); + expect((await db.entries()).length).toBe(2); await db.batchDelete([k1, k2]); - expect((await db.entries()).length).to.equal(0); + expect((await db.entries()).length).toBe(0); }); it("test entries", async () => { @@ -91,7 +91,7 @@ 
describe("LevelDB controller", () => { gte: k1, lte: k2, }); - expect(result.length).to.be.equal(2); + expect(result.length).toBe(2); }); it("test entriesStream", async () => { @@ -112,7 +112,7 @@ describe("LevelDB controller", () => { lte: k2, }); const result = await all(resultStream); - expect(result.length).to.be.equal(2); + expect(result.length).toBe(2); }); it("test compactRange + approximateSize", async () => { @@ -129,11 +129,11 @@ describe("LevelDB controller", () => { await db.compactRange(minKey, maxKey); const sizeAfterCompact = getDbSize(); - expect(sizeAfterCompact).lt(sizeBeforeCompact, "Expected sizeAfterCompact < sizeBeforeCompact"); + expect(sizeAfterCompact).toBeLessThan(sizeBeforeCompact); // approximateSize is not exact, just test a number is positive const approxSize = await db.approximateSize(minKey, maxKey); - expect(approxSize).gt(0, "approximateSize return not > 0"); + expect(approxSize).toBeGreaterThan(0); }); function getDuCommand(): string { diff --git a/packages/db/test/unit/schema.test.ts b/packages/db/test/unit/schema.test.ts index 46cb3af23de6..c72611453e4f 100644 --- a/packages/db/test/unit/schema.test.ts +++ b/packages/db/test/unit/schema.test.ts @@ -1,4 +1,4 @@ -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {intToBytes} from "@lodestar/utils"; import {BUCKET_LENGTH, encodeKey} from "../../src/index.js"; @@ -25,7 +25,7 @@ describe("encodeKey", () => { expected = Buffer.concat([intToBytes(bucket, BUCKET_LENGTH, "le"), intToBytes(BigInt(key), 8, "be")]); } const actual = encodeKey(bucket, key); - assert.deepEqual(actual, expected); + expect(actual).toEqual(expected); }); } }); diff --git a/packages/db/vitest.config.ts b/packages/db/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/db/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default 
mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/flare/.mocharc.yaml b/packages/flare/.mocharc.yaml deleted file mode 100644 index 55ad4d0122e1..000000000000 --- a/packages/flare/.mocharc.yaml +++ /dev/null @@ -1,5 +0,0 @@ -exit: true -extension: ["ts"] -colors: true -node-option: - - "loader=ts-node/esm" diff --git a/packages/flare/package.json b/packages/flare/package.json index dc4157415bd8..7bbd79d02602 100644 --- a/packages/flare/package.json +++ b/packages/flare/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/flare", - "version": "1.12.0", + "version": "1.13.0", "description": "Beacon chain debugging tool", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -44,7 +44,7 @@ "lint": "eslint --color --ext .ts src/", "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "repository": { @@ -60,12 +60,12 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/bls-keygen": "^0.3.0", - "@lodestar/api": "^1.12.0", - "@lodestar/config": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/state-transition": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/api": "^1.13.0", + "@lodestar/config": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/state-transition": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", "source-map-support": "^0.5.21", "yargs": "^17.7.1" }, diff --git a/packages/flare/src/util/command.ts b/packages/flare/src/util/command.ts index f22aca319af0..81a3993f3c43 100644 --- a/packages/flare/src/util/command.ts +++ b/packages/flare/src/util/command.ts @@ -1,6 +1,16 @@ import {Options, Argv} from "yargs"; -export type CliCommandOptions = Required<{[key in keyof OwnArgs]: 
Options}>; +export interface CliExample { + command: string; + title?: string; + description?: string; +} + +export interface CliOptionDefinition extends Options { + example?: CliExample; +} + +export type CliCommandOptions = Required<{[key in keyof OwnArgs]: CliOptionDefinition}>; // eslint-disable-next-line @typescript-eslint/no-explicit-any export interface CliCommand, ParentArgs = Record, R = any> { diff --git a/packages/flare/test/globalSetup.ts b/packages/flare/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/flare/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/flare/test/unit/utils/format.test.ts b/packages/flare/test/unit/utils/format.test.ts index 0eb898a9fd0f..c6ef8805fe7b 100644 --- a/packages/flare/test/unit/utils/format.test.ts +++ b/packages/flare/test/unit/utils/format.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {parseRange} from "../../../src/util/format.js"; describe("utils / format", () => { @@ -10,7 +10,7 @@ describe("utils / format", () => { for (const {range, indexes} of testCases) { it(range, () => { - expect(parseRange(range)).to.deep.equal(indexes); + expect(parseRange(range)).toEqual(indexes); }); } }); diff --git a/packages/flare/vitest.config.ts b/packages/flare/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/flare/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/fork-choice/package.json b/packages/fork-choice/package.json index 5789c0109dd9..54fe40063d85 100644 --- a/packages/fork-choice/package.json +++ 
b/packages/fork-choice/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": "./lib/index.js", "types": "./lib/index.d.ts", @@ -34,16 +34,16 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit", - "test:unit": "mocha --colors -r ts-node/register 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/state-transition": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0" + "@lodestar/config": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/state-transition": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0" }, "keywords": [ "ethereum", diff --git a/packages/fork-choice/test/globalSetup.ts b/packages/fork-choice/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/fork-choice/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts index fe4f9a7afaad..fe11532dbb6f 100644 --- a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, beforeEach, beforeAll} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import {config} from "@lodestar/config/default"; import {RootHex, Slot} from "@lodestar/types"; @@ -119,12 +119,12 @@ describe("Forkchoice", function () { const forkchoice = new ForkChoice(config, fcStore, protoArr); const summaries = 
forkchoice.getAllAncestorBlocks(getBlockRoot(genesisSlot + 1)); // there are 2 blocks in protoArray but iterateAncestorBlocks should only return non-finalized blocks - expect(summaries.length).to.be.equals(1, "should not return the finalized block"); - expect(summaries[0]).to.be.deep.include(block, "the block summary is not correct"); + expect(summaries).toHaveLength(1); + expect(summaries[0]).toEqual({...block, bestChild: undefined, bestDescendant: undefined, parent: 0, weight: 0}); }); - before("Assert SLOTS_PER_EPOCH", () => { - expect(SLOTS_PER_EPOCH).equals(32, "Unexpected SLOTS_PER_EPOCH value"); + beforeAll(() => { + expect(SLOTS_PER_EPOCH).toBe(32); }); const dependentRootTestCases: {atSlot: Slot; pivotSlot: Slot; epoch: EpochDifference; skipped: Slot[]}[] = [ @@ -162,10 +162,7 @@ describe("Forkchoice", function () { const expectedDependentRoot = getBlockRoot(pivotSlot); - expect(forkchoice.getDependentRoot(block, epoch)).to.be.equal( - expectedDependentRoot, - "incorrect attester dependent root" - ); + expect(forkchoice.getDependentRoot(block, epoch)).toBe(expectedDependentRoot); }); } diff --git a/packages/fork-choice/test/unit/forkChoice/utils.test.ts b/packages/fork-choice/test/unit/forkChoice/utils.test.ts index 3cf497ac38a1..3f315d079842 100644 --- a/packages/fork-choice/test/unit/forkChoice/utils.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/utils.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createChainForkConfig} from "@lodestar/config"; import {ssz} from "@lodestar/types"; import {assertValidTerminalPowBlock, ExecutionStatus} from "../../../src/index.js"; @@ -17,7 +17,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: null, powBlock}) - ).to.not.throw(); + ).not.toThrow(); }); it("should require powBlockParent if powBlock not genesis", function () { @@ -29,7 +29,7 @@ 
describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: null, powBlock}) - ).to.throw(); + ).toThrow(); }); it("should require powBlock >= ttd", function () { @@ -41,7 +41,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: powBlock, powBlock}) - ).to.throw(); + ).toThrow(); }); it("should require powBlockParent < ttd", function () { @@ -53,7 +53,7 @@ describe("assertValidTerminalPowBlock", function () { }; expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent: powBlock, powBlock}) - ).to.throw(); + ).toThrow(); }); it("should accept powBlockParent < ttd and powBlock >= ttd", function () { @@ -67,8 +67,6 @@ describe("assertValidTerminalPowBlock", function () { ...powBlock, totalDifficulty: BigInt(9), }; - expect(() => - assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent, powBlock}) - ).to.not.throw(); + expect(() => assertValidTerminalPowBlock(config, block, {executionStatus, powBlockParent, powBlock})).not.toThrow(); }); }); diff --git a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts index 3981ef84ff4c..54b8a900d05c 100644 --- a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts +++ b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {getEffectiveBalanceIncrementsZeroed} from "@lodestar/state-transition"; import {computeDeltas} from "../../../src/protoArray/computeDeltas.js"; @@ -25,11 +25,11 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); - expect(deltas).to.deep.equal(Array.from({length: 
validatorCount}, () => 0)); + expect(deltas.length).toEqual(validatorCount); + expect(deltas).toEqual(Array.from({length: validatorCount}, () => 0)); for (const vote of votes) { - expect(vote.currentIndex).to.eql(vote.nextIndex); + expect(vote.currentIndex).toEqual(vote.nextIndex); } }); @@ -55,13 +55,13 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); for (const [i, delta] of deltas.entries()) { if (i === 0) { - expect(delta.toString()).to.equal((balance * validatorCount).toString()); + expect(delta.toString()).toBe((balance * validatorCount).toString()); } else { - expect(delta.toString()).to.equal("0"); + expect(delta.toString()).toBe("0"); } } }); @@ -88,10 +88,10 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); for (const delta of deltas) { - expect(delta.toString()).to.equal(balance.toString()); + expect(delta.toString()).toBe(balance.toString()); } }); @@ -117,17 +117,17 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); const totalDelta = balance * validatorCount; for (const [i, delta] of deltas.entries()) { if (i === 0) { - expect(delta.toString()).to.equal((0 - totalDelta).toString()); + expect(delta.toString()).toBe((0 - totalDelta).toString()); } else if (i === 1) { - expect(delta.toString()).to.equal(totalDelta.toString()); + expect(delta.toString()).toBe(totalDelta.toString()); } else { - expect(delta.toString()).to.equal("0"); + expect(delta.toString()).toBe("0"); } } }); @@ -201,15 +201,15 @@ describe("computeDeltas", () => { const deltas = 
computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(validatorCount); + expect(deltas.length).toEqual(validatorCount); for (const [i, delta] of deltas.entries()) { if (i === 0) { - expect(delta.toString()).to.equal((0 - oldBalance * validatorCount).toString()); + expect(delta.toString()).toBe((0 - oldBalance * validatorCount).toString()); } else if (i === 1) { - expect(delta.toString()).to.equal((newBalance * validatorCount).toString()); + expect(delta.toString()).toBe((newBalance * validatorCount).toString()); } else { - expect(delta.toString()).to.equal("0"); + expect(delta.toString()).toBe("0"); } } }); @@ -239,13 +239,13 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(2); + expect(deltas.length).toEqual(2); - expect(deltas[0].toString()).to.eql((0 - balance).toString()); - expect(deltas[1].toString()).to.eql((balance * 2).toString()); + expect(deltas[0].toString()).toEqual((0 - balance).toString()); + expect(deltas[1].toString()).toEqual((balance * 2).toString()); for (const vote of votes) { - expect(vote.currentIndex).to.equal(vote.nextIndex); + expect(vote.currentIndex).toBe(vote.nextIndex); } }); @@ -273,13 +273,13 @@ describe("computeDeltas", () => { const deltas = computeDeltas(indices.size, votes, oldBalances, newBalances, new Set()); - expect(deltas.length).to.eql(2); + expect(deltas.length).toEqual(2); - expect(deltas[0].toString()).to.eql((0 - balance * 2).toString()); - expect(deltas[1].toString()).to.eql(balance.toString()); + expect(deltas[0].toString()).toEqual((0 - balance * 2).toString()); + expect(deltas[1].toString()).toEqual(balance.toString()); for (const vote of votes) { - expect(vote.currentIndex).to.equal(vote.nextIndex); + expect(vote.currentIndex).toBe(vote.nextIndex); } }); @@ -303,12 +303,12 @@ describe("computeDeltas", () => { // 1st validator is part of an attester slashing 
const equivocatingIndices = new Set([0]); let deltas = computeDeltas(indices.size, votes, balances, balances, equivocatingIndices); - expect(deltas[0]).to.be.equals( + expect(deltas[0]).toBeWithMessage( -1 * (firstBalance + secondBalance), "should disregard the 1st validator due to attester slashing" ); - expect(deltas[1]).to.be.equals(secondBalance, "should move 2nd balance from 1st root to 2nd root"); + expect(deltas[1]).toBeWithMessage(secondBalance, "should move 2nd balance from 1st root to 2nd root"); deltas = computeDeltas(indices.size, votes, balances, balances, equivocatingIndices); - expect(deltas).to.be.deep.equals([0, 0], "calling computeDeltas again should not have any affect on the weight"); + expect(deltas).toEqualWithMessage([0, 0], "calling computeDeltas again should not have any affect on the weight"); }); }); diff --git a/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts b/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts index e1dda450aa46..94e5cd3ac9a0 100644 --- a/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts +++ b/packages/fork-choice/test/unit/protoArray/executionStatusUpdates.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import { ProtoBlock, ProtoArray, @@ -135,7 +135,7 @@ describe("executionStatus / normal updates", () => { */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -156,7 +156,7 @@ describe("executionStatus / normal updates", () => { const invalidate3CValidate2CForkChoice = collectProtoarrayValidationStatus(fc); it("correcly invalidate 3C and validate 2C only", () => { - expect(invalidate3CValidate2CForkChoice).to.be.deep.equal( + expect(invalidate3CValidate2CForkChoice).toEqual( toFcTestCase([ 
["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Syncing], @@ -186,7 +186,7 @@ describe("executionStatus / normal updates", () => { ); const validate3B2B1A = collectProtoarrayValidationStatus(fc); it("Validate 3B, 2B, 1A", () => { - expect(validate3B2B1A).to.be.deep.equal( + expect(validate3B2B1A).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Valid], @@ -218,7 +218,7 @@ describe("executionStatus / normal updates", () => { ); const invalidate3A2A = collectProtoarrayValidationStatus(fc); it("Invalidate 3A, 2A with 2A loosing its bestChild, bestDescendant", () => { - expect(invalidate3A2A).to.be.deep.equal( + expect(invalidate3A2A).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Valid], @@ -245,7 +245,7 @@ describe("executionStatus / invalidate all postmerge chain", () => { */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -265,7 +265,7 @@ describe("executionStatus / invalidate all postmerge chain", () => { ); const postMergeInvalidated = collectProtoarrayValidationStatus(fc); it("all post merge blocks should be invalidated except Cs", () => { - expect(postMergeInvalidated).to.be.deep.equal( + expect(postMergeInvalidated).toEqual( toFcTestCase([ ["0", undefined, undefined, ExecutionStatus.PreMerge], ["1A", undefined, undefined, ExecutionStatus.Invalid], @@ -281,7 +281,7 @@ describe("executionStatus / invalidate all postmerge chain", () => { const fcHead = fc.findHead("0", 3); it("pre merge block should be the FC head", () => { - expect(fcHead).to.be.equal("0"); + expect(fcHead).toBe("0"); }); }); @@ -297,7 +297,7 @@ describe("executionStatus / poision forkchoice if we invalidate previous valid", */ const 
preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -316,7 +316,7 @@ describe("executionStatus / poision forkchoice if we invalidate previous valid", ); const validate3B2B1A = collectProtoarrayValidationStatus(fc); it("Validate 3B, 2B, 1A", () => { - expect(validate3B2B1A).to.be.deep.equal( + expect(validate3B2B1A).toEqual( toFcTestCase([ ["0", "1A", "3B", ExecutionStatus.PreMerge], ["1A", "2B", "3B", ExecutionStatus.Valid], @@ -340,10 +340,10 @@ describe("executionStatus / poision forkchoice if we invalidate previous valid", }, 3 ) - ).to.throw(Error); + ).toThrow(Error); - expect(fc.lvhError).to.be.deep.equal({lvhCode: LVHExecErrorCode.ValidToInvalid, blockRoot: "1A", execHash: "1A"}); - expect(() => fc.findHead("0", 3)).to.throw(Error); + expect(fc.lvhError).toEqual({lvhCode: LVHExecErrorCode.ValidToInvalid, blockRoot: "1A", execHash: "1A"}); + expect(() => fc.findHead("0", 3)).toThrow(Error); }); }); @@ -359,7 +359,7 @@ describe("executionStatus / poision forkchoice if we validate previous invalid", */ const preValidation = collectProtoarrayValidationStatus(fc); it("preValidation forkchoice setup should be correct", () => { - expect(preValidation).to.be.deep.equal(expectedPreValidationFC); + expect(preValidation).toEqual(expectedPreValidationFC); }); /** @@ -379,7 +379,7 @@ describe("executionStatus / poision forkchoice if we validate previous invalid", ); const validate3B2B1A = collectProtoarrayValidationStatus(fc); it("Inalidate 3B, 2B, 1A", () => { - expect(validate3B2B1A).to.be.deep.equal( + expect(validate3B2B1A).toEqual( toFcTestCase([ ["0", undefined, undefined, ExecutionStatus.PreMerge], ["1A", undefined, undefined, ExecutionStatus.Invalid], @@ -402,10 +402,10 @@ describe("executionStatus / poision forkchoice if we validate previous invalid", }, 3 ) - 
).to.throw(Error); + ).toThrow(Error); - expect(fc.lvhError).to.be.deep.equal({lvhCode: LVHExecErrorCode.InvalidToValid, blockRoot: "2A", execHash: "2A"}); - expect(() => fc.findHead("0", 3)).to.throw(Error); + expect(fc.lvhError).toEqual({lvhCode: LVHExecErrorCode.InvalidToValid, blockRoot: "2A", execHash: "2A"}); + expect(() => fc.findHead("0", 3)).toThrow(Error); }); }); diff --git a/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts b/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts index 3d47d906f74a..766c02a15a23 100644 --- a/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts +++ b/packages/fork-choice/test/unit/protoArray/getCommonAncestor.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {ProtoArray, ExecutionStatus} from "../../../src/index.js"; describe("getCommonAncestor", () => { @@ -73,7 +73,7 @@ describe("getCommonAncestor", () => { it(`${nodeA} & ${nodeB} -> ${ancestor}`, () => { // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const ancestorNode = fc.getCommonAncestor(fc.getNode(nodeA)!, fc.getNode(nodeB)!); - expect(ancestorNode && ancestorNode.blockRoot).to.equal(ancestor); + expect(ancestorNode && ancestorNode.blockRoot).toBe(ancestor); }); } @@ -104,5 +104,5 @@ describe("getCommonAncestor", () => { // multiple calls to applyScoreChanges don't keep on adding boosts to weight over // and over again, and applyScoreChanges can be safely called after onAttestations - expect(weightsAfterCall1).to.deep.equal(weightsAfterCall2); + expect(weightsAfterCall1).toEqual(weightsAfterCall2); }); diff --git a/packages/fork-choice/test/unit/protoArray/protoArray.test.ts b/packages/fork-choice/test/unit/protoArray/protoArray.test.ts index 88d6453e6204..c3bf8a0f439a 100644 --- a/packages/fork-choice/test/unit/protoArray/protoArray.test.ts +++ b/packages/fork-choice/test/unit/protoArray/protoArray.test.ts @@ -1,4 +1,4 @@ -import {expect} 
from "chai"; +import {describe, it, expect} from "vitest"; import {RootHex} from "@lodestar/types"; import {ProtoArray, ExecutionStatus} from "../../../src/index.js"; @@ -107,7 +107,7 @@ describe("ProtoArray", () => { ]; for (const [ancestorRoot, descendantRoot, isDescendant] of assertions) { - expect(fc.isDescendant(ancestorRoot, descendantRoot)).to.equal( + expect(fc.isDescendant(ancestorRoot, descendantRoot)).toBeWithMessage( isDescendant, `${descendantRoot} must be ${isDescendant ? "descendant" : "not descendant"} of ${ancestorRoot}` ); diff --git a/packages/fork-choice/vitest.config.ts b/packages/fork-choice/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/fork-choice/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/light-client/.mocharc.yaml b/packages/light-client/.mocharc.yaml deleted file mode 100644 index f28ebdf663a0..000000000000 --- a/packages/light-client/.mocharc.yaml +++ /dev/null @@ -1,6 +0,0 @@ -colors: true -timeout: 5000 -exit: true -extension: ["ts"] -node-option: - - "loader=ts-node/esm" diff --git a/packages/light-client/.nycrc.json b/packages/light-client/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/light-client/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/light-client/README.md b/packages/light-client/README.md index 85ebd86d3c19..cac2f71c6880 100644 --- a/packages/light-client/README.md +++ b/packages/light-client/README.md @@ -1,29 +1,132 @@ -# Lodestar Light-client +# Lodestar Light Client + +Ethereum light clients provide a pathway for users to interact with the Ethereum blockchain in a trust-minimized manner, comparable to the level of trust 
required when engaging with a third-party provider like Infura or EtherScan. Not that those platforms are bad, but trust in any centralized provider goes against the ethos of blockchain. Light clients are a way that low-power devices, like cell phones, can do self validation of transactions and dApp state. + +Unlike full nodes, light clients do not download and store the entire blockchain. Instead, they download only the headers of each block and employ Merkle proofs to verify transactions. This enables a quick synchronization with the network and access the latest information without using significant system resources​. This streamlined approach to accessing Ethereum is crucial, especially in scenarios where full-scale network participation is infeasible or undesired. + +The evolution of light clients is emblematic of the broader trajectory of Ethereum towards becoming more accessible and resource-efficient, making blockchain technology more inclusive and adaptable to a wide array of use cases and environments. The Altair hard fork introduced sync committees to allow light-clients to synchronize to the network. + +## Prerequisites [![Discord](https://img.shields.io/discord/593655374469660673.svg?label=Discord&logo=discord)](https://discord.gg/aMxzVcr) [![Eth Consensus Spec v1.1.10](https://img.shields.io/badge/ETH%20consensus--spec-1.1.10-blue)](https://github.com/ethereum/consensus-specs/releases/tag/v1.1.10) ![ES Version](https://img.shields.io/badge/ES-2021-yellow) ![Node Version](https://img.shields.io/badge/node-16.x-green) +![Yarn](https://img.shields.io/badge/yarn-%232C8EBB.svg?style=for-the-badge&logo=yarn&logoColor=white) > This package is part of [ChainSafe's Lodestar](https://lodestar.chainsafe.io) project -## Prerequisites +## Requirements for Running a Light-Client + +Access to an beacon node that supports the light client specification is necessary. 
The client must support the following routes from the [consensus API spec](https://github.com/ethereum/consensus-specs/tree/dev): -- [NodeJS](https://nodejs.org/) (LTS) -- [Yarn](https://yarnpkg.com/) +- `/eth/v1/beacon/light_client/updates` +- `/eth/v1/beacon/light_client/optimistic_update` +- `/eth/v1/beacon/light_client/finality_update` +- `/eth/v1/beacon/light_client/bootstrap/{block_root}` +- `/eth/v0/beacon/light_client/committee_root` -## What you need +System requirements are quite low so its possible to run a light client in the browser as part of a website. There are a few examples of this on github that you can use as reference, our [prover](https://chainsafe.github.io/lodestar/lightclient-prover/prover) being one of them. -You will need to go over the [specification](https://github.com/ethereum/consensus-specs). +You can find more information about the light-client protocol in the [specification](https://github.com/ethereum/consensus-specs). ## Getting started -- Follow the [installation guide](https://chainsafe.github.io/lodestar/) to install Lodestar. -- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/usage/local). +- Follow the [installation guide](https://chainsafe.github.io/lodestar/getting-started/installation) or [Docker install](https://chainsafe.github.io/lodestar/getting-started/installation/#docker-installation) to install Lodestar. +- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/advanced-topics/setting-up-a-testnet). + +## Light-Client CLI Example + +It is possible to start up the light-client as a standalone process. 
+ +```bash +lodestar lightclient \ + --network mainnet \ + --beacon-api-url https://beacon-node.your-domain.com \ + --checkpoint-root "0xccaff4b99986a7b05e06738f1828a32e40799b277fd9f9ff069be55341fe0229" +``` + +## Light-Client Programmatic Example + +For this example we will assume there is a running beacon node at `https://beacon-node.your-domain.com` + +```ts +import {Api} from "@lodestar/api/beacon"; +import {ApiError} from "@lodestar/api"; +import {Bytes32} from "@lodestar/types"; +import {createChainForkConfig} from "@lodestar/config"; +import {networksChainConfig} from "@lodestar/config/networks"; +import { + GenesisData, + Lightclient, + LightclientEvent, + RunStatusCode, + getLcLoggerConsole +} from `@lodestar/lightclient`; + +async function getGenesisData(api: Pick): Promise { + const res = await api.beacon.getGenesis(); + ApiError.assert(res); + + return { + genesisTime: Number(res.response.data.genesisTime), + genesisValidatorsRoot: res.response.data.genesisValidatorsRoot, + }; +} + +async function getSyncCheckpoint(api: Pick): Promise { + const res = await api.beacon.getStateFinalityCheckpoints("head"); + ApiError.assert(res); + return res.response.data.finalized.root; +} + +const config = createChainForkConfig(networksChainConfig.mainnet); + +const logger = getLcLoggerConsole({logDebug: Boolean(process.env.DEBUG)}); + +const api = getClient({urls: ["https://beacon-node.your-domain.com"]}, {config}); + +const transport = new LightClientRestTransport(api); + +const lightclient = await Lightclient.initializeFromCheckpointRoot({ + config, + logger, + transport, + genesisData: await getGenesisData(api), + checkpointRoot: await getSyncCheckpoint(api), + opts: { + allowForcedUpdates: true, + updateHeadersOnForcedUpdate: true, + } +}); + +// Wait for the lightclient to start +await new Promise((resolve) => { + const lightclientStarted = (status: RunStatusCode): void => { + if (status === RunStatusCode.started) { + 
this.lightclient?.emitter.off(LightclientEvent.statusChange, lightclientStarted); + resolve(); + } + }; + lightclient?.emitter.on(LightclientEvent.statusChange, lightclientStarted); + logger.info("Initiating lightclient"); + lightclient?.start(); +}); + +logger.info("Lightclient synced"); + +lightclient.emitter.on(LightclientEvent.lightClientFinalityHeader, async (finalityUpdate) => { + console.log(finalityUpdate); +}); + +lightclient.emitter.on(LightclientEvent.lightClientOptimisticHeader, async (optimisticUpdate) => { + console.log(optimisticUpdate); +}); +``` ## Contributors -Read our [contributors document](/CONTRIBUTING.md), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! +Read our [contribution documentation](https://chainsafe.github.io/lodestar/contribution/getting-started), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! 
## License diff --git a/packages/light-client/karma.config.cjs b/packages/light-client/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/light-client/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/light-client/package.json b/packages/light-client/package.json index c6068fa1b4d9..defe46d10013 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -59,20 +59,23 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit && yarn test:e2e", - "test:browsers": "yarn karma start karma.config.cjs", - "test:unit": "LODESTAR_PRESET=minimal nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/api": "^1.12.0", - "@lodestar/config": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/state-transition": "^1.12.0", - 
"@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/api": "^1.13.0", + "@lodestar/config": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/state-transition": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", "mitt": "^3.0.0", "strict-event-emitter-types": "^2.0.0" }, diff --git a/packages/light-client/src/spec/validateLightClientUpdate.ts b/packages/light-client/src/spec/validateLightClientUpdate.ts index 256be6a99c2c..2629986e85f2 100644 --- a/packages/light-client/src/spec/validateLightClientUpdate.ts +++ b/packages/light-client/src/spec/validateLightClientUpdate.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import type {PublicKey, Signature} from "@chainsafe/bls/types"; import {Root, ssz, allForks} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; diff --git a/packages/light-client/src/utils/utils.ts b/packages/light-client/src/utils/utils.ts index c6be99bac8ac..9960921eee90 100644 --- a/packages/light-client/src/utils/utils.ts +++ b/packages/light-client/src/utils/utils.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import type {PublicKey} from "@chainsafe/bls/types"; import {BitArray} from "@chainsafe/ssz"; import {altair, Root, ssz} from "@lodestar/types"; diff --git a/packages/light-client/src/validation.ts b/packages/light-client/src/validation.ts index a0d6f83d8d02..85c5c35a2cea 100644 --- a/packages/light-client/src/validation.ts +++ b/packages/light-client/src/validation.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import type {PublicKey, Signature} from "@chainsafe/bls/types"; import {altair, Root, Slot, ssz, allForks} from "@lodestar/types"; import { diff --git a/packages/light-client/test/globalSetup.ts b/packages/light-client/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null 
+++ b/packages/light-client/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/light-client/test/unit/isValidLightClientHeader.test.ts b/packages/light-client/test/unit/isValidLightClientHeader.test.ts index a28ac65ff618..40efa1293231 100644 --- a/packages/light-client/test/unit/isValidLightClientHeader.test.ts +++ b/packages/light-client/test/unit/isValidLightClientHeader.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import {ssz, allForks} from "@lodestar/types"; import {createBeaconConfig, createChainForkConfig, defaultChainConfig} from "@lodestar/config"; @@ -91,7 +91,7 @@ describe("isValidLightClientHeader", function () { testCases.forEach(([name, header]: [string, allForks.LightClientHeader]) => { it(name, function () { const isValid = isValidLightClientHeader(config, header); - expect(isValid).to.be.true; + expect(isValid).toBe(true); }); }); }); diff --git a/packages/light-client/test/unit/sync.node.test.ts b/packages/light-client/test/unit/sync.node.test.ts index 27c924e37462..168bfeceb5f9 100644 --- a/packages/light-client/test/unit/sync.node.test.ts +++ b/packages/light-client/test/unit/sync.node.test.ts @@ -1,5 +1,4 @@ -import {expect} from "chai"; -import {init} from "@chainsafe/bls/switchable"; +import {describe, it, expect, afterEach, vi} from "vitest"; import {JsonPath, toHexString} from "@chainsafe/ssz"; import {computeDescriptor, TreeOffsetProof} from "@chainsafe/persistent-merkle-tree"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SLOTS_PER_EPOCH} from "@lodestar/params"; @@ -21,21 +20,15 @@ import { lastInMap, } from "../utils/utils.js"; import {startServer, ServerOpts} from "../utils/server.js"; -import {isNode} from "../../src/utils/utils.js"; import {computeSyncPeriodAtSlot} from "../../src/utils/clock.js"; import {LightClientRestTransport} from 
"../../src/transport/rest.js"; const SOME_HASH = Buffer.alloc(32, 0xff); describe("sync", () => { + vi.setConfig({testTimeout: 30_000}); const afterEachCbs: (() => Promise | unknown)[] = []; - before("init bls", async () => { - // This process has to be done manually because of an issue in Karma runner - // https://github.com/karma-runner/karma/issues/3804 - await init(isNode ? "blst-native" : "herumi"); - }); - afterEach(async () => { await Promise.all(afterEachCbs); afterEachCbs.length = 0; @@ -168,16 +161,13 @@ describe("sync", () => { }); // Ensure that the lightclient head is correct - expect(lightclient.getHead().beacon.slot).to.equal(targetSlot, "lightclient.head is not the targetSlot head"); + expect(lightclient.getHead().beacon.slot).toBe(targetSlot); // Fetch proof of "latestExecutionPayloadHeader.stateRoot" const {proof, header} = await getHeadStateProof(lightclient, api, [["latestExecutionPayloadHeader", "stateRoot"]]); const recoveredState = ssz.bellatrix.BeaconState.createFromProof(proof, header.beacon.stateRoot); - expect(toHexString(recoveredState.latestExecutionPayloadHeader.stateRoot)).to.equal( - toHexString(executionStateRoot), - "Recovered executionStateRoot from getHeadStateProof() not correct" - ); + expect(toHexString(recoveredState.latestExecutionPayloadHeader.stateRoot)).toBe(toHexString(executionStateRoot)); }); }); diff --git a/packages/light-client/test/unit/syncInMemory.test.ts b/packages/light-client/test/unit/syncInMemory.test.ts index df9f5dcd57da..770827e86655 100644 --- a/packages/light-client/test/unit/syncInMemory.test.ts +++ b/packages/light-client/test/unit/syncInMemory.test.ts @@ -1,5 +1,5 @@ -import {expect} from "chai"; -import bls, {init} from "@chainsafe/bls/switchable"; +import {describe, it, expect, beforeAll, vi} from "vitest"; +import bls from "@chainsafe/bls"; import {createBeaconConfig} from "@lodestar/config"; import {chainConfig} from "@lodestar/config/default"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, 
SLOTS_PER_EPOCH} from "@lodestar/params"; @@ -9,7 +9,6 @@ import {BeaconChainLcMock} from "../mocks/BeaconChainLcMock.js"; import {processLightClientUpdate} from "../utils/naive/update.js"; import {IBeaconChainLc, prepareUpdateNaive} from "../utils/prepareUpdateNaive.js"; import {getInteropSyncCommittee, getSyncAggregateSigningRoot, SyncCommitteeKeys} from "../utils/utils.js"; -import {isNode} from "../../src/utils/utils.js"; function getSyncCommittee( syncCommitteesKeys: Map, @@ -25,7 +24,7 @@ function getSyncCommittee( describe("syncInMemory", function () { // In browser test this process is taking more time than default 2000ms - this.timeout(10000); + vi.setConfig({testTimeout: 10000}); // Fixed params const genValiRoot = Buffer.alloc(32, 9); @@ -35,20 +34,14 @@ describe("syncInMemory", function () { let updateData: {chain: IBeaconChainLc; blockWithSyncAggregate: altair.BeaconBlock}; let update: altair.LightClientUpdate; - before("init bls", async () => { - // This process has to be done manually because of an issue in Karma runner - // https://github.com/karma-runner/karma/issues/3804 - await init(isNode ? 
"blst-native" : "herumi"); - }); - - before("BLS sanity check", () => { + beforeAll(() => { const sk = bls.SecretKey.fromBytes(Buffer.alloc(32, 1)); - expect(sk.toPublicKey().toHex()).to.equal( + expect(sk.toPublicKey().toHex()).toBe( "0xaa1a1c26055a329817a5759d877a2795f9499b97d6056edde0eea39512f24e8bc874b4471f0501127abb1ea0d9f68ac1" ); }); - before("Generate data for prepareUpdate", () => { + beforeAll(() => { // Create a state that has as nextSyncCommittee the committee 2 const finalizedBlockSlot = SLOTS_PER_EPOCH * EPOCHS_PER_SYNC_COMMITTEE_PERIOD + 1; const headerBlockSlot = finalizedBlockSlot + 1; @@ -107,6 +100,6 @@ describe("syncInMemory", function () { }, }; - expect(() => processLightClientUpdate(config, store, update, currentSlot)).to.not.throw(); + expect(() => processLightClientUpdate(config, store, update, currentSlot)).not.toThrow(); }); }); diff --git a/packages/light-client/test/unit/utils.test.ts b/packages/light-client/test/unit/utils.test.ts index 90a97e05db28..91bfab113431 100644 --- a/packages/light-client/test/unit/utils.test.ts +++ b/packages/light-client/test/unit/utils.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {isValidMerkleBranch} from "../../src/utils/verifyMerkleBranch.js"; import {computeMerkleBranch} from "../utils/utils.js"; @@ -9,6 +9,6 @@ describe("utils", () => { const index = 22; const {root, proof} = computeMerkleBranch(leaf, depth, index); - expect(isValidMerkleBranch(leaf, proof, depth, index, root)).to.equal(true); + expect(isValidMerkleBranch(leaf, proof, depth, index, root)).toBe(true); }); }); diff --git a/packages/light-client/test/unit/utils/chunkify.test.ts b/packages/light-client/test/unit/utils/chunkify.test.ts index 78fc567513da..297637fd70b0 100644 --- a/packages/light-client/test/unit/utils/chunkify.test.ts +++ b/packages/light-client/test/unit/utils/chunkify.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from 
"vitest"; import {chunkifyInclusiveRange} from "../../../src/utils/chunkify.js"; describe("utils / chunkifyInclusiveRange", () => { @@ -20,7 +20,7 @@ describe("utils / chunkifyInclusiveRange", () => { for (const {id, from, to, max, result} of testCases) { it(id, () => { - expect(chunkifyInclusiveRange(from, to, max)).to.deep.equal(result); + expect(chunkifyInclusiveRange(from, to, max)).toEqual(result); }); } }); diff --git a/packages/light-client/test/unit/validation.test.ts b/packages/light-client/test/unit/validation.test.ts index 9bda4c86a91f..61442fb4bf8c 100644 --- a/packages/light-client/test/unit/validation.test.ts +++ b/packages/light-client/test/unit/validation.test.ts @@ -1,5 +1,5 @@ -import {expect} from "chai"; -import bls, {init} from "@chainsafe/bls/switchable"; +import {describe, it, expect, beforeAll, vi} from "vitest"; +import bls from "@chainsafe/bls"; import {Tree} from "@chainsafe/persistent-merkle-tree"; import {altair, ssz} from "@lodestar/types"; import {chainConfig} from "@lodestar/config/default"; @@ -14,12 +14,11 @@ import { import {assertValidLightClientUpdate} from "../../src/validation.js"; import {LightClientSnapshotFast, SyncCommitteeFast} from "../../src/types.js"; import {defaultBeaconBlockHeader, getSyncAggregateSigningRoot, signAndAggregate} from "../utils/utils.js"; -import {isNode} from "../../src/utils/utils.js"; describe("validation", function () { // In browser test this process is taking more time than default 2000ms // specially on the CI - this.timeout(15000); + vi.setConfig({testTimeout: 15000}); const genValiRoot = Buffer.alloc(32, 9); const config = createBeaconConfig(chainConfig, genValiRoot); @@ -27,13 +26,7 @@ describe("validation", function () { let update: altair.LightClientUpdate; let snapshot: LightClientSnapshotFast; - before("prepare bls", async () => { - // This process has to be done manually because of an issue in Karma runner - // https://github.com/karma-runner/karma/issues/3804 - await init(isNode ? 
"blst-native" : "herumi"); - }); - - before("prepare data", function () { + beforeAll(function () { // Update slot must > snapshot slot // attestedHeaderSlot must == updateHeaderSlot + 1 const snapshotHeaderSlot = 1; @@ -106,6 +99,6 @@ describe("validation", function () { }); it("should validate valid update", () => { - expect(() => assertValidLightClientUpdate(config, snapshot.nextSyncCommittee, update)).to.not.throw(); + expect(() => assertValidLightClientUpdate(config, snapshot.nextSyncCommittee, update)).not.toThrow(); }); }); diff --git a/packages/light-client/test/utils/utils.ts b/packages/light-client/test/utils/utils.ts index df9bd4170dcc..455d6ef9997b 100644 --- a/packages/light-client/test/utils/utils.ts +++ b/packages/light-client/test/utils/utils.ts @@ -1,4 +1,4 @@ -import bls from "@chainsafe/bls/switchable"; +import bls from "@chainsafe/bls"; import {PointFormat, PublicKey, SecretKey} from "@chainsafe/bls/types"; import {hasher, Tree} from "@chainsafe/persistent-merkle-tree"; import {BitArray, fromHexString} from "@chainsafe/ssz"; diff --git a/packages/light-client/tsconfig.e2e.json b/packages/light-client/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/light-client/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/light-client/vitest.browser.config.ts b/packages/light-client/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/light-client/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git 
a/packages/light-client/vitest.config.ts b/packages/light-client/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/light-client/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/light-client/webpack.test.config.cjs b/packages/light-client/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/light-client/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/logger/.mocharc.yml b/packages/logger/.mocharc.yml deleted file mode 100644 index a70609535d4f..000000000000 --- a/packages/logger/.mocharc.yml +++ /dev/null @@ -1,5 +0,0 @@ -colors: true -node-option: - - "loader=ts-node/esm" -require: - - ./test/setup.ts \ No newline at end of file diff --git a/packages/logger/karma.config.cjs b/packages/logger/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/logger/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/logger/package.json b/packages/logger/package.json index cc26cb7123c7..6807fca853a6 100644 --- a/packages/logger/package.json +++ b/packages/logger/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -56,20 +56,24 @@ "lint": "eslint --color --ext .ts src/ test/", 
"lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "mocha 'test/unit/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", - "test:e2e": "LODESTAR_PRESET=minimal mocha 'test/e2e/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --dir test/e2e", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", "dependencies": { - "@lodestar/utils": "^1.12.0", + "@lodestar/utils": "^1.13.0", "winston": "^3.8.2", "winston-daily-rotate-file": "^4.7.1", "winston-transport": "^4.5.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.12.0", + "@chainsafe/threads": "^1.11.1", + "@lodestar/test-utils": "^1.13.0", "@types/triple-beam": "^1.3.2", "rimraf": "^4.4.1", "triple-beam": "^1.3.0" diff --git a/packages/logger/test/e2e/logger/workerLogger.ts b/packages/logger/test/e2e/logger/workerLogger.js similarity index 87% rename from packages/logger/test/e2e/logger/workerLogger.ts rename to packages/logger/test/e2e/logger/workerLogger.js index 0a4f1dd9207b..9608336c433f 100644 --- a/packages/logger/test/e2e/logger/workerLogger.ts +++ b/packages/logger/test/e2e/logger/workerLogger.js @@ -3,7 +3,7 @@ import worker from "node:worker_threads"; import {expose} from "@chainsafe/threads/worker"; const parentPort = worker.parentPort; -const workerData = worker.workerData as {logFilepath: string}; +const workerData = worker.workerData; if (!parentPort) throw Error("parentPort must be defined"); const 
file = fs.createWriteStream(workerData.logFilepath, {flags: "a"}); diff --git a/packages/logger/test/e2e/logger/workerLoggerHandler.ts b/packages/logger/test/e2e/logger/workerLoggerHandler.ts index 3ff095fc4f89..b166ef15ff00 100644 --- a/packages/logger/test/e2e/logger/workerLoggerHandler.ts +++ b/packages/logger/test/e2e/logger/workerLoggerHandler.ts @@ -9,7 +9,9 @@ export type LoggerWorker = { type WorkerData = {logFilepath: string}; export async function getLoggerWorker(opts: WorkerData): Promise { - const workerThreadjs = new Worker("./workerLogger.js", {workerData: opts}); + const workerThreadjs = new Worker("./workerLogger.js", { + workerData: opts, + }); const worker = workerThreadjs as unknown as worker_threads.Worker; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/packages/logger/test/e2e/logger/workerLogs.test.ts b/packages/logger/test/e2e/logger/workerLogs.test.ts index 52b8b5efa4b1..3c81cbf92c57 100644 --- a/packages/logger/test/e2e/logger/workerLogs.test.ts +++ b/packages/logger/test/e2e/logger/workerLogs.test.ts @@ -1,7 +1,7 @@ import path from "node:path"; import fs from "node:fs"; import {fileURLToPath} from "node:url"; -import {expect} from "chai"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {sleep} from "@lodestar/utils"; import {LoggerWorker, getLoggerWorker} from "./workerLoggerHandler.js"; @@ -11,7 +11,7 @@ import {LoggerWorker, getLoggerWorker} from "./workerLoggerHandler.js"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); describe("worker logs", function () { - this.timeout(60_000); + vi.setConfig({testTimeout: 60_000}); const logFilepath = path.join(__dirname, "../../../test-logs/test_worker_logs.log"); let loggerWorker: LoggerWorker; @@ -36,7 +36,7 @@ describe("worker logs", function () { fs.createWriteStream(logFilepath, {flags: "a"}).write(logTextMainThread); const data = await waitForFileSize(logFilepath, logTextMainThread.length); - 
expect(data).includes(logTextMainThread); + expect(data).toContain(logTextMainThread); }); it("worker writes to file", async () => { @@ -44,7 +44,7 @@ describe("worker logs", function () { loggerWorker.log(logTextWorker); const data = await waitForFileSize(logFilepath, logTextWorker.length); - expect(data).includes(logTextWorker); + expect(data).toContain(logTextWorker); }); it("concurrent write from two write streams in different threads", async () => { @@ -57,8 +57,8 @@ describe("worker logs", function () { file.write(logTextMainThread + "\n"); const data = await waitForFileSize(logFilepath, logTextWorker.length + logTextMainThread.length); - expect(data).includes(logTextWorker); - expect(data).includes(logTextMainThread); + expect(data).toContain(logTextWorker); + expect(data).toContain(logTextMainThread); }); }); diff --git a/packages/logger/test/globalSetup.ts b/packages/logger/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/logger/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/logger/test/setup.ts b/packages/logger/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/logger/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/logger/test/unit/browser.test.ts b/packages/logger/test/unit/browser.test.ts index c1dd70b6bebd..e2160418663a 100644 --- a/packages/logger/test/unit/browser.test.ts +++ b/packages/logger/test/unit/browser.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {LogLevel} from "@lodestar/utils"; import {stubLoggerForConsole} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode, logFormats} from "../../src/index.js"; @@ 
-22,7 +22,7 @@ describe("browser logger", () => { logger.warn(message, context, error); logger.restoreStubs(); - expect(logger.getLogs()).deep.equals([output[format]]); + expect(logger.getLogs()).toEqual([output[format]]); }); } } diff --git a/packages/logger/test/unit/env.node.test.ts b/packages/logger/test/unit/env.node.test.ts index 547f891b7ea1..4d2b914ca7f4 100644 --- a/packages/logger/test/unit/env.node.test.ts +++ b/packages/logger/test/unit/env.node.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {LogLevel} from "@lodestar/utils"; import {stubLoggerForConsole} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode, logFormats} from "../../src/index.js"; @@ -20,7 +20,7 @@ describe("env logger", () => { logger.warn(message, context, error); logger.restoreStubs(); - expect(logger.getLogs()).deep.equals([output[format]]); + expect(logger.getLogs()).toEqual([output[format]]); }); } } diff --git a/packages/logger/test/unit/node.node.test.ts b/packages/logger/test/unit/node.node.test.ts index 6342ae9e4ccb..12782fa49af8 100644 --- a/packages/logger/test/unit/node.node.test.ts +++ b/packages/logger/test/unit/node.node.test.ts @@ -1,27 +1,35 @@ -import {expect} from "chai"; +import {describe, it, expect, vi, afterEach, Mock} from "vitest"; import {LogLevel} from "@lodestar/utils"; -import {stubLoggerForProcessStd} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode, logFormats} from "../../src/index.js"; import {getNodeLogger} from "../../src/node.js"; import {formatsTestCases} from "../fixtures/loggerFormats.js"; +// Node.js maps `process.stdout` to `console._stdout`. +// spy does not work on `process.stdout` directly. 
+// eslint-disable-next-line @typescript-eslint/naming-convention +type TestConsole = typeof console & {_stdout: {write: Mock}}; + describe("node logger", () => { + afterEach(() => { + vi.resetAllMocks(); + }); + describe("format and options", () => { for (const testCase of formatsTestCases) { const {id, opts, message, context, error, output} = typeof testCase === "function" ? testCase() : testCase; for (const format of logFormats) { it(`${id} ${format} output`, async () => { - const logger = stubLoggerForProcessStd( - getNodeLogger({ - level: LogLevel.info, - format, - module: opts?.module, - timestampFormat: {format: TimestampFormatCode.Hidden}, - }) - ); + vi.spyOn((console as TestConsole)._stdout, "write"); + + const logger = getNodeLogger({ + level: LogLevel.info, + format, + module: opts?.module, + timestampFormat: {format: TimestampFormatCode.Hidden}, + }); logger.warn(message, context, error); - logger.restoreStubs(); - expect(logger.getLogs()).deep.equals([output[format]]); + + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith(1, `${output[format]}\n`); }); } } diff --git a/packages/logger/test/unit/utils/json.test.ts b/packages/logger/test/unit/utils/json.test.ts index 06352fc5f171..02a6c95e1ed9 100644 --- a/packages/logger/test/unit/utils/json.test.ts +++ b/packages/logger/test/unit/utils/json.test.ts @@ -1,6 +1,4 @@ -/* eslint-disable @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-assignment */ -import "../../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {fromHexString, toHexString} from "@chainsafe/ssz"; import {LodestarError} from "@lodestar/utils"; import {logCtxToJson, logCtxToString} from "../../../src/utils/json.js"; @@ -13,7 +11,7 @@ describe("Json helper", () => { type TestCase = { id: string; arg: unknown; - json: any; + json: unknown; }; const testCases: (TestCase | (() => TestCase))[] = [ // Basic types @@ -27,13 +25,13 @@ describe("Json helper", () 
=> { // Functions // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "function", arg: function () {}, json: "function () { }"}, + {id: "function", arg: function () {}, json: "function() {\n }"}, // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "arrow function", arg: () => {}, json: "() => { }"}, + {id: "arrow function", arg: () => {}, json: "() => {\n }"}, // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "async function", arg: async function () {}, json: "async function () { }"}, + {id: "async function", arg: async function () {}, json: "async function() {\n }"}, // eslint-disable-next-line @typescript-eslint/no-empty-function - {id: "async arrow function", arg: async () => {}, json: "async () => { }"}, + {id: "async arrow function", arg: async () => {}, json: "async () => {\n }"}, // Arrays {id: "array of basic types", arg: [1, 2, 3], json: [1, 2, 3]}, @@ -119,6 +117,7 @@ describe("Json helper", () => { // Circular references () => { const circularReference: any = {}; + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access circularReference.myself = circularReference; return { id: "circular reference", @@ -131,7 +130,7 @@ describe("Json helper", () => { for (const testCase of testCases) { const {id, arg, json} = typeof testCase === "function" ? testCase() : testCase; it(id, () => { - expect(logCtxToJson(arg)).to.deep.equal(json); + expect(logCtxToJson(arg)).toEqual(json); }); } }); @@ -180,6 +179,7 @@ describe("Json helper", () => { // Circular references () => { const circularReference: any = {}; + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access circularReference.myself = circularReference; return { id: "circular reference", @@ -192,7 +192,7 @@ describe("Json helper", () => { for (const testCase of testCases) { const {id, json, output} = typeof testCase === "function" ? 
testCase() : testCase; it(id, () => { - expect(logCtxToString(json)).to.equal(output); + expect(logCtxToString(json)).toBe(output); }); } }); diff --git a/packages/logger/test/unit/utils/timeFormat.test.ts b/packages/logger/test/unit/utils/timeFormat.test.ts index 62640ff48c2c..fc374a0f6c7f 100644 --- a/packages/logger/test/unit/utils/timeFormat.test.ts +++ b/packages/logger/test/unit/utils/timeFormat.test.ts @@ -1,5 +1,4 @@ -import "../../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {formatEpochSlotTime} from "../../../src/utils/timeFormat.js"; describe("logger / util / formatEpochSlotTime", () => { @@ -17,7 +16,7 @@ describe("logger / util / formatEpochSlotTime", () => { const expectLog = `Eph ${epoch}/${slot} ${sec}`; // "Eph 3/6 11.423"; it(expectLog, () => { const genesisTime = nowSec - epoch * slotsPerEpoch * secondsPerSlot - slot * secondsPerSlot - sec; - expect(formatEpochSlotTime({genesisTime, secondsPerSlot, slotsPerEpoch}, nowSec * 1000)).to.equal(expectLog); + expect(formatEpochSlotTime({genesisTime, secondsPerSlot, slotsPerEpoch}, nowSec * 1000)).toBe(expectLog); }); } }); diff --git a/packages/logger/test/unit/winston.node.test.ts b/packages/logger/test/unit/winston.node.test.ts index cdf7e17ddeb1..8ef49da4e02d 100644 --- a/packages/logger/test/unit/winston.node.test.ts +++ b/packages/logger/test/unit/winston.node.test.ts @@ -1,26 +1,34 @@ import fs from "node:fs"; import path from "node:path"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll, afterEach, vi, Mock} from "vitest"; import {LogLevel} from "@lodestar/utils"; -import {stubLoggerForProcessStd} from "@lodestar/test-utils/mocha"; import {TimestampFormatCode} from "../../src/index.js"; import {getNodeLogger} from "../../src/node.js"; import {readFileWhenExists} from "../utils/files.js"; +// Node.js maps `process.stdout` to `console._stdout`. +// spy does not work on `process.stdout` directly. 
+// eslint-disable-next-line @typescript-eslint/naming-convention +type TestConsole = typeof console & {_stdout: {write: Mock}}; + describe("winston logger", () => { + afterEach(() => { + vi.resetAllMocks(); + }); + describe("winston dynamic level by module", () => { it("should log to child at a lower log level", async () => { - const loggerA = stubLoggerForProcessStd( - getNodeLogger({ - level: LogLevel.info, - module: "a", - format: "human", - levelModule: { - "a/b": LogLevel.debug, - }, - timestampFormat: {format: TimestampFormatCode.Hidden}, - }) - ); + vi.spyOn((console as TestConsole)._stdout, "write"); + + const loggerA = getNodeLogger({ + level: LogLevel.info, + module: "a", + format: "human", + levelModule: { + "a/b": LogLevel.debug, + }, + timestampFormat: {format: TimestampFormatCode.Hidden}, + }); const loggerAB = loggerA.child({module: "b"}); @@ -29,24 +37,31 @@ describe("winston logger", () => { loggerAB.info("test a/b info"); // show loggerAB.debug("test a/b debug"); // show - loggerA.restoreStubs(); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 1, + "[a] \u001b[32minfo\u001b[39m: test a info\n" + ); + + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 2, + "[a/b] \u001b[32minfo\u001b[39m: test a/b info\n" + ); - expect(loggerA.getLogs()).deep.equals([ - "[a] \u001b[32minfo\u001b[39m: test a info", - "[a/b] \u001b[32minfo\u001b[39m: test a/b info", - "[a/b] \u001b[34mdebug\u001b[39m: test a/b debug", - ]); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 3, + "[a/b] \u001b[34mdebug\u001b[39m: test a/b debug\n" + ); }); }); describe("winston transport log to file", () => { let tmpDir: string; - before(() => { + beforeAll(() => { tmpDir = fs.mkdtempSync("test-lodestar-winston-test"); }); - after(() => { + afterAll(() => { fs.rmSync(tmpDir, {recursive: true}); }); @@ -71,15 +86,20 @@ describe("winston logger", () => { const expectedOut = "[a] \u001b[33mwarn\u001b[39m: 
test"; - expect(await readFileWhenExists(tmpDir, filenameRx)).to.equal(expectedOut); + expect(await readFileWhenExists(tmpDir, filenameRx)).toBe(expectedOut); }); }); describe("child logger", () => { it("should parse child module", async () => { - const loggerA = stubLoggerForProcessStd( - getNodeLogger({level: LogLevel.info, timestampFormat: {format: TimestampFormatCode.Hidden}, module: "a"}) - ); + vi.spyOn((console as TestConsole)._stdout, "write"); + + const loggerA = getNodeLogger({ + level: LogLevel.info, + timestampFormat: {format: TimestampFormatCode.Hidden}, + module: "a", + }); + const loggerAB = loggerA.child({module: "b"}); const loggerABC = loggerAB.child({module: "c"}); @@ -87,13 +107,18 @@ describe("winston logger", () => { loggerAB.warn("test a/b"); loggerABC.warn("test a/b/c"); - loggerA.restoreStubs(); - - expect(loggerA.getLogs()).deep.equals([ - "[a] \u001b[33mwarn\u001b[39m: test a", - "[a/b] \u001b[33mwarn\u001b[39m: test a/b", - "[a/b/c] \u001b[33mwarn\u001b[39m: test a/b/c", - ]); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 1, + "[a] \u001b[33mwarn\u001b[39m: test a\n" + ); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 2, + "[a/b] \u001b[33mwarn\u001b[39m: test a/b\n" + ); + expect((console as TestConsole)._stdout.write).toHaveBeenNthCalledWith( + 3, + "[a/b/c] \u001b[33mwarn\u001b[39m: test a/b/c\n" + ); }); }); }); diff --git a/packages/logger/test/utils/chai.ts b/packages/logger/test/utils/chai.ts deleted file mode 100644 index 3c1e855021be..000000000000 --- a/packages/logger/test/utils/chai.ts +++ /dev/null @@ -1,9 +0,0 @@ -import {expect} from "chai"; - -export function expectDeepEquals(a: T, b: T, message?: string): void { - expect(a).deep.equals(b, message); -} - -export function expectEquals(a: T, b: T, message?: string): void { - expect(a).equals(b, message); -} diff --git a/packages/logger/tsconfig.e2e.json b/packages/logger/tsconfig.e2e.json deleted file mode 100644 
index cedf626f4124..000000000000 --- a/packages/logger/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/logger/vitest.browser.config.ts b/packages/logger/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/logger/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/logger/vitest.config.ts b/packages/logger/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/logger/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/logger/webpack.test.config.cjs b/packages/logger/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/logger/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/params/karma.config.cjs b/packages/params/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/params/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff 
--git a/packages/params/package.json b/packages/params/package.json index e3cacfc45b60..c65cc4570ffc 100644 --- a/packages/params/package.json +++ b/packages/params/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/params", - "version": "1.12.0", + "version": "1.13.0", "description": "Chain parameters required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -52,10 +52,13 @@ "check-types": "tsc", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", - "test": "yarn run check-types", - "test:unit": "mocha 'test/unit/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", - "test:e2e": "LODESTAR_PRESET=minimal mocha 'test/e2e/**/*.test.ts'", + "test": "yarn run check-types", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --dir test/e2e/", "check-readme": "typescript-docs-verifier" }, "repository": { diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index 3d784356ae0f..e0623537d7f0 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -90,6 +90,7 @@ export const { FIELD_ELEMENTS_PER_BLOB, MAX_BLOB_COMMITMENTS_PER_BLOCK, MAX_BLOBS_PER_BLOCK, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, } = activePreset; //////////// @@ -235,3 +236,10 @@ export const INTERVALS_PER_SLOT = 3; export const BYTES_PER_FIELD_ELEMENT = 32; export const BLOB_TX_TYPE = 0x03; export const VERSIONED_HASH_VERSION_KZG = 0x01; + +// 
ssz.deneb.BeaconBlockBody.getPathInfo(['blobKzgCommitments',0]).gindex +export const KZG_COMMITMENT_GINDEX0 = ACTIVE_PRESET === PresetName.minimal ? 864 : 221184; +export const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** KZG_COMMITMENT_INCLUSION_PROOF_DEPTH; + +// ssz.deneb.BlobSidecars.elementType.fixedSize +export const BLOBSIDECAR_FIXED_SIZE = ACTIVE_PRESET === PresetName.minimal ? 131672 : 131928; diff --git a/packages/params/src/presets/mainnet.ts b/packages/params/src/presets/mainnet.ts index f29b1668ac44..9b591103edf5 100644 --- a/packages/params/src/presets/mainnet.ts +++ b/packages/params/src/presets/mainnet.ts @@ -115,4 +115,5 @@ export const mainnetPreset: BeaconPreset = { FIELD_ELEMENTS_PER_BLOB: 4096, MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096, MAX_BLOBS_PER_BLOCK: 6, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17, }; diff --git a/packages/params/src/presets/minimal.ts b/packages/params/src/presets/minimal.ts index 34d690045117..ad86cbf89e61 100644 --- a/packages/params/src/presets/minimal.ts +++ b/packages/params/src/presets/minimal.ts @@ -119,7 +119,8 @@ export const minimalPreset: BeaconPreset = { // DENEB /////////// // https://github.com/ethereum/consensus-specs/blob/dev/presets/minimal/eip4844.yaml - FIELD_ELEMENTS_PER_BLOB: 4, + FIELD_ELEMENTS_PER_BLOB: 4096, MAX_BLOB_COMMITMENTS_PER_BLOCK: 16, MAX_BLOBS_PER_BLOCK: 6, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 9, }; diff --git a/packages/params/src/types.ts b/packages/params/src/types.ts index 67d258bdd0c9..3c5ba6381131 100644 --- a/packages/params/src/types.ts +++ b/packages/params/src/types.ts @@ -81,6 +81,7 @@ export type BeaconPreset = { FIELD_ELEMENTS_PER_BLOB: number; MAX_BLOB_COMMITMENTS_PER_BLOCK: number; MAX_BLOBS_PER_BLOCK: number; + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: number; }; /** @@ -165,6 +166,7 @@ export const beaconPresetTypes: BeaconPresetTypes = { FIELD_ELEMENTS_PER_BLOB: "number", MAX_BLOB_COMMITMENTS_PER_BLOCK: "number", MAX_BLOBS_PER_BLOCK: "number", + 
KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: "number", }; type BeaconPresetTypes = { diff --git a/packages/params/test/e2e/ensure-config-is-synced.test.ts b/packages/params/test/e2e/ensure-config-is-synced.test.ts index 6be3e6e15db1..06fb4bae000c 100644 --- a/packages/params/test/e2e/ensure-config-is-synced.test.ts +++ b/packages/params/test/e2e/ensure-config-is-synced.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, vi} from "vitest"; import axios from "axios"; import {mainnetPreset} from "../../src/presets/mainnet.js"; import {minimalPreset} from "../../src/presets/minimal.js"; @@ -8,10 +8,10 @@ import {loadConfigYaml} from "../yaml.js"; // Not e2e, but slow. Run with e2e tests /** https://github.com/ethereum/consensus-specs/releases */ -const specConfigCommit = "v1.4.0-beta.2"; +const specConfigCommit = "v1.4.0-beta.5"; describe("Ensure config is synced", function () { - this.timeout(60 * 1000); + vi.setConfig({testTimeout: 60 * 1000}); it("mainnet", async function () { const remotePreset = await downloadRemoteConfig("mainnet", specConfigCommit); @@ -27,10 +27,10 @@ describe("Ensure config is synced", function () { function assertCorrectPreset(localPreset: BeaconPreset, remotePreset: BeaconPreset): void { // Check each key for better debuggability for (const key of Object.keys(remotePreset) as (keyof BeaconPreset)[]) { - expect(localPreset[key]).to.equal(remotePreset[key], `Wrong ${key} value`); + expect(localPreset[key]).toBe(remotePreset[key]); } - expect(localPreset).to.deep.equal(remotePreset); + expect(localPreset).toEqual(remotePreset); } async function downloadRemoteConfig(preset: "mainnet" | "minimal", commit: string): Promise { diff --git a/packages/params/test/e2e/overridePreset.test.ts b/packages/params/test/e2e/overridePreset.test.ts index c03e54a480da..16f29f3c5c84 100644 --- a/packages/params/test/e2e/overridePreset.test.ts +++ b/packages/params/test/e2e/overridePreset.test.ts @@ -2,16 +2,13 @@ import path from 
"node:path"; import util from "node:util"; import child from "node:child_process"; import {fileURLToPath} from "node:url"; -import {expect, use} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect, vi} from "vitest"; const scriptNames = { ok: "overridePresetOk.ts", error: "overridePresetError.ts", }; -use(chaiAsPromised); - const exec = util.promisify(child.exec); // Global variable __dirname no longer available in ES6 modules. @@ -21,7 +18,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); describe("Override preset", function () { // Allow time for ts-node to compile Typescript source - this.timeout(30_000); + vi.setConfig({testTimeout: 30_000}); it("Should correctly override preset", async () => { // These commands can not run with minimal preset @@ -31,7 +28,7 @@ describe("Override preset", function () { }); it("Should throw trying to override preset in the wrong order", async () => { - await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).to.be.rejectedWith( + await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).rejects.toThrow( "Lodestar preset is already frozen" ); }); diff --git a/packages/params/test/e2e/setPreset.test.ts b/packages/params/test/e2e/setPreset.test.ts index 38942d2ee514..aa1371fa2eea 100644 --- a/packages/params/test/e2e/setPreset.test.ts +++ b/packages/params/test/e2e/setPreset.test.ts @@ -2,16 +2,13 @@ import path from "node:path"; import util from "node:util"; import child from "node:child_process"; import {fileURLToPath} from "node:url"; -import {expect, use} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect, vi} from "vitest"; const scriptNames = { ok: "setPresetOk.ts", error: "setPresetError.ts", }; -use(chaiAsPromised); - const exec = util.promisify(child.exec); // Global variable __dirname no longer available in ES6 modules. 
@@ -21,7 +18,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); describe("setPreset", function () { // Allow time for ts-node to compile Typescript source - this.timeout(30_000); + vi.setConfig({testTimeout: 30_000}); it("Should correctly set preset", async () => { // These commands can not run with minimal preset @@ -31,7 +28,7 @@ describe("setPreset", function () { }); it("Should throw trying to set preset in the wrong order", async () => { - await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).to.be.rejectedWith( + await expect(exec(`node --loader ts-node/esm ${path.join(__dirname, scriptNames.error)}`)).rejects.toThrow( "Lodestar preset is already frozen" ); }); diff --git a/packages/params/test/globalSetup.ts b/packages/params/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/params/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/params/test/unit/activePreset.test.ts b/packages/params/test/unit/activePreset.test.ts index 1d3c7b7a888d..eceda7eaac92 100644 --- a/packages/params/test/unit/activePreset.test.ts +++ b/packages/params/test/unit/activePreset.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {mainnetPreset} from "../../src/presets/mainnet.js"; import {minimalPreset} from "../../src/presets/minimal.js"; import {gnosisPreset as gnosisParams} from "../../src/presets/gnosis.js"; @@ -16,18 +16,15 @@ describe("active preset", async () => { it("Active preset should be set to the correct value", () => { if (process.env.LODESTAR_PRESET) { - expect(ACTIVE_PRESET).to.equal( - process.env.LODESTAR_PRESET, - "process.env.LODESTAR_PRESET must equal ACTIVE_PRESET" - ); + expect(ACTIVE_PRESET).toBe(process.env.LODESTAR_PRESET); } else { - expect(ACTIVE_PRESET).to.equal(PresetName.mainnet, "Default preset 
must be mainnet"); + expect(ACTIVE_PRESET).toBe(PresetName.mainnet); } }); it("Constants should be set to the correct value", () => { for (const [k, v] of Object.entries(params[ACTIVE_PRESET])) { - expect(exports[k]).to.deep.equal(v); + expect(exports[k]).toEqual(v); } }); @@ -37,6 +34,6 @@ describe("active preset", async () => { // To ensure this throws, call setActivePreset on both the src and lib file. setActivePreset(PresetName.minimal); setActivePresetLib(PresetName.minimal); - }).to.throw(); + }).toThrow(); }); }); diff --git a/packages/params/test/unit/applicationDomains.test.ts b/packages/params/test/unit/applicationDomains.test.ts index b3c0bb35ad9f..294ceb83ce44 100644 --- a/packages/params/test/unit/applicationDomains.test.ts +++ b/packages/params/test/unit/applicationDomains.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {DOMAIN_APPLICATION_MASK, DOMAIN_APPLICATION_BUILDER} from "../../src/index.js"; describe("validate application domains", () => { @@ -8,7 +8,8 @@ describe("validate application domains", () => { for (let i = 0; i < DOMAIN_APPLICATION_MASK.length; i++) { r += DOMAIN_APPLICATION_MASK[i] & domain[i]; } - expect(r).to.be.above(0, `${name} mask application should be valid`); + // eslint-disable-next-line chai-expect/no-inner-compare + expect(r > 0).toBeWithMessage(true, `${name} mask application should be valid`); }); }); }); diff --git a/packages/params/tsconfig.e2e.json b/packages/params/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/params/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/params/vitest.browser.config.ts b/packages/params/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/params/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import 
{defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/params/vitest.config.ts b/packages/params/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/params/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/params/webpack.test.config.cjs b/packages/params/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/params/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/prover/.mocharc.yaml b/packages/prover/.mocharc.yaml deleted file mode 100644 index f9375365e517..000000000000 --- a/packages/prover/.mocharc.yaml +++ /dev/null @@ -1,8 +0,0 @@ -colors: true -timeout: 2000 -exit: true -extension: ["ts"] -require: - - ./test/setup.ts -node-option: - - "loader=ts-node/esm" diff --git a/packages/prover/.nycrc.json b/packages/prover/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/prover/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/prover/README.md b/packages/prover/README.md index e9cf389a3cc2..290766219e79 100644 --- a/packages/prover/README.md +++ b/packages/prover/README.md @@ -123,12 +123,12 @@ You will need to go over the [specification](https://github.com/ethereum/beacon- ## Getting started -- Follow the [installation 
guide](https://chainsafe.github.io/lodestar/) to install Lodestar. -- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/usage/local). +- Follow the [installation guide](https://chainsafe.github.io/lodestar/getting-started/installation) to install Lodestar. +- Quickly try out the whole stack by [starting a local testnet](https://chainsafe.github.io/lodestar/advanced-topics/setting-up-a-testnet). ## Contributors -Read our [contributors document](/CONTRIBUTING.md), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! +Read our [contributors document](https://chainsafe.github.io/lodestar/contribution/getting-started), [submit an issue](https://github.com/ChainSafe/lodestar/issues/new/choose) or talk to us on our [discord](https://discord.gg/yjyvFRP)! ## License diff --git a/packages/prover/karma.config.cjs b/packages/prover/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/prover/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/prover/package.json b/packages/prover/package.json index f4476ab32d14..b5b72c591771 100644 --- a/packages/prover/package.json +++ b/packages/prover/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -53,9 +53,12 @@ "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit && yarn test:e2e", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", - "test:e2e": 
"LODESTAR_PRESET=minimal mocha 'test/e2e/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", + "test:e2e": "LODESTAR_PRESET=minimal vitest --run --poolOptions.threads.singleThread true --dir test/e2e", "check-readme": "typescript-docs-verifier", "generate-fixtures": "node --loader ts-node/esm scripts/generate_fixtures.ts" }, @@ -69,13 +72,13 @@ "@ethereumjs/tx": "^4.1.2", "@ethereumjs/util": "^8.0.6", "@ethereumjs/vm": "^6.4.2", - "@lodestar/api": "^1.12.0", - "@lodestar/config": "^1.12.0", - "@lodestar/light-client": "^1.12.0", - "@lodestar/logger": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/api": "^1.13.0", + "@lodestar/config": "^1.13.0", + "@lodestar/light-client": "^1.13.0", + "@lodestar/logger": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", "ethereum-cryptography": "^1.2.0", "find-up": "^6.3.0", "http-proxy": "^1.18.1", @@ -84,7 +87,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.12.0", + "@lodestar/test-utils": "^1.13.0", "@types/http-proxy": "^1.17.10", "@types/yargs": "^17.0.24", "axios": "^1.3.4", diff --git a/packages/prover/src/utils/command.ts b/packages/prover/src/utils/command.ts index f22aca319af0..81a3993f3c43 100644 --- a/packages/prover/src/utils/command.ts +++ b/packages/prover/src/utils/command.ts @@ -1,6 +1,16 @@ import {Options, Argv} from "yargs"; -export type CliCommandOptions = Required<{[key in keyof 
OwnArgs]: Options}>; +export interface CliExample { + command: string; + title?: string; + description?: string; +} + +export interface CliOptionDefinition extends Options { + example?: CliExample; +} + +export type CliCommandOptions = Required<{[key in keyof OwnArgs]: CliOptionDefinition}>; // eslint-disable-next-line @typescript-eslint/no-explicit-any export interface CliCommand, ParentArgs = Record, R = any> { diff --git a/packages/prover/test/e2e/cli/cmds/start.test.ts b/packages/prover/test/e2e/cli/cmds/start.test.ts index 576cc0fd5e2a..da0c9dceb405 100644 --- a/packages/prover/test/e2e/cli/cmds/start.test.ts +++ b/packages/prover/test/e2e/cli/cmds/start.test.ts @@ -1,7 +1,7 @@ import childProcess from "node:child_process"; import {writeFile, mkdir} from "node:fs/promises"; import path from "node:path"; -import {expect} from "chai"; +import {describe, it, expect, beforeAll, afterAll} from "vitest"; import Web3 from "web3"; import {runCliCommand, spawnCliCommand, stopChildProcess} from "@lodestar/test-utils"; import {sleep} from "@lodestar/utils"; @@ -15,11 +15,11 @@ describe("prover/start", () => { it("should show help", async () => { const output = await runCliCommand(cli, ["start", "--help"]); - expect(output).contains("Show help"); + expect(output).toEqual(expect.stringContaining("Show help")); }); it("should fail when --executionRpcUrl is missing", async () => { - await expect(runCliCommand(cli, ["start", "--port", "8088"])).eventually.rejectedWith( + await expect(runCliCommand(cli, ["start", "--port", "8088"])).rejects.toThrow( "Missing required argument: executionRpcUrl" ); }); @@ -33,13 +33,13 @@ describe("prover/start", () => { "--beaconBootnodes", "http://localhost:0000", ]) - ).eventually.rejectedWith("Arguments beaconBootnodes and beaconUrls are mutually exclusive"); + ).rejects.toThrow("Arguments beaconBootnodes and beaconUrls are mutually exclusive"); }); it("should fail when both of --beaconUrls and --beaconBootnodes are not provided", async () 
=> { await expect( runCliCommand(cli, ["start", "--port", "8088", "--executionRpcUrl", "http://localhost:3000"]) - ).eventually.rejectedWith("Either --beaconUrls or --beaconBootnodes must be provided"); + ).rejects.toThrow("Either --beaconUrls or --beaconBootnodes must be provided"); }); describe("when started", () => { @@ -47,8 +47,7 @@ describe("prover/start", () => { const paramsFilePath = path.join("/tmp", "e2e-test-env", "params.json"); const web3: Web3 = new Web3(proxyUrl); - before(async function () { - this.timeout(50000); + beforeAll(async function () { await waitForCapellaFork(); await mkdir(path.dirname(paramsFilePath), {recursive: true}); await writeFile(paramsFilePath, JSON.stringify(chainConfigToJson(config as ChainConfig))); @@ -72,22 +71,22 @@ describe("prover/start", () => { ); // Give sometime to the prover to start proxy server await sleep(3000); - }); + }, 50000); - after(async () => { + afterAll(async () => { await stopChildProcess(proc); }); it("should respond to verified calls", async () => { const accounts = await web3.eth.getAccounts(); - expect(accounts.length).to.be.gt(0); - await expect(web3.eth.getBalance(accounts[0])).eventually.not.null; + expect(accounts.length).toBeGreaterThan(0); + await expect(web3.eth.getBalance(accounts[0])).resolves.not.toBeNull(); }); it("should respond to unverified calls", async () => { // Because web3 latest version return numbers as bigint by default - await expect(web3.eth.getChainId()).eventually.eql(BigInt(chainId)); + await expect(web3.eth.getChainId()).resolves.toEqual(BigInt(chainId)); }); }); }); diff --git a/packages/prover/test/e2e/web3_batch_request.test.ts b/packages/prover/test/e2e/web3_batch_request.test.ts index cadd7af52203..afb995d088a0 100644 --- a/packages/prover/test/e2e/web3_batch_request.test.ts +++ b/packages/prover/test/e2e/web3_batch_request.test.ts @@ -1,18 +1,16 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {expect} from "chai"; +import {describe, it, 
expect, beforeAll} from "vitest"; import Web3 from "web3"; import {LCTransport} from "../../src/interfaces.js"; import {createVerifiedExecutionProvider} from "../../src/web3_provider.js"; import {rpcUrl, beaconUrl, config} from "../utils/e2e_env.js"; import {getVerificationFailedMessage} from "../../src/utils/json_rpc.js"; +/* prettier-ignore */ describe("web3_batch_requests", function () { - // Give some margin to sync light client - this.timeout("10s"); - let web3: Web3; - before(() => { + beforeAll(() => { const {provider} = createVerifiedExecutionProvider(new Web3.providers.HttpProvider(rpcUrl), { transport: LCTransport.Rest, urls: [beaconUrl], @@ -45,8 +43,8 @@ describe("web3_batch_requests", function () { await batch.execute(); - expect(results.length).to.be.gt(1); - await expect(Promise.all(results)).to.be.fulfilled; + expect(results.length).toBeGreaterThan(1); + await expect(Promise.all(results)).resolves.toBeDefined(); }); it("should be able to process batch request containing error", async () => { @@ -66,8 +64,8 @@ describe("web3_batch_requests", function () { await batch.execute(); - await expect(successRequest).to.be.fulfilled; - await expect(errorRequest).to.be.rejectedWith(getVerificationFailedMessage("eth_getBlockByHash")); + await expect(successRequest).resolves.toBeDefined(); + await expect(errorRequest).rejects.toThrow(getVerificationFailedMessage("eth_getBlockByHash")); }); }); -}); +}, {timeout: 10_000}); diff --git a/packages/prover/test/e2e/web3_provider.test.ts b/packages/prover/test/e2e/web3_provider.test.ts index b2b4b94277d8..4de51dc94051 100644 --- a/packages/prover/test/e2e/web3_provider.test.ts +++ b/packages/prover/test/e2e/web3_provider.test.ts @@ -1,15 +1,14 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {expect} from "chai"; +import {describe, it, expect, beforeAll} from "vitest"; import Web3 from "web3"; import {ethers} from "ethers"; import {LCTransport} from "../../src/interfaces.js"; import 
{createVerifiedExecutionProvider} from "../../src/web3_provider.js"; import {waitForCapellaFork, testTimeout, rpcUrl, beaconUrl, config} from "../utils/e2e_env.js"; +/* prettier-ignore */ describe("web3_provider", function () { - this.timeout(testTimeout); - - before("wait for the capella fork", async () => { + beforeAll(async () => { await waitForCapellaFork(); }); @@ -26,8 +25,8 @@ describe("web3_provider", function () { const accounts = await web3.eth.getAccounts(); // `getProof` will always remain the non-verified method // as we use it to create proof and verify - expect(accounts).not.to.be.empty; - await expect(web3.eth.getProof(accounts[0], [], "latest")).fulfilled; + expect(Object.keys(accounts)).not.toHaveLength(0); + await expect(web3.eth.getProof(accounts[0], [], "latest")).resolves.toBeDefined(); }); }); @@ -40,9 +39,9 @@ describe("web3_provider", function () { }); const accounts = await provider.listAccounts(); - expect(accounts).not.to.be.empty; - await expect(provider.send("eth_getProof", [accounts[0].address, [], "latest"])).fulfilled; + expect(Object.keys(accounts)).not.toHaveLength(0); + await expect(provider.send("eth_getProof", [accounts[0].address, [], "latest"])).resolves.toBeDefined(); }); }); }); -}); +}, {timeout: testTimeout}); diff --git a/packages/prover/test/globalSetup.ts b/packages/prover/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/prover/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/prover/test/mocks/request_handler.ts b/packages/prover/test/mocks/request_handler.ts index 0a61214c52e2..bbe4f24057de 100644 --- a/packages/prover/test/mocks/request_handler.ts +++ b/packages/prover/test/mocks/request_handler.ts @@ -1,4 +1,5 @@ -import sinon from "sinon"; +import {vi, expect} from "vitest"; +import {when} from "vitest-when"; import deepmerge from "deepmerge"; import {NetworkName} 
from "@lodestar/config/networks"; import {ForkConfig} from "@lodestar/config"; @@ -51,28 +52,29 @@ export interface TestFixture { dependentRequests: {payload: JsonRpcRequestOrBatch; response: Writeable}[]; } -function matchTransaction(value: ELTransaction, expected: ELTransaction): boolean { - if ( - value.to?.toLowerCase() !== expected.to?.toLowerCase() || - value.from.toLocaleLowerCase() !== expected.from.toLowerCase() - ) { +function matchTransaction(received: ELTransaction, expected: ELTransaction): boolean { + if (received.to?.toLowerCase() !== expected.to?.toLowerCase()) { return false; } - if ("value" in value && value.value.toLowerCase() !== expected.value.toLowerCase()) { + if ("from" in expected && "from" in received && received.from.toLowerCase() !== expected.from.toLowerCase()) { return false; } - if ("data" in value && value.data?.toLowerCase() !== expected.data?.toLowerCase()) { + if ("value" in received && received.value.toLowerCase() !== expected.value.toLowerCase()) { + return false; + } + + if ("data" in received && received.data?.toLowerCase() !== expected.data?.toLowerCase()) { return false; } return true; } -function matchParams(params: unknown[], expected: unknown[]): boolean { - for (let i = 0; i < params.length; i++) { - const item = params[i]; +function matchParams(received: unknown[], expected: unknown[]): boolean { + for (let i = 0; i < received.length; i++) { + const item = received[i]; const expectedItem = expected[i]; if (typeof item === "string" && typeof expectedItem === "string") { @@ -92,20 +94,12 @@ function matchParams(params: unknown[], expected: unknown[]): boolean { return true; } -function getPayloadParamsMatcher(expected: unknown[]): sinon.SinonMatcher { - return sinon.match(function (params: unknown[]): boolean { - return matchParams(params, expected); - }, "payload match params"); -} - -function getBatchPayloadMatcher(expected: JsonRpcBatchRequest): sinon.SinonMatcher { - return sinon.match(function (value: 
JsonRpcBatchRequest): boolean { - for (const [index, item] of value.entries()) { - if (item.method !== expected[index].method) return false; - if (!matchParams(item.params, expected[index].params)) return false; - } - return true; - }, "batch payload match"); +function matchBatchPayload(received: JsonRpcBatchRequest, expected: JsonRpcBatchRequest): boolean { + for (const [index, item] of received.entries()) { + if (item.method !== expected[index].method) return false; + if (!matchParams(item.params, expected[index].params)) return false; + } + return true; } export function generateReqHandlerOptionsMock( @@ -119,7 +113,7 @@ export function generateReqHandlerOptionsMock( const options = { logger: getEmptyLogger(), proofProvider: { - getExecutionPayload: sinon.stub().resolves(executionPayload), + getExecutionPayload: vi.fn().mockResolvedValue(executionPayload), config: { ...config, // eslint-disable-next-line @typescript-eslint/naming-convention @@ -129,35 +123,46 @@ export function generateReqHandlerOptionsMock( } as unknown as ProofProvider, network: data.network as NetworkName, rpc: { - request: sinon.stub(), - batchRequest: sinon.stub(), + request: vi.fn(), + batchRequest: vi.fn(), getRequestId: () => (Math.random() * 10000).toFixed(0), }, }; - options.rpc.request - .withArgs(data.request.method, getPayloadParamsMatcher(data.request.params), sinon.match.any) - .resolves(data.response); + when(options.rpc.request) + .calledWith( + data.request.method, + expect.toSatisfy((received) => matchParams(received as unknown[], data.request.params)), + expect.anything() + ) + .thenResolve(data.response); for (const {payload, response} of data.dependentRequests) { if (isBatchRequest(payload)) { - options.rpc.batchRequest - .withArgs(getBatchPayloadMatcher(payload), sinon.match.any) - .resolves(mergeBatchReqResp(payload, response as JsonRpcBatchResponse)); + when(options.rpc.batchRequest) + .calledWith( + expect.toSatisfy((received) => matchBatchPayload(received as 
JsonRpcBatchRequest, payload)), + expect.anything() + ) + .thenResolve(mergeBatchReqResp(payload, response as JsonRpcBatchResponse)); } else { - options.rpc.request - .withArgs(payload.method, getPayloadParamsMatcher(payload.params), sinon.match.any) - .resolves(response); + when(options.rpc.request) + .calledWith( + payload.method, + expect.toSatisfy((received) => matchParams(received as unknown[], data.request.params)), + expect.anything() + ) + .thenResolve(response); } } - options.rpc.request - .withArgs("eth_getBlockByNumber", [data.execution.block.number, true], sinon.match.any) - .resolves({id: 1233, jsonrpc: "2.0", result: data.execution.block}); + when(options.rpc.request) + .calledWith("eth_getBlockByNumber", [data.execution.block.number, true], expect.anything()) + .thenResolve({id: 1233, jsonrpc: "2.0", result: data.execution.block}); - options.rpc.request - .withArgs("eth_getBlockByHash", [data.execution.block.hash, true], sinon.match.any) - .resolves({id: 1233, jsonrpc: "2.0", result: data.execution.block}); + when(options.rpc.request) + .calledWith("eth_getBlockByHash", [data.execution.block.hash, true], expect.anything()) + .thenResolve({id: 1233, jsonrpc: "2.0", result: data.execution.block}); return options as unknown as Omit, "payload">; } diff --git a/packages/prover/test/setup.ts b/packages/prover/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/prover/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/prover/test/tsconfig.json b/packages/prover/test/tsconfig.json new file mode 100644 index 000000000000..7e6bad81b22f --- /dev/null +++ b/packages/prover/test/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "../tsconfig", + "compilerOptions": { + "noEmit": false + } +} \ No newline at end of file diff --git 
a/packages/prover/test/unit/proof_provider/orderd_map.test.ts b/packages/prover/test/unit/proof_provider/orderd_map.test.ts index 098f4f9127d5..309c4de71568 100644 --- a/packages/prover/test/unit/proof_provider/orderd_map.test.ts +++ b/packages/prover/test/unit/proof_provider/orderd_map.test.ts @@ -1,25 +1,25 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {OrderedMap} from "../../../src/proof_provider/ordered_map.js"; describe("proof_provider/ordered_map", () => { it("should initialize the min with undefined", () => { const omap = new OrderedMap(); - expect(omap.min).to.undefined; + expect(omap.min).toBeUndefined(); }); it("should initialize the max with undefined", () => { const omap = new OrderedMap(); - expect(omap.max).to.undefined; + expect(omap.max).toBeUndefined(); }); it("should set the min and max to the first value ", () => { const omap = new OrderedMap(); omap.set(11, "value"); - expect(omap.min).eql(11); - expect(omap.max).eql(11); + expect(omap.min).toEqual(11); + expect(omap.max).toEqual(11); }); it("should set the max value", () => { @@ -27,7 +27,7 @@ describe("proof_provider/ordered_map", () => { omap.set(10, "value"); omap.set(11, "value"); - expect(omap.max).eql(11); + expect(omap.max).toEqual(11); }); it("should set the min value", () => { @@ -35,6 +35,6 @@ describe("proof_provider/ordered_map", () => { omap.set(10, "value"); omap.set(11, "value"); - expect(omap.min).eql(10); + expect(omap.min).toEqual(10); }); }); diff --git a/packages/prover/test/unit/proof_provider/payload_store.test.ts b/packages/prover/test/unit/proof_provider/payload_store.test.ts index dc99ab1baa22..02c9cc39f87f 100644 --- a/packages/prover/test/unit/proof_provider/payload_store.test.ts +++ b/packages/prover/test/unit/proof_provider/payload_store.test.ts @@ -1,17 +1,14 @@ -import {expect} from "chai"; -import chai from "chai"; -import sinon from "sinon"; -import sinonChai from "sinon-chai"; -import {Api} from "@lodestar/api"; +import 
{describe, it, expect, beforeEach, vi, MockedObject} from "vitest"; +import {when} from "vitest-when"; +import {Api, HttpStatusCode, routes} from "@lodestar/api"; import {hash} from "@lodestar/utils"; import {Logger} from "@lodestar/logger"; import {allForks, capella} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; +import {ForkName} from "@lodestar/params"; import {PayloadStore} from "../../../src/proof_provider/payload_store.js"; import {MAX_PAYLOAD_HISTORY} from "../../../src/constants.js"; -chai.use(sinonChai); - const createHash = (input: string): Uint8Array => hash(Buffer.from(input, "utf8")); const buildPayload = ({blockNumber}: {blockNumber: number}): allForks.ExecutionPayload => @@ -47,22 +44,23 @@ const buildBlockResponse = ({ }: { slot: number; blockNumber: number; -}): {ok: boolean; response: {version: number; executionOptimistic: boolean; data: allForks.SignedBeaconBlock}} => ({ +}): routes.beacon.block.BlockV2Response<"json"> => ({ ok: true, + status: HttpStatusCode.OK, response: { - version: 12, + version: ForkName.altair, executionOptimistic: true, data: buildBlock({slot, blockNumber}), }, }); describe("proof_provider/payload_store", function () { - let api: Api; + let api: Api & {beacon: MockedObject}; let logger: Logger; let store: PayloadStore; beforeEach(() => { - api = {beacon: {getBlockV2: sinon.stub()}} as unknown as Api; + api = {beacon: {getBlockV2: vi.fn()}} as unknown as Api & {beacon: MockedObject}; logger = console as unknown as Logger; store = new PayloadStore({api, logger}); }); @@ -82,7 +80,7 @@ describe("proof_provider/payload_store", function () { const payload = buildPayload({blockNumber: 10}); store.set(payload, true); - expect(store.finalized).to.eql(payload); + expect(store.finalized).toEqual(payload); }); it("should return highest finalized payload", () => { @@ -91,7 +89,7 @@ describe("proof_provider/payload_store", function () { store.set(payload1, true); store.set(payload2, true); - 
expect(store.finalized).to.eql(payload2); + expect(store.finalized).toEqual(payload2); }); }); @@ -106,7 +104,7 @@ describe("proof_provider/payload_store", function () { store.set(payload1, true); store.set(payload2, true); - expect(store.latest).to.eql(payload2); + expect(store.latest).toEqual(payload2); }); it("should return latest payload if not finalized", () => { @@ -115,20 +113,20 @@ describe("proof_provider/payload_store", function () { store.set(payload1, false); store.set(payload2, false); - expect(store.latest).to.eql(payload2); + expect(store.latest).toEqual(payload2); }); }); describe("get", () => { it("should return undefined for an empty store", async () => { - await expect(store.get(10)).to.eventually.undefined; + await expect(store.get(10)).resolves.toBeUndefined(); }); it("should return undefined for non existing block id", async () => { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, false); - await expect(store.get(11)).to.eventually.undefined; + await expect(store.get(11)).resolves.toBeUndefined(); }); it("should return undefined for non existing block hash", async () => { @@ -136,7 +134,7 @@ describe("proof_provider/payload_store", function () { store.set(payload1, false); const nonExistingBlockHash = createHash("non-existing-block-hash"); - await expect(store.get(toHexString(nonExistingBlockHash))).to.eventually.undefined; + await expect(store.get(toHexString(nonExistingBlockHash))).resolves.toBeUndefined(); }); describe("block hash as blockId", () => { @@ -144,7 +142,7 @@ describe("proof_provider/payload_store", function () { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, false); - await expect(store.get(toHexString(payload1.blockHash))).to.eventually.eql(payload1); + await expect(store.get(toHexString(payload1.blockHash))).resolves.toEqual(payload1); }); }); @@ -153,7 +151,7 @@ describe("proof_provider/payload_store", function () { const finalizedPayload = buildPayload({blockNumber: 10}); 
store.set(finalizedPayload, true); - await expect(store.get(11)).to.rejectedWith( + await expect(store.get(11)).rejects.toThrow( "Block number 11 is higher than the latest finalized block number. We recommend to use block hash for unfinalized blocks." ); }); @@ -162,28 +160,28 @@ describe("proof_provider/payload_store", function () { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, false); - await expect(store.get(10)).to.eventually.undefined; + await expect(store.get(10)).resolves.toBeUndefined(); }); it("should return payload for a block number in hex", async () => { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, true); - await expect(store.get(`0x${payload1.blockNumber.toString(16)}`)).to.eventually.eql(payload1); + await expect(store.get(`0x${payload1.blockNumber.toString(16)}`)).resolves.toEqual(payload1); }); it("should return payload for a block number as string", async () => { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, true); - await expect(store.get(payload1.blockNumber.toString())).to.eventually.eql(payload1); + await expect(store.get(payload1.blockNumber.toString())).resolves.toEqual(payload1); }); it("should return payload for a block number as integer", async () => { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, true); - await expect(store.get(10)).to.eventually.eql(payload1); + await expect(store.get(10)).resolves.toEqual(payload1); }); it("should fetch the finalized payload from API if payload root not exists", async () => { @@ -193,22 +191,22 @@ describe("proof_provider/payload_store", function () { const availablePayload = buildPayload({blockNumber}); const unavailablePayload = buildPayload({blockNumber: unavailableBlockNumber}); - (api.beacon.getBlockV2 as sinon.SinonStub) - .withArgs(blockNumber) - .resolves(buildBlockResponse({blockNumber, slot: blockNumber})); + when(api.beacon.getBlockV2) + .calledWith(blockNumber) + 
.thenResolve(buildBlockResponse({blockNumber, slot: blockNumber})); - (api.beacon.getBlockV2 as sinon.SinonStub) - .withArgs(unavailableBlockNumber) - .resolves(buildBlockResponse({blockNumber: unavailableBlockNumber, slot: unavailableBlockNumber})); + when(api.beacon.getBlockV2) + .calledWith(unavailableBlockNumber) + .thenResolve(buildBlockResponse({blockNumber: unavailableBlockNumber, slot: unavailableBlockNumber})); store.set(availablePayload, true); const result = await store.get(unavailablePayload.blockNumber); - expect(api.beacon.getBlockV2 as sinon.SinonStub).calledTwice; - expect(api.beacon.getBlockV2 as sinon.SinonStub).calledWith(blockNumber); - expect(api.beacon.getBlockV2 as sinon.SinonStub).calledWith(unavailableBlockNumber); - expect(result).to.eql(unavailablePayload); + expect(api.beacon.getBlockV2).toHaveBeenCalledTimes(2); + expect(api.beacon.getBlockV2).toHaveBeenCalledWith(blockNumber); + expect(api.beacon.getBlockV2).toHaveBeenCalledWith(unavailableBlockNumber); + expect(result).toEqual(unavailablePayload); }); }); }); @@ -219,16 +217,16 @@ describe("proof_provider/payload_store", function () { store.set(payload1, false); // Unfinalized blocks are not indexed by block hash - await expect(store.get(toHexString(payload1.blockHash))).to.eventually.eql(payload1); - expect(store.finalized).to.eql(undefined); + await expect(store.get(toHexString(payload1.blockHash))).resolves.toEqual(payload1); + expect(store.finalized).toEqual(undefined); }); it("should set the payload for finalized blocks", async () => { const payload1 = buildPayload({blockNumber: 10}); store.set(payload1, true); - await expect(store.get(payload1.blockNumber.toString())).to.eventually.eql(payload1); - expect(store.finalized).to.eql(payload1); + await expect(store.get(payload1.blockNumber.toString())).resolves.toEqual(payload1); + expect(store.finalized).toEqual(payload1); }); }); @@ -243,15 +241,15 @@ describe("proof_provider/payload_store", function () { const slot = 20; const 
header = buildLCHeader({slot, blockNumber}); const blockResponse = buildBlockResponse({blockNumber, slot}); - const executionPayload = (blockResponse.response.data as capella.SignedBeaconBlock).message.body + const executionPayload = (blockResponse.response?.data as capella.SignedBeaconBlock).message.body .executionPayload; - (api.beacon.getBlockV2 as sinon.SinonStub).resolves(blockResponse); + api.beacon.getBlockV2.mockResolvedValue(blockResponse); await store.processLCHeader(header, true); - expect(api.beacon.getBlockV2).calledOnce; - expect(api.beacon.getBlockV2).calledWith(20); - expect(store.finalized).to.eql(executionPayload); + expect(api.beacon.getBlockV2).toHaveBeenCalledOnce(); + expect(api.beacon.getBlockV2).toHaveBeenCalledWith(20); + expect(store.finalized).toEqual(executionPayload); }); it("should process lightclient header for finalized block which exists as un-finalized in store", async () => { @@ -259,9 +257,9 @@ describe("proof_provider/payload_store", function () { const slot = 20; const header = buildLCHeader({slot, blockNumber}); const blockResponse = buildBlockResponse({blockNumber, slot}); - const executionPayload = (blockResponse.response.data as capella.SignedBeaconBlock).message.body + const executionPayload = (blockResponse.response?.data as capella.SignedBeaconBlock).message.body .executionPayload; - (api.beacon.getBlockV2 as sinon.SinonStub).resolves(blockResponse); + api.beacon.getBlockV2.mockResolvedValue(blockResponse); expect(store.finalized).to.undefined; // First process as unfinalized @@ -271,8 +269,8 @@ describe("proof_provider/payload_store", function () { await store.processLCHeader(header, true); // Called only once when we process unfinalized - expect(api.beacon.getBlockV2).to.be.calledOnce; - expect(store.finalized).to.eql(executionPayload); + expect(api.beacon.getBlockV2).to.be.toHaveBeenCalledOnce(); + expect(store.finalized).toEqual(executionPayload); }); }); @@ -280,19 +278,19 @@ describe("proof_provider/payload_store", 
function () { const blockNumber = 10; const slot = 20; const header = buildLCHeader({slot, blockNumber}); - (api.beacon.getBlockV2 as sinon.SinonStub).resolves(buildBlockResponse({blockNumber, slot})); + api.beacon.getBlockV2.mockResolvedValue(buildBlockResponse({blockNumber, slot})); await store.processLCHeader(header); - expect(api.beacon.getBlockV2).calledOnce; - expect(api.beacon.getBlockV2).calledWith(20); + expect(api.beacon.getBlockV2).toHaveBeenCalledOnce(); + expect(api.beacon.getBlockV2).toHaveBeenCalledWith(20); }); it("should not fetch existing payload for lightclient header", async () => { const blockNumber = 10; const slot = 20; const header = buildLCHeader({slot, blockNumber}); - (api.beacon.getBlockV2 as sinon.SinonStub).resolves(buildBlockResponse({blockNumber, slot})); + api.beacon.getBlockV2.mockResolvedValue(buildBlockResponse({blockNumber, slot})); await store.processLCHeader(header); @@ -300,21 +298,21 @@ describe("proof_provider/payload_store", function () { await store.processLCHeader(header); // The network fetch should be done once - expect(api.beacon.getBlockV2).calledOnce; - expect(api.beacon.getBlockV2).calledWith(20); + expect(api.beacon.getBlockV2).toHaveBeenCalledOnce(); + expect(api.beacon.getBlockV2).toHaveBeenCalledWith(20); }); it("should prune the existing payloads", async () => { const blockNumber = 10; const slot = 20; const header = buildLCHeader({slot, blockNumber}); - (api.beacon.getBlockV2 as sinon.SinonStub).resolves(buildBlockResponse({blockNumber, slot})); + api.beacon.getBlockV2.mockResolvedValue(buildBlockResponse({blockNumber, slot})); - sinon.spy(store, "prune"); + vi.spyOn(store, "prune"); await store.processLCHeader(header); - expect(store.prune).to.be.calledOnce; + expect(store.prune).toHaveBeenCalledOnce(); }); }); @@ -330,11 +328,11 @@ describe("proof_provider/payload_store", function () { store.set(buildPayload({blockNumber: i}), true); } - expect(store["payloads"].size).to.equal(numberOfPayloads); + 
expect(store["payloads"].size).toEqual(numberOfPayloads); store.prune(); - expect(store["payloads"].size).to.equal(MAX_PAYLOAD_HISTORY); + expect(store["payloads"].size).toEqual(MAX_PAYLOAD_HISTORY); }); it("should not prune the existing payloads if equal to MAX_PAYLOAD_HISTORY", () => { @@ -344,11 +342,11 @@ describe("proof_provider/payload_store", function () { store.set(buildPayload({blockNumber: i}), true); } - expect(store["payloads"].size).to.equal(MAX_PAYLOAD_HISTORY); + expect(store["payloads"].size).toEqual(MAX_PAYLOAD_HISTORY); store.prune(); - expect(store["payloads"].size).to.equal(MAX_PAYLOAD_HISTORY); + expect(store["payloads"].size).toEqual(MAX_PAYLOAD_HISTORY); }); it("should not prune the existing payloads if less than MAX_PAYLOAD_HISTORY", () => { @@ -358,11 +356,11 @@ describe("proof_provider/payload_store", function () { store.set(buildPayload({blockNumber: i}), true); } - expect(store["payloads"].size).to.equal(numberOfPayloads); + expect(store["payloads"].size).toEqual(numberOfPayloads); store.prune(); - expect(store["payloads"].size).to.equal(numberOfPayloads); + expect(store["payloads"].size).toEqual(numberOfPayloads); }); it("should prune finalized roots", () => { @@ -372,33 +370,33 @@ describe("proof_provider/payload_store", function () { store.set(buildPayload({blockNumber: i}), true); } - expect(store["finalizedRoots"].size).to.equal(numberOfPayloads); + expect(store["finalizedRoots"].size).toEqual(numberOfPayloads); store.prune(); - expect(store["finalizedRoots"].size).to.equal(MAX_PAYLOAD_HISTORY); + expect(store["finalizedRoots"].size).toEqual(MAX_PAYLOAD_HISTORY); }); it("should prune unfinalized roots", async () => { const numberOfPayloads = MAX_PAYLOAD_HISTORY + 2; for (let i = 1; i <= numberOfPayloads; i++) { - (api.beacon.getBlockV2 as sinon.SinonStub) - .withArgs(i) - .resolves(buildBlockResponse({blockNumber: 500 + i, slot: i})); + when(api.beacon.getBlockV2) + .calledWith(i) + .thenResolve(buildBlockResponse({blockNumber: 500 
+ i, slot: i})); await store.processLCHeader(buildLCHeader({blockNumber: 500 + i, slot: i}), false); } // Because all payloads are unfinalized, they are not pruned - expect(store["unfinalizedRoots"].size).to.equal(numberOfPayloads); + expect(store["unfinalizedRoots"].size).toEqual(numberOfPayloads); // Let make some payloads finalized await store.processLCHeader(buildLCHeader({blockNumber: 500 + 1, slot: 1}), true); await store.processLCHeader(buildLCHeader({blockNumber: 500 + 2, slot: 2}), true); // store.processLCHeader will call the prune method internally and clean the unfinalized roots - expect(store["unfinalizedRoots"].size).to.equal(numberOfPayloads - 2); + expect(store["unfinalizedRoots"].size).toEqual(numberOfPayloads - 2); }); }); }); diff --git a/packages/prover/test/unit/utils/assertion.test.ts b/packages/prover/test/unit/utils/assertion.test.ts index 9bbe07a19b9f..5cf481bacdf1 100644 --- a/packages/prover/test/unit/utils/assertion.test.ts +++ b/packages/prover/test/unit/utils/assertion.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {ethers} from "ethers"; import Web3 from "web3"; import {isSendProvider, isWeb3jsProvider, isEthersProvider} from "../../../src/utils/assertion.js"; @@ -11,41 +11,41 @@ describe("utils/assertion", () => { // Do nothing; }, }; - expect(isSendProvider(provider)).to.be.true; + expect(isSendProvider(provider)).toBe(true); }); it("should return false for ethers provider", () => { const provider = new ethers.JsonRpcProvider("https://lodestar-sepoliarpc.chainsafe.io"); - expect(isSendProvider(provider)).to.be.false; + expect(isSendProvider(provider)).toBe(false); }); it("should return false for web3 provider", () => { const provider = new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"); - expect(isSendProvider(provider)).to.be.false; + expect(isSendProvider(provider)).toBe(false); }); }); describe("isWeb3jsProvider", () => { it("should return true 
if provider is web3.js provider", () => { const provider = new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"); - expect(isWeb3jsProvider(provider)).to.be.true; + expect(isWeb3jsProvider(provider)).toBe(true); }); it("should return false if provider is not web3.js provider", () => { const provider = new ethers.JsonRpcProvider("https://lodestar-sepoliarpc.chainsafe.io"); - expect(isWeb3jsProvider(provider)).to.be.false; + expect(isWeb3jsProvider(provider)).toBe(false); }); }); describe("isEthersProvider", () => { it("should return false if provider is not ethers provider", () => { const provider = new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"); - expect(isEthersProvider(provider)).to.be.false; + expect(isEthersProvider(provider)).toBe(false); }); it("should return true if provider is ethers provider", () => { const provider = new ethers.JsonRpcProvider("https://lodestar-sepoliarpc.chainsafe.io"); - expect(isEthersProvider(provider)).to.be.true; + expect(isEthersProvider(provider)).toBe(true); }); }); }); diff --git a/packages/prover/test/unit/utils/conversion.test.ts b/packages/prover/test/unit/utils/conversion.test.ts index 50ed03a89450..ee9c16b9cb94 100644 --- a/packages/prover/test/unit/utils/conversion.test.ts +++ b/packages/prover/test/unit/utils/conversion.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {chunkIntoN} from "../../../src/utils/conversion.js"; describe("utils/conversion", () => { @@ -71,12 +71,12 @@ describe("utils/conversion", () => { for (const {title, input, output} of testCases) { it(`should chunkify data when ${title}`, async () => { - expect(chunkIntoN(input.data, input.n)).to.be.deep.eq(output); + expect(chunkIntoN(input.data, input.n)).toEqual(output); }); } it("should not change the order of elements", () => { - expect(chunkIntoN([6, 5, 4, 3, 2, 1], 2)).to.be.deep.eq([ + expect(chunkIntoN([6, 5, 4, 3, 2, 1], 2)).toEqual([ [6, 
5], [4, 3], [2, 1], diff --git a/packages/prover/test/unit/utils/execution.test.ts b/packages/prover/test/unit/utils/execution.test.ts index dc2702e5f9d0..9219b5ec0c03 100644 --- a/packages/prover/test/unit/utils/execution.test.ts +++ b/packages/prover/test/unit/utils/execution.test.ts @@ -1,6 +1,4 @@ -import {expect} from "chai"; -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect} from "vitest"; import deepmerge from "deepmerge"; import {getEnvLogger} from "@lodestar/logger/env"; import {ELProof, ELStorageProof} from "../../../src/types.js"; @@ -16,8 +14,6 @@ const validStateRoot = hexToBuffer(eoaProof.beacon.executionPayload.state_root); const invalidAccountProof = deepmerge(validAccountProof, {}); delete invalidAccountProof.accountProof[0]; -chai.use(chaiAsPromised); - describe("uitls/execution", () => { const logger = getEnvLogger(); @@ -30,7 +26,7 @@ describe("uitls/execution", () => { stateRoot: validStateRoot, logger, }) - ).eventually.to.be.true; + ).resolves.toBe(true); }); it("should fail with error if proof is valid but address is wrong", async () => { @@ -48,7 +44,7 @@ describe("uitls/execution", () => { stateRoot, logger, }) - ).eventually.to.be.false; + ).resolves.toBe(false); }); it("should fail with error if account is not valid", async () => { @@ -62,7 +58,7 @@ describe("uitls/execution", () => { stateRoot, logger, }) - ).eventually.to.be.false; + ).resolves.toBe(false); }); }); @@ -76,7 +72,7 @@ describe("uitls/execution", () => { storageKeys, logger, }) - ).eventually.to.be.true; + ).resolves.toBe(true); }); it("should fail with error for a wrong proof", async () => { @@ -88,7 +84,7 @@ describe("uitls/execution", () => { proof: invalidStorageProof, storageKeys, }) - ).eventually.to.be.false; + ).resolves.toBe(false); }); it("should fail with error for a non existance key", async () => { @@ -110,7 +106,7 @@ describe("uitls/execution", () => { storageKeys, logger, }) - ).eventually.to.be.false; + 
).resolves.toBe(false); }); it("should return true empty keys", async () => { @@ -126,7 +122,7 @@ describe("uitls/execution", () => { storageKeys, logger, }) - ).eventually.to.be.true; + ).resolves.toBe(true); }); }); }); diff --git a/packages/prover/test/unit/verified_requests/eth_call.test.ts b/packages/prover/test/unit/verified_requests/eth_call.test.ts index 89f45ee28fa7..e76b3fe4ed83 100644 --- a/packages/prover/test/unit/verified_requests/eth_call.test.ts +++ b/packages/prover/test/unit/verified_requests/eth_call.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {ELTransaction} from "../../../lib/types.js"; @@ -28,7 +28,7 @@ describe("verified_requests / eth_call", () => { }, }); - expect(response).to.eql(testCase.response); + expect(response).toEqual(testCase.response); }); it("should return the json-rpc response with error for an invalid call", async () => { @@ -57,7 +57,7 @@ describe("verified_requests / eth_call", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_call")}, diff --git a/packages/prover/test/unit/verified_requests/eth_estimateGas.test.ts b/packages/prover/test/unit/verified_requests/eth_estimateGas.test.ts index 2c827bcb9445..27fbfb98a1b3 100644 --- a/packages/prover/test/unit/verified_requests/eth_estimateGas.test.ts +++ b/packages/prover/test/unit/verified_requests/eth_estimateGas.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {ELTransaction} from "../../../lib/types.js"; @@ -29,7 +29,7 @@ describe("verified_requests / 
eth_estimateGas", () => { }, }); - expect(response).to.eql(testCase.response); + expect(response).toEqual(testCase.response); }); it("should return the json-rpc response with error for an invalid call", async () => { @@ -59,7 +59,7 @@ describe("verified_requests / eth_estimateGas", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_estimateGas")}, diff --git a/packages/prover/test/unit/verified_requests/eth_getBalance.test.ts b/packages/prover/test/unit/verified_requests/eth_getBalance.test.ts index 79f73a632fdc..ea0c902750c8 100644 --- a/packages/prover/test/unit/verified_requests/eth_getBalance.test.ts +++ b/packages/prover/test/unit/verified_requests/eth_getBalance.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {VERIFICATION_FAILED_RESPONSE_CODE} from "../../../src/constants.js"; @@ -25,7 +25,7 @@ describe("verified_requests / eth_getBalance", () => { params: [data.request.params[0], data.request.params[1]], }, }); - expect(response).to.eql(data.response); + expect(response).toEqual(data.response); }); it("should return the json-rpc response with error for an invalid account", async () => { @@ -43,7 +43,7 @@ describe("verified_requests / eth_getBalance", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: data.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_getBalance")}, diff --git a/packages/prover/test/unit/verified_requests/eth_getBlockByHash.test.ts b/packages/prover/test/unit/verified_requests/eth_getBlockByHash.test.ts index 423dfc9b3d6c..8052b290ff0d 100644 --- 
a/packages/prover/test/unit/verified_requests/eth_getBlockByHash.test.ts +++ b/packages/prover/test/unit/verified_requests/eth_getBlockByHash.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {VERIFICATION_FAILED_RESPONSE_CODE} from "../../../src/constants.js"; @@ -29,7 +29,7 @@ describe("verified_requests / eth_getBlockByHash", () => { params: testCase.request.params as [string, boolean], }, }); - expect(response).to.eql(testCase.response); + expect(response).toEqual(testCase.response); }); it("should return the json-rpc response with error for an invalid block header with valid execution payload", async () => { @@ -48,7 +48,7 @@ describe("verified_requests / eth_getBlockByHash", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_getBlockByHash")}, @@ -71,7 +71,7 @@ describe("verified_requests / eth_getBlockByHash", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_getBlockByHash")}, @@ -94,7 +94,7 @@ describe("verified_requests / eth_getBlockByHash", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_getBlockByHash")}, diff --git a/packages/prover/test/unit/verified_requests/eth_getBlockByNumber.test.ts b/packages/prover/test/unit/verified_requests/eth_getBlockByNumber.test.ts index 06d8d60af27c..fa60be225d20 100644 --- a/packages/prover/test/unit/verified_requests/eth_getBlockByNumber.test.ts +++ 
b/packages/prover/test/unit/verified_requests/eth_getBlockByNumber.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {VERIFICATION_FAILED_RESPONSE_CODE} from "../../../src/constants.js"; @@ -29,7 +29,7 @@ describe("verified_requests / eth_getBlockByNumber", () => { params: testCase.request.params as [string | number, boolean], }, }); - expect(response).to.eql(testCase.response); + expect(response).toEqual(testCase.response); }); it("should return the json-rpc response with error for an invalid block header with valid execution payload", async () => { @@ -48,7 +48,7 @@ describe("verified_requests / eth_getBlockByNumber", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: { @@ -74,7 +74,7 @@ describe("verified_requests / eth_getBlockByNumber", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: { @@ -103,7 +103,7 @@ describe("verified_requests / eth_getBlockByNumber", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: { diff --git a/packages/prover/test/unit/verified_requests/eth_getCode.test.ts b/packages/prover/test/unit/verified_requests/eth_getCode.test.ts index 5fe6944904c4..51cf0c153857 100644 --- a/packages/prover/test/unit/verified_requests/eth_getCode.test.ts +++ b/packages/prover/test/unit/verified_requests/eth_getCode.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {VERIFICATION_FAILED_RESPONSE_CODE} from "../../../src/constants.js"; @@ -25,7 +25,7 @@ describe("verified_requests / 
eth_getCode", () => { }, }); - expect(response).to.eql(testCase.response); + expect(response).toEqual(testCase.response); }); it("should return the json-rpc response with error for an invalid account", async () => { @@ -41,7 +41,7 @@ describe("verified_requests / eth_getCode", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: {code: VERIFICATION_FAILED_RESPONSE_CODE, message: getVerificationFailedMessage("eth_getCode")}, diff --git a/packages/prover/test/unit/verified_requests/eth_getTransactionCount.test.ts b/packages/prover/test/unit/verified_requests/eth_getTransactionCount.test.ts index 49477aac5747..8baf8fd7976b 100644 --- a/packages/prover/test/unit/verified_requests/eth_getTransactionCount.test.ts +++ b/packages/prover/test/unit/verified_requests/eth_getTransactionCount.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {createForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; import {VERIFICATION_FAILED_RESPONSE_CODE} from "../../../src/constants.js"; @@ -25,7 +25,7 @@ describe("verified_requests / eth_getTransactionCount", () => { }, }); - expect(response).to.eql(testCase.response); + expect(response).toEqual(testCase.response); }); it("should return the json-rpc response with error for an invalid account", async () => { @@ -43,7 +43,7 @@ describe("verified_requests / eth_getTransactionCount", () => { }, }); - expect(response).to.eql({ + expect(response).toEqual({ jsonrpc: "2.0", id: testCase.request.id, error: { diff --git a/packages/prover/test/unit/web3_provider.test.ts b/packages/prover/test/unit/web3_provider.node.test.ts similarity index 72% rename from packages/prover/test/unit/web3_provider.test.ts rename to packages/prover/test/unit/web3_provider.node.test.ts index e29188503b96..eed6c336483f 100644 --- a/packages/prover/test/unit/web3_provider.test.ts +++ 
b/packages/prover/test/unit/web3_provider.node.test.ts @@ -1,22 +1,19 @@ -import {expect} from "chai"; +import {describe, it, expect, afterEach, vi} from "vitest"; import Web3 from "web3"; import {ethers} from "ethers"; -import sinon from "sinon"; import {createVerifiedExecutionProvider, ProofProvider, LCTransport} from "@lodestar/prover/browser"; import {ELRpc} from "../../src/utils/rpc.js"; describe("web3_provider", () => { - const sandbox = sinon.createSandbox(); - afterEach(() => { - sandbox.restore(); + vi.clearAllMocks(); }); describe("createVerifiedExecutionProvider", () => { describe("web3", () => { it("should create a verified execution provider for the web3 provider", () => { // Don't invoke network in unit tests - sandbox.stub(ELRpc.prototype, "verifyCompatibility").resolves(); + vi.spyOn(ELRpc.prototype, "verifyCompatibility").mockResolvedValue(); const {provider, proofProvider} = createVerifiedExecutionProvider( new Web3.providers.HttpProvider("https://lodestar-sepoliarpc.chainsafe.io"), @@ -27,15 +24,15 @@ describe("web3_provider", () => { } ); - expect(provider).be.instanceof(Web3.providers.HttpProvider); - expect(proofProvider).be.instanceOf(ProofProvider); + expect(provider).toBeInstanceOf(Web3.providers.HttpProvider); + expect(proofProvider).toBeInstanceOf(ProofProvider); }); }); describe("ethers", () => { it("should create a verified execution provider for the ethers provider", () => { // Don't invoke network in unit tests - sandbox.stub(ELRpc.prototype, "verifyCompatibility").resolves(); + vi.spyOn(ELRpc.prototype, "verifyCompatibility").mockResolvedValue(); const {provider, proofProvider} = createVerifiedExecutionProvider( new ethers.JsonRpcProvider("https://lodestar-sepoliarpc.chainsafe.io"), @@ -46,8 +43,8 @@ describe("web3_provider", () => { } ); - expect(provider).be.instanceof(ethers.JsonRpcProvider); - expect(proofProvider).be.instanceOf(ProofProvider); + expect(provider).toBeInstanceOf(ethers.JsonRpcProvider); + 
expect(proofProvider).toBeInstanceOf(ProofProvider); }); }); }); diff --git a/packages/prover/vitest.browser.config.ts b/packages/prover/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/prover/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/prover/vitest.config.ts b/packages/prover/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/prover/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/prover/webpack.test.config.cjs b/packages/prover/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/prover/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/reqresp/.nycrc.json b/packages/reqresp/.nycrc.json deleted file mode 100644 index 69aa626339a0..000000000000 --- a/packages/reqresp/.nycrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "../../.nycrc.json" -} diff --git a/packages/reqresp/package.json b/packages/reqresp/package.json index 4e8923e6cf60..e4ca57dfc3dd 100644 --- a/packages/reqresp/package.json +++ b/packages/reqresp/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -50,15 +50,15 @@ 
"lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", "test": "yarn test:unit", - "test:unit": "nyc --cache-dir .nyc_output/.cache -e .ts mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "dependencies": { "@chainsafe/fast-crc32c": "^4.1.1", "@libp2p/interface": "^0.1.2", - "@lodestar/config": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/config": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/utils": "^1.13.0", "it-all": "^3.0.2", "it-pipe": "^3.0.1", "snappy": "^7.2.2", @@ -67,8 +67,8 @@ "uint8arraylist": "^2.4.3" }, "devDependencies": { - "@lodestar/logger": "^1.12.0", - "@lodestar/types": "^1.12.0", + "@lodestar/logger": "^1.13.0", + "@lodestar/types": "^1.13.0", "libp2p": "0.46.12" }, "peerDependencies": { diff --git a/packages/reqresp/src/ReqResp.ts b/packages/reqresp/src/ReqResp.ts index e79b5737bc91..671df3c83662 100644 --- a/packages/reqresp/src/ReqResp.ts +++ b/packages/reqresp/src/ReqResp.ts @@ -2,8 +2,8 @@ import {setMaxListeners} from "node:events"; import {Connection, Stream} from "@libp2p/interface/connection"; import {PeerId} from "@libp2p/interface/peer-id"; import type {Libp2p} from "libp2p"; -import {Logger} from "@lodestar/utils"; -import {getMetrics, Metrics, MetricsRegister} from "./metrics.js"; +import {Logger, MetricsRegister} from "@lodestar/utils"; +import {getMetrics, Metrics} from "./metrics.js"; import {RequestError, RequestErrorCode, sendRequest, SendRequestOpts} from "./request/index.js"; import {handleRequest} from "./response/index.js"; import { diff --git a/packages/reqresp/src/index.ts b/packages/reqresp/src/index.ts index 9bb07c1a4fce..d31960fdcd89 100644 --- a/packages/reqresp/src/index.ts +++ b/packages/reqresp/src/index.ts @@ -1,7 +1,7 @@ export {ReqResp} from "./ReqResp.js"; export type {ReqRespOpts} from "./ReqResp.js"; export {getMetrics} from "./metrics.js"; -export 
type {Metrics, MetricsRegister} from "./metrics.js"; +export type {Metrics} from "./metrics.js"; export {Encoding as ReqRespEncoding} from "./types.js"; // Expose enums renamed export * from "./types.js"; export * from "./interface.js"; diff --git a/packages/reqresp/src/metrics.ts b/packages/reqresp/src/metrics.ts index c4474d0d61b7..4af18a782322 100644 --- a/packages/reqresp/src/metrics.ts +++ b/packages/reqresp/src/metrics.ts @@ -1,62 +1,7 @@ -type LabelValues = Partial>; - -interface Gauge { - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `inc(value: number, labels?: Labels)`, this would be simpler - inc(value?: number): void; - inc(labels: LabelValues, value?: number): void; - inc(arg1?: LabelValues | number, arg2?: number): void; - - dec(value?: number): void; - dec(labels: LabelValues, value?: number): void; - dec(arg1?: LabelValues | number, arg2?: number): void; - - set(value: number): void; - set(labels: LabelValues, value: number): void; - set(arg1?: LabelValues | number, arg2?: number): void; - - addCollect: (collectFn: () => void) => void; -} - -interface Histogram { - startTimer(arg1?: LabelValues): (labels?: LabelValues) => number; - - observe(value: number): void; - observe(labels: LabelValues, values: number): void; - observe(arg1: LabelValues | number, arg2?: number): void; - - reset(): void; -} - -type GaugeConfig = { - name: string; - help: string; - labelNames?: T[]; -}; - -type HistogramConfig = { - name: string; - help: string; - labelNames?: T[]; - buckets?: number[]; -}; - -export interface MetricsRegister { - gauge(config: GaugeConfig): Gauge; - histogram(config: HistogramConfig): Histogram; -} +import {MetricsRegister} from "@lodestar/utils"; export type Metrics = ReturnType; -export type LodestarGitData = { - /** "0.16.0 developer/feature-1 ac99f2b5" */ - version: string; - /** "4f816b16dfde718e2d74f95f2c8292596138c248" */ - commit: string; - /** "goerli" */ - network: string; -}; - /** 
* A collection of metrics used throughout the Gossipsub behaviour. */ @@ -65,48 +10,48 @@ export function getMetrics(register: MetricsRegister) { // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types. return { - outgoingRequests: register.gauge<"method">({ + outgoingRequests: register.gauge<{method: string}>({ name: "beacon_reqresp_outgoing_requests_total", help: "Counts total requests done per method", labelNames: ["method"], }), - outgoingRequestRoundtripTime: register.histogram<"method">({ + outgoingRequestRoundtripTime: register.histogram<{method: string}>({ name: "beacon_reqresp_outgoing_request_roundtrip_time_seconds", help: "Histogram of outgoing requests round-trip time", labelNames: ["method"], // Spec sets RESP_TIMEOUT = 10 sec buckets: [0.1, 0.2, 0.5, 1, 5, 10, 15, 60], }), - outgoingErrors: register.gauge<"method">({ + outgoingErrors: register.gauge<{method: string}>({ name: "beacon_reqresp_outgoing_requests_error_total", help: "Counts total failed requests done per method", labelNames: ["method"], }), - incomingRequests: register.gauge<"method">({ + incomingRequests: register.gauge<{method: string}>({ name: "beacon_reqresp_incoming_requests_total", help: "Counts total responses handled per method", labelNames: ["method"], }), - incomingRequestHandlerTime: register.histogram<"method">({ + incomingRequestHandlerTime: register.histogram<{method: string}>({ name: "beacon_reqresp_incoming_request_handler_time_seconds", help: "Histogram of incoming requests internal handling time", labelNames: ["method"], // Spec sets RESP_TIMEOUT = 10 sec buckets: [0.1, 0.2, 0.5, 1, 5, 10], }), - incomingErrors: register.gauge<"method">({ + incomingErrors: register.gauge<{method: string}>({ name: "beacon_reqresp_incoming_requests_error_total", help: "Counts total failed responses handled per method", labelNames: ["method"], }), - outgoingResponseTTFB: register.histogram<"method">({ + outgoingResponseTTFB: 
register.histogram<{method: string}>({ name: "beacon_reqresp_outgoing_response_ttfb_seconds", help: "Time to first byte (TTFB) for outgoing responses", labelNames: ["method"], // Spec sets TTFB_TIMEOUT = 5 sec buckets: [0.1, 1, 5], }), - incomingResponseTTFB: register.histogram<"method">({ + incomingResponseTTFB: register.histogram<{method: string}>({ name: "beacon_reqresp_incoming_response_ttfb_seconds", help: "Time to first byte (TTFB) for incoming responses", labelNames: ["method"], diff --git a/packages/reqresp/test/fixtures/messages.ts b/packages/reqresp/test/fixtures/messages.ts index da71e70500ed..7c5eedaeb3d4 100644 --- a/packages/reqresp/test/fixtures/messages.ts +++ b/packages/reqresp/test/fixtures/messages.ts @@ -10,7 +10,7 @@ type MessageFixture = { type: TypeSizes; binaryPayload: ResponseIncoming; chunks: Uint8Array[]; - asyncChunks: Buffer[]; + asyncChunks: Uint8Array[]; }; const phase0Metadata = ssz.phase0.Metadata.fromJson({ @@ -26,14 +26,14 @@ export const sszSnappyPhase0Metadata: MessageFixture = { fork: ForkName.phase0, protocolVersion: 1, }, - chunks: ["0x10", "0xff060000734e61507059011400000b5ee91209000000000000000000000000000000"].map( - (s) => new Uint8Array(fromHexString(s)) + chunks: ["0x10", "0xff060000734e61507059011400000b5ee91209000000000000000000000000000000"].map((s) => + fromHexString(s) ), asyncChunks: [ "0x10", // length prefix "0xff060000734e61507059", // snappy frames header "0x011400000b5ee91209000000000000000000000000000000", // snappy frames content - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), }; const altairMetadata = ssz.altair.Metadata.fromJson({ @@ -68,14 +68,12 @@ export const sszSnappyPing: MessageFixture = { fork: ForkName.phase0, protocolVersion: 1, }, - chunks: ["0x08", "0xff060000734e61507059010c00000175de410100000000000000"].map( - (s) => new Uint8Array(fromHexString(s)) - ), + chunks: ["0x08", "0xff060000734e61507059010c00000175de410100000000000000"].map((s) => fromHexString(s)), 
asyncChunks: [ "0x08", // length prefix "0xff060000734e61507059", // snappy frames header "0x010c00000175de410100000000000000", // snappy frames content - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), }; const statusData = { @@ -96,9 +94,9 @@ export const sszSnappyStatus: MessageFixture = { "0x54", // length prefix "0xff060000734e61507059", // snappy frames header "0x001b0000097802c15400da8a010004090009017e2b001c0900000000000000", - ].map((d) => Buffer.from(fromHexString(d))), - chunks: ["0x54", "0xff060000734e61507059001b0000097802c15400da8a010004090009017e2b001c0900000000000000"].map( - (s) => new Uint8Array(fromHexString(s)) + ].map((d) => fromHexString(d)), + chunks: ["0x54", "0xff060000734e61507059001b0000097802c15400da8a010004090009017e2b001c0900000000000000"].map((s) => + fromHexString(s) ), }; @@ -137,11 +135,11 @@ export const sszSnappySignedBeaconBlockPhase0: MessageFixture = { "0x9403", "0xff060000734e61507059", "0x00340000fff3b3f594031064000000dafe01007a010004090009011108fe6f000054feb4008ab4007e0100fecc0011cc0cdc0000003e0400", - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), chunks: [ "0x9403", "0xff060000734e6150705900340000fff3b3f594031064000000dafe01007a010004090009011108fe6f000054feb4008ab4007e0100fecc0011cc0cdc0000003e0400", - ].map((s) => new Uint8Array(fromHexString(s))), + ].map((s) => fromHexString(s)), }; const signedBeaconBlockAltairData = { @@ -166,11 +164,11 @@ export const sszSnappySignedBeaconBlockAltair: MessageFixture = { "0xf803", // length prefix "0xff060000734e61507059", // snappy frames header "0x003f0000ee14ab0df8031064000000dafe01007a01000c995f0100010100090105ee70000d700054ee44000d44fe0100fecc0011cc0c400100003e0400fe01008e0100", - ].map((d) => Buffer.from(fromHexString(d))), + ].map((d) => fromHexString(d)), chunks: [ "0xb404", 
"0xff060000734e6150705900420000bab7f8feb4041064000000dafe01007a01000c995f0100010100090105ee70000d700054ee44000d44fe0100fecc0011cc0c7c0100003e0400fe0100fe01007e0100", - ].map((s) => new Uint8Array(fromHexString(s))), + ].map((s) => fromHexString(s)), }; // Set the altair fork to happen between the two precomputed SSZ snappy blocks diff --git a/packages/reqresp/test/globalSetup.ts b/packages/reqresp/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/reqresp/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/reqresp/test/setup.ts b/packages/reqresp/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/reqresp/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/reqresp/test/unit/ReqResp.test.ts b/packages/reqresp/test/unit/ReqResp.test.ts index 26a68ce02d25..b62b1883cce1 100644 --- a/packages/reqresp/test/unit/ReqResp.test.ts +++ b/packages/reqresp/test/unit/ReqResp.test.ts @@ -1,6 +1,5 @@ -import {expect} from "chai"; +import {describe, it, expect, beforeEach, afterEach, vi} from "vitest"; import {Libp2p} from "libp2p"; -import sinon from "sinon"; import {Logger} from "@lodestar/utils"; import {getEmptyLogger} from "@lodestar/logger/empty"; import {RespStatus} from "../../src/interface.js"; @@ -18,7 +17,7 @@ describe("ResResp", () => { beforeEach(() => { libp2p = { - dialProtocol: sinon.stub().resolves( + dialProtocol: vi.fn().mockResolvedValue( new MockLibP2pStream( responseEncode( [ @@ -32,7 +31,7 @@ describe("ResResp", () => { ping.method ) ), - handle: sinon.spy(), + handle: vi.fn(), } as unknown as Libp2p; logger = getEmptyLogger(); @@ -44,12 +43,16 @@ describe("ResResp", () => { }); }); + afterEach(() => { + 
vi.restoreAllMocks(); + }); + describe("dial only protocol", () => { it("should register protocol and dial", async () => { reqresp.registerDialOnlyProtocol(numberToStringProtocolDialOnly); - expect(reqresp.getRegisteredProtocols()).to.eql(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); - expect((libp2p.handle as sinon.SinonSpy).calledOnce).to.be.false; + expect(reqresp.getRegisteredProtocols()).toEqual(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); + expect(libp2p.handle).not.toHaveBeenCalledOnce(); }); }); @@ -57,8 +60,8 @@ describe("ResResp", () => { it("should register protocol and dial", async () => { await reqresp.registerProtocol(numberToStringProtocol); - expect(reqresp.getRegisteredProtocols()).to.eql(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); - expect((libp2p.handle as sinon.SinonSpy).calledOnce).to.be.true; + expect(reqresp.getRegisteredProtocols()).toEqual(["/eth2/beacon_chain/req/number_to_string/1/ssz_snappy"]); + expect(libp2p.handle).toHaveBeenCalledOnce(); }); }); }); diff --git a/packages/reqresp/test/unit/encoders/reqestEncode.test.ts b/packages/reqresp/test/unit/encoders/reqestEncode.test.ts index f642151f4609..221dc8237e19 100644 --- a/packages/reqresp/test/unit/encoders/reqestEncode.test.ts +++ b/packages/reqresp/test/unit/encoders/reqestEncode.test.ts @@ -1,3 +1,4 @@ +import {describe, it} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {requestEncode} from "../../../src/encoders/requestEncode.js"; @@ -6,14 +7,12 @@ import {expectEqualByteChunks} from "../../utils/index.js"; describe("encoders / requestEncode", () => { describe("valid cases", () => { - for (const {id, protocol, requestBody, chunks} of requestEncodersCases) { - it(`${id}`, async () => { - const encodedChunks = await pipe(requestEncode(protocol, requestBody), all); - expectEqualByteChunks( - encodedChunks as Uint8Array[], - chunks.map((c) => c.subarray()) - ); - }); - } + it.each(requestEncodersCases)("$id", 
async ({protocol, requestBody, chunks}) => { + const encodedChunks = await pipe(requestEncode(protocol, requestBody), all); + expectEqualByteChunks( + encodedChunks as Uint8Array[], + chunks.map((c) => c.subarray()) + ); + }); }); }); diff --git a/packages/reqresp/test/unit/encoders/requestDecode.test.ts b/packages/reqresp/test/unit/encoders/requestDecode.test.ts index f306a621b6e6..60ccab1eecf1 100644 --- a/packages/reqresp/test/unit/encoders/requestDecode.test.ts +++ b/packages/reqresp/test/unit/encoders/requestDecode.test.ts @@ -1,30 +1,21 @@ -import chai, {expect} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect} from "vitest"; import {pipe} from "it-pipe"; import {requestDecode} from "../../../src/encoders/requestDecode.js"; import {requestEncodersCases, requestEncodersErrorCases} from "../../fixtures/encoders.js"; import {expectRejectedWithLodestarError} from "../../utils/errors.js"; import {arrToSource} from "../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encoders / requestDecode", () => { describe("valid cases", () => { - for (const {id, protocol, requestBody, chunks} of requestEncodersCases) { - it(`${id}`, async () => { - // TODO: Debug this type error - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const decodedBody = await pipe(arrToSource(chunks), requestDecode(protocol)); - expect(decodedBody).to.deep.equal(requestBody); - }); - } + it.each(requestEncodersCases)("$id", async ({protocol, requestBody, chunks}) => { + const decodedBody = await pipe(arrToSource(chunks), requestDecode(protocol)); + expect(decodedBody).to.deep.equal(requestBody); + }); }); describe("error cases", () => { - for (const {id, protocol, errorDecode, chunks} of requestEncodersErrorCases.filter((r) => r.errorDecode)) { - it(`${id}`, async () => { - await expectRejectedWithLodestarError(pipe(arrToSource(chunks), requestDecode(protocol)), errorDecode); - }); - } + 
it.each(requestEncodersErrorCases.filter((r) => r.errorDecode))("$id", async ({protocol, errorDecode, chunks}) => { + await expectRejectedWithLodestarError(pipe(arrToSource(chunks), requestDecode(protocol)), errorDecode); + }); }); }); diff --git a/packages/reqresp/test/unit/encoders/responseDecode.test.ts b/packages/reqresp/test/unit/encoders/responseDecode.test.ts index 777bda9bd371..a6dfe092b169 100644 --- a/packages/reqresp/test/unit/encoders/responseDecode.test.ts +++ b/packages/reqresp/test/unit/encoders/responseDecode.test.ts @@ -1,5 +1,4 @@ -import chai, {expect} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {LodestarError} from "@lodestar/utils"; @@ -8,30 +7,25 @@ import {responseEncodersErrorTestCases, responseEncodersTestCases} from "../../f import {expectRejectedWithLodestarError} from "../../utils/errors.js"; import {arrToSource, onlySuccessResp} from "../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encoders / responseDecode", () => { describe("valid cases", () => { - for (const {id, protocol, responseChunks, chunks} of responseEncodersTestCases) { - it(`${id}`, async () => { - const responses = await pipe( - arrToSource(chunks), - // eslint-disable-next-line @typescript-eslint/no-empty-function - responseDecode(protocol, {onFirstHeader: () => {}, onFirstResponseChunk: () => {}}), - all - ); + it.each(responseEncodersTestCases)("$id", async ({protocol, responseChunks, chunks}) => { + const responses = await pipe( + arrToSource(chunks), + // eslint-disable-next-line @typescript-eslint/no-empty-function + responseDecode(protocol, {onFirstHeader: () => {}, onFirstResponseChunk: () => {}}), + all + ); - const expectedResponses = responseChunks.filter(onlySuccessResp).map((r) => r.payload); - expect(responses).to.deep.equal(expectedResponses); - }); - } + const expectedResponses = 
responseChunks.filter(onlySuccessResp).map((r) => r.payload); + expect(responses).to.deep.equal(expectedResponses); + }); }); describe("error cases", () => { - for (const {id, protocol, chunks, decodeError} of responseEncodersErrorTestCases.filter( - (r) => r.decodeError !== undefined - )) { - it(`${id}`, async () => { + it.each(responseEncodersErrorTestCases.filter((r) => r.decodeError !== undefined))( + "$id", + async ({protocol, chunks, decodeError}) => { await expectRejectedWithLodestarError( pipe( arrToSource(chunks as Uint8Array[]), @@ -41,7 +35,7 @@ describe("encoders / responseDecode", () => { ), decodeError as LodestarError ); - }); - } + } + ); }); }); diff --git a/packages/reqresp/test/unit/encoders/responseEncode.test.ts b/packages/reqresp/test/unit/encoders/responseEncode.test.ts index f8617c27ff43..b9b5f3f8ee11 100644 --- a/packages/reqresp/test/unit/encoders/responseEncode.test.ts +++ b/packages/reqresp/test/unit/encoders/responseEncode.test.ts @@ -1,5 +1,4 @@ -import chai from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {Protocol} from "../../../src/types.js"; @@ -7,19 +6,18 @@ import {responseEncodersTestCases} from "../../fixtures/encoders.js"; import {responseEncode} from "../../utils/response.js"; import {expectEqualByteChunks} from "../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encoders / responseEncode", () => { describe("valid cases", () => { - for (const {id, protocol, responseChunks, chunks} of responseEncodersTestCases.filter((f) => !f.skipEncoding)) { - it(`${id}`, async () => { + it.each(responseEncodersTestCases.filter((f) => !f.skipEncoding))( + "$id", + async ({protocol, responseChunks, chunks}) => { const encodedChunks = await pipe(responseEncode(responseChunks, protocol as Protocol), all); expectEqualByteChunks( encodedChunks as Uint8Array[], chunks.map((c) => c.subarray()) ); - }); - } + } + ); }); }); 
diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts index 6ad5954dafa2..bfa597e42519 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/decode.test.ts @@ -1,5 +1,4 @@ -import chai, {expect} from "chai"; -import chaiAsPromised from "chai-as-promised"; +import {describe, it, expect} from "vitest"; import {Uint8ArrayList} from "uint8arraylist"; import {encode as varintEncode} from "uint8-varint"; import {readSszSnappyPayload} from "../../../../src/encodingStrategies/sszSnappy/index.js"; @@ -11,16 +10,12 @@ import { } from "../../../fixtures/index.js"; import {arrToSource} from "../../../utils/index.js"; -chai.use(chaiAsPromised); - describe("encodingStrategies / sszSnappy / decode", () => { - for (const {id, type, binaryPayload, chunks} of encodingStrategiesTestCases) { - it(id, async () => { - const bufferedSource = new BufferedSource(arrToSource(chunks)); - const bodyResult = await readSszSnappyPayload(bufferedSource, type); - expect(bodyResult).to.deep.equal(binaryPayload.data, "Wrong decoded body"); - }); - } + it.each(encodingStrategiesTestCases)("$id", async ({type, binaryPayload, chunks}) => { + const bufferedSource = new BufferedSource(arrToSource(chunks)); + const bodyResult = await readSszSnappyPayload(bufferedSource, type); + expect(bodyResult).toEqual(binaryPayload.data); + }); describe("mainnet cases", () => { for (const {id, payload, type: serializer, streamedBody} of encodingStrategiesMainnetTestCases) { @@ -31,7 +26,7 @@ describe("encodingStrategies / sszSnappy / decode", () => { const bufferedSource = new BufferedSource(arrToSource([streamedBytes])); const bodyResult = await readSszSnappyPayload(bufferedSource, serializer); - expect(bodyResult).to.deep.equal(payload.data, "Wrong decoded body"); + expect(bodyResult).toEqual(new Uint8Array(payload.data)); 
}); } }); @@ -40,7 +35,7 @@ describe("encodingStrategies / sszSnappy / decode", () => { for (const {id, type, error, chunks} of encodingStrategiesDecodingErrorCases) { it(id, async () => { const bufferedSource = new BufferedSource(arrToSource([new Uint8ArrayList(...chunks)])); - await expect(readSszSnappyPayload(bufferedSource, type)).to.be.rejectedWith(error); + await expect(readSszSnappyPayload(bufferedSource, type)).rejects.toThrow(error); }); } }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts index 6e94596930e6..6ec27d1e6b16 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/encode.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import all from "it-all"; import {pipe} from "it-pipe"; import {encode as varintEncode} from "uint8-varint"; @@ -7,26 +7,22 @@ import {encodingStrategiesMainnetTestCases, encodingStrategiesTestCases} from ". 
import {expectEqualByteChunks} from "../../../utils/index.js"; describe("encodingStrategies / sszSnappy / encode", () => { - for (const {id, binaryPayload, chunks} of encodingStrategiesTestCases) { - it(id, async () => { - const encodedChunks = await pipe(writeSszSnappyPayload(binaryPayload.data), all); - expectEqualByteChunks( - encodedChunks as Uint8Array[], - chunks.map((c) => c.subarray()) - ); - }); - } + it.each(encodingStrategiesTestCases)("$id", async ({binaryPayload, chunks}) => { + const encodedChunks = await pipe(writeSszSnappyPayload(Buffer.from(binaryPayload.data)), all); + expectEqualByteChunks( + encodedChunks as Uint8Array[], + chunks.map((c) => c.subarray()) + ); + }); describe("mainnet cases", () => { - for (const {id, payload, streamedBody} of encodingStrategiesMainnetTestCases) { - it(id, async () => { - const bodySize = payload.data.length; + it.each(encodingStrategiesMainnetTestCases)("$id", async ({payload, streamedBody}) => { + const bodySize = payload.data.length; - const encodedChunks = await pipe(writeSszSnappyPayload(payload.data), all); - const encodedStream = Buffer.concat(encodedChunks as Uint8Array[]); - const expectedStreamed = Buffer.concat([Buffer.from(varintEncode(bodySize)), streamedBody]); - expect(encodedStream).to.be.deep.equal(expectedStreamed); - }); - } + const encodedChunks = await pipe(writeSszSnappyPayload(Buffer.from(payload.data)), all); + const encodedStream = Buffer.concat(encodedChunks as Uint8Array[]); + const expectedStreamed = Buffer.concat([Buffer.from(varintEncode(bodySize)), streamedBody]); + expect(encodedStream).toEqual(expectedStreamed); + }); }); }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts index 2abb99e35d54..b47621082a65 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts +++ 
b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/snappyFrames/uncompress.test.ts @@ -1,57 +1,59 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {Uint8ArrayList} from "uint8arraylist"; import {pipe} from "it-pipe"; import {SnappyFramesUncompress} from "../../../../../src/encodingStrategies/sszSnappy/snappyFrames/uncompress.js"; import {encodeSnappy} from "../../../../../src/encodingStrategies/sszSnappy/snappyFrames/compress.js"; describe("encodingStrategies / sszSnappy / snappy frames / uncompress", function () { - it("should work with short input", function (done) { - const testData = "Small test data"; - const compressIterable = encodeSnappy(Buffer.from(testData)); - - const decompress = new SnappyFramesUncompress(); - - void pipe(compressIterable, async function (source) { - for await (const data of source) { - const result = decompress.uncompress(new Uint8ArrayList(data)); - if (result) { - expect(result.subarray().toString()).to.be.equal(testData); - done(); + it("should work with short input", () => + new Promise((done) => { + const testData = "Small test data"; + const compressIterable = encodeSnappy(Buffer.from(testData)); + + const decompress = new SnappyFramesUncompress(); + + void pipe(compressIterable, async function (source) { + for await (const data of source) { + const result = decompress.uncompress(new Uint8ArrayList(data)); + if (result) { + expect(result.subarray().toString()).toBe(testData); + done(); + } } - } - }); - }); - - it("should work with huge input", function (done) { - const testData = Buffer.alloc(100000, 4).toString(); - const compressIterable = encodeSnappy(Buffer.from(testData)); - let result = Buffer.alloc(0); - const decompress = new SnappyFramesUncompress(); - - void pipe(compressIterable, async function (source) { - for await (const data of source) { - // testData will come compressed as two or more chunks - result = Buffer.concat([ - result, - decompress.uncompress(new 
Uint8ArrayList(data))?.subarray() ?? Buffer.alloc(0), - ]); - if (result.length === testData.length) { - expect(result.toString()).to.be.equal(testData); - done(); + }); + })); + + it("should work with huge input", () => + new Promise((done) => { + const testData = Buffer.alloc(100000, 4).toString(); + const compressIterable = encodeSnappy(Buffer.from(testData)); + let result = Buffer.alloc(0); + const decompress = new SnappyFramesUncompress(); + + void pipe(compressIterable, async function (source) { + for await (const data of source) { + // testData will come compressed as two or more chunks + result = Buffer.concat([ + result, + decompress.uncompress(new Uint8ArrayList(data))?.subarray() ?? Buffer.alloc(0), + ]); + if (result.length === testData.length) { + expect(result.toString()).toBe(testData); + done(); + } } - } - }); - }); + }); + })); it("should detect malformed input", function () { const decompress = new SnappyFramesUncompress(); - expect(() => decompress.uncompress(new Uint8ArrayList(Buffer.alloc(32, 5)))).to.throw(); + expect(() => decompress.uncompress(new Uint8ArrayList(Buffer.alloc(32, 5)))).toThrow(); }); it("should return null if not enough data", function () { const decompress = new SnappyFramesUncompress(); - expect(decompress.uncompress(new Uint8ArrayList(Buffer.alloc(3, 1)))).to.equal(null); + expect(decompress.uncompress(new Uint8ArrayList(Buffer.alloc(3, 1)))).toBe(null); }); }); diff --git a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts index eae2da2e57f8..a494b4acab9a 100644 --- a/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts +++ b/packages/reqresp/test/unit/encodingStrategies/sszSnappy/utils.test.ts @@ -1,10 +1,10 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {maxEncodedLen} from "../../../../src/encodingStrategies/sszSnappy/utils.js"; describe("encodingStrategies / sszSnappy / 
utils", () => { describe("maxEncodedLen", () => { it("should calculate correct maxEncodedLen", () => { - expect(maxEncodedLen(6)).to.be.equal(39); + expect(maxEncodedLen(6)).toBe(39); }); }); }); diff --git a/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts b/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts index d8ea38b539dd..b3393c609a76 100644 --- a/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts +++ b/packages/reqresp/test/unit/rate_limiter/rateLimiterGRCA.test.ts @@ -1,59 +1,58 @@ -import {expect} from "chai"; -import sinon from "sinon"; +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {RateLimiterGRCA} from "../../../src/rate_limiter/rateLimiterGRCA.js"; describe("rateLimiterGRCA", () => { let rateLimiter: RateLimiterGRCA; const limit = 500; const limitTimeMs = 60 * 1000; // 1 min - const sandbox = sinon.createSandbox(); beforeEach(() => { - sandbox.useFakeTimers(); + vi.useFakeTimers(); rateLimiter = RateLimiterGRCA.fromQuota({quotaTimeMs: limitTimeMs, quota: limit}); }); afterEach(() => { - sandbox.restore(); + vi.restoreAllMocks(); + vi.useRealTimers(); }); describe("allows()", () => { it("should throw error if requested for a zero value", () => { - expect(() => rateLimiter.allows(null, 0)).to.throw("Token value should always be positive. Given: 0"); + expect(() => rateLimiter.allows(null, 0)).toThrow("Token value should always be positive. Given: 0"); }); it("should throw error if requested for a negative value", () => { - expect(() => rateLimiter.allows(null, -1)).to.throw("Token value should always be positive. Given: -1"); + expect(() => rateLimiter.allows(null, -1)).toThrow("Token value should always be positive. 
Given: -1"); }); it("should return valid number of requests within request window", () => { - expect(rateLimiter.allows(null, 10)).to.be.true; - expect(rateLimiter.allows(null, 50)).to.be.true; + expect(rateLimiter.allows(null, 10)).toBe(true); + expect(rateLimiter.allows(null, 50)).toBe(true); }); it("should return valid number of requests within request window for maximum requests", () => { - expect(rateLimiter.allows(null, limit)).to.be.true; + expect(rateLimiter.allows(null, limit)).toBe(true); }); it("should return zero within request window for higher number of requests", () => { - expect(rateLimiter.allows(null, limit + 1)).to.be.false; + expect(rateLimiter.allows(null, limit + 1)).toBe(false); }); it("should return zero once the tracker limit reached", () => { rateLimiter.allows(null, limit); - expect(rateLimiter.allows(null, 10)).to.be.false; + expect(rateLimiter.allows(null, 10)).toBe(false); }); it("should return over limit values before limit reached", () => { rateLimiter.allows(null, limit - 10); - expect(rateLimiter.allows(null, 15)).to.be.false; + expect(rateLimiter.allows(null, 15)).toBe(false); }); it("should reset the rate after the time limit", () => { rateLimiter.allows(null, limit); - expect(rateLimiter.allows(null, 10)).to.be.false; - sandbox.clock.tick(limitTimeMs); - expect(rateLimiter.allows(null, 10)).to.be.true; + expect(rateLimiter.allows(null, 10)).toBe(false); + vi.advanceTimersByTime(limitTimeMs); + expect(rateLimiter.allows(null, 10)).toBe(true); }); }); diff --git a/packages/reqresp/test/unit/request/index.test.ts b/packages/reqresp/test/unit/request/index.test.ts index a056d8055668..b3241a8cc44a 100644 --- a/packages/reqresp/test/unit/request/index.test.ts +++ b/packages/reqresp/test/unit/request/index.test.ts @@ -1,9 +1,8 @@ +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; import {PeerId} from "@libp2p/interface/peer-id"; import all from "it-all"; import {pipe} from "it-pipe"; -import {expect} from "chai"; 
import {Libp2p} from "libp2p"; -import sinon from "sinon"; import {getEmptyLogger} from "@lodestar/logger/empty"; import {LodestarError, sleep} from "@lodestar/utils"; import {RequestError, RequestErrorCode, sendRequest, SendRequestOpts} from "../../../src/request/index.js"; @@ -21,7 +20,6 @@ describe("request / sendRequest", () => { let controller: AbortController; let peerId: PeerId; let libp2p: Libp2p; - const sandbox = sinon.createSandbox(); const emptyProtocol = pingProtocol(getEmptyHandler()); const EMPTY_REQUEST = new Uint8Array(); @@ -36,9 +34,9 @@ describe("request / sendRequest", () => { id: "Return first chunk only for a single-chunk method", protocols: [emptyProtocol], requestBody: sszSnappyPing.binaryPayload, - expectedReturn: [sszSnappyPing.binaryPayload], + expectedReturn: [{...sszSnappyPing.binaryPayload, data: Buffer.from(sszSnappyPing.binaryPayload.data)}], }, - // limit to max responses is no longer the responsability of this package + // limit to max responses is no longer the responsibility of this package // { // id: "Return up to maxResponses for a multi-chunk method", // protocols: [customProtocol({})], @@ -53,16 +51,16 @@ describe("request / sendRequest", () => { }); afterEach(() => { - sandbox.restore(); + vi.restoreAllMocks(); controller.abort(); }); for (const {id, protocols, expectedReturn, requestBody} of testCases) { it(id, async () => { libp2p = { - dialProtocol: sinon - .stub() - .resolves( + dialProtocol: vi + .fn() + .mockResolvedValue( new MockLibP2pStream( responseEncode([{status: RespStatus.SUCCESS, payload: requestBody}], protocols[0] as Protocol), protocols[0].method @@ -81,7 +79,7 @@ describe("request / sendRequest", () => { ), all ); - expect(responses).to.deep.equal(expectedReturn); + expect(responses).toEqual(expectedReturn); }); } @@ -138,7 +136,7 @@ describe("request / sendRequest", () => { for (const {id, source, opts, error} of timeoutTestCases) { it(id, async () => { libp2p = { - dialProtocol: 
sinon.stub().resolves(new MockLibP2pStream(source(), testMethod)), + dialProtocol: vi.fn().mockResolvedValue(new MockLibP2pStream(source(), testMethod)), } as unknown as Libp2p; await expectRejectedWithLodestarError( diff --git a/packages/reqresp/test/unit/response/index.test.ts b/packages/reqresp/test/unit/response/index.test.ts index 7298a21dccbd..5ab299b586ab 100644 --- a/packages/reqresp/test/unit/response/index.test.ts +++ b/packages/reqresp/test/unit/response/index.test.ts @@ -1,5 +1,5 @@ +import {describe, it, expect, beforeEach, afterEach} from "vitest"; import {PeerId} from "@libp2p/interface/peer-id"; -import {expect} from "chai"; import {LodestarError, fromHex} from "@lodestar/utils"; import {getEmptyLogger} from "@lodestar/logger/empty"; import {Protocol, RespStatus} from "../../../src/index.js"; @@ -54,30 +54,28 @@ describe("response / handleRequest", () => { afterEach(() => controller.abort()); - for (const {id, requestChunks, protocol, expectedResponseChunks, expectedError} of testCases) { - it(id, async () => { - const stream = new MockLibP2pStream(requestChunks as any); - const rateLimiter = new ReqRespRateLimiter({rateLimitMultiplier: 0}); + it.each(testCases)("$id", async ({requestChunks, protocol, expectedResponseChunks, expectedError}) => { + const stream = new MockLibP2pStream(requestChunks as any); + const rateLimiter = new ReqRespRateLimiter({rateLimitMultiplier: 0}); - const resultPromise = handleRequest({ - logger, - metrics: null, - protocol, - protocolID: protocol.method, - stream, - peerId, - signal: controller.signal, - rateLimiter, - }); + const resultPromise = handleRequest({ + logger, + metrics: null, + protocol, + protocolID: protocol.method, + stream, + peerId, + signal: controller.signal, + rateLimiter, + }); - // Make sure the test error-ed with expected error, otherwise it's hard to debug with responseChunks - if (expectedError) { - await expectRejectedWithLodestarError(resultPromise, expectedError); - } else { - await 
expect(resultPromise).to.not.rejectedWith(); - } + // Make sure the test error-ed with expected error, otherwise it's hard to debug with responseChunks + if (expectedError) { + await expectRejectedWithLodestarError(resultPromise, expectedError); + } else { + await expect(resultPromise).resolves.toBeUndefined(); + } - expectEqualByteChunks(stream.resultChunks, expectedResponseChunks, "Wrong response chunks"); - }); - } + expectEqualByteChunks(stream.resultChunks, expectedResponseChunks, "Wrong response chunks"); + }); }); diff --git a/packages/reqresp/test/unit/utils/protocolId.test.ts b/packages/reqresp/test/unit/utils/protocolId.test.ts index 7d16669c1421..04cd93222045 100644 --- a/packages/reqresp/test/unit/utils/protocolId.test.ts +++ b/packages/reqresp/test/unit/utils/protocolId.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {Encoding, ProtocolAttributes} from "../../../src/index.js"; import {formatProtocolID, parseProtocolID as reqrespParseProtocolID} from "../../../src/utils/index.js"; @@ -36,11 +36,11 @@ describe("ReqResp protocolID parse / render", () => { for (const {method, encoding, version, protocolId} of testCases) { it(`Should render ${protocolId}`, () => { - expect(formatProtocolID(protocolPrefix, method, version, encoding)).to.equal(protocolId); + expect(formatProtocolID(protocolPrefix, method, version, encoding)).toBe(protocolId); }); it(`Should parse ${protocolId}`, () => { - expect(parseProtocolId(protocolId)).to.deep.equal({protocolPrefix, method, version, encoding}); + expect(parseProtocolId(protocolId)).toEqual({protocolPrefix, method, version, encoding}); }); } }); diff --git a/packages/reqresp/test/utils/index.ts b/packages/reqresp/test/utils/index.ts index 8ad11bbd30b2..924218c73cd4 100644 --- a/packages/reqresp/test/utils/index.ts +++ b/packages/reqresp/test/utils/index.ts @@ -74,7 +74,7 @@ export function fromHexBuf(hex: string): Buffer { return Buffer.from(fromHex(hex)); } 
-export const ZERO_HASH = Buffer.alloc(32, 0); +export const ZERO_HASH = new Uint8Array(32); export const onlySuccessResp = (resp: ResponseChunk): resp is {status: RespStatus.SUCCESS; payload: ResponseIncoming} => resp.status === RespStatus.SUCCESS; diff --git a/packages/reqresp/vitest.config.ts b/packages/reqresp/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/reqresp/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/spec-test-util/package.json b/packages/spec-test-util/package.json index 79324d813caa..5fc59cd76e12 100644 --- a/packages/spec-test-util/package.json +++ b/packages/spec-test-util/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/spec-test-util", - "version": "1.12.0", + "version": "1.13.0", "description": "Spec test suite generator from yaml test files", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -45,7 +45,7 @@ "blockchain" ], "dependencies": { - "@lodestar/utils": "^1.12.0", + "@lodestar/utils": "^1.13.0", "async-retry": "^1.3.3", "axios": "^1.3.4", "chai": "^4.3.7", diff --git a/packages/state-transition/.mocharc.yaml b/packages/state-transition/.mocharc.yaml deleted file mode 100644 index f28ebdf663a0..000000000000 --- a/packages/state-transition/.mocharc.yaml +++ /dev/null @@ -1,6 +0,0 @@ -colors: true -timeout: 5000 -exit: true -extension: ["ts"] -node-option: - - "loader=ts-node/esm" diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index f743861f54ec..f0f2f150f673 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", 
"type": "module", "exports": { ".": { @@ -52,7 +52,7 @@ "check-types": "tsc", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", - "test:unit": "mocha 'test/unit/**/*.test.ts'", + "test:unit": "vitest --run --dir test/unit/ --coverage", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", @@ -63,10 +63,10 @@ "@chainsafe/persistent-merkle-tree": "^0.6.1", "@chainsafe/persistent-ts": "^0.19.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/config": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/config": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", "bigint-buffer": "^1.1.5", "buffer-xor": "^2.0.2" }, diff --git a/packages/state-transition/src/block/processAttestationsAltair.ts b/packages/state-transition/src/block/processAttestationsAltair.ts index cabe28b88b27..e37629712194 100644 --- a/packages/state-transition/src/block/processAttestationsAltair.ts +++ b/packages/state-transition/src/block/processAttestationsAltair.ts @@ -123,6 +123,7 @@ export function processAttestationsAltair( } increaseBalance(state, epochCtx.getBeaconProposer(state.slot), proposerReward); + state.proposerRewards.attestations = proposerReward; } /** diff --git a/packages/state-transition/src/block/processSyncCommittee.ts b/packages/state-transition/src/block/processSyncCommittee.ts index e0bfc318e052..70c918ed60bf 100644 --- a/packages/state-transition/src/block/processSyncCommittee.ts +++ b/packages/state-transition/src/block/processSyncCommittee.ts @@ -41,6 +41,7 @@ export function processSyncAggregate( } // Proposer reward proposerBalance += syncProposerReward; + state.proposerRewards.syncAggregate += syncProposerReward; } else { // Negative rewards for non participants if (index === proposerIndex) { diff --git a/packages/state-transition/src/block/slashValidator.ts 
b/packages/state-transition/src/block/slashValidator.ts index ea2ca91e81ee..133041d36869 100644 --- a/packages/state-transition/src/block/slashValidator.ts +++ b/packages/state-transition/src/block/slashValidator.ts @@ -66,9 +66,11 @@ export function slashValidator( if (whistleblowerIndex === undefined || !Number.isSafeInteger(whistleblowerIndex)) { // Call increaseBalance() once with `(whistleblowerReward - proposerReward) + proposerReward` increaseBalance(state, proposerIndex, whistleblowerReward); + state.proposerRewards.slashing += whistleblowerReward; } else { increaseBalance(state, proposerIndex, proposerReward); increaseBalance(state, whistleblowerIndex, whistleblowerReward - proposerReward); + state.proposerRewards.slashing += proposerReward; } // TODO: describe issue. Compute progressive target balances diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index 8b63b0285098..78cccacf1d00 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -309,8 +309,10 @@ export class EpochCache { if (cachedPreviousShuffling == null && isActiveValidator(validator, previousEpoch)) { previousActiveIndices.push(i); } - if (cachedCurrentShuffling == null && isActiveValidator(validator, currentEpoch)) { - currentActiveIndices.push(i); + if (isActiveValidator(validator, currentEpoch)) { + if (cachedCurrentShuffling == null) { + currentActiveIndices.push(i); + } // We track totalActiveBalanceIncrements as ETH to fit total network balance in a JS number (53 bits) totalActiveBalanceIncrements += effectiveBalanceIncrements[i]; } diff --git a/packages/state-transition/src/cache/rewardCache.ts b/packages/state-transition/src/cache/rewardCache.ts new file mode 100644 index 000000000000..669060d143cd --- /dev/null +++ b/packages/state-transition/src/cache/rewardCache.ts @@ -0,0 +1,18 @@ +/** + * A simple data structure to store rewards payable to block 
proposer in the memory. + * Rewards are updated throughout the state transition + * Should only hold info for one state transition + */ +export type RewardCache = { + attestations: number; + syncAggregate: number; + slashing: number; // Sum of attester and proposer slashing reward +}; + +export function createEmptyRewardCache(): RewardCache { + return { + attestations: 0, + syncAggregate: 0, + slashing: 0, + }; +} diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index 14a29b5f09c0..b01ca0c409b2 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -1,5 +1,5 @@ import bls from "@chainsafe/bls"; -import {CoordType} from "@chainsafe/blst"; +import {CoordType} from "@chainsafe/bls/types"; import {BeaconConfig} from "@lodestar/config"; import {loadState} from "../util/loadState/loadState.js"; import {EpochCache, EpochCacheImmutableData, EpochCacheOpts} from "./epochCache.js"; @@ -12,6 +12,7 @@ import { BeaconStateCapella, BeaconStateDeneb, } from "./types.js"; +import {RewardCache, createEmptyRewardCache} from "./rewardCache.js"; export type BeaconStateCache = { config: BeaconConfig; @@ -20,6 +21,7 @@ export type BeaconStateCache = { readonly clonedCount: number; readonly clonedCountWithTransferCache: number; readonly createdWithTransferCache: boolean; + proposerRewards: RewardCache; }; type Mutable = { @@ -147,6 +149,7 @@ export function createCachedBeaconState( clonedCount: 0, clonedCountWithTransferCache: 0, createdWithTransferCache: false, + proposerRewards: createEmptyRewardCache(), }); return cachedState; @@ -156,9 +159,9 @@ export function createCachedBeaconState( * Create a CachedBeaconState given a cached seed state and state bytes * This guarantees that the returned state shares the same tree with the seed state * Check loadState() api for more details - * TODO: after EIP-6110 need to provide a pivotValidatorIndex to decide 
which comes to finalized validators cache, which comes to unfinalized cache + * // TODO: rename to loadUnfinalizedCachedBeaconState() due to EIP-6110 */ -export function loadUnfinalizedCachedBeaconState( +export function loadCachedBeaconState( cachedSeedState: T, stateBytes: Uint8Array, opts?: EpochCacheOpts @@ -198,6 +201,7 @@ export function getCachedBeaconState( (cachedState as BeaconStateCacheMutable).clonedCount = cache.clonedCount; (cachedState as BeaconStateCacheMutable).clonedCountWithTransferCache = cache.clonedCountWithTransferCache; (cachedState as BeaconStateCacheMutable).createdWithTransferCache = cache.createdWithTransferCache; + cachedState.proposerRewards = cache.proposerRewards; // Overwrite .clone function to preserve cache // TreeViewDU.clone() creates a new object that does not have the attached cache @@ -219,6 +223,7 @@ export function getCachedBeaconState( clonedCount: 0, clonedCountWithTransferCache: 0, createdWithTransferCache: !dontTransferCache, + proposerRewards: createEmptyRewardCache(), // this sets the rewards to 0 while cloning new state }) as T & BeaconStateCache; } diff --git a/packages/state-transition/src/epoch/index.ts b/packages/state-transition/src/epoch/index.ts index bb37ed17f4e1..b55ebe291fb9 100644 --- a/packages/state-transition/src/epoch/index.ts +++ b/packages/state-transition/src/epoch/index.ts @@ -12,6 +12,7 @@ import { CachedBeaconStatePhase0, EpochTransitionCache, } from "../types.js"; +import {BeaconStateTransitionMetrics} from "../metrics.js"; import {processEffectiveBalanceUpdates} from "./processEffectiveBalanceUpdates.js"; import {processEth1DataReset} from "./processEth1DataReset.js"; import {processHistoricalRootsUpdate} from "./processHistoricalRootsUpdate.js"; @@ -50,7 +51,28 @@ export {computeUnrealizedCheckpoints} from "./computeUnrealizedCheckpoints.js"; const maxValidatorsPerStateSlashing = SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE; const maxSafeValidators = 
Math.floor(Number.MAX_SAFE_INTEGER / MAX_EFFECTIVE_BALANCE); -export function processEpoch(fork: ForkSeq, state: CachedBeaconStateAllForks, cache: EpochTransitionCache): void { +/** + * Epoch transition steps tracked in metrics + */ +export enum EpochTransitionStep { + beforeProcessEpoch = "beforeProcessEpoch", + afterProcessEpoch = "afterProcessEpoch", + processJustificationAndFinalization = "processJustificationAndFinalization", + processInactivityUpdates = "processInactivityUpdates", + processRegistryUpdates = "processRegistryUpdates", + processSlashings = "processSlashings", + processRewardsAndPenalties = "processRewardsAndPenalties", + processEffectiveBalanceUpdates = "processEffectiveBalanceUpdates", + processParticipationFlagUpdates = "processParticipationFlagUpdates", + processSyncCommitteeUpdates = "processSyncCommitteeUpdates", +} + +export function processEpoch( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + cache: EpochTransitionCache, + metrics?: BeaconStateTransitionMetrics | null +): void { // state.slashings is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: // - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() // - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet @@ -59,19 +81,53 @@ export function processEpoch(fork: ForkSeq, state: CachedBeaconStateAllForks, ca throw new Error("Lodestar does not support this network, parameters don't fit number value inside state.slashings"); } - processJustificationAndFinalization(state, cache); + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processJustificationAndFinalization, + }); + processJustificationAndFinalization(state, cache); + timer?.(); + } + if (fork >= ForkSeq.altair) { + const timer = 
metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processInactivityUpdates}); processInactivityUpdates(state as CachedBeaconStateAltair, cache); + timer?.(); } + // processRewardsAndPenalties() is 2nd step in the specs, we optimize to do it // after processSlashings() to update balances only once // processRewardsAndPenalties(state, cache); - processRegistryUpdates(state, cache); + { + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processRegistryUpdates}); + processRegistryUpdates(state, cache); + timer?.(); + } + // accumulate slashing penalties and only update balances once in processRewardsAndPenalties() - const slashingPenalties = processSlashings(state, cache, false); - processRewardsAndPenalties(state, cache, slashingPenalties); + let slashingPenalties: number[]; + { + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processSlashings}); + slashingPenalties = processSlashings(state, cache, false); + timer?.(); + } + + { + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processRewardsAndPenalties}); + processRewardsAndPenalties(state, cache, slashingPenalties); + timer?.(); + } + processEth1DataReset(state, cache); - processEffectiveBalanceUpdates(state, cache); + + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processEffectiveBalanceUpdates, + }); + processEffectiveBalanceUpdates(state, cache); + timer?.(); + } + processSlashingsReset(state, cache); processRandaoMixesReset(state, cache); @@ -84,7 +140,20 @@ export function processEpoch(fork: ForkSeq, state: CachedBeaconStateAllForks, ca if (fork === ForkSeq.phase0) { processParticipationRecordUpdates(state as CachedBeaconStatePhase0); } else { - processParticipationFlagUpdates(state as CachedBeaconStateAltair); - processSyncCommitteeUpdates(state as CachedBeaconStateAltair); + { + const timer = 
metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processParticipationFlagUpdates, + }); + processParticipationFlagUpdates(state as CachedBeaconStateAltair); + timer?.(); + } + + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processSyncCommitteeUpdates, + }); + processSyncCommitteeUpdates(state as CachedBeaconStateAltair); + timer?.(); + } } } diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index e72b6fa0581c..8786c0f6e358 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -2,6 +2,7 @@ export * from "./stateTransition.js"; export * from "./constants/index.js"; export * from "./util/index.js"; export * from "./signatureSets/index.js"; +export type {EpochTransitionStep} from "./epoch/index.js"; export type {BeaconStateTransitionMetrics} from "./metrics.js"; export type { @@ -25,7 +26,7 @@ export type { // Main state caches export { createCachedBeaconState, - loadUnfinalizedCachedBeaconState, + loadCachedBeaconState, type BeaconStateCache, isCachedBeaconState, isStateBalancesNodesPopulated, diff --git a/packages/state-transition/src/metrics.ts b/packages/state-transition/src/metrics.ts index c5179a1df5b3..62062bbfc539 100644 --- a/packages/state-transition/src/metrics.ts +++ b/packages/state-transition/src/metrics.ts @@ -1,17 +1,21 @@ import {Epoch} from "@lodestar/types"; +import {Gauge, Histogram} from "@lodestar/utils"; import {CachedBeaconStateAllForks} from "./types.js"; +import {StateCloneSource, StateHashTreeRootSource} from "./stateTransition.js"; import {AttesterStatus} from "./util/attesterStatus.js"; +import {EpochTransitionStep} from "./epoch/index.js"; export type BeaconStateTransitionMetrics = { epochTransitionTime: Histogram; epochTransitionCommitTime: Histogram; + epochTransitionStepTime: Histogram<{step: EpochTransitionStep}>; processBlockTime: Histogram; processBlockCommitTime: Histogram; 
- stateHashTreeRootTime: Histogram; - preStateBalancesNodesPopulatedMiss: Gauge<"source">; - preStateBalancesNodesPopulatedHit: Gauge<"source">; - preStateValidatorsNodesPopulatedMiss: Gauge<"source">; - preStateValidatorsNodesPopulatedHit: Gauge<"source">; + stateHashTreeRootTime: Histogram<{source: StateHashTreeRootSource}>; + preStateBalancesNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; + preStateBalancesNodesPopulatedHit: Gauge<{source: StateCloneSource}>; + preStateValidatorsNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; + preStateValidatorsNodesPopulatedHit: Gauge<{source: StateCloneSource}>; preStateClonedCount: Histogram; postStateBalancesNodesPopulatedMiss: Gauge; postStateBalancesNodesPopulatedHit: Gauge; @@ -20,26 +24,10 @@ export type BeaconStateTransitionMetrics = { registerValidatorStatuses: (currentEpoch: Epoch, statuses: AttesterStatus[], balances?: number[]) => void; }; -type LabelValues = Partial>; - -interface Histogram { - startTimer(): () => void; - - observe(value: number): void; - observe(labels: LabelValues, values: number): void; - observe(arg1: LabelValues | number, arg2?: number): void; -} - -interface Gauge { - inc(value?: number): void; - inc(labels: LabelValues, value?: number): void; - inc(arg1?: LabelValues | number, arg2?: number): void; -} - export function onStateCloneMetrics( state: CachedBeaconStateAllForks, metrics: BeaconStateTransitionMetrics, - source: "stateTransition" | "processSlots" + source: StateCloneSource ): void { metrics.preStateClonedCount.observe(state.clonedCount); diff --git a/packages/state-transition/src/signatureSets/proposer.ts b/packages/state-transition/src/signatureSets/proposer.ts index a00bcacc7c99..135ac7ed5c7a 100644 --- a/packages/state-transition/src/signatureSets/proposer.ts +++ b/packages/state-transition/src/signatureSets/proposer.ts @@ -1,5 +1,5 @@ -import {DOMAIN_BEACON_PROPOSER, DOMAIN_BLOB_SIDECAR} from "@lodestar/params"; -import {allForks, isBlindedBeaconBlock, 
isBlindedBlobSidecar, ssz} from "@lodestar/types"; +import {DOMAIN_BEACON_PROPOSER} from "@lodestar/params"; +import {allForks, isBlindedBeaconBlock, phase0, ssz} from "@lodestar/types"; import {computeSigningRoot} from "../util/index.js"; import {ISignatureSet, SignatureSetType, verifySignatureSet} from "../util/signatureSets.js"; import {CachedBeaconStateAllForks} from "../types.js"; @@ -17,7 +17,7 @@ export function getBlockProposerSignatureSet( signedBlock: allForks.FullOrBlindedSignedBeaconBlock ): ISignatureSet { const {config, epochCtx} = state; - const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); + const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlock.message.slot); const blockType = isBlindedBeaconBlock(signedBlock.message) ? config.getBlindedForkTypes(signedBlock.message.slot).BeaconBlock @@ -31,19 +31,17 @@ export function getBlockProposerSignatureSet( }; } -export function getBlobProposerSignatureSet( +export function getBlockHeaderProposerSignatureSet( state: CachedBeaconStateAllForks, - signedBlob: allForks.FullOrBlindedSignedBlobSidecar + signedBlockHeader: phase0.SignedBeaconBlockHeader ): ISignatureSet { const {config, epochCtx} = state; - const domain = config.getDomain(state.slot, DOMAIN_BLOB_SIDECAR, signedBlob.message.slot); - - const blockType = isBlindedBlobSidecar(signedBlob.message) ? 
ssz.deneb.BlindedBlobSidecar : ssz.deneb.BlobSidecar; + const domain = config.getDomain(state.slot, DOMAIN_BEACON_PROPOSER, signedBlockHeader.message.slot); return { type: SignatureSetType.single, - pubkey: epochCtx.index2pubkey[signedBlob.message.proposerIndex], - signingRoot: computeSigningRoot(blockType, signedBlob.message, domain), - signature: signedBlob.signature, + pubkey: epochCtx.index2pubkey[signedBlockHeader.message.proposerIndex], + signingRoot: computeSigningRoot(ssz.phase0.BeaconBlockHeader, signedBlockHeader.message, domain), + signature: signedBlockHeader.signature, }; } diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index 8fd98f4df03e..b3f3b41eb865 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -2,7 +2,7 @@ import {toHexString} from "@chainsafe/ssz"; import {allForks, Slot, ssz} from "@lodestar/types"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {BeaconStateTransitionMetrics, onPostStateMetrics, onStateCloneMetrics} from "./metrics.js"; -import {beforeProcessEpoch, EpochTransitionCacheOpts} from "./cache/epochTransitionCache.js"; +import {beforeProcessEpoch, EpochTransitionCache, EpochTransitionCacheOpts} from "./cache/epochTransitionCache.js"; import { CachedBeaconStateAllForks, CachedBeaconStatePhase0, @@ -20,7 +20,7 @@ import { upgradeStateToDeneb, } from "./slot/index.js"; import {processBlock} from "./block/index.js"; -import {processEpoch} from "./epoch/index.js"; +import {EpochTransitionStep, processEpoch} from "./epoch/index.js"; import {BlockExternalData, DataAvailableStatus, ExecutionPayloadStatus} from "./block/externalData.js"; import {ProcessBlockOpts} from "./block/types.js"; @@ -36,6 +36,24 @@ export type StateTransitionOpts = BlockExternalData & dontTransferCache?: boolean; }; +/** + * `state.clone()` invocation source tracked in metrics + */ +export enum StateCloneSource { + 
stateTransition = "stateTransition", + processSlots = "processSlots", +} + +/** + * `state.hashTreeRoot()` invocation source tracked in metrics + */ +export enum StateHashTreeRootSource { + stateTransition = "state_transition", + blockTransition = "block_transition", + prepareNextSlot = "prepare_next_slot", + computeNewStateRoot = "compute_new_state_root", +} + /** * Implementation Note: follows the optimizations in protolambda's eth2fastspec (https://github.com/protolambda/eth2fastspec) */ @@ -58,7 +76,7 @@ export function stateTransition( let postState = state.clone(options.dontTransferCache); if (metrics) { - onStateCloneMetrics(postState, metrics, "stateTransition"); + onStateCloneMetrics(postState, metrics, StateCloneSource.stateTransition); } // State is already a ViewDU, which won't commit changes. Equivalent to .setStateCachesAsTransient() @@ -96,7 +114,9 @@ export function stateTransition( // Verify state root if (verifyStateRoot) { - const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer(); + const hashTreeRootTimer = metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.stateTransition, + }); const stateRoot = postState.hashTreeRoot(); hashTreeRootTimer?.(); @@ -127,7 +147,7 @@ export function processSlots( let postState = state.clone(epochTransitionCacheOpts?.dontTransferCache); if (metrics) { - onStateCloneMetrics(postState, metrics, "processSlots"); + onStateCloneMetrics(postState, metrics, StateCloneSource.processSlots); } // State is already a ViewDU, which won't commit changes. 
Equivalent to .setStateCachesAsTransient() @@ -165,19 +185,33 @@ function processSlotsWithTransientCache( const epochTransitionTimer = metrics?.epochTransitionTime.startTimer(); - const epochTransitionCache = beforeProcessEpoch(postState, epochTransitionCacheOpts); - processEpoch(fork, postState, epochTransitionCache); + let epochTransitionCache: EpochTransitionCache; + { + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.beforeProcessEpoch}); + epochTransitionCache = beforeProcessEpoch(postState, epochTransitionCacheOpts); + timer?.(); + } + + processEpoch(fork, postState, epochTransitionCache, metrics); + const {currentEpoch, statuses, balances} = epochTransitionCache; metrics?.registerValidatorStatuses(currentEpoch, statuses, balances); postState.slot++; - postState.epochCtx.afterProcessEpoch(postState, epochTransitionCache); + + { + const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.afterProcessEpoch}); + postState.epochCtx.afterProcessEpoch(postState, epochTransitionCache); + timer?.(); + } // Running commit here is not strictly necessary. The cost of running commit twice (here + after process block) // Should be negligible but gives better metrics to differentiate the cost of it for block and epoch proc. - const epochTransitionCommitTimer = metrics?.epochTransitionCommitTime.startTimer(); - postState.commit(); - epochTransitionCommitTimer?.(); + { + const timer = metrics?.epochTransitionCommitTime.startTimer(); + postState.commit(); + timer?.(); + } // Note: time only on success. 
Include beforeProcessEpoch, processEpoch, afterProcessEpoch, commit epochTransitionTimer?.(); diff --git a/packages/state-transition/src/util/blindedBlock.ts b/packages/state-transition/src/util/blindedBlock.ts index 8c271e7fec81..5b6cf42d3cef 100644 --- a/packages/state-transition/src/util/blindedBlock.ts +++ b/packages/state-transition/src/util/blindedBlock.ts @@ -1,24 +1,9 @@ import {ChainForkConfig} from "@lodestar/config"; import {ForkSeq} from "@lodestar/params"; -import { - allForks, - phase0, - Root, - deneb, - ssz, - isBlindedBeaconBlock, - isBlindedBlobSidecar, - isSignedBlindedBlockContents, - isExecutionPayloadAndBlobsBundle, -} from "@lodestar/types"; +import {allForks, phase0, Root, deneb, isBlindedBeaconBlock, isExecutionPayloadAndBlobsBundle} from "@lodestar/types"; import {executionPayloadToPayloadHeader} from "./execution.js"; -type ParsedSignedBlindedBlockOrContents = { - signedBlindedBlock: allForks.SignedBlindedBeaconBlock; - signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars | null; -}; - export function blindedOrFullBlockHashTreeRoot( config: ChainForkConfig, blindedOrFull: allForks.FullOrBlindedBeaconBlock @@ -30,17 +15,6 @@ export function blindedOrFullBlockHashTreeRoot( config.getForkTypes(blindedOrFull.slot).BeaconBlock.hashTreeRoot(blindedOrFull); } -export function blindedOrFullBlobSidecarHashTreeRoot( - config: ChainForkConfig, - blindedOrFull: allForks.FullOrBlindedBlobSidecar -): Root { - return isBlindedBlobSidecar(blindedOrFull) - ? 
// Blinded - config.getBlobsForkTypes(blindedOrFull.slot).BlindedBlobSidecar.hashTreeRoot(blindedOrFull) - : // Full - config.getBlobsForkTypes(blindedOrFull.slot).BlobSidecar.hashTreeRoot(blindedOrFull); -} - export function blindedOrFullBlockToHeader( config: ChainForkConfig, blindedOrFull: allForks.FullOrBlindedBeaconBlock @@ -70,13 +44,6 @@ export function beaconBlockToBlinded( return blindedBlock; } -export function blobSidecarsToBlinded(blobSidecars: deneb.BlobSidecars): deneb.BlindedBlobSidecars { - return blobSidecars.map((blobSidecar) => { - const blobRoot = ssz.deneb.Blob.hashTreeRoot(blobSidecar.blob); - return {...blobSidecar, blobRoot} as deneb.BlindedBlobSidecar; - }); -} - export function signedBlindedBlockToFull( signedBlindedBlock: allForks.SignedBlindedBeaconBlock, executionPayload: allForks.ExecutionPayload | null @@ -100,33 +67,6 @@ export function signedBlindedBlockToFull( return signedBlock; } -export function signedBlindedBlobSidecarsToFull( - signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars, - blobs: deneb.Blobs -): deneb.SignedBlobSidecars { - const signedBlobSidecars = signedBlindedBlobSidecars.map((signedBlindedBlobSidecar, index) => { - const signedBlobSidecar = { - ...signedBlindedBlobSidecar, - message: {...signedBlindedBlobSidecar.message, blob: blobs[index]}, - }; - delete (signedBlobSidecar.message as {blobRoot?: deneb.BlindedBlob}).blobRoot; - return signedBlobSidecar; - }); - return signedBlobSidecars; -} - -export function parseSignedBlindedBlockOrContents( - signedBlindedBlockOrContents: allForks.SignedBlindedBeaconBlockOrContents -): ParsedSignedBlindedBlockOrContents { - if (isSignedBlindedBlockContents(signedBlindedBlockOrContents)) { - const signedBlindedBlock = signedBlindedBlockOrContents.signedBlindedBlock; - const signedBlindedBlobSidecars = signedBlindedBlockOrContents.signedBlindedBlobSidecars; - return {signedBlindedBlock, signedBlindedBlobSidecars}; - } else { - return {signedBlindedBlock: 
signedBlindedBlockOrContents, signedBlindedBlobSidecars: null}; - } -} - export function parseExecutionPayloadAndBlobsBundle( data: allForks.ExecutionPayload | allForks.ExecutionPayloadAndBlobsBundle ): {executionPayload: allForks.ExecutionPayload; blobsBundle: deneb.BlobsBundle | null} { @@ -141,27 +81,23 @@ export function parseExecutionPayloadAndBlobsBundle( } export function reconstructFullBlockOrContents( - {signedBlindedBlock, signedBlindedBlobSidecars}: ParsedSignedBlindedBlockOrContents, - {executionPayload, blobs}: {executionPayload: allForks.ExecutionPayload | null; blobs: deneb.Blobs | null} + signedBlindedBlock: allForks.SignedBlindedBeaconBlock, + { + executionPayload, + contents, + }: { + executionPayload: allForks.ExecutionPayload | null; + contents: deneb.Contents | null; + } ): allForks.SignedBeaconBlockOrContents { const signedBlock = signedBlindedBlockToFull(signedBlindedBlock, executionPayload); - if (signedBlindedBlobSidecars !== null) { + if (contents !== null) { if (executionPayload === null) { throw Error("Missing locally produced executionPayload for deneb+ publishBlindedBlock"); } - if (blobs === null) { - throw Error("Missing blobs from the local execution cache"); - } - if (blobs.length !== signedBlindedBlobSidecars.length) { - throw Error( - `Length mismatch signedBlindedBlobSidecars=${signedBlindedBlobSidecars.length} blobs=${blobs.length}` - ); - } - const signedBlobSidecars = signedBlindedBlobSidecarsToFull(signedBlindedBlobSidecars, blobs); - - return {signedBlock, signedBlobSidecars} as allForks.SignedBeaconBlockOrContents; + return {signedBlock, ...contents} as allForks.SignedBeaconBlockOrContents; } else { return signedBlock as allForks.SignedBeaconBlockOrContents; } diff --git a/packages/state-transition/src/util/blobs.ts b/packages/state-transition/src/util/blobs.ts deleted file mode 100644 index 8b6ea84362c4..000000000000 --- a/packages/state-transition/src/util/blobs.ts +++ /dev/null @@ -1,12 +0,0 @@ -import SHA256 from 
"@chainsafe/as-sha256"; -import {VERSIONED_HASH_VERSION_KZG} from "@lodestar/params"; -import {deneb} from "@lodestar/types"; - -type VersionHash = Uint8Array; - -export function kzgCommitmentToVersionedHash(kzgCommitment: deneb.KZGCommitment): VersionHash { - const hash = SHA256.digest(kzgCommitment); - // Equivalent to `VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]` - hash[0] = VERSIONED_HASH_VERSION_KZG; - return hash; -} diff --git a/packages/state-transition/src/util/blockRoot.ts b/packages/state-transition/src/util/blockRoot.ts index 7aa5de52cdfe..1e1df38ef4fe 100644 --- a/packages/state-transition/src/util/blockRoot.ts +++ b/packages/state-transition/src/util/blockRoot.ts @@ -54,3 +54,15 @@ export function blockToHeader(config: ChainForkConfig, block: allForks.BeaconBlo bodyRoot: config.getForkTypes(block.slot).BeaconBlockBody.hashTreeRoot(block.body), }; } + +export function signedBlockToSignedHeader( + config: ChainForkConfig, + signedBlock: allForks.SignedBeaconBlock +): phase0.SignedBeaconBlockHeader { + const message = blockToHeader(config, signedBlock.message); + const signature = signedBlock.signature; + return { + message, + signature, + }; +} diff --git a/packages/state-transition/src/util/index.ts b/packages/state-transition/src/util/index.ts index bbc9bf8a8654..3f2e91da9a77 100644 --- a/packages/state-transition/src/util/index.ts +++ b/packages/state-transition/src/util/index.ts @@ -4,7 +4,6 @@ export * from "./attestation.js"; export * from "./attesterStatus.js"; export * from "./balance.js"; export * from "./blindedBlock.js"; -export * from "./blobs.js"; export * from "./capella.js"; export * from "./execution.js"; export * from "./blockRoot.js"; diff --git a/packages/state-transition/src/util/loadState/index.ts b/packages/state-transition/src/util/loadState/index.ts new file mode 100644 index 000000000000..706de3c11540 --- /dev/null +++ b/packages/state-transition/src/util/loadState/index.ts @@ -0,0 +1 @@ +export {loadState} from 
"./loadState.js"; diff --git a/packages/state-transition/test/globalSetup.ts b/packages/state-transition/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/state-transition/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/state-transition/test/perf/epoch/epochAltair.test.ts b/packages/state-transition/test/perf/epoch/epochAltair.test.ts index 7e84a533e6c6..273353d8632b 100644 --- a/packages/state-transition/test/perf/epoch/epochAltair.test.ts +++ b/packages/state-transition/test/perf/epoch/epochAltair.test.ts @@ -6,7 +6,8 @@ import { CachedBeaconStateAltair, beforeProcessEpoch, } from "../../../src/index.js"; -import {getNetworkCachedState, beforeValue, LazyValue} from "../../utils/index.js"; +import {beforeValue, LazyValue} from "../../utils/beforeValueMocha.js"; +import {getNetworkCachedState} from "../../utils/testFileCache.js"; import {StateEpoch} from "../types.js"; import {altairState} from "../params.js"; import {processJustificationAndFinalization} from "../../../src/epoch/processJustificationAndFinalization.js"; diff --git a/packages/state-transition/test/perf/epoch/epochCapella.test.ts b/packages/state-transition/test/perf/epoch/epochCapella.test.ts index 36d5caf11a99..eeaf8bfc5400 100644 --- a/packages/state-transition/test/perf/epoch/epochCapella.test.ts +++ b/packages/state-transition/test/perf/epoch/epochCapella.test.ts @@ -7,7 +7,8 @@ import { CachedBeaconStateAltair, beforeProcessEpoch, } from "../../../src/index.js"; -import {getNetworkCachedState, beforeValue, LazyValue} from "../../utils/index.js"; +import {beforeValue, LazyValue} from "../../utils/beforeValueMocha.js"; +import {getNetworkCachedState} from "../../utils/testFileCache.js"; import {StateEpoch} from "../types.js"; import {capellaState} from "../params.js"; import {processJustificationAndFinalization} from 
"../../../src/epoch/processJustificationAndFinalization.js"; diff --git a/packages/state-transition/test/perf/epoch/epochPhase0.test.ts b/packages/state-transition/test/perf/epoch/epochPhase0.test.ts index ae6b5a536be2..4e43634b1669 100644 --- a/packages/state-transition/test/perf/epoch/epochPhase0.test.ts +++ b/packages/state-transition/test/perf/epoch/epochPhase0.test.ts @@ -6,7 +6,8 @@ import { CachedBeaconStatePhase0, beforeProcessEpoch, } from "../../../src/index.js"; -import {getNetworkCachedState, beforeValue, LazyValue} from "../../utils/index.js"; +import {beforeValue, LazyValue} from "../../utils/beforeValueMocha.js"; +import {getNetworkCachedState} from "../../utils/testFileCache.js"; import {StateEpoch} from "../types.js"; import {phase0State} from "../params.js"; import {processEpoch} from "../../../src/epoch/index.js"; diff --git a/packages/state-transition/test/perf/util/loadState/loadState.test.ts b/packages/state-transition/test/perf/util/loadState/loadState.test.ts index c0df6cf1af47..5d40c64f6ab4 100644 --- a/packages/state-transition/test/perf/util/loadState/loadState.test.ts +++ b/packages/state-transition/test/perf/util/loadState/loadState.test.ts @@ -1,5 +1,5 @@ import bls from "@chainsafe/bls"; -import {CoordType} from "@chainsafe/blst"; +import {CoordType} from "@chainsafe/bls/types"; import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {loadState} from "../../../../src/util/loadState/loadState.js"; import {createCachedBeaconState} from "../../../../src/cache/stateCache.js"; diff --git a/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts b/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts index c526ab13c17c..c219943b940f 100644 --- a/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts +++ b/packages/state-transition/test/unit/block/isValidIndexedAttestation.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from 
"vitest"; import {config} from "@lodestar/config/default"; import {FAR_FUTURE_EPOCH, MAX_EFFECTIVE_BALANCE} from "@lodestar/params"; import {phase0, ssz} from "@lodestar/types"; @@ -35,18 +35,16 @@ describe("validate indexed attestation", () => { }, ]; - for (const testValue of testValues) { - it(testValue.name, function () { - const attestationData = ssz.phase0.AttestationData.defaultValue(); - attestationData.source.epoch = 0; - attestationData.target.epoch = 1; + it.each(testValues)("$name", ({indices, expectedValue}) => { + const attestationData = ssz.phase0.AttestationData.defaultValue(); + attestationData.source.epoch = 0; + attestationData.target.epoch = 1; - const indexedAttestation: phase0.IndexedAttestation = { - attestingIndices: testValue.indices, - data: attestationData, - signature: EMPTY_SIGNATURE, - }; - expect(isValidIndexedAttestation(state, indexedAttestation, false)).to.be.equal(testValue.expectedValue); - }); - } + const indexedAttestation: phase0.IndexedAttestation = { + attestingIndices: indices, + data: attestationData, + signature: EMPTY_SIGNATURE, + }; + expect(isValidIndexedAttestation(state, indexedAttestation, false)).toBe(expectedValue); + }); }); diff --git a/packages/state-transition/test/unit/block/processWithdrawals.test.ts b/packages/state-transition/test/unit/block/processWithdrawals.test.ts index 628aae9496da..2841da635472 100644 --- a/packages/state-transition/test/unit/block/processWithdrawals.test.ts +++ b/packages/state-transition/test/unit/block/processWithdrawals.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {getExpectedWithdrawals} from "../../../src/block/processWithdrawals.js"; import {numValidators} from "../../perf/util.js"; import {getExpectedWithdrawalsTestData, WithdrawalOpts} from "../../utils/capella.js"; @@ -38,8 +38,8 @@ describe("getExpectedWithdrawals", () => { it(`getExpectedWithdrawals ${vc} ${caseID}`, () => { const {sampledValidators, 
withdrawals} = getExpectedWithdrawals(state.value); - expect(sampledValidators).equals(opts.sampled, "Wrong sampledValidators"); - expect(withdrawals.length).equals(opts.withdrawals, "Wrong withdrawals"); + expect(sampledValidators).toBe(opts.sampled); + expect(withdrawals.length).toBe(opts.withdrawals); }); } }); diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index 072261c1000e..2891cd3e6216 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -1,13 +1,14 @@ -import {expect} from "chai"; -import {ssz} from "@lodestar/types"; +import {describe, it, expect} from "vitest"; +import {Epoch, ssz, RootHex} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; -import {config} from "@lodestar/config/default"; +import {config as defaultConfig} from "@lodestar/config/default"; import {createBeaconConfig} from "@lodestar/config"; import {createCachedBeaconStateTest} from "../utils/state.js"; import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; -import {createCachedBeaconState, loadUnfinalizedCachedBeaconState} from "../../src/cache/stateCache.js"; +import {createCachedBeaconState, loadCachedBeaconState} from "../../src/cache/stateCache.js"; import {interopPubkeysCached} from "../utils/interop.js"; import {modifyStateSameValidator, newStateWithValidators} from "../utils/capella.js"; +import {EpochShuffling, getShufflingDecisionBlock} from "../../src/util/epochShuffling.js"; describe("CachedBeaconState", () => { it("Clone and mutate", () => { @@ -16,18 +17,15 @@ describe("CachedBeaconState", () => { const state2 = state1.clone(); state1.slot = 1; - expect(state2.slot).to.equal(0, "state2.slot was mutated"); + expect(state2.slot).toBe(0); const prevRoot = state2.currentJustifiedCheckpoint.root; const newRoot = Buffer.alloc(32, 1); state1.currentJustifiedCheckpoint.root = 
newRoot; - expect(toHexString(state2.currentJustifiedCheckpoint.root)).to.equal( - toHexString(prevRoot), - "state2.currentJustifiedCheckpoint.root was mutated" - ); + expect(toHexString(state2.currentJustifiedCheckpoint.root)).toBe(toHexString(prevRoot)); state1.epochCtx.epoch = 1; - expect(state2.epochCtx.epoch).to.equal(0, "state2.epochCtx.epoch was mutated"); + expect(state2.epochCtx.epoch).toBe(0); }); it("Auto-commit on hashTreeRoot", () => { @@ -40,10 +38,7 @@ describe("CachedBeaconState", () => { // Only commit state1 beforehand cp1.commit(); - expect(toHexString(cp1.hashTreeRoot())).to.equal( - toHexString(cp2.hashTreeRoot()), - ".hashTreeRoot() does not automatically commit" - ); + expect(toHexString(cp1.hashTreeRoot())).toBe(toHexString(cp2.hashTreeRoot())); }); it("Auto-commit on serialize", () => { @@ -55,10 +50,7 @@ describe("CachedBeaconState", () => { // Only commit state1 beforehand cp1.commit(); - expect(toHexString(cp1.serialize())).to.equal( - toHexString(cp2.serialize()), - ".serialize() does not automatically commit" - ); + expect(toHexString(cp1.serialize())).toBe(toHexString(cp2.serialize())); }); describe("loadCachedBeaconState", () => { @@ -66,10 +58,11 @@ describe("CachedBeaconState", () => { const pubkeys = interopPubkeysCached(2 * numValidator); const stateView = newStateWithValidators(numValidator); + const config = createBeaconConfig(defaultConfig, stateView.genesisValidatorsRoot); const seedState = createCachedBeaconState( stateView, { - config: createBeaconConfig(config, stateView.genesisValidatorsRoot), + config, pubkey2index: new PubkeyIndexMap(), index2pubkey: [], }, @@ -136,18 +129,52 @@ describe("CachedBeaconState", () => { // confirm loadState() result const stateBytes = state.serialize(); - const newCachedState = loadUnfinalizedCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); const newStateBytes = 
newCachedState.serialize(); - expect(newStateBytes).to.be.deep.equal(stateBytes, "loadState: state bytes are not equal"); - expect(newCachedState.hashTreeRoot()).to.be.deep.equal( - state.hashTreeRoot(), - "loadState: state root is not equal" + expect(newStateBytes).toEqual(stateBytes); + expect(newCachedState.hashTreeRoot()).toEqual(state.hashTreeRoot()); + const shufflingGetter = (shufflingEpoch: Epoch, dependentRoot: RootHex): EpochShuffling | null => { + if ( + shufflingEpoch === seedState.epochCtx.epoch - 1 && + dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) + ) { + return seedState.epochCtx.previousShuffling; + } + + if ( + shufflingEpoch === seedState.epochCtx.epoch && + dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) + ) { + return seedState.epochCtx.currentShuffling; + } + + if ( + shufflingEpoch === seedState.epochCtx.epoch + 1 && + dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) + ) { + return seedState.epochCtx.nextShuffling; + } + + return null; + }; + const cachedState = createCachedBeaconState( + state, + { + config, + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }, + {skipSyncCommitteeCache: true, shufflingGetter} ); + // validatorCountDelta < 0 is unrealistic and shuffling computation results in a different result + if (validatorCountDelta >= 0) { + expect(newCachedState.epochCtx).toEqual(cachedState.epochCtx); + } - // confirm loadUnfinalizedCachedBeaconState() result + // confirm loadCachedBeaconState() result for (let i = 0; i < newCachedState.validators.length; i++) { - expect(newCachedState.epochCtx.pubkey2index.get(newCachedState.validators.get(i).pubkey)).to.be.equal(i); - expect(newCachedState.epochCtx.index2pubkey[i].toBytes()).to.be.deep.equal(pubkeys[i]); + expect(newCachedState.epochCtx.pubkey2index.get(newCachedState.validators.get(i).pubkey)).toBe(i); + expect(newCachedState.epochCtx.index2pubkey[i].toBytes()).toEqual(pubkeys[i]); } }); } diff --git 
a/packages/state-transition/test/unit/constants.test.ts b/packages/state-transition/test/unit/constants.test.ts index d4975a8a86f5..5b8cc66da73a 100644 --- a/packages/state-transition/test/unit/constants.test.ts +++ b/packages/state-transition/test/unit/constants.test.ts @@ -1,10 +1,10 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import * as blst from "@chainsafe/blst"; import {G2_POINT_AT_INFINITY} from "../../src/index.js"; describe("constants", () => { it("G2_POINT_AT_INFINITY", () => { const p2 = blst.Signature.fromBytes(G2_POINT_AT_INFINITY); - expect(p2.value.is_inf()).to.equal(true, "is not infinity"); + expect(p2.value.is_inf()).toBe(true); }); }); diff --git a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts index 9e084dc783a3..1a5b15e1b041 100644 --- a/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts +++ b/packages/state-transition/test/unit/signatureSets/signatureSets.test.ts @@ -1,5 +1,5 @@ import crypto from "node:crypto"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import bls from "@chainsafe/bls"; import {BitArray} from "@chainsafe/ssz"; import {config} from "@lodestar/config/default"; @@ -67,7 +67,7 @@ describe("signatureSets", () => { const state = generateCachedState(config, {validators}); const signatureSets = getBlockSignatureSets(state, signedBlock); - expect(signatureSets.length).to.equal( + expect(signatureSets.length).toBe( // block signature 1 + // randao reveal diff --git a/packages/state-transition/test/unit/upgradeState.test.ts b/packages/state-transition/test/unit/upgradeState.test.ts index ba9ff187a26c..2ea8eef182ac 100644 --- a/packages/state-transition/test/unit/upgradeState.test.ts +++ b/packages/state-transition/test/unit/upgradeState.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {expect, describe, it} from "vitest"; import {ssz} 
from "@lodestar/types"; import {ForkName} from "@lodestar/params"; import {createBeaconConfig, ChainForkConfig, createChainForkConfig} from "@lodestar/config"; @@ -22,7 +22,7 @@ describe("upgradeState", () => { {skipSyncCommitteeCache: true} ); const newState = upgradeStateToDeneb(stateView); - expect(() => newState.toValue()).to.not.throw(); + expect(() => newState.toValue()).not.toThrow(); }); }); diff --git a/packages/state-transition/test/unit/util/aggregator.test.ts b/packages/state-transition/test/unit/util/aggregator.test.ts index f6e7bbd4a6ed..07fd3172926c 100644 --- a/packages/state-transition/test/unit/util/aggregator.test.ts +++ b/packages/state-transition/test/unit/util/aggregator.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect, beforeAll} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import { SYNC_COMMITTEE_SIZE, @@ -13,10 +13,10 @@ import {isAggregatorFromCommitteeLength, isSyncCommitteeAggregator} from "../../ describe("isAttestationAggregator", function () { const committeeLength = 130; - before("Ensure constants don't change", () => { + beforeAll(() => { expect({ TARGET_AGGREGATORS_PER_COMMITTEE, - }).to.deep.equal({ + }).toEqual({ TARGET_AGGREGATORS_PER_COMMITTEE: 16, }); }); @@ -28,8 +28,9 @@ describe("isAttestationAggregator", function () { "0x8191d16330837620f0ed85d0d3d52af5b56f7cec12658fa391814251d4b32977eb2e6ca055367354fd63175f8d1d2d7b0678c3c482b738f96a0df40bd06450d99c301a659b8396c227ed781abb37a1604297922219374772ab36b46b84817036" ) ); - expect(result).to.be.equal(false); + expect(result).toBe(false); }); + it("should be true", function () { const result = isAggregatorFromCommitteeLength( committeeLength, @@ -37,17 +38,17 @@ describe("isAttestationAggregator", function () { "0xa8f8bb92931234ca6d8a34530526bcd6a4cfa3bf33bd0470200dc8fa3ebdc3ba24bc8c6e994d58a0f884eb24336d746c01a29693ed0354c0862c2d5de5859e3f58747045182844d267ba232058f7df1867a406f63a1eb8afec0cf3f00a115125" ) ); - 
expect(result).to.be.equal(true); + expect(result).toBe(true); }); }); describe("isSyncCommitteeAggregator", function () { - before("Ensure constants don't change", () => { + beforeAll(() => { expect({ SYNC_COMMITTEE_SIZE, SYNC_COMMITTEE_SUBNET_COUNT, TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE, - }).to.deep.equal({ + }).toEqual({ SYNC_COMMITTEE_SIZE: 512, SYNC_COMMITTEE_SUBNET_COUNT: 4, TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE: 16, @@ -60,7 +61,7 @@ describe("isSyncCommitteeAggregator", function () { "0x8191d16330837620f0ed85d0d3d52af5b56f7cec12658fa391814251d4b32977eb2e6ca055367354fd63175f8d1d2d7b0678c3c482b738f96a0df40bd06450d99c301a659b8396c227ed781abb37a1604297922219374772ab36b46b84817036" ) ); - expect(result).to.be.equal(false); + expect(result).toBe(false); }); // NOTE: Invalid sig, bruteforced last characters to get a true result @@ -70,6 +71,6 @@ describe("isSyncCommitteeAggregator", function () { "0xa8f8bb92931234ca6d8a34530526bcd6a4cfa3bf33bd0470200dc8fa3ebdc3ba24bc8c6e994d58a0f884eb24336d746c01a29693ed0354c0862c2d5de5859e3f58747045182844d267ba232058f7df1867a406f63a1eb8afec0cf3f00a115142" ) ); - expect(result).to.be.equal(true); + expect(result).toBe(true); }); }); diff --git a/packages/state-transition/test/unit/util/balance.test.ts b/packages/state-transition/test/unit/util/balance.test.ts index dedb4023c5ca..5b666cb0524e 100644 --- a/packages/state-transition/test/unit/util/balance.test.ts +++ b/packages/state-transition/test/unit/util/balance.test.ts @@ -1,4 +1,4 @@ -import {assert, expect} from "chai"; +import {describe, it, expect} from "vitest"; import {config as minimalConfig} from "@lodestar/config/default"; import {EFFECTIVE_BALANCE_INCREMENT} from "@lodestar/params"; @@ -23,7 +23,7 @@ describe("getTotalBalance", () => { const result = getTotalBalance(state, validatorIndices); const expected = BigInt(num * validatorBalance); - assert(result === expected, `Expected: ${expected} :: Result: ${result}`); + expect(result).toEqual(expected); }); 
it("should return correct balances - 5 validators", () => { @@ -34,8 +34,8 @@ describe("getTotalBalance", () => { const validatorIndices: ValidatorIndex[] = Array.from({length: num}, (_, i) => i); const result = getTotalBalance(state, validatorIndices); - const expected = EFFECTIVE_BALANCE_INCREMENT; - assert(result === BigInt(expected), `Expected: ${expected} :: Result: ${result}`); + const expected = BigInt(EFFECTIVE_BALANCE_INCREMENT); + expect(result).toEqual(expected); }); }); @@ -43,12 +43,12 @@ describe("increaseBalance", () => { it("should add to a validators balance", () => { const state = generateCachedState(); state.balances.push(0); - expect(state.balances.get(0)).to.be.equal(0); + expect(state.balances.get(0)).toBe(0); const delta = 5; for (let i = 1; i < 10; i++) { increaseBalance(state, 0, delta); - expect(state.balances.get(0)).to.be.equal(delta * i); + expect(state.balances.get(0)).toBe(delta * i); } }); }); @@ -62,7 +62,7 @@ describe("decreaseBalance", () => { const delta = 5; for (let i = 1; i < 10; i++) { decreaseBalance(state, 0, delta); - expect(state.balances.get(0)).to.be.equal(initial - delta * i); + expect(state.balances.get(0)).toBe(initial - delta * i); } }); @@ -72,7 +72,7 @@ describe("decreaseBalance", () => { state.balances.push(initial); const delta = 11; decreaseBalance(state, 0, delta); - expect(state.balances.get(0)).to.be.equal(0); + expect(state.balances.get(0)).toBe(0); }); }); @@ -99,9 +99,6 @@ describe("getEffectiveBalanceIncrementsZeroInactive", () => { : 0; } - expect(getEffectiveBalanceIncrementsZeroInactive(justifiedState)).to.be.deep.equal( - effectiveBalances, - "wrong effectiveBalances" - ); + expect(getEffectiveBalanceIncrementsZeroInactive(justifiedState)).toEqual(effectiveBalances); }); }); diff --git a/packages/state-transition/test/unit/util/cachedBeaconState.test.ts b/packages/state-transition/test/unit/util/cachedBeaconState.test.ts index 1a34e2472b50..654e0752adb8 100644 --- 
a/packages/state-transition/test/unit/util/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/util/cachedBeaconState.test.ts @@ -1,3 +1,4 @@ +import {describe, it} from "vitest"; import {createBeaconConfig} from "@lodestar/config"; import {config} from "@lodestar/config/default"; import {ssz} from "@lodestar/types"; diff --git a/packages/state-transition/test/unit/util/epoch.test.ts b/packages/state-transition/test/unit/util/epoch.test.ts index fe404f93f852..e86a41875e1d 100644 --- a/packages/state-transition/test/unit/util/epoch.test.ts +++ b/packages/state-transition/test/unit/util/epoch.test.ts @@ -1,4 +1,4 @@ -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {GENESIS_SLOT, MAX_SEED_LOOKAHEAD} from "@lodestar/params"; import {Epoch, Slot} from "@lodestar/types"; @@ -12,7 +12,7 @@ import { import {generateState} from "../../utils/state.js"; describe("computeEpochAtSlot", () => { - const pairs = [ + it.each([ {test: 0, expected: 0}, {test: 1, expected: 0}, {test: 10, expected: 0}, @@ -21,17 +21,14 @@ describe("computeEpochAtSlot", () => { {test: 10000, expected: 312}, {test: 100000, expected: 3125}, {test: 1000000, expected: 31250}, - ]; - for (const pair of pairs) { - it(`Slot ${pair.test} should map to epoch ${pair.expected}`, () => { - const result: Epoch = computeEpochAtSlot(pair.test); - assert.equal(result, pair.expected); - }); - } + ])("Slot $test should map to epoch $expected", ({test, expected}) => { + const result: Epoch = computeEpochAtSlot(test); + expect(result).toEqual(expected); + }); }); describe("computeStartSlotAtEpoch", () => { - const pairs = [ + it.each([ {test: 0, expected: 0}, {test: 1, expected: 32}, {test: 10, expected: 320}, @@ -40,38 +37,31 @@ describe("computeStartSlotAtEpoch", () => { {test: 10000, expected: 320000}, {test: 100000, expected: 3200000}, {test: 1000000, expected: 32000000}, - ]; - for (const pair of pairs) { - it(`Epoch ${pair.test} should map to slot ${pair.expected}`, 
() => { - const result: Slot = computeStartSlotAtEpoch(pair.test); - assert.equal(result, pair.expected); - }); - } + ])("Epoch $test should map to slot $expected", ({test, expected}) => { + const result: Slot = computeStartSlotAtEpoch(test); + expect(result).toEqual(expected); + }); }); describe("getPreviousEpoch", () => { - const testValues = [ + it.each([ {slot: 512, expectedEpoch: 15}, {slot: 256, expectedEpoch: 7}, { slot: GENESIS_SLOT, expectedEpoch: computeEpochAtSlot(GENESIS_SLOT), }, - ]; - - for (const testValue of testValues) { - it("epoch should return previous epoch", () => { - const state = generateState({slot: testValue.slot}); - const result = getPreviousEpoch(state); - assert.equal(result, testValue.expectedEpoch); - }); - } + ])("epoch should return previous epoch", ({slot, expectedEpoch}) => { + const state = generateState({slot}); + const result = getPreviousEpoch(state); + expect(result).toEqual(expectedEpoch); + }); }); describe("computeActivationExitEpoch", () => { it("epoch is always equal to the epoch after the exit delay", () => { for (let e: Epoch = 0; e < 1000; e++) { - assert.equal(computeActivationExitEpoch(e), e + 1 + MAX_SEED_LOOKAHEAD); + expect(computeActivationExitEpoch(e)).toEqual(e + 1 + MAX_SEED_LOOKAHEAD); } }); }); diff --git a/packages/state-transition/test/unit/util/flags.test.ts b/packages/state-transition/test/unit/util/flags.test.ts index 6566ef8205be..07a8ce3fe097 100644 --- a/packages/state-transition/test/unit/util/flags.test.ts +++ b/packages/state-transition/test/unit/util/flags.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; describe("Altair status flags", () => { for (let prev = 0b000; prev <= 0b111; prev++) { @@ -7,7 +7,7 @@ describe("Altair status flags", () => { expect( // Actual function toStr(getResFlags(prev, att)) - ).to.equal( + ).toBe( // Naive but correct implementation toStr(getResFlagsNaive(prev, att)) ); diff --git 
a/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts b/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts index e1ad0cf972da..85697af2b7c1 100644 --- a/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts +++ b/packages/state-transition/test/unit/util/loadState/findModifiedInactivityScores.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import { INACTIVITY_SCORE_SIZE, findModifiedInactivityScores, @@ -27,7 +27,7 @@ describe("findModifiedInactivityScores", () => { } const modifiedValidators: number[] = []; findModifiedInactivityScores(inactivityScoresBytes, inactivityScoresBytes2, modifiedValidators); - expect(modifiedValidators.sort((a, b) => a - b)).to.be.deep.equal(expectedModifiedValidators); + expect(modifiedValidators.sort((a, b) => a - b)).toEqual(expectedModifiedValidators); }); } }); diff --git a/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts b/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts index aa2378276d22..25c6233d2738 100644 --- a/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts +++ b/packages/state-transition/test/unit/util/loadState/findModifiedValidators.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {fromHexString} from "@chainsafe/ssz"; import {findModifiedValidators} from "../../../../src/util/loadState/findModifiedValidators.js"; import {generateState} from "../../../utils/state.js"; @@ -35,7 +35,7 @@ describe("findModifiedValidators", () => { const validatorsBytes2 = clonedState.validators.serialize(); const modifiedValidators: number[] = []; findModifiedValidators(validatorsBytes, validatorsBytes2, modifiedValidators); - expect(modifiedValidators.sort((a, b) => a - b)).to.be.deep.equal(expectedModifiedValidators); + 
expect(modifiedValidators.sort((a, b) => a - b)).toEqual(expectedModifiedValidators); }); } }); diff --git a/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts b/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts index 7c3112537490..9a2094531813 100644 --- a/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts +++ b/packages/state-transition/test/unit/util/loadState/loadValidator.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {CompositeViewDU} from "@chainsafe/ssz"; import {phase0, ssz} from "@lodestar/types"; import {loadValidator} from "../../../../src/util/loadState/loadValidator.js"; @@ -105,19 +105,11 @@ describe("loadValidator", () => { }, ]; - for (const {name, getValidator} of testCases) { - it(name, () => { - const newValidator = getValidator(); - const newValidatorBytes = newValidator.serialize(); - const loadedValidator = loadValidator(validator, newValidatorBytes); - expect(Buffer.compare(loadedValidator.hashTreeRoot(), newValidator.hashTreeRoot())).to.be.equal( - 0, - "root is not correct" - ); - expect(Buffer.compare(loadedValidator.serialize(), newValidator.serialize())).to.be.equal( - 0, - "serialized value is not correct" - ); - }); - } + it.each(testCases)("$name", ({getValidator}) => { + const newValidator = getValidator(); + const newValidatorBytes = newValidator.serialize(); + const loadedValidator = loadValidator(validator, newValidatorBytes); + expect(Buffer.compare(loadedValidator.hashTreeRoot(), newValidator.hashTreeRoot())).toBe(0); + expect(Buffer.compare(loadedValidator.serialize(), newValidator.serialize())).toBe(0); + }); }); diff --git a/packages/state-transition/test/unit/util/misc.test.ts b/packages/state-transition/test/unit/util/misc.test.ts index f487ad41aca2..5651da5ac5d1 100644 --- a/packages/state-transition/test/unit/util/misc.test.ts +++ b/packages/state-transition/test/unit/util/misc.test.ts @@ -1,4 
+1,4 @@ -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {toBigIntLE} from "bigint-buffer"; import {GENESIS_SLOT, SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; @@ -13,14 +13,14 @@ describe("getBlockRoot", () => { }); const res = Buffer.from(getBlockRoot(state, GENESIS_SLOT)); const expectedRes = BigInt("0xab"); - assert(toBigIntLE(res) === expectedRes, `got: ${toBigIntLE(res)}, expected: ${expectedRes.toString(16)}`); + expect(toBigIntLE(res)).toEqual(expectedRes); }); it("should fail if slot is current slot", () => { const state = generateState({slot: GENESIS_SLOT}); - assert.throws(() => getBlockRoot(state, GENESIS_SLOT), ""); + expect(() => getBlockRoot(state, GENESIS_SLOT)).toThrow(""); }); it("should fail if slot is not within SLOTS_PER_HISTORICAL_ROOT of current slot", () => { const state = generateState({slot: GENESIS_SLOT + SLOTS_PER_HISTORICAL_ROOT + 1}); - assert.throws(() => getBlockRoot(state, GENESIS_SLOT), ""); + expect(() => getBlockRoot(state, GENESIS_SLOT)).toThrow(""); }); }); diff --git a/packages/state-transition/test/unit/util/seed.test.ts b/packages/state-transition/test/unit/util/seed.test.ts index ccdc3332cdba..7f7c0e1f8bc7 100644 --- a/packages/state-transition/test/unit/util/seed.test.ts +++ b/packages/state-transition/test/unit/util/seed.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {toHexString} from "@chainsafe/ssz"; import {GENESIS_EPOCH, GENESIS_SLOT, SLOTS_PER_EPOCH} from "@lodestar/params"; @@ -16,7 +16,7 @@ describe("getRandaoMix", () => { state.randaoMixes.set(0, randaoMix1); const res = getRandaoMix(state, GENESIS_EPOCH); - expect(toHexString(res)).to.equal(toHexString(randaoMix1)); + expect(toHexString(res)).toBe(toHexString(randaoMix1)); }); it("should return second randao mix for GENESIS_EPOCH + 1", () => { // Empty state in 2nd epoch @@ -25,6 +25,6 @@ describe("getRandaoMix", () => { state.randaoMixes.set(1, randaoMix2); const 
res = getRandaoMix(state, GENESIS_EPOCH + 1); - expect(toHexString(res)).to.equal(toHexString(randaoMix2)); + expect(toHexString(res)).toBe(toHexString(randaoMix2)); }); }); diff --git a/packages/state-transition/test/unit/util/shuffle.test.ts b/packages/state-transition/test/unit/util/shuffle.test.ts index 7cb1e54e2619..9186968674ae 100644 --- a/packages/state-transition/test/unit/util/shuffle.test.ts +++ b/packages/state-transition/test/unit/util/shuffle.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {expect, describe, it} from "vitest"; import {unshuffleList} from "../../../src/index.js"; describe("util / shuffle", () => { @@ -22,10 +22,8 @@ describe("util / shuffle", () => { const seed = new Uint8Array([42, 32]); - for (const {id, input, res} of testCases) { - it(id, () => { - unshuffleList(input, seed); - expect(input).to.deep.equal(res); - }); - } + it.each(testCases)("$id", ({input, res}) => { + unshuffleList(input, seed); + expect(input).toEqual(res); + }); }); diff --git a/packages/state-transition/test/unit/util/slashing.test.ts b/packages/state-transition/test/unit/util/slashing.test.ts index 411933080792..49a7b6454c25 100644 --- a/packages/state-transition/test/unit/util/slashing.test.ts +++ b/packages/state-transition/test/unit/util/slashing.test.ts @@ -1,4 +1,4 @@ -import {assert} from "chai"; +import {expect, it, describe} from "vitest"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {Epoch, phase0, ssz} from "@lodestar/types"; @@ -11,7 +11,7 @@ describe("isSlashableAttestationData", () => { const epoch2 = epoch1 + 1; const a1 = getAttestationDataAt(epoch1, epoch2); const a2 = getAttestationDataAt(epoch1 - 1, epoch2); - assert.isTrue(isSlashableAttestationData(a1, a2)); + expect(isSlashableAttestationData(a1, a2)).toBe(true); }); it("Attestation data with disjoint source/target epochs should return false", () => { @@ -21,7 +21,7 @@ describe("isSlashableAttestationData", () => { const epoch4 = epoch1 + 3; const a1 = 
getAttestationDataAt(epoch1, epoch2); const a2 = getAttestationDataAt(epoch3, epoch4); - assert.isFalse(isSlashableAttestationData(a1, a2)); + expect(isSlashableAttestationData(a1, a2)).toBe(false); }); it("Should return false if the second attestation does not have a greater source epoch", () => { @@ -35,12 +35,12 @@ describe("isSlashableAttestationData", () => { const a1 = getAttestationDataAt(sourceEpoch1, targetEpoch1); const a2Hi = getAttestationDataAt(sourceEpoch2Hi, targetEpoch2); - assert.isFalse(isSlashableAttestationData(a1, a2Hi)); + expect(isSlashableAttestationData(a1, a2Hi)).toBe(false); // Second attestation has a smaller source epoch. const sourceEpoch2Lo = sourceEpoch1 - 1; const a2Lo = getAttestationDataAt(sourceEpoch2Lo, targetEpoch2); - assert.isFalse(isSlashableAttestationData(a1, a2Lo)); + expect(isSlashableAttestationData(a1, a2Lo)).toBe(false); }); it("Should return false if the second attestation does not have a smaller target epoch", () => { @@ -58,14 +58,14 @@ describe("isSlashableAttestationData", () => { let a1 = getAttestationDataAt(targetSlot1, sourceEpoch1); let a2 = getAttestationDataAt(targetSlot2, sourceEpoch2); - assert.isFalse(isSlashableAttestationData(a1, a2)); + expect(isSlashableAttestationData(a1, a2)).toBe(false); // Second attestation has a greater target epoch. 
targetSlot1 = targetEpoch * SLOTS_PER_EPOCH; targetSlot2 = (targetEpoch + 1) * SLOTS_PER_EPOCH; a1 = getAttestationDataAt(targetSlot1, sourceEpoch1); a2 = getAttestationDataAt(targetSlot2, sourceEpoch2); - assert.isFalse(isSlashableAttestationData(a1, a2)); + expect(isSlashableAttestationData(a1, a2)).toBe(false); }); }); diff --git a/packages/state-transition/test/unit/util/slot.test.ts b/packages/state-transition/test/unit/util/slot.test.ts index f25a1f7b9fe9..c9546ad60043 100644 --- a/packages/state-transition/test/unit/util/slot.test.ts +++ b/packages/state-transition/test/unit/util/slot.test.ts @@ -1,4 +1,4 @@ -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {Slot} from "@lodestar/types"; import {computeSlotsSinceEpochStart} from "../../../src/util/index.js"; @@ -14,7 +14,7 @@ describe("computeSlotsSinceEpochStart", () => { for (const pair of pairs) { it(`Slot ${pair.test} is ${pair.expected} from current Epoch start`, () => { const result: Slot = computeSlotsSinceEpochStart(pair.test); - assert.equal(result, pair.expected); + expect(result).toEqual(pair.expected); }); } @@ -23,6 +23,6 @@ describe("computeSlotsSinceEpochStart", () => { const slot = 70; const result = computeSlotsSinceEpochStart(slot, epoch); // 70 - NUM_SLOT_PER_EPOCH - assert.equal(result, 38); + expect(result).toEqual(38); }); }); diff --git a/packages/state-transition/test/unit/util/validator.test.ts b/packages/state-transition/test/unit/util/validator.test.ts index 7a79ce8474f7..65727126742d 100644 --- a/packages/state-transition/test/unit/util/validator.test.ts +++ b/packages/state-transition/test/unit/util/validator.test.ts @@ -1,4 +1,4 @@ -import {assert, expect} from "chai"; +import {describe, it, expect, beforeEach} from "vitest"; import {phase0, ssz} from "@lodestar/types"; @@ -10,7 +10,7 @@ import {generateState} from "../../utils/state.js"; describe("getActiveValidatorIndices", () => { it("empty list of validators should return no indices (empty 
list)", () => { - assert.deepEqual(getActiveValidatorIndices(generateState(), randBetween(0, 4)), []); + expect(getActiveValidatorIndices(generateState(), randBetween(0, 4))).toStrictEqual([]); }); it("list of cloned validators should return all or none", () => { const state = generateState(); @@ -22,8 +22,8 @@ describe("getActiveValidatorIndices", () => { const allActiveIndices = state.validators.getAllReadonlyValues().map((_, i) => i); const allInactiveIndices: any = []; - assert.deepEqual(getActiveValidatorIndices(state, activationEpoch), allActiveIndices); - assert.deepEqual(getActiveValidatorIndices(state, exitEpoch), allInactiveIndices); + expect(getActiveValidatorIndices(state, activationEpoch)).toStrictEqual(allActiveIndices); + expect(getActiveValidatorIndices(state, exitEpoch)).toStrictEqual(allInactiveIndices); }); }); @@ -41,7 +41,7 @@ describe("isActiveValidator", () => { it(`should be ${testValue.expected ? "" : "not "}active`, () => { const v: phase0.Validator = generateValidator(testValue.validatorOpts); const result: boolean = isActiveValidator(v, testValue.epoch); - expect(result).to.be.equal(testValue.expected); + expect(result).toBe(testValue.expected); }); } }); @@ -57,28 +57,31 @@ describe("isSlashableValidator", () => { validator.activationEpoch = 0; validator.withdrawableEpoch = Infinity; validator.slashed = false; - assert(isSlashableValidator(validator, 0), "unslashed validator should be slashable"); + expect(isSlashableValidator(validator, 0)).toBeWithMessage(true, "unslashed validator should be slashable"); validator.slashed = true; - assert(!isSlashableValidator(validator, 0), "slashed validator should not be slashable"); + expect(!isSlashableValidator(validator, 0)).toBeWithMessage(true, "slashed validator should not be slashable"); }); it("should check validator.activationEpoch", () => { validator.activationEpoch = 10; validator.withdrawableEpoch = Infinity; - assert( - !isSlashableValidator(validator, validator.activationEpoch - 1), 
+ expect(!isSlashableValidator(validator, validator.activationEpoch - 1)).toBeWithMessage( + true, "unactivated validator should not be slashable" ); - assert(isSlashableValidator(validator, validator.activationEpoch), "activated validator should be slashable"); + expect(isSlashableValidator(validator, validator.activationEpoch)).toBeWithMessage( + true, + "activated validator should be slashable" + ); }); it("should check validator.withdrawableEpoch", () => { validator.activationEpoch = 0; validator.withdrawableEpoch = 10; - assert( - isSlashableValidator(validator, validator.withdrawableEpoch - 1), + expect(isSlashableValidator(validator, validator.withdrawableEpoch - 1)).toBeWithMessage( + true, "nonwithdrawable validator should be slashable" ); - assert( - !isSlashableValidator(validator, validator.withdrawableEpoch), + expect(!isSlashableValidator(validator, validator.withdrawableEpoch)).toBeWithMessage( + true, "withdrawable validator should not be slashable" ); }); diff --git a/packages/state-transition/test/unit/util/weakSubjectivity.test.ts b/packages/state-transition/test/unit/util/weakSubjectivity.test.ts index a421caed64ed..5f5c784e975a 100644 --- a/packages/state-transition/test/unit/util/weakSubjectivity.test.ts +++ b/packages/state-transition/test/unit/util/weakSubjectivity.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {config} from "@lodestar/config/default"; import {computeWeakSubjectivityPeriodFromConstituents} from "../../../src/util/weakSubjectivity.js"; import {getChurnLimit} from "../../../src/util/validator.js"; @@ -23,16 +23,17 @@ describe("weak subjectivity tests", () => { {avgValBalance: balance32, valCount: 1048576, wsPeriod: 3532}, ]; - for (const {avgValBalance, valCount, wsPeriod} of testValues) { - it(`should have wsPeriod: ${wsPeriod} with avgValBalance: ${avgValBalance} and valCount: ${valCount}`, () => { + it.each(testValues)( + "should have wsPeriod: $wsPeriod with 
avgValBalance: $avgValBalance and valCount: $valCount", + ({valCount, avgValBalance}) => { const wsPeriod = computeWeakSubjectivityPeriodFromConstituents( valCount, avgValBalance * valCount, getChurnLimit(config, valCount), config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY ); - expect(wsPeriod).to.equal(wsPeriod); - }); - } + expect(wsPeriod).toBe(wsPeriod); + } + ); }); }); diff --git a/packages/state-transition/test/utils/beforeValue.ts b/packages/state-transition/test/utils/beforeValue.ts index 0d5f8f77d203..61ae3daa32a1 100644 --- a/packages/state-transition/test/utils/beforeValue.ts +++ b/packages/state-transition/test/utils/beforeValue.ts @@ -1,3 +1,5 @@ +import {beforeAll} from "vitest"; + export type LazyValue = {value: T}; /** @@ -12,10 +14,9 @@ export type LazyValue = {value: T}; export function beforeValue(fn: () => T | Promise, timeout?: number): LazyValue { let value: T = null as unknown as T; - before(async function () { - this.timeout(timeout ?? 300_000); + beforeAll(async function () { value = await fn(); - }); + }, timeout ?? 300_000); return new Proxy<{value: T}>( {value}, diff --git a/packages/state-transition/test/utils/beforeValueMocha.ts b/packages/state-transition/test/utils/beforeValueMocha.ts new file mode 100644 index 000000000000..0d5f8f77d203 --- /dev/null +++ b/packages/state-transition/test/utils/beforeValueMocha.ts @@ -0,0 +1,36 @@ +export type LazyValue = {value: T}; + +/** + * Register a callback to compute a value in the before() block of mocha tests + * ```ts + * const state = beforeValue(() => getState()) + * it("test", () => { + * doTest(state.value) + * }) + * ``` + */ +export function beforeValue(fn: () => T | Promise, timeout?: number): LazyValue { + let value: T = null as unknown as T; + + before(async function () { + this.timeout(timeout ?? 
300_000); + value = await fn(); + }); + + return new Proxy<{value: T}>( + {value}, + { + get: function (target, prop) { + if (prop === "value") { + if (value === null) { + throw Error("beforeValue has not yet run the before() block"); + } else { + return value; + } + } else { + return undefined; + } + }, + } + ); +} diff --git a/packages/state-transition/test/utils/capella.ts b/packages/state-transition/test/utils/capella.ts index e2cdc47b7e1d..7ef9248a5675 100644 --- a/packages/state-transition/test/utils/capella.ts +++ b/packages/state-transition/test/utils/capella.ts @@ -1,7 +1,12 @@ import crypto from "node:crypto"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; -import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX, SLOTS_PER_EPOCH} from "@lodestar/params"; +import { + BLS_WITHDRAWAL_PREFIX, + ETH1_ADDRESS_WITHDRAWAL_PREFIX, + SLOTS_PER_EPOCH, + SLOTS_PER_HISTORICAL_ROOT, +} from "@lodestar/params"; import {BeaconStateCapella, CachedBeaconStateCapella} from "../../src/index.js"; import {createCachedBeaconStateTest} from "./state.js"; import {mulberry32} from "./rand.js"; @@ -67,10 +72,17 @@ export function newStateWithValidators(numValidator: number): BeaconStateCapella const capellaStateType = ssz.capella.BeaconState; const stateView = capellaStateType.defaultViewDU(); stateView.slot = config.CAPELLA_FORK_EPOCH * SLOTS_PER_EPOCH + 100; + for (let i = 0; i < SLOTS_PER_HISTORICAL_ROOT; i++) { + stateView.blockRoots.set(i, crypto.randomBytes(32)); + } for (let i = 0; i < numValidator; i++) { const validator = ssz.phase0.Validator.defaultViewDU(); validator.pubkey = pubkeys[i]; + // make all validators active + validator.activationEpoch = 0; + validator.exitEpoch = Infinity; + validator.effectiveBalance = 32e9; stateView.validators.push(validator); stateView.balances.push(32); stateView.inactivityScores.push(0); @@ -85,8 +97,9 @@ export function newStateWithValidators(numValidator: number): BeaconStateCapella * 
Modify a state without changing number of validators */ export function modifyStateSameValidator(seedState: BeaconStateCapella): BeaconStateCapella { + const slotDiff = 10; const state = seedState.clone(); - state.slot = seedState.slot + 10; + state.slot = seedState.slot + slotDiff; state.latestBlockHeader = ssz.phase0.BeaconBlockHeader.toViewDU({ slot: state.slot, proposerIndex: 0, @@ -94,6 +107,9 @@ export function modifyStateSameValidator(seedState: BeaconStateCapella): BeaconS stateRoot: state.hashTreeRoot(), bodyRoot: ssz.phase0.BeaconBlockBody.hashTreeRoot(ssz.phase0.BeaconBlockBody.defaultValue()), }); + for (let i = 1; i <= slotDiff; i++) { + state.blockRoots.set((seedState.slot + i) % SLOTS_PER_HISTORICAL_ROOT, crypto.randomBytes(32)); + } state.blockRoots.set(0, crypto.randomBytes(32)); state.stateRoots.set(0, crypto.randomBytes(32)); state.historicalRoots.push(crypto.randomBytes(32)); diff --git a/packages/state-transition/test/utils/index.ts b/packages/state-transition/test/utils/index.ts deleted file mode 100644 index 4a79d4371787..000000000000 --- a/packages/state-transition/test/utils/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./beforeValue.js"; -export * from "./testFileCache.js"; diff --git a/packages/state-transition/vitest.config.ts b/packages/state-transition/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/state-transition/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json index a55937bf5acb..586be26fccab 100644 --- a/packages/test-utils/package.json +++ b/packages/test-utils/package.json @@ -1,7 +1,7 @@ { "name": "@lodestar/test-utils", "private": true, - "version": "1.12.0", + 
"version": "1.13.0", "description": "Test utilities reused across other packages", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -63,8 +63,8 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/bls-keystore": "^2.0.0", - "@lodestar/params": "^1.12.0", - "@lodestar/utils": "^1.12.0", + "@lodestar/params": "^1.13.0", + "@lodestar/utils": "^1.13.0", "axios": "^1.3.4", "chai": "^4.3.7", "mocha": "^10.2.0", diff --git a/packages/test-utils/src/cli.ts b/packages/test-utils/src/cli.ts index 13689da5bea9..8b4a84ec467a 100644 --- a/packages/test-utils/src/cli.ts +++ b/packages/test-utils/src/cli.ts @@ -30,11 +30,19 @@ export async function runCliCommand( return wrapTimeout( // eslint-disable-next-line no-async-promise-executor new Promise(async (resolve, reject) => { - await cli.parseAsync(parseArgs(args), {}, (err, _argv, output) => { - if (err) return reject(err); + try { + await cli + .parseAsync(parseArgs(args), {}, (err, _argv, output) => { + if (err) return reject(err); - resolve(output); - }); + resolve(output); + }) + .catch(() => { + // We are suppressing error here as we are throwing from inside the callback + }); + } catch (err) { + reject(err); + } }), opts.timeoutMs ); diff --git a/packages/test-utils/src/mocha.ts b/packages/test-utils/src/mocha.ts index edf8053a60df..7b8c10ad5342 100644 --- a/packages/test-utils/src/mocha.ts +++ b/packages/test-utils/src/mocha.ts @@ -82,22 +82,6 @@ function wrapLogWriter(...writers: [writer: object, ...keys: string[]][]): { }; } -export function stubLoggerForProcessStd( - logger: T -): T & {getLogs: () => string[]; restoreStubs: () => void} { - const {flush: flushStdout, restore: restoreStdout} = wrapLogWriter( - [process.stdout, "write"], - [process.stderr, "write"] - ); - - return Object.assign(logger, { - getLogs: () => flushStdout(), - restoreStubs: () => { - restoreStdout(); - }, - }); -} - export function stubLoggerForConsole( logger: T ): T & {getLogs: () => string[]; restoreStubs: () => void} { diff 
--git a/packages/types/karma.config.cjs b/packages/types/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/types/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/types/package.json b/packages/types/package.json index e5e6d4fd5e25..1a6f3ebf1570 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": { ".": { @@ -61,14 +61,19 @@ "check-types": "tsc", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", - "test:unit": "mocha 'test/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", + "test:constants:minimal": "LODESTAR_PRESET=minimal vitest --run --dir test/constants/ --coverage", + "test:constants:mainnet": "LODESTAR_PRESET=mainnet vitest --run --dir test/constants/ --coverage", + "test:unit": "yarn test:constants:minimal && yarn test:constants:mainnet && vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", "dependencies": { "@chainsafe/ssz": "^0.14.0", - "@lodestar/params": "^1.12.0" + "@lodestar/params": "^1.13.0" }, "keywords": 
[ "ethereum", diff --git a/packages/types/src/allForks/sszTypes.ts b/packages/types/src/allForks/sszTypes.ts index 463e5c57bd0d..7174bc52e89c 100644 --- a/packages/types/src/allForks/sszTypes.ts +++ b/packages/types/src/allForks/sszTypes.ts @@ -155,7 +155,6 @@ export const allForksLightClient = { export const allForksBlobs = { deneb: { BlobSidecar: deneb.BlobSidecar, - BlindedBlobSidecar: deneb.BlindedBlobSidecar, ExecutionPayloadAndBlobsBundle: deneb.ExecutionPayloadAndBlobsBundle, }, }; diff --git a/packages/types/src/allForks/types.ts b/packages/types/src/allForks/types.ts index 01c597b8a245..59768a5a3308 100644 --- a/packages/types/src/allForks/types.ts +++ b/packages/types/src/allForks/types.ts @@ -68,31 +68,17 @@ export type FullOrBlindedBeaconBlockBody = BeaconBlockBody | BlindedBeaconBlockB export type FullOrBlindedBeaconBlock = BeaconBlock | BlindedBeaconBlock; export type FullOrBlindedSignedBeaconBlock = SignedBeaconBlock | SignedBlindedBeaconBlock; -export type FullOrBlindedBlobSidecar = deneb.BlobSidecar | deneb.BlindedBlobSidecar; -export type FullOrBlindedSignedBlobSidecar = deneb.SignedBlobSidecar | deneb.SignedBlindedBlobSidecar; - -export type FullOrBlindedBlobSidecars = deneb.BlobSidecars | deneb.BlindedBlobSidecars; -export type BlockContents = {block: BeaconBlock; blobSidecars: deneb.BlobSidecars}; +export type BlockContents = {block: BeaconBlock; kzgProofs: deneb.KZGProofs; blobs: deneb.Blobs}; export type SignedBlockContents = { signedBlock: SignedBeaconBlock; - signedBlobSidecars: deneb.SignedBlobSidecars; + kzgProofs: deneb.KZGProofs; + blobs: deneb.Blobs; }; -export type BlindedBlockContents = { - blindedBlock: BlindedBeaconBlock; - blindedBlobSidecars: deneb.BlindedBlobSidecars; -}; -export type SignedBlindedBlockContents = { - signedBlindedBlock: SignedBlindedBeaconBlock; - signedBlindedBlobSidecars: deneb.SignedBlindedBlobSidecars; -}; - -export type FullOrBlindedBlockContents = BlockContents | BlindedBlockContents; -export type 
FullOrBlindedBeaconBlockOrContents = FullOrBlindedBeaconBlock | FullOrBlindedBlockContents; export type BeaconBlockOrContents = BeaconBlock | BlockContents; -export type BlindedBeaconBlockOrContents = BlindedBeaconBlock | BlindedBlockContents; export type SignedBeaconBlockOrContents = SignedBeaconBlock | SignedBlockContents; -export type SignedBlindedBeaconBlockOrContents = SignedBlindedBeaconBlock | SignedBlindedBlockContents; + +export type FullOrBlindedBeaconBlockOrContents = BeaconBlockOrContents | BlindedBeaconBlock; export type BuilderBid = bellatrix.BuilderBid | capella.BuilderBid | deneb.BuilderBid; export type SignedBuilderBid = bellatrix.SignedBuilderBid | capella.SignedBuilderBid | deneb.SignedBuilderBid; @@ -308,6 +294,5 @@ export type AllForksLightClientSSZTypes = { export type AllForksBlobsSSZTypes = { BlobSidecar: AllForksTypeOf; - BlindedBlobSidecar: AllForksTypeOf; ExecutionPayloadAndBlobsBundle: AllForksTypeOf; }; diff --git a/packages/types/src/deneb/sszTypes.ts b/packages/types/src/deneb/sszTypes.ts index 96509d1d898b..b39e5f6281e1 100644 --- a/packages/types/src/deneb/sszTypes.ts +++ b/packages/types/src/deneb/sszTypes.ts @@ -8,6 +8,7 @@ import { BLOCK_BODY_EXECUTION_PAYLOAD_DEPTH as EXECUTION_PAYLOAD_DEPTH, EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SLOTS_PER_EPOCH, + KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, } from "@lodestar/params"; import {ssz as primitiveSsz} from "../primitive/index.js"; import {ssz as phase0Ssz} from "../phase0/index.js"; @@ -15,20 +16,8 @@ import {ssz as altairSsz} from "../altair/index.js"; import {ssz as bellatrixSsz} from "../bellatrix/index.js"; import {ssz as capellaSsz} from "../capella/index.js"; -const { - UintNum64, - Slot, - Root, - BLSSignature, - UintBn64, - UintBn256, - Bytes32, - Bytes48, - Bytes96, - BLSPubkey, - BlobIndex, - ValidatorIndex, -} = primitiveSsz; +const {UintNum64, Slot, Root, BLSSignature, UintBn64, UintBn256, Bytes32, Bytes48, Bytes96, BLSPubkey, BlobIndex} = + primitiveSsz; // Polynomial commitments 
// https://github.com/ethereum/consensus-specs/blob/dev/specs/eip4844/polynomial-commitments.md @@ -124,31 +113,22 @@ export const SignedBeaconBlock = new ContainerType( {typeName: "SignedBeaconBlock", jsonCase: "eth2"} ); +export const KzgCommitmentInclusionProof = new VectorCompositeType(Bytes32, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH); + export const BlobSidecar = new ContainerType( { - blockRoot: Root, index: BlobIndex, - slot: Slot, - blockParentRoot: Root, - proposerIndex: ValidatorIndex, blob: Blob, kzgCommitment: KZGCommitment, kzgProof: KZGProof, + signedBlockHeader: phase0Ssz.SignedBeaconBlockHeader, + kzgCommitmentInclusionProof: KzgCommitmentInclusionProof, }, {typeName: "BlobSidecar", jsonCase: "eth2"} ); export const BlobSidecars = new ListCompositeType(BlobSidecar, MAX_BLOB_COMMITMENTS_PER_BLOCK); -export const SignedBlobSidecar = new ContainerType( - { - message: BlobSidecar, - signature: BLSSignature, - }, - {typeName: "SignedBlobSidecar", jsonCase: "eth2"} -); -export const SignedBlobSidecars = new ListCompositeType(SignedBlobSidecar, MAX_BLOB_COMMITMENTS_PER_BLOCK); - export const BlobsBundle = new ContainerType( { commitments: BlobKzgCommitments, @@ -158,35 +138,6 @@ export const BlobsBundle = new ContainerType( {typeName: "BlobsBundle", jsonCase: "eth2"} ); -export const BlindedBlobSidecar = new ContainerType( - { - blockRoot: Root, - index: BlobIndex, - slot: Slot, - blockParentRoot: Root, - proposerIndex: ValidatorIndex, - blobRoot: BlindedBlob, - kzgCommitment: KZGCommitment, - kzgProof: KZGProof, - }, - {typeName: "BlindedBlobSidecar", jsonCase: "eth2"} -); - -export const BlindedBlobSidecars = new ListCompositeType(BlindedBlobSidecar, MAX_BLOB_COMMITMENTS_PER_BLOCK); - -export const SignedBlindedBlobSidecar = new ContainerType( - { - message: BlindedBlobSidecar, - signature: BLSSignature, - }, - {typeName: "SignedBlindedBlobSidecar", jsonCase: "eth2"} -); - -export const SignedBlindedBlobSidecars = new ListCompositeType( - 
SignedBlindedBlobSidecar, - MAX_BLOB_COMMITMENTS_PER_BLOCK -); - export const BlindedBeaconBlockBody = new ContainerType( { ...altairSsz.BeaconBlockBody.fields, @@ -213,19 +164,10 @@ export const SignedBlindedBeaconBlock = new ContainerType( {typeName: "SignedBlindedBeaconBlock", jsonCase: "eth2"} ); -export const BlindedBlobsBundle = new ContainerType( - { - commitments: BlobKzgCommitments, - proofs: KZGProofs, - blobRoots: BlindedBlobs, - }, - {typeName: "BlindedBlobsBundle", jsonCase: "eth2"} -); - export const BuilderBid = new ContainerType( { header: ExecutionPayloadHeader, - blindedBlobsBundle: BlindedBlobsBundle, + blobKzgCommitments: BlobKzgCommitments, value: UintBn256, pubkey: BLSPubkey, }, diff --git a/packages/types/src/deneb/types.ts b/packages/types/src/deneb/types.ts index 1d6eb5fca5aa..0921ae2428e7 100644 --- a/packages/types/src/deneb/types.ts +++ b/packages/types/src/deneb/types.ts @@ -1,4 +1,5 @@ import {ValueOf} from "@chainsafe/ssz"; +import {BlockContents} from "../allForks/types.js"; import * as ssz from "./sszTypes.js"; export type KZGProof = ValueOf; @@ -6,19 +7,12 @@ export type KZGCommitment = ValueOf; export type Blob = ValueOf; export type Blobs = ValueOf; -export type BlindedBlob = ValueOf; -export type BlindedBlobs = ValueOf; export type BlobSidecar = ValueOf; export type BlobSidecars = ValueOf; -export type BlindedBlobSidecar = ValueOf; -export type BlindedBlobSidecars = ValueOf; -export type SignedBlobSidecar = ValueOf; -export type SignedBlobSidecars = ValueOf; -export type SignedBlindedBlobSidecar = ValueOf; -export type SignedBlindedBlobSidecars = ValueOf; export type ExecutionPayloadAndBlobsBundle = ValueOf; export type BlobsBundle = ValueOf; +export type KzgCommitmentInclusionProof = ValueOf; export type BlobKzgCommitments = ValueOf; export type KZGProofs = ValueOf; export type BLSFieldElement = ValueOf; @@ -42,7 +36,6 @@ export type SignedBlindedBeaconBlock = ValueOf; export type BuilderBid = ValueOf; export type 
SignedBuilderBid = ValueOf; export type SSEPayloadAttributes = ValueOf; @@ -53,3 +46,6 @@ export type LightClientUpdate = ValueOf; export type LightClientFinalityUpdate = ValueOf; export type LightClientOptimisticUpdate = ValueOf; export type LightClientStore = ValueOf; + +export type ProducedBlobSidecars = Omit; +export type Contents = Omit; diff --git a/packages/types/src/utils/typeguards.ts b/packages/types/src/utils/typeguards.ts index 0b9bee97d17a..781738c3dbad 100644 --- a/packages/types/src/utils/typeguards.ts +++ b/packages/types/src/utils/typeguards.ts @@ -5,21 +5,15 @@ import { FullOrBlindedBeaconBlockBody, FullOrBlindedExecutionPayload, ExecutionPayloadHeader, - FullOrBlindedBlobSidecar, - FullOrBlindedSignedBlobSidecar, BlindedBeaconBlockBody, BlindedBeaconBlock, BlockContents, - SignedBlindedBlockContents, SignedBlindedBeaconBlock, - BlindedBlockContents, SignedBlockContents, SignedBeaconBlock, - SignedBlindedBeaconBlockOrContents, ExecutionPayload, ExecutionPayloadAndBlobsBundle, } from "../allForks/types.js"; -import {ts as deneb} from "../deneb/index.js"; export function isBlindedExecution(payload: FullOrBlindedExecutionPayload): payload is ExecutionPayloadHeader { // we just check transactionsRoot for determinging as it the base field @@ -42,32 +36,12 @@ export function isBlindedSignedBeaconBlock( return (signedBlock as SignedBlindedBeaconBlock).message.body.executionPayloadHeader !== undefined; } -export function isBlindedBlobSidecar(blob: FullOrBlindedBlobSidecar): blob is deneb.BlindedBlobSidecar { - return (blob as deneb.BlindedBlobSidecar).blobRoot !== undefined; -} - -export function isBlindedSignedBlobSidecar( - blob: FullOrBlindedSignedBlobSidecar -): blob is deneb.SignedBlindedBlobSidecar { - return (blob as deneb.SignedBlindedBlobSidecar).message.blobRoot !== undefined; -} - export function isBlockContents(data: FullOrBlindedBeaconBlockOrContents): data is BlockContents { - return (data as BlockContents).blobSidecars !== undefined; + 
return (data as BlockContents).kzgProofs !== undefined; } export function isSignedBlockContents(data: SignedBeaconBlock | SignedBlockContents): data is SignedBlockContents { - return (data as SignedBlockContents).signedBlobSidecars !== undefined; -} - -export function isBlindedBlockContents(data: FullOrBlindedBeaconBlockOrContents): data is BlindedBlockContents { - return (data as BlindedBlockContents).blindedBlobSidecars !== undefined; -} - -export function isSignedBlindedBlockContents( - data: SignedBlindedBeaconBlockOrContents -): data is SignedBlindedBlockContents { - return (data as SignedBlindedBlockContents).signedBlindedBlobSidecars !== undefined; + return (data as SignedBlockContents).kzgProofs !== undefined; } export function isExecutionPayloadAndBlobsBundle( diff --git a/packages/types/test/constants/blobs.test.ts b/packages/types/test/constants/blobs.test.ts new file mode 100644 index 000000000000..93193463e76b --- /dev/null +++ b/packages/types/test/constants/blobs.test.ts @@ -0,0 +1,25 @@ +import {describe, it, expect} from "vitest"; +import * as constants from "@lodestar/params"; +import {ssz} from "../../src/index.js"; + +// NOTE: This test is here and not in lodestar-params, to prevent lodestar-params depending on SSZ +// Since lodestar-params and lodestar-types are in the same mono-repo, running this test here is enough +// guarantee that these constants are correct. 
+ +describe(`${constants.ACTIVE_PRESET}/ blobs pre-computed constants`, () => { + const BLOBSIDECAR_FIXED_SIZE = ssz.deneb.BlobSidecars.elementType.fixedSize; + const KZG_COMMITMENT_GINDEX0 = Number(ssz.deneb.BeaconBlockBody.getPathInfo(["blobKzgCommitments", 0]).gindex); + const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** constants.KZG_COMMITMENT_INCLUSION_PROOF_DEPTH; + + const correctConstants = { + BLOBSIDECAR_FIXED_SIZE, + KZG_COMMITMENT_GINDEX0, + KZG_COMMITMENT_SUBTREE_INDEX0, + }; + + for (const [key, expectedValue] of Object.entries(correctConstants)) { + it(key, () => { + expect((constants as unknown as Record)[key]).to.equal(expectedValue); + }); + } +}); diff --git a/packages/types/test/unit/constants.test.ts b/packages/types/test/constants/lightclient.test.ts similarity index 90% rename from packages/types/test/unit/constants.test.ts rename to packages/types/test/constants/lightclient.test.ts index 09cbec8bf1b5..567cc7c3bd17 100644 --- a/packages/types/test/unit/constants.test.ts +++ b/packages/types/test/constants/lightclient.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import * as constants from "@lodestar/params"; import {ssz} from "../../src/index.js"; @@ -6,7 +6,7 @@ import {ssz} from "../../src/index.js"; // Since lodestar-params and lodestar-types are in the same mono-repo, running this test here is enough // guarantee that these constants are correct. 
-describe("Lightclient pre-computed constants", () => { +describe(`${constants.ACTIVE_PRESET}/ Lightclient pre-computed constants`, () => { const FINALIZED_ROOT_GINDEX = bnToNum(ssz.altair.BeaconState.getPathInfo(["finalizedCheckpoint", "root"]).gindex); const FINALIZED_ROOT_DEPTH = floorlog2(FINALIZED_ROOT_GINDEX); const FINALIZED_ROOT_INDEX = FINALIZED_ROOT_GINDEX % 2 ** FINALIZED_ROOT_DEPTH; @@ -26,7 +26,7 @@ describe("Lightclient pre-computed constants", () => { for (const [key, expectedValue] of Object.entries(correctConstants)) { it(key, () => { - expect((constants as unknown as Record)[key]).to.equal(expectedValue); + expect((constants as unknown as Record)[key]).toBe(expectedValue); }); } }); diff --git a/packages/types/test/globalSetup.ts b/packages/types/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/types/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/types/test/unit/ssz.test.ts b/packages/types/test/unit/ssz.test.ts index 80ddcb12b893..b5c972a8f471 100644 --- a/packages/types/test/unit/ssz.test.ts +++ b/packages/types/test/unit/ssz.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {ssz} from "../../src/index.js"; describe("size", function () { @@ -6,8 +6,8 @@ describe("size", function () { const minSize = ssz.phase0.BeaconState.minSize; const maxSize = ssz.phase0.BeaconState.maxSize; // https://gist.github.com/protolambda/db75c7faa1e94f2464787a480e5d613e - expect(minSize).to.be.equal(2687377); - expect(maxSize).to.be.equal(141837543039377); + expect(minSize).toBe(2687377); + expect(maxSize).toBe(141837543039377); }); }); @@ -24,7 +24,7 @@ describe("container serialization/deserialization field casing(s)", function () const result = ssz.phase0.AttesterSlashing.fromJson(json); const back = ssz.phase0.AttesterSlashing.toJson(result); - 
expect(back).to.be.deep.equal(json); + expect(back).toEqual(json); }); it("ProposerSlashing", function () { @@ -39,6 +39,6 @@ describe("container serialization/deserialization field casing(s)", function () const result = ssz.phase0.ProposerSlashing.fromJson(json); const back = ssz.phase0.ProposerSlashing.toJson(result); - expect(back).to.be.deep.equal(json); + expect(back).toEqual(json); }); }); diff --git a/packages/types/tsconfig.e2e.json b/packages/types/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/types/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/types/vitest.browser.config.ts b/packages/types/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/types/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/types/vitest.config.ts b/packages/types/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/types/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/types/webpack.test.config.cjs b/packages/types/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/types/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - 
-module.exports = { - ...webpackConfig, -}; diff --git a/packages/utils/.mocharc.yml b/packages/utils/.mocharc.yml deleted file mode 100644 index 8b4eb53ed37a..000000000000 --- a/packages/utils/.mocharc.yml +++ /dev/null @@ -1,3 +0,0 @@ -colors: true -node-option: - - "loader=ts-node/esm" diff --git a/packages/utils/karma.config.cjs b/packages/utils/karma.config.cjs deleted file mode 100644 index a3ebb967e2ce..000000000000 --- a/packages/utils/karma.config.cjs +++ /dev/null @@ -1,9 +0,0 @@ -const karmaConfig = require("../../karma.base.config.js"); -const webpackConfig = require("./webpack.test.config.cjs"); - -module.exports = function karmaConfigurator(config) { - config.set({ - ...karmaConfig, - webpack: webpackConfig, - }); -}; diff --git a/packages/utils/package.json b/packages/utils/package.json index 457218fa4f4c..8137cd496a0a 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.12.0", + "version": "1.13.0", "type": "module", "exports": "./lib/index.js", "files": [ @@ -27,12 +27,15 @@ "build:watch": "yarn run build --watch", "build:release": "yarn clean && yarn build", "check-build": "node -e \"(async function() { await import('./lib/index.js') })()\"", - "check-types": "tsc", + "check-types": "tsc && vitest --run --typecheck --dir test/types/", "lint": "eslint --color --ext .ts src/ test/", "lint:fix": "yarn run lint --fix", "pretest": "yarn run check-types", - "test:unit": "mocha 'test/**/*.test.ts'", - "test:browsers": "yarn karma start karma.config.cjs", + "test:unit": "vitest --run --dir test/unit/ --coverage", + "test:browsers": "yarn test:browsers:chrome && yarn test:browsers:firefox && yarn test:browsers:electron", + "test:browsers:chrome": "vitest --run --browser chrome --config ./vitest.browser.config.ts --dir test/unit", + "test:browsers:firefox": "vitest --run --browser firefox --config ./vitest.browser.config.ts --dir 
test/unit", + "test:browsers:electron": "echo 'Electron tests will be introduced back in the future as soon vitest supports electron.'", "check-readme": "typescript-docs-verifier" }, "types": "lib/index.d.ts", @@ -47,6 +50,7 @@ "devDependencies": { "@types/js-yaml": "^4.0.5", "@types/triple-beam": "^1.3.2", + "prom-client": "^15.1.0", "triple-beam": "^1.3.0" }, "keywords": [ diff --git a/packages/utils/src/ethConversion.ts b/packages/utils/src/ethConversion.ts new file mode 100644 index 000000000000..7aa8fa0cc63c --- /dev/null +++ b/packages/utils/src/ethConversion.ts @@ -0,0 +1,12 @@ +export const ETH_TO_GWEI = BigInt(10 ** 9); +export const GWEI_TO_WEI = BigInt(10 ** 9); +export const ETH_TO_WEI = ETH_TO_GWEI * GWEI_TO_WEI; + +type EthNumeric = bigint; + +/** + * Convert gwei to wei. + */ +export function gweiToWei(gwei: EthNumeric): EthNumeric { + return gwei * GWEI_TO_WEI; +} diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index be939673845c..fcff789f9c56 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -8,6 +8,7 @@ export * from "./format.js"; export * from "./logger.js"; export * from "./map.js"; export * from "./math.js"; +export * from "./metrics.js"; export * from "./objects.js"; export {retry, type RetryOptions} from "./retry.js"; export * from "./notNullish.js"; @@ -15,7 +16,8 @@ export * from "./sleep.js"; export * from "./sort.js"; export * from "./timeout.js"; export {type RecursivePartial, bnToNum} from "./types.js"; -export * from "./validation.js"; +export * from "./url.js"; export * from "./verifyMerkleBranch.js"; export * from "./promise.js"; export * from "./waitFor.js"; +export * from "./ethConversion.js"; diff --git a/packages/utils/src/metrics.ts b/packages/utils/src/metrics.ts new file mode 100644 index 000000000000..a25518280ee1 --- /dev/null +++ b/packages/utils/src/metrics.ts @@ -0,0 +1,71 @@ +export type NoLabels = Record; +export type LabelsGeneric = Record; +export type LabelKeys = 
Extract; +export type CollectFn = (metric: Gauge) => void; + +export interface Gauge { + inc: NoLabels extends Labels ? (value?: number) => void : (labels: Labels, value?: number) => void; + dec: NoLabels extends Labels ? (value?: number) => void : (labels: Labels, value?: number) => void; + set: NoLabels extends Labels ? (value: number) => void : (labels: Labels, value: number) => void; + + collect?(): void; +} + +export interface GaugeExtra extends Omit, "collect"> { + addCollect(collectFn: CollectFn): void; +} + +export interface Histogram { + startTimer(): NoLabels extends Labels ? () => number : (labels: Labels) => number; + startTimer>( + labels?: NoLabels extends Labels ? never : L + ): keyof Omit extends never ? () => number : (labels: Omit) => number; + + observe: NoLabels extends Labels ? (value: number) => void : (labels: Labels, value: number) => void; + + reset(): void; +} + +export interface AvgMinMax { + addGetValuesFn(getValuesFn: () => number[]): void; + + set: NoLabels extends Labels ? (values: number[]) => void : (labels: Labels, values: number[]) => void; +} + +export interface Counter { + inc: NoLabels extends Labels ? (value?: number) => void : (labels: Labels, value?: number) => void; +} + +export type GaugeConfig = { + name: string; + help: string; +} & (NoLabels extends Labels ? 
{labelNames?: never} : {labelNames: [LabelKeys, ...LabelKeys[]]}); + +export type HistogramConfig = GaugeConfig & { + buckets?: number[]; +}; + +export type AvgMinMaxConfig = GaugeConfig; + +export type CounterConfig = GaugeConfig; + +export type StaticConfig = { + name: GaugeConfig["name"]; + help: GaugeConfig["help"]; + value: Record, string>; +}; + +export interface MetricsRegister { + gauge(config: GaugeConfig): Gauge; + histogram(config: HistogramConfig): Histogram; + counter(config: CounterConfig): Counter; +} + +export interface MetricsRegisterExtra extends MetricsRegister { + gauge(config: GaugeConfig): GaugeExtra; +} + +export interface MetricsRegisterCustom extends MetricsRegisterExtra { + avgMinMax(config: AvgMinMaxConfig): AvgMinMax; + static(config: StaticConfig): void; +} diff --git a/packages/utils/src/validation.ts b/packages/utils/src/url.ts similarity index 77% rename from packages/utils/src/validation.ts rename to packages/utils/src/url.ts index ed8b88c912e8..7d0b23347617 100644 --- a/packages/utils/src/validation.ts +++ b/packages/utils/src/url.ts @@ -18,3 +18,12 @@ export function isValidHttpUrl(urlStr: string): boolean { return url.protocol === "http:" || url.protocol === "https:"; } + +/** + * Sanitize URL to prevent leaking user credentials in logs + * + * Note: `urlStr` must be a valid URL + */ +export function toSafePrintableUrl(urlStr: string): string { + return new URL(urlStr).origin; +} diff --git a/packages/utils/test/globalSetup.ts b/packages/utils/test/globalSetup.ts new file mode 100644 index 000000000000..0ab57c057472 --- /dev/null +++ b/packages/utils/test/globalSetup.ts @@ -0,0 +1,2 @@ +export async function setup(): Promise {} +export async function teardown(): Promise {} diff --git a/packages/utils/test/setup.ts b/packages/utils/test/setup.ts deleted file mode 100644 index b83e6cb78511..000000000000 --- a/packages/utils/test/setup.ts +++ /dev/null @@ -1,6 +0,0 @@ -import chai from "chai"; -import chaiAsPromised from 
"chai-as-promised"; -import sinonChai from "sinon-chai"; - -chai.use(chaiAsPromised); -chai.use(sinonChai); diff --git a/packages/utils/test/types/metrics.test-d.ts b/packages/utils/test/types/metrics.test-d.ts new file mode 100644 index 000000000000..2f008618e648 --- /dev/null +++ b/packages/utils/test/types/metrics.test-d.ts @@ -0,0 +1,114 @@ +import {describe, it, expectTypeOf} from "vitest"; +import {Counter as PromCounter, Gauge as PromGauge, Histogram as PromHistogram} from "prom-client"; +import {Counter, Gauge, Histogram, MetricsRegister} from "../../src/metrics.js"; + +describe("Metric types", () => { + type Labels = {label: string}; + type MultipleLabels = {label1: string; label2: string}; + + describe("MetricsRegister", () => { + const register = {} as MetricsRegister; + + it("should require name and help to be defined on each metric", () => { + expectTypeOf(register.gauge).parameter(0).toHaveProperty("name").toBeString(); + expectTypeOf(register.gauge).parameter(0).toHaveProperty("help").toBeString(); + }); + + it("should require to set labelNames if metric has defined labels", () => { + expectTypeOf(register.gauge) + .parameter(0) + .toHaveProperty("labelNames") + .toMatchTypeOf<"label"[]>(); + + expectTypeOf(register.gauge) + .parameter(0) + .toHaveProperty("labelNames") + .toMatchTypeOf<("label1" | "label2")[]>(); + }); + + it("should not require to set labelNames if metric has no labels", () => { + expectTypeOf(register.gauge).parameter(0).toHaveProperty("labelNames").toEqualTypeOf(); + }); + }); + + describe("Gauge", () => { + it("should be compatible with prom-client type", () => { + expectTypeOf().toMatchTypeOf(); + }); + + it("should require to set labels if metric has defined labels", () => { + const gauge = {} as Gauge; + + expectTypeOf(gauge.inc).toEqualTypeOf<(labels: Labels, value?: number | undefined) => void>(); + expectTypeOf(gauge.dec).toEqualTypeOf<(labels: Labels, value?: number | undefined) => void>(); + 
expectTypeOf(gauge.set).toEqualTypeOf<(labels: Labels, value: number) => void>(); + }); + + it("should not require to set labels if metric has no labels", () => { + const gauge = {} as Gauge; + + expectTypeOf(gauge.inc).toEqualTypeOf<(value?: number | undefined) => void>(); + expectTypeOf(gauge.dec).toEqualTypeOf<(value?: number | undefined) => void>(); + expectTypeOf(gauge.set).toEqualTypeOf<(value: number) => void>(); + }); + }); + + describe("Histogram", () => { + it("should be compatible with prom-client type", () => { + expectTypeOf().toMatchTypeOf(); + }); + + it("should require to set labels if metric has defined labels", () => { + const histogram = {} as Histogram; + + expectTypeOf(histogram.startTimer).toMatchTypeOf<(labels: Labels) => () => number>(); + expectTypeOf(histogram.observe).toEqualTypeOf<(labels: Labels, value: number) => void>(); + }); + + it("should require to set labels in timer if not set in startTimer", () => { + const histogram = {} as Histogram; + + const timer = histogram.startTimer(); + expectTypeOf(timer).toEqualTypeOf<(labels: MultipleLabels) => number>(); + }); + + it("should not require to set labels in timer if already set in startTimer", () => { + const histogram = {} as Histogram; + + const timer = histogram.startTimer({label1: "value1", label2: "label2"}); + expectTypeOf(timer).toEqualTypeOf<() => number>(); + }); + + it("should allow to set labels in either startTimer or timer", () => { + const histogram = {} as Histogram; + + const timer = histogram.startTimer({label1: "value1"}); + expectTypeOf(timer).toEqualTypeOf<(labels: {label2: string}) => number>(); + }); + + it("should not require to set labels if metric has no labels", () => { + const histogram = {} as Histogram; + + expectTypeOf(histogram.startTimer).toMatchTypeOf<() => () => number>(); + expectTypeOf(histogram.observe).toEqualTypeOf<(value: number) => void>(); + }); + }); + + describe("Counter", () => { + it("should be compatible with prom-client type", () => { + 
expectTypeOf().toMatchTypeOf(); + }); + + it("should require to set labels if metric has defined labels", () => { + const counter = {} as Counter; + + expectTypeOf(counter.inc).toEqualTypeOf<(labels: Labels, value?: number | undefined) => void>(); + }); + + it("should not require to set labels if metric has no labels", () => { + const counter = {} as Counter; + + expectTypeOf(counter.inc).toEqualTypeOf<(value?: number | undefined) => void>(); + }); + }); +}); diff --git a/packages/utils/test/unit/assert.test.ts b/packages/utils/test/unit/assert.test.ts index e20595b69cfc..0555bcbd01a0 100644 --- a/packages/utils/test/unit/assert.test.ts +++ b/packages/utils/test/unit/assert.test.ts @@ -1,23 +1,22 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {assert} from "../../src/index.js"; describe("assert", () => { describe("true", () => { it("Should not throw with true", () => { - expect(() => assert.true(true)).to.not.throw(); + expect(() => assert.true(true)).not.toThrow(); }); it("Should throw with false", () => { - expect(() => assert.true(false, "something must be valid")).to.throw("something must be valid"); + expect(() => assert.true(false, "something must be valid")).toThrow("something must be valid"); }); }); describe("equal with custom message", () => { it("Should not throw with equal values", () => { - expect(() => assert.equal(1, 1)).to.not.throw(); + expect(() => assert.equal(1, 1)).not.toThrow(); }); it("Should throw with different values", () => { - expect(() => assert.equal(1, 2, "something must be equal")).to.throw("something must be equal: 1 === 2"); + expect(() => assert.equal(1, 2, "something must be equal")).toThrow("something must be equal: 1 === 2"); }); }); @@ -51,9 +50,9 @@ describe("assert", () => { for (const {op, args, ok} of cases) { it(`assert ${args[0]} ${op} ${args[1]} = ${ok}`, () => { if (ok) { - expect(() => assert[op](...args)).to.not.throw(); + expect(() => 
assert[op](...args)).not.toThrow(); } else { - expect(() => assert[op](...args)).to.throw(); + expect(() => assert[op](...args)).toThrow(); } }); } diff --git a/packages/utils/test/unit/base64.test.ts b/packages/utils/test/unit/base64.test.ts index 38ccd77bafe8..7c68e84f4c3e 100644 --- a/packages/utils/test/unit/base64.test.ts +++ b/packages/utils/test/unit/base64.test.ts @@ -1,15 +1,14 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {toBase64, fromBase64} from "../../src/index.js"; describe("toBase64", () => { it("should encode UTF-8 string as base64 string", () => { - expect(toBase64("user:password")).to.be.equal("dXNlcjpwYXNzd29yZA=="); + expect(toBase64("user:password")).toBe("dXNlcjpwYXNzd29yZA=="); }); }); describe("fromBase64", () => { it("should decode UTF-8 string from base64 string", () => { - expect(fromBase64("dXNlcjpwYXNzd29yZA==")).to.be.equal("user:password"); + expect(fromBase64("dXNlcjpwYXNzd29yZA==")).toBe("user:password"); }); }); diff --git a/packages/utils/test/unit/bytes.test.ts b/packages/utils/test/unit/bytes.test.ts index f47e4c7ac3ed..8410e667187a 100644 --- a/packages/utils/test/unit/bytes.test.ts +++ b/packages/utils/test/unit/bytes.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {assert, expect} from "chai"; +import {describe, it, expect} from "vitest"; import {intToBytes, bytesToInt, toHex, fromHex, toHexString} from "../../src/index.js"; describe("intToBytes", () => { @@ -27,7 +26,7 @@ describe("intToBytes", () => { const type = typeof input; const length = input[1]; it(`should correctly serialize ${type} to bytes length ${length}`, () => { - assert(intToBytes(input[0], input[1]).equals(output)); + expect(intToBytes(input[0], input[1])).toEqual(output); }); } }); @@ -43,7 +42,7 @@ describe("bytesToInt", () => { ]; for (const {input, output} of testCases) { it(`should produce ${output}`, () => { - expect(bytesToInt(input)).to.be.equal(output); + 
expect(bytesToInt(input)).toBe(output); }); } }); @@ -57,7 +56,7 @@ describe("toHex", () => { ]; for (const {input, output} of testCases) { it(`should convert Uint8Array to hex string ${output}`, () => { - expect(toHex(input)).to.be.equal(output); + expect(toHex(input)).toBe(output); }); } }); @@ -77,7 +76,7 @@ describe("fromHex", () => { for (const {input, output} of testCases) { it(`should convert hex string ${input} to Uint8Array`, () => { - expect(fromHex(input)).to.deep.equal(output); + expect(fromHex(input)).toEqual(output); }); } }); @@ -94,7 +93,7 @@ describe("toHexString", () => { for (const {input, output} of testCases) { it(`should convert Uint8Array to hex string ${output}`, () => { - expect(toHexString(input)).to.be.equal(output); + expect(toHexString(input)).toBe(output); }); } }); diff --git a/packages/utils/test/unit/err.test.ts b/packages/utils/test/unit/err.test.ts index 81bfd505ffc0..a4b30ee65d73 100644 --- a/packages/utils/test/unit/err.test.ts +++ b/packages/utils/test/unit/err.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {Err, isErr, mapOkResults, mapOkResultsAsync, Result} from "../../src/err.js"; import {expectDeepEquals, expectEquals} from "../utils/chai.js"; @@ -46,7 +46,7 @@ describe("Result Err", () => { }); it("throw for different length", () => { - expect(() => mapOkResults([], () => [0])).to.throw(); + expect(() => mapOkResults([], () => [0])).toThrow(); }); it("num to string mixed results", () => { diff --git a/packages/utils/test/unit/math.test.ts b/packages/utils/test/unit/math.test.ts index 526f98ac1f1a..6827fea2bbb0 100644 --- a/packages/utils/test/unit/math.test.ts +++ b/packages/utils/test/unit/math.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {assert} from "chai"; +import {describe, it, expect} from "vitest"; import {bigIntMin, bigIntMax, intDiv, intSqrt, bigIntSqrt} from "../../src/index.js"; describe("util/maths", function () { @@ -8,13 +7,13 @@ 
describe("util/maths", function () { const a = BigInt(1); const b = BigInt(2); const result = bigIntMin(a, b); - assert.equal(result, a, "Should have returned a!"); + expect(result).toBe(a); }); it("if b is lt should return b", () => { const a = BigInt(3); const b = BigInt(2); const result = bigIntMin(a, b); - assert.equal(result, b, "Should have returned b!"); + expect(result).toBe(b); }); }); @@ -23,78 +22,78 @@ describe("util/maths", function () { const a = BigInt(2); const b = BigInt(1); const result = bigIntMax(a, b); - assert.equal(result, a, "Should have returned a!"); + expect(result).toBe(a); }); it("if b is gt should return b", () => { const a = BigInt(2); const b = BigInt(3); const result = bigIntMax(a, b); - assert.equal(result, b, "Should have returned b!"); + expect(result).toBe(b); }); }); describe("intDiv", () => { it("should divide whole number", () => { const result = intDiv(6, 3); - assert.equal(result, 2, "Should have returned 2!"); + expect(result).toBe(2); }); it("should round less division", () => { const result = intDiv(9, 8); - assert.equal(result, 1, "Should have returned 1!"); + expect(result).toBe(1); }); }); describe("intSqrt", () => { it("0 should return 0", () => { const result = intSqrt(0); - assert.equal(result, 0, "Should have returned 0!"); + expect(result).toBe(0); }); it("1 should return 1", () => { const result = intSqrt(1); - assert.equal(result, 1, "Should have returned 1!"); + expect(result).toBe(1); }); it("3 should return 1", () => { const result = intSqrt(3); - assert.equal(result, 1, "Should have returned 1!"); + expect(result).toBe(1); }); it("4 should return 2", () => { const result = intSqrt(4); - assert.equal(result, 2, "Should have returned 2!"); + expect(result).toBe(2); }); it("16 should return 4", () => { const result = intSqrt(16); - assert.equal(result, 4, "Should have returned 4!"); + expect(result).toBe(4); }); it("31 should return 5", () => { const result = intSqrt(31); - assert.equal(result, 5, "Should have 
returned 5!"); + expect(result).toBe(5); }); }); describe("bigIntSqrt", () => { it("0 should return 0", () => { const result = bigIntSqrt(BigInt(0)); - assert.equal(result.toString(), BigInt(0).toString(), "Should have returned 0!"); + expect(result.toString()).toBe(BigInt(0).toString()); }); it("1 should return 1", () => { const result = bigIntSqrt(BigInt(1)); - assert.equal(result.toString(), BigInt(1).toString(), "Should have returned 1!"); + expect(result.toString()).toBe(BigInt(1).toString()); }); it("3 should return 1", () => { const result = bigIntSqrt(BigInt(3)); - assert.equal(result.toString(), BigInt(1).toString(), "Should have returned 1!"); + expect(result.toString()).toBe(BigInt(1).toString()); }); it("4 should return 2", () => { const result = bigIntSqrt(BigInt(4)); - assert.equal(result.toString(), BigInt(2).toString(), "Should have returned 2!"); + expect(result.toString()).toBe(BigInt(2).toString()); }); it("16 should return 4", () => { const result = bigIntSqrt(BigInt(16)); - assert.equal(result.toString(), BigInt(4).toString(), "Should have returned 4!"); + expect(result.toString()).toBe(BigInt(4).toString()); }); it("31 should return 5", () => { const result = bigIntSqrt(BigInt(31)); - assert.equal(result.toString(), BigInt(5).toString(), "Should have returned 5!"); + expect(result.toString()).toBe(BigInt(5).toString()); }); }); }); diff --git a/packages/utils/test/unit/objects.test.ts b/packages/utils/test/unit/objects.test.ts index ebad6c3f447c..4699a8c6f405 100644 --- a/packages/utils/test/unit/objects.test.ts +++ b/packages/utils/test/unit/objects.test.ts @@ -1,20 +1,19 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {isPlainObject, objectToExpectedCase} from "../../src/index.js"; describe("Objects helper", () => { it("should be plain object", () => { - expect(isPlainObject(Object.create({}))).to.equal(true); - 
expect(isPlainObject(Object.create(Object.create(Object.prototype)))).to.equal(true); - expect(isPlainObject({foo: "bar"})).to.equal(true); - expect(isPlainObject({})).to.equal(true); + expect(isPlainObject(Object.create({}))).toBe(true); + expect(isPlainObject(Object.create(Object.create(Object.prototype)))).toBe(true); + expect(isPlainObject({foo: "bar"})).toBe(true); + expect(isPlainObject({})).toBe(true); }); it("should not be plain object", () => { - expect(isPlainObject(1)).to.equal(false); - expect(isPlainObject(["foo", "bar"])).to.equal(false); - expect(isPlainObject([])).to.equal(false); - expect(isPlainObject(null)).to.equal(false); + expect(isPlainObject(1)).toBe(false); + expect(isPlainObject(["foo", "bar"])).toBe(false); + expect(isPlainObject([])).toBe(false); + expect(isPlainObject(null)).toBe(false); }); }); @@ -54,11 +53,11 @@ describe("objectToExpectedCase", () => { for (const {id, snake, camel} of testCases) { describe(id, () => { it("snake > camel", () => { - expect(objectToExpectedCase(snake, "camel")).to.deep.equal(camel); + expect(objectToExpectedCase(snake, "camel")).toEqual(camel); }); it("camel > snake", () => { - expect(objectToExpectedCase(camel, "snake")).to.deep.equal(snake); + expect(objectToExpectedCase(camel, "snake")).toEqual(snake); }); }); } diff --git a/packages/utils/test/unit/promise.node.test.ts b/packages/utils/test/unit/promise.node.test.ts new file mode 100644 index 000000000000..c9f6a3c2f98d --- /dev/null +++ b/packages/utils/test/unit/promise.node.test.ts @@ -0,0 +1,35 @@ +import {describe, it, expect, vi, beforeEach, afterEach} from "vitest"; +import {callFnWhenAwait} from "../../src/promise.js"; + +// TODO: Need to debug why vi.useFakeTimers() is not working for the browsers +describe("callFnWhenAwait util", function () { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.clearAllTimers(); + }); + + it("should call function while awaing for promise", async () => { + const p = new 
Promise((resolve) => setTimeout(() => resolve("done"), 5 * 1000)); + const stub = vi.fn(); + const result = await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), vi.advanceTimersByTimeAsync(5000)]); + expect(result[0]).toBe("done"); + expect(stub).toHaveBeenCalledTimes(2); + await vi.advanceTimersByTimeAsync(5000); + expect(stub).toHaveBeenCalledTimes(2); + }); + + it("should throw error", async () => { + const stub = vi.fn(); + const p = new Promise((_, reject) => setTimeout(() => reject(new Error("done")), 5 * 1000)); + try { + await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), vi.advanceTimersByTimeAsync(5000)]); + expect.fail("should throw error here"); + } catch (e) { + expect((e as Error).message).toBe("done"); + expect(stub).toHaveBeenCalledTimes(2); + } + }); +}); diff --git a/packages/utils/test/unit/promise.test.ts b/packages/utils/test/unit/promise.test.ts deleted file mode 100644 index dec5dc370a2b..000000000000 --- a/packages/utils/test/unit/promise.test.ts +++ /dev/null @@ -1,37 +0,0 @@ -import "../setup.js"; -import {expect} from "chai"; -import sinon from "sinon"; -import {callFnWhenAwait} from "../../src/promise.js"; - -describe("callFnWhenAwait util", function () { - const sandbox = sinon.createSandbox(); - beforeEach(() => { - sandbox.useFakeTimers(); - }); - - afterEach(() => { - sandbox.restore(); - }); - - it("should call function while awaing for promise", async () => { - const p = new Promise((resolve) => setTimeout(() => resolve("done"), 5 * 1000)); - const stub = sandbox.stub(); - const result = await Promise.all([callFnWhenAwait(p, stub, 2 * 1000), sandbox.clock.tickAsync(5000)]); - expect(result[0]).to.be.equal("done"); - expect(stub).to.be.calledTwice; - await sandbox.clock.tickAsync(5000); - expect(stub).to.be.calledTwice; - }); - - it("should throw error", async () => { - const stub = sandbox.stub(); - const p = new Promise((_, reject) => setTimeout(() => reject(new Error("done")), 5 * 1000)); - try { - await 
Promise.all([callFnWhenAwait(p, stub, 2 * 1000), sandbox.clock.tickAsync(5000)]); - expect.fail("should throw error here"); - } catch (e) { - expect((e as Error).message).to.be.equal("done"); - expect(stub).to.be.calledTwice; - } - }); -}); diff --git a/packages/utils/test/unit/promiserace.test.ts b/packages/utils/test/unit/promiserace.test.ts index 25952f828920..5d0567553522 100644 --- a/packages/utils/test/unit/promiserace.test.ts +++ b/packages/utils/test/unit/promiserace.test.ts @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {racePromisesWithCutoff, RaceEvent} from "../../src/promise.js"; describe("racePromisesWithCutoff", () => { @@ -98,7 +98,7 @@ describe("racePromisesWithCutoff", () => { testEvents.push(event) ); const testResultsCmp = testResults.map((res: string | Error) => (res instanceof Error ? res.message : res)); - expect({results: testResultsCmp, events: testEvents}).to.be.deep.equal({results, events}); + expect({results: testResultsCmp, events: testEvents}).toEqual({results, events}); }); } }); diff --git a/packages/utils/test/unit/retry.test.ts b/packages/utils/test/unit/retry.test.ts index b5211c7e106b..12afb7597015 100644 --- a/packages/utils/test/unit/retry.test.ts +++ b/packages/utils/test/unit/retry.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {retry, RetryOptions} from "../../src/retry.js"; describe("retry", () => { @@ -39,9 +38,9 @@ describe("retry", () => { for (const {id, fn, opts, result} of testCases) { it(id, async () => { if (result instanceof Error) { - await expect(retry(fn, opts)).to.be.rejectedWith(result); + await expect(retry(fn, opts)).rejects.toThrow(result); } else { - expect(await retry(fn, opts)).to.deep.equal(result); + expect(await retry(fn, opts)).toEqual(result); } }); } diff --git a/packages/utils/test/unit/sleep.test.ts b/packages/utils/test/unit/sleep.test.ts index 
44f7d309412a..a887560836eb 100644 --- a/packages/utils/test/unit/sleep.test.ts +++ b/packages/utils/test/unit/sleep.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {sleep} from "../../src/sleep.js"; import {ErrorAborted} from "../../src/errors.js"; @@ -13,20 +12,19 @@ describe("sleep", function () { const controller = new AbortController(); setTimeout(() => controller.abort(), 10); - // Sleep for longer than the current test timeout. - // If the abort signal doesn't work mocha will throw a timeout error - const sleepTime = 2 * this.timeout(); + const sleepTime = 5000; - await expect(sleep(sleepTime, controller.signal)).to.rejectedWith(ErrorAborted); + await expect(sleep(sleepTime, controller.signal)).rejects.toThrow(ErrorAborted); }); it("Should abort timeout with already aborted signal", async function () { const controller = new AbortController(); controller.abort(); - expect(controller.signal.aborted, "Signal should already be aborted").to.equal(true); + // "Signal should already be aborted" + expect(controller.signal.aborted).toBe(true); - await expect(sleep(0, controller.signal)).to.rejectedWith(ErrorAborted); + await expect(sleep(0, controller.signal)).rejects.toThrow(ErrorAborted); }); it("sleep 0 must tick the event loop", async () => { @@ -51,16 +49,13 @@ describe("sleep", function () { await new Promise((r) => setTimeout(r, 0)); } - expect(steps).to.deep.equal( - [ - // Sync execution - Step.beforeSleep, - // Next tick, first registered callback - Step.setTimeout0, - // Next tick, second registered callback - Step.afterSleep, - ], - "Wrong steps" - ); + expect(steps).toEqual([ + // Sync execution + Step.beforeSleep, + // Next tick, first registered callback + Step.setTimeout0, + // Next tick, second registered callback + Step.afterSleep, + ]); }); }); diff --git a/packages/utils/test/unit/timeout.test.ts b/packages/utils/test/unit/timeout.test.ts index 
a2b430e86855..b8844355effb 100644 --- a/packages/utils/test/unit/timeout.test.ts +++ b/packages/utils/test/unit/timeout.test.ts @@ -1,14 +1,11 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect, afterEach} from "vitest"; import {withTimeout} from "../../src/timeout.js"; import {ErrorAborted, TimeoutError} from "../../src/errors.js"; describe("withTimeout", function () { const data = "DATA"; const shortTimeoutMs = 10; - // Sleep for longer than the current test timeout. - // If the abort signal doesn't work mocha will throw a timeout error - const longTimeoutMs = 2 * this.timeout(); + const longTimeoutMs = 5000; const pendingTimeouts: NodeJS.Timeout[] = []; @@ -32,33 +29,33 @@ describe("withTimeout", function () { it("Should resolve timeout", async function () { const res = await withTimeout(() => pause(shortTimeoutMs, data), longTimeoutMs); - expect(res).to.equal(data); + expect(res).toBe(data); }); it("Should resolve timeout with not triggered signal", async function () { const controller = new AbortController(); const res = await withTimeout(() => pause(shortTimeoutMs, data), longTimeoutMs, controller.signal); - expect(res).to.equal(data); + expect(res).toBe(data); }); it("Should abort timeout with triggered signal", async function () { const controller = new AbortController(); setTimeout(() => controller.abort(), shortTimeoutMs); - await expect(withTimeout(() => pause(longTimeoutMs, data), longTimeoutMs, controller.signal)).to.rejectedWith( + await expect(withTimeout(() => pause(longTimeoutMs, data), longTimeoutMs, controller.signal)).rejects.toThrow( ErrorAborted ); }); it("Should timeout with no signal", async function () { - await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs)).to.rejectedWith(TimeoutError); + await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs)).rejects.toThrow(TimeoutError); }); it("Should timeout with not triggered signal", async function () { const controller = 
new AbortController(); - await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs, controller.signal)).to.rejectedWith( + await expect(withTimeout(() => pause(longTimeoutMs, data), shortTimeoutMs, controller.signal)).rejects.toThrow( TimeoutError ); }); @@ -67,9 +64,10 @@ describe("withTimeout", function () { const controller = new AbortController(); controller.abort(); - expect(controller.signal.aborted, "Signal should already be aborted").to.equal(true); + // "Signal should already be aborted" + expect(controller.signal.aborted).toBe(true); - await expect(withTimeout(() => pause(shortTimeoutMs, data), shortTimeoutMs, controller.signal)).to.rejectedWith( + await expect(withTimeout(() => pause(shortTimeoutMs, data), shortTimeoutMs, controller.signal)).rejects.toThrow( ErrorAborted ); }); diff --git a/packages/utils/test/unit/waitFor.test.ts b/packages/utils/test/unit/waitFor.test.ts index d659be3d4bcb..293e5aba936a 100644 --- a/packages/utils/test/unit/waitFor.test.ts +++ b/packages/utils/test/unit/waitFor.test.ts @@ -1,5 +1,4 @@ -import "../setup.js"; -import {expect} from "chai"; +import {describe, it, expect} from "vitest"; import {waitFor, createElapsedTimeTracker} from "../../src/waitFor.js"; import {ErrorAborted, TimeoutError} from "../../src/errors.js"; import {sleep} from "../../src/sleep.js"; @@ -9,7 +8,7 @@ describe("waitFor", () => { const timeout = 20; it("Should resolve if condition is already true", async () => { - await expect(waitFor(() => true, {interval, timeout})).to.be.fulfilled; + await expect(waitFor(() => true, {interval, timeout})).resolves.toBeUndefined(); }); it("Should resolve if condition becomes true within timeout", async () => { @@ -21,19 +20,19 @@ describe("waitFor", () => { }); it("Should reject with TimeoutError if condition does not become true within timeout", async () => { - await expect(waitFor(() => false, {interval, timeout})).to.be.rejectedWith(TimeoutError); + await expect(waitFor(() => false, {interval, 
timeout})).rejects.toThrow(TimeoutError); }); it("Should reject with ErrorAborted if aborted before condition becomes true", async () => { const controller = new AbortController(); setTimeout(() => controller.abort(), interval); - await expect(waitFor(() => false, {interval, timeout, signal: controller.signal})).to.be.rejectedWith(ErrorAborted); + await expect(waitFor(() => false, {interval, timeout, signal: controller.signal})).rejects.toThrow(ErrorAborted); }); it("Should reject with ErrorAborted if signal is already aborted", async () => { const controller = new AbortController(); controller.abort(); - await expect(waitFor(() => true, {interval, timeout, signal: controller.signal})).to.be.rejectedWith(ErrorAborted); + await expect(waitFor(() => true, {interval, timeout, signal: controller.signal})).rejects.toThrow(ErrorAborted); }); }); @@ -41,7 +40,7 @@ describe("waitForElapsedTime", () => { it("should true for the first time", () => { const callIfTimePassed = createElapsedTimeTracker({minElapsedTime: 1000}); - expect(callIfTimePassed()).to.be.true; + expect(callIfTimePassed()).toBe(true); }); it("should return true after the minElapsedTime has passed", async () => { @@ -50,7 +49,7 @@ describe("waitForElapsedTime", () => { await sleep(150); - expect(callIfTimePassed()).to.be.true; + expect(callIfTimePassed()).toBe(true); }); it("should return false before the minElapsedTime has passed", async () => { @@ -59,6 +58,6 @@ describe("waitForElapsedTime", () => { await sleep(10); - expect(callIfTimePassed()).to.be.false; + expect(callIfTimePassed()).toBe(false); }); }); diff --git a/packages/utils/tsconfig.e2e.json b/packages/utils/tsconfig.e2e.json deleted file mode 100644 index cedf626f4124..000000000000 --- a/packages/utils/tsconfig.e2e.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extends": "../../tsconfig.e2e.json", - "include": [ - "src", - "test" - ], -} \ No newline at end of file diff --git a/packages/utils/vitest.browser.config.ts 
b/packages/utils/vitest.browser.config.ts new file mode 100644 index 000000000000..3c4b48885a33 --- /dev/null +++ b/packages/utils/vitest.browser.config.ts @@ -0,0 +1,14 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.browser.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + optimizeDeps: { + exclude: ["@chainsafe/blst"], + }, + }) +); diff --git a/packages/utils/vitest.config.ts b/packages/utils/vitest.config.ts new file mode 100644 index 000000000000..1df0de848936 --- /dev/null +++ b/packages/utils/vitest.config.ts @@ -0,0 +1,11 @@ +import {defineConfig, mergeConfig} from "vitest/config"; +import vitestConfig from "../../vitest.base.config"; + +export default mergeConfig( + vitestConfig, + defineConfig({ + test: { + globalSetup: ["./test/globalSetup.ts"], + }, + }) +); diff --git a/packages/utils/webpack.test.config.cjs b/packages/utils/webpack.test.config.cjs deleted file mode 100644 index 711c6ac891a7..000000000000 --- a/packages/utils/webpack.test.config.cjs +++ /dev/null @@ -1,5 +0,0 @@ -const webpackConfig = require("../../webpack.test.config.js"); - -module.exports = { - ...webpackConfig, -}; diff --git a/packages/validator/package.json b/packages/validator/package.json index a853057637cd..ecd15116169a 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/validator", - "version": "1.12.0", + "version": "1.13.0", "description": "A Typescript implementation of the validator client", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -50,18 +50,18 @@ "dependencies": { "@chainsafe/bls": "7.1.1", "@chainsafe/ssz": "^0.14.0", - "@lodestar/api": "^1.12.0", - "@lodestar/config": "^1.12.0", - "@lodestar/db": "^1.12.0", - "@lodestar/params": "^1.12.0", - "@lodestar/state-transition": "^1.12.0", - "@lodestar/types": "^1.12.0", - "@lodestar/utils": "^1.12.0", + 
"@lodestar/api": "^1.13.0", + "@lodestar/config": "^1.13.0", + "@lodestar/db": "^1.13.0", + "@lodestar/params": "^1.13.0", + "@lodestar/state-transition": "^1.13.0", + "@lodestar/types": "^1.13.0", + "@lodestar/utils": "^1.13.0", "bigint-buffer": "^1.1.5", "strict-event-emitter-types": "^2.0.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.12.0", + "@lodestar/test-utils": "^1.13.0", "bigint-buffer": "^1.1.5", "rimraf": "^4.4.1" } diff --git a/packages/validator/src/index.ts b/packages/validator/src/index.ts index 39a331af6657..381db59b7c85 100644 --- a/packages/validator/src/index.ts +++ b/packages/validator/src/index.ts @@ -8,7 +8,7 @@ export type { ProposerConfig, } from "./services/validatorStore.js"; export {waitForGenesis} from "./genesis.js"; -export {getMetrics, type Metrics, type MetricsRegister} from "./metrics.js"; +export {getMetrics, type Metrics} from "./metrics.js"; // Remote signer client export { diff --git a/packages/validator/src/metrics.ts b/packages/validator/src/metrics.ts index 5bc3895414a2..4acf66955769 100644 --- a/packages/validator/src/metrics.ts +++ b/packages/validator/src/metrics.ts @@ -1,3 +1,5 @@ +import {MetricsRegisterExtra} from "@lodestar/utils"; + export enum MessageSource { forward = "forward", publish = "publish", @@ -11,64 +13,6 @@ export enum BeaconHealth { ERROR = 4, } -type LabelsGeneric = Record; -type CollectFn = (metric: Gauge) => void; - -interface Gauge { - // Sorry for this mess, `prom-client` API choices are not great - // If the function signature was `inc(value: number, labels?: Labels)`, this would be simpler - inc(value?: number): void; - inc(labels: Labels, value?: number): void; - inc(arg1?: Labels | number, arg2?: number): void; - - dec(value?: number): void; - dec(labels: Labels, value?: number): void; - dec(arg1?: Labels | number, arg2?: number): void; - - set(value: number): void; - set(labels: Labels, value: number): void; - set(arg1?: Labels | number, arg2?: number): void; - - 
addCollect(collectFn: CollectFn): void; -} - -interface Histogram { - startTimer(): () => number; - - observe(value: number): void; - observe(labels: Labels, values: number): void; - observe(arg1: Labels | number, arg2?: number): void; - - reset(): void; -} - -interface AvgMinMax { - set(values: number[]): void; - set(labels: Labels, values: number[]): void; - set(arg1?: Labels | number[], arg2?: number[]): void; -} - -type GaugeConfig = { - name: string; - help: string; - labelNames?: keyof Labels extends string ? (keyof Labels)[] : undefined; -}; - -type HistogramConfig = { - name: string; - help: string; - labelNames?: (keyof Labels)[]; - buckets?: number[]; -}; - -type AvgMinMaxConfig = GaugeConfig; - -export interface MetricsRegister { - gauge(config: GaugeConfig): Gauge; - histogram(config: HistogramConfig): Histogram; - avgMinMax(config: AvgMinMaxConfig): AvgMinMax; -} - export type Metrics = ReturnType; export type LodestarGitData = { @@ -81,10 +25,10 @@ export type LodestarGitData = { }; /** - * A collection of metrics used throughout the Gossipsub behaviour. + * A collection of metrics used by the validator client */ // eslint-disable-next-line @typescript-eslint/explicit-function-return-type -export function getMetrics(register: MetricsRegister, gitData: LodestarGitData) { +export function getMetrics(register: MetricsRegisterExtra, gitData: LodestarGitData) { // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types. 
// Track version, same as https://github.com/ChainSafe/lodestar/blob/6df28de64f12ea90b341b219229a47c8a25c9343/packages/lodestar/src/metrics/metrics/lodestar.ts#L17 @@ -92,7 +36,7 @@ export function getMetrics(register: MetricsRegister, gitData: LodestarGitData) .gauge({ name: "lodestar_version", help: "Lodestar version", - labelNames: Object.keys(gitData) as (keyof LodestarGitData)[], + labelNames: Object.keys(gitData) as [keyof LodestarGitData], }) .set(gitData, 1); @@ -367,7 +311,7 @@ export function getMetrics(register: MetricsRegister, gitData: LodestarGitData) labelNames: ["routeId"], }), - urlsScore: register.gauge<{urlIndex: string}>({ + urlsScore: register.gauge<{urlIndex: number}>({ name: "vc_rest_api_client_urls_score", help: "Current score of REST API URLs by url index", labelNames: ["urlIndex"], diff --git a/packages/validator/src/services/block.ts b/packages/validator/src/services/block.ts index 55b7c4cd74ce..b59902870c92 100644 --- a/packages/validator/src/services/block.ts +++ b/packages/validator/src/services/block.ts @@ -4,15 +4,14 @@ import { Slot, BLSSignature, allForks, - isBlindedBeaconBlock, + isBlindedSignedBeaconBlock, ProducedBlockSource, deneb, isBlockContents, - isBlindedBlockContents, } from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; -import {ForkPreBlobs, ForkBlobs, ForkSeq} from "@lodestar/params"; -import {extendError, prettyBytes} from "@lodestar/utils"; +import {ForkPreBlobs, ForkBlobs, ForkSeq, ForkExecution} from "@lodestar/params"; +import {ETH_TO_GWEI, ETH_TO_WEI, extendError, gweiToWei, prettyBytes} from "@lodestar/utils"; import {Api, ApiError, routes} from "@lodestar/api"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; @@ -21,42 +20,45 @@ import {formatBigDecimal} from "../util/format.js"; import {ValidatorStore} from "./validatorStore.js"; import {BlockDutiesService, GENESIS_SLOT} from "./blockDuties.js"; -const ETH_TO_WEI = BigInt("1000000000000000000"); 
// display upto 5 decimal places const MAX_DECIMAL_FACTOR = BigInt("100000"); // The following combination of blocks and blobs can be produced // i) a full block pre deneb // ii) a full block and full blobs post deneb -// iii) a blinded block pre deneb as a result of beacon/execution race -// iv) a blinded block + blinded blobs as a result of beacon/execution race +// iii) a blinded block post bellatrix type FullOrBlindedBlockWithContents = | { version: ForkPreBlobs; block: allForks.BeaconBlock; - blobs: null; + contents: null; executionPayloadBlinded: false; + executionPayloadSource: ProducedBlockSource.engine; } | { version: ForkBlobs; block: allForks.BeaconBlock; - blobs: deneb.BlobSidecars; + contents: { + kzgProofs: deneb.KZGProofs; + blobs: deneb.Blobs; + }; executionPayloadBlinded: false; + executionPayloadSource: ProducedBlockSource.engine; } | { - version: ForkPreBlobs; - block: allForks.BlindedBeaconBlock; - blobs: null; - executionPayloadBlinded: true; - } - | { - version: ForkBlobs; + version: ForkExecution; block: allForks.BlindedBeaconBlock; - blobs: deneb.BlindedBlobSidecars; + contents: null; executionPayloadBlinded: true; + executionPayloadSource: ProducedBlockSource; }; type DebugLogCtx = {debugLogCtx: Record}; +type BlockProposalOpts = { + useProduceBlockV3: boolean; + broadcastValidation: routes.beacon.BroadcastValidation; + blindedLocal: boolean; +}; /** * Service that sets up and handles validator block proposal duties. 
*/ @@ -70,7 +72,7 @@ export class BlockProposingService { private readonly clock: IClock, private readonly validatorStore: ValidatorStore, private readonly metrics: Metrics | null, - private readonly opts: {useProduceBlockV3: boolean; broadcastValidation: routes.beacon.BroadcastValidation} + private readonly opts: BlockProposalOpts ) { this.dutiesService = new BlockDutiesService( config, @@ -121,6 +123,7 @@ export class BlockProposingService { const strictFeeRecipientCheck = this.validatorStore.strictFeeRecipientCheck(pubkeyHex); const builderSelection = this.validatorStore.getBuilderSelection(pubkeyHex); const feeRecipient = this.validatorStore.getFeeRecipient(pubkeyHex); + const blindedLocal = this.opts.blindedLocal; this.logger.debug("Producing block", { ...debugLogCtx, @@ -128,42 +131,36 @@ export class BlockProposingService { feeRecipient, strictFeeRecipientCheck, useProduceBlockV3: this.opts.useProduceBlockV3, + blindedLocal, }); this.metrics?.proposerStepCallProduceBlock.observe(this.clock.secFromSlot(slot)); const produceBlockFn = this.opts.useProduceBlockV3 ? 
this.produceBlockWrapper : this.produceBlockV2Wrapper; - const blockContents = await produceBlockFn(this.config, slot, randaoReveal, graffiti, { + const produceOpts = { feeRecipient, strictFeeRecipientCheck, builderSelection, - }).catch((e: Error) => { - this.metrics?.blockProposingErrors.inc({error: "produce"}); - throw extendError(e, "Failed to produce block"); - }); + blindedLocal, + }; + const blockContents = await produceBlockFn(this.config, slot, randaoReveal, graffiti, produceOpts).catch( + (e: Error) => { + this.metrics?.blockProposingErrors.inc({error: "produce"}); + throw extendError(e, "Failed to produce block"); + } + ); this.logger.debug("Produced block", {...debugLogCtx, ...blockContents.debugLogCtx}); this.metrics?.blocksProduced.inc(); - const signedBlockPromise = this.validatorStore.signBlock(pubkey, blockContents.block, slot, this.logger); - const signedBlobPromises = - blockContents.blobs !== null - ? blockContents.blobs.map((blob) => this.validatorStore.signBlob(pubkey, blob, slot)) - : undefined; - let signedBlock: allForks.FullOrBlindedSignedBeaconBlock, - signedBlobs: allForks.FullOrBlindedSignedBlobSidecar[] | undefined; - if (signedBlobPromises !== undefined) { - [signedBlock, ...signedBlobs] = await Promise.all([signedBlockPromise, ...signedBlobPromises]); - } else { - signedBlock = await signedBlockPromise; - signedBlobs = undefined; - } + const signedBlock = await this.validatorStore.signBlock(pubkey, blockContents.block, slot); - await this.publishBlockWrapper(signedBlock, signedBlobs, { - broadcastValidation: this.opts.broadcastValidation, - }).catch((e: Error) => { + const {broadcastValidation} = this.opts; + const publishOpts = {broadcastValidation}; + await this.publishBlockWrapper(signedBlock, blockContents.contents, publishOpts).catch((e: Error) => { this.metrics?.blockProposingErrors.inc({error: "publish"}); throw extendError(e, "Failed to publish block"); }); + 
this.metrics?.proposerStepCallPublishBlock.observe(this.clock.secFromSlot(slot)); this.metrics?.blocksPublished.inc(); this.logger.info("Published block", {...logCtx, graffiti, ...blockContents.debugLogCtx}); @@ -174,30 +171,22 @@ export class BlockProposingService { private publishBlockWrapper = async ( signedBlock: allForks.FullOrBlindedSignedBeaconBlock, - signedBlobSidecars?: allForks.FullOrBlindedSignedBlobSidecar[], + contents: {kzgProofs: deneb.KZGProofs; blobs: deneb.Blobs} | null, opts: {broadcastValidation?: routes.beacon.BroadcastValidation} = {} ): Promise => { - if (signedBlobSidecars === undefined) { - ApiError.assert( - isBlindedBeaconBlock(signedBlock.message) - ? await this.api.beacon.publishBlindedBlockV2(signedBlock as allForks.SignedBlindedBeaconBlock, opts) - : await this.api.beacon.publishBlockV2(signedBlock as allForks.SignedBeaconBlock, opts) - ); + if (isBlindedSignedBeaconBlock(signedBlock)) { + if (contents !== null) { + this.logger.warn( + "Ignoring contents while publishing blinded block - publishing beacon should assemble it from its local cache or builder" + ); + } + ApiError.assert(await this.api.beacon.publishBlindedBlockV2(signedBlock, opts)); } else { - ApiError.assert( - isBlindedBeaconBlock(signedBlock.message) - ? 
await this.api.beacon.publishBlindedBlockV2( - { - signedBlindedBlock: signedBlock, - signedBlindedBlobSidecars: signedBlobSidecars, - } as allForks.SignedBlindedBlockContents, - opts - ) - : await this.api.beacon.publishBlockV2( - {signedBlock, signedBlobSidecars} as allForks.SignedBlockContents, - opts - ) - ); + if (contents === null) { + ApiError.assert(await this.api.beacon.publishBlockV2(signedBlock, opts)); + } else { + ApiError.assert(await this.api.beacon.publishBlockV2({...contents, signedBlock}, opts)); + } } }; @@ -206,20 +195,28 @@ export class BlockProposingService { slot: Slot, randaoReveal: BLSSignature, graffiti: string, - {feeRecipient, strictFeeRecipientCheck, builderSelection}: routes.validator.ExtraProduceBlockOps + {feeRecipient, strictFeeRecipientCheck, builderSelection, blindedLocal}: routes.validator.ExtraProduceBlockOps ): Promise => { const res = await this.api.validator.produceBlockV3(slot, randaoReveal, graffiti, false, { feeRecipient, builderSelection, strictFeeRecipientCheck, + blindedLocal, }); ApiError.assert(res, "Failed to produce block: validator.produceBlockV2"); const {response} = res; const debugLogCtx = { - source: response.executionPayloadBlinded ? 
ProducedBlockSource.builder : ProducedBlockSource.engine, + executionPayloadSource: response.executionPayloadSource, + executionPayloadBlinded: response.executionPayloadBlinded, // winston logger doesn't like bigint executionPayloadValue: `${formatBigDecimal(response.executionPayloadValue, ETH_TO_WEI, MAX_DECIMAL_FACTOR)} ETH`, + consensusBlockValue: `${formatBigDecimal(response.consensusBlockValue, ETH_TO_GWEI, MAX_DECIMAL_FACTOR)} ETH`, + totalBlockValue: `${formatBigDecimal( + response.executionPayloadValue + gweiToWei(response.consensusBlockValue), + ETH_TO_WEI, + MAX_DECIMAL_FACTOR + )} ETH`, // TODO PR: should be used in api call instead of adding in log strictFeeRecipientCheck, builderSelection, @@ -247,14 +244,23 @@ export class BlockProposingService { const res = await this.api.validator.produceBlockV2(slot, randaoReveal, graffiti); ApiError.assert(res, "Failed to produce block: validator.produceBlockV2"); const {response} = res; - return parseProduceBlockResponse({executionPayloadBlinded: false, ...response}, debugLogCtx); + const executionPayloadSource = ProducedBlockSource.engine; + + return parseProduceBlockResponse( + {executionPayloadBlinded: false, executionPayloadSource, ...response}, + debugLogCtx + ); } else { Object.assign(debugLogCtx, {api: "produceBlindedBlock"}); const res = await this.api.validator.produceBlindedBlock(slot, randaoReveal, graffiti); ApiError.assert(res, "Failed to produce block: validator.produceBlockV2"); const {response} = res; + const executionPayloadSource = ProducedBlockSource.builder; - return parseProduceBlockResponse({executionPayloadBlinded: true, ...response}, debugLogCtx); + return parseProduceBlockResponse( + {executionPayloadBlinded: true, executionPayloadSource, ...response}, + debugLogCtx + ); } }; } @@ -264,38 +270,31 @@ function parseProduceBlockResponse( debugLogCtx: Record ): FullOrBlindedBlockWithContents & DebugLogCtx { if (response.executionPayloadBlinded) { - if (isBlindedBlockContents(response.data)) { 
- return { - block: response.data.blindedBlock, - blobs: response.data.blindedBlobSidecars, - version: response.version, - executionPayloadBlinded: true, - debugLogCtx, - } as FullOrBlindedBlockWithContents & DebugLogCtx; - } else { - return { - block: response.data, - blobs: null, - version: response.version, - executionPayloadBlinded: true, - debugLogCtx, - } as FullOrBlindedBlockWithContents & DebugLogCtx; - } + return { + block: response.data, + contents: null, + version: response.version, + executionPayloadBlinded: true, + executionPayloadSource: response.executionPayloadSource, + debugLogCtx, + } as FullOrBlindedBlockWithContents & DebugLogCtx; } else { if (isBlockContents(response.data)) { return { block: response.data.block, - blobs: response.data.blobSidecars, + contents: {blobs: response.data.blobs, kzgProofs: response.data.kzgProofs}, version: response.version, executionPayloadBlinded: false, + executionPayloadSource: response.executionPayloadSource, debugLogCtx, } as FullOrBlindedBlockWithContents & DebugLogCtx; } else { return { block: response.data, - blobs: null, + contents: null, version: response.version, executionPayloadBlinded: false, + executionPayloadSource: response.executionPayloadSource, debugLogCtx, } as FullOrBlindedBlockWithContents & DebugLogCtx; } diff --git a/packages/validator/src/services/chainHeaderTracker.ts b/packages/validator/src/services/chainHeaderTracker.ts index ed8471721e32..ebb20670bb24 100644 --- a/packages/validator/src/services/chainHeaderTracker.ts +++ b/packages/validator/src/services/chainHeaderTracker.ts @@ -31,8 +31,10 @@ export class ChainHeaderTracker { ) {} start(signal: AbortSignal): void { - void this.api.events.eventstream([EventType.head], signal, this.onHeadUpdate); - this.logger.verbose("Subscribed to head event"); + this.logger.verbose("Subscribing to head event"); + this.api.events + .eventstream([EventType.head], signal, this.onHeadUpdate) + .catch((e) => this.logger.error("Failed to subscribe to head 
event", {}, e)); } getCurrentChainHead(slot: Slot): Root | null { diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts index e2736a09754a..8cafaa5b14b6 100644 --- a/packages/validator/src/services/validatorStore.ts +++ b/packages/validator/src/services/validatorStore.ts @@ -7,7 +7,6 @@ import { computeDomain, ZERO_HASH, blindedOrFullBlockHashTreeRoot, - blindedOrFullBlobSidecarHashTreeRoot, } from "@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; import { @@ -20,7 +19,6 @@ import { DOMAIN_SYNC_COMMITTEE, DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF, DOMAIN_APPLICATION_BUILDER, - DOMAIN_BLOB_SIDECAR, } from "@lodestar/params"; import { allForks, @@ -129,6 +127,8 @@ export const defaultOptions = { useProduceBlockV3: false, // spec asks for gossip validation by default broadcastValidation: routes.beacon.BroadcastValidation.gossip, + // should request fetching the locally produced block in blinded format + blindedLocal: false, }; /** @@ -395,37 +395,6 @@ export class ValidatorStore { } as allForks.FullOrBlindedSignedBeaconBlock; } - async signBlob( - pubkey: BLSPubkey, - blindedOrFull: allForks.FullOrBlindedBlobSidecar, - currentSlot: Slot - ): Promise { - // Make sure the block slot is not higher than the current slot to avoid potential attacks. 
- if (blindedOrFull.slot > currentSlot) { - throw Error(`Not signing block with slot ${blindedOrFull.slot} greater than current slot ${currentSlot}`); - } - - // Duties are filtered before-hard by doppelganger-safe, this assert should never throw - this.assertDoppelgangerSafe(pubkey); - - const signingSlot = blindedOrFull.slot; - const domain = this.config.getDomain(signingSlot, DOMAIN_BLOB_SIDECAR); - const blobRoot = blindedOrFullBlobSidecarHashTreeRoot(this.config, blindedOrFull); - // Don't use `computeSigningRoot()` here to compute the objectRoot in typesafe function blindedOrFullBlockHashTreeRoot() - const signingRoot = ssz.phase0.SigningData.hashTreeRoot({objectRoot: blobRoot, domain}); - - // Slashing protection is not required as blobs are binded to blocks which are already protected - const signableMessage: SignableMessage = { - type: SignableMessageType.BLOB, - data: blindedOrFull, - }; - - return { - message: blindedOrFull, - signature: await this.getSignature(pubkey, signingRoot, signingSlot, signableMessage), - } as allForks.FullOrBlindedSignedBlobSidecar; - } - async signRandao(pubkey: BLSPubkey, slot: Slot): Promise { const signingSlot = slot; const domain = this.config.getDomain(slot, DOMAIN_RANDAO); diff --git a/packages/validator/src/util/externalSignerClient.ts b/packages/validator/src/util/externalSignerClient.ts index 2716533e536f..90c6e1f464c8 100644 --- a/packages/validator/src/util/externalSignerClient.ts +++ b/packages/validator/src/util/externalSignerClient.ts @@ -15,7 +15,6 @@ export enum SignableMessageType { AGGREGATE_AND_PROOF = "AGGREGATE_AND_PROOF", ATTESTATION = "ATTESTATION", BLOCK_V2 = "BLOCK_V2", - BLOB = "BLOB", DEPOSIT = "DEPOSIT", RANDAO_REVEAL = "RANDAO_REVEAL", VOLUNTARY_EXIT = "VOLUNTARY_EXIT", @@ -65,7 +64,6 @@ export type SignableMessage = | {type: SignableMessageType.AGGREGATE_AND_PROOF; data: phase0.AggregateAndProof} | {type: SignableMessageType.ATTESTATION; data: phase0.AttestationData} | {type: 
SignableMessageType.BLOCK_V2; data: allForks.FullOrBlindedBeaconBlock} - | {type: SignableMessageType.BLOB; data: allForks.FullOrBlindedBlobSidecar} | {type: SignableMessageType.DEPOSIT; data: ValueOf} | {type: SignableMessageType.RANDAO_REVEAL; data: {epoch: Epoch}} | {type: SignableMessageType.VOLUNTARY_EXIT; data: phase0.VoluntaryExit} @@ -88,7 +86,6 @@ const requiresForkInfo: Record = { [SignableMessageType.SYNC_COMMITTEE_CONTRIBUTION_AND_PROOF]: true, [SignableMessageType.VALIDATOR_REGISTRATION]: false, [SignableMessageType.BLS_TO_EXECUTION_CHANGE]: true, - [SignableMessageType.BLOB]: true, }; type Web3SignerSerializedRequest = { @@ -232,9 +229,5 @@ function serializerSignableMessagePayload(config: BeaconConfig, payload: Signabl case SignableMessageType.BLS_TO_EXECUTION_CHANGE: return {BLS_TO_EXECUTION_CHANGE: ssz.capella.BLSToExecutionChange.toJson(payload.data)}; - - case SignableMessageType.BLOB: - // TODO DENEB: freetheblobs - throw Error("web3signer for blob signing not yet implemented"); } } diff --git a/packages/validator/src/util/params.ts b/packages/validator/src/util/params.ts index 37908afaf86c..1431f4f3c56e 100644 --- a/packages/validator/src/util/params.ts +++ b/packages/validator/src/util/params.ts @@ -210,5 +210,6 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record opts.abortController.signal}, {config, logger}); + logger.info("Beacon node", {urls: urls.map(toSafePrintableUrl).toString()}); } else { api = opts.api; } diff --git a/packages/validator/test/unit/services/block.test.ts b/packages/validator/test/unit/services/block.test.ts index 0c78fdc82eec..f879017c95f1 100644 --- a/packages/validator/test/unit/services/block.test.ts +++ b/packages/validator/test/unit/services/block.test.ts @@ -5,7 +5,7 @@ import {toHexString} from "@chainsafe/ssz"; import {createChainForkConfig} from "@lodestar/config"; import {config as mainnetConfig} from "@lodestar/config/default"; import {sleep} from "@lodestar/utils"; -import {ssz} from 
"@lodestar/types"; +import {ssz, ProducedBlockSource} from "@lodestar/types"; import {HttpStatusCode, routes} from "@lodestar/api"; import {ForkName} from "@lodestar/params"; import {BlockProposingService} from "../../../src/services/block.js"; @@ -53,6 +53,7 @@ describe("BlockDutiesService", function () { const blockService = new BlockProposingService(config, loggerVc, api, clock, validatorStore, null, { useProduceBlockV3: true, broadcastValidation: routes.beacon.BroadcastValidation.consensus, + blindedLocal: false, }); const signedBlock = ssz.phase0.SignedBeaconBlock.defaultValue(); @@ -63,7 +64,9 @@ describe("BlockDutiesService", function () { data: signedBlock.message, version: ForkName.bellatrix, executionPayloadValue: BigInt(1), + consensusBlockValue: BigInt(1), executionPayloadBlinded: false, + executionPayloadSource: ProducedBlockSource.engine, }, ok: true, status: HttpStatusCode.OK, @@ -84,4 +87,57 @@ describe("BlockDutiesService", function () { "wrong publishBlock() args" ); }); + + it("Should produce, sign, and publish a blinded block", async function () { + // Reply with some duties + const slot = 0; // genesisTime is right now, so test with slot = currentSlot + api.validator.getProposerDuties.resolves({ + response: { + dependentRoot: ZERO_HASH_HEX, + executionOptimistic: false, + data: [{slot: slot, validatorIndex: 0, pubkey: pubkeys[0]}], + }, + ok: true, + status: HttpStatusCode.OK, + }); + + const clock = new ClockMock(); + // use produceBlockV3 + const blockService = new BlockProposingService(config, loggerVc, api, clock, validatorStore, null, { + useProduceBlockV3: true, + broadcastValidation: routes.beacon.BroadcastValidation.consensus, + blindedLocal: true, + }); + + const signedBlock = ssz.bellatrix.SignedBlindedBeaconBlock.defaultValue(); + validatorStore.signRandao.resolves(signedBlock.message.body.randaoReveal); + validatorStore.signBlock.callsFake(async (_, block) => ({message: block, signature: signedBlock.signature})); + 
api.validator.produceBlockV3.resolves({ + response: { + data: signedBlock.message, + version: ForkName.bellatrix, + executionPayloadValue: BigInt(1), + consensusBlockValue: BigInt(1), + executionPayloadBlinded: true, + executionPayloadSource: ProducedBlockSource.engine, + }, + ok: true, + status: HttpStatusCode.OK, + }); + api.beacon.publishBlindedBlockV2.resolves(); + + // Trigger block production for slot 1 + const notifyBlockProductionFn = blockService["dutiesService"]["notifyBlockProductionFn"]; + notifyBlockProductionFn(1, [pubkeys[0]]); + + // Resolve all promises + await sleep(20, controller.signal); + + // Must have submitted the block received on signBlock() + expect(api.beacon.publishBlindedBlockV2.callCount).to.equal(1, "publishBlindedBlockV2() must be called once"); + expect(api.beacon.publishBlindedBlockV2.getCall(0).args).to.deep.equal( + [signedBlock, {broadcastValidation: routes.beacon.BroadcastValidation.consensus}], + "wrong publishBlock() args" + ); + }); }); diff --git a/scripts/prepare-docs.sh b/scripts/prepare-docs.sh index 5475f22c398e..78b508bf5f29 100755 --- a/scripts/prepare-docs.sh +++ b/scripts/prepare-docs.sh @@ -1,17 +1,20 @@ #!/bin/bash DOCS_DIR=docs +ASSETS_DIR=assets # exit when any command fails set -e -# Move autogenerated reference -mkdir -p $DOCS_DIR/reference -mv packages/cli/docs/cli.md $DOCS_DIR/reference/cli.md +# Copy contributing docs +cp CONTRIBUTING.md $DOCS_DIR/pages/contribution/getting-started.md +cp SECURITY.md $DOCS_DIR/pages/security.md -# Copy contributing doc -cp CONTRIBUTING.md $DOCS_DIR/contributing.md +# Copy package README.md to docs +cp -r packages/light-client/README.md $DOCS_DIR/pages/lightclient-prover/lightclient.md +cp -r packages/prover/README.md $DOCS_DIR/pages/lightclient-prover/prover.md # Copy visual assets -rm -rf $DOCS_DIR/assets -cp -r assets $DOCS_DIR/assets +rm -rf $DOCS_DIR/pages/assets $DOCS_DIR/pages/images +cp -r $ASSETS_DIR $DOCS_DIR/pages/assets +cp -r $DOCS_DIR/images 
$DOCS_DIR/pages/images diff --git a/scripts/vitest/customMatchers.ts b/scripts/vitest/customMatchers.ts index 04b665bf3242..227c0a2c0c76 100644 --- a/scripts/vitest/customMatchers.ts +++ b/scripts/vitest/customMatchers.ts @@ -2,14 +2,14 @@ import {expect} from "vitest"; expect.extend({ - toBeValidEpochCommittee: ( + toBeValidEpochCommittee( committee: {index: number; slot: number; validators: unknown[]}, { committeeCount, validatorsPerCommittee, slotsPerEpoch, }: {committeeCount: number; validatorsPerCommittee: number; slotsPerEpoch: number} - ) => { + ) { if (committee.index < 0 || committee.index > committeeCount - 1) { return { message: () => @@ -39,10 +39,10 @@ expect.extend({ pass: true, }; }, - toBeWithMessage: (received: unknown, expected: unknown, message: string) => { - if (received === expected) { + toBeWithMessage(received: unknown, expected: unknown, message: string) { + if (Object.is(received, expected)) { return { - message: () => "Expected value is truthy", + message: () => "Received value is the same as expected value", pass: true, }; } @@ -50,6 +50,36 @@ expect.extend({ return { pass: false, message: () => message, + actual: received, + expected, + }; + }, + toSatisfy(received: unknown, func: (received: unknown) => boolean) { + if (func(received)) { + return { + message: () => "Received value satisfied the condition", + pass: true, + }; + } + + return { + pass: false, + message: () => "Received value did not satisfy the condition", + }; + }, + toEqualWithMessage(received: unknown, expected: unknown, message: string) { + if (this.equals(received, expected)) { + return { + message: () => "Received value equals expected value", + pass: true, + }; + } + + return { + pass: false, + message: () => message, + actual: received, + expected, }; }, }); diff --git a/scripts/vitest/polyfills/perf_hooks.js b/scripts/vitest/polyfills/perf_hooks.js new file mode 100644 index 000000000000..a96781ba23df --- /dev/null +++ b/scripts/vitest/polyfills/perf_hooks.js @@ 
-0,0 +1,2 @@ +export default null; +export const performance = {}; diff --git a/tsconfig.build.json b/tsconfig.build.json index d4868d14e13a..d767c8eaec8a 100644 --- a/tsconfig.build.json +++ b/tsconfig.build.json @@ -23,6 +23,12 @@ "declaration": true, "declarationMap": true, "incremental": true, - "preserveWatchOutput": true + "preserveWatchOutput": true, + + + // There are two duplicate type definitions included from `chai` and `vitest` packages. + // There is one invalid type declaration introduced from `webdriverio -> got` package. + // TODO: Once we completely remove `chai` and upgrade `webdriverio` we can enable this check again. + "skipLibCheck": true, } } diff --git a/types/vitest/index.d.ts b/types/vitest/index.d.ts index 38ccf5252d52..387edcfa5279 100644 --- a/types/vitest/index.d.ts +++ b/types/vitest/index.d.ts @@ -26,10 +26,23 @@ interface CustomMatchers { * ``` * */ toBeWithMessage(expected: unknown, message: string): R; + /** + * @deprecated + * We highly recommend to not use this matcher instead use detail test case with .toEqual + * where you don't need message to explain assertion + * */ + toEqualWithMessage(expected: unknown, message: string): R; +} + +interface CustomAsymmetricMatchers extends CustomMatchers { + /** + * Non-asymmetric matcher already exists, we just need to add asymmetric version + */ + toSatisfy(func: (received: unknown) => boolean): R; } declare module "vitest" { // eslint-disable-next-line @typescript-eslint/no-explicit-any interface Assertion extends CustomMatchers {} - interface AsymmetricMatchersContaining extends CustomMatchers {} + interface AsymmetricMatchersContaining extends CustomAsymmetricMatchers {} } diff --git a/vitest.base.browser.config.ts b/vitest.base.browser.config.ts new file mode 100644 index 000000000000..5559e0f77b9c --- /dev/null +++ b/vitest.base.browser.config.ts @@ -0,0 +1,49 @@ +import path from "node:path"; +import {defineConfig} from "vitest/config"; +const __dirname = new URL(".", 
import.meta.url).pathname; +import {nodePolyfills} from "vite-plugin-node-polyfills"; +import topLevelAwait from "vite-plugin-top-level-await"; + +export default defineConfig({ + plugins: [ + topLevelAwait(), + nodePolyfills({ + include: ["buffer", "process", "util", "string_decoder", "url", "querystring", "events"], + globals: {Buffer: true, process: true}, + protocolImports: true, + }), + ], + test: { + include: ["**/*.test.ts"], + exclude: [ + "**/*.node.test.ts", + "**/node_modules/**", + "**/dist/**", + "**/lib/**", + "**/cypress/**", + "**/.{idea,git,cache,output,temp}/**", + "**/{karma,rollup,webpack,vite,vitest,jest,ava,babel,nyc,cypress,tsup,build}.config.*", + ], + setupFiles: [path.join(__dirname, "./scripts/vitest/customMatchers.ts")], + reporters: ["default", "hanging-process"], + coverage: { + enabled: false, + }, + browser: { + name: "chrome", + headless: true, + provider: "webdriverio", + slowHijackESM: false, + providerOptions: { + capabilities: { + browserVersion: "latest", + }, + }, + }, + }, + resolve: { + alias: { + "node:perf_hooks": path.join(__dirname, "scripts/vitest/polyfills/perf_hooks.js"), + }, + }, +}); diff --git a/vitest.base.config.ts b/vitest.base.config.ts index 34c0d56e40d5..2c12cbf41b9b 100644 --- a/vitest.base.config.ts +++ b/vitest.base.config.ts @@ -4,6 +4,16 @@ const __dirname = new URL(".", import.meta.url).pathname; export default defineConfig({ test: { + pool: "threads", + include: ["**/*.test.ts"], + exclude: [ + "**/*.browser.test.ts", + "**/node_modules/**", + "**/dist/**", + "**/cypress/**", + "**/.{idea,git,cache,output,temp}/**", + "**/{karma,rollup,webpack,vite,vitest,jest,ava,babel,nyc,cypress,tsup,build}.config.*", + ], setupFiles: [path.join(__dirname, "./scripts/vitest/customMatchers.ts")], reporters: ["default", "hanging-process"], coverage: { diff --git a/yarn.lock b/yarn.lock index 8c78c64bd89c..291225ebedf5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -346,6 +346,11 @@ resolved 
"https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== +"@babel/helper-string-parser@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" + integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== + "@babel/helper-validator-identifier@^7.10.4": version "7.14.9" resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.9.tgz" @@ -356,6 +361,11 @@ resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-identifier@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" @@ -393,6 +403,11 @@ resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.15.3.tgz" integrity sha512-O0L6v/HvqbdJawj0iBEfVQMc3/6WP+AeOsovsIgBFyJaG+W2w7eqvZB7puddATmWuARlm1SX7DwxJ/JJUnDpEA== +"@babel/parser@^7.23.3": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" + integrity 
sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== + "@babel/template@^7.10.4": version "7.10.4" resolved "https://registry.npmjs.org/@babel/template/-/template-7.10.4.tgz" @@ -426,6 +441,15 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" +"@babel/types@^7.23.3": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" + integrity sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w== + dependencies: + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@balena/dockerignore@^1.0.2": version "1.0.2" resolved "https://registry.yarnpkg.com/@balena/dockerignore/-/dockerignore-1.0.2.tgz#9ffe4726915251e8eb69f44ef3547e0da2c03e0d" @@ -565,13 +589,13 @@ resolved "https://registry.yarnpkg.com/@chainsafe/is-ip/-/is-ip-2.0.2.tgz#7311e7403f11d8c5cfa48111f56fcecaac37c9f6" integrity sha512-ndGqEMG1W5WkGagaqOZHpPU172AGdxr+LD15sv3WIUvT5oCFUrG1Y0CW/v2Egwj4JXEvSibaIIIqImsm98y1nA== -"@chainsafe/libp2p-gossipsub@^10.1.0": - version "10.1.0" - resolved "https://registry.yarnpkg.com/@chainsafe/libp2p-gossipsub/-/libp2p-gossipsub-10.1.0.tgz#29c2e3da2bbf1dc68ae171c5ac777bce9ca88c2c" - integrity sha512-mOVYJAvxYRkh2HeggNFW/7ukEccQDVEI9LPhvlnJk7gnJhyJJ6mhZxUAaytfp3v3qTkmeBRnEL0eJOQBm+MoOA== +"@chainsafe/libp2p-gossipsub@^10.1.1": + version "10.1.1" + resolved "https://registry.yarnpkg.com/@chainsafe/libp2p-gossipsub/-/libp2p-gossipsub-10.1.1.tgz#906aa2a67efb5fea0bacc6721ef4e7ee4e353d7e" + integrity sha512-nou65zlGaUIPwlUq7ceEVpszJX4tBWRRanppYaKsJk7rbDeIKRJQla2duATGOI3fwj1+pGSlDQuF2zG7P0VJQw== dependencies: "@libp2p/crypto" "^2.0.0" - "@libp2p/interface" "^0.1.0" + "@libp2p/interface" "^0.1.4" "@libp2p/interface-internal" "^0.1.0" "@libp2p/logger" "^3.0.0" "@libp2p/peer-id" "^3.0.0" @@ -711,115 +735,115 @@ optionalDependencies: 
global-agent "^3.0.0" -"@esbuild/android-arm64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz#984b4f9c8d0377443cc2dfcef266d02244593622" - integrity sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ== - -"@esbuild/android-arm@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.18.20.tgz#fedb265bc3a589c84cc11f810804f234947c3682" - integrity sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw== - -"@esbuild/android-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.18.20.tgz#35cf419c4cfc8babe8893d296cd990e9e9f756f2" - integrity sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg== - -"@esbuild/darwin-arm64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz#08172cbeccf95fbc383399a7f39cfbddaeb0d7c1" - integrity sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA== - -"@esbuild/darwin-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz#d70d5790d8bf475556b67d0f8b7c5bdff053d85d" - integrity sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ== - -"@esbuild/freebsd-arm64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz#98755cd12707f93f210e2494d6a4b51b96977f54" - integrity sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw== - -"@esbuild/freebsd-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz#c1eb2bff03915f87c29cece4c1a7fa1f423b066e" - integrity 
sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ== - -"@esbuild/linux-arm64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz#bad4238bd8f4fc25b5a021280c770ab5fc3a02a0" - integrity sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA== - -"@esbuild/linux-arm@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz#3e617c61f33508a27150ee417543c8ab5acc73b0" - integrity sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg== - -"@esbuild/linux-ia32@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz#699391cccba9aee6019b7f9892eb99219f1570a7" - integrity sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA== - -"@esbuild/linux-loong64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz#e6fccb7aac178dd2ffb9860465ac89d7f23b977d" - integrity sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg== - -"@esbuild/linux-mips64el@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz#eeff3a937de9c2310de30622a957ad1bd9183231" - integrity sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ== - -"@esbuild/linux-ppc64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz#2f7156bde20b01527993e6881435ad79ba9599fb" - integrity sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA== - -"@esbuild/linux-riscv64@0.18.20": - version "0.18.20" - resolved 
"https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz#6628389f210123d8b4743045af8caa7d4ddfc7a6" - integrity sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A== - -"@esbuild/linux-s390x@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz#255e81fb289b101026131858ab99fba63dcf0071" - integrity sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ== - -"@esbuild/linux-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz#c7690b3417af318a9b6f96df3031a8865176d338" - integrity sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w== - -"@esbuild/netbsd-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz#30e8cd8a3dded63975e2df2438ca109601ebe0d1" - integrity sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A== - -"@esbuild/openbsd-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz#7812af31b205055874c8082ea9cf9ab0da6217ae" - integrity sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg== - -"@esbuild/sunos-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz#d5c275c3b4e73c9b0ecd38d1ca62c020f887ab9d" - integrity sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ== - -"@esbuild/win32-arm64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz#73bc7f5a9f8a77805f357fab97f290d0e4820ac9" - integrity sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg== - 
-"@esbuild/win32-ia32@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz#ec93cbf0ef1085cc12e71e0d661d20569ff42102" - integrity sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g== - -"@esbuild/win32-x64@0.18.20": - version "0.18.20" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz#786c5f41f043b07afb1af37683d7c33668858f6d" - integrity sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ== +"@esbuild/android-arm64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.19.8.tgz#fb7130103835b6d43ea499c3f30cfb2b2ed58456" + integrity sha512-B8JbS61bEunhfx8kasogFENgQfr/dIp+ggYXwTqdbMAgGDhRa3AaPpQMuQU0rNxDLECj6FhDzk1cF9WHMVwrtA== + +"@esbuild/android-arm@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.19.8.tgz#b46e4d9e984e6d6db6c4224d72c86b7757e35bcb" + integrity sha512-31E2lxlGM1KEfivQl8Yf5aYU/mflz9g06H6S15ITUFQueMFtFjESRMoDSkvMo8thYvLBax+VKTPlpnx+sPicOA== + +"@esbuild/android-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.19.8.tgz#a13db9441b5a4f4e4fec4a6f8ffacfea07888db7" + integrity sha512-rdqqYfRIn4jWOp+lzQttYMa2Xar3OK9Yt2fhOhzFXqg0rVWEfSclJvZq5fZslnz6ypHvVf3CT7qyf0A5pM682A== + +"@esbuild/darwin-arm64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.19.8.tgz#49f5718d36541f40dd62bfdf84da9c65168a0fc2" + integrity sha512-RQw9DemMbIq35Bprbboyf8SmOr4UXsRVxJ97LgB55VKKeJOOdvsIPy0nFyF2l8U+h4PtBx/1kRf0BelOYCiQcw== + +"@esbuild/darwin-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.19.8.tgz#75c5c88371eea4bfc1f9ecfd0e75104c74a481ac" + integrity 
sha512-3sur80OT9YdeZwIVgERAysAbwncom7b4bCI2XKLjMfPymTud7e/oY4y+ci1XVp5TfQp/bppn7xLw1n/oSQY3/Q== + +"@esbuild/freebsd-arm64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.8.tgz#9d7259fea4fd2b5f7437b52b542816e89d7c8575" + integrity sha512-WAnPJSDattvS/XtPCTj1tPoTxERjcTpH6HsMr6ujTT+X6rylVe8ggxk8pVxzf5U1wh5sPODpawNicF5ta/9Tmw== + +"@esbuild/freebsd-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.19.8.tgz#abac03e1c4c7c75ee8add6d76ec592f46dbb39e3" + integrity sha512-ICvZyOplIjmmhjd6mxi+zxSdpPTKFfyPPQMQTK/w+8eNK6WV01AjIztJALDtwNNfFhfZLux0tZLC+U9nSyA5Zg== + +"@esbuild/linux-arm64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.19.8.tgz#c577932cf4feeaa43cb9cec27b89cbe0df7d9098" + integrity sha512-z1zMZivxDLHWnyGOctT9JP70h0beY54xDDDJt4VpTX+iwA77IFsE1vCXWmprajJGa+ZYSqkSbRQ4eyLCpCmiCQ== + +"@esbuild/linux-arm@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.19.8.tgz#d6014d8b98b5cbc96b95dad3d14d75bb364fdc0f" + integrity sha512-H4vmI5PYqSvosPaTJuEppU9oz1dq2A7Mr2vyg5TF9Ga+3+MGgBdGzcyBP7qK9MrwFQZlvNyJrvz6GuCaj3OukQ== + +"@esbuild/linux-ia32@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.19.8.tgz#2379a0554307d19ac4a6cdc15b08f0ea28e7a40d" + integrity sha512-1a8suQiFJmZz1khm/rDglOc8lavtzEMRo0v6WhPgxkrjcU0LkHj+TwBrALwoz/OtMExvsqbbMI0ChyelKabSvQ== + +"@esbuild/linux-loong64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.19.8.tgz#e2a5bbffe15748b49356a6cd7b2d5bf60c5a7123" + integrity sha512-fHZWS2JJxnXt1uYJsDv9+b60WCc2RlvVAy1F76qOLtXRO+H4mjt3Tr6MJ5l7Q78X8KgCFudnTuiQRBhULUyBKQ== + +"@esbuild/linux-mips64el@0.19.8": + version "0.19.8" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.19.8.tgz#1359331e6f6214f26f4b08db9b9df661c57cfa24" + integrity sha512-Wy/z0EL5qZYLX66dVnEg9riiwls5IYnziwuju2oUiuxVc+/edvqXa04qNtbrs0Ukatg5HEzqT94Zs7J207dN5Q== + +"@esbuild/linux-ppc64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.19.8.tgz#9ba436addc1646dc89dae48c62d3e951ffe70951" + integrity sha512-ETaW6245wK23YIEufhMQ3HSeHO7NgsLx8gygBVldRHKhOlD1oNeNy/P67mIh1zPn2Hr2HLieQrt6tWrVwuqrxg== + +"@esbuild/linux-riscv64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.19.8.tgz#fbcf0c3a0b20f40b5fc31c3b7695f0769f9de66b" + integrity sha512-T2DRQk55SgoleTP+DtPlMrxi/5r9AeFgkhkZ/B0ap99zmxtxdOixOMI570VjdRCs9pE4Wdkz7JYrsPvsl7eESg== + +"@esbuild/linux-s390x@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.19.8.tgz#989e8a05f7792d139d5564ffa7ff898ac6f20a4a" + integrity sha512-NPxbdmmo3Bk7mbNeHmcCd7R7fptJaczPYBaELk6NcXxy7HLNyWwCyDJ/Xx+/YcNH7Im5dHdx9gZ5xIwyliQCbg== + +"@esbuild/linux-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.19.8.tgz#b187295393a59323397fe5ff51e769ec4e72212b" + integrity sha512-lytMAVOM3b1gPypL2TRmZ5rnXl7+6IIk8uB3eLsV1JwcizuolblXRrc5ShPrO9ls/b+RTp+E6gbsuLWHWi2zGg== + +"@esbuild/netbsd-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.19.8.tgz#c1ec0e24ea82313cb1c7bae176bd5acd5bde7137" + integrity sha512-hvWVo2VsXz/8NVt1UhLzxwAfo5sioj92uo0bCfLibB0xlOmimU/DeAEsQILlBQvkhrGjamP0/el5HU76HAitGw== + +"@esbuild/openbsd-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.19.8.tgz#0c5b696ac66c6d70cf9ee17073a581a28af9e18d" + integrity sha512-/7Y7u77rdvmGTxR83PgaSvSBJCC2L3Kb1M/+dmSIvRvQPXXCuC97QAwMugBNG0yGcbEGfFBH7ojPzAOxfGNkwQ== + 
+"@esbuild/sunos-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.19.8.tgz#2a697e1f77926ff09fcc457d8f29916d6cd48fb1" + integrity sha512-9Lc4s7Oi98GqFA4HzA/W2JHIYfnXbUYgekUP/Sm4BG9sfLjyv6GKKHKKVs83SMicBF2JwAX6A1PuOLMqpD001w== + +"@esbuild/win32-arm64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.19.8.tgz#ec029e62a2fca8c071842ecb1bc5c2dd20b066f1" + integrity sha512-rq6WzBGjSzihI9deW3fC2Gqiak68+b7qo5/3kmB6Gvbh/NYPA0sJhrnp7wgV4bNwjqM+R2AApXGxMO7ZoGhIJg== + +"@esbuild/win32-ia32@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.19.8.tgz#cbb9a3146bde64dc15543e48afe418c7a3214851" + integrity sha512-AIAbverbg5jMvJznYiGhrd3sumfwWs8572mIJL5NQjJa06P8KfCPWZQ0NwZbPQnbQi9OWSZhFVSUWjjIrn4hSw== + +"@esbuild/win32-x64@0.19.8": + version "0.19.8" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.19.8.tgz#c8285183dbdb17008578dbacb6e22748709b4822" + integrity sha512-bfZ0cQ1uZs2PqpulNL5j/3w+GDhP36k1K5c38QdQg+Swy51jFZWWeIkteNsufkQxp986wnqRRsb/bHbY1WQ7TA== "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" @@ -1686,6 +1710,20 @@ p-defer "^4.0.0" uint8arraylist "^2.4.3" +"@libp2p/interface@^0.1.4": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@libp2p/interface/-/interface-0.1.6.tgz#1328cf6086f02c499183489ccb143fe9c159e871" + integrity sha512-Lzc5cS/hXuoXhuAbVIxJIHLCYmfPcbU0vVgrpMoiP1Qb2Q3ETU4A46GB8s8mWXgSU6tr9RcqerUqzFYD6+OAag== + dependencies: + "@multiformats/multiaddr" "^12.1.5" + abortable-iterator "^5.0.1" + it-pushable "^3.2.0" + it-stream-types "^2.0.1" + multiformats "^12.0.1" + p-defer "^4.0.0" + race-signal "^1.0.0" + uint8arraylist "^2.4.3" + "@libp2p/keychain@^3.0.4": version "3.0.4" resolved "https://registry.yarnpkg.com/@libp2p/keychain/-/keychain-3.0.4.tgz#94d04a592ea18d83ebed6d6d8457e9aa8cc72e91" @@ -2560,6 +2598,11 @@ 
resolved "https://registry.npmjs.org/@opentelemetry/api/-/api-1.0.0-rc.0.tgz" integrity sha512-iXKByCMfrlO5S6Oh97BuM56tM2cIBB0XsL/vWF/AtJrJEKx4MC/Xdu0xDsGXMGcNWpqF7ujMsjjnp0+UHBwnDQ== +"@opentelemetry/api@^1.4.0": + version "1.7.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.7.0.tgz#b139c81999c23e3c8d3c0a7234480e945920fc40" + integrity sha512-AdY5wvN0P2vXBi3b29hxZgSFvdhdxPB9+f0B6s//P9Q8nibRWeA3cHm8UmLpio9ABigkVHJ5NMPk+Mz8VCCyrw== + "@parcel/watcher@2.0.4": version "2.0.4" resolved "https://registry.yarnpkg.com/@parcel/watcher/-/watcher-2.0.4.tgz#f300fef4cc38008ff4b8c29d92588eced3ce014b" @@ -2585,6 +2628,11 @@ picocolors "^1.0.0" tslib "^2.6.0" +"@polka/url@^1.0.0-next.20": + version "1.0.0-next.24" + resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.24.tgz#58601079e11784d20f82d0585865bb42305c4df3" + integrity sha512-2LuNTFBIO0m7kKIQvvPHN6UE63VjpmL9rnEEaOOaiSPbZK+zUOYIzBAWcED+3XYzhYsd/0mD57VdxAEqqV52CQ== + "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" @@ -2638,6 +2686,115 @@ resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== +"@puppeteer/browsers@1.4.6": + version "1.4.6" + resolved "https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-1.4.6.tgz#1f70fd23d5d2ccce9d29b038e5039d7a1049ca77" + integrity sha512-x4BEjr2SjOPowNeiguzjozQbsc6h437ovD/wu+JpaenxVLm3jkgzHY2xOslMTp50HoTvQreMjiexiGQw1sqZlQ== + dependencies: + debug "4.3.4" + extract-zip "2.0.1" + progress "2.0.3" + proxy-agent "6.3.0" + tar-fs "3.0.4" + unbzip2-stream "1.4.3" + yargs "17.7.1" + +"@puppeteer/browsers@^1.6.0": + version "1.8.0" + resolved 
"https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-1.8.0.tgz#fb6ee61de15e7f0e67737aea9f9bab1512dbd7d8" + integrity sha512-TkRHIV6k2D8OlUe8RtG+5jgOF/H98Myx0M6AOafC8DdNVOFiBSFa5cpRDtpm8LXOa9sVwe0+e6Q3FC56X/DZfg== + dependencies: + debug "4.3.4" + extract-zip "2.0.1" + progress "2.0.3" + proxy-agent "6.3.1" + tar-fs "3.0.4" + unbzip2-stream "1.4.3" + yargs "17.7.2" + +"@rollup/plugin-inject@^5.0.5": + version "5.0.5" + resolved "https://registry.yarnpkg.com/@rollup/plugin-inject/-/plugin-inject-5.0.5.tgz#616f3a73fe075765f91c5bec90176608bed277a3" + integrity sha512-2+DEJbNBoPROPkgTDNe8/1YXWcqxbN5DTjASVIOx8HS+pITXushyNiBV56RB08zuptzz8gT3YfkqriTBVycepg== + dependencies: + "@rollup/pluginutils" "^5.0.1" + estree-walker "^2.0.2" + magic-string "^0.30.3" + +"@rollup/plugin-virtual@^3.0.2": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@rollup/plugin-virtual/-/plugin-virtual-3.0.2.tgz#17e17eeecb4c9fa1c0a6e72c9e5f66382fddbb82" + integrity sha512-10monEYsBp3scM4/ND4LNH5Rxvh3e/cVeL3jWTgZ2SrQ+BmUoQcopVQvnaMcOnykb1VkxUFuDAN+0FnpTFRy2A== + +"@rollup/pluginutils@^5.0.1": + version "5.0.5" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.0.5.tgz#bbb4c175e19ebfeeb8c132c2eea0ecb89941a66c" + integrity sha512-6aEYR910NyP73oHiJglti74iRyOwgFU4x3meH/H8OJx6Ry0j6cOVZ5X/wTvub7G7Ao6qaHBEaNsV3GLJkSsF+Q== + dependencies: + "@types/estree" "^1.0.0" + estree-walker "^2.0.2" + picomatch "^2.3.1" + +"@rollup/rollup-android-arm-eabi@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.6.1.tgz#0ea289f68ff248b50fea5716ca9f65f7d4dba3ae" + integrity sha512-0WQ0ouLejaUCRsL93GD4uft3rOmB8qoQMU05Kb8CmMtMBe7XUDLAltxVZI1q6byNqEtU7N1ZX1Vw5lIpgulLQA== + +"@rollup/rollup-android-arm64@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.6.1.tgz#27c8c67fc5de574874085a1b480ac65b3e18378e" + integrity 
sha512-1TKm25Rn20vr5aTGGZqo6E4mzPicCUD79k17EgTLAsXc1zysyi4xXKACfUbwyANEPAEIxkzwue6JZ+stYzWUTA== + +"@rollup/rollup-darwin-arm64@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.6.1.tgz#c5735c042980c85495411af7183dd20294763bd8" + integrity sha512-cEXJQY/ZqMACb+nxzDeX9IPLAg7S94xouJJCNVE5BJM8JUEP4HeTF+ti3cmxWeSJo+5D+o8Tc0UAWUkfENdeyw== + +"@rollup/rollup-darwin-x64@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.6.1.tgz#af844bd54abb73ca3c9cf89a31eec17861d1375d" + integrity sha512-LoSU9Xu56isrkV2jLldcKspJ7sSXmZWkAxg7sW/RfF7GS4F5/v4EiqKSMCFbZtDu2Nc1gxxFdQdKwkKS4rwxNg== + +"@rollup/rollup-linux-arm-gnueabihf@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.6.1.tgz#5e972f63c441eaf859551039b3f18db9b035977d" + integrity sha512-EfI3hzYAy5vFNDqpXsNxXcgRDcFHUWSx5nnRSCKwXuQlI5J9dD84g2Usw81n3FLBNsGCegKGwwTVsSKK9cooSQ== + +"@rollup/rollup-linux-arm64-gnu@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.6.1.tgz#f4cfbc71e3b6fdb395b28b1472414e181515c72d" + integrity sha512-9lhc4UZstsegbNLhH0Zu6TqvDfmhGzuCWtcTFXY10VjLLUe4Mr0Ye2L3rrtHaDd/J5+tFMEuo5LTCSCMXWfUKw== + +"@rollup/rollup-linux-arm64-musl@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.6.1.tgz#6a94c691830dc29bf708de7c640f494996130893" + integrity sha512-FfoOK1yP5ksX3wwZ4Zk1NgyGHZyuRhf99j64I5oEmirV8EFT7+OhUZEnP+x17lcP/QHJNWGsoJwrz4PJ9fBEXw== + +"@rollup/rollup-linux-x64-gnu@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.6.1.tgz#f07bae3f7dc532d9ea5ab36c9071db329f9a1efb" + integrity sha512-DNGZvZDO5YF7jN5fX8ZqmGLjZEXIJRdJEdTFMhiyXqyXubBa0WVLDWSNlQ5JR2PNgDbEV1VQowhVRUh+74D+RA== + 
+"@rollup/rollup-linux-x64-musl@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.6.1.tgz#357a34fdbf410af88ce48bd802bea6462bb9a8bc" + integrity sha512-RkJVNVRM+piYy87HrKmhbexCHg3A6Z6MU0W9GHnJwBQNBeyhCJG9KDce4SAMdicQnpURggSvtbGo9xAWOfSvIQ== + +"@rollup/rollup-win32-arm64-msvc@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.6.1.tgz#b6e97fd38281667e35297033393cd1101f4a31be" + integrity sha512-v2FVT6xfnnmTe3W9bJXl6r5KwJglMK/iRlkKiIFfO6ysKs0rDgz7Cwwf3tjldxQUrHL9INT/1r4VA0n9L/F1vQ== + +"@rollup/rollup-win32-ia32-msvc@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.6.1.tgz#a95db026c640c8128bfd38546d85342f2329beaf" + integrity sha512-YEeOjxRyEjqcWphH9dyLbzgkF8wZSKAKUkldRY6dgNR5oKs2LZazqGB41cWJ4Iqqcy9/zqYgmzBkRoVz3Q9MLw== + +"@rollup/rollup-win32-x64-msvc@4.6.1": + version "4.6.1" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.6.1.tgz#45785b5caf83200a34a9867ba50d69560880c120" + integrity sha512-0zfTlFAIhgz8V2G8STq8toAjsYYA6eci1hnXuyOTUFnymrtJwnS6uGKiv3v5UrPZkBlamLvrLV2iiaeqCKzb0A== + "@scure/base@~1.0.0": version "1.0.0" resolved "https://registry.npmjs.org/@scure/base/-/base-1.0.0.tgz" @@ -2729,6 +2886,11 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== +"@sindresorhus/is@^5.2.0": + version "5.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-5.6.0.tgz#41dd6093d34652cddb5d5bdeee04eafc33826668" + integrity sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g== + "@sinonjs/commons@^2.0.0": version "2.0.0" resolved 
"https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3" @@ -2771,6 +2933,85 @@ resolved "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz" integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== +"@swc/core-darwin-arm64@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.101.tgz#9ffdc0e77c31b20877fa7405c82905e0c76738d0" + integrity sha512-mNFK+uHNPRXSnfTOG34zJOeMl2waM4hF4a2NY7dkMXrPqw9CoJn4MwTXJcyMiSz1/BnNjjTCHF3Yhj0jPxmkzQ== + +"@swc/core-darwin-x64@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.3.101.tgz#e50130e21e3cfd3029fd6cea43e8309b58ad9fa6" + integrity sha512-B085j8XOx73Fg15KsHvzYWG262bRweGr3JooO1aW5ec5pYbz5Ew9VS5JKYS03w2UBSxf2maWdbPz2UFAxg0whw== + +"@swc/core-linux-arm-gnueabihf@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.101.tgz#8cd36328e794b3c42b6c8e578bb1f42e59ba0231" + integrity sha512-9xLKRb6zSzRGPqdz52Hy5GuB1lSjmLqa0lST6MTFads3apmx4Vgs8Y5NuGhx/h2I8QM4jXdLbpqQlifpzTlSSw== + +"@swc/core-linux-arm64-gnu@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.101.tgz#d15e3885eb13a1512ba62f00ce4f5bb19f710a0c" + integrity sha512-oE+r1lo7g/vs96Weh2R5l971dt+ZLuhaUX+n3BfDdPxNHfObXgKMjO7E+QS5RbGjv/AwiPCxQmbdCp/xN5ICJA== + +"@swc/core-linux-arm64-musl@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.101.tgz#851d4cc1079b091fee36f5f64335232210749d7a" + integrity sha512-OGjYG3H4BMOTnJWJyBIovCez6KiHF30zMIu4+lGJTCrxRI2fAjGLml3PEXj8tC3FMcud7U2WUn6TdG0/te2k6g== + +"@swc/core-linux-x64-gnu@1.3.101": + version "1.3.101" + resolved 
"https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.101.tgz#3a2a7c584db2e05a798e28361440424914563fa3" + integrity sha512-/kBMcoF12PRO/lwa8Z7w4YyiKDcXQEiLvM+S3G9EvkoKYGgkkz4Q6PSNhF5rwg/E3+Hq5/9D2R+6nrkF287ihg== + +"@swc/core-linux-x64-musl@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.101.tgz#45d1d53945994f08e93703b8de24ccac88538d0c" + integrity sha512-kDN8lm4Eew0u1p+h1l3JzoeGgZPQ05qDE0czngnjmfpsH2sOZxVj1hdiCwS5lArpy7ktaLu5JdRnx70MkUzhXw== + +"@swc/core-win32-arm64-msvc@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.101.tgz#b2610b8354e5fbca7cc5be3f728e61b046227fa8" + integrity sha512-9Wn8TTLWwJKw63K/S+jjrZb9yoJfJwCE2RV5vPCCWmlMf3U1AXj5XuWOLUX+Rp2sGKau7wZKsvywhheWm+qndQ== + +"@swc/core-win32-ia32-msvc@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.101.tgz#c919175bb4cd5e9fcfa56fbd3708167c1d445c68" + integrity sha512-onO5KvICRVlu2xmr4//V2je9O2XgS1SGKpbX206KmmjcJhXN5EYLSxW9qgg+kgV5mip+sKTHTAu7IkzkAtElYA== + +"@swc/core-win32-x64-msvc@1.3.101": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.101.tgz#17743fe425caffc596fde5965c9c4cf9a48aa26a" + integrity sha512-T3GeJtNQV00YmiVw/88/nxJ/H43CJvFnpvBHCVn17xbahiVUOPOduh3rc9LgAkKiNt/aV8vU3OJR+6PhfMR7UQ== + +"@swc/core@^1.3.100": + version "1.3.101" + resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.3.101.tgz#4e8f1583094a73c410e48a0bebdeccdc6c66d4a5" + integrity sha512-w5aQ9qYsd/IYmXADAnkXPGDMTqkQalIi+kfFf/MHRKTpaOL7DHjMXwPp/n8hJ0qNjRvchzmPtOqtPBiER50d8A== + dependencies: + "@swc/counter" "^0.1.1" + "@swc/types" "^0.1.5" + optionalDependencies: + "@swc/core-darwin-arm64" "1.3.101" + "@swc/core-darwin-x64" "1.3.101" + "@swc/core-linux-arm-gnueabihf" "1.3.101" + "@swc/core-linux-arm64-gnu" "1.3.101" + 
"@swc/core-linux-arm64-musl" "1.3.101" + "@swc/core-linux-x64-gnu" "1.3.101" + "@swc/core-linux-x64-musl" "1.3.101" + "@swc/core-win32-arm64-msvc" "1.3.101" + "@swc/core-win32-ia32-msvc" "1.3.101" + "@swc/core-win32-x64-msvc" "1.3.101" + +"@swc/counter@^0.1.1": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.2.tgz#bf06d0770e47c6f1102270b744e17b934586985e" + integrity sha512-9F4ys4C74eSTEUNndnER3VJ15oru2NumfQxS8geE+f3eB5xvfxpWyqE5XlVnxb/R14uoXi6SLbBwwiDSkv+XEw== + +"@swc/types@^0.1.5": + version "0.1.5" + resolved "https://registry.yarnpkg.com/@swc/types/-/types-0.1.5.tgz#043b731d4f56a79b4897a3de1af35e75d56bc63a" + integrity sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw== + "@szmarczak/http-timer@^4.0.5": version "4.0.6" resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" @@ -2778,6 +3019,13 @@ dependencies: defer-to-connect "^2.0.0" +"@szmarczak/http-timer@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-5.0.1.tgz#c7c1bf1141cdd4751b0399c8fc7b8b664cd5be3a" + integrity sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw== + dependencies: + defer-to-connect "^2.0.1" + "@tootallnate/once@1": version "1.1.2" resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" @@ -2788,6 +3036,11 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== +"@tootallnate/quickjs-emscripten@^0.23.0": + version "0.23.0" + resolved "https://registry.yarnpkg.com/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz#db4ecfd499a9765ab24002c3b696d02e6d32a12c" + integrity 
sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA== + "@tsconfig/node10@^1.0.7": version "1.0.8" resolved "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz" @@ -2857,19 +3110,12 @@ dependencies: "@types/chai" "*" -"@types/chai-subset@^1.3.3": - version "1.3.3" - resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.3.tgz#97893814e92abd2c534de422cb377e0e0bdaac94" - integrity sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw== - dependencies: - "@types/chai" "*" - "@types/chai@*": version "4.2.17" resolved "https://registry.npmjs.org/@types/chai/-/chai-4.2.17.tgz" integrity sha512-LaiwWNnYuL8xJlQcE91QB2JoswWZckq9A4b+nMPq8dt8AP96727Nb3X4e74u+E3tm4NLTILNI9MYFsyVc30wSA== -"@types/chai@^4.3.5", "@types/chai@^4.3.6": +"@types/chai@^4.3.6": version "4.3.6" resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.6.tgz#7b489e8baf393d5dd1266fb203ddd4ea941259e6" integrity sha512-VOVRLM1mBxIRxydiViqPcKn6MIxZytrbMpd6RJLIWKxUNr3zux8no0Oc7kJx0WAPIitgZ0gkrDS+btlqQpubpw== @@ -2977,9 +3223,14 @@ form-data "^2.5.0" "@types/http-cache-semantics@*": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" - integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== + version "4.0.3" + resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.3.tgz#a3ff232bf7d5c55f38e4e45693eda2ebb545794d" + integrity sha512-V46MYLFp08Wf2mmaBhvgjStM3tPa+2GAdy/iqoX+noX1//zje2x4XmrIU0cAwyClATsTmahbtoQ2EwP7I5WSiA== + +"@types/http-cache-semantics@^4.0.2": + version "4.0.4" + resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz#b979ebad3919799c979b17c72621c0bc0a31c6c4" + integrity 
sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA== "@types/http-proxy@^1.17.10": version "1.17.10" @@ -3126,6 +3377,13 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-18.17.14.tgz#a621ad26e7eb076d6846dd3d39557ddf9d89f04b" integrity sha512-ZE/5aB73CyGqgQULkLG87N9GnyGe5TcQjv34pwS8tfBs1IkCh0ASM69mydb2znqd6v0eX+9Ytvk6oQRqu8T1Vw== +"@types/node@^20.1.0": + version "20.10.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.10.1.tgz#d2c96f356c3125fedc983d74c424910c3767141c" + integrity sha512-T2qwhjWwGH81vUEx4EXmBKsTJRXFXNZTL4v0gi01+zyBmCwzE6TyHszqX01m+QHTEq+EZNo13NeJIdEqf+Myrg== + dependencies: + undici-types "~5.26.4" + "@types/normalize-package-data@^2.4.0": version "2.4.1" resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" @@ -3152,9 +3410,9 @@ safe-buffer "~5.1.1" "@types/responselike@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29" - integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== + version "1.0.2" + resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.2.tgz#8de1b0477fd7c12df77e50832fa51701a8414bd6" + integrity sha512-/4YQT5Kp6HxUDb4yhRkm0bJ7TbjvTddqX7PZ5hz6qV3pxSo72f/6YPRo+Mu2DU307tm9IioO69l7uAwn5XNcFA== dependencies: "@types/node" "*" @@ -3296,6 +3554,11 @@ resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.4.tgz#bd86a43617df0594787d38b735f55c805becf1bc" integrity sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw== +"@types/which@^2.0.1": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@types/which/-/which-2.0.2.tgz#54541d02d6b1daee5ec01ac0d1b37cecf37db1ae" + integrity 
sha512-113D3mDkZDjo+EeUEHCFy0qniNc1ZpecGiAU7WSo7YDoSzolZIQKpYFHrPpjkB2nuyahcKfrmLXeQlh7gqJYdw== + "@types/ws@^8.5.3": version "8.5.5" resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.5.tgz#af587964aa06682702ee6dcbc7be41a80e4b28eb" @@ -3407,65 +3670,147 @@ "@typescript-eslint/types" "6.7.2" eslint-visitor-keys "^3.4.1" -"@vitest/coverage-v8@^0.34.6": - version "0.34.6" - resolved "https://registry.yarnpkg.com/@vitest/coverage-v8/-/coverage-v8-0.34.6.tgz#931d9223fa738474e00c08f52b84e0f39cedb6d1" - integrity sha512-fivy/OK2d/EsJFoEoxHFEnNGTg+MmdZBAVK9Ka4qhXR2K3J0DS08vcGVwzDtXSuUMabLv4KtPcpSKkcMXFDViw== +"@vitest/browser@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/browser/-/browser-1.1.0.tgz#b3c3e06d04506309a1e163103e1f65ee1391c262" + integrity sha512-59Uwoiw/zAQPmqgIKrzev8HNfeNlD8Q/nDyP9Xqg1D3kaM0tcOT/wk5RnZFW5f0JdguK0c1+vSeOPUSrOja1hQ== + dependencies: + estree-walker "^3.0.3" + magic-string "^0.30.5" + sirv "^2.0.3" + +"@vitest/coverage-v8@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/coverage-v8/-/coverage-v8-1.1.0.tgz#bc0bbb99fcb608f72794701a86302ff3aabbc125" + integrity sha512-kHQRk70vTdXAyQY2C0vKOHPyQD/R6IUzcGdO4vCuyr4alE5Yg1+Sk2jSdjlIrTTXdcNEs+ReWVM09mmSFJpzyQ== dependencies: "@ampproject/remapping" "^2.2.1" "@bcoe/v8-coverage" "^0.2.3" - istanbul-lib-coverage "^3.2.0" + debug "^4.3.4" + istanbul-lib-coverage "^3.2.2" istanbul-lib-report "^3.0.1" istanbul-lib-source-maps "^4.0.1" - istanbul-reports "^3.1.5" - magic-string "^0.30.1" + istanbul-reports "^3.1.6" + magic-string "^0.30.5" + magicast "^0.3.2" picocolors "^1.0.0" - std-env "^3.3.3" + std-env "^3.5.0" test-exclude "^6.0.0" - v8-to-istanbul "^9.1.0" + v8-to-istanbul "^9.2.0" -"@vitest/expect@0.34.6": - version "0.34.6" - resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-0.34.6.tgz#608a7b7a9aa3de0919db99b4cc087340a03ea77e" - integrity 
sha512-QUzKpUQRc1qC7qdGo7rMK3AkETI7w18gTCUrsNnyjjJKYiuUB9+TQK3QnR1unhCnWRC0AbKv2omLGQDF/mIjOw== +"@vitest/expect@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-1.1.0.tgz#f58eef7de090ad65f30bb93ec54fa9f94c9d1d5d" + integrity sha512-9IE2WWkcJo2BR9eqtY5MIo3TPmS50Pnwpm66A6neb2hvk/QSLfPXBz2qdiwUOQkwyFuuXEUj5380CbwfzW4+/w== dependencies: - "@vitest/spy" "0.34.6" - "@vitest/utils" "0.34.6" + "@vitest/spy" "1.1.0" + "@vitest/utils" "1.1.0" chai "^4.3.10" -"@vitest/runner@0.34.6": - version "0.34.6" - resolved "https://registry.yarnpkg.com/@vitest/runner/-/runner-0.34.6.tgz#6f43ca241fc96b2edf230db58bcde5b974b8dcaf" - integrity sha512-1CUQgtJSLF47NnhN+F9X2ycxUP0kLHQ/JWvNHbeBfwW8CzEGgeskzNnHDyv1ieKTltuR6sdIHV+nmR6kPxQqzQ== +"@vitest/runner@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/runner/-/runner-1.1.0.tgz#b3bf60f4a78f4324ca09811dd0f87b721a96b534" + integrity sha512-zdNLJ00pm5z/uhbWF6aeIJCGMSyTyWImy3Fcp9piRGvueERFlQFbUwCpzVce79OLm2UHk9iwaMSOaU9jVHgNVw== dependencies: - "@vitest/utils" "0.34.6" - p-limit "^4.0.0" + "@vitest/utils" "1.1.0" + p-limit "^5.0.0" pathe "^1.1.1" -"@vitest/snapshot@0.34.6": - version "0.34.6" - resolved "https://registry.yarnpkg.com/@vitest/snapshot/-/snapshot-0.34.6.tgz#b4528cf683b60a3e8071cacbcb97d18b9d5e1d8b" - integrity sha512-B3OZqYn6k4VaN011D+ve+AA4whM4QkcwcrwaKwAbyyvS/NB1hCWjFIBQxAQQSQir9/RtyAAGuq+4RJmbn2dH4w== +"@vitest/snapshot@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/snapshot/-/snapshot-1.1.0.tgz#b9924e4303382b43bb2c31061b173e69a6fb3437" + integrity sha512-5O/wyZg09V5qmNmAlUgCBqflvn2ylgsWJRRuPrnHEfDNT6tQpQ8O1isNGgo+VxofISHqz961SG3iVvt3SPK/QQ== dependencies: - magic-string "^0.30.1" + magic-string "^0.30.5" pathe "^1.1.1" - pretty-format "^29.5.0" + pretty-format "^29.7.0" -"@vitest/spy@0.34.6": - version "0.34.6" - resolved "https://registry.yarnpkg.com/@vitest/spy/-/spy-0.34.6.tgz#b5e8642a84aad12896c915bce9b3cc8cdaf821df" - 
integrity sha512-xaCvneSaeBw/cz8ySmF7ZwGvL0lBjfvqc1LpQ/vcdHEvpLn3Ff1vAvjw+CoGn0802l++5L/pxb7whwcWAw+DUQ== +"@vitest/spy@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/spy/-/spy-1.1.0.tgz#7f40697e4fc217ac8c3cc89a865d1751b263f561" + integrity sha512-sNOVSU/GE+7+P76qYo+VXdXhXffzWZcYIPQfmkiRxaNCSPiLANvQx5Mx6ZURJ/ndtEkUJEpvKLXqAYTKEY+lTg== dependencies: - tinyspy "^2.1.1" + tinyspy "^2.2.0" -"@vitest/utils@0.34.6": - version "0.34.6" - resolved "https://registry.yarnpkg.com/@vitest/utils/-/utils-0.34.6.tgz#38a0a7eedddb8e7291af09a2409cb8a189516968" - integrity sha512-IG5aDD8S6zlvloDsnzHw0Ut5xczlF+kv2BOTo+iXfPr54Yhi5qbVOgGB1hZaVq4iJ4C/MZ2J0y15IlsV/ZcI0A== +"@vitest/utils@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@vitest/utils/-/utils-1.1.0.tgz#d177a5f41bdb484bbb43c8d73a77ca782df068b5" + integrity sha512-z+s510fKmYz4Y41XhNs3vcuFTFhcij2YF7F8VQfMEYAAUfqQh0Zfg7+w9xdgFGhPf3tX3TicAe+8BDITk6ampQ== dependencies: - diff-sequences "^29.4.3" - loupe "^2.3.6" - pretty-format "^29.5.0" + diff-sequences "^29.6.3" + loupe "^2.3.7" + pretty-format "^29.7.0" + +"@wdio/config@8.27.0": + version "8.27.0" + resolved "https://registry.yarnpkg.com/@wdio/config/-/config-8.27.0.tgz#c738d8108b5161cf3f80bb34d0e1f4d700b1a9ce" + integrity sha512-zYM5daeiBVVAbQj0ASymAt0RUsocLVIwKiUHNa8gg/1GsZnztGjetXExSp1gXlxtMVM5xWUSKjh6ceFK79gWDQ== + dependencies: + "@wdio/logger" "8.24.12" + "@wdio/types" "8.27.0" + "@wdio/utils" "8.27.0" + decamelize "^6.0.0" + deepmerge-ts "^5.0.0" + glob "^10.2.2" + import-meta-resolve "^4.0.0" + +"@wdio/logger@8.24.12": + version "8.24.12" + resolved "https://registry.yarnpkg.com/@wdio/logger/-/logger-8.24.12.tgz#03cb8bb7ce7ee443e1dcd200a3b44270ae16a1f9" + integrity sha512-QisOiVIWKTUCf1H7S+DOtC+gruhlpimQrUXfWMTeeh672PvAJYnTpOJDWA+BtXfsikkUYFAzAaq8SeMJk8rqKg== + dependencies: + chalk "^5.1.2" + loglevel "^1.6.0" + loglevel-plugin-prefix "^0.8.4" + strip-ansi "^7.1.0" + +"@wdio/logger@^8.11.0", "@wdio/logger@^8.16.17": + 
version "8.16.17" + resolved "https://registry.yarnpkg.com/@wdio/logger/-/logger-8.16.17.tgz#c2055857ed3e3cf12cfad843140fa79264c6a632" + integrity sha512-zeQ41z3T+b4IsrriZZipayXxLNDuGsm7TdExaviNGojPVrIsQUCSd/FvlLHM32b7ZrMyInHenu/zx1cjAZO71g== + dependencies: + chalk "^5.1.2" + loglevel "^1.6.0" + loglevel-plugin-prefix "^0.8.4" + strip-ansi "^7.1.0" + +"@wdio/protocols@8.24.12": + version "8.24.12" + resolved "https://registry.yarnpkg.com/@wdio/protocols/-/protocols-8.24.12.tgz#16c2e3dff4cfc0ed694f3f8142fb68b74609fbf5" + integrity sha512-QnVj3FkapmVD3h2zoZk+ZQ8gevSj9D9MiIQIy8eOnY4FAneYZ9R9GvoW+mgNcCZO8S8++S/jZHetR8n+8Q808g== + +"@wdio/repl@8.24.12": + version "8.24.12" + resolved "https://registry.yarnpkg.com/@wdio/repl/-/repl-8.24.12.tgz#b09746ae4f51f7da684312db617e598f2d064d9a" + integrity sha512-321F3sWafnlw93uRTSjEBVuvWCxTkWNDs7ektQS15drrroL3TMeFOynu4rDrIz0jXD9Vas0HCD2Tq/P0uxFLdw== + dependencies: + "@types/node" "^20.1.0" + +"@wdio/types@8.27.0": + version "8.27.0" + resolved "https://registry.yarnpkg.com/@wdio/types/-/types-8.27.0.tgz#ef2e3a9ae083f08ee5fe5bf9e5dfc70cc55cebcb" + integrity sha512-LbP9FKh8r0uW9/dKhTIUCC1Su8PsP9TmzGKXkWt6/IMacgJiB/zW3u1CgyaLw9lG0UiQORHGoeJX9zB2HZAh4w== + dependencies: + "@types/node" "^20.1.0" + +"@wdio/utils@8.27.0": + version "8.27.0" + resolved "https://registry.yarnpkg.com/@wdio/utils/-/utils-8.27.0.tgz#6cb9b29649b4e301a959a8e8aea831edec635d55" + integrity sha512-4BY+JBQssVn003P5lA289uDMie3LtGinHze5btkcW9timB6VaU+EeZS4eKTPC0pziizLhteVvXYxv3YTpeeRfA== + dependencies: + "@puppeteer/browsers" "^1.6.0" + "@wdio/logger" "8.24.12" + "@wdio/types" "8.27.0" + decamelize "^6.0.0" + deepmerge-ts "^5.1.0" + edgedriver "^5.3.5" + geckodriver "^4.2.0" + get-port "^7.0.0" + import-meta-resolve "^4.0.0" + locate-app "^2.1.0" + safaridriver "^0.1.0" + split2 "^4.2.0" + wait-port "^1.0.4" "@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": version "1.11.6" @@ -3694,11 +4039,16 @@ acorn-jsx@^5.3.2: resolved 
"https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== -acorn-walk@^8.1.1, acorn-walk@^8.2.0: +acorn-walk@^8.1.1: version "8.2.0" resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== +acorn-walk@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.0.tgz#2097665af50fd0cf7a2dfccd2b9368964e66540f" + integrity sha512-FS7hV565M5l1R08MXqo8odwMTB02C2UqzB17RVgu9EyuYFBqJZ3/ZY97sQD5FewVu1UyDFc1yztUDrAwT0EypA== + acorn@^8.10.0, acorn@^8.9.0: version "8.10.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5" @@ -3736,6 +4086,13 @@ agent-base@6, agent-base@^6.0.2: dependencies: debug "4" +agent-base@^7.0.2, agent-base@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434" + integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg== + dependencies: + debug "^4.3.4" + agentkeepalive@^4.1.3, agentkeepalive@^4.2.1: version "4.2.1" resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.2.1.tgz#a7975cbb9f83b367f06c90cc51ff28fe7d499717" @@ -3909,6 +4266,18 @@ archiver-utils@^2.1.0: normalize-path "^3.0.0" readable-stream "^2.0.0" +archiver-utils@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-4.0.1.tgz#66ad15256e69589a77f706c90c6dbcc1b2775d2a" + integrity sha512-Q4Q99idbvzmgCTEAAhi32BkOyq8iVI5EwdO0PmBDSGIzzjYNdcFn7Q7k3OzbLy4kLUPXfJtG6fO2RjftXbobBg== + dependencies: + glob "^8.0.0" + graceful-fs "^4.2.0" + lazystream "^1.0.0" + lodash "^4.17.15" + normalize-path "^3.0.0" + readable-stream "^3.6.0" + 
archiver@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.1.tgz#21e92811d6f09ecfce649fbefefe8c79e57cbbb6" @@ -3922,6 +4291,19 @@ archiver@^5.3.1: tar-stream "^2.2.0" zip-stream "^4.1.0" +archiver@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/archiver/-/archiver-6.0.1.tgz#d56968d4c09df309435adb5a1bbfc370dae48133" + integrity sha512-CXGy4poOLBKptiZH//VlWdFuUC1RESbdZjGjILwBuZ73P7WkAUN0htfSfBq/7k6FRFlpu7bg4JOkj1vU9G6jcQ== + dependencies: + archiver-utils "^4.0.1" + async "^3.2.4" + buffer-crc32 "^0.2.1" + readable-stream "^3.6.0" + readdir-glob "^1.1.2" + tar-stream "^3.0.0" + zip-stream "^5.0.1" + archy@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz" @@ -3957,6 +4339,13 @@ argv@0.0.2: resolved "https://registry.npmjs.org/argv/-/argv-0.0.2.tgz" integrity sha1-7L0W+JSbFXGDcRsb2jNPN4QBhas= +aria-query@^5.0.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.0.tgz#650c569e41ad90b51b3d7df5e5eed1c7549c103e" + integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A== + dependencies: + dequal "^2.0.3" + array-buffer-byte-length@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" @@ -4095,6 +4484,13 @@ assertion-error@^1.1.0: resolved "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz" integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== +ast-types@^0.13.4: + version "0.13.4" + resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.13.4.tgz#ee0d77b343263965ecc3fb62da16e7222b2b6782" + integrity sha512-x1FCFnFifvYDDzTaLII71vG5uvDwgtmDTEVWAxrgeiR8VjMONcCXJx7E+USjDtHlwFmt9MysbqgF9b9Vjr6w+w== + dependencies: + tslib "^2.0.1" + async-lock@^1.4.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/async-lock/-/async-lock-1.4.0.tgz#c8b6630eff68fbbdd8a5b6eb763dac3bfbb8bf02" @@ -4112,6 +4508,11 @@ async@^3.2.3, async@~3.2.2: resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== +async@^3.2.4: + version "3.2.5" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" + integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== + asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -4216,10 +4617,15 @@ base64url@^3.0.1: resolved "https://registry.npmjs.org/base64url/-/base64url-3.0.1.tgz" integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== +basic-ftp@^5.0.2: + version "5.0.3" + resolved "https://registry.yarnpkg.com/basic-ftp/-/basic-ftp-5.0.3.tgz#b14c0fe8111ce001ec913686434fe0c2fb461228" + integrity sha512-QHX8HLlncOLpy54mh+k/sWIFd0ThmRqwe9ZjELybGZK+tZ8rUb9VO0saKJUROTbE+KhzDUT7xziGpGrW8Kmd+g== + bcrypt-pbkdf@^1.0.2: version "1.0.2" - resolved "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz" - integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== dependencies: tweetnacl "^0.14.3" @@ -4249,6 +4655,11 @@ benchmark@^2.1.4: lodash "^4.17.4" platform "^1.3.3" +big-integer@^1.6.17: + version "1.6.52" + resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.52.tgz#60a887f3047614a8e1bffe5d7173490a97dc8c85" + integrity 
sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg== + big-integer@^1.6.44: version "1.6.51" resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" @@ -4271,6 +4682,14 @@ binary-extensions@^2.0.0: resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== +binary@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79" + integrity sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg== + dependencies: + buffers "~0.1.1" + chainsaw "~0.1.0" + bindings@^1.3.0, bindings@^1.5.0: version "1.5.0" resolved "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz" @@ -4328,6 +4747,11 @@ bls-eth-wasm@^0.4.8: resolved "https://registry.npmjs.org/bls-eth-wasm/-/bls-eth-wasm-0.4.8.tgz" integrity sha512-ye7+G6KFLb3i9xSrLASAoYqOUK5WLB6XA5DD8Sh0UQpZ3T999ylsYbFdoOJpmvTDuBuMi23Vy8Jm0pn/GF01CA== +bluebird@~3.4.1: + version "3.4.7" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.4.7.tgz#f72d760be09b7f76d08ed8fae98b289a8d05fab3" + integrity sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA== + bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz" @@ -4410,6 +4834,13 @@ browser-level@^1.0.1: module-error "^1.0.2" run-parallel-limit "^1.1.0" +browser-resolve@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-2.0.0.tgz#99b7304cb392f8d73dba741bb2d7da28c6d7842b" + integrity sha512-7sWsQlYL2rGLy2IWm8WL8DCTJvYLc/qlOnsakDac87SOoCd16WLsaAMdCiAqsTNHIe+SXfaqyxyo6THoWqs8WQ== + dependencies: + resolve "^1.17.0" + browser-stdout@1.3.1: version "1.3.1" 
resolved "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" @@ -4512,6 +4943,11 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer-indexof-polyfill@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz#d2732135c5999c64b277fcf9b1abe3498254729c" + integrity sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A== + buffer-xor@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz" @@ -4533,7 +4969,7 @@ buffer@4.9.2, buffer@^4.3.0: ieee754 "^1.1.4" isarray "^1.0.0" -buffer@^5.4.3, buffer@^5.5.0: +buffer@^5.2.1, buffer@^5.4.3, buffer@^5.5.0, buffer@^5.7.1: version "5.7.1" resolved "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -4542,13 +4978,19 @@ buffer@^5.4.3, buffer@^5.5.0: ieee754 "^1.1.13" buffer@^6.0.3: + name buffer-polyfill version "6.0.3" - resolved "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== dependencies: base64-js "^1.3.1" ieee754 "^1.2.1" +buffers@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb" + integrity sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ== + bufio@~1.0.7: version "1.0.7" resolved "https://registry.npmjs.org/bufio/-/bufio-1.0.7.tgz" @@ -4708,6 +5150,24 @@ cacheable-lookup@^5.0.3: resolved 
"https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== +cacheable-lookup@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz#3476a8215d046e5a3202a9209dd13fec1f933a27" + integrity sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w== + +cacheable-request@^10.2.8: + version "10.2.14" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-10.2.14.tgz#eb915b665fda41b79652782df3f553449c406b9d" + integrity sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ== + dependencies: + "@types/http-cache-semantics" "^4.0.2" + get-stream "^6.0.1" + http-cache-semantics "^4.1.1" + keyv "^4.5.3" + mimic-response "^4.0.0" + normalize-url "^8.0.0" + responselike "^3.0.0" + cacheable-request@^7.0.2: version "7.0.4" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.4.tgz#7a33ebf08613178b403635be7b899d3e69bbe817" @@ -4785,7 +5245,7 @@ chai-as-promised@^7.1.1: dependencies: check-error "^1.0.2" -chai@^4.3.10: +chai@^4.3.10, chai@^4.3.7, chai@^4.3.8: version "4.3.10" resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.10.tgz#d784cec635e3b7e2ffb66446a63b4e33bd390384" integrity sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g== @@ -4798,31 +5258,12 @@ chai@^4.3.10: pathval "^1.1.1" type-detect "^4.0.8" -chai@^4.3.7: - version "4.3.7" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.7.tgz#ec63f6df01829088e8bf55fca839bcd464a8ec51" - integrity sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A== - dependencies: - assertion-error "^1.1.0" - check-error "^1.0.2" - deep-eql "^4.1.2" - get-func-name "^2.0.0" - loupe "^2.3.1" - pathval 
"^1.1.1" - type-detect "^4.0.5" - -chai@^4.3.8: - version "4.3.8" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.8.tgz#40c59718ad6928da6629c70496fe990b2bb5b17c" - integrity sha512-vX4YvVVtxlfSZ2VecZgFUTU5qPCYsobVI2O9FmwEXBhDigYGQA6jRXCycIs1yJnnWbZ6/+a2zNIF5DfVCcJBFQ== +chainsaw@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98" + integrity sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ== dependencies: - assertion-error "^1.1.0" - check-error "^1.0.2" - deep-eql "^4.1.2" - get-func-name "^2.0.0" - loupe "^2.3.1" - pathval "^1.1.1" - type-detect "^4.0.5" + traverse ">=0.3.0 <0.4" chalk@4.1.0: version "4.1.0" @@ -4862,6 +5303,11 @@ chalk@^5.0.0, chalk@^5.2.0: resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.2.0.tgz#249623b7d66869c673699fb66d65723e54dfcfb3" integrity sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA== +chalk@^5.1.2: + version "5.3.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" + integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== + chardet@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" @@ -4909,6 +5355,13 @@ chrome-trace-event@^1.0.2: resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== +chromium-bidi@0.4.16: + version "0.4.16" + resolved "https://registry.yarnpkg.com/chromium-bidi/-/chromium-bidi-0.4.16.tgz#8a67bfdf6bb8804efc22765a82859d20724b46ab" + integrity sha512-7ZbXdWERxRxSwo3txsBjjmc/NLxqb1Bk30mRb0BMS4YIaiV6zvKZqL/UAH+DdqcDYayDWk2n/y8klkBDODrPvA== + dependencies: + mitt 
"3.0.0" + ci-info@^3.2.0, ci-info@^3.6.1: version "3.8.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" @@ -5014,9 +5467,9 @@ clone-deep@4.0.1: shallow-clone "^3.0.0" clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== dependencies: mimic-response "^1.0.0" @@ -5181,6 +5634,16 @@ compress-commons@^4.1.0: normalize-path "^3.0.0" readable-stream "^3.6.0" +compress-commons@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-5.0.1.tgz#e46723ebbab41b50309b27a0e0f6f3baed2d6590" + integrity sha512-MPh//1cERdLtqwO3pOFLeXtpuai0Y2WCd5AhtKxznqM7WtaMYaOEMSgn45d9D10sIHSfIKE603HlOp8OPGrvag== + dependencies: + crc-32 "^1.2.0" + crc32-stream "^5.0.0" + normalize-path "^3.0.0" + readable-stream "^3.6.0" + concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" @@ -5397,6 +5860,14 @@ crc32-stream@^4.0.2: crc-32 "^1.2.0" readable-stream "^3.4.0" +crc32-stream@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-5.0.0.tgz#a97d3a802c8687f101c27cc17ca5253327354720" + integrity sha512-B0EPa1UK+qnpBZpG+7FgPCu0J2ETLpXq09o9BkLkEAhdB6Z61Qo4pJ3JYu0c+Qi+/SAL7QThqnzS06pmSSyZaw== + dependencies: + crc-32 "^1.2.0" + readable-stream "^3.4.0" + create-ecdh@^4.0.0: version "4.0.4" resolved "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz" @@ -5428,11 +5899,18 @@ create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: safe-buffer "^5.0.1" sha.js "^2.4.8" -create-require@^1.1.0: 
+create-require@^1.1.0, create-require@^1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz" integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== +cross-fetch@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-4.0.0.tgz#f037aef1580bb3a1a35164ea2a848ba81b445983" + integrity sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g== + dependencies: + node-fetch "^2.6.12" + cross-fetch@^3.1.5: version "3.1.8" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82" @@ -5477,6 +5955,23 @@ crypto-browserify@^3.11.0, crypto-browserify@^3.12.0: randombytes "^2.0.0" randomfill "^1.0.3" +css-shorthand-properties@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/css-shorthand-properties/-/css-shorthand-properties-1.1.1.tgz#1c808e63553c283f289f2dd56fcee8f3337bd935" + integrity sha512-Md+Juc7M3uOdbAFwOYlTrccIZ7oCFuzrhKYQjdeUEW/sE1hv17Jp/Bws+ReOPpGVBTYCBoYo+G17V5Qo8QQ75A== + +css-value@^0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/css-value/-/css-value-0.0.1.tgz#5efd6c2eea5ea1fd6b6ac57ec0427b18452424ea" + integrity sha512-FUV3xaJ63buRLgHrLQVlVgQnQdR4yqdLGaDu7g8CQcWjInDfM9plBTPI9FRfpahju1UBSaMckeb2/46ApS/V1Q== + +cssstyle@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-3.0.0.tgz#17ca9c87d26eac764bb8cfd00583cff21ce0277a" + integrity sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg== + dependencies: + rrweb-cssom "^0.6.0" + csv-parse@^4.16.0: version "4.16.0" resolved "https://registry.npmjs.org/csv-parse/-/csv-parse-4.16.0.tgz" @@ -5497,6 +5992,24 @@ dargs@^7.0.0: resolved "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc" integrity 
sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg== +data-uri-to-buffer@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz#d8feb2b2881e6a4f58c2e08acfd0e2834e26222e" + integrity sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A== + +data-uri-to-buffer@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-6.0.1.tgz#540bd4c8753a25ee129035aebdedf63b078703c7" + integrity sha512-MZd3VlchQkp8rdend6vrx7MmVDJzSNTBvghvKjirLkD+WTChA3KUf0jkE68Q4UyctNqI11zZO9/x2Yx+ub5Cvg== + +data-urls@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-5.0.0.tgz#2f76906bce1824429ffecb6920f45a0b30f00dde" + integrity sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg== + dependencies: + whatwg-mimetype "^4.0.0" + whatwg-url "^14.0.0" + datastore-core@^9.0.0, datastore-core@^9.1.1: version "9.1.1" resolved "https://registry.yarnpkg.com/datastore-core/-/datastore-core-9.1.1.tgz#613db89a9bb2624943811dd39b831125319fab79" @@ -5604,6 +6117,16 @@ decamelize@^4.0.0: resolved "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz" integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== +decamelize@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-6.0.0.tgz#8cad4d916fde5c41a264a43d0ecc56fe3d31749e" + integrity sha512-Fv96DCsdOgB6mdGl67MT5JaTNKRzrzill5OH5s8bjYJXVlcXyPYGyPsUkWyGV5p1TXI5esYIYMMeDJL0hEIwaA== + +decimal.js@^10.4.3: + version "10.4.3" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.3.tgz#1044092884d245d1b7f65725fa4ad4c6f781cc23" + integrity sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA== + decompress-response@^6.0.0: version "6.0.0" resolved 
"https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz" @@ -5616,7 +6139,7 @@ dedent@0.7.0: resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== -deep-eql@^4.1.2, deep-eql@^4.1.3: +deep-eql@^4.1.3: version "4.1.3" resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.1.3.tgz#7c7775513092f7df98d8df9996dd085eb668cc6d" integrity sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw== @@ -5628,6 +6151,11 @@ deep-is@^0.1.3: resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= +deepmerge-ts@^5.0.0, deepmerge-ts@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/deepmerge-ts/-/deepmerge-ts-5.1.0.tgz#c55206cc4c7be2ded89b9c816cf3608884525d7a" + integrity sha512-eS8dRJOckyo9maw9Tu5O5RUi/4inFLrnoLkBe3cPfDMx3WZioXtmOew4TXQaxq7Rhl4xjDtR7c6x8nNTxOvbFw== + deepmerge@^4.3.1: version "4.3.1" resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" @@ -5672,7 +6200,7 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" -defer-to-connect@^2.0.0: +defer-to-connect@^2.0.0, defer-to-connect@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== @@ -5713,6 +6241,15 @@ define-properties@^1.2.0: has-property-descriptors "^1.0.0" object-keys "^1.1.1" +degenerator@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/degenerator/-/degenerator-5.0.1.tgz#9403bf297c6dad9a1ece409b37db27954f91f2f5" + integrity sha512-TllpMR/t0M5sqCXfj85i4XaAzxmS5tVA16dqvdkMwGmzI+dXLXnw3J+3Vdv7VKw+ThlTMboK6i9rnZ6Nntj5CQ== + dependencies: + ast-types 
"^0.13.4" + escodegen "^2.1.0" + esprima "^4.0.1" + delay@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/delay/-/delay-6.0.0.tgz#43749aefdf6cabd9e17b0d00bd3904525137e607" @@ -5748,6 +6285,11 @@ deprecation@^2.0.0, deprecation@^2.3.1: resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== +dequal@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== + des.js@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz" @@ -5771,6 +6313,16 @@ detect-node@^2.0.4: resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== +devtools-protocol@0.0.1147663: + version "0.0.1147663" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1147663.tgz#4ec5610b39a6250d1f87e6b9c7e16688ed0ac78e" + integrity sha512-hyWmRrexdhbZ1tcJUGpO95ivbRhWXz++F4Ko+n21AY5PNln2ovoJw+8ZMNDTtip+CNFQfrtLVh/w4009dXO/eQ== + +devtools-protocol@^0.0.1237913: + version "0.0.1237913" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1237913.tgz#ac0208ff0cbe9c53646753576b5c1d788e3caa38" + integrity sha512-Pxtmz2ZIqBkpU82HaIdsvCQBG94yTC4xajrEsWx9p38QKEfBCJktSazsHkrjf9j3dVVNPhg5LR21F6KWeXpjiQ== + dezalgo@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/dezalgo/-/dezalgo-1.0.4.tgz#751235260469084c132157dfa857f386d4c33d81" @@ -5789,7 +6341,7 @@ di@^0.0.1: resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" integrity 
sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA== -diff-sequences@^29.4.3, diff-sequences@^29.6.3: +diff-sequences@^29.6.3: version "29.6.3" resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== @@ -5897,6 +6449,11 @@ domain-browser@^1.1.1: resolved "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz" integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== +domain-browser@^4.22.0: + version "4.22.0" + resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-4.22.0.tgz#6ddd34220ec281f9a65d3386d267ddd35c491f9f" + integrity sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw== + dot-prop@^5.1.0: version "5.3.0" resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" @@ -5914,6 +6471,13 @@ dotenv@~16.3.1: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== +duplexer2@~0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1" + integrity sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA== + dependencies: + readable-stream "^2.0.2" + duplexer@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -5924,6 +6488,26 @@ eastasianwidth@^0.2.0: resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" integrity 
sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== +edge-paths@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/edge-paths/-/edge-paths-3.0.5.tgz#9a35361d701d9b5dc07f641cebe8da01ede80937" + integrity sha512-sB7vSrDnFa4ezWQk9nZ/n0FdpdUuC6R1EOrlU3DL+bovcNFK28rqu2emmAUjujYEJTWIgQGqgVVWUZXMnc8iWg== + dependencies: + "@types/which" "^2.0.1" + which "^2.0.2" + +edgedriver@^5.3.5: + version "5.3.8" + resolved "https://registry.yarnpkg.com/edgedriver/-/edgedriver-5.3.8.tgz#ba304bd05a6696e0121df14b49c428af5eebf4e3" + integrity sha512-FWLPDuwJDeGGgtmlqTXb4lQi/HV9yylLo1F9O1g9TLqSemA5T6xH28seUIfyleVirLFtDQyKNUxKsMhMT4IfnA== + dependencies: + "@wdio/logger" "^8.16.17" + decamelize "^6.0.0" + edge-paths "^3.0.5" + node-fetch "^3.3.2" + unzipper "^0.10.14" + which "^4.0.0" + ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -6046,6 +6630,11 @@ ent@~2.2.0: resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" integrity sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA== +entities@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== + env-paths@^2.2.0: version "2.2.1" resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" @@ -6279,33 +6868,33 @@ es6-object-assign@^1.1.0: resolved "https://registry.npmjs.org/es6-object-assign/-/es6-object-assign-1.1.0.tgz" integrity sha1-wsNYJlYkfDnqEHyx5mUrb58kUjw= -esbuild@^0.18.10: - version "0.18.20" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.18.20.tgz#4709f5a34801b43b799ab7d6d82f7284a9b7a7a6" - integrity 
sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA== +esbuild@^0.19.3: + version "0.19.8" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.19.8.tgz#ad05b72281d84483fa6b5345bd246c27a207b8f1" + integrity sha512-l7iffQpT2OrZfH2rXIp7/FkmaeZM0vxbxN9KfiCwGYuZqzMg/JdvX26R31Zxn/Pxvsrg3Y9N6XTcnknqDyyv4w== optionalDependencies: - "@esbuild/android-arm" "0.18.20" - "@esbuild/android-arm64" "0.18.20" - "@esbuild/android-x64" "0.18.20" - "@esbuild/darwin-arm64" "0.18.20" - "@esbuild/darwin-x64" "0.18.20" - "@esbuild/freebsd-arm64" "0.18.20" - "@esbuild/freebsd-x64" "0.18.20" - "@esbuild/linux-arm" "0.18.20" - "@esbuild/linux-arm64" "0.18.20" - "@esbuild/linux-ia32" "0.18.20" - "@esbuild/linux-loong64" "0.18.20" - "@esbuild/linux-mips64el" "0.18.20" - "@esbuild/linux-ppc64" "0.18.20" - "@esbuild/linux-riscv64" "0.18.20" - "@esbuild/linux-s390x" "0.18.20" - "@esbuild/linux-x64" "0.18.20" - "@esbuild/netbsd-x64" "0.18.20" - "@esbuild/openbsd-x64" "0.18.20" - "@esbuild/sunos-x64" "0.18.20" - "@esbuild/win32-arm64" "0.18.20" - "@esbuild/win32-ia32" "0.18.20" - "@esbuild/win32-x64" "0.18.20" + "@esbuild/android-arm" "0.19.8" + "@esbuild/android-arm64" "0.19.8" + "@esbuild/android-x64" "0.19.8" + "@esbuild/darwin-arm64" "0.19.8" + "@esbuild/darwin-x64" "0.19.8" + "@esbuild/freebsd-arm64" "0.19.8" + "@esbuild/freebsd-x64" "0.19.8" + "@esbuild/linux-arm" "0.19.8" + "@esbuild/linux-arm64" "0.19.8" + "@esbuild/linux-ia32" "0.19.8" + "@esbuild/linux-loong64" "0.19.8" + "@esbuild/linux-mips64el" "0.19.8" + "@esbuild/linux-ppc64" "0.19.8" + "@esbuild/linux-riscv64" "0.19.8" + "@esbuild/linux-s390x" "0.19.8" + "@esbuild/linux-x64" "0.19.8" + "@esbuild/netbsd-x64" "0.19.8" + "@esbuild/openbsd-x64" "0.19.8" + "@esbuild/sunos-x64" "0.19.8" + "@esbuild/win32-arm64" "0.19.8" + "@esbuild/win32-ia32" "0.19.8" + "@esbuild/win32-x64" "0.19.8" escalade@^3.1.1: version "3.1.1" @@ -6332,6 +6921,17 @@ escape-string-regexp@^5.0.0: resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw== +escodegen@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.1.0.tgz#ba93bbb7a43986d29d6041f99f5262da773e2e17" + integrity sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionalDependencies: + source-map "~0.6.1" + eslint-import-resolver-node@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz#83b375187d412324a1963d84fa664377a23eb4d7" @@ -6541,7 +7141,7 @@ espree@^9.6.1: acorn-jsx "^5.3.2" eslint-visitor-keys "^3.4.1" -esprima@^4.0.0: +esprima@^4.0.0, esprima@^4.0.1: version "4.0.1" resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== @@ -6570,6 +7170,18 @@ estraverse@^5.1.0, estraverse@^5.2.0: resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz" integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== +estree-walker@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + +estree-walker@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-3.0.3.tgz#67c3e549ec402a487b4fc193d1953a524752340d" + integrity sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g== + dependencies: + "@types/estree" "^1.0.0" + esutils@^2.0.2: version 
"2.0.3" resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" @@ -6682,17 +7294,12 @@ eventemitter3@^4.0.0, eventemitter3@^4.0.4, eventemitter3@^4.0.7: events@1.1.1: version "1.1.1" - resolved "https://registry.npmjs.org/events/-/events-1.1.1.tgz" - integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ= - -events@^3.0.0: - version "3.1.0" - resolved "https://registry.npmjs.org/events/-/events-3.1.0.tgz" - integrity sha512-Rv+u8MLHNOdMjTAFeT3nCjHn2aGlx435FP/sDHNaRhDEMwyI/aB22Kj2qIN8R0cw3z28psEQLYwxVKLsKrMgWg== + resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" + integrity sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw== -events@^3.2.0, events@^3.3.0: +events@^3.0.0, events@^3.2.0, events@^3.3.0: version "3.3.0" - resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== eventsource@^2.0.2: @@ -6753,6 +7360,21 @@ execa@^7.1.1: signal-exit "^3.0.7" strip-final-newline "^3.0.0" +execa@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-8.0.1.tgz#51f6a5943b580f963c3ca9c6321796db8cc39b8c" + integrity sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^8.0.1" + human-signals "^5.0.0" + is-stream "^3.0.0" + merge-stream "^2.0.0" + npm-run-path "^5.1.0" + onetime "^6.0.0" + signal-exit "^4.1.0" + strip-final-newline "^3.0.0" + expand-tilde@^2.0.2: version "2.0.2" resolved "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz" @@ -6779,7 +7401,7 @@ external-editor@^3.0.3: iconv-lite "^0.4.24" tmp "^0.0.33" -extract-zip@^2.0.1: +extract-zip@2.0.1, extract-zip@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== @@ -6800,6 +7422,11 @@ fast-decode-uri-component@^1.0.1: resolved "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz" integrity sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg== +fast-deep-equal@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz#7b05218ddf9667bf7f370bf7fdb2cb15fdd0aa49" + integrity sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" @@ -6952,6 +7579,14 @@ fecha@^4.2.0: resolved "https://registry.npmjs.org/fecha/-/fecha-4.2.1.tgz" integrity sha512-MMMQ0ludy/nBs1/o0zVOiKTpG7qMbonKUzjJgQFEuvq6INZ1OraKPRAWkBq5vlKLOUMpmNYG1JoN3oDPUQ9m3Q== +fetch-blob@^3.1.2, fetch-blob@^3.1.4: + version "3.2.0" + resolved "https://registry.yarnpkg.com/fetch-blob/-/fetch-blob-3.2.0.tgz#f09b8d4bbd45adc6f0c20b7e787e793e309dcce9" + integrity sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ== + dependencies: + node-domexception "^1.0.0" + web-streams-polyfill "^3.0.3" + figures@3.2.0, figures@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" @@ -7128,6 +7763,11 @@ foreground-child@^3.1.0: cross-spawn "^7.0.0" signal-exit "^4.0.1" +form-data-encoder@^2.1.2: + version "2.1.4" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-2.1.4.tgz#261ea35d2a70d48d30ec7a9603130fa5515e9cd5" + integrity 
sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw== + form-data@^2.5.0: version "2.5.1" resolved "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz" @@ -7155,6 +7795,13 @@ form-data@^4.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" +formdata-polyfill@^4.0.10: + version "4.0.10" + resolved "https://registry.yarnpkg.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz#24807c31c9d402e002ab3d8c720144ceb8848423" + integrity sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g== + dependencies: + fetch-blob "^3.1.2" + formidable@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/formidable/-/formidable-2.1.2.tgz#fa973a2bec150e4ce7cac15589d7a25fc30ebd89" @@ -7236,6 +7883,21 @@ fsevents@~2.3.2: resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== +fsevents@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +fstream@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.12.tgz#4e8ba8ee2d48be4f7d0de505455548eae5932045" + integrity sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg== + dependencies: + graceful-fs "^4.1.2" + inherits "~2.0.0" + mkdirp ">=0.5 0" + rimraf "2" + function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" @@ -7300,6 +7962,20 @@ gauge@^4.0.3: strip-ansi "^6.0.1" wide-align "^1.1.5" +geckodriver@^4.2.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/geckodriver/-/geckodriver-4.2.1.tgz#03ad628241417737b962966aa8f8b13fa0f8bf75" + integrity 
sha512-4m/CRk0OI8MaANRuFIahvOxYTSjlNAO2p9JmE14zxueknq6cdtB5M9UGRQ8R9aMV0bLGNVHHDnDXmoXdOwJfWg== + dependencies: + "@wdio/logger" "^8.11.0" + decamelize "^6.0.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.1" + node-fetch "^3.3.1" + tar-fs "^3.0.4" + unzipper "^0.10.14" + which "^4.0.0" + generate-function@^2.0.0: version "2.3.1" resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f" @@ -7401,6 +8077,11 @@ get-port@5.1.1, get-port@^5.1.1: resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== +get-port@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/get-port/-/get-port-7.0.0.tgz#ffcd83da826146529e307a341d7801cae351daff" + integrity sha512-mDHFgApoQd+azgMdwylJrv2DX47ywGq1i5VFJE7fZ0dttNq3iQMfsU4IvEgBHojA3KqEudyu7Vq+oN8kNaNkWw== + get-stream@6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.0.tgz#3e0012cb6827319da2706e601a1583e8629a6718" @@ -7418,6 +8099,11 @@ get-stream@^6.0.0, get-stream@^6.0.1: resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== +get-stream@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-8.0.1.tgz#def9dfd71742cd7754a7761ed43749a27d02eca2" + integrity sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA== + get-symbol-description@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" @@ -7433,6 +8119,16 @@ get-tsconfig@^4.5.0: dependencies: resolve-pkg-maps "^1.0.0" +get-uri@^6.0.1: + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/get-uri/-/get-uri-6.0.2.tgz#e019521646f4a8ff6d291fbaea2c46da204bb75b" + integrity sha512-5KLucCJobh8vBY1K07EFV4+cPZH3mrV9YeAruUseCQKHB58SGjjT2l9/eA9LD082IiuMjSlFJEcdJ27TXvbZNw== + dependencies: + basic-ftp "^5.0.2" + data-uri-to-buffer "^6.0.0" + debug "^4.3.4" + fs-extra "^8.1.0" + git-raw-commits@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/git-raw-commits/-/git-raw-commits-3.0.0.tgz#5432f053a9744f67e8db03dbc48add81252cfdeb" @@ -7546,6 +8242,17 @@ glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.1.7: once "^1.3.0" path-is-absolute "^1.0.0" +glob@^8.0.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e" + integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^5.0.1" + once "^1.3.0" + glob@^8.0.1: version "8.0.3" resolved "https://registry.yarnpkg.com/glob/-/glob-8.0.3.tgz#415c6eb2deed9e502c68fa44a272e6da6eeca42e" @@ -7644,8 +8351,25 @@ got@^11.8.5, got@^11.8.6: p-cancelable "^2.0.0" responselike "^2.0.0" -graceful-fs@4.2.11: - version "4.2.11" +got@^12.6.1: + version "12.6.1" + resolved "https://registry.yarnpkg.com/got/-/got-12.6.1.tgz#8869560d1383353204b5a9435f782df9c091f549" + integrity sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ== + dependencies: + "@sindresorhus/is" "^5.2.0" + "@szmarczak/http-timer" "^5.0.1" + cacheable-lookup "^7.0.0" + cacheable-request "^10.2.8" + decompress-response "^6.0.0" + form-data-encoder "^2.1.2" + get-stream "^6.0.1" + http2-wrapper "^2.1.10" + lowercase-keys "^3.0.0" + p-cancelable "^3.0.0" + responselike "^3.0.0" + +graceful-fs@4.2.11, graceful-fs@^4.2.2: + version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity 
sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -7664,6 +8388,11 @@ graceful-fs@^4.2.6: resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" integrity sha512-8tLu60LgxF6XpdbK8OW3FA+IfTNBn1ZHGHKF4KQbEeSkajYw5PlYJcKluntgegDPTg8UkHjpet1T82vk6TQ68w== +grapheme-splitter@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + graphemer@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" @@ -7814,6 +8543,13 @@ hosted-git-info@^6.0.0: dependencies: lru-cache "^7.5.1" +html-encoding-sniffer@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz#696df529a7cfd82446369dc5193e590a3735b448" + integrity sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ== + dependencies: + whatwg-encoding "^3.1.1" + html-escaper@^2.0.0: version "2.0.2" resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz" @@ -7853,6 +8589,14 @@ http-proxy-agent@^5.0.0: agent-base "6" debug "4" +http-proxy-agent@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz#e9096c5afd071a3fce56e6252bb321583c124673" + integrity sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ== + dependencies: + agent-base "^7.1.0" + debug "^4.3.4" + http-proxy@^1.18.1: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" @@ -7870,6 +8614,14 @@ http2-wrapper@^1.0.0-beta.5.2: quick-lru "^5.1.1" resolve-alpn "^1.0.0" 
+http2-wrapper@^2.1.10: + version "2.2.1" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-2.2.1.tgz#310968153dcdedb160d8b72114363ef5fce1f64a" + integrity sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.2.0" + https-browserify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" @@ -7883,6 +8635,14 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" +https-proxy-agent@^7.0.0, https-proxy-agent@^7.0.1, https-proxy-agent@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz#e2645b846b90e96c6e6f347fb5b2e41f1590b09b" + integrity sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA== + dependencies: + agent-base "^7.0.2" + debug "4" + human-signals@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -7893,6 +8653,11 @@ human-signals@^4.3.0: resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-4.3.1.tgz#ab7f811e851fca97ffbd2c1fe9a958964de321b2" integrity sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ== +human-signals@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-5.0.0.tgz#42665a284f9ae0dade3ba41ebc37eb4b852f3a28" + integrity sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ== + humanize-ms@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" @@ -7907,7 +8672,7 @@ iconv-lite@0.4.24, iconv-lite@^0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -iconv-lite@^0.6.2: +iconv-lite@0.6.3, iconv-lite@^0.6.2: version 
"0.6.3" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== @@ -7976,6 +8741,11 @@ import-local@3.1.0: pkg-dir "^4.2.0" resolve-cwd "^3.0.0" +import-meta-resolve@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/import-meta-resolve/-/import-meta-resolve-4.0.0.tgz#0b1195915689f60ab00f830af0f15cc841e8919e" + integrity sha512-okYUR7ZQPH+efeuMJGlq4f8ubUgO50kByRPyt/Cy1Io4PSRsPjxME+YlVaCOx+NIToW7hCsZNFJyTPFFKepRSA== + imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" @@ -7999,7 +8769,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3, inherits@~2.0.4: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3, inherits@~2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -8123,11 +8893,16 @@ ip-regex@^5.0.0: resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-5.0.0.tgz#cd313b2ae9c80c07bd3851e12bf4fa4dc5480632" integrity sha512-fOCG6lhoKKakwv+C6KdsOnGvgXnmgfmp0myi3bcNwj3qfwPAxRKWEuFhvEFF7ceYIz6+1jRZ+yguLFAmUNPEfw== -ip@^1.1.5: +ip@^1.1.5, ip@^1.1.8: version "1.1.8" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.8.tgz#ae05948f6b075435ed3307acce04629da8cdbf48" integrity sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg== +ip@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da" + integrity 
sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ== + ipaddr.js@1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" @@ -8392,6 +9167,11 @@ is-plain-obj@^2.1.0: resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== +is-plain-obj@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0" + integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg== + is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" @@ -8404,6 +9184,11 @@ is-plain-object@^5.0.0: resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + is-property@^1.0.0, is-property@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84" @@ -8555,11 +9340,21 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== +isexe@^3.1.1: + version "3.1.1" + resolved 
"https://registry.yarnpkg.com/isexe/-/isexe-3.1.1.tgz#4a407e2bd78ddfb14bea0c27c6f7072dde775f0d" + integrity sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ== + isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== +isomorphic-timers-promises@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/isomorphic-timers-promises/-/isomorphic-timers-promises-1.0.1.tgz#e4137c24dbc54892de8abae3a4b5c1ffff381598" + integrity sha512-u4sej9B1LPSxTGKB/HiuzvEQnXH0ECYkSVQU39koSwmFAxhlEAFl9RdTvLv4TOTQUgBS5O3O5fwUxk6byBZ+IQ== + isomorphic-ws@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf" @@ -8575,6 +9370,11 @@ istanbul-lib-coverage@^3.2.0: resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== +istanbul-lib-coverage@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756" + integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== + istanbul-lib-hook@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz" @@ -8649,7 +9449,7 @@ istanbul-reports@^3.0.2: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -istanbul-reports@^3.1.5, istanbul-reports@^3.1.6: +istanbul-reports@^3.1.6: version "3.1.6" resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" integrity 
sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== @@ -8915,6 +9715,33 @@ js-yaml@4.1.0, js-yaml@^4.1.0: dependencies: argparse "^2.0.1" +jsdom@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-23.0.1.tgz#ede7ff76e89ca035b11178d200710d8982ebfee0" + integrity sha512-2i27vgvlUsGEBO9+/kJQRbtqtm+191b5zAZrU/UezVmnC2dlDAFLgDYJvAEi94T4kjsRKkezEtLQTgsNEsW2lQ== + dependencies: + cssstyle "^3.0.0" + data-urls "^5.0.0" + decimal.js "^10.4.3" + form-data "^4.0.0" + html-encoding-sniffer "^4.0.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.2" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.7" + parse5 "^7.1.2" + rrweb-cssom "^0.6.0" + saxes "^6.0.0" + symbol-tree "^3.2.4" + tough-cookie "^4.1.3" + w3c-xmlserializer "^5.0.0" + webidl-conversions "^7.0.0" + whatwg-encoding "^3.1.1" + whatwg-mimetype "^4.0.0" + whatwg-url "^14.0.0" + ws "^8.14.2" + xml-name-validator "^5.0.0" + jsesc@^2.5.1: version "2.5.2" resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" @@ -9135,10 +9962,10 @@ keypress@0.1.x: resolved "https://registry.yarnpkg.com/keypress/-/keypress-0.1.0.tgz#4a3188d4291b66b4f65edb99f806aa9ae293592a" integrity sha512-x0yf9PL/nx9Nw9oLL8ZVErFAk85/lslwEP7Vz7s5SI1ODXZIgit3C5qyWjw4DxOuO/3Hb4866SQh28a1V1d+WA== -keyv@^4.0.0: - version "4.5.3" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.3.tgz#00873d2b046df737963157bd04f294ca818c9c25" - integrity sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug== +keyv@^4.0.0, keyv@^4.5.3: + version "4.5.4" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== dependencies: json-buffer "3.0.1" @@ -9152,6 +9979,11 @@ kuler@^2.0.0: resolved "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz" integrity 
sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== +ky@^0.33.0: + version "0.33.3" + resolved "https://registry.yarnpkg.com/ky/-/ky-0.33.3.tgz#bf1ad322a3f2c3428c13cfa4b3af95e6c4a2f543" + integrity sha512-CasD9OCEQSFIam2U8efFK81Yeg8vNMTBUqtMOHlrcWQHqUX3HeCl9Dr31u4toV7emlH8Mymk5+9p0lL6mKb/Xw== + lazystream@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.1.tgz#494c831062f1f9408251ec44db1cba29242a2638" @@ -9382,6 +10214,11 @@ lines-and-columns@~2.0.3: resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-2.0.3.tgz#b2f0badedb556b747020ab8ea7f0373e22efac1b" integrity sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w== +listenercount@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/listenercount/-/listenercount-1.0.1.tgz#84c8a72ab59c4725321480c975e6508342e70937" + integrity sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ== + load-json-file@6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1" @@ -9412,10 +10249,22 @@ loady@~0.0.5: resolved "https://registry.npmjs.org/loady/-/loady-0.0.5.tgz" integrity sha512-uxKD2HIj042/HBx77NBcmEPsD+hxCgAtjEWlYNScuUjIsh/62Uyu39GOR68TBR68v+jqDL9zfftCWoUo4y03sQ== -local-pkg@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/local-pkg/-/local-pkg-0.4.3.tgz#0ff361ab3ae7f1c19113d9bb97b98b905dbc4963" - integrity sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g== +local-pkg@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/local-pkg/-/local-pkg-0.5.0.tgz#093d25a346bae59a99f80e75f6e9d36d7e8c925c" + integrity sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg== + dependencies: + mlly "^1.4.2" + pkg-types "^1.0.3" + 
+locate-app@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/locate-app/-/locate-app-2.1.0.tgz#97bbbeb3be59eec55368d20f69c77ebaaddacac1" + integrity sha512-rcVo/iLUxrd9d0lrmregK/Z5Y5NCpSwf9KlMbPpOHmKmdxdQY1Fj8NDQ5QymJTryCsBLqwmniFv2f3JKbk9Bvg== + dependencies: + n12 "0.4.0" + type-fest "2.13.0" + userhome "1.0.0" locate-path@^2.0.0: version "2.0.0" @@ -9458,6 +10307,11 @@ lodash._reinterpolate@^3.0.0: resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" integrity sha512-xYHt68QRoYGjeeM/XOE1uJtvXQAgvszfBhjV4yvsQH0u2i9I6cI6c6/eG4Hh3UAOVn0y/xAXwmTzEay49Q//HA== +lodash.clonedeep@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" + integrity sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ== + lodash.defaults@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c" @@ -9528,6 +10382,11 @@ lodash.union@^4.6.0: resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88" integrity sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw== +lodash.zip@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.zip/-/lodash.zip-4.2.0.tgz#ec6662e4896408ed4ab6c542a3990b72cc080020" + integrity sha512-C7IOaBBK/0gMORRBd8OETNx3kmOkgIWIPvyDpZSCTwUrpYmgZwJkjZeOD8ww4xbOUOs4/attY+pciKvadNfFbg== + lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21, lodash@^4.17.4: version "4.17.21" resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" @@ -9572,19 +10431,22 @@ logform@^2.3.2, logform@^2.4.0: safe-stable-stringify "^2.3.1" triple-beam "^1.3.0" +loglevel-plugin-prefix@^0.8.4: + version "0.8.4" + resolved 
"https://registry.yarnpkg.com/loglevel-plugin-prefix/-/loglevel-plugin-prefix-0.8.4.tgz#2fe0e05f1a820317d98d8c123e634c1bd84ff644" + integrity sha512-WpG9CcFAOjz/FtNht+QJeGpvVl/cdR6P0z6OcXSkr8wFJOsV2GRj2j10JLfjuA4aYkcKCNIEqRGCyTife9R8/g== + +loglevel@^1.6.0: + version "1.8.1" + resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.8.1.tgz#5c621f83d5b48c54ae93b6156353f555963377b4" + integrity sha512-tCRIJM51SHjAayKwC+QAg8hT8vg6z7GSgLJKGvzuPb1Wc+hLzqtuVLxp6/HzSPOozuK+8ErAhy7U/sVzw8Dgfg== + long@^5.0.0: version "5.2.0" resolved "https://registry.yarnpkg.com/long/-/long-5.2.0.tgz#2696dadf4b4da2ce3f6f6b89186085d94d52fd61" integrity sha512-9RTUNjK60eJbx3uz+TEGF7fUr29ZDxR5QzXcyDpeSfeH28S9ycINflOgOlppit5U+4kNTe83KQnMEerw7GmE8w== -loupe@^2.3.1: - version "2.3.4" - resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.4.tgz#7e0b9bffc76f148f9be769cb1321d3dcf3cb25f3" - integrity sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ== - dependencies: - get-func-name "^2.0.0" - -loupe@^2.3.6: +loupe@^2.3.6, loupe@^2.3.7: version "2.3.6" resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.6.tgz#76e4af498103c532d1ecc9be102036a21f787b53" integrity sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA== @@ -9593,9 +10455,14 @@ loupe@^2.3.6: lowercase-keys@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lowercase-keys@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" + integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ== + lru-cache@^5.1.1: version "5.1.1" 
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -9610,6 +10477,11 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" +lru-cache@^7.14.1: + version "7.18.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89" + integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== + lru-cache@^7.4.4, lru-cache@^7.5.1: version "7.14.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.0.tgz#21be64954a4680e303a09e9468f880b98a0b3c7f" @@ -9625,13 +10497,22 @@ lru-cache@^7.7.1: resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.0.0.tgz#b9e2a6a72a129d81ab317202d93c7691df727e61" integrity sha512-svTf/fzsKHffP42sujkO/Rjs37BCIsQVRCeNYIm9WN8rgT7ffoUnRtZCqU+6BqcSBdv8gwJeTz8knJpgACeQMw== -magic-string@^0.30.1: - version "0.30.4" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.4.tgz#c2c683265fc18dda49b56fc7318d33ca0332c98c" - integrity sha512-Q/TKtsC5BPm0kGqgBIF9oXAs/xEf2vRKiIB4wCRQTJOQIByZ1d+NnUOotvJOvNpi5RNIgVOMC3pOuaP1ZTDlVg== +magic-string@^0.30.3, magic-string@^0.30.5: + version "0.30.5" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.5.tgz#1994d980bd1c8835dc6e78db7cbd4ae4f24746f9" + integrity sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA== dependencies: "@jridgewell/sourcemap-codec" "^1.4.15" +magicast@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/magicast/-/magicast-0.3.2.tgz#42dcade5573ed8f10f5540f9d04964e21dba9130" + integrity sha512-Fjwkl6a0syt9TFN0JSYpOybxiMCkYNEeOTnOTNRbjphirLakznZXAqrXgj/7GG3D1dvETONNwrBfinvAbpunDg== + dependencies: + "@babel/parser" "^7.23.3" + "@babel/types" "^7.23.3" + source-map-js "^1.0.2" + make-dir@4.0.0, make-dir@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" @@ -9889,7 +10770,7 @@ mimic-fn@^4.0.0: mimic-response@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== mimic-response@^3.1.0: @@ -9897,6 +10778,11 @@ mimic-response@^3.1.0: resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz" integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== +mimic-response@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-4.0.0.tgz#35468b19e7c75d10f5165ea25e75a5ceea7cf70f" + integrity sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg== + min-indent@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" @@ -10088,9 +10974,9 @@ minstache@~1.2.0: dependencies: commander "1.0.4" -mitt@^3.0.0: +mitt@3.0.0, mitt@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/mitt/-/mitt-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/mitt/-/mitt-3.0.0.tgz#69ef9bd5c80ff6f57473e8d89326d01c414be0bd" integrity sha512-7dX2/10ITVyqh4aOSVI9gdape+t9l2/8QxHrFmUXu4EEUpdlxl6RudZUPZoc+zuY2hk1j7XxVroIVIan/pD/SQ== mkdirp-classic@^0.5.2: @@ -10098,7 +10984,7 @@ mkdirp-classic@^0.5.2: resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== -mkdirp@^0.5.5: +"mkdirp@>=0.5 0", mkdirp@^0.5.5: version "0.5.6" resolved 
"https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== @@ -10110,7 +10996,7 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mlly@^1.2.0, mlly@^1.4.0: +mlly@^1.2.0, mlly@^1.4.2: version "1.4.2" resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.4.2.tgz#7cf406aa319ff6563d25da6b36610a93f2a8007e" integrity sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg== @@ -10184,6 +11070,11 @@ mortice@^3.0.1: p-queue "^7.2.0" p-timeout "^6.0.0" +mrmime@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-1.0.1.tgz#5f90c825fad4bdd41dc914eff5d1a8cfdaf24f27" + integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== + ms@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" @@ -10248,6 +11139,11 @@ mute-stream@1.0.0, mute-stream@~1.0.0: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e" integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA== +n12@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/n12/-/n12-0.4.0.tgz#363058560b435e6857b5e039ed5eab08c5122e5e" + integrity sha512-p/hj4zQ8d3pbbFLQuN1K9honUxiDDhueOWyFLw/XgBv+wZCE44bcLH4CIcsolOceJQduh4Jf7m/LfaTxyGmGtQ== + nan@^2.16.0, nan@^2.17.0: version "2.17.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" @@ -10258,10 +11154,10 @@ nanoid@3.3.3: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.3.tgz#fd8e8b7aa761fe807dba2d1b98fb7241bb724a25" integrity 
sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w== -nanoid@^3.3.6: - version "3.3.6" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" - integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== +nanoid@^3.3.7: + version "3.3.7" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" + integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== nanoid@^4.0.0: version "4.0.2" @@ -10329,6 +11225,11 @@ node-addon-api@^5.0.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762" integrity sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA== +node-domexception@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" + integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== + node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.npmjs.org/@achingbrain/node-fetch/-/node-fetch-2.6.7.tgz" @@ -10341,6 +11242,15 @@ node-fetch@^2.6.12: dependencies: whatwg-url "^5.0.0" +node-fetch@^3.3.1, node-fetch@^3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-3.3.2.tgz#d1e889bacdf733b4ff3b2b243eb7a12866a0b78b" + integrity sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA== + dependencies: + data-uri-to-buffer "^4.0.0" + fetch-blob "^3.1.4" + formdata-polyfill "^4.0.10" + node-forge@^1.1.0: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -10451,6 
+11361,39 @@ node-releases@^2.0.6: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== +node-stdlib-browser@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/node-stdlib-browser/-/node-stdlib-browser-1.2.0.tgz#5ddcfdf4063b88fb282979a1aa6ddab9728d5e4c" + integrity sha512-VSjFxUhRhkyed8AtLwSCkMrJRfQ3e2lGtG3sP6FEgaLKBBbxM/dLfjRe1+iLhjvyLFW3tBQ8+c0pcOtXGbAZJg== + dependencies: + assert "^2.0.0" + browser-resolve "^2.0.0" + browserify-zlib "^0.2.0" + buffer "^5.7.1" + console-browserify "^1.1.0" + constants-browserify "^1.0.0" + create-require "^1.1.1" + crypto-browserify "^3.11.0" + domain-browser "^4.22.0" + events "^3.0.0" + https-browserify "^1.0.0" + isomorphic-timers-promises "^1.0.1" + os-browserify "^0.3.0" + path-browserify "^1.0.1" + pkg-dir "^5.0.0" + process "^0.11.10" + punycode "^1.4.1" + querystring-es3 "^0.2.1" + readable-stream "^3.6.0" + stream-browserify "^3.0.0" + stream-http "^3.2.0" + string_decoder "^1.0.0" + timers-browserify "^2.0.4" + tty-browserify "0.0.1" + url "^0.11.0" + util "^0.12.4" + vm-browserify "^1.0.1" + nopt@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88" @@ -10505,6 +11448,11 @@ normalize-url@^6.0.1: resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== +normalize-url@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-8.0.0.tgz#593dbd284f743e8dcf6a5ddf8fadff149c82701a" + integrity sha512-uVFpKhj5MheNBJRTiMZ9pE/7hD1QTeEvugSJW/OmLzAp78PB5O6adfMNTvmfKhXBkvCzC+rqifWcVYpGFwTjnw== + npm-bundled@^1.1.2: version "1.1.2" resolved 
"https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.2.tgz" @@ -10657,6 +11605,11 @@ npmlog@^6.0.2: gauge "^4.0.3" set-blocking "^2.0.0" +nwsapi@^2.2.7: + version "2.2.7" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.7.tgz#738e0707d3128cb750dddcfe90e4610482df0f30" + integrity sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ== + nx@16.9.0, "nx@>=16.5.1 < 17": version "16.9.0" resolved "https://registry.yarnpkg.com/nx/-/nx-16.9.0.tgz#fad51967bb80c12b311f3699292566cf445232f0" @@ -10962,6 +11915,11 @@ p-cancelable@^2.0.0: resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== +p-cancelable@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-3.0.0.tgz#63826694b54d61ca1c20ebcb6d3ecf5e14cd8050" + integrity sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw== + p-defer@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-4.0.0.tgz#8082770aeeb10eb6b408abe91866738741ddd5d2" @@ -11000,6 +11958,13 @@ p-limit@^4.0.0: dependencies: yocto-queue "^1.0.0" +p-limit@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-5.0.0.tgz#6946d5b7140b649b7a33a027d89b4c625b3a5985" + integrity sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ== + dependencies: + yocto-queue "^1.0.0" + p-locate@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" @@ -11129,6 +12094,29 @@ p-waterfall@2.1.1: dependencies: p-reduce "^2.0.0" +pac-proxy-agent@^7.0.0, pac-proxy-agent@^7.0.1: + version "7.0.1" + resolved 
"https://registry.yarnpkg.com/pac-proxy-agent/-/pac-proxy-agent-7.0.1.tgz#6b9ddc002ec3ff0ba5fdf4a8a21d363bcc612d75" + integrity sha512-ASV8yU4LLKBAjqIPMbrgtaKIvxQri/yh2OpI+S6hVa9JRkUI3Y3NPFbfngDtY7oFtSMD3w31Xns89mDa3Feo5A== + dependencies: + "@tootallnate/quickjs-emscripten" "^0.23.0" + agent-base "^7.0.2" + debug "^4.3.4" + get-uri "^6.0.1" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.2" + pac-resolver "^7.0.0" + socks-proxy-agent "^8.0.2" + +pac-resolver@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pac-resolver/-/pac-resolver-7.0.0.tgz#79376f1ca26baf245b96b34c339d79bff25e900c" + integrity sha512-Fd9lT9vJbHYRACT8OhCbZBbxr6KRSawSovFpy8nDGshaK99S/EBhVIHp9+crhxrsZOuvLpgL1n23iyPg6Rl2hg== + dependencies: + degenerator "^5.0.0" + ip "^1.1.8" + netmask "^2.0.2" + package-hash@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz" @@ -11223,6 +12211,13 @@ parse-url@^8.1.0: dependencies: parse-path "^7.0.0" +parse5@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32" + integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== + dependencies: + entities "^4.4.0" + parseurl@~1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" @@ -11408,6 +12403,13 @@ pkg-dir@^4.1.0, pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" +pkg-dir@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-5.0.0.tgz#a02d6aebe6ba133a928f74aec20bafdfe6b8e760" + integrity sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA== + dependencies: + find-up "^5.0.0" + pkg-types@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-1.0.3.tgz#988b42ab19254c01614d13f4f65a2cfc7880f868" @@ -11422,12 +12424,12 @@ platform@^1.3.3: resolved 
"https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg== -postcss@^8.4.27: - version "8.4.31" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d" - integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== +postcss@^8.4.32: + version "8.4.32" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.32.tgz#1dac6ac51ab19adb21b8b34fd2d93a86440ef6c9" + integrity sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw== dependencies: - nanoid "^3.3.6" + nanoid "^3.3.7" picocolors "^1.0.0" source-map-js "^1.0.2" @@ -11448,7 +12450,7 @@ prettier@^3.0.3: resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643" integrity sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg== -pretty-format@^29.5.0, pretty-format@^29.7.0: +pretty-format@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== @@ -11494,7 +12496,7 @@ process@^0.11.10: resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== -progress@^2.0.3: +progress@2.0.3, progress@^2.0.3: version "2.0.3" resolved "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== @@ -11506,6 +12508,14 @@ prom-client@^14.2.0: dependencies: tdigest "^0.1.1" +prom-client@^15.1.0: + version 
"15.1.0" + resolved "https://registry.yarnpkg.com/prom-client/-/prom-client-15.1.0.tgz#816a4a2128da169d0471093baeccc6d2f17a4613" + integrity sha512-cCD7jLTqyPdjEPBo/Xk4Iu8jxjuZgZJ3e/oET3L+ZwOuap/7Cw3dH/TJSsZKs1TQLZ2IHpIlRAKw82ef06kmMw== + dependencies: + "@opentelemetry/api" "^1.4.0" + tdigest "^0.1.1" + promise-inflight@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" @@ -11600,6 +12610,34 @@ proxy-addr@^2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-agent@6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/proxy-agent/-/proxy-agent-6.3.0.tgz#72f7bb20eb06049db79f7f86c49342c34f9ba08d" + integrity sha512-0LdR757eTj/JfuU7TL2YCuAZnxWXu3tkJbg4Oq3geW/qFNT/32T0sp2HnZ9O0lMR4q3vwAt0+xCA8SR0WAD0og== + dependencies: + agent-base "^7.0.2" + debug "^4.3.4" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.0" + lru-cache "^7.14.1" + pac-proxy-agent "^7.0.0" + proxy-from-env "^1.1.0" + socks-proxy-agent "^8.0.1" + +proxy-agent@6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/proxy-agent/-/proxy-agent-6.3.1.tgz#40e7b230552cf44fd23ffaf7c59024b692612687" + integrity sha512-Rb5RVBy1iyqOtNl15Cw/llpeLH8bsb37gM1FUfKQ+Wck6xHlbAhWGUFiTRHtkjqGTA5pSHz6+0hrPW/oECihPQ== + dependencies: + agent-base "^7.0.2" + debug "^4.3.4" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.2" + lru-cache "^7.14.1" + pac-proxy-agent "^7.0.1" + proxy-from-env "^1.1.0" + socks-proxy-agent "^8.0.2" + proxy-from-env@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" @@ -11640,7 +12678,7 @@ punycode@1.3.2: resolved "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= -punycode@^1.2.4, punycode@^1.3.2: +punycode@^1.2.4, punycode@^1.3.2, punycode@^1.4.1: version "1.4.1" resolved 
"https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= @@ -11650,6 +12688,23 @@ punycode@^2.1.0, punycode@^2.1.1: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +punycode@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + +puppeteer-core@^20.9.0: + version "20.9.0" + resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-20.9.0.tgz#6f4b420001b64419deab38d398a4d9cd071040e6" + integrity sha512-H9fYZQzMTRrkboEfPmf7m3CLDN6JvbxXA3qTtS+dFt27tR+CsFHzPsT6pzp6lYL6bJbAPaR0HaPO6uSi+F94Pg== + dependencies: + "@puppeteer/browsers" "1.4.6" + chromium-bidi "0.4.16" + cross-fetch "4.0.0" + debug "4.3.4" + devtools-protocol "0.0.1147663" + ws "8.13.0" + qjobs@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" @@ -11669,7 +12724,12 @@ qs@^6.11.0, qs@^6.11.1: dependencies: side-channel "^1.0.4" -querystring-es3@^0.2.0: +query-selector-shadow-dom@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/query-selector-shadow-dom/-/query-selector-shadow-dom-1.0.1.tgz#1c7b0058eff4881ac44f45d8f84ede32e9a2f349" + integrity sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw== + +querystring-es3@^0.2.0, querystring-es3@^0.2.1: version "0.2.1" resolved "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz" integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= @@ -11679,6 +12739,11 @@ querystring@0.2.0: resolved "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= 
+querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + queue-microtask@^1.2.2, queue-microtask@^1.2.3: version "1.2.3" resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" @@ -11704,6 +12769,11 @@ quick-lru@^5.1.1: resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz" integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== +race-signal@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/race-signal/-/race-signal-1.0.2.tgz#e42379fba0cec4ee8dab7c9bbbd4aa6e0d14c25f" + integrity sha512-o3xNv0iTcIDQCXFlF6fPAMEBRjFxssgGoRqLbg06m+AdzEXXLUmoNOoUHTVz2NoBI8hHwKFKoC6IqyNtWr2bww== + rambda@^7.4.0: version "7.5.0" resolved "https://registry.yarnpkg.com/rambda/-/rambda-7.5.0.tgz#1865044c59bc0b16f63026c6e5a97e4b1bbe98fe" @@ -11864,6 +12934,13 @@ readdir-glob@^1.0.0: dependencies: minimatch "^5.1.0" +readdir-glob@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/readdir-glob/-/readdir-glob-1.1.3.tgz#c3d831f51f5e7bfa62fa2ffbe4b508c640f09584" + integrity sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA== + dependencies: + minimatch "^5.1.0" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" @@ -11946,7 +13023,7 @@ requires-port@^1.0.0: resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== -resolve-alpn@^1.0.0: +resolve-alpn@^1.0.0, resolve-alpn@^1.2.0: version "1.2.1" resolved 
"https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== @@ -11996,6 +13073,15 @@ resolve@^1.10.1: dependencies: path-parse "^1.0.6" +resolve@^1.17.0: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + resolve@^1.3.2: version "1.20.0" resolved "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz" @@ -12011,6 +13097,20 @@ responselike@^2.0.0: dependencies: lowercase-keys "^2.0.0" +responselike@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-3.0.0.tgz#20decb6c298aff0dbee1c355ca95461d42823626" + integrity sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg== + dependencies: + lowercase-keys "^3.0.0" + +resq@^1.9.1: + version "1.11.0" + resolved "https://registry.yarnpkg.com/resq/-/resq-1.11.0.tgz#edec8c58be9af800fd628118c0ca8815283de196" + integrity sha512-G10EBz+zAAy3zUd/CDoBbXRL6ia9kOo3xRHrMDsHljI0GDkhYlyjwoCx5+3eCC4swi1uCoZQhskuJkj7Gp57Bw== + dependencies: + fast-deep-equal "^2.0.1" + restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -12066,6 +13166,18 @@ rfdc@^1.1.4, rfdc@^1.2.0, rfdc@^1.3.0: resolved "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz" integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== +rgb2hex@0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/rgb2hex/-/rgb2hex-0.2.5.tgz#f82230cd3ab1364fa73c99be3a691ed688f8dbdc" + integrity 
sha512-22MOP1Rh7sAo1BZpDG6R5RFYzR2lYEgwq7HEmyW2qcsOqR2lQKmn+O//xV3YG/0rrhMC6KVX2hU+ZXuaw9a5bw== + +rimraf@2: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -12107,13 +13219,30 @@ roarr@^2.15.3: semver-compare "^1.0.0" sprintf-js "^1.1.2" -rollup@^3.27.1: - version "3.29.4" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-3.29.4.tgz#4d70c0f9834146df8705bfb69a9a19c9e1109981" - integrity sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw== +rollup@^4.2.0: + version "4.6.1" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.6.1.tgz#351501c86b5b4f976dde8c5837516452b59921f8" + integrity sha512-jZHaZotEHQaHLgKr8JnQiDT1rmatjgKlMekyksz+yk9jt/8z9quNjnKNRoaM0wd9DC2QKXjmWWuDYtM3jfF8pQ== optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.6.1" + "@rollup/rollup-android-arm64" "4.6.1" + "@rollup/rollup-darwin-arm64" "4.6.1" + "@rollup/rollup-darwin-x64" "4.6.1" + "@rollup/rollup-linux-arm-gnueabihf" "4.6.1" + "@rollup/rollup-linux-arm64-gnu" "4.6.1" + "@rollup/rollup-linux-arm64-musl" "4.6.1" + "@rollup/rollup-linux-x64-gnu" "4.6.1" + "@rollup/rollup-linux-x64-musl" "4.6.1" + "@rollup/rollup-win32-arm64-msvc" "4.6.1" + "@rollup/rollup-win32-ia32-msvc" "4.6.1" + "@rollup/rollup-win32-x64-msvc" "4.6.1" fsevents "~2.3.2" +rrweb-cssom@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz#ed298055b97cbddcdeb278f904857629dec5e0e1" + integrity sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw== + run-applescript@^5.0.0: version "5.0.0" resolved 
"https://registry.yarnpkg.com/run-applescript/-/run-applescript-5.0.0.tgz#e11e1c932e055d5c6b40d98374e0268d9b11899c" @@ -12166,6 +13295,11 @@ rxjs@^7.8.0: dependencies: tslib "^2.1.0" +safaridriver@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/safaridriver/-/safaridriver-0.1.0.tgz#8ff901e847b003c6a52b534028f57cddc82d6b14" + integrity sha512-azzzIP3gR1TB9bVPv7QO4Zjw0rR1BWEU/s2aFdUMN48gxDjxEB13grAEuXDmkKPgE74cObymDxmAmZnL3clj4w== + safe-array-concat@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.0.1.tgz#91686a63ce3adbea14d61b14c99572a8ff84754c" @@ -12229,6 +13363,13 @@ sax@>=0.6.0: resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== +saxes@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-6.0.0.tgz#fe5b4a4768df4f14a201b1ba6a65c1f3d9988cc5" + integrity sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA== + dependencies: + xmlchars "^2.2.0" + schema-utils@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" @@ -12295,6 +13436,13 @@ semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semve dependencies: lru-cache "^6.0.0" +serialize-error@^11.0.1: + version "11.0.3" + resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-11.0.3.tgz#b54f439e15da5b4961340fbbd376b6b04aa52e92" + integrity sha512-2G2y++21dhj2R7iHAdd0FIzjGwuKZld+7Pl/bTU6YIkrC2ZMbVUjm+luj6A6V34Rv9XfKJDKpTWu9W4Gse1D9g== + dependencies: + type-fest "^2.12.2" + serialize-error@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-7.0.1.tgz#f1360b0447f61ffb483ec4157c737fab7d778e18" @@ -12335,7 +13483,7 @@ set-function-name@^2.0.0: functions-have-names "^1.2.3" has-property-descriptors 
"^1.0.0" -setimmediate@^1.0.4, setimmediate@^1.0.5: +setimmediate@^1.0.4, setimmediate@^1.0.5, setimmediate@~1.0.4: version "1.0.5" resolved "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= @@ -12408,7 +13556,7 @@ signal-exit@3.0.7, signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== -signal-exit@^4.0.1: +signal-exit@^4.0.1, signal-exit@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== @@ -12460,6 +13608,15 @@ sinon@^16.0.0: nise "^5.1.4" supports-color "^7.2.0" +sirv@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/sirv/-/sirv-2.0.3.tgz#ca5868b87205a74bef62a469ed0296abceccd446" + integrity sha512-O9jm9BsID1P+0HOi81VpXPoDxYP374pkOLzACAoyUQ/3OUVndNpsz6wMnY2z+yOxzbllCKZrM+9QrWsv4THnyA== + dependencies: + "@polka/url" "^1.0.0-next.20" + mrmime "^1.0.0" + totalist "^3.0.0" + slash@3.0.0, slash@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" @@ -12538,6 +13695,15 @@ socks-proxy-agent@^7.0.0: debug "^4.3.3" socks "^2.6.2" +socks-proxy-agent@^8.0.1, socks-proxy-agent@^8.0.2: + version "8.0.2" + resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-8.0.2.tgz#5acbd7be7baf18c46a3f293a840109a430a640ad" + integrity sha512-8zuqoLv1aP/66PHF5TqwJ7Czm3Yv32urJQHrVyhD7mmA6d61Zv8cIXQYPTWwmg6qlupnPvs/QKDmfa4P/qct2g== + dependencies: + agent-base "^7.0.2" + debug "^4.3.4" + socks "^2.7.1" + socks@^2.6.1, socks@^2.6.2: version "2.6.2" resolved "https://registry.npmjs.org/socks/-/socks-2.6.2.tgz" @@ -12546,6 +13712,14 @@ 
socks@^2.6.1, socks@^2.6.2: ip "^1.1.5" smart-buffer "^4.2.0" +socks@^2.7.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55" + integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ== + dependencies: + ip "^2.0.0" + smart-buffer "^4.2.0" + sonic-boom@^3.1.0: version "3.3.0" resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-3.3.0.tgz#cffab6dafee3b2bcb88d08d589394198bee1838c" @@ -12578,7 +13752,7 @@ source-map@^0.5.0, source-map@~0.5.3: resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= -source-map@^0.6.0, source-map@^0.6.1: +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== @@ -12633,7 +13807,7 @@ split2@^3.2.2: dependencies: readable-stream "^3.0.0" -split2@^4.0.0: +split2@^4.0.0, split2@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4" integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg== @@ -12715,10 +13889,10 @@ statuses@~1.5.0: resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== -std-env@^3.3.3: - version "3.4.3" - resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.4.3.tgz#326f11db518db751c83fd58574f449b7c3060910" - integrity sha512-f9aPhy8fYBuMN+sNfakZV18U39PbalgjXG3lLB9WkaYTxijru61wb57V9wxxNthXM5Sd88ETBWi29qLAsHO52Q== +std-env@^3.5.0: + version "3.6.0" + resolved 
"https://registry.yarnpkg.com/std-env/-/std-env-3.6.0.tgz#94807562bddc68fa90f2e02c5fd5b6865bb4e98e" + integrity sha512-aFZ19IgVmhdB2uX599ve2kE6BIE3YMnQ6Gp6BURhW/oIzpXGKr878TQfAQZn1+i0Flcc/UKUy1gOlcfaUBCryg== stdin-discarder@^0.1.0: version "0.1.0" @@ -12939,6 +14113,13 @@ strip-ansi@^7.0.1: dependencies: ansi-regex "^6.0.1" +strip-ansi@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" @@ -12976,7 +14157,7 @@ strip-json-comments@^2.0.0: resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= -strip-literal@^1.0.1: +strip-literal@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/strip-literal/-/strip-literal-1.3.0.tgz#db3942c2ec1699e6836ad230090b84bb458e3a07" integrity sha512-PugKzOsyXpArk0yWmUwqOZecSO0GH0bPoctLcqNDH9J04pVW3lflYE0ujElBGTloevcxF5MofAOZ7C5l2b+wLg== @@ -13054,6 +14235,11 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + synckit@^0.8.5: version "0.8.5" resolved "https://registry.yarnpkg.com/synckit/-/synckit-0.8.5.tgz#b7f4358f9bb559437f9f167eb6bc46b3c9818fa3" @@ -13072,7 
+14258,7 @@ tapable@^2.1.1, tapable@^2.2.0: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -tar-fs@^3.0.4: +tar-fs@3.0.4, tar-fs@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.4.tgz#a21dc60a2d5d9f55e0089ccd78124f1d3771dbbf" integrity sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w== @@ -13102,7 +14288,7 @@ tar-stream@^2.0.0, tar-stream@^2.2.0, tar-stream@~2.2.0: inherits "^2.0.3" readable-stream "^3.1.1" -tar-stream@^3.1.5: +tar-stream@^3.0.0, tar-stream@^3.1.5: version "3.1.6" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.6.tgz#6520607b55a06f4a2e2e04db360ba7d338cc5bab" integrity sha512-B/UyjYwPpMBv+PaFSWAmtYjwdrlEaZQEhMIBFNC5oEG8lpiW8XjcSdmEaClj28ArfKScKHs2nshz3k2le6crsg== @@ -13238,7 +14424,7 @@ through2@^2.0.0: readable-stream "~2.3.6" xtend "~4.0.1" -through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.6: +through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.6, through@^2.3.8: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== @@ -13267,17 +14453,17 @@ tiny-lru@^11.0.1: dependencies: esm "^3.2.25" -tinybench@^2.5.0: +tinybench@^2.5.1: version "2.5.1" resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.5.1.tgz#3408f6552125e53a5a48adee31261686fd71587e" integrity sha512-65NKvSuAVDP/n4CqH+a9w2kTlLReS9vhsAP06MWx+/89nMinJyB2icyl58RIcqCmIggpojIGeuJGhjU1aGMBSg== -tinypool@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/tinypool/-/tinypool-0.7.0.tgz#88053cc99b4a594382af23190c609d93fddf8021" - integrity 
sha512-zSYNUlYSMhJ6Zdou4cJwo/p7w5nmAH17GRfU/ui3ctvjXFErXXkruT4MWW6poDeXgCaIBlGLrfU6TbTXxyGMww== +tinypool@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/tinypool/-/tinypool-0.8.1.tgz#b6c4e4972ede3e3e5cda74a3da1679303d386b03" + integrity sha512-zBTCK0cCgRROxvs9c0CGK838sPkeokNGdQVUUwHAbynHFlmyJYj825f/oRs528HaIJ97lo0pLIlDUzwN+IorWg== -tinyspy@^2.1.1: +tinyspy@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/tinyspy/-/tinyspy-2.2.0.tgz#9dc04b072746520b432f77ea2c2d17933de5d6ce" integrity sha512-d2eda04AN/cPOR89F7Xv5bK/jrQEhmcLFe6HFldoeO9AJtps+fqEnh486vnT/8y4bw38pSyxDcTCAq+Ks2aJTg== @@ -13323,6 +14509,11 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +totalist@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8" + integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ== + tough-cookie@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz" @@ -13341,11 +14532,33 @@ tough-cookie@^4.0.0: punycode "^2.1.1" universalify "^0.1.2" +tough-cookie@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" + integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + +tr46@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-5.0.0.tgz#3b46d583613ec7283020d79019f1335723801cec" + integrity sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g== + dependencies: + punycode "^2.3.1" + 
tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== +"traverse@>=0.3.0 <0.4": + version "0.3.9" + resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9" + integrity sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ== + trim-newlines@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144" @@ -13446,6 +14659,11 @@ tslib@^2.0.0, tslib@^2.2.0: resolved "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz" integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg== +tslib@^2.0.1: + version "2.6.2" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== + tslib@^2.1.0: version "2.2.0" resolved "https://registry.npmjs.org/tslib/-/tslib-2.2.0.tgz" @@ -13466,6 +14684,11 @@ tty-browserify@0.0.0: resolved "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= +tty-browserify@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.1.tgz#3f05251ee17904dfd0677546670db9651682b811" + integrity sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw== + tuf-js@^1.1.7: version "1.1.7" resolved "https://registry.yarnpkg.com/tuf-js/-/tuf-js-1.1.7.tgz#21b7ae92a9373015be77dfe0cb282a80ec3bbe43" @@ -13492,11 +14715,16 @@ type-check@^0.4.0, type-check@~0.4.0: dependencies: prelude-ls "^1.2.1" -type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8: 
+type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.8: version "4.0.8" resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== +type-fest@2.13.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.13.0.tgz#d1ecee38af29eb2e863b22299a3d68ef30d2abfb" + integrity sha512-lPfAm42MxE4/456+QyIaaVBAwgpJb6xZ8PRu09utnhPdWwcyj9vgy6Sq0Z5yNbJ21EdxB5dRU/Qg8bsyAMtlcw== + type-fest@^0.13.1: version "0.13.1" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.13.1.tgz#0172cb5bce80b0bd542ea348db50c7e21834d934" @@ -13532,6 +14760,11 @@ type-fest@^0.8.0, type-fest@^0.8.1: resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== +type-fest@^2.12.2: + version "2.19.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" + integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== + type-fest@^3.0.0: version "3.7.2" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-3.7.2.tgz#08f83ee3229b63077e95c9035034d32905969457" @@ -13675,6 +14908,19 @@ unbox-primitive@^1.0.0, unbox-primitive@^1.0.2: has-symbols "^1.0.3" which-boxed-primitive "^1.0.2" +unbzip2-stream@1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" + integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== + dependencies: + buffer "^5.2.1" + through "^2.3.8" + +undici-types@~5.26.4: + version "5.26.5" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + integrity 
sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== + undici@^5.12.0: version "5.22.1" resolved "https://registry.yarnpkg.com/undici/-/undici-5.22.1.tgz#877d512effef2ac8be65e695f3586922e1a57d7b" @@ -13720,6 +14966,11 @@ universalify@^0.1.0, universalify@^0.1.2: resolved "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + universalify@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" @@ -13735,6 +14986,22 @@ untildify@^4.0.0: resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== +unzipper@^0.10.14: + version "0.10.14" + resolved "https://registry.yarnpkg.com/unzipper/-/unzipper-0.10.14.tgz#d2b33c977714da0fbc0f82774ad35470a7c962b1" + integrity sha512-ti4wZj+0bQTiX2KmKWuwj7lhV+2n//uXEotUmGuQqrbVZSEGFMbI68+c6JCQ8aAmUWYvtHEz2A8K6wXvueR/6g== + dependencies: + big-integer "^1.6.17" + binary "~0.3.0" + bluebird "~3.4.1" + buffer-indexof-polyfill "~1.0.0" + duplexer2 "~0.1.4" + fstream "^1.0.12" + graceful-fs "^4.2.2" + listenercount "~1.0.1" + readable-stream "~2.3.6" + setimmediate "~1.0.4" + upath@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/upath/-/upath-2.0.1.tgz#50c73dea68d6f6b990f51d279ce6081665d61a8b" @@ -13755,6 +15022,14 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" +url-parse@^1.5.3: + version "1.5.10" + resolved 
"https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + url@0.10.3: version "0.10.3" resolved "https://registry.npmjs.org/url/-/url-0.10.3.tgz" @@ -13778,6 +15053,11 @@ urlgrey@1.0.0: dependencies: fast-url-parser "^1.1.3" +userhome@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/userhome/-/userhome-1.0.0.tgz#b6491ff12d21a5e72671df9ccc8717e1c6688c0b" + integrity sha512-ayFKY3H+Pwfy4W98yPdtH1VqH4psDeyW8lYYFzfecR9d6hqLpqhecktvYR3SEEXt7vG0S1JEpciI3g94pMErig== + utf8-byte-length@^1.0.1: version "1.0.4" resolved "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz" @@ -13814,7 +15094,7 @@ util@^0.12.0: safe-buffer "^5.1.2" which-typed-array "^1.1.2" -util@^0.12.5: +util@^0.12.4, util@^0.12.5: version "0.12.5" resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== @@ -13845,7 +15125,7 @@ uuid@^3.3.2, uuid@^3.3.3: resolved "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^9.0.0: +uuid@^9.0.0, uuid@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== @@ -13877,10 +15157,10 @@ v8-to-istanbul@^9.0.0: "@types/istanbul-lib-coverage" "^2.0.1" convert-source-map "^1.6.0" -v8-to-istanbul@^9.1.0: - version "9.1.3" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.1.3.tgz#ea456604101cd18005ac2cae3cdd1aa058a6306b" - integrity 
sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg== +v8-to-istanbul@^9.2.0: + version "9.2.0" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz#2ed7644a245cddd83d4e087b9b33b3e62dfd10ad" + integrity sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA== dependencies: "@jridgewell/trace-mapping" "^0.3.12" "@types/istanbul-lib-coverage" "^2.0.1" @@ -13913,62 +15193,75 @@ vary@^1: resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= -vite-node@0.34.6: - version "0.34.6" - resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-0.34.6.tgz#34d19795de1498562bf21541a58edcd106328a17" - integrity sha512-nlBMJ9x6n7/Amaz6F3zJ97EBwR2FkzhBRxF5e+jE6LA3yi6Wtc2lyTij1OnDMIr34v5g/tVQtsVAzhT0jc5ygA== +vite-node@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-1.1.0.tgz#0ebcb7398692e378954786dfba28e905e28a76b4" + integrity sha512-jV48DDUxGLEBdHCQvxL1mEh7+naVy+nhUUUaPAZLd3FJgXuxQiewHcfeZebbJ6onDqNGkP4r3MhQ342PRlG81Q== dependencies: cac "^6.7.14" debug "^4.3.4" - mlly "^1.4.0" pathe "^1.1.1" picocolors "^1.0.0" - vite "^3.0.0 || ^4.0.0 || ^5.0.0-0" + vite "^5.0.0" -"vite@^3.0.0 || ^4.0.0 || ^5.0.0-0", "vite@^3.1.0 || ^4.0.0 || ^5.0.0-0": - version "4.4.11" - resolved "https://registry.yarnpkg.com/vite/-/vite-4.4.11.tgz#babdb055b08c69cfc4c468072a2e6c9ca62102b0" - integrity sha512-ksNZJlkcU9b0lBwAGZGGaZHCMqHsc8OpgtoYhsQ4/I2v5cnpmmmqe5pM4nv/4Hn6G/2GhTdj0DhZh2e+Er1q5A== +vite-plugin-node-polyfills@^0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/vite-plugin-node-polyfills/-/vite-plugin-node-polyfills-0.18.0.tgz#2ad147960f7a35dbbb1c9f9c1ae928bd0f438c1e" + integrity sha512-zkdLD3gpOhLFyxYRMJ5apk0RcODhomuS3XQgExowiX8naoc251JfcP3toqnfDlMdF0xuPYahre/H38xAcq8ApA== dependencies: - esbuild "^0.18.10" - postcss "^8.4.27" - rollup "^3.27.1" - optionalDependencies: - 
fsevents "~2.3.2" + "@rollup/plugin-inject" "^5.0.5" + node-stdlib-browser "^1.2.0" -vitest-when@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/vitest-when/-/vitest-when-0.2.0.tgz#3b3234efa6be0f976616f54e35357b56ed5e5f5f" - integrity sha512-BS1+L6HPwV3cMQB+pGa1Zr7gFkKX1TG8GbdgzpTlyW19nvWBmqDZW5GucS79K/lEu0ULWOUceHM56dnr8P/ajg== +vite-plugin-top-level-await@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/vite-plugin-top-level-await/-/vite-plugin-top-level-await-1.4.1.tgz#607dfe084157550fa33df18062b99ceea774cd9c" + integrity sha512-hogbZ6yT7+AqBaV6lK9JRNvJDn4/IJvHLu6ET06arNfo0t2IsyCaon7el9Xa8OumH+ESuq//SDf8xscZFE0rWw== + dependencies: + "@rollup/plugin-virtual" "^3.0.2" + "@swc/core" "^1.3.100" + uuid "^9.0.1" -vitest@^0.34.6: - version "0.34.6" - resolved "https://registry.yarnpkg.com/vitest/-/vitest-0.34.6.tgz#44880feeeef493c04b7f795ed268f24a543250d7" - integrity sha512-+5CALsOvbNKnS+ZHMXtuUC7nL8/7F1F2DnHGjSsszX8zCjWSSviphCb/NuS9Nzf4Q03KyyDRBAXhF/8lffME4Q== +vite@^5.0.0: + version "5.0.6" + resolved "https://registry.yarnpkg.com/vite/-/vite-5.0.6.tgz#f9e13503a4c5ccd67312c67803dec921f3bdea7c" + integrity sha512-MD3joyAEBtV7QZPl2JVVUai6zHms3YOmLR+BpMzLlX2Yzjfcc4gTgNi09d/Rua3F4EtC8zdwPU8eQYyib4vVMQ== dependencies: - "@types/chai" "^4.3.5" - "@types/chai-subset" "^1.3.3" - "@types/node" "*" - "@vitest/expect" "0.34.6" - "@vitest/runner" "0.34.6" - "@vitest/snapshot" "0.34.6" - "@vitest/spy" "0.34.6" - "@vitest/utils" "0.34.6" - acorn "^8.9.0" - acorn-walk "^8.2.0" + esbuild "^0.19.3" + postcss "^8.4.32" + rollup "^4.2.0" + optionalDependencies: + fsevents "~2.3.3" + +vitest-when@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/vitest-when/-/vitest-when-0.3.0.tgz#663d4274f1e7302bd24ec00dda8269d20b2eff04" + integrity sha512-wYfmzd+GkvdNNhbeb/40PnKpetUP5I7qxvdbu1OAXRXaLrnLfSrJTa/dMIbqqrc8SA0vhonpw5p0RHDXwhDM1Q== + +vitest@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/vitest/-/vitest-1.1.0.tgz#47ba67c564aa137b53b0197d2a992908e7f5b04d" + integrity sha512-oDFiCrw7dd3Jf06HoMtSRARivvyjHJaTxikFxuqJjO76U436PqlVw1uLn7a8OSPrhSfMGVaRakKpA2lePdw79A== + dependencies: + "@vitest/expect" "1.1.0" + "@vitest/runner" "1.1.0" + "@vitest/snapshot" "1.1.0" + "@vitest/spy" "1.1.0" + "@vitest/utils" "1.1.0" + acorn-walk "^8.3.0" cac "^6.7.14" chai "^4.3.10" debug "^4.3.4" - local-pkg "^0.4.3" - magic-string "^0.30.1" + execa "^8.0.1" + local-pkg "^0.5.0" + magic-string "^0.30.5" pathe "^1.1.1" picocolors "^1.0.0" - std-env "^3.3.3" - strip-literal "^1.0.1" - tinybench "^2.5.0" - tinypool "^0.7.0" - vite "^3.1.0 || ^4.0.0 || ^5.0.0-0" - vite-node "0.34.6" + std-env "^3.5.0" + strip-literal "^1.3.0" + tinybench "^2.5.1" + tinypool "^0.8.1" + vite "^5.0.0" + vite-node "1.1.0" why-is-node-running "^2.2.2" vm-browserify@^1.0.1: @@ -13981,7 +15274,14 @@ void-elements@^2.0.0: resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" integrity sha512-qZKX4RnBzH2ugr8Lxa7x+0V6XD9Sb/ouARtiasEQCHB1EVU4NXtmHsDDrx1dO4ne5fc3J6EW05BP1Dl0z0iung== -wait-port@^1.1.0: +w3c-xmlserializer@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz#f925ba26855158594d907313cedd1476c5967f6c" + integrity sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA== + dependencies: + xml-name-validator "^5.0.0" + +wait-port@^1.0.4, wait-port@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/wait-port/-/wait-port-1.1.0.tgz#e5d64ee071118d985e2b658ae7ad32b2ce29b6b5" integrity sha512-3e04qkoN3LxTMLakdqeWth8nih8usyg+sf1Bgdf9wwUkp05iuK1eSY/QpLvscT/+F/gA89+LpUmmgBtesbqI2Q== @@ -14005,6 +15305,11 @@ wcwidth@^1.0.0, wcwidth@^1.0.1: dependencies: defaults "^1.0.3" +web-streams-polyfill@^3.0.3: + version "3.2.1" + resolved 
"https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== + web3-core@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-4.0.3.tgz#eab6cc23a43ff202d8f38bbd9801a7a2ec750cc2" @@ -14216,11 +15521,63 @@ web3@^4.0.3: web3-utils "^4.0.3" web3-validator "^1.0.2" +webdriver@8.27.0: + version "8.27.0" + resolved "https://registry.yarnpkg.com/webdriver/-/webdriver-8.27.0.tgz#27e936a03c08b2d72ed6bd01a6a46f8189ef0abf" + integrity sha512-n1IA+rR3u84XxU9swiKUM06BkEC0GDimfZkBML57cny+utQOUbdM/mBpqCUnkWX/RBz/p2EfHdKNyOs3/REaog== + dependencies: + "@types/node" "^20.1.0" + "@types/ws" "^8.5.3" + "@wdio/config" "8.27.0" + "@wdio/logger" "8.24.12" + "@wdio/protocols" "8.24.12" + "@wdio/types" "8.27.0" + "@wdio/utils" "8.27.0" + deepmerge-ts "^5.1.0" + got "^12.6.1" + ky "^0.33.0" + ws "^8.8.0" + +webdriverio@^8.27.0: + version "8.27.0" + resolved "https://registry.yarnpkg.com/webdriverio/-/webdriverio-8.27.0.tgz#4068b0164ab66bfb62d6eb6b8d97df2d140922d5" + integrity sha512-Qh5VCiBjEmxnmXcL1QEFoDzFqTtaWKrXriuU5G0yHKCModGAt2G7IHTkAok3CpmkVJfZpEvY630aP1MvgDtFhw== + dependencies: + "@types/node" "^20.1.0" + "@wdio/config" "8.27.0" + "@wdio/logger" "8.24.12" + "@wdio/protocols" "8.24.12" + "@wdio/repl" "8.24.12" + "@wdio/types" "8.27.0" + "@wdio/utils" "8.27.0" + archiver "^6.0.0" + aria-query "^5.0.0" + css-shorthand-properties "^1.1.1" + css-value "^0.0.1" + devtools-protocol "^0.0.1237913" + grapheme-splitter "^1.0.2" + import-meta-resolve "^4.0.0" + is-plain-obj "^4.1.0" + lodash.clonedeep "^4.5.0" + lodash.zip "^4.2.0" + minimatch "^9.0.0" + puppeteer-core "^20.9.0" + query-selector-shadow-dom "^1.0.0" + resq "^1.9.1" + rgb2hex "0.2.5" + serialize-error "^11.0.1" + webdriver "8.27.0" + webidl-conversions@^3.0.0: version "3.0.1" resolved 
"https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== +webidl-conversions@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-7.0.0.tgz#256b4e1882be7debbf01d05f0aa2039778ea080a" + integrity sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g== + webpack-merge@^4.1.5: version "4.2.2" resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-4.2.2.tgz#a27c52ea783d1398afd2087f547d7b9d2f43634d" @@ -14263,6 +15620,26 @@ webpack@^5.88.2: watchpack "^2.4.0" webpack-sources "^3.2.3" +whatwg-encoding@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz#d0f4ef769905d426e1688f3e34381a99b60b76e5" + integrity sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ== + dependencies: + iconv-lite "0.6.3" + +whatwg-mimetype@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz#bc1bf94a985dc50388d54a9258ac405c3ca2fc0a" + integrity sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg== + +whatwg-url@^14.0.0: + version "14.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-14.0.0.tgz#00baaa7fd198744910c4b1ef68378f2200e4ceb6" + integrity sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw== + dependencies: + tr46 "^5.0.0" + webidl-conversions "^7.0.0" + whatwg-url@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" @@ -14351,6 +15728,13 @@ which@^3.0.0: dependencies: isexe "^2.0.0" +which@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/which/-/which-4.0.0.tgz#cd60b5e74503a3fbcfbf6cd6b4138a8bae644c1a" + integrity sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg== + dependencies: + isexe "^3.1.1" + why-is-node-running@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/why-is-node-running/-/why-is-node-running-2.2.2.tgz#4185b2b4699117819e7154594271e7e344c9973e" @@ -14517,21 +15901,31 @@ ws@7.4.6: resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== +ws@8.13.0, ws@^8.8.1: + version "8.13.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" + integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== + ws@8.5.0: version "8.5.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.5.0.tgz#bfb4be96600757fe5382de12c670dab984a1ed4f" integrity sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg== -ws@^8.8.1: - version "8.13.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" - integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== +ws@^8.14.2, ws@^8.8.0: + version "8.14.2" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f" + integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g== ws@~8.2.3: version "8.2.3" resolved "https://registry.yarnpkg.com/ws/-/ws-8.2.3.tgz#63a56456db1b04367d0b721a0b80cae6d8becbba" integrity sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA== +xml-name-validator@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-5.0.0.tgz#82be9b957f7afdacf961e5980f1bf227c0bf7673" + integrity sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg== + xml2js@0.4.19: version "0.4.19" resolved "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz" @@ -14566,6 +15960,11 @@ xmlbuilder@~9.0.1: resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz" integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0= +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + xsalsa20@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/xsalsa20/-/xsalsa20-1.1.0.tgz" @@ -14652,6 +16051,32 @@ yargs@16.2.0, yargs@^16.1.1, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@17.7.1, yargs@^17.5.1, yargs@^17.6.2, yargs@^17.7.1: + version "17.7.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.1.tgz#34a77645201d1a8fc5213ace787c220eabbd0967" + integrity sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + +yargs@17.7.2, yargs@^17.7.2: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yargs@^15.0.2: version "15.4.1" resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" @@ -14682,32 +16107,6 @@ yargs@^17.1.1: y18n "^5.0.5" yargs-parser "^21.1.1" -yargs@^17.5.1, yargs@^17.6.2, yargs@^17.7.1: - version "17.7.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.1.tgz#34a77645201d1a8fc5213ace787c220eabbd0967" - integrity sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw== - dependencies: - cliui "^8.0.1" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.1.1" - -yargs@^17.7.2: - version "17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" - integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== - dependencies: - cliui "^8.0.1" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.1.1" - yauzl@^2.10.0: version "2.10.0" resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" @@ -14739,3 +16138,12 @@ zip-stream@^4.1.0: archiver-utils "^2.1.0" compress-commons "^4.1.0" readable-stream "^3.6.0" + +zip-stream@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-5.0.1.tgz#cf3293bba121cad98be2ec7f05991d81d9f18134" + integrity sha512-UfZ0oa0C8LI58wJ+moL46BDIMgCQbnsb+2PoiJYtonhBsMh2bq1eRBVkvjfVsqbEHd9/EgKPUuL9saSSsec8OA== + dependencies: + archiver-utils "^4.0.1" + compress-commons "^5.0.1" + readable-stream "^3.6.0"