From 3cebedbc5cc37b057664969e8c7945c643af9250 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 26 Jan 2023 09:14:08 +0100 Subject: [PATCH 01/89] Spec draft for the verge Co-Authored-By: Dankrad Feist --- specs/verge/beacon-chain.md | 208 ++++++++++++++++++++++++++++++++++++ specs/verge/fork.md | 143 +++++++++++++++++++++++++ 2 files changed, 351 insertions(+) create mode 100644 specs/verge/beacon-chain.md create mode 100644 specs/verge/fork.md diff --git a/specs/verge/beacon-chain.md b/specs/verge/beacon-chain.md new file mode 100644 index 0000000000..2f380a38e8 --- /dev/null +++ b/specs/verge/beacon-chain.md @@ -0,0 +1,208 @@ +# The Verge -- The Beacon Chain + +## Table of contents + + + + + +- [Introduction](#introduction) +- [Custom types](#custom-types) +- [Preset](#preset) + - [Execution](#execution) +- [Containers](#containers) + - [Extended containers](#extended-containers) + - [`ExecutionPayload`](#executionpayload) + - [`ExecutionPayloadHeader`](#executionpayloadheader) + - [New containers](#new-containers) + - [`SuffixStateDiff`](#suffixstatediff) + - [`StemStateDiff`](#stemstatediff) + - [`IPAProof`](#ipaproof) + - [`VerkleProof`](#verkleproof) + - [`ExecutionWitness`](#executionwitness) +- [Beacon chain state transition function](#beacon-chain-state-transition-function) + - [Execution engine](#execution-engine) + - [`notify_new_payload`](#notify_new_payload) + - [Block processing](#block-processing) + - [Execution payload](#execution-payload) + - [`process_execution_payload`](#process_execution_payload) +- [Testing](#testing) + + + + +## Introduction + +This upgrade adds transaction execution to the beacon chain as part of the Verge upgrade. + +## Custom types + +| Name | SSZ equivalent | Description | +| - | - | - | +| `StateDiff` | `List[StemStateDiff, MAX_STEMS]` | Only valid if list is sorted by stems | +| `BandersnatchGroupElement` | `Bytes32` | | +| `BandersnatchFieldElement` | `Bytes32` | | +| `Stem` | `Bytes31` | | + +## Preset + +### Execution + +| Name | Value | +| - | - | +| `MAX_STEMS` | `2**16` | +| `MAX_COMMITMENTS_PER_STEM` | `33` | +| `VERKLE_WIDTH` | `256` | +| `IPA_PROOF_DEPTH` | `8` | + +## Containers + +### Extended containers + +#### `ExecutionPayload` + +```python +class ExecutionPayload(Container): + # Execution block header fields + parent_hash: Hash32 + fee_recipient: ExecutionAddress # 'beneficiary' in the yellow paper + state_root: Bytes32 + receipts_root: Bytes32 + logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM] + prev_randao: Bytes32 # 'difficulty' in the yellow paper + block_number: uint64 # 'number' in the yellow paper + gas_limit: uint64 + gas_used: uint64 + timestamp: uint64 + extra_data: ByteList[MAX_EXTRA_DATA_BYTES] + base_fee_per_gas: uint256 + block_hash: Hash32 # Hash of execution block + # Extra payload field + execution_witness: ExecutionWitness + transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] +``` + +#### `ExecutionPayloadHeader` + +```python +class ExecutionPayloadHeader(Container): + # Execution block header fields + parent_hash: Hash32 + fee_recipient: ExecutionAddress + state_root: Bytes32 + receipts_root: Bytes32 + logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM] + prev_randao: Bytes32 + block_number: uint64 + gas_limit: uint64 + gas_used: uint64 + timestamp: uint64 + extra_data: ByteList[MAX_EXTRA_DATA_BYTES] + base_fee_per_gas: uint256 + block_hash: Hash32 # Hash of execution block + transactions_root: Root + # Extra payload fields + execution_witness: ExecutionWitness 
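+    # Descriptive note: unlike the other execution fields above, which appear as roots
+    # (e.g. transactions_root), this draft embeds the full ExecutionWitness in the header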
+``` + +### New containers + +#### `SuffixStateDiff` + +```python +class SuffixStateDiff(Container): + suffix: Byte + + # Null means not currently present + current_value: Union[Null, Bytes32] + + # Null means value not updated + new_value: Union[Null, Bytes32] +``` + +*Note*: on the Kaustinen testnet, `new_value` is ommitted from the container. + +#### `StemStateDiff` + +```python +class StemStateDiff(Container): + stem: Stem + # Valid only if list is sorted by suffixes + suffix_diffs: List[SuffixStateDiff, VERKLE_WIDTH] +``` + +```python +# Valid only if list is sorted by stems +StateDiff = List[StemStateDiff, MAX_STEMS] +``` + +#### `IPAProof` + +```python +class IpaProof(Container): + C_L = Vector[BandersnatchGroupElement, IPA_PROOF_DEPTH] + C_R = Vector[BandersnatchGroupElement, IPA_PROOF_DEPTH] + final_evaluation = BandersnatchFieldElement +``` + +#### `VerkleProof` + +```python +class VerkleProof(Container): + other_stems: List[Bytes32, MAX_STEMS] + depth_extension_present: List[uint8, MAX_STEMS] + commitments_by_path: List[BandersnatchGroupElement, MAX_STEMS * MAX_COMMITMENTS_PER_STEM] + D: BandersnatchGroupElement + ipa_proof: IpaProof +``` + +#### `ExecutionWitness` + +```python +class ExecutionWitness(container): + state_diff: StateDiff + verkle_proof: VerkleProof +``` + +## Beacon chain state transition function + +### Block processing + +#### Execution payload + +##### `process_execution_payload` + +```python +def process_execution_payload(state: BeaconState, payload: ExecutionPayload, execution_engine: ExecutionEngine) -> None: + # Verify consistency of the parent hash with respect to the previous execution payload header + if is_merge_transition_complete(state): + assert payload.parent_hash == state.latest_execution_payload_header.block_hash + # Verify prev_randao + assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state)) + # Verify timestamp + assert payload.timestamp == compute_timestamp_at_slot(state, state.slot) + # Verify the execution payload is valid + assert execution_engine.notify_new_payload(payload) + # Cache execution payload header + state.latest_execution_payload_header = ExecutionPayloadHeader( + parent_hash=payload.parent_hash, + fee_recipient=payload.fee_recipient, + state_root=payload.state_root, + receipts_root=payload.receipts_root, + logs_bloom=payload.logs_bloom, + prev_randao=payload.prev_randao, + block_number=payload.block_number, + gas_limit=payload.gas_limit, + gas_used=payload.gas_used, + timestamp=payload.timestamp, + extra_data=payload.extra_data, + base_fee_per_gas=payload.base_fee_per_gas, + block_hash=payload.block_hash, + transactions_root=hash_tree_root(payload.transactions), + execution_witness=payload.execution_witness, + ) +``` + +## Testing + +TBD \ No newline at end of file diff --git a/specs/verge/fork.md b/specs/verge/fork.md new file mode 100644 index 0000000000..627c1183c0 --- /dev/null +++ b/specs/verge/fork.md @@ -0,0 +1,143 @@ +# The Verge -- Fork Logic + +## Table of contents + + + + +- [Introduction](#introduction) +- [Configuration](#configuration) +- [Helper functions](#helper-functions) + - [Misc](#misc) + - [Modified `compute_fork_version`](#modified-compute_fork_version) +- [Fork to the Verge](#fork-to-capella) + - [Fork trigger](#fork-trigger) + - [Upgrading the state](#upgrading-the-state) + + + +## Introduction + +This document describes the process of the Verge upgrade. + +## Configuration + +Warning: this configuration is not definitive. 
+ +| Name | Value | +| - | - | +| `VERGE_FORK_VERSION` | `Version('0x05000000')` | +| `VERGE_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** | + + +## Helper functions + +### Misc + +#### Modified `compute_fork_version` + +```python +def compute_fork_version(epoch: Epoch) -> Version: + """ + Return the fork version at the given ``epoch``. + """ + if epoch >= VERGE_FORK_EPOCH: + return VERGE_FORK_VERSION + if epoch >= CAPELLA_FORK_EPOCH: + return CAPELLA_FORK_VERSION + if epoch >= BELLATRIX_FORK_EPOCH: + return BELLATRIX_FORK_VERSION + if epoch >= ALTAIR_FORK_EPOCH: + return ALTAIR_FORK_VERSION + return GENESIS_FORK_VERSION +``` + +## Fork to the Verge + +### Fork trigger + +The fork is triggered at epoch `VERGE_FORK_EPOCH`. + +Note that for the pure verge networks, we don't apply `upgrade_to_verge` since it starts with the Verge version logic. + +### Upgrading the state + +If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == VERGE_FORK_EPOCH`, +an irregular state change is made to upgrade to the Verge. + +The upgrade occurs after the completion of the inner loop of `process_slots` that sets `state.slot` equal to `VERGE_FORK_EPOCH * SLOTS_PER_EPOCH`. +Care must be taken when transitioning through the fork boundary as implementations will need a modified [state transition function](../phase0/beacon-chain.md#beacon-chain-state-transition-function) that deviates from the Phase 0 document. +In particular, the outer `state_transition` function defined in the Phase 0 document will not expose the precise fork slot to execute the upgrade in the presence of skipped slots at the fork boundary. Instead, the logic must be within `process_slots`. + +```python +def upgrade_to_verge(pre: capella.BeaconState) -> BeaconState: + epoch = capella.get_current_epoch(pre) + latest_execution_payload_header = ExecutionPayloadHeader( + parent_hash=pre.latest_execution_payload_header.parent_hash, + fee_recipient=pre.latest_execution_payload_header.fee_recipient, + state_root=pre.latest_execution_payload_header.state_root, + receipts_root=pre.latest_execution_payload_header.receipts_root, + logs_bloom=pre.latest_execution_payload_header.logs_bloom, + prev_randao=pre.latest_execution_payload_header.prev_randao, + block_number=pre.latest_execution_payload_header.block_number, + gas_limit=pre.latest_execution_payload_header.gas_limit, + gas_used=pre.latest_execution_payload_header.gas_used, + timestamp=pre.latest_execution_payload_header.timestamp, + extra_data=pre.latest_execution_payload_header.extra_data, + base_fee_per_gas=pre.latest_execution_payload_header.base_fee_per_gas, + block_hash=pre.latest_execution_payload_header.block_hash, + transactions_root=pre.latest_execution_payload_header.transactions_root, + withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, + execution_witness=ExecutionWitness([], []) # New in the Verge + ) + post = BeaconState( + # Versioning + genesis_time=pre.genesis_time, + genesis_validators_root=pre.genesis_validators_root, + slot=pre.slot, + fork=Fork( + previous_version=pre.fork.current_version, + current_version=VERGE_FORK_VERSION, + epoch=epoch, + ), + # History + latest_block_header=pre.latest_block_header, + block_roots=pre.block_roots, + state_roots=pre.state_roots, + historical_roots=pre.historical_roots, + # Eth1 + eth1_data=pre.eth1_data, + eth1_data_votes=pre.eth1_data_votes, + eth1_deposit_index=pre.eth1_deposit_index, + # Registry + validators=pre.validators, + balances=pre.balances, + # Randomness + 
randao_mixes=pre.randao_mixes, + # Slashings + slashings=pre.slashings, + # Participation + previous_epoch_participation=pre.previous_epoch_participation, + current_epoch_participation=pre.current_epoch_participation, + # Finality + justification_bits=pre.justification_bits, + previous_justified_checkpoint=pre.previous_justified_checkpoint, + current_justified_checkpoint=pre.current_justified_checkpoint, + finalized_checkpoint=pre.finalized_checkpoint, + # Inactivity + inactivity_scores=pre.inactivity_scores, + # Sync + current_sync_committee=pre.current_sync_committee, + next_sync_committee=pre.next_sync_committee, + # Execution-layer + latest_execution_payload_header=latest_execution_payload_header, + # Withdrawals + next_withdrawal_index=pre.next_withdrawal_index, + next_withdrawal_validator_index=pre.next_withdrawal_validator_index, + # Deep history valid from Capella onwards + # FIXME most likely wrong + historical_summaries=List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT]([]), # [New in Capella] + ) + + return post +``` From 832a799907cf8f3903531a9ce208afceba48219b Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 26 Jan 2023 13:23:20 +0100 Subject: [PATCH 02/89] Update specs/verge/beacon-chain.md Co-authored-by: terencechain --- specs/verge/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/verge/beacon-chain.md b/specs/verge/beacon-chain.md index 2f380a38e8..e0f2111588 100644 --- a/specs/verge/beacon-chain.md +++ b/specs/verge/beacon-chain.md @@ -78,8 +78,8 @@ class ExecutionPayload(Container): base_fee_per_gas: uint256 block_hash: Hash32 # Hash of execution block # Extra payload field - execution_witness: ExecutionWitness transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] + execution_witness: ExecutionWitness ``` #### `ExecutionPayloadHeader` From 8fc37ca64c0d235c9c9678ba274ee6d02aef3fe3 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Fri, 27 Jan 2023 11:47:26 +0100 Subject: [PATCH 03/89] fix some typos --- specs/verge/beacon-chain.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/specs/verge/beacon-chain.md b/specs/verge/beacon-chain.md index e0f2111588..e23b458225 100644 --- a/specs/verge/beacon-chain.md +++ b/specs/verge/beacon-chain.md @@ -40,8 +40,8 @@ This upgrade adds transaction execution to the beacon chain as part of the Verge | Name | SSZ equivalent | Description | | - | - | - | | `StateDiff` | `List[StemStateDiff, MAX_STEMS]` | Only valid if list is sorted by stems | -| `BandersnatchGroupElement` | `Bytes32` | | -| `BandersnatchFieldElement` | `Bytes32` | | +| `BanderwagonGroupElement` | `Bytes32` | | +| `BanderwagonFieldElement` | `Bytes32` | | | `Stem` | `Bytes31` | | ## Preset @@ -140,19 +140,19 @@ StateDiff = List[StemStateDiff, MAX_STEMS] ```python class IpaProof(Container): - C_L = Vector[BandersnatchGroupElement, IPA_PROOF_DEPTH] - C_R = Vector[BandersnatchGroupElement, IPA_PROOF_DEPTH] - final_evaluation = BandersnatchFieldElement + C_L = Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] + C_R = Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] + final_evaluation = BanderwagonFieldElement ``` #### `VerkleProof` ```python class VerkleProof(Container): - other_stems: List[Bytes32, MAX_STEMS] + other_stems: List[Bytes31, MAX_STEMS] depth_extension_present: List[uint8, MAX_STEMS] - commitments_by_path: List[BandersnatchGroupElement, MAX_STEMS * MAX_COMMITMENTS_PER_STEM] - D: 
BandersnatchGroupElement + commitments_by_path: List[BanderwagonGroupElement, MAX_STEMS * MAX_COMMITMENTS_PER_STEM] + D: BanderwagonGroupElement ipa_proof: IpaProof ``` From 698650cd19e6f8852df02bea890108c0c9a73d4f Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 9 Feb 2023 14:19:46 +0100 Subject: [PATCH 04/89] add a comment to pinpoint the new, verge-related fields Co-authored-by: Mikhail Kalinin --- specs/verge/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/verge/beacon-chain.md b/specs/verge/beacon-chain.md index e23b458225..072a7461b4 100644 --- a/specs/verge/beacon-chain.md +++ b/specs/verge/beacon-chain.md @@ -79,7 +79,7 @@ class ExecutionPayload(Container): block_hash: Hash32 # Hash of execution block # Extra payload field transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] - execution_witness: ExecutionWitness + execution_witness: ExecutionWitness # [New in Verge] ``` #### `ExecutionPayloadHeader` From 3b362c61157cc109c75508b37dfe3e1fe86e5253 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Fri, 10 Feb 2023 14:51:14 +0100 Subject: [PATCH 05/89] Use Optional[Bytes32] instead of Union[Null, Bytes32] for value diffs Co-authored-by: Mikhail Kalinin --- specs/verge/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/verge/beacon-chain.md b/specs/verge/beacon-chain.md index 072a7461b4..ebe6745a7f 100644 --- a/specs/verge/beacon-chain.md +++ b/specs/verge/beacon-chain.md @@ -114,10 +114,10 @@ class SuffixStateDiff(Container): suffix: Byte # Null means not currently present - current_value: Union[Null, Bytes32] + current_value: Optional[Bytes32] # Null means value not updated - new_value: Union[Null, Bytes32] + new_value: Optional[Bytes32] ``` *Note*: on the Kaustinen testnet, `new_value` is ommitted from the container. 
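The `StateDiff` and `StemStateDiff` containers defined above are only valid when their lists are sorted (by stem, and by suffix within each stem), so a consuming client has to enforce that ordering before using a witness. Below is a minimal sketch of such a check, assuming plain Python objects that expose `stem`, `suffix_diffs` and `suffix` attributes shaped like the containers above; the `witness_ordering_is_valid` helper name and the strictly-increasing (duplicate-free) ordering are assumptions made here for illustration.

```python
def witness_ordering_is_valid(state_diff) -> bool:
    """Check the sortedness invariants stated for StateDiff and StemStateDiff.

    `state_diff` is any sequence of stem diffs carrying a 31-byte `stem` and a
    `suffix_diffs` sequence whose entries carry an integer `suffix`.
    """
    stems = [stem_diff.stem for stem_diff in state_diff]
    # StateDiff is only valid if sorted by stems (assumed strict: no duplicate stems)
    if any(left >= right for left, right in zip(stems, stems[1:])):
        return False
    for stem_diff in state_diff:
        suffixes = [suffix_diff.suffix for suffix_diff in stem_diff.suffix_diffs]
        # suffix_diffs are only valid if sorted by suffixes (assumed strict as well)
        if any(left >= right for left, right in zip(suffixes, suffixes[1:])):
            return False
    return True
```

A client would typically run a check like this on `execution_witness.state_diff` before attempting Verkle proof verification.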
From d0c7bbc8020ac196fb8b542d25b47a2d60fde6e0 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Sun, 19 Feb 2023 20:16:48 +0100 Subject: [PATCH 06/89] rebase and move to _features --- specs/{ => _features}/verge/beacon-chain.md | 0 specs/{ => _features}/verge/fork.md | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename specs/{ => _features}/verge/beacon-chain.md (100%) rename specs/{ => _features}/verge/fork.md (100%) diff --git a/specs/verge/beacon-chain.md b/specs/_features/verge/beacon-chain.md similarity index 100% rename from specs/verge/beacon-chain.md rename to specs/_features/verge/beacon-chain.md diff --git a/specs/verge/fork.md b/specs/_features/verge/fork.md similarity index 100% rename from specs/verge/fork.md rename to specs/_features/verge/fork.md From 567e62fbe94be1dae0bc4c4a29824a6b0a8e93f9 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 9 Mar 2023 11:48:00 +0100 Subject: [PATCH 07/89] Style feedback Co-Authored-By: dapplion <35266934+dapplion@users.noreply.github.com> --- specs/_features/verge/beacon-chain.md | 2 -- specs/_features/verge/fork.md | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/specs/_features/verge/beacon-chain.md b/specs/_features/verge/beacon-chain.md index ebe6745a7f..5a25b6e664 100644 --- a/specs/_features/verge/beacon-chain.md +++ b/specs/_features/verge/beacon-chain.md @@ -112,10 +112,8 @@ class ExecutionPayloadHeader(Container): ```python class SuffixStateDiff(Container): suffix: Byte - # Null means not currently present current_value: Optional[Bytes32] - # Null means value not updated new_value: Optional[Bytes32] ``` diff --git a/specs/_features/verge/fork.md b/specs/_features/verge/fork.md index 627c1183c0..17fbac370e 100644 --- a/specs/_features/verge/fork.md +++ b/specs/_features/verge/fork.md @@ -136,7 +136,7 @@ def upgrade_to_verge(pre: capella.BeaconState) -> BeaconState: next_withdrawal_validator_index=pre.next_withdrawal_validator_index, # Deep history valid from Capella onwards # FIXME most likely wrong - historical_summaries=List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT]([]), # [New in Capella] + historical_summaries=List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT]([]), ) return post From a5c955fb6371fd4d18d12c3eee66701fa5ab91ca Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 9 Mar 2023 15:33:15 +0100 Subject: [PATCH 08/89] feedback from ACDC --- specs/_features/verge/beacon-chain.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/specs/_features/verge/beacon-chain.md b/specs/_features/verge/beacon-chain.md index 5a25b6e664..5b808c8817 100644 --- a/specs/_features/verge/beacon-chain.md +++ b/specs/_features/verge/beacon-chain.md @@ -102,7 +102,7 @@ class ExecutionPayloadHeader(Container): block_hash: Hash32 # Hash of execution block transactions_root: Root # Extra payload fields - execution_witness: ExecutionWitness + execution_witness_root: Root # [New in Verge] ``` ### New containers @@ -138,8 +138,8 @@ StateDiff = List[StemStateDiff, MAX_STEMS] ```python class IpaProof(Container): - C_L = Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] - C_R = Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] + c_l: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] + c_r: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] final_evaluation = BanderwagonFieldElement ``` @@ -150,7 +150,7 @@ class VerkleProof(Container): other_stems: List[Bytes31, 
MAX_STEMS] depth_extension_present: List[uint8, MAX_STEMS] commitments_by_path: List[BanderwagonGroupElement, MAX_STEMS * MAX_COMMITMENTS_PER_STEM] - D: BanderwagonGroupElement + d: BanderwagonGroupElement ipa_proof: IpaProof ``` From 509fbb2c5ca74d93e9d0c3b3532be4c7214e1cd5 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Fri, 10 Mar 2023 09:34:44 +0100 Subject: [PATCH 09/89] style: remove confusing underscore --- specs/_features/verge/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/_features/verge/beacon-chain.md b/specs/_features/verge/beacon-chain.md index 5b808c8817..31bd21e79c 100644 --- a/specs/_features/verge/beacon-chain.md +++ b/specs/_features/verge/beacon-chain.md @@ -138,8 +138,8 @@ StateDiff = List[StemStateDiff, MAX_STEMS] ```python class IpaProof(Container): - c_l: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] - c_r: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] + cl: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] + cr: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] final_evaluation = BanderwagonFieldElement ``` From d8c486bcc5c9b21c87db50082b99fcc4301dc37a Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Tue, 7 May 2024 22:44:37 +0600 Subject: [PATCH 10/89] Rename DepositReceipt to DepositRequest --- presets/mainnet/electra.yaml | 4 +- presets/minimal/electra.yaml | 2 +- specs/electra/beacon-chain.md | 50 ++++---- specs/electra/fork.md | 4 +- specs/electra/validator.md | 2 +- .../test_process_deposit_receipt.py | 70 ++++++------ .../sanity/blocks/test_deposit_transition.py | 108 +++++++++--------- .../pyspec/eth2spec/test/helpers/deposits.py | 34 +++--- .../test/helpers/execution_payload.py | 38 +++--- .../pyspec/eth2spec/test/helpers/genesis.py | 8 +- tests/formats/operations/README.md | 2 +- tests/generators/operations/main.py | 2 +- 12 files changed, 162 insertions(+), 162 deletions(-) diff --git a/presets/mainnet/electra.yaml b/presets/mainnet/electra.yaml index 72c626ded2..6158219dff 100644 --- a/presets/mainnet/electra.yaml +++ b/presets/mainnet/electra.yaml @@ -34,8 +34,8 @@ MAX_CONSOLIDATIONS: 1 # Execution # --------------------------------------------------------------- -# 2**13 (= 8192) receipts -MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD: 8192 +# 2**13 (= 8192) deposit requests +MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 8192 # 2**4 (= 16) withdrawal requests MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 16 diff --git a/presets/minimal/electra.yaml b/presets/minimal/electra.yaml index 11aa5e1f50..cf726e004b 100644 --- a/presets/minimal/electra.yaml +++ b/presets/minimal/electra.yaml @@ -35,7 +35,7 @@ MAX_CONSOLIDATIONS: 1 # Execution # --------------------------------------------------------------- # [customized] -MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD: 4 +MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 4 # [customized] 2**1 (= 2) withdrawal requests MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 2 diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 62da891146..e860394da0 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -24,7 +24,7 @@ - [Validator cycle](#validator-cycle) - [Containers](#containers) - [New containers](#new-containers) - - [`DepositReceipt`](#depositreceipt) + - [`DepositRequest`](#depositrequest) - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - [`ExecutionLayerWithdrawalRequest`](#executionlayerwithdrawalrequest) @@ -92,8 +92,8 @@ - [Updated 
`process_voluntary_exit`](#updated-process_voluntary_exit) - [Execution layer withdrawal requests](#execution-layer-withdrawal-requests) - [New `process_execution_layer_withdrawal_request`](#new-process_execution_layer_withdrawal_request) - - [Deposit receipts](#deposit-receipts) - - [New `process_deposit_receipt`](#new-process_deposit_receipt) + - [Deposit requests](#deposit-requests) + - [New `process_deposit_request`](#new-process_deposit_request) - [Consolidations](#consolidations) - [New `process_consolidation`](#new-process_consolidation) - [Testing](#testing) @@ -119,7 +119,7 @@ The following values are (non-configurable) constants used throughout the specif | Name | Value | Description | | - | - | - | -| `UNSET_DEPOSIT_RECEIPTS_START_INDEX` | `uint64(2**64 - 1)` | *[New in Electra:EIP6110]* | +| `UNSET_DEPOSIT_REQUESTS_START_INDEX` | `uint64(2**64 - 1)` | *[New in Electra:EIP6110]* | | `FULL_EXIT_REQUEST_AMOUNT` | `uint64(0)` | *[New in Electra:EIP7002]* | ### Withdrawal prefixes @@ -170,7 +170,7 @@ The following values are (non-configurable) constants used throughout the specif | Name | Value | Description | | - | - | - | -| `MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD` | `uint64(2**13)` (= 8,192) | *[New in Electra:EIP6110]* Maximum number of deposit receipts allowed in each payload | +| `MAX_DEPOSIT_REQUESTS_PER_PAYLOAD` | `uint64(2**13)` (= 8,192) | *[New in Electra:EIP6110]* Maximum number of deposit requests allowed in each payload | | `MAX_ATTESTER_SLASHINGS_ELECTRA` | `2**0` (= 1) | *[New in Electra:EIP7549]* | | `MAX_ATTESTATIONS_ELECTRA` | `2**3` (= 8) | *[New in Electra:EIP7549]* | | `MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD` | `uint64(2**4)` (= 16)| *[New in Electra:EIP7002]* Maximum number of execution layer withdrawal requests in each payload | @@ -194,12 +194,12 @@ The following values are (non-configurable) constants used throughout the specif ### New containers -#### `DepositReceipt` +#### `DepositRequest` *Note*: The container is new in EIP6110. 
```python -class DepositReceipt(Container): +class DepositRequest(Container): pubkey: BLSPubkey withdrawal_credentials: Bytes32 amount: Gwei @@ -345,7 +345,7 @@ class ExecutionPayload(Container): withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD] blob_gas_used: uint64 excess_blob_gas: uint64 - deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in Electra:EIP6110] + deposit_requests: List[DepositRequest, MAX_DEPOSIT_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] ``` @@ -373,7 +373,7 @@ class ExecutionPayloadHeader(Container): withdrawals_root: Root blob_gas_used: uint64 excess_blob_gas: uint64 - deposit_receipts_root: Root # [New in Electra:EIP6110] + deposit_requests_root: Root # [New in Electra:EIP6110] withdrawal_requests_root: Root # [New in Electra:EIP7002:EIP7251] ``` @@ -422,7 +422,7 @@ class BeaconState(Container): next_withdrawal_validator_index: ValidatorIndex # Deep history valid from Capella onwards historical_summaries: List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT] - deposit_receipts_start_index: uint64 # [New in Electra:EIP6110] + deposit_requests_start_index: uint64 # [New in Electra:EIP6110] deposit_balance_to_consume: Gwei # [New in Electra:EIP7251] exit_balance_to_consume: Gwei # [New in Electra:EIP7251] earliest_exit_epoch: Epoch # [New in Electra:EIP7251] @@ -1011,7 +1011,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi withdrawals_root=hash_tree_root(payload.withdrawals), blob_gas_used=payload.blob_gas_used, excess_blob_gas=payload.excess_blob_gas, - deposit_receipts_root=hash_tree_root(payload.deposit_receipts), # [New in Electra:EIP6110] + deposit_requests_root=hash_tree_root(payload.deposit_requests), # [New in Electra:EIP6110] withdrawal_requests_root=hash_tree_root(payload.withdrawal_requests), # [New in Electra:EIP7002:EIP7251] ) ``` @@ -1026,7 +1026,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # [Modified in Electra:EIP6110] # Disable former deposit mechanism once all prior deposits are processed - eth1_deposit_index_limit = min(state.eth1_data.deposit_count, state.deposit_receipts_start_index) + eth1_deposit_index_limit = min(state.eth1_data.deposit_count, state.deposit_requests_start_index) if state.eth1_deposit_index < eth1_deposit_index_limit: assert len(body.deposits) == min(MAX_DEPOSITS, eth1_deposit_index_limit - state.eth1_deposit_index) else: @@ -1044,7 +1044,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: for_ops(body.bls_to_execution_changes, process_bls_to_execution_change) # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, process_execution_layer_withdrawal_request) - for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] + for_ops(body.execution_payload.deposit_requests, process_deposit_request) # [New in Electra:EIP6110] for_ops(body.consolidations, process_consolidation) # [New in Electra:EIP7251] ``` @@ -1271,24 +1271,24 @@ def process_execution_layer_withdrawal_request( )) ``` -##### Deposit receipts +##### Deposit requests -###### New `process_deposit_receipt` +###### New `process_deposit_request` *Note*: This function is new in Electra:EIP6110. 
```python -def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) -> None: - # Set deposit receipt start index - if state.deposit_receipts_start_index == UNSET_DEPOSIT_RECEIPTS_START_INDEX: - state.deposit_receipts_start_index = deposit_receipt.index +def process_deposit_request(state: BeaconState, deposit_request: DepositRequest) -> None: + # Set deposit request start index + if state.deposit_requests_start_index == UNSET_DEPOSIT_REQUESTS_START_INDEX: + state.deposit_requests_start_index = deposit_request.index apply_deposit( state=state, - pubkey=deposit_receipt.pubkey, - withdrawal_credentials=deposit_receipt.withdrawal_credentials, - amount=deposit_receipt.amount, - signature=deposit_receipt.signature, + pubkey=deposit_request.pubkey, + withdrawal_credentials=deposit_request.withdrawal_credentials, + amount=deposit_request.amount, + signature=deposit_request.signature, ) ``` @@ -1349,7 +1349,7 @@ def process_consolidation(state: BeaconState, signed_consolidation: SignedConsol Modifications include: 1. Use `ELECTRA_FORK_VERSION` as the previous and current fork version. 2. Utilize the Electra `BeaconBlockBody` when constructing the initial `latest_block_header`. -3. *[New in Electra:EIP6110]* Add `deposit_receipts_start_index` variable to the genesis state initialization. +3. *[New in Electra:EIP6110]* Add `deposit_requests_start_index` variable to the genesis state initialization. 4. *[New in Electra:EIP7251]* Initialize new fields to support increasing the maximum effective balance. ```python @@ -1369,7 +1369,7 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, eth1_data=Eth1Data(block_hash=eth1_block_hash, deposit_count=uint64(len(deposits))), latest_block_header=BeaconBlockHeader(body_root=hash_tree_root(BeaconBlockBody())), randao_mixes=[eth1_block_hash] * EPOCHS_PER_HISTORICAL_VECTOR, # Seed RANDAO with Eth1 entropy - deposit_receipts_start_index=UNSET_DEPOSIT_RECEIPTS_START_INDEX, # [New in Electra:EIP6110] + deposit_requests_start_index=UNSET_DEPOSIT_REQUESTS_START_INDEX, # [New in Electra:EIP6110] ) # Process deposits diff --git a/specs/electra/fork.md b/specs/electra/fork.md index ffd5f21571..12b03b210f 100644 --- a/specs/electra/fork.md +++ b/specs/electra/fork.md @@ -90,7 +90,7 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState: withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, blob_gas_used=pre.latest_execution_payload_header.blob_gas_used, excess_blob_gas=pre.latest_execution_payload_header.excess_blob_gas, - deposit_receipts_root=Root(), # [New in Electra:EIP6110] + deposit_requests_root=Root(), # [New in Electra:EIP6110] withdrawal_requests_root=Root(), # [New in Electra:EIP7002], ) @@ -146,7 +146,7 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState: # Deep history valid from Capella onwards historical_summaries=pre.historical_summaries, # [New in Electra:EIP6110] - deposit_receipts_start_index=UNSET_DEPOSIT_RECEIPTS_START_INDEX, + deposit_requests_start_index=UNSET_DEPOSIT_REQUESTS_START_INDEX, # [New in Electra:EIP7251] deposit_balance_to_consume=0, exit_balance_to_consume=0, diff --git a/specs/electra/validator.md b/specs/electra/validator.md index 6b838c1260..8d006754ee 100644 --- a/specs/electra/validator.md +++ b/specs/electra/validator.md @@ -80,7 +80,7 @@ def compute_on_chain_aggregate(network_aggregates: Sequence[Attestation]) -> Att ```python def get_eth1_pending_deposit_count(state: BeaconState) -> uint64: - eth1_deposit_index_limit = min(state.eth1_data.deposit_count, 
state.deposit_receipts_start_index) + eth1_deposit_index_limit = min(state.eth1_data.deposit_count, state.deposit_requests_start_index) if state.eth1_deposit_index < eth1_deposit_index_limit: return min(MAX_DEPOSITS, eth1_deposit_index_limit - state.eth1_deposit_index) else: diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_deposit_receipt.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_deposit_receipt.py index 8b4271d6bb..4b96200e45 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_deposit_receipt.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_deposit_receipt.py @@ -1,8 +1,8 @@ from eth2spec.test.context import spec_state_test, always_bls, with_electra_and_later from eth2spec.test.helpers.deposits import ( - prepare_deposit_receipt, - run_deposit_receipt_processing, - run_deposit_receipt_processing_with_specific_fork_version + prepare_deposit_request, + run_deposit_request_processing, + run_deposit_request_processing_with_specific_fork_version ) from eth2spec.test.helpers.state import next_epoch_via_block from eth2spec.test.helpers.withdrawals import set_validator_fully_withdrawable @@ -15,9 +15,9 @@ def test_new_deposit_under_max(spec, state): validator_index = len(state.validators) # effective balance will be 1 EFFECTIVE_BALANCE_INCREMENT smaller because of this small decrement. amount = spec.MAX_EFFECTIVE_BALANCE - 1 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -27,9 +27,9 @@ def test_new_deposit_max(spec, state): validator_index = len(state.validators) # effective balance will be exactly the same as balance. 
amount = spec.MAX_EFFECTIVE_BALANCE - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -39,9 +39,9 @@ def test_new_deposit_over_max(spec, state): validator_index = len(state.validators) # just 1 over the limit, effective balance should be set MAX_EFFECTIVE_BALANCE during processing amount = spec.MAX_EFFECTIVE_BALANCE + 1 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -55,7 +55,7 @@ def test_new_deposit_eth1_withdrawal_credentials(spec, state): + b'\x59' * 20 # a 20-byte eth1 address ) amount = spec.MAX_EFFECTIVE_BALANCE - deposit_receipt = prepare_deposit_receipt( + deposit_request = prepare_deposit_request( spec, validator_index, amount, @@ -63,7 +63,7 @@ def test_new_deposit_eth1_withdrawal_credentials(spec, state): signed=True, ) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -76,7 +76,7 @@ def test_new_deposit_non_versioned_withdrawal_credentials(spec, state): + b'\x02' * 31 # Garabage bytes ) amount = spec.MAX_EFFECTIVE_BALANCE - deposit_receipt = prepare_deposit_receipt( + deposit_request = prepare_deposit_request( spec, validator_index, amount, @@ -84,7 +84,7 @@ def test_new_deposit_non_versioned_withdrawal_credentials(spec, state): signed=True, ) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -95,8 +95,8 @@ def test_correct_sig_but_forked_state(spec, state): amount = spec.MAX_EFFECTIVE_BALANCE # deposits will always be valid, regardless of the current fork state.fork.current_version = spec.Version('0x1234abcd') - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -106,8 +106,8 @@ def test_incorrect_sig_new_deposit(spec, state): # fresh deposit = next validator index = validator appended to registry validator_index = len(state.validators) amount = spec.MAX_EFFECTIVE_BALANCE - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index, effective=False) + deposit_request = prepare_deposit_request(spec, validator_index, amount) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index, effective=False) @with_electra_and_later @@ -115,12 +115,12 @@ def test_incorrect_sig_new_deposit(spec, state): def test_top_up__max_effective_balance(spec, state): 
validator_index = 0 amount = spec.MAX_EFFECTIVE_BALANCE // 4 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) state.balances[validator_index] = spec.MAX_EFFECTIVE_BALANCE state.validators[validator_index].effective_balance = spec.MAX_EFFECTIVE_BALANCE - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) deposits_len = len(state.pending_balance_deposits) assert state.pending_balance_deposits[deposits_len - 1].amount == amount @@ -132,14 +132,14 @@ def test_top_up__max_effective_balance(spec, state): def test_top_up__less_effective_balance(spec, state): validator_index = 0 amount = spec.MAX_EFFECTIVE_BALANCE // 4 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) initial_balance = spec.MAX_EFFECTIVE_BALANCE - 1000 initial_effective_balance = spec.MAX_EFFECTIVE_BALANCE - spec.EFFECTIVE_BALANCE_INCREMENT state.balances[validator_index] = initial_balance state.validators[validator_index].effective_balance = initial_effective_balance - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) deposits_len = len(state.pending_balance_deposits) assert state.pending_balance_deposits[deposits_len - 1].amount == amount @@ -152,14 +152,14 @@ def test_top_up__less_effective_balance(spec, state): def test_top_up__zero_balance(spec, state): validator_index = 0 amount = spec.MAX_EFFECTIVE_BALANCE // 4 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, signed=True) initial_balance = 0 initial_effective_balance = 0 state.balances[validator_index] = initial_balance state.validators[validator_index].effective_balance = initial_effective_balance - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) deposits_len = len(state.pending_balance_deposits) assert state.pending_balance_deposits[deposits_len - 1].amount == amount @@ -173,10 +173,10 @@ def test_top_up__zero_balance(spec, state): def test_incorrect_sig_top_up(spec, state): validator_index = 0 amount = spec.MAX_EFFECTIVE_BALANCE // 4 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount) + deposit_request = prepare_deposit_request(spec, validator_index, amount) # invalid signatures, in top-ups, are allowed! - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -185,7 +185,7 @@ def test_incorrect_withdrawal_credentials_top_up(spec, state): validator_index = 0 amount = spec.MAX_EFFECTIVE_BALANCE // 4 withdrawal_credentials = spec.BLS_WITHDRAWAL_PREFIX + spec.hash(b"junk")[1:] - deposit_receipt = prepare_deposit_receipt( + deposit_request = prepare_deposit_request( spec, validator_index, amount, @@ -193,7 +193,7 @@ def test_incorrect_withdrawal_credentials_top_up(spec, state): ) # inconsistent withdrawal credentials, in top-ups, are allowed! 
- yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -205,9 +205,9 @@ def test_key_validate_invalid_subgroup(spec, state): # All-zero pubkey would not pass `bls.KeyValidate`, but `process_deposit` would not throw exception. pubkey = b'\x00' * 48 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, pubkey=pubkey, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, pubkey=pubkey, signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -221,9 +221,9 @@ def test_key_validate_invalid_decompression(spec, state): pubkey_hex = 'c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' pubkey = bytes.fromhex(pubkey_hex) - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, pubkey=pubkey, signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, pubkey=pubkey, signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) @with_electra_and_later @@ -235,7 +235,7 @@ def test_ineffective_deposit_with_previous_fork_version(spec, state): # NOTE: it was effective in Altair. assert state.fork.previous_version != state.fork.current_version - yield from run_deposit_receipt_processing_with_specific_fork_version( + yield from run_deposit_request_processing_with_specific_fork_version( spec, state, fork_version=state.fork.previous_version, @@ -249,7 +249,7 @@ def test_ineffective_deposit_with_previous_fork_version(spec, state): def test_effective_deposit_with_genesis_fork_version(spec, state): assert spec.config.GENESIS_FORK_VERSION not in (state.fork.previous_version, state.fork.current_version) - yield from run_deposit_receipt_processing_with_specific_fork_version( + yield from run_deposit_request_processing_with_specific_fork_version( spec, state, fork_version=spec.config.GENESIS_FORK_VERSION, @@ -272,9 +272,9 @@ def test_success_top_up_to_withdrawn_validator(spec, state): # Make a top-up balance to validator amount = spec.MAX_EFFECTIVE_BALANCE // 4 - deposit_receipt = prepare_deposit_receipt(spec, validator_index, amount, len(state.validators), signed=True) + deposit_request = prepare_deposit_request(spec, validator_index, amount, len(state.validators), signed=True) - yield from run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index) + yield from run_deposit_request_processing(spec, state, deposit_request, validator_index) deposits_len = len(state.pending_balance_deposits) assert state.pending_balance_deposits[deposits_len - 1].amount == amount diff --git a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_deposit_transition.py b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_deposit_transition.py index a8d1832091..f253b6c60d 100644 --- a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_deposit_transition.py +++ b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_deposit_transition.py @@ -9,7 +9,7 @@ from eth2spec.test.helpers.deposits import ( build_deposit_data, deposit_from_context, - prepare_deposit_receipt, + prepare_deposit_request, ) from 
eth2spec.test.helpers.execution_payload import ( compute_el_block_hash, @@ -38,8 +38,8 @@ def run_deposit_transition_block(spec, state, block, top_up_keys=[], valid=True) # Check that deposits are applied if valid: expected_pubkeys = [d.data.pubkey for d in block.body.deposits] - deposit_receipts = block.body.execution_payload.deposit_receipts - expected_pubkeys = expected_pubkeys + [d.pubkey for d in deposit_receipts if (d.pubkey not in top_up_keys)] + deposit_requests = block.body.execution_payload.deposit_requests + expected_pubkeys = expected_pubkeys + [d.pubkey for d in deposit_requests if (d.pubkey not in top_up_keys)] actual_pubkeys = [v.pubkey for v in state.validators[len(state.validators) - len(expected_pubkeys):]] assert actual_pubkeys == expected_pubkeys @@ -48,12 +48,12 @@ def run_deposit_transition_block(spec, state, block, top_up_keys=[], valid=True) def prepare_state_and_block(spec, state, deposit_cnt, - deposit_receipt_cnt, - first_deposit_receipt_index=0, - deposit_receipts_start_index=None, + deposit_request_cnt, + first_deposit_request_index=0, + deposit_requests_start_index=None, eth1_data_deposit_count=None): deposits = [] - deposit_receipts = [] + deposit_requests = [] keypair_index = len(state.validators) # Prepare deposits @@ -83,26 +83,26 @@ def prepare_state_and_block(spec, deposit_count=eth1_data_deposit_count, block_hash=state.eth1_data.block_hash) - # Prepare deposit receipts - for offset in range(deposit_receipt_cnt): - deposit_receipt = prepare_deposit_receipt(spec, + # Prepare deposit requests + for offset in range(deposit_request_cnt): + deposit_request = prepare_deposit_request(spec, keypair_index, # use max effective balance spec.MAX_EFFECTIVE_BALANCE, - first_deposit_receipt_index + offset, + first_deposit_request_index + offset, signed=True) - deposit_receipts.append(deposit_receipt) + deposit_requests.append(deposit_request) keypair_index += 1 # Set start index if defined - if deposit_receipts_start_index: - state.deposit_receipts_start_index = deposit_receipts_start_index + if deposit_requests_start_index: + state.deposit_requests_start_index = deposit_requests_start_index block = build_empty_block_for_next_slot(spec, state) - # Assign deposits and deposit receipts + # Assign deposits and deposit requests block.body.deposits = deposits - block.body.execution_payload.deposit_receipts = deposit_receipts + block.body.execution_payload.deposit_requests = deposit_requests block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) return state, block @@ -111,27 +111,27 @@ def prepare_state_and_block(spec, @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__start_index_is_set(spec, state): - # 0 deposits, 2 deposit receipts, unset deposit_receipts_start_index + # 0 deposits, 2 deposit requests, unset deposit_requests_start_index state, block = prepare_state_and_block(spec, state, deposit_cnt=0, - deposit_receipt_cnt=2, - first_deposit_receipt_index=state.eth1_data.deposit_count + 11) + deposit_request_cnt=2, + first_deposit_request_index=state.eth1_data.deposit_count + 11) yield from run_deposit_transition_block(spec, state, block) - # deposit_receipts_start_index must be set to the index of the first receipt - assert state.deposit_receipts_start_index == block.body.execution_payload.deposit_receipts[0].index + # deposit_requests_start_index must be set to the index of the first request + assert state.deposit_requests_start_index == block.body.execution_payload.deposit_requests[0].index 
@with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__process_eth1_deposits(spec, state): - # 3 deposits, 1 deposit receipt, state.eth1_data.deposit_count < state.deposit_receipts_start_index + # 3 deposits, 1 deposit request, state.eth1_data.deposit_count < state.deposit_requests_start_index state, block = prepare_state_and_block(spec, state, deposit_cnt=3, - deposit_receipt_cnt=1, - first_deposit_receipt_index=11, - deposit_receipts_start_index=7) + deposit_request_cnt=1, + first_deposit_request_index=11, + deposit_requests_start_index=7) yield from run_deposit_transition_block(spec, state, block) @@ -139,13 +139,13 @@ def test_deposit_transition__process_eth1_deposits(spec, state): @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__process_max_eth1_deposits(spec, state): - # spec.MAX_DEPOSITS deposits, 1 deposit receipt, state.eth1_data.deposit_count > state.deposit_receipts_start_index - # state.deposit_receipts_start_index == spec.MAX_DEPOSITS + # spec.MAX_DEPOSITS deposits, 1 deposit request, state.eth1_data.deposit_count > state.deposit_requests_start_index + # state.deposit_requests_start_index == spec.MAX_DEPOSITS state, block = prepare_state_and_block(spec, state, deposit_cnt=spec.MAX_DEPOSITS, - deposit_receipt_cnt=1, - first_deposit_receipt_index=spec.MAX_DEPOSITS + 1, - deposit_receipts_start_index=spec.MAX_DEPOSITS, + deposit_request_cnt=1, + first_deposit_request_index=spec.MAX_DEPOSITS + 1, + deposit_requests_start_index=spec.MAX_DEPOSITS, eth1_data_deposit_count=23) yield from run_deposit_transition_block(spec, state, block) @@ -154,12 +154,12 @@ def test_deposit_transition__process_max_eth1_deposits(spec, state): @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__process_eth1_deposits_up_to_start_index(spec, state): - # 3 deposits, 1 deposit receipt, state.eth1_data.deposit_count == state.deposit_receipts_start_index + # 3 deposits, 1 deposit request, state.eth1_data.deposit_count == state.deposit_requests_start_index state, block = prepare_state_and_block(spec, state, deposit_cnt=3, - deposit_receipt_cnt=1, - first_deposit_receipt_index=7, - deposit_receipts_start_index=3) + deposit_request_cnt=1, + first_deposit_request_index=7, + deposit_requests_start_index=3) yield from run_deposit_transition_block(spec, state, block) @@ -167,12 +167,12 @@ def test_deposit_transition__process_eth1_deposits_up_to_start_index(spec, state @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__invalid_not_enough_eth1_deposits(spec, state): - # 3 deposits, 1 deposit receipt, state.eth1_data.deposit_count < state.deposit_receipts_start_index + # 3 deposits, 1 deposit request, state.eth1_data.deposit_count < state.deposit_requests_start_index state, block = prepare_state_and_block(spec, state, deposit_cnt=3, - deposit_receipt_cnt=1, - first_deposit_receipt_index=29, - deposit_receipts_start_index=23, + deposit_request_cnt=1, + first_deposit_request_index=29, + deposit_requests_start_index=23, eth1_data_deposit_count=17) yield from run_deposit_transition_block(spec, state, block, valid=False) @@ -181,12 +181,12 @@ def test_deposit_transition__invalid_not_enough_eth1_deposits(spec, state): @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__invalid_too_many_eth1_deposits(spec, state): - # 3 deposits, 1 deposit receipt, state.eth1_data.deposit_count < state.eth1_data_index + # 3 deposits, 1 deposit request, state.eth1_data.deposit_count < state.eth1_data_index state, block = prepare_state_and_block(spec, 
state, deposit_cnt=3, - deposit_receipt_cnt=1, - first_deposit_receipt_index=11, - deposit_receipts_start_index=7, + deposit_request_cnt=1, + first_deposit_request_index=11, + deposit_requests_start_index=7, eth1_data_deposit_count=2) yield from run_deposit_transition_block(spec, state, block, valid=False) @@ -195,13 +195,13 @@ def test_deposit_transition__invalid_too_many_eth1_deposits(spec, state): @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__invalid_eth1_deposits_overlap_in_protocol_deposits(spec, state): - # spec.MAX_DEPOSITS deposits, 1 deposit receipt, state.eth1_data.deposit_count > state.deposit_receipts_start_index - # state.deposit_receipts_start_index == spec.MAX_DEPOSITS - 1 + # spec.MAX_DEPOSITS deposits, 1 deposit request, state.eth1_data.deposit_count > state.deposit_requests_start_index + # state.deposit_requests_start_index == spec.MAX_DEPOSITS - 1 state, block = prepare_state_and_block(spec, state, deposit_cnt=spec.MAX_DEPOSITS, - deposit_receipt_cnt=1, - first_deposit_receipt_index=spec.MAX_DEPOSITS, - deposit_receipts_start_index=spec.MAX_DEPOSITS - 1, + deposit_request_cnt=1, + first_deposit_request_index=spec.MAX_DEPOSITS, + deposit_requests_start_index=spec.MAX_DEPOSITS - 1, eth1_data_deposit_count=23) yield from run_deposit_transition_block(spec, state, block, valid=False) @@ -210,16 +210,16 @@ def test_deposit_transition__invalid_eth1_deposits_overlap_in_protocol_deposits( @with_phases([ELECTRA]) @spec_state_test def test_deposit_transition__deposit_and_top_up_same_block(spec, state): - # 1 deposit, 1 deposit receipt that top ups deposited validator + # 1 deposit, 1 deposit request that top ups deposited validator state, block = prepare_state_and_block(spec, state, deposit_cnt=1, - deposit_receipt_cnt=1, - first_deposit_receipt_index=11, - deposit_receipts_start_index=7) + deposit_request_cnt=1, + first_deposit_request_index=11, + deposit_requests_start_index=7) - # Artificially assign deposit's pubkey to a deposit receipt of the same block + # Artificially assign deposit's pubkey to a deposit request of the same block top_up_keys = [block.body.deposits[0].data.pubkey] - block.body.execution_payload.deposit_receipts[0].pubkey = top_up_keys[0] + block.body.execution_payload.deposit_requests[0].pubkey = top_up_keys[0] block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) pre_pending_deposits = len(state.pending_balance_deposits) @@ -229,5 +229,5 @@ def test_deposit_transition__deposit_and_top_up_same_block(spec, state): # Check the top up assert len(state.pending_balance_deposits) == pre_pending_deposits + 2 assert state.pending_balance_deposits[pre_pending_deposits].amount == block.body.deposits[0].data.amount - amount_from_deposit = block.body.execution_payload.deposit_receipts[0].amount + amount_from_deposit = block.body.execution_payload.deposit_requests[0].amount assert state.pending_balance_deposits[pre_pending_deposits + 1].amount == amount_from_deposit diff --git a/tests/core/pyspec/eth2spec/test/helpers/deposits.py b/tests/core/pyspec/eth2spec/test/helpers/deposits.py index 031c1fcf7a..c8aa30313c 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/deposits.py +++ b/tests/core/pyspec/eth2spec/test/helpers/deposits.py @@ -171,7 +171,7 @@ def prepare_state_and_deposit(spec, state, validator_index, amount, return deposit -def build_deposit_receipt(spec, +def build_deposit_request(spec, index, pubkey, privkey, @@ -179,7 +179,7 @@ def build_deposit_receipt(spec, withdrawal_credentials, 
signed): deposit_data = build_deposit_data(spec, pubkey, privkey, amount, withdrawal_credentials, signed=signed) - return spec.DepositReceipt( + return spec.DepositRequest( pubkey=deposit_data.pubkey, withdrawal_credentials=deposit_data.withdrawal_credentials, amount=deposit_data.amount, @@ -187,14 +187,14 @@ def build_deposit_receipt(spec, index=index) -def prepare_deposit_receipt(spec, validator_index, amount, +def prepare_deposit_request(spec, validator_index, amount, index=None, pubkey=None, privkey=None, withdrawal_credentials=None, signed=False): """ - Create a deposit receipt for the given validator, depositing the given amount. + Create a deposit request for the given validator, depositing the given amount. """ if index is None: index = validator_index @@ -209,7 +209,7 @@ def prepare_deposit_receipt(spec, validator_index, amount, if withdrawal_credentials is None: withdrawal_credentials = spec.BLS_WITHDRAWAL_PREFIX + spec.hash(pubkey)[1:] - return build_deposit_receipt( + return build_deposit_request( spec, index, pubkey, @@ -320,11 +320,11 @@ def run_deposit_processing_with_specific_fork_version( yield from run_deposit_processing(spec, state, deposit, validator_index, valid=valid, effective=effective) -def run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index, valid=True, effective=True): +def run_deposit_request_processing(spec, state, deposit_request, validator_index, valid=True, effective=True): """ - Run ``process_deposit_receipt``, yielding: + Run ``process_deposit_request``, yielding: - pre-state ('pre') - - deposit_receipt ('deposit_receipt') + - deposit_request ('deposit_request') - post-state ('post'). If ``valid == False``, run expecting ``AssertionError`` """ @@ -340,18 +340,18 @@ def run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index pre_pending_deposits = len(state.pending_balance_deposits) yield 'pre', state - yield 'deposit_receipt', deposit_receipt + yield 'deposit_request', deposit_request if not valid: - expect_assertion_error(lambda: spec.process_deposit_receipt(state, deposit_receipt)) + expect_assertion_error(lambda: spec.process_deposit_request(state, deposit_request)) yield 'post', None return - spec.process_deposit_receipt(state, deposit_receipt) + spec.process_deposit_request(state, deposit_request) yield 'post', state - if not effective or not bls.KeyValidate(deposit_receipt.pubkey): + if not effective or not bls.KeyValidate(deposit_request.pubkey): assert len(state.validators) == pre_validator_count assert len(state.balances) == pre_validator_count if is_top_up: @@ -368,11 +368,11 @@ def run_deposit_receipt_processing(spec, state, deposit_receipt, validator_index assert len(state.balances) == pre_validator_count + 1 assert len(state.pending_balance_deposits) == pre_pending_deposits + 1 - assert state.pending_balance_deposits[pre_pending_deposits].amount == deposit_receipt.amount + assert state.pending_balance_deposits[pre_pending_deposits].amount == deposit_request.amount assert state.pending_balance_deposits[pre_pending_deposits].index == validator_index -def run_deposit_receipt_processing_with_specific_fork_version( +def run_deposit_request_processing_with_specific_fork_version( spec, state, fork_version, @@ -391,17 +391,17 @@ def run_deposit_receipt_processing_with_specific_fork_version( pubkey=pubkey, withdrawal_credentials=withdrawal_credentials, amount=amount, signature=bls.Sign(privkey, spec.compute_signing_root(deposit_message, domain)) ) - deposit_receipt = spec.DepositReceipt( + deposit_request = 
spec.DepositRequest( pubkey=deposit_data.pubkey, withdrawal_credentials=deposit_data.withdrawal_credentials, amount=deposit_data.amount, signature=deposit_data.signature, index=validator_index) - yield from run_deposit_receipt_processing( + yield from run_deposit_request_processing( spec, state, - deposit_receipt, + deposit_request, validator_index, valid=valid, effective=effective diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index ef6e2f6442..36f2194578 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -35,7 +35,7 @@ def get_execution_payload_header(spec, execution_payload): payload_header.blob_gas_used = execution_payload.blob_gas_used payload_header.excess_blob_gas = execution_payload.excess_blob_gas if is_post_electra(spec): - payload_header.deposit_receipts_root = spec.hash_tree_root(execution_payload.deposit_receipts) + payload_header.deposit_requests_root = spec.hash_tree_root(execution_payload.deposit_requests) payload_header.withdrawal_requests_root = spec.hash_tree_root(execution_payload.withdrawal_requests) return payload_header @@ -58,7 +58,7 @@ def compute_el_header_block_hash(spec, payload_header, transactions_trie_root, withdrawals_trie_root=None, - deposit_receipts_trie_root=None, + deposit_requests_trie_root=None, withdrawal_requests_root=None): """ Computes the RLP execution block hash described by an `ExecutionPayloadHeader`. @@ -105,9 +105,9 @@ def compute_el_header_block_hash(spec, execution_payload_header_rlp.append((big_endian_int, payload_header.blob_gas_used)) execution_payload_header_rlp.append((big_endian_int, payload_header.excess_blob_gas)) if is_post_electra(spec): - # deposit_receipts_root - assert deposit_receipts_trie_root is not None - execution_payload_header_rlp.append((Binary(32, 32), deposit_receipts_trie_root)) + # deposit_requests_root + assert deposit_requests_trie_root is not None + execution_payload_header_rlp.append((Binary(32, 32), deposit_requests_trie_root)) # withdrawal requests root execution_payload_header_rlp.append((Binary(32, 32), withdrawal_requests_root)) @@ -150,22 +150,22 @@ def get_withdrawal_request_rlp(withdrawal_request): return encode(values, sedes) -def get_deposit_receipt_rlp(spec, deposit_receipt): - deposit_receipt_rlp = [ +def get_deposit_request_rlp(spec, deposit_request): + deposit_request_rlp = [ # pubkey - (Binary(48, 48), deposit_receipt.pubkey), + (Binary(48, 48), deposit_request.pubkey), # withdrawal_credentials - (Binary(32, 32), deposit_receipt.withdrawal_credentials), + (Binary(32, 32), deposit_request.withdrawal_credentials), # amount - (big_endian_int, deposit_receipt.amount), + (big_endian_int, deposit_request.amount), # pubkey - (Binary(96, 96), deposit_receipt.signature), + (Binary(96, 96), deposit_request.signature), # index - (big_endian_int, deposit_receipt.index), + (big_endian_int, deposit_request.index), ] - sedes = List([schema for schema, _ in deposit_receipt_rlp]) - values = [value for _, value in deposit_receipt_rlp] + sedes = List([schema for schema, _ in deposit_request_rlp]) + values = [value for _, value in deposit_request_rlp] return encode(values, sedes) @@ -173,15 +173,15 @@ def compute_el_block_hash(spec, payload): transactions_trie_root = compute_trie_root_from_indexed_data(payload.transactions) withdrawals_trie_root = None - deposit_receipts_trie_root = None + deposit_requests_trie_root = None 
withdrawal_requests_root = None if is_post_capella(spec): withdrawals_encoded = [get_withdrawal_rlp(withdrawal) for withdrawal in payload.withdrawals] withdrawals_trie_root = compute_trie_root_from_indexed_data(withdrawals_encoded) if is_post_electra(spec): - deposit_receipts_encoded = [get_deposit_receipt_rlp(spec, receipt) for receipt in payload.deposit_receipts] - deposit_receipts_trie_root = compute_trie_root_from_indexed_data(deposit_receipts_encoded) + deposit_requests_encoded = [get_deposit_request_rlp(spec, receipt) for receipt in payload.deposit_requests] + deposit_requests_trie_root = compute_trie_root_from_indexed_data(deposit_requests_encoded) withdrawal_requests_encoded = [get_withdrawal_request_rlp(request) for request in payload.withdrawal_requests] withdrawal_requests_root = compute_trie_root_from_indexed_data(withdrawal_requests_encoded) @@ -192,7 +192,7 @@ def compute_el_block_hash(spec, payload): payload_header, transactions_trie_root, withdrawals_trie_root, - deposit_receipts_trie_root, + deposit_requests_trie_root, withdrawal_requests_root, ) @@ -230,7 +230,7 @@ def build_empty_execution_payload(spec, state, randao_mix=None): payload.excess_blob_gas = 0 if is_post_electra(spec): # just to be clear - payload.deposit_receipts = [] + payload.deposit_requests = [] payload.block_hash = compute_el_block_hash(spec, payload) diff --git a/tests/core/pyspec/eth2spec/test/helpers/genesis.py b/tests/core/pyspec/eth2spec/test/helpers/genesis.py index 3896b41731..34ddd88ca2 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/genesis.py +++ b/tests/core/pyspec/eth2spec/test/helpers/genesis.py @@ -50,13 +50,13 @@ def get_sample_genesis_execution_payload_header(spec, transactions_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") withdrawals_trie_root = None - deposit_receipts_trie_root = None + deposit_requests_trie_root = None exits_trie_root = None if is_post_capella(spec): withdrawals_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") if is_post_electra(spec): - deposit_receipts_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") + deposit_requests_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") exits_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") payload_header.block_hash = compute_el_header_block_hash( @@ -64,7 +64,7 @@ def get_sample_genesis_execution_payload_header(spec, payload_header, transactions_trie_root, withdrawals_trie_root, - deposit_receipts_trie_root, + deposit_requests_trie_root, exits_trie_root, ) return payload_header @@ -134,7 +134,7 @@ def create_genesis_state(spec, validator_balances, activation_threshold): ) if is_post_electra(spec): - state.deposit_receipts_start_index = spec.UNSET_DEPOSIT_RECEIPTS_START_INDEX + state.deposit_requests_start_index = spec.UNSET_DEPOSIT_REQUESTS_START_INDEX if is_post_whisk(spec): vc = len(state.validators) diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index b020b5fd03..c87ce0e8b2 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -45,7 +45,7 @@ Operations: | `execution_payload` | `BeaconBlockBody` | **`body`** | `process_execution_payload(state, body)` (new in Bellatrix) | | `withdrawals` | `ExecutionPayload` | `execution_payload` | `process_withdrawals(state, execution_payload)` (new in Capella) | | `bls_to_execution_change` | 
`SignedBLSToExecutionChange` | `address_change` | `process_bls_to_execution_change(state, address_change)` (new in Capella) | -| `deposit_receipt` | `DepositReceipt` | `deposit_receipt` | `process_deposit_receipt(state, deposit_receipt)` (new in Electra) | +| `deposit_request` | `DepositRequest` | `deposit_request` | `process_deposit_request(state, deposit_request)` (new in Electra) | | `exits` | `ExecutionLayerExit` | `execution_layer_exit` | `process_execution_layer_exit(state, execution_layer_exit)` (new in Electra) | Note that `block_header` is not strictly an operation (and is a full `Block`), but processed in the same manner, and hence included here. diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 0d203fca6f..c04952be84 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -46,7 +46,7 @@ _new_electra_mods = {key: 'eth2spec.test.electra.block_processing.test_process_' + key for key in [ 'attestation', 'consolidation', - 'deposit_receipt', + 'deposit_request', 'execution_layer_withdrawal_request', 'voluntary_exit' ]} From cd0f8a12012f98a5554edbf2e7fa058cfdec1258 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 13:25:53 -0500 Subject: [PATCH 11/89] Change duplicate cell id test to use 65 cell ids --- tests/generators/kzg_7594/main.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index 9afea6efe1..d55609de01 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -782,7 +782,12 @@ def case05_recover_all_cells(): # Edge case: Duplicate cell_id blob = BLOB_RANDOM_VALID2 cells = spec.compute_cells(blob) - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + # There will be 65 cell, where 64 are unique and 1 is a duplicate. + # Depending on the implementation, 63 & 1 might not fail for the right + # reason. For example, if the implementation assigns cells in an array + # via index, this would result in 63 cells and the test would fail due + # to insufficient cell count, not because of a duplicate cell. + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 + 1)) partial_cells = [cells[cell_id] for cell_id in cell_ids] # Replace first cell_id with the second cell_id cell_ids[0] = cell_ids[1] From 0891be24f69e880657498882bb56785c2cf52930 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 13:34:35 -0500 Subject: [PATCH 12/89] Change cell -> cells --- tests/generators/kzg_7594/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index d55609de01..a780f289c5 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -782,7 +782,7 @@ def case05_recover_all_cells(): # Edge case: Duplicate cell_id blob = BLOB_RANDOM_VALID2 cells = spec.compute_cells(blob) - # There will be 65 cell, where 64 are unique and 1 is a duplicate. + # There will be 65 cells, where 64 are unique and 1 is a duplicate. # Depending on the implementation, 63 & 1 might not fail for the right # reason. 
For example, if the implementation assigns cells in an array # via index, this would result in 63 cells and the test would fail due From 99ccfe0163b3ea961b90e9f28f4c039d08f52d93 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 16:29:49 -0500 Subject: [PATCH 13/89] Add electra version of AggregateAndProof --- specs/electra/beacon-chain.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 62da891146..674c4e02e6 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -291,6 +291,23 @@ class Attestation(Container): signature: BLSSignature ``` +#### `AggregateAndProof` + +```python +class AggregateAndProof(Container): + aggregator_index: ValidatorIndex + aggregate: Attestation # [New in Electra:EIP7549] + selection_proof: BLSSignature +``` + +#### `SignedAggregateAndProof` + +```python +class SignedAggregateAndProof(Container): + message: AggregateAndProof # [New in Electra:EIP7549] + signature: BLSSignature +``` + #### `IndexedAttestation` ```python From 0da03ab5acc2522b1d695935843d68e0d379385e Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 16:33:06 -0500 Subject: [PATCH 14/89] Update TOC --- specs/electra/beacon-chain.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 674c4e02e6..44d6042a60 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -35,6 +35,8 @@ - [`AttesterSlashing`](#attesterslashing) - [Extended Containers](#extended-containers) - [`Attestation`](#attestation) + - [`AggregateAndProof`](#aggregateandproof) + - [`SignedAggregateAndProof`](#signedaggregateandproof) - [`IndexedAttestation`](#indexedattestation) - [`BeaconBlockBody`](#beaconblockbody) - [`ExecutionPayload`](#executionpayload) From 8aed03767a15cfa27073d24cb44f2ae822acdc05 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 16:37:23 -0500 Subject: [PATCH 15/89] Change "new" to "modified" --- specs/electra/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 44d6042a60..e119ab3180 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -298,7 +298,7 @@ class Attestation(Container): ```python class AggregateAndProof(Container): aggregator_index: ValidatorIndex - aggregate: Attestation # [New in Electra:EIP7549] + aggregate: Attestation # [Modified in Electra:EIP7549] selection_proof: BLSSignature ``` @@ -306,7 +306,7 @@ class AggregateAndProof(Container): ```python class SignedAggregateAndProof(Container): - message: AggregateAndProof # [New in Electra:EIP7549] + message: AggregateAndProof # [Modified in Electra:EIP7549] signature: BLSSignature ``` From f3d0a0e89b916e57ab349a6e84e02959c1337a44 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 19:13:24 -0500 Subject: [PATCH 16/89] Move containers to validator.md --- specs/electra/beacon-chain.md | 39 +++++++++-------------------------- specs/electra/validator.md | 25 ++++++++++++++++++++++ 2 files changed, 35 insertions(+), 29 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index e119ab3180..188a25d3d3 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -33,11 +33,9 @@ - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - 
[`AttesterSlashing`](#attesterslashing) + - [`IndexedAttestation`](#indexedattestation) - [Extended Containers](#extended-containers) - [`Attestation`](#attestation) - - [`AggregateAndProof`](#aggregateandproof) - - [`SignedAggregateAndProof`](#signedaggregateandproof) - - [`IndexedAttestation`](#indexedattestation) - [`BeaconBlockBody`](#beaconblockbody) - [`ExecutionPayload`](#executionpayload) - [`ExecutionPayloadHeader`](#executionpayloadheader) @@ -281,42 +279,25 @@ class AttesterSlashing(Container): attestation_2: IndexedAttestation # [Modified in Electra:EIP7549] ``` -### Extended Containers - -#### `Attestation` +#### `IndexedAttestation` ```python -class Attestation(Container): - aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] # [Modified in Electra:EIP7549] +class IndexedAttestation(Container): + # [Modified in Electra:EIP7549] + attesting_indices: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] data: AttestationData - committee_bits: Bitvector[MAX_COMMITTEES_PER_SLOT] # [New in Electra:EIP7549] signature: BLSSignature ``` -#### `AggregateAndProof` - -```python -class AggregateAndProof(Container): - aggregator_index: ValidatorIndex - aggregate: Attestation # [Modified in Electra:EIP7549] - selection_proof: BLSSignature -``` - -#### `SignedAggregateAndProof` - -```python -class SignedAggregateAndProof(Container): - message: AggregateAndProof # [Modified in Electra:EIP7549] - signature: BLSSignature -``` +### Extended Containers -#### `IndexedAttestation` +#### `Attestation` ```python -class IndexedAttestation(Container): - # [Modified in Electra:EIP7549] - attesting_indices: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] +class Attestation(Container): + aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] # [Modified in Electra:EIP7549] data: AttestationData + committee_bits: Bitvector[MAX_COMMITTEES_PER_SLOT] # [New in Electra:EIP7549] signature: BLSSignature ``` diff --git a/specs/electra/validator.md b/specs/electra/validator.md index 6b838c1260..712156db22 100644 --- a/specs/electra/validator.md +++ b/specs/electra/validator.md @@ -8,6 +8,10 @@ - [Introduction](#introduction) - [Prerequisites](#prerequisites) +- [Containers](#containers) + - [Modified Containers](#modified-containers) + - [`AggregateAndProof`](#aggregateandproof) + - [`SignedAggregateAndProof`](#signedaggregateandproof) - [Block proposal](#block-proposal) - [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody) - [Attester slashings](#attester-slashings) @@ -34,6 +38,27 @@ All behaviors and definitions defined in this document, and documents it extends All terminology, constants, functions, and protocol mechanics defined in the updated Beacon Chain doc of [Electra](./beacon-chain.md) are requisite for this document and used throughout. Please see related Beacon Chain doc before continuing and use them as a reference throughout. 
+## Containers + +### Modified Containers + +#### `AggregateAndProof` + +```python +class AggregateAndProof(Container): + aggregator_index: ValidatorIndex + aggregate: Attestation # [Modified in Electra:EIP7549] + selection_proof: BLSSignature +``` + +#### `SignedAggregateAndProof` + +```python +class SignedAggregateAndProof(Container): + message: AggregateAndProof # [Modified in Electra:EIP7549] + signature: BLSSignature +``` + ## Block proposal ### Constructing the `BeaconBlockBody` From 93c043a647f3ac9ebb22f13a8fa191db836c0ea0 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 7 May 2024 19:14:48 -0500 Subject: [PATCH 17/89] Revert moving IndexedAttestation I'll do this in a separate PR. --- specs/electra/beacon-chain.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 188a25d3d3..62da891146 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -33,9 +33,9 @@ - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - [`AttesterSlashing`](#attesterslashing) - - [`IndexedAttestation`](#indexedattestation) - [Extended Containers](#extended-containers) - [`Attestation`](#attestation) + - [`IndexedAttestation`](#indexedattestation) - [`BeaconBlockBody`](#beaconblockbody) - [`ExecutionPayload`](#executionpayload) - [`ExecutionPayloadHeader`](#executionpayloadheader) @@ -279,16 +279,6 @@ class AttesterSlashing(Container): attestation_2: IndexedAttestation # [Modified in Electra:EIP7549] ``` -#### `IndexedAttestation` - -```python -class IndexedAttestation(Container): - # [Modified in Electra:EIP7549] - attesting_indices: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] - data: AttestationData - signature: BLSSignature -``` - ### Extended Containers #### `Attestation` @@ -301,6 +291,16 @@ class Attestation(Container): signature: BLSSignature ``` +#### `IndexedAttestation` + +```python +class IndexedAttestation(Container): + # [Modified in Electra:EIP7549] + attesting_indices: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] + data: AttestationData + signature: BLSSignature +``` + #### `BeaconBlockBody` ```python From b9befda08b6753fe6e49173c58551ccab74a5c94 Mon Sep 17 00:00:00 2001 From: Nishant Das Date: Thu, 9 May 2024 16:03:55 +0800 Subject: [PATCH 18/89] Update p2p-interface.md --- specs/_features/eip7594/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/p2p-interface.md b/specs/_features/eip7594/p2p-interface.md index ea372026d9..2fc89852ec 100644 --- a/specs/_features/eip7594/p2p-interface.md +++ b/specs/_features/eip7594/p2p-interface.md @@ -218,7 +218,7 @@ Request Content: ( start_slot: Slot count: uint64 - columns: List[ColumnIndex] + columns: List[ColumnIndex, MAX_REQUEST_DATA_COLUMN_SIDECARS] ) ``` From 8f73a474233403e76af81ed316c3624bd75b9b0e Mon Sep 17 00:00:00 2001 From: Nishant Das Date: Thu, 9 May 2024 16:14:52 +0800 Subject: [PATCH 19/89] Update p2p-interface.md --- specs/_features/eip7594/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/p2p-interface.md b/specs/_features/eip7594/p2p-interface.md index 2fc89852ec..268881ae2e 100644 --- a/specs/_features/eip7594/p2p-interface.md +++ b/specs/_features/eip7594/p2p-interface.md @@ -218,7 +218,7 @@ Request Content: ( start_slot: Slot count: uint64 - columns: List[ColumnIndex, 
MAX_REQUEST_DATA_COLUMN_SIDECARS] + columns: List[ColumnIndex, NUMBER_OF_COLUMNS] ) ``` From fdeff744ffc74074bd0a444228b9d546a0f68667 Mon Sep 17 00:00:00 2001 From: George Kadianakis Date: Tue, 14 May 2024 17:18:14 +0300 Subject: [PATCH 20/89] EIP-7594: Add a missing check and a missing test vector (#3765) --- .../eip7594/polynomial-commitments-sampling.md | 3 +++ tests/generators/kzg_7594/main.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 08a414c418..3a0bd8a77f 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -683,6 +683,9 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequ # Check that each cell is the correct length for cell in cells: assert len(cell) == BYTES_PER_CELL + # Check that the cell ids are within bounds + for cell_id in cell_ids: + assert cell_id < CELLS_PER_EXT_BLOB # Get the extended domain roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index a780f289c5..ef412805a4 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -710,6 +710,21 @@ def case05_recover_all_cells(): 'output': None } + # Edge case: More cells provided than CELLS_PER_EXT_BLOB + blob = BLOB_RANDOM_VALID2 + cells = spec.compute_cells(blob) + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB)) + [0] + partial_cells = [cells[cell_id] for cell_id in cell_ids] + expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + identifier = make_id(cell_ids, partial_cells) + yield f'recover_all_cells_case_invalid_more_cells_than_cells_per_ext_blob_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + }, + 'output': None + } + # Edge case: Invalid cell_id blob = BLOB_RANDOM_VALID1 cells = spec.compute_cells(blob) From d29581315579d37064abf21b174750f9d5c099c0 Mon Sep 17 00:00:00 2001 From: Andrew Davis <1709934+Savid@users.noreply.github.com> Date: Wed, 15 May 2024 15:34:41 +0300 Subject: [PATCH 21/89] EIP-7594: add custody settings config (#3766) * EIP-7594: add custody settings config * Add `TARGET_NUMBER_OF_PEERS` to config * add TARGET_NUMBER_OF_PEERS Co-authored-by: Hsiao-Wei Wang * fix double TARGET_NUMBER_OF_PEERS * fix tests --------- Co-authored-by: Hsiao-Wei Wang --- configs/mainnet.yaml | 3 +++ configs/minimal.yaml | 3 +++ .../test/eip7594/unittests/test_config_invariants.py | 4 ++-- .../eth2spec/test/eip7594/unittests/test_custody.py | 10 +++++----- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index cda09fca72..7051873ce9 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -159,6 +159,9 @@ NUMBER_OF_COLUMNS: 128 MAX_CELLS_IN_EXTENDED_MATRIX: 768 DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32 MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384 +SAMPLES_PER_SLOT: 8 +CUSTODY_REQUIREMENT: 1 +TARGET_NUMBER_OF_PEERS: 70 # [New in Electra:EIP7251] MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: 128000000000 # 2**7 * 10**9 (= 128,000,000,000) diff --git a/configs/minimal.yaml b/configs/minimal.yaml index e7a92a811e..8e2a222d47 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -158,6 +158,9 @@ NUMBER_OF_COLUMNS: 128 MAX_CELLS_IN_EXTENDED_MATRIX: 768 DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32 
MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384 +SAMPLES_PER_SLOT: 8 +CUSTODY_REQUIREMENT: 1 +TARGET_NUMBER_OF_PEERS: 70 # [New in Electra:EIP7251] MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: 64000000000 # 2**6 * 10**9 (= 64,000,000,000) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py index 9ad21d2624..998bf35128 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_config_invariants.py @@ -11,8 +11,8 @@ def test_invariants(spec): assert spec.FIELD_ELEMENTS_PER_BLOB % spec.FIELD_ELEMENTS_PER_CELL == 0 assert spec.FIELD_ELEMENTS_PER_EXT_BLOB % spec.config.NUMBER_OF_COLUMNS == 0 - assert spec.SAMPLES_PER_SLOT <= spec.config.NUMBER_OF_COLUMNS - assert spec.CUSTODY_REQUIREMENT <= spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT + assert spec.config.SAMPLES_PER_SLOT <= spec.config.NUMBER_OF_COLUMNS + assert spec.config.CUSTODY_REQUIREMENT <= spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT assert spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT <= spec.config.NUMBER_OF_COLUMNS assert spec.config.NUMBER_OF_COLUMNS % spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT == 0 assert spec.config.MAX_REQUEST_DATA_COLUMN_SIDECARS == ( diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_custody.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_custody.py index e1ab136c4f..5db3635a8e 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_custody.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/test_custody.py @@ -9,7 +9,7 @@ def run_get_custody_columns(spec, peer_count, custody_subnet_count): assignments = [spec.get_custody_columns(node_id, custody_subnet_count) for node_id in range(peer_count)] - columns_per_subnet = spec.NUMBER_OF_COLUMNS // spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT + columns_per_subnet = spec.config.NUMBER_OF_COLUMNS // spec.config.DATA_COLUMN_SIDECAR_SUBNET_COUNT for assignment in assignments: assert len(assignment) == custody_subnet_count * columns_per_subnet assert len(assignment) == len(set(assignment)) @@ -20,8 +20,8 @@ def run_get_custody_columns(spec, peer_count, custody_subnet_count): @single_phase def test_get_custody_columns_peers_within_number_of_columns(spec): peer_count = 10 - custody_subnet_count = spec.CUSTODY_REQUIREMENT - assert spec.NUMBER_OF_COLUMNS > peer_count + custody_subnet_count = spec.config.CUSTODY_REQUIREMENT + assert spec.config.NUMBER_OF_COLUMNS > peer_count run_get_custody_columns(spec, peer_count, custody_subnet_count) @@ -30,8 +30,8 @@ def test_get_custody_columns_peers_within_number_of_columns(spec): @single_phase def test_get_custody_columns_peers_more_than_number_of_columns(spec): peer_count = 200 - custody_subnet_count = spec.CUSTODY_REQUIREMENT - assert spec.NUMBER_OF_COLUMNS < peer_count + custody_subnet_count = spec.config.CUSTODY_REQUIREMENT + assert spec.config.NUMBER_OF_COLUMNS < peer_count run_get_custody_columns(spec, peer_count, custody_subnet_count) From 8bf606884ba4132d31c214fe0de658271e98378d Mon Sep 17 00:00:00 2001 From: Etan Kissling Date: Thu, 16 May 2024 10:59:51 +0300 Subject: [PATCH 22/89] EIP-7549: Append new `committee_bits` field to end of `Attestation` Introducing new fields in the middle of an existing `Container` pointlessly breaks merkleization of all subsequent fields. In the case of `committee_bits`, it is also misleading, as `signature` only covers `data` inside `Attestation`. 
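For illustration only (not part of this patch): a minimal sketch of why the field ordering matters, assuming stand-in 32-byte field roots and a simplified pad-and-hash routine in place of full SSZ `hash_tree_root`. Appending `committee_bits` keeps the leaf position of every existing field, while inserting it before `signature` would shift that leaf and invalidate Merkle proofs built against the old position.

```python
from hashlib import sha256


def merkleize(leaves):
    # Simplified SSZ-style merkleization: pad the leaf count to the next
    # power of two with zero chunks, then hash pairwise up to a single root.
    layer = list(leaves)
    size = 1
    while size < len(layer):
        size *= 2
    layer += [b"\x00" * 32] * (size - len(layer))
    while len(layer) > 1:
        layer = [sha256(layer[i] + layer[i + 1]).digest() for i in range(0, len(layer), 2)]
    return layer[0]


# Stand-in roots for the pre-Electra Attestation fields and the new field.
aggregation_bits, data, signature, committee_bits = (bytes([i]) * 32 for i in range(1, 5))

pre_electra = [aggregation_bits, data, signature]
appended = [aggregation_bits, data, signature, committee_bits]  # new field last
inserted = [aggregation_bits, data, committee_bits, signature]  # new field mid-container

# `signature` keeps leaf index 2 when the new field is appended...
assert appended.index(signature) == pre_electra.index(signature) == 2
# ...but moves to leaf index 3 when the new field is inserted before it,
# so any Merkle proof for `signature` against the old position breaks.
assert inserted.index(signature) == 3

print("pre-Electra root:", merkleize(pre_electra).hex())
print("appended root:   ", merkleize(appended).hex())
print("inserted root:   ", merkleize(inserted).hex())
```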
--- specs/electra/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 62da891146..c75970fbcd 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -287,8 +287,8 @@ class AttesterSlashing(Container): class Attestation(Container): aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] # [Modified in Electra:EIP7549] data: AttestationData - committee_bits: Bitvector[MAX_COMMITTEES_PER_SLOT] # [New in Electra:EIP7549] signature: BLSSignature + committee_bits: Bitvector[MAX_COMMITTEES_PER_SLOT] # [New in Electra:EIP7549] ``` #### `IndexedAttestation` From 74be5b243671c90f5b2c66f36e8d83e2b5d884dc Mon Sep 17 00:00:00 2001 From: Mark Mackey Date: Thu, 16 May 2024 15:29:32 +0300 Subject: [PATCH 23/89] Electra: Properly Calculate Proposer Probabilities --- specs/electra/beacon-chain.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 551a41af16..5c3c67dd21 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -41,6 +41,7 @@ - [`BeaconState`](#beaconstate) - [Helper functions](#helper-functions) - [Predicates](#predicates) + - [Updated `compute_proposer_index`](#updated-compute_proposer_index) - [Updated `is_eligible_for_activation_queue`](#updated-is_eligible_for_activation_queue) - [New `is_compounding_withdrawal_credential`](#new-is_compounding_withdrawal_credential) - [New `has_compounding_withdrawal_credential`](#new-has_compounding_withdrawal_credential) @@ -431,6 +432,26 @@ class BeaconState(Container): ### Predicates +#### Updated `compute_proposer_index` + +```python +def compute_proposer_index(state: BeaconState, indices: Sequence[ValidatorIndex], seed: Bytes32) -> ValidatorIndex: + """ + Return from ``indices`` a random index sampled by effective balance. 
+ """ + assert len(indices) > 0 + MAX_RANDOM_BYTE = 2**8 - 1 + i = uint64(0) + total = uint64(len(indices)) + while True: + candidate_index = indices[compute_shuffled_index(i % total, total, seed)] + random_byte = hash(seed + uint_to_bytes(uint64(i // 32)))[i % 32] + effective_balance = state.validators[candidate_index].effective_balance + if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE_ELECTRA * random_byte: #[Modified in Electra:EIP7251] + return candidate_index + i += 1 +``` + #### Updated `is_eligible_for_activation_queue` ```python From 222e980b7ef4eba949f9b1d3ba8814e5ce1224b0 Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Thu, 16 May 2024 16:55:23 +0300 Subject: [PATCH 24/89] Fix lint --- specs/electra/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 5c3c67dd21..309e8a1b78 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -447,7 +447,7 @@ def compute_proposer_index(state: BeaconState, indices: Sequence[ValidatorIndex] candidate_index = indices[compute_shuffled_index(i % total, total, seed)] random_byte = hash(seed + uint_to_bytes(uint64(i // 32)))[i % 32] effective_balance = state.validators[candidate_index].effective_balance - if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE_ELECTRA * random_byte: #[Modified in Electra:EIP7251] + if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE_ELECTRA * random_byte: # [Modified in Electra:EIP7251] return candidate_index i += 1 ``` From 32b441d381d45f84ce30d289f02b36c3353ad9a8 Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Thu, 16 May 2024 17:03:10 +0300 Subject: [PATCH 25/89] Fix lint 2 --- specs/electra/beacon-chain.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 309e8a1b78..b892acf82f 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -447,7 +447,8 @@ def compute_proposer_index(state: BeaconState, indices: Sequence[ValidatorIndex] candidate_index = indices[compute_shuffled_index(i % total, total, seed)] random_byte = hash(seed + uint_to_bytes(uint64(i // 32)))[i % 32] effective_balance = state.validators[candidate_index].effective_balance - if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE_ELECTRA * random_byte: # [Modified in Electra:EIP7251] + # [Modified in Electra:EIP7251] + if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE_ELECTRA * random_byte: return candidate_index i += 1 ``` From 0f5f155d1b67e150428edc59235b9589edea5d71 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 17 May 2024 14:38:58 +0800 Subject: [PATCH 26/89] fix typo (#3771) --- tests/core/pyspec/eth2spec/utils/bls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/utils/bls.py b/tests/core/pyspec/eth2spec/utils/bls.py index 666de68094..59e24109c6 100644 --- a/tests/core/pyspec/eth2spec/utils/bls.py +++ b/tests/core/pyspec/eth2spec/utils/bls.py @@ -1,5 +1,5 @@ from py_ecc.bls import G2ProofOfPossession as py_ecc_bls -from py_ecc.bls.g2_primatives import signature_to_G2 as _signature_to_G2 +from py_ecc.bls.g2_primitives import signature_to_G2 as _signature_to_G2 from py_ecc.optimized_bls12_381 import ( # noqa: F401 G1 as py_ecc_G1, G2 as py_ecc_G2, From f7bb8aab1ae0747b94e171874ac9e0790bdc9729 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Fri, 17 May 2024 17:31:08 +0300 Subject: [PATCH 
27/89] EIP-7594 - Update ENR record Proposal for https://github.com/ethereum/consensus-specs/issues/3744 --- specs/_features/eip7594/p2p-interface.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/specs/_features/eip7594/p2p-interface.md b/specs/_features/eip7594/p2p-interface.md index 268881ae2e..06d1bb2d19 100644 --- a/specs/_features/eip7594/p2p-interface.md +++ b/specs/_features/eip7594/p2p-interface.md @@ -284,10 +284,10 @@ After the initial data column sidecar, clients MAY stop in the process of respon #### ENR structure -##### `custody_subnet_count` +##### Custody subnet count -A new field is added to the ENR under the key `custody_subnet_count` to facilitate custody data column discovery. +A new field is added to the ENR under the key `csc` to facilitate custody data column discovery. -| Key | Value | -|:-----------------------|:-------------| -| `custody_subnet_count` | SSZ `uint64` | +| Key | Value | +|:------|:-----------------------------------------| +| `csc` | Custody subnet count, big endian integer | From 64b93c9361fa426c26a860db87edfc5364699a37 Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Fri, 17 May 2024 17:47:38 +0300 Subject: [PATCH 28/89] Fix table. --- specs/_features/eip7594/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/p2p-interface.md b/specs/_features/eip7594/p2p-interface.md index 06d1bb2d19..582ee65140 100644 --- a/specs/_features/eip7594/p2p-interface.md +++ b/specs/_features/eip7594/p2p-interface.md @@ -28,7 +28,7 @@ - [DataColumnSidecarsByRange v1](#datacolumnsidecarsbyrange-v1) - [The discovery domain: discv5](#the-discovery-domain-discv5) - [ENR structure](#enr-structure) - - [`custody_subnet_count`](#custody_subnet_count) + - [Custody subnet count](#custody-subnet-count) From c685a6574cad413a531d977e69fb5aedd1098ea8 Mon Sep 17 00:00:00 2001 From: terence tsao Date: Mon, 20 May 2024 12:18:52 -0700 Subject: [PATCH 29/89] Fix broken links for new-feature.md --- docs/docs/new-feature.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/new-feature.md b/docs/docs/new-feature.md index b36129bd4e..78fd1357fc 100644 --- a/docs/docs/new-feature.md +++ b/docs/docs/new-feature.md @@ -54,10 +54,10 @@ You can refer to the previous fork's `fork.md` file. ### 5. Make it executable - Update Pyspec [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name. - Update helpers for [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py) for building the spec: - - Update [`pysetup/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/constants.py) with the new feature name as Pyspec `constants.py` defined. + - Update [`pysetup/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/constants.py) with the new feature name as Pyspec `constants.py` defined. - Update [`pysetup/spec_builders/__init__.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/spec_builders/__init__.py). Implement a new `SpecBuilder` in `pysetup/spec_builders/.py` with the new feature name. e.g., `EIP9999SpecBuilder`. Append it to the `spec_builders` list. - Update [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py): add the path of the new markdown files in `get_md_doc_paths` function if needed. 
-- Update `PREVIOUS_FORK_OF` setting in both [`test/helpers/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/constants.py) and [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py). +- Update `PREVIOUS_FORK_OF` setting in both [`test/helpers/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) and [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py). - NOTE: since these two modules (the pyspec itself and the spec builder tool) must be separate, the fork sequence setting has to be defined again. ## B: Make it executable for pytest and test generator From eb380c1e835fab1d7268117c7f72b839e3c8c4ff Mon Sep 17 00:00:00 2001 From: terence tsao Date: Mon, 20 May 2024 12:25:29 -0700 Subject: [PATCH 30/89] Remove Deneb from all phases --- tests/core/pyspec/eth2spec/test/helpers/constants.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/constants.py b/tests/core/pyspec/eth2spec/test/helpers/constants.py index 067d2a480e..ed398516cd 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/constants.py +++ b/tests/core/pyspec/eth2spec/test/helpers/constants.py @@ -34,7 +34,6 @@ ALL_PHASES = ( # Formal forks *MAINNET_FORKS, - DENEB, ELECTRA, # Experimental patches EIP7594, From 1292bd9375db82cf1a6b73c2fab286ff64b4f959 Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 11:22:27 +0200 Subject: [PATCH 31/89] move to EL consolidations --- presets/mainnet/electra.yaml | 2 +- presets/minimal/electra.yaml | 2 +- specs/electra/beacon-chain.md | 91 ++++++++++++++++++----------------- 3 files changed, 48 insertions(+), 47 deletions(-) diff --git a/presets/mainnet/electra.yaml b/presets/mainnet/electra.yaml index 72c626ded2..2c8d642bf7 100644 --- a/presets/mainnet/electra.yaml +++ b/presets/mainnet/electra.yaml @@ -30,7 +30,7 @@ MAX_ATTESTER_SLASHINGS_ELECTRA: 1 # `uint64(2**3)` (= 8) MAX_ATTESTATIONS_ELECTRA: 8 # `uint64(2**0)` (= 1) -MAX_CONSOLIDATIONS: 1 +MAX_CONSOLIDATIONS_PER_PAYLOAD: 1 # Execution # --------------------------------------------------------------- diff --git a/presets/minimal/electra.yaml b/presets/minimal/electra.yaml index 11aa5e1f50..beb5265746 100644 --- a/presets/minimal/electra.yaml +++ b/presets/minimal/electra.yaml @@ -30,7 +30,7 @@ MAX_ATTESTER_SLASHINGS_ELECTRA: 1 # `uint64(2**3)` (= 8) MAX_ATTESTATIONS_ELECTRA: 8 # `uint64(2**0)` (= 1) -MAX_CONSOLIDATIONS: 1 +MAX_CONSOLIDATIONS_PER_PAYLOAD: 1 # Execution # --------------------------------------------------------------- diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 62da891146..3a0deebb06 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -164,7 +164,7 @@ The following values are (non-configurable) constants used throughout the specif | Name | Value | | - | - | -| `MAX_CONSOLIDATIONS` | `uint64(1)` | +| `MAX_CONSOLIDATIONS_PER_PAYLOAD` | `uint64(1)` | ### Execution @@ -238,25 +238,15 @@ class ExecutionLayerWithdrawalRequest(Container): amount: Gwei ``` -#### `Consolidation` +#### `ExecutionLayerConsolidation` *Note*: The container is new in EIP7251. ```python -class Consolidation(Container): - source_index: ValidatorIndex - target_index: ValidatorIndex - epoch: Epoch -``` - -#### `SignedConsolidation` - -*Note*: The container is new in EIP7251. 
- -```python -class SignedConsolidation(Container): - message: Consolidation - signature: BLSSignature +class ExecutionLayerConsolidation(Container): + source_address: ExecutionAddress + source_pubkey: BLSPubkey + target_pubkey: BLSPubkey ``` #### `PendingConsolidation` @@ -319,7 +309,6 @@ class BeaconBlockBody(Container): execution_payload: ExecutionPayload # [Modified in Electra:EIP6110:EIP7002] bls_to_execution_changes: List[SignedBLSToExecutionChange, MAX_BLS_TO_EXECUTION_CHANGES] blob_kzg_commitments: List[KZGCommitment, MAX_BLOB_COMMITMENTS_PER_BLOCK] - consolidations: List[SignedConsolidation, MAX_CONSOLIDATIONS] # [New in Electra:EIP7251] ``` #### `ExecutionPayload` @@ -348,6 +337,7 @@ class ExecutionPayload(Container): deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] + consolidations: List[ExecutionLayerConsolidations, MAX_CONSOLIDATIONS_PER_PAYLOAD] # [New in Electra:EIP7251] ``` #### `ExecutionPayloadHeader` @@ -375,6 +365,7 @@ class ExecutionPayloadHeader(Container): excess_blob_gas: uint64 deposit_receipts_root: Root # [New in Electra:EIP6110] withdrawal_requests_root: Root # [New in Electra:EIP7002:EIP7251] + consolidations_root: Root # [New in Electra:EIP7251] ``` #### `BeaconState` @@ -1045,7 +1036,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, process_execution_layer_withdrawal_request) for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] - for_ops(body.consolidations, process_consolidation) # [New in Electra:EIP7251] + for_ops(body.execution_payload.consolidations, process_execution_layer_consolidation) # [New in Electra:EIP7251] ``` ##### Attestations @@ -1297,38 +1288,48 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) ###### New `process_consolidation` ```python -def process_consolidation(state: BeaconState, signed_consolidation: SignedConsolidation) -> None: - # If the pending consolidations queue is full, no consolidations are allowed in the block - assert len(state.pending_consolidations) < PENDING_CONSOLIDATIONS_LIMIT - # If there is too little available consolidation churn limit, no consolidations are allowed in the block - assert get_consolidation_churn_limit(state) > MIN_ACTIVATION_BALANCE - consolidation = signed_consolidation.message +def process_execution_layer_consolidation(state: BeaconState, consolidation: ExecutionLayerConsolidation) -> None: + # If the pending consolidations queue is full, consolidation requests are ignored + if len(state.pending_consolidations) == PENDING_CONSOLIDATIONS_LIMIT: + return + # If there is too little available consolidation churn limit, consolidation requests are ignored + if get_consolidation_churn_limit(state) > MIN_ACTIVATION_BALANCE: + return + + validator_pubkeys = [v.pubkey for v in state.validators] + # Verify pubkeys exists + if consolidation.source_pubkey not in validator_pubkeys: + return + if consolidation.target_pubkey not in validator_pubkeys: + return + source_index = ValidatorIndex(validator_pubkeys.index(consolidation.source_pubkey)) + target_index = ValidatorIndex(validator_pubkeys.index(consolidation.target_pubkey)) + source_validator = state.validators[source_index] + target_validator = state.validators[target_validator] + # 
Verify that source != target, so a consolidation cannot be used as an exit. - assert consolidation.source_index != consolidation.target_index + if source_index == target_index: + return + + # Verify source withdrawal credentials + has_correct_credential = has_execution_withdrawal_credential(source_validator) + is_correct_source_address = ( + validator.withdrawal_credentials[12:] == consolidation.source_address + ) + if not (has_correct_credential and is_correct_source_address): + return - source_validator = state.validators[consolidation.source_index] - target_validator = state.validators[consolidation.target_index] # Verify the source and the target are active current_epoch = get_current_epoch(state) - assert is_active_validator(source_validator, current_epoch) - assert is_active_validator(target_validator, current_epoch) + if not is_active_validator(source_validator, current_epoch): + return + if not is_active_validator(target_validator, current_epoch): + return # Verify exits for source and target have not been initiated - assert source_validator.exit_epoch == FAR_FUTURE_EPOCH - assert target_validator.exit_epoch == FAR_FUTURE_EPOCH - # Consolidations must specify an epoch when they become valid; they are not valid before then - assert current_epoch >= consolidation.epoch - - # Verify the source and the target have Execution layer withdrawal credentials - assert has_execution_withdrawal_credential(source_validator) - assert has_execution_withdrawal_credential(target_validator) - # Verify the same withdrawal address - assert source_validator.withdrawal_credentials[12:] == target_validator.withdrawal_credentials[12:] - - # Verify consolidation is signed by the source and the target - domain = compute_domain(DOMAIN_CONSOLIDATION, genesis_validators_root=state.genesis_validators_root) - signing_root = compute_signing_root(consolidation, domain) - pubkeys = [source_validator.pubkey, target_validator.pubkey] - assert bls.FastAggregateVerify(pubkeys, signing_root, signed_consolidation.signature) + if source_validator.exit_epoch != FAR_FUTURE_EPOCH: + return + if target_validator.exit_epoch != FAR_FUTURE_EPOCH: + return # Initiate source validator exit and append pending consolidation source_validator.exit_epoch = compute_consolidation_epoch_and_update_churn( From 204b39dd74b8c430d85a01f62979fe034f7eb076 Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 11:30:18 +0200 Subject: [PATCH 32/89] ensure that target has execution credentials --- specs/electra/beacon-chain.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 3a0deebb06..629f33a2b4 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1319,6 +1319,10 @@ def process_execution_layer_consolidation(state: BeaconState, consolidation: Exe if not (has_correct_credential and is_correct_source_address): return + # Verify that target has execution withdrawal credentials + if not has_execution_withdrawal_credential(target_validator): + return + # Verify the source and the target are active current_epoch = get_current_epoch(state) if not is_active_validator(source_validator, current_epoch): From 74eaf576683685b5afa8d644e9f37fd729c18332 Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 13:10:46 +0200 Subject: [PATCH 33/89] add consolidations_root where needed --- specs/electra/beacon-chain.md | 1 + specs/electra/fork.md | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md 
b/specs/electra/beacon-chain.md index 629f33a2b4..da0bc57078 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1004,6 +1004,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi excess_blob_gas=payload.excess_blob_gas, deposit_receipts_root=hash_tree_root(payload.deposit_receipts), # [New in Electra:EIP6110] withdrawal_requests_root=hash_tree_root(payload.withdrawal_requests), # [New in Electra:EIP7002:EIP7251] + consolidations_root=hash_tree_root(payload.consolidations), # [New in Electra:EIP7251] ) ``` diff --git a/specs/electra/fork.md b/specs/electra/fork.md index ffd5f21571..2bf39029dd 100644 --- a/specs/electra/fork.md +++ b/specs/electra/fork.md @@ -91,7 +91,8 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState: blob_gas_used=pre.latest_execution_payload_header.blob_gas_used, excess_blob_gas=pre.latest_execution_payload_header.excess_blob_gas, deposit_receipts_root=Root(), # [New in Electra:EIP6110] - withdrawal_requests_root=Root(), # [New in Electra:EIP7002], + withdrawal_requests_root=Root(), # [New in Electra:EIP7002] + consolidations_root=Root(), # [New in Electra:EIP7251] ) exit_epochs = [v.exit_epoch for v in pre.validators if v.exit_epoch != FAR_FUTURE_EPOCH] From 8a6ca1c568eb214782cf8396c8e5c4b5116bf102 Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 13:11:36 +0200 Subject: [PATCH 34/89] fix consolidation churn limit check --- specs/electra/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index da0bc57078..18dca832ef 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1294,7 +1294,7 @@ def process_execution_layer_consolidation(state: BeaconState, consolidation: Exe if len(state.pending_consolidations) == PENDING_CONSOLIDATIONS_LIMIT: return # If there is too little available consolidation churn limit, consolidation requests are ignored - if get_consolidation_churn_limit(state) > MIN_ACTIVATION_BALANCE: + if get_consolidation_churn_limit(state) <= MIN_ACTIVATION_BALANCE: return validator_pubkeys = [v.pubkey for v in state.validators] From e030f2cd60fa6a7fa92ed63827e6cc82d350b6de Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 13:45:17 +0200 Subject: [PATCH 35/89] Small fixes Co-authored-by: Mikhail Kalinin --- specs/electra/beacon-chain.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 18dca832ef..19e66d76d9 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1315,7 +1315,7 @@ def process_execution_layer_consolidation(state: BeaconState, consolidation: Exe # Verify source withdrawal credentials has_correct_credential = has_execution_withdrawal_credential(source_validator) is_correct_source_address = ( - validator.withdrawal_credentials[12:] == consolidation.source_address + source_validator.withdrawal_credentials[12:] == consolidation.source_address ) if not (has_correct_credential and is_correct_source_address): return @@ -1344,8 +1344,8 @@ def process_execution_layer_consolidation(state: BeaconState, consolidation: Exe source_validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY ) state.pending_consolidations.append(PendingConsolidation( - source_index=consolidation.source_index, - target_index=consolidation.target_index + source_index=source_index, + target_index=target_index )) ``` From 
901a2491b46d61629ac9e250d7083988887414c7 Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 20:33:37 +0200 Subject: [PATCH 36/89] fix typo --- specs/electra/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 19e66d76d9..daa28a491e 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -337,7 +337,7 @@ class ExecutionPayload(Container): deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] - consolidations: List[ExecutionLayerConsolidations, MAX_CONSOLIDATIONS_PER_PAYLOAD] # [New in Electra:EIP7251] + consolidations: List[ExecutionLayerConsolidation, MAX_CONSOLIDATIONS_PER_PAYLOAD] # [New in Electra:EIP7251] ``` #### `ExecutionPayloadHeader` @@ -1306,7 +1306,7 @@ def process_execution_layer_consolidation(state: BeaconState, consolidation: Exe source_index = ValidatorIndex(validator_pubkeys.index(consolidation.source_pubkey)) target_index = ValidatorIndex(validator_pubkeys.index(consolidation.target_pubkey)) source_validator = state.validators[source_index] - target_validator = state.validators[target_validator] + target_validator = state.validators[target_index] # Verify that source != target, so a consolidation cannot be used as an exit. if source_index == target_index: From c492d6198a61a06072c097f2935034a6fb8245e9 Mon Sep 17 00:00:00 2001 From: fradamt Date: Thu, 18 Apr 2024 21:18:22 +0200 Subject: [PATCH 37/89] adjust consolidations tests for EL consolidations --- ..._process_execution_layer_consolidation.py} | 713 ++++++++---------- .../eth2spec/test/helpers/consolidations.py | 61 -- tests/generators/operations/main.py | 2 +- 3 files changed, 327 insertions(+), 449 deletions(-) rename tests/core/pyspec/eth2spec/test/electra/block_processing/{test_process_consolidation.py => test_process_execution_layer_consolidation.py} (50%) delete mode 100644 tests/core/pyspec/eth2spec/test/helpers/consolidations.py diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py similarity index 50% rename from tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py rename to tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py index a1267cb716..c6bc071b20 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py @@ -2,18 +2,14 @@ from eth2spec.test.context import ( with_electra_and_later, with_presets, - always_bls, spec_test, single_phase, with_custom_state, scaled_churn_balances_exceed_activation_exit_churn_limit, default_activation_threshold, + spec_state_test, ) from eth2spec.test.helpers.keys import pubkey_to_privkey -from eth2spec.test.helpers.consolidations import ( - run_consolidation_processing, - sign_consolidation, -) from eth2spec.test.helpers.withdrawals import ( set_eth1_withdrawal_credential_with_balance, set_compounding_withdrawal_credential, @@ -37,22 +33,21 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): current_epoch = spec.get_current_epoch(state) source_index = 
spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) + + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) # Set earliest consolidation epoch to the expected exit epoch expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) @@ -61,7 +56,7 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): # Set the consolidation balance to consume equal to churn limit state.consolidation_balance_to_consume = consolidation_churn_limit - yield from run_consolidation_processing(spec, state, signed_consolidation) + yield from run_consolidation_processing(spec, state, consolidation) # Check consolidation churn is decremented correctly assert ( @@ -87,23 +82,23 @@ def test_basic_consolidation_in_new_consolidation_epoch(spec, state): current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) + + # Set target to eth1 credentials set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, - ) - yield from run_consolidation_processing(spec, state, signed_consolidation) + yield from run_consolidation_processing(spec, state, consolidation) expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) # Check consolidation churn is decremented correctly @@ -130,23 +125,22 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): current_epoch = spec.get_current_epoch(state) source_index = 
spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + # Set earliest consolidation epoch to the expected exit epoch expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) state.earliest_consolidation_epoch = expected_exit_epoch @@ -154,7 +148,7 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): preexisting_churn = 2 * spec.MIN_ACTIVATION_BALANCE state.consolidation_balance_to_consume = preexisting_churn - yield from run_consolidation_processing(spec, state, signed_consolidation) + yield from run_consolidation_processing(spec, state, consolidation) # Check consolidation churn is decremented correctly assert ( @@ -178,22 +172,21 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) + + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) # Set earliest consolidation epoch to the first available epoch state.earliest_consolidation_epoch = spec.compute_activation_exit_epoch( @@ -203,7 +196,7 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): preexisting_churn = spec.MIN_ACTIVATION_BALANCE - spec.EFFECTIVE_BALANCE_INCREMENT 
state.consolidation_balance_to_consume = preexisting_churn - yield from run_consolidation_processing(spec, state, signed_consolidation) + yield from run_consolidation_processing(spec, state, consolidation) # It takes one more epoch to process the consolidation due to insufficient churn expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + 1 @@ -227,29 +220,30 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): @single_phase def test_basic_consolidation_with_compounding_credential(spec, state): # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_compounding_withdrawal_credential(spec, state, source_index) - set_compounding_withdrawal_credential(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) - yield from run_consolidation_processing(spec, state, signed_consolidation) + + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + # Set the consolidation balance to consume equal to churn limit + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + state.consolidation_balance_to_consume = consolidation_churn_limit + + yield from run_consolidation_processing(spec, state, consolidation) expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) # Check consolidation churn is decremented correctly @@ -271,32 +265,32 @@ def test_basic_consolidation_with_compounding_credential(spec, state): @single_phase def test_consolidation_churn_limit_balance(spec, state): # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) current_epoch = spec.get_current_epoch(state) - source_index = spec.get_active_validator_indices(state, current_epoch)[0] - source_validator = state.validators[source_index] - source_validator.effective_balance = consolidation_churn_limit - # Churn limit increases due to higher total balance - updated_consolidation_churn_limit = spec.get_consolidation_churn_limit(state) target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # 
Set source and target withdrawal credentials to the same eth1 credential - set_compounding_withdrawal_credential(spec, state, source_index) - set_compounding_withdrawal_credential(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) - yield from run_consolidation_processing(spec, state, signed_consolidation) + + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + # Set source effective balance to consolidation churn limit + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + state.validators[source_index].effective_balance = consolidation_churn_limit + # Churn limit increases due to higher total balance + updated_consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + + yield from run_consolidation_processing(spec, state, consolidation) # validator's effective balance fits into the churn, exit as soon as possible expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) @@ -319,35 +313,35 @@ def test_consolidation_churn_limit_balance(spec, state): @single_phase def test_consolidation_balance_larger_than_churn_limit(spec, state): # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) current_epoch = spec.get_current_epoch(state) - source_index = spec.get_active_validator_indices(state, current_epoch)[0] - # Set source balance higher than consolidation churn limit - state.validators[source_index].effective_balance = 2 * consolidation_churn_limit target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_compounding_withdrawal_credential(spec, state, source_index) - set_compounding_withdrawal_credential(spec, state, target_index) + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) + + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + # Set source effective balance to 2 * consolidation churn limit + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + state.validators[source_index].effective_balance = 2 * consolidation_churn_limit # Consolidation churn limit increases due to higher total balance - new_churn_limit = spec.get_consolidation_churn_limit(state) - remainder = state.validators[source_index].effective_balance % 
new_churn_limit - expected_balance = new_churn_limit - remainder + updated_consolidation_churn_limit = spec.get_consolidation_churn_limit(state) + remainder = state.validators[source_index].effective_balance % updated_consolidation_churn_limit + expected_balance = updated_consolidation_churn_limit - remainder - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, - ) - yield from run_consolidation_processing(spec, state, signed_consolidation) + yield from run_consolidation_processing(spec, state, consolidation) expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + 1 # Check consolidation churn is decremented correctly @@ -366,35 +360,34 @@ def test_consolidation_balance_larger_than_churn_limit(spec, state): @single_phase def test_consolidation_balance_through_two_churn_epochs(spec, state): # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) current_epoch = spec.get_current_epoch(state) - source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_compounding_withdrawal_credential(spec, state, source_index) - set_compounding_withdrawal_credential(spec, state, target_index) + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with source address + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) - # Set source balance higher than consolidation churn limit + # Set target to eth1 credentials + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + # Set source balance higher to 3 * consolidation churn limit + consolidation_churn_limit = spec.get_consolidation_churn_limit(state) state.validators[source_index].effective_balance = 3 * consolidation_churn_limit new_churn_limit = spec.get_consolidation_churn_limit(state) remainder = state.validators[source_index].effective_balance % new_churn_limit expected_balance = new_churn_limit - remainder - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, - ) - yield from run_consolidation_processing(spec, state, signed_consolidation) + yield from run_consolidation_processing(spec, state, consolidation) # when exiting a multiple of the churn limit greater than 1, an extra exit epoch is added expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) + 2 @@ -415,25 +408,22 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): @single_phase def test_invalid_source_equals_target(spec, state): current_epoch = spec.get_current_epoch(state) - validator_index = spec.get_active_validator_indices(state, current_epoch)[0] - validator_privkey = 
pubkey_to_privkey[state.validators[validator_index].pubkey] - - # Set withdrawal credentials to eth1 - set_eth1_withdrawal_credential_with_balance(spec, state, validator_index) - - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, - source_index=validator_index, - target_index=validator_index, - ), - validator_privkey, - validator_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + + # Set source to eth1 credentials + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation from source to source + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[source_index].pubkey, ) + yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -449,52 +439,52 @@ def test_invalid_exceed_pending_consolidations_limit(spec, state): state.pending_consolidations = [ spec.PendingConsolidation(source_index=0, target_index=1) ] * spec.PENDING_CONSOLIDATIONS_LIMIT + + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - threshold_fn=default_activation_threshold, -) -@spec_test +@spec_state_test @single_phase def test_invalid_not_enough_consolidation_churn_available(spec, state): state.validators = state.validators[0:2] state.pending_consolidations = [ spec.PendingConsolidation(source_index=0, target_index=1) ] + + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, 
source_index=0, target_index=1), - source_privkey, - target_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -507,22 +497,26 @@ def test_invalid_not_enough_consolidation_churn_available(spec, state): @spec_test @single_phase def test_invalid_exited_source(spec, state): + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + # exit source - spec.initiate_validator_exit(state, 0) + spec.initiate_validator_exit(state, source_index) + yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -535,23 +529,24 @@ def test_invalid_exited_source(spec, state): @spec_test @single_phase def test_invalid_exited_target(spec, state): + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) # exit target 
spec.initiate_validator_exit(state, 1) yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -564,22 +559,26 @@ def test_invalid_exited_target(spec, state): @spec_test @single_phase def test_invalid_inactive_source(spec, state): + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + # set source validator as not yet activated - state.validators[0].activation_epoch = spec.FAR_FUTURE_EPOCH + state.validators[source_index].activation_epoch = spec.FAR_FUTURE_EPOCH + yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -592,23 +591,25 @@ def test_invalid_inactive_source(spec, state): @spec_test @single_phase def test_invalid_inactive_target(spec, state): + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + # set target validator as not yet activated state.validators[1].activation_epoch = spec.FAR_FUTURE_EPOCH yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -620,49 +621,23 @@ def test_invalid_inactive_target(spec, state): ) @spec_test @single_phase -def test_invalid_no_execution_withdrawal_credential(spec, state): +def test_invalid_no_source_execution_withdrawal_credential(spec, state): + # Set up a correct consolidation, but source does not have + # an 
execution withdrawal credential current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, - ) - yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, ) - - -@with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - threshold_fn=default_activation_threshold, -) -@spec_test -@single_phase -def test_invalid_different_credentials(spec, state): - current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, - ) - # Set source and target withdrawal credentials to different eth1 credentials - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1, address=b"\x10" * 20) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) - @with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( @@ -671,55 +646,24 @@ def test_invalid_different_credentials(spec, state): ) @spec_test @single_phase -@always_bls -def test_invalid_source_signature(spec, state): - # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn +def test_invalid_no_target_execution_withdrawal_credential(spec, state): + # Set up a correct consolidation, but target does not have + # an execution withdrawal credential current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address ) - - # Set earliest consolidation epoch to the expected exit epoch - expected_exit_epoch = 
spec.compute_activation_exit_epoch(current_epoch) - state.earliest_consolidation_epoch = expected_exit_epoch - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit - - current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, - ) - - # Change the pubkey of the source validator, invalidating its signature - state.validators[0].pubkey = state.validators[1].pubkey - + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False + spec, state, consolidation, success=False ) @@ -731,81 +675,76 @@ def test_invalid_source_signature(spec, state): ) @spec_test @single_phase -@always_bls -def test_invalid_target_signature(spec, state): - # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn +def test_invalid_different_credentials(spec, state): current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] - source_privkey = pubkey_to_privkey[state.validators[source_index].pubkey] - target_privkey = pubkey_to_privkey[state.validators[target_index].pubkey] - - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, source_index) + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=state.validators[target_index].pubkey, + ) set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation( - epoch=current_epoch, source_index=source_index, target_index=target_index - ), - source_privkey, - target_privkey, + + yield from run_consolidation_processing( + spec, state, consolidation, success=False ) - # Set earliest consolidation epoch to the expected exit epoch - expected_exit_epoch = spec.compute_activation_exit_epoch(current_epoch) - state.earliest_consolidation_epoch = expected_exit_epoch - consolidation_churn_limit = spec.get_consolidation_churn_limit(state) - # Set the consolidation balance to consume equal to churn limit - state.consolidation_balance_to_consume = consolidation_churn_limit - current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials 
to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - signed_consolidation = sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch, source_index=0, target_index=1), - source_privkey, - target_privkey, - ) - - # Change the pubkey of the target validator, invalidating its signature - state.validators[1].pubkey = state.validators[2].pubkey - yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False - ) +def run_consolidation_processing(spec, state, consolidation, success=True): + """ + Run ``process_consolidation``, yielding: + - pre-state ('pre') + - consolidation ('consolidation') + - post-state ('post'). + If ``valid == False``, run expecting ``AssertionError`` + """ + validator_pubkeys = [v.pubkey for v in state.validators] + source_index = spec.ValidatorIndex(validator_pubkeys.index(consolidation.source_pubkey)) + target_index = spec.ValidatorIndex(validator_pubkeys.index(consolidation.target_pubkey)) + source_validator = state.validators[source_index] + target_validator = state.validators[target_index] + + yield 'pre', state + yield 'consolidation', consolidation + + pre_exit_epoch_source = source_validator.exit_epoch + pre_exit_epoch_target = target_validator.exit_epoch + pre_pending_consolidations = state.pending_consolidations.copy() + pre_state = state.copy() + + spec.process_execution_layer_consolidation(state, consolidation) + + yield 'post', state + + if success: + # Check source and target have execution credentials + assert spec.has_execution_withdrawal_credential(source_validator) + assert spec.has_execution_withdrawal_credential(target_validator) + # Check source address in the consolidation fits the withdrawal credentials + assert source_validator.withdrawal_credentials[12:] == consolidation.source_address + # Check source and target are not the same + assert source_index != target_index + # Check source and target were not exiting + assert pre_exit_epoch_source == spec.FAR_FUTURE_EPOCH + assert pre_exit_epoch_target == spec.FAR_FUTURE_EPOCH + # Check source is now exiting + assert state.validators[source_index].exit_epoch < spec.FAR_FUTURE_EPOCH + # Check that the exit epoch matches earliest_consolidation_epoch + assert state.validators[source_index].exit_epoch == state.earliest_consolidation_epoch + # Check that the correct consolidation has been appended + expected_new_pending_consolidation = spec.PendingConsolidation( + source_index=source_index, + target_index=target_index, + ) + assert state.pending_consolidations == pre_pending_consolidations + [expected_new_pending_consolidation] + if not success: + assert pre_state == state -@with_electra_and_later -@with_presets([MINIMAL], "need sufficient consolidation churn limit") -@with_custom_state( - balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, - threshold_fn=default_activation_threshold, -) -@spec_test -@single_phase -def test_invalid_before_specified_epoch(spec, state): - current_epoch = spec.get_current_epoch(state) - source_privkey = pubkey_to_privkey[state.validators[0].pubkey] - target_privkey = pubkey_to_privkey[state.validators[1].pubkey] - # Set source and target withdrawal credentials to the same eth1 credential - set_eth1_withdrawal_credential_with_balance(spec, state, 0) - set_eth1_withdrawal_credential_with_balance(spec, state, 1) - # set epoch=current_epoch + 1, so it's too early to process it - signed_consolidation = 
sign_consolidation( - spec, - state, - spec.Consolidation(epoch=current_epoch + 1, source_index=0, target_index=1), - source_privkey, - target_privkey, - ) - yield from run_consolidation_processing( - spec, state, signed_consolidation, valid=False - ) diff --git a/tests/core/pyspec/eth2spec/test/helpers/consolidations.py b/tests/core/pyspec/eth2spec/test/helpers/consolidations.py deleted file mode 100644 index ccdcb9e069..0000000000 --- a/tests/core/pyspec/eth2spec/test/helpers/consolidations.py +++ /dev/null @@ -1,61 +0,0 @@ -from eth2spec.utils import bls -from eth2spec.test.context import expect_assertion_error -from eth2spec.test.helpers.keys import privkeys - - -def prepare_signed_consolidations(spec, state, index_pairs, fork_version=None): - def create_signed_consolidation(source_index, target_index): - consolidation = spec.Consolidation( - epoch=spec.get_current_epoch(state), - source_index=source_index, - target_index=target_index, - ) - return sign_consolidation(spec, state, consolidation, privkeys[source_index], privkeys[target_index], - fork_version=fork_version) - - return [create_signed_consolidation(source_index, target_index) for (source_index, target_index) in index_pairs] - - -def sign_consolidation(spec, state, consolidation, source_privkey, target_privkey, fork_version=None): - domain = spec.compute_domain(spec.DOMAIN_CONSOLIDATION, genesis_validators_root=state.genesis_validators_root) - signing_root = spec.compute_signing_root(consolidation, domain) - return spec.SignedConsolidation( - message=consolidation, - signature=bls.Aggregate([bls.Sign(source_privkey, signing_root), bls.Sign(target_privkey, signing_root)]) - ) - - -def run_consolidation_processing(spec, state, signed_consolidation, valid=True): - """ - Run ``process_consolidation``, yielding: - - pre-state ('pre') - - consolidation ('consolidation') - - post-state ('post'). 
- If ``valid == False``, run expecting ``AssertionError`` - """ - - source_validator = state.validators[signed_consolidation.message.source_index] - target_validator = state.validators[signed_consolidation.message.target_index] - - yield 'pre', state - yield 'consolidation', signed_consolidation - - if not valid: - expect_assertion_error(lambda: spec.process_consolidation(state, signed_consolidation)) - yield 'post', None - return - - pre_exit_epoch = source_validator.exit_epoch - - spec.process_consolidation(state, signed_consolidation) - - yield 'post', state - - assert source_validator.withdrawal_credentials[1:] == target_validator.withdrawal_credentials[1:] - assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH - assert state.validators[signed_consolidation.message.source_index].exit_epoch < spec.FAR_FUTURE_EPOCH - assert state.validators[signed_consolidation.message.source_index].exit_epoch == state.earliest_consolidation_epoch - assert state.pending_consolidations[len(state.pending_consolidations) - 1] == spec.PendingConsolidation( - source_index=signed_consolidation.message.source_index, - target_index=signed_consolidation.message.target_index - ) diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 0d203fca6f..fed12753b2 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -45,7 +45,7 @@ _new_electra_mods = {key: 'eth2spec.test.electra.block_processing.test_process_' + key for key in [ 'attestation', - 'consolidation', + 'execution_layer_consolidation', 'deposit_receipt', 'execution_layer_withdrawal_request', 'voluntary_exit' From 17c51488eb86e9b3604bc389dda60fc7abf29c9d Mon Sep 17 00:00:00 2001 From: fradamt Date: Fri, 19 Apr 2024 08:58:28 +0200 Subject: [PATCH 38/89] add tests for remaining failure cases --- ...t_process_execution_layer_consolidation.py | 110 ++++++++++++++---- 1 file changed, 86 insertions(+), 24 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py index c6bc071b20..419d13ed14 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py @@ -64,7 +64,7 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): == consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE ) # Check exit epoch - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch @with_electra_and_later @@ -109,7 +109,7 @@ def test_basic_consolidation_in_new_consolidation_epoch(spec, state): == consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE ) # Check exit epochs - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch @with_electra_and_later @@ -156,7 +156,7 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): == preexisting_churn - spec.MIN_ACTIVATION_BALANCE ) # Check exit epoch - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch @with_electra_and_later @@ -207,7 +207,7 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): state.consolidation_balance_to_consume == 
consolidation_churn_limit - remainder ) # Check exit epoch - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch @with_electra_and_later @@ -218,7 +218,7 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): ) @spec_test @single_phase -def test_basic_consolidation_with_compounding_credential(spec, state): +def test_basic_consolidation_with_compounding_credentials(spec, state): # This state has 256 validators each with 32 ETH in MINIMAL preset, 128 ETH consolidation churn current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -226,7 +226,7 @@ def test_basic_consolidation_with_compounding_credential(spec, state): # Set source to eth1 credentials source_address = b"\x22" * 20 - set_eth1_withdrawal_credential_with_balance( + set_compounding_withdrawal_credential( spec, state, source_index, address=source_address ) # Make consolidation with source address @@ -237,7 +237,7 @@ def test_basic_consolidation_with_compounding_credential(spec, state): ) # Set target to eth1 credentials - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + set_compounding_withdrawal_credential(spec, state, target_index) # Set the consolidation balance to consume equal to churn limit consolidation_churn_limit = spec.get_consolidation_churn_limit(state) @@ -252,7 +252,8 @@ def test_basic_consolidation_with_compounding_credential(spec, state): == consolidation_churn_limit - spec.MIN_ACTIVATION_BALANCE ) # Check exit epoch - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch + @with_electra_and_later @@ -300,7 +301,7 @@ def test_consolidation_churn_limit_balance(spec, state): == updated_consolidation_churn_limit - consolidation_churn_limit ) # Check exit epoch - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch @with_electra_and_later @@ -347,7 +348,7 @@ def test_consolidation_balance_larger_than_churn_limit(spec, state): # Check consolidation churn is decremented correctly assert state.consolidation_balance_to_consume == expected_balance # Check exit epoch - assert state.validators[0].exit_epoch == expected_exit_epoch + assert state.validators[source_index].exit_epoch == expected_exit_epoch @with_electra_and_later @@ -667,7 +668,6 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): ) -@with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, @@ -675,7 +675,8 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): ) @spec_test @single_phase -def test_invalid_different_credentials(spec, state): +def test_invalid_incorrect_source_address(spec, state): + # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] @@ -683,8 +684,9 @@ def test_invalid_different_credentials(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) + # Make consolidation with different source address consolidation = spec.ExecutionLayerConsolidation( - source_address=source_address, + 
source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, ) @@ -696,6 +698,64 @@ def test_invalid_different_credentials(spec, state): ) +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase +def test_invalid_unknown_source_pubkey(spec, state): + # Set up an otherwise correct consolidation + current_epoch = spec.get_current_epoch(state) + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with different source pubkey + consolidation = spec.ExecutionLayerConsolidation( + source_address=source_address, + source_pubkey=b"\x00" * 48, + target_pubkey=state.validators[target_index].pubkey, + ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + yield from run_consolidation_processing( + spec, state, consolidation, success=False + ) + + +@with_presets([MINIMAL], "need sufficient consolidation churn limit") +@with_custom_state( + balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, + threshold_fn=default_activation_threshold, +) +@spec_test +@single_phase +def test_invalid_unknown_target_pubkey(spec, state): + # Set up an otherwise correct consolidation + current_epoch = spec.get_current_epoch(state) + source_index = spec.get_active_validator_indices(state, current_epoch)[0] + target_index = spec.get_active_validator_indices(state, current_epoch)[1] + source_address = b"\x22" * 20 + set_eth1_withdrawal_credential_with_balance( + spec, state, source_index, address=source_address + ) + # Make consolidation with different target pubkey + consolidation = spec.ExecutionLayerConsolidation( + source_address=b"\x33" * 20, + source_pubkey=state.validators[source_index].pubkey, + target_pubkey=b"\x00" * 48, + ) + set_eth1_withdrawal_credential_with_balance(spec, state, target_index) + + yield from run_consolidation_processing( + spec, state, consolidation, success=False + ) + + def run_consolidation_processing(spec, state, consolidation, success=True): """ @@ -706,20 +766,22 @@ def run_consolidation_processing(spec, state, consolidation, success=True): If ``valid == False``, run expecting ``AssertionError`` """ - validator_pubkeys = [v.pubkey for v in state.validators] - source_index = spec.ValidatorIndex(validator_pubkeys.index(consolidation.source_pubkey)) - target_index = spec.ValidatorIndex(validator_pubkeys.index(consolidation.target_pubkey)) - source_validator = state.validators[source_index] - target_validator = state.validators[target_index] + if success: + validator_pubkeys = [v.pubkey for v in state.validators] + source_index = spec.ValidatorIndex(validator_pubkeys.index(consolidation.source_pubkey)) + target_index = spec.ValidatorIndex(validator_pubkeys.index(consolidation.target_pubkey)) + source_validator = state.validators[source_index] + target_validator = state.validators[target_index] + pre_exit_epoch_source = source_validator.exit_epoch + pre_exit_epoch_target = target_validator.exit_epoch + pre_pending_consolidations = state.pending_consolidations.copy() + else: + pre_state = state.copy() + yield 'pre', state yield 'consolidation', 
consolidation - pre_exit_epoch_source = source_validator.exit_epoch - pre_exit_epoch_target = target_validator.exit_epoch - pre_pending_consolidations = state.pending_consolidations.copy() - pre_state = state.copy() - spec.process_execution_layer_consolidation(state, consolidation) yield 'post', state @@ -745,6 +807,6 @@ def run_consolidation_processing(spec, state, consolidation, success=True): target_index=target_index, ) assert state.pending_consolidations == pre_pending_consolidations + [expected_new_pending_consolidation] - if not success: + else: assert pre_state == state From 7c4b32a7177315f8b4ccda5e23d4efd0aebd654a Mon Sep 17 00:00:00 2001 From: fradamt Date: Tue, 21 May 2024 17:47:57 +0200 Subject: [PATCH 39/89] renaming consolidations to consolidation_requests --- presets/mainnet/electra.yaml | 2 +- presets/minimal/electra.yaml | 2 +- specs/electra/beacon-chain.md | 34 ++++++++++++++++------------- specs/electra/fork.md | 2 +- tests/generators/operations/main.py | 2 +- 5 files changed, 23 insertions(+), 19 deletions(-) diff --git a/presets/mainnet/electra.yaml b/presets/mainnet/electra.yaml index 2c8d642bf7..8aa663cec8 100644 --- a/presets/mainnet/electra.yaml +++ b/presets/mainnet/electra.yaml @@ -30,7 +30,7 @@ MAX_ATTESTER_SLASHINGS_ELECTRA: 1 # `uint64(2**3)` (= 8) MAX_ATTESTATIONS_ELECTRA: 8 # `uint64(2**0)` (= 1) -MAX_CONSOLIDATIONS_PER_PAYLOAD: 1 +MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 1 # Execution # --------------------------------------------------------------- diff --git a/presets/minimal/electra.yaml b/presets/minimal/electra.yaml index beb5265746..a5897f340a 100644 --- a/presets/minimal/electra.yaml +++ b/presets/minimal/electra.yaml @@ -30,7 +30,7 @@ MAX_ATTESTER_SLASHINGS_ELECTRA: 1 # `uint64(2**3)` (= 8) MAX_ATTESTATIONS_ELECTRA: 8 # `uint64(2**0)` (= 1) -MAX_CONSOLIDATIONS_PER_PAYLOAD: 1 +MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 1 # Execution # --------------------------------------------------------------- diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index daa28a491e..4bc2cfaf06 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -164,16 +164,16 @@ The following values are (non-configurable) constants used throughout the specif | Name | Value | | - | - | -| `MAX_CONSOLIDATIONS_PER_PAYLOAD` | `uint64(1)` | +| `MAX_ATTESTER_SLASHINGS_ELECTRA` | `2**0` (= 1) | *[New in Electra:EIP7549]* | +| `MAX_ATTESTATIONS_ELECTRA` | `2**3` (= 8) | *[New in Electra:EIP7549]* | ### Execution | Name | Value | Description | | - | - | - | | `MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD` | `uint64(2**13)` (= 8,192) | *[New in Electra:EIP6110]* Maximum number of deposit receipts allowed in each payload | -| `MAX_ATTESTER_SLASHINGS_ELECTRA` | `2**0` (= 1) | *[New in Electra:EIP7549]* | -| `MAX_ATTESTATIONS_ELECTRA` | `2**3` (= 8) | *[New in Electra:EIP7549]* | | `MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD` | `uint64(2**4)` (= 16)| *[New in Electra:EIP7002]* Maximum number of execution layer withdrawal requests in each payload | +| `MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD` | `uint64(1)` (= 1) | *[New in Electra:EIP7002]* Maximum number of execution layer consolidation requests in each payload | ### Withdrawals processing @@ -238,12 +238,12 @@ class ExecutionLayerWithdrawalRequest(Container): amount: Gwei ``` -#### `ExecutionLayerConsolidation` +#### `ExecutionLayerConsolidationRequest` *Note*: The container is new in EIP7251. 
```python -class ExecutionLayerConsolidation(Container): +class ExecutionLayerConsolidationRequest(Container): source_address: ExecutionAddress source_pubkey: BLSPubkey target_pubkey: BLSPubkey @@ -337,7 +337,7 @@ class ExecutionPayload(Container): deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] - consolidations: List[ExecutionLayerConsolidation, MAX_CONSOLIDATIONS_PER_PAYLOAD] # [New in Electra:EIP7251] + consolidations_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP7251] ``` #### `ExecutionPayloadHeader` @@ -365,7 +365,7 @@ class ExecutionPayloadHeader(Container): excess_blob_gas: uint64 deposit_receipts_root: Root # [New in Electra:EIP6110] withdrawal_requests_root: Root # [New in Electra:EIP7002:EIP7251] - consolidations_root: Root # [New in Electra:EIP7251] + consolidations_requests_root: Root # [New in Electra:EIP7251] ``` #### `BeaconState` @@ -1037,7 +1037,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, process_execution_layer_withdrawal_request) for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] - for_ops(body.execution_payload.consolidations, process_execution_layer_consolidation) # [New in Electra:EIP7251] + for_ops(body.execution_payload.consolidations_requests, process_execution_layer_consolidation_request) # [New in Electra:EIP7251] ``` ##### Attestations @@ -1284,12 +1284,14 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) ) ``` -##### Consolidations +##### Execution layer consolidation requests -###### New `process_consolidation` +###### New `process_execution_layer_consolidation_requests` ```python -def process_execution_layer_consolidation(state: BeaconState, consolidation: ExecutionLayerConsolidation) -> None: +def process_execution_layer_consolidation_requests( + state: BeaconState, + execution_layer_consolidation_request: ExecutionLayerConsolidationRequest) -> None: # If the pending consolidations queue is full, consolidation requests are ignored if len(state.pending_consolidations) == PENDING_CONSOLIDATIONS_LIMIT: return @@ -1299,12 +1301,14 @@ def process_execution_layer_consolidation(state: BeaconState, consolidation: Exe validator_pubkeys = [v.pubkey for v in state.validators] # Verify pubkeys exists - if consolidation.source_pubkey not in validator_pubkeys: + request_source_pubkey = execution_layer_consolidation_request.source_pubkey + request_target_pubkey = execution_layer_consolidation_request.target_pubkey + if request_source_pubkey not in validator_pubkeys: return - if consolidation.target_pubkey not in validator_pubkeys: + if request_target_pubkey not in validator_pubkeys: return - source_index = ValidatorIndex(validator_pubkeys.index(consolidation.source_pubkey)) - target_index = ValidatorIndex(validator_pubkeys.index(consolidation.target_pubkey)) + source_index = ValidatorIndex(validator_pubkeys.index(request_source_pubkey)) + target_index = ValidatorIndex(validator_pubkeys.index(request_target_pubkey)) source_validator = state.validators[source_index] target_validator = state.validators[target_index] diff --git a/specs/electra/fork.md b/specs/electra/fork.md index 2bf39029dd..a65bdebb53 100644 --- 
a/specs/electra/fork.md +++ b/specs/electra/fork.md @@ -92,7 +92,7 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState: excess_blob_gas=pre.latest_execution_payload_header.excess_blob_gas, deposit_receipts_root=Root(), # [New in Electra:EIP6110] withdrawal_requests_root=Root(), # [New in Electra:EIP7002] - consolidations_root=Root(), # [New in Electra:EIP7251] + consolidations_requests_root=Root(), # [New in Electra:EIP7251] ) exit_epochs = [v.exit_epoch for v in pre.validators if v.exit_epoch != FAR_FUTURE_EPOCH] diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index fed12753b2..85a5b64e3c 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -45,7 +45,7 @@ _new_electra_mods = {key: 'eth2spec.test.electra.block_processing.test_process_' + key for key in [ 'attestation', - 'execution_layer_consolidation', + 'execution_layer_consolidation_requests', 'deposit_receipt', 'execution_layer_withdrawal_request', 'voluntary_exit' From dc2a2bd85ae1e8c7c2e426faba3167069ef548a5 Mon Sep 17 00:00:00 2001 From: fradamt Date: Tue, 21 May 2024 18:10:58 +0200 Subject: [PATCH 40/89] minor fixes, doctoc --- README.md | 18 ++++++++ SECURITY.md | 10 +++++ configs/README.md | 10 +++++ docker/README.md | 8 ++++ docs/docs/templates/beacon-chain-template.md | 18 ++++++++ docs/light-client/index.md | 8 ++++ presets/README.md | 10 +++++ solidity_deposit_contract/README.md | 12 ++++++ specs/electra/beacon-chain.md | 11 +++-- tests/README.md | 17 ++++++++ tests/core/pyspec/README.md | 17 ++++++++ tests/core/pyspec/eth2spec/config/README.md | 9 ++++ .../pyspec/eth2spec/gen_helpers/README.md | 11 +++++ ...t_process_execution_layer_consolidation.py | 42 +++++++++---------- tests/formats/README.md | 29 +++++++++++++ tests/formats/bls/README.md | 8 ++++ tests/formats/bls/aggregate.md | 10 +++++ tests/formats/bls/aggregate_verify.md | 10 +++++ tests/formats/bls/eth_aggregate_pubkeys.md | 10 +++++ .../formats/bls/eth_fast_aggregate_verify.md | 10 +++++ tests/formats/bls/fast_aggregate_verify.md | 10 +++++ tests/formats/bls/sign.md | 10 +++++ tests/formats/bls/verify.md | 9 ++++ tests/formats/epoch_processing/README.md | 13 ++++++ tests/formats/finality/README.md | 14 +++++++ tests/formats/forks/README.md | 15 +++++++ tests/formats/genesis/README.md | 8 ++++ tests/formats/genesis/initialization.md | 16 +++++++ tests/formats/genesis/validity.md | 14 +++++++ tests/formats/kzg_4844/README.md | 8 ++++ .../kzg_4844/blob_to_kzg_commitment.md | 10 +++++ .../kzg_4844/compute_blob_kzg_proof.md | 10 +++++ tests/formats/kzg_4844/compute_kzg_proof.md | 10 +++++ .../formats/kzg_4844/verify_blob_kzg_proof.md | 10 +++++ .../kzg_4844/verify_blob_kzg_proof_batch.md | 10 +++++ tests/formats/kzg_4844/verify_kzg_proof.md | 10 +++++ tests/formats/light_client/README.md | 8 ++++ .../light_client/single_merkle_proof.md | 12 ++++++ tests/formats/light_client/sync.md | 17 ++++++++ tests/formats/light_client/update_ranking.md | 12 ++++++ tests/formats/merkle_proof/README.md | 8 ++++ tests/formats/operations/README.md | 14 +++++++ tests/formats/random/README.md | 9 ++++ tests/formats/rewards/README.md | 17 ++++++++ tests/formats/sanity/README.md | 8 ++++ tests/formats/sanity/blocks.md | 14 +++++++ tests/formats/sanity/slots.md | 15 +++++++ tests/formats/shuffling/README.md | 11 +++++ tests/formats/ssz_generic/README.md | 23 ++++++++++ tests/formats/ssz_static/README.md | 8 ++++ tests/formats/ssz_static/core.md | 14 +++++++ tests/formats/sync/README.md | 8 ++++ 
tests/formats/transition/README.md | 14 +++++++ tests/generators/README.md | 1 - tests/generators/bls/README.md | 9 ++++ tests/generators/epoch_processing/README.md | 8 ++++ tests/generators/finality/README.md | 8 ++++ tests/generators/fork_choice/README.md | 8 ++++ tests/generators/genesis/README.md | 8 ++++ tests/generators/kzg_4844/README.md | 8 ++++ tests/generators/light_client/README.md | 8 ++++ tests/generators/merkle_proof/README.md | 8 ++++ tests/generators/operations/README.md | 8 ++++ tests/generators/random/README.md | 11 +++++ tests/generators/rewards/README.md | 8 ++++ tests/generators/sanity/README.md | 8 ++++ tests/generators/shuffling/README.md | 8 ++++ tests/generators/ssz_static/README.md | 8 ++++ 68 files changed, 756 insertions(+), 28 deletions(-) diff --git a/README.md b/README.md index 58bff5b9e4..e66e79b097 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,21 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Ethereum Proof-of-Stake Consensus Specifications](#ethereum-proof-of-stake-consensus-specifications) + - [Specs](#specs) + - [Stable Specifications](#stable-specifications) + - [In-development Specifications](#in-development-specifications) + - [Accompanying documents can be found in specs and include:](#accompanying-documents-can-be-found-in-specs-and-include) + - [Additional specifications for client implementers](#additional-specifications-for-client-implementers) + - [Design goals](#design-goals) + - [Useful external resources](#useful-external-resources) + - [For spec contributors](#for-spec-contributors) + - [Online viewer of the latest release (latest `master` branch)](#online-viewer-of-the-latest-release-latest-master-branch) + - [Consensus spec tests](#consensus-spec-tests) + + + # Ethereum Proof-of-Stake Consensus Specifications [![Join the chat at https://discord.gg/qGpsxSA](https://img.shields.io/badge/chat-on%20discord-blue.svg)](https://discord.gg/qGpsxSA) diff --git a/SECURITY.md b/SECURITY.md index 2101ea1554..a770f5749e 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Security Policy](#security-policy) + - [Supported Versions](#supported-versions) + - [Reporting a Vulnerability](#reporting-a-vulnerability) + + + # Security Policy ## Supported Versions diff --git a/configs/README.md b/configs/README.md index 6ef081e4c4..82b7783682 100644 --- a/configs/README.md +++ b/configs/README.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Configurations](#configurations) + - [Forking](#forking) + - [Format](#format) + + + # Configurations This directory contains a set of configurations used for testing, testnets, and mainnet. diff --git a/docker/README.md b/docker/README.md index 6d5b21e59d..29c7faa3f5 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Docker related information](#docker-related-information) + + + ## Docker related information This dockerfile sets up the dependencies required to run consensus-spec tests. 
The docker image can be locally built with: diff --git a/docs/docs/templates/beacon-chain-template.md b/docs/docs/templates/beacon-chain-template.md index 4d22d3908e..04d61b071f 100644 --- a/docs/docs/templates/beacon-chain-template.md +++ b/docs/docs/templates/beacon-chain-template.md @@ -7,6 +7,24 @@ +- [Introduction](#introduction) +- [Notation](#notation) +- [Custom types](#custom-types) +- [Constants](#constants) + - [[CATEGORY OF CONSTANTS]](#category-of-constants) +- [Preset](#preset) + - [[CATEGORY OF PRESETS]](#category-of-presets) +- [Configuration](#configuration) + - [[CATEGORY OF CONFIGURATIONS]](#category-of-configurations) +- [Containers](#containers) + - [[CATEGORY OF CONTAINERS]](#category-of-containers) + - [`CONTAINER_NAME`](#container_name) +- [Helper functions](#helper-functions) + - [[CATEGORY OF HELPERS]](#category-of-helpers) + - [Epoch processing](#epoch-processing) + - [Block processing](#block-processing) +- [Testing](#testing) + diff --git a/docs/light-client/index.md b/docs/light-client/index.md index 32155b1852..0431126eeb 100644 --- a/docs/light-client/index.md +++ b/docs/light-client/index.md @@ -1 +1,9 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Light client specifications](#light-client-specifications) + + + # Light client specifications diff --git a/presets/README.md b/presets/README.md index 3a438cb2ca..00709d653b 100644 --- a/presets/README.md +++ b/presets/README.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Presets](#presets) + - [Forking](#forking) + - [Format](#format) + + + # Presets Presets are more extensive than runtime configurations, and generally only applicable during compile-time. 
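The presets/configs split described above is visible directlyly in the executable spec: compile-time preset values surface as module-level constants of the built `eth2spec` package, while runtime configuration lives on a separate `config` object. A minimal sketch, assuming the usual `eth2spec.<fork>.<preset>` module layout and without restating any Electra preset values:

```python
# Sketch only: preset constants vs. runtime config in the built pyspec.
# The module path follows the eth2spec.<fork>.<preset> convention.
from eth2spec.electra import mainnet as spec

# Compile-time preset value (from presets/mainnet/*.yaml)
assert spec.SLOTS_PER_EPOCH == 32

# Runtime configuration value (from configs/mainnet.yaml), by contrast
print(spec.config.SECONDS_PER_SLOT)
```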
diff --git a/solidity_deposit_contract/README.md b/solidity_deposit_contract/README.md index 0388d7d2f5..139adf8b11 100644 --- a/solidity_deposit_contract/README.md +++ b/solidity_deposit_contract/README.md @@ -1,3 +1,15 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Deposit Contract](#deposit-contract) + - [History](#history) + - [Compiling solidity deposit contract](#compiling-solidity-deposit-contract) + - [Running web3 tests](#running-web3-tests) + - [Running randomized `dapp` tests:](#running-randomized-dapp-tests) + + + # Deposit Contract ## History diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 4bc2cfaf06..87a282b8f5 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -28,8 +28,7 @@ - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - [`ExecutionLayerWithdrawalRequest`](#executionlayerwithdrawalrequest) - - [`Consolidation`](#consolidation) - - [`SignedConsolidation`](#signedconsolidation) + - [`ExecutionLayerConsolidationRequest`](#executionlayerconsolidationrequest) - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - [`AttesterSlashing`](#attesterslashing) @@ -94,8 +93,8 @@ - [New `process_execution_layer_withdrawal_request`](#new-process_execution_layer_withdrawal_request) - [Deposit receipts](#deposit-receipts) - [New `process_deposit_receipt`](#new-process_deposit_receipt) - - [Consolidations](#consolidations) - - [New `process_consolidation`](#new-process_consolidation) + - [Execution layer consolidation requests](#execution-layer-consolidation-requests) + - [New `process_execution_layer_consolidation_request`](#new-process_execution_layer_consolidation_request) - [Testing](#testing) @@ -1286,10 +1285,10 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) ##### Execution layer consolidation requests -###### New `process_execution_layer_consolidation_requests` +###### New `process_execution_layer_consolidation_request` ```python -def process_execution_layer_consolidation_requests( +def process_execution_layer_consolidation_request( state: BeaconState, execution_layer_consolidation_request: ExecutionLayerConsolidationRequest) -> None: # If the pending consolidations queue is full, consolidation requests are ignored diff --git a/tests/README.md b/tests/README.md index dbd2b31de2..46a7bf2ddf 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,3 +1,20 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Getting Started with Consensus Spec Tests](#getting-started-with-consensus-spec-tests) + - [Getting Started](#getting-started) + - [Creating the environment](#creating-the-environment) + - [Running your first test](#running-your-first-test) + - [The "Hello, World" of Consensus Spec Tests](#the-hello-world-of-consensus-spec-tests) + - [New Tests](#new-tests) + - [Tests Designed to Fail](#tests-designed-to-fail) + - [Attestation Tests](#attestation-tests) + - [Adding an Attestation Test](#adding-an-attestation-test) + - [How are These Tests Used?](#how-are-these-tests-used) + + + # Getting Started with Consensus Spec Tests ## Getting Started diff --git a/tests/core/pyspec/README.md b/tests/core/pyspec/README.md index baa1322771..ca177cf772 100644 --- a/tests/core/pyspec/README.md +++ b/tests/core/pyspec/README.md @@ -1,3 +1,20 @@ + + +**Table of Contents** *generated with 
[DocToc](https://github.com/thlorenz/doctoc)* + +- [Executable Python Spec (PySpec)](#executable-python-spec-pyspec) + - [Dev Install](#dev-install) + - [Py-tests](#py-tests) + - [How to run tests](#how-to-run-tests) + - [Automated](#automated) + - [Manual](#manual) + - [How to view code coverage report](#how-to-view-code-coverage-report) + - [Advanced](#advanced) + - [Contributing](#contributing) + - [License](#license) + + + # Executable Python Spec (PySpec) The executable Python spec is built from the consensus specifications, diff --git a/tests/core/pyspec/eth2spec/config/README.md b/tests/core/pyspec/eth2spec/config/README.md index c03d890c20..7d6a044de3 100644 --- a/tests/core/pyspec/eth2spec/config/README.md +++ b/tests/core/pyspec/eth2spec/config/README.md @@ -1,3 +1,12 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Consensus specs config util](#consensus-specs-config-util) + - [Config usage:](#config-usage) + + + # Consensus specs config util For run-time configuration, see [Configs documentation](../../../../../configs/README.md). diff --git a/tests/core/pyspec/eth2spec/gen_helpers/README.md b/tests/core/pyspec/eth2spec/gen_helpers/README.md index bf791ccfea..88f8fcf4b7 100644 --- a/tests/core/pyspec/eth2spec/gen_helpers/README.md +++ b/tests/core/pyspec/eth2spec/gen_helpers/README.md @@ -1,3 +1,14 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Consensus test generator helpers](#consensus-test-generator-helpers) + - [`gen_base`](#gen_base) + - [`gen_from_tests`](#gen_from_tests) + - [Test-case parts](#test-case-parts) + + + # Consensus test generator helpers ## `gen_base` diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py index 419d13ed14..7f0b53ba99 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py @@ -40,7 +40,7 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -89,7 +89,7 @@ def test_basic_consolidation_in_new_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -132,7 +132,7 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -179,7 +179,7 @@ def 
test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -230,7 +230,7 @@ def test_basic_consolidation_with_compounding_credentials(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -276,7 +276,7 @@ def test_consolidation_churn_limit_balance(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -324,7 +324,7 @@ def test_consolidation_balance_larger_than_churn_limit(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -371,7 +371,7 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -417,7 +417,7 @@ def test_invalid_source_equals_target(spec, state): spec, state, source_index, address=source_address ) # Make consolidation from source to source - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[source_index].pubkey, @@ -449,7 +449,7 @@ def test_invalid_exceed_pending_consolidations_limit(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -478,7 +478,7 @@ def test_invalid_not_enough_consolidation_churn_available(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -506,7 +506,7 @@ def test_invalid_exited_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, 
address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -538,7 +538,7 @@ def test_invalid_exited_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -568,7 +568,7 @@ def test_invalid_inactive_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -600,7 +600,7 @@ def test_invalid_inactive_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -629,7 +629,7 @@ def test_invalid_no_source_execution_withdrawal_credential(spec, state): source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] source_address = b"\x22" * 20 - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -657,7 +657,7 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -685,7 +685,7 @@ def test_invalid_incorrect_source_address(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -715,7 +715,7 @@ def test_invalid_unknown_source_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source pubkey - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=b"\x00" * 48, target_pubkey=state.validators[target_index].pubkey, @@ -744,7 +744,7 @@ def test_invalid_unknown_target_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different target pubkey - consolidation = spec.ExecutionLayerConsolidation( + consolidation = 
spec.ExecutionLayerConsolidationRequest( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=b"\x00" * 48, @@ -782,7 +782,7 @@ def run_consolidation_processing(spec, state, consolidation, success=True): yield 'pre', state yield 'consolidation', consolidation - spec.process_execution_layer_consolidation(state, consolidation) + spec.process_execution_layer_consolidation_request(state, consolidation) yield 'post', state diff --git a/tests/formats/README.md b/tests/formats/README.md index ec495daa5b..0fbf3cd9ea 100644 --- a/tests/formats/README.md +++ b/tests/formats/README.md @@ -1,3 +1,32 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [General test format](#general-test-format) + - [Table of contents](#table-of-contents) + - [About](#about) + - [Test-case formats](#test-case-formats) + - [Glossary](#glossary) + - [Test format philosophy](#test-format-philosophy) + - [Config design](#config-design) + - [Test completeness](#test-completeness) + - [Test structure](#test-structure) + - [`/`](#config-name) + - [`/`](#fork-or-phase-name) + - [`/`](#test-runner-name) + - [`/`](#test-handler-name) + - [`/`](#test-suite-name) + - [`/`](#test-case) + - [``](#output-part) + - [Common output formats](#common-output-formats) + - [Special output parts](#special-output-parts) + - [`meta.yaml`](#metayaml) + - [`config.yaml`](#configyaml) + - [Config sourcing](#config-sourcing) + - [Note for implementers](#note-for-implementers) + + + # General test format This document defines the YAML format and structure used for consensus spec testing. diff --git a/tests/formats/bls/README.md b/tests/formats/bls/README.md index 77a9654a8d..d4e3ea9035 100644 --- a/tests/formats/bls/README.md +++ b/tests/formats/bls/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [BLS tests](#bls-tests) + + + # BLS tests A test type for BLS. Primarily geared towards verifying the *integration* of any BLS library. diff --git a/tests/formats/bls/aggregate.md b/tests/formats/bls/aggregate.md index 7cdebcf4d9..4f86bc0d3f 100644 --- a/tests/formats/bls/aggregate.md +++ b/tests/formats/bls/aggregate.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: BLS signature aggregation](#test-format-bls-signature-aggregation) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: BLS signature aggregation A BLS signature aggregation combines a series of signatures into a single signature. diff --git a/tests/formats/bls/aggregate_verify.md b/tests/formats/bls/aggregate_verify.md index 9b251af46e..0e8414c00b 100644 --- a/tests/formats/bls/aggregate_verify.md +++ b/tests/formats/bls/aggregate_verify.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: BLS sign message](#test-format-bls-sign-message) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: BLS sign message Verify the signature against the given pubkeys and one messages. 
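Back in the consolidation test module above, every case follows one pattern: give both validators execution-layer withdrawal credentials, build an `ExecutionLayerConsolidationRequest`, and hand it to `run_consolidation_processing`. A condensed sketch of that flow, reusing the module's own helpers (`set_eth1_withdrawal_credential_with_balance`, `run_consolidation_processing`) with purely illustrative indices and address:

```python
@with_electra_and_later
@spec_state_test
def test_consolidation_request_sketch(spec, state):
    # Illustrative setup; the real tests additionally provision consolidation
    # churn via @with_presets([MINIMAL], ...) and @with_custom_state(...).
    current_epoch = spec.get_current_epoch(state)
    source_index = spec.get_active_validator_indices(state, current_epoch)[0]
    target_index = spec.get_active_validator_indices(state, current_epoch)[1]
    source_address = b"\x22" * 20

    set_eth1_withdrawal_credential_with_balance(
        spec, state, source_index, address=source_address
    )
    set_eth1_withdrawal_credential_with_balance(spec, state, target_index)

    consolidation = spec.ExecutionLayerConsolidationRequest(
        source_address=source_address,
        source_pubkey=state.validators[source_index].pubkey,
        target_pubkey=state.validators[target_index].pubkey,
    )
    yield from run_consolidation_processing(spec, state, consolidation)
```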
diff --git a/tests/formats/bls/eth_aggregate_pubkeys.md b/tests/formats/bls/eth_aggregate_pubkeys.md index 2b72c1dcaf..4a61330a30 100644 --- a/tests/formats/bls/eth_aggregate_pubkeys.md +++ b/tests/formats/bls/eth_aggregate_pubkeys.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Ethereum-customized BLS pubkey aggregation](#test-format-ethereum-customized-bls-pubkey-aggregation) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Ethereum-customized BLS pubkey aggregation A BLS pubkey aggregation combines a series of pubkeys into a single pubkey. diff --git a/tests/formats/bls/eth_fast_aggregate_verify.md b/tests/formats/bls/eth_fast_aggregate_verify.md index 83b5484e05..66a3728a0d 100644 --- a/tests/formats/bls/eth_fast_aggregate_verify.md +++ b/tests/formats/bls/eth_fast_aggregate_verify.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Ethereum-customized BLS fast aggregate verify](#test-format-ethereum-customized-bls-fast-aggregate-verify) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Ethereum-customized BLS fast aggregate verify Verify the signature against the given pubkeys and one message. diff --git a/tests/formats/bls/fast_aggregate_verify.md b/tests/formats/bls/fast_aggregate_verify.md index 38ea29bb5f..b7a94b1c00 100644 --- a/tests/formats/bls/fast_aggregate_verify.md +++ b/tests/formats/bls/fast_aggregate_verify.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: BLS fast aggregate verify](#test-format-bls-fast-aggregate-verify) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: BLS fast aggregate verify Verify the signature against the given pubkeys and one message. diff --git a/tests/formats/bls/sign.md b/tests/formats/bls/sign.md index 09e9286148..96756c9e3f 100644 --- a/tests/formats/bls/sign.md +++ b/tests/formats/bls/sign.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: BLS sign message](#test-format-bls-sign-message) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: BLS sign message Message signing with BLS should produce a signature. diff --git a/tests/formats/bls/verify.md b/tests/formats/bls/verify.md index 57ec8a33a7..0fd5f43f29 100644 --- a/tests/formats/bls/verify.md +++ b/tests/formats/bls/verify.md @@ -1,3 +1,12 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: BLS sign message](#test-format-bls-sign-message) + - [Test case format](#test-case-format) + + + # Test format: BLS sign message Verify the signature against the given one pubkey and one message. 
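The BLS vectors above (sign, verify, aggregate, fast aggregate verify) all exercise the standard IETF BLS proof-of-possession ciphersuite that the pyspec wraps. A hedged sketch of the round trip they cover, calling `py_ecc`'s `G2ProofOfPossession` API directly rather than the spec wrapper, with toy private keys and message:

```python
from py_ecc.bls import G2ProofOfPossession as bls

# Toy keys and message, for illustration only
privkeys = [1, 2, 3]
message = b"\x12" * 32

pubkeys = [bls.SkToPk(sk) for sk in privkeys]
signatures = [bls.Sign(sk, message) for sk in privkeys]

assert bls.Verify(pubkeys[0], message, signatures[0])        # verify.md
aggregate = bls.Aggregate(signatures)                        # aggregate.md
assert bls.FastAggregateVerify(pubkeys, message, aggregate)  # fast_aggregate_verify.md
```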
diff --git a/tests/formats/epoch_processing/README.md b/tests/formats/epoch_processing/README.md index 2951767f2c..60eee01179 100644 --- a/tests/formats/epoch_processing/README.md +++ b/tests/formats/epoch_processing/README.md @@ -1,3 +1,16 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Epoch processing tests](#epoch-processing-tests) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`post.ssz_snappy`](#postssz_snappy) + - [Condition](#condition) + + + # Epoch processing tests The different epoch sub-transitions are tested individually with test handlers. diff --git a/tests/formats/finality/README.md b/tests/formats/finality/README.md index af39f5c8ca..2cdb820c79 100644 --- a/tests/formats/finality/README.md +++ b/tests/formats/finality/README.md @@ -1,3 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Finality tests](#finality-tests) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`blocks_.yaml`](#blocks_indexyaml) + - [`post.ssz_snappy`](#postssz_snappy) + - [Condition](#condition) + + + # Finality tests The aim of the tests for the finality rules. diff --git a/tests/formats/forks/README.md b/tests/formats/forks/README.md index dfbaf2df0b..6c685e41f6 100644 --- a/tests/formats/forks/README.md +++ b/tests/formats/forks/README.md @@ -1,3 +1,18 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Forks](#forks) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [Fork strings](#fork-strings) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`post.ssz_snappy`](#postssz_snappy) + - [Processing](#processing) + - [Condition](#condition) + + + # Forks The aim of the fork tests is to ensure that a pre-fork state can be transformed diff --git a/tests/formats/genesis/README.md b/tests/formats/genesis/README.md index 25761e2f6a..110c3c9524 100644 --- a/tests/formats/genesis/README.md +++ b/tests/formats/genesis/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Genesis tests](#genesis-tests) + + + # Genesis tests The aim of the genesis tests is to provide a baseline to test genesis-state initialization and test diff --git a/tests/formats/genesis/initialization.md b/tests/formats/genesis/initialization.md index 9848e157d9..aebba624af 100644 --- a/tests/formats/genesis/initialization.md +++ b/tests/formats/genesis/initialization.md @@ -1,3 +1,19 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Genesis creation testing](#genesis-creation-testing) + - [Test case format](#test-case-format) + - [`eth1.yaml`](#eth1yaml) + - [`meta.yaml`](#metayaml) + - [`deposits_.ssz_snappy`](#deposits_indexssz_snappy) + - [`execution_payload_header.ssz_snappy`](#execution_payload_headerssz_snappy) + - [`state.ssz_snappy`](#statessz_snappy) + - [Processing](#processing) + - [Condition](#condition) + + + # Genesis creation testing Tests the initialization of a genesis state based on Eth1 data. 
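The initialization vectors above drive the phase0 `initialize_beacon_state_from_eth1` helper (post-merge forks extend it with an optional `execution_payload_header` argument, matching the `execution_payload_header.ssz_snappy` part listed in the format). A hedged sketch, assuming the inputs have already been decoded from their `ssz_snappy`/YAML parts:

```python
def run_genesis_initialization(spec, eth1_block_hash, eth1_timestamp, deposits, expected_state):
    # Build the candidate genesis state from the Eth1 inputs...
    state = spec.initialize_beacon_state_from_eth1(
        eth1_block_hash,
        eth1_timestamp,
        deposits,
    )
    # ...and compare against state.ssz_snappy from the vector
    assert spec.hash_tree_root(state) == spec.hash_tree_root(expected_state)
```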
diff --git a/tests/formats/genesis/validity.md b/tests/formats/genesis/validity.md index 15236c3ba3..2ad42feb67 100644 --- a/tests/formats/genesis/validity.md +++ b/tests/formats/genesis/validity.md @@ -1,3 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Genesis validity testing](#genesis-validity-testing) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`genesis.ssz_snappy`](#genesisssz_snappy) + - [`is_valid.yaml`](#is_validyaml) + - [Processing](#processing) + - [Condition](#condition) + + + # Genesis validity testing Tests if a genesis state is valid, i.e. if it counts as trigger to launch. diff --git a/tests/formats/kzg_4844/README.md b/tests/formats/kzg_4844/README.md index b5bd720393..f5afa9ed12 100644 --- a/tests/formats/kzg_4844/README.md +++ b/tests/formats/kzg_4844/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [KZG tests](#kzg-tests) + + + # KZG tests A test type for KZG libraries. Tests all the public interfaces that a KZG library required to implement EIP-4844 needs to provide, as defined in `polynomial-commitments.md`. diff --git a/tests/formats/kzg_4844/blob_to_kzg_commitment.md b/tests/formats/kzg_4844/blob_to_kzg_commitment.md index dbb1556a1d..fdc710edfd 100644 --- a/tests/formats/kzg_4844/blob_to_kzg_commitment.md +++ b/tests/formats/kzg_4844/blob_to_kzg_commitment.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Blob to KZG commitment](#test-format-blob-to-kzg-commitment) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Blob to KZG commitment Compute the KZG commitment for a given `blob`. diff --git a/tests/formats/kzg_4844/compute_blob_kzg_proof.md b/tests/formats/kzg_4844/compute_blob_kzg_proof.md index 62fce37231..32a9f97104 100644 --- a/tests/formats/kzg_4844/compute_blob_kzg_proof.md +++ b/tests/formats/kzg_4844/compute_blob_kzg_proof.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Compute blob KZG proof](#test-format-compute-blob-kzg-proof) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Compute blob KZG proof Compute the blob KZG proof for a given `blob`, that helps with quickly verifying that the KZG commitment for the blob is correct. diff --git a/tests/formats/kzg_4844/compute_kzg_proof.md b/tests/formats/kzg_4844/compute_kzg_proof.md index b10105129b..e85616539d 100644 --- a/tests/formats/kzg_4844/compute_kzg_proof.md +++ b/tests/formats/kzg_4844/compute_kzg_proof.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Compute KZG proof](#test-format-compute-kzg-proof) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Compute KZG proof Compute the KZG proof for a given `blob` and an evaluation point `z`. 
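The compute-side KZG handlers above map one-to-one onto the `polynomial-commitments.md` functions of the same names. A small sketch of the expected call shapes, assuming a valid `blob` and field-element `z` taken from a vector's input:

```python
def run_kzg_compute_cases(spec, blob, z):
    commitment = spec.blob_to_kzg_commitment(blob)               # blob_to_kzg_commitment.md
    proof, y = spec.compute_kzg_proof(blob, z)                   # compute_kzg_proof.md: returns (proof, y)
    blob_proof = spec.compute_blob_kzg_proof(blob, commitment)   # compute_blob_kzg_proof.md
    return commitment, proof, y, blob_proof
```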
diff --git a/tests/formats/kzg_4844/verify_blob_kzg_proof.md b/tests/formats/kzg_4844/verify_blob_kzg_proof.md index dd0bcda5a9..9a62ba92a4 100644 --- a/tests/formats/kzg_4844/verify_blob_kzg_proof.md +++ b/tests/formats/kzg_4844/verify_blob_kzg_proof.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Verify blob KZG proof](#test-format-verify-blob-kzg-proof) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Verify blob KZG proof Use the blob KZG proof to verify that the KZG commitment for a given `blob` is correct diff --git a/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md b/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md index 82e668497d..040446f69a 100644 --- a/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md +++ b/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Verify blob KZG proof batch](#test-format-verify-blob-kzg-proof-batch) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Verify blob KZG proof batch Use the blob KZG proofs to verify that the KZG commitments for given `blobs` are correct diff --git a/tests/formats/kzg_4844/verify_kzg_proof.md b/tests/formats/kzg_4844/verify_kzg_proof.md index 18e02710c5..bb4c1547f1 100644 --- a/tests/formats/kzg_4844/verify_kzg_proof.md +++ b/tests/formats/kzg_4844/verify_kzg_proof.md @@ -1,3 +1,13 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: Verify KZG proof](#test-format-verify-kzg-proof) + - [Test case format](#test-case-format) + - [Condition](#condition) + + + # Test format: Verify KZG proof Verify the KZG proof for a given `blob` and an evaluation point `z` that claims to result in a value of `y`. diff --git a/tests/formats/light_client/README.md b/tests/formats/light_client/README.md index 505b416019..84f06f58c1 100644 --- a/tests/formats/light_client/README.md +++ b/tests/formats/light_client/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Light client sync protocol tests](#light-client-sync-protocol-tests) + + + # Light client sync protocol tests This series of tests provides reference test vectors for the light client sync protocol spec. 
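And the verification side, again assuming the spec module exposes the `polynomial-commitments.md` functions; for the invalid-input cases the vectors expect a `False` result or an error rather than success:

```python
def run_kzg_verify_cases(spec, blob, commitment, z, y, proof, blob_proof):
    assert spec.verify_kzg_proof(commitment, z, y, proof)             # verify_kzg_proof.md
    assert spec.verify_blob_kzg_proof(blob, commitment, blob_proof)   # verify_blob_kzg_proof.md
    # verify_blob_kzg_proof_batch.md: the batched form over lists of inputs
    assert spec.verify_blob_kzg_proof_batch([blob], [commitment], [blob_proof])
```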
diff --git a/tests/formats/light_client/single_merkle_proof.md b/tests/formats/light_client/single_merkle_proof.md index 0cb4cd0d0c..789603af57 100644 --- a/tests/formats/light_client/single_merkle_proof.md +++ b/tests/formats/light_client/single_merkle_proof.md @@ -1,3 +1,15 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Single leaf merkle proof tests](#single-leaf-merkle-proof-tests) + - [Test case format](#test-case-format) + - [`object.ssz_snappy`](#objectssz_snappy) + - [`proof.yaml`](#proofyaml) + - [Condition](#condition) + + + # Single leaf merkle proof tests This series of tests provides reference test vectors for validating correct diff --git a/tests/formats/light_client/sync.md b/tests/formats/light_client/sync.md index 1706b4c162..f2a52f167e 100644 --- a/tests/formats/light_client/sync.md +++ b/tests/formats/light_client/sync.md @@ -1,3 +1,20 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Light client sync tests](#light-client-sync-tests) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`bootstrap.ssz_snappy`](#bootstrapssz_snappy) + - [`steps.yaml`](#stepsyaml) + - [Checks to run after each step](#checks-to-run-after-each-step) + - [`force_update` execution step](#force_update-execution-step) + - [`process_update` execution step](#process_update-execution-step) + - [`upgrade_store`](#upgrade_store) + - [Condition](#condition) + + + # Light client sync tests This series of tests provides reference test vectors for validating that a light client implementing the sync protocol can sync to the latest block header. diff --git a/tests/formats/light_client/update_ranking.md b/tests/formats/light_client/update_ranking.md index fe73fb9df7..4640f7860b 100644 --- a/tests/formats/light_client/update_ranking.md +++ b/tests/formats/light_client/update_ranking.md @@ -1,3 +1,15 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [`LightClientUpdate` ranking tests](#lightclientupdate-ranking-tests) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`updates_.ssz_snappy`](#updates_indexssz_snappy) + - [Condition](#condition) + + + # `LightClientUpdate` ranking tests This series of tests provides reference test vectors for validating that `LightClientUpdate` instances are ranked in a canonical order. diff --git a/tests/formats/merkle_proof/README.md b/tests/formats/merkle_proof/README.md index 77822daabe..5791dd5283 100644 --- a/tests/formats/merkle_proof/README.md +++ b/tests/formats/merkle_proof/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Merkle proof tests](#merkle-proof-tests) + + + # Merkle proof tests Handlers: diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index b020b5fd03..e3627cf30b 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -1,3 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Operations tests](#operations-tests) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`.ssz_snappy`](#input-namessz_snappy) + - [`post.ssz_snappy`](#postssz_snappy) + - [Condition](#condition) + + + # Operations tests The different kinds of operations ("transactions") are tested individually with test handlers. 
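For the single-leaf Merkle proof format a little further up, a proof from `proof.yaml` is checked against the root of the object in `object.ssz_snappy`. A hedged sketch, deriving the branch depth and subtree index from the generalized `leaf_index`:

```python
def check_single_merkle_proof(spec, obj, leaf, leaf_index, branch):
    depth = leaf_index.bit_length() - 1       # floor(log2(generalized index))
    subtree_index = leaf_index % (2**depth)   # position within that subtree
    assert spec.is_valid_merkle_branch(
        leaf=leaf,
        branch=branch,
        depth=depth,
        index=subtree_index,
        root=spec.hash_tree_root(obj),
    )
```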
diff --git a/tests/formats/random/README.md b/tests/formats/random/README.md index 54b2c1a23e..b6ac4429fd 100644 --- a/tests/formats/random/README.md +++ b/tests/formats/random/README.md @@ -1,3 +1,12 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Random tests](#random-tests) + - [Test case format](#test-case-format) + + + # Random tests The random tests are generated with various randomized states and blocks. diff --git a/tests/formats/rewards/README.md b/tests/formats/rewards/README.md index a6682042f7..7aa36adb1b 100644 --- a/tests/formats/rewards/README.md +++ b/tests/formats/rewards/README.md @@ -1,3 +1,20 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Rewards tests](#rewards-tests) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`source_deltas.ssz_snappy`](#source_deltasssz_snappy) + - [`target_deltas.ssz_snappy`](#target_deltasssz_snappy) + - [`head_deltas.ssz_snappy`](#head_deltasssz_snappy) + - [`inclusion_delay_deltas.ssz_snappy`](#inclusion_delay_deltasssz_snappy) + - [`inactivity_penalty_deltas.ssz_snappy`](#inactivity_penalty_deltasssz_snappy) + - [Condition](#condition) + + + # Rewards tests All rewards deltas sub-functions are tested for each test case. diff --git a/tests/formats/sanity/README.md b/tests/formats/sanity/README.md index 20b36208a4..d2b26709ee 100644 --- a/tests/formats/sanity/README.md +++ b/tests/formats/sanity/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Sanity tests](#sanity-tests) + + + # Sanity tests The aim of the sanity tests is to set a base-line on what really needs to pass, i.e. the essentials. diff --git a/tests/formats/sanity/blocks.md b/tests/formats/sanity/blocks.md index 7ea646b9e0..1c1bc3c92c 100644 --- a/tests/formats/sanity/blocks.md +++ b/tests/formats/sanity/blocks.md @@ -1,3 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Sanity blocks testing](#sanity-blocks-testing) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`blocks_.ssz_snappy`](#blocks_indexssz_snappy) + - [`post.ssz_snappy`](#postssz_snappy) + - [Condition](#condition) + + + # Sanity blocks testing Sanity tests to cover a series of one or more blocks being processed, aiming to cover common changes. diff --git a/tests/formats/sanity/slots.md b/tests/formats/sanity/slots.md index f1b8a13219..54083f5cf0 100644 --- a/tests/formats/sanity/slots.md +++ b/tests/formats/sanity/slots.md @@ -1,3 +1,18 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Sanity slots testing](#sanity-slots-testing) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`slots.yaml`](#slotsyaml) + - [`post.ssz_snappy`](#postssz_snappy) + - [Processing](#processing) + - [Condition](#condition) + + + # Sanity slots testing Sanity tests to cover a series of one or more empty-slot transitions being processed, aiming to cover common changes. 
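The sanity `slots` format above reduces to advancing a state by the number of empty slots in `slots.yaml` and comparing against `post`. A minimal sketch with already-decoded states:

```python
def run_sanity_slots(spec, pre_state, num_slots, post_state):
    state = pre_state.copy()
    # Empty-slot transitions only; no blocks are applied in this format
    spec.process_slots(state, state.slot + num_slots)
    assert spec.hash_tree_root(state) == spec.hash_tree_root(post_state)
```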
diff --git a/tests/formats/shuffling/README.md b/tests/formats/shuffling/README.md index 15bfe6996b..1334362f49 100644 --- a/tests/formats/shuffling/README.md +++ b/tests/formats/shuffling/README.md @@ -1,3 +1,14 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: shuffling](#test-format-shuffling) + - [Test case format](#test-case-format) + - [`mapping.yaml`](#mappingyaml) + - [Condition](#condition) + + + # Test format: shuffling The runner of the Shuffling test type has only one handler: `core`. diff --git a/tests/formats/ssz_generic/README.md b/tests/formats/ssz_generic/README.md index c46025847a..c95ef2aad6 100644 --- a/tests/formats/ssz_generic/README.md +++ b/tests/formats/ssz_generic/README.md @@ -1,3 +1,26 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [SSZ, generic tests](#ssz-generic-tests) + - [Format](#format) + - [`valid`](#valid) + - [`meta.yaml`](#metayaml) + - [`serialized.ssz_snappy`](#serializedssz_snappy) + - [`value.yaml`](#valueyaml) + - [Conditions](#conditions) + - [`invalid`](#invalid) + - [Condition](#condition) + - [Type declarations](#type-declarations) + - [`basic_vector`](#basic_vector) + - [`bitlist`](#bitlist) + - [`bitvector`](#bitvector) + - [`boolean`](#boolean) + - [`uints`](#uints) + - [`containers`](#containers) + + + # SSZ, generic tests This set of test-suites provides general testing for SSZ: diff --git a/tests/formats/ssz_static/README.md b/tests/formats/ssz_static/README.md index ffa7373349..3cef2de714 100644 --- a/tests/formats/ssz_static/README.md +++ b/tests/formats/ssz_static/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [SSZ, static tests](#ssz-static-tests) + + + # SSZ, static tests This set of test-suites provides static testing for SSZ: diff --git a/tests/formats/ssz_static/core.md b/tests/formats/ssz_static/core.md index 09ff04e20d..6995de9abb 100644 --- a/tests/formats/ssz_static/core.md +++ b/tests/formats/ssz_static/core.md @@ -1,3 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Test format: SSZ static types](#test-format-ssz-static-types) + - [Test case format](#test-case-format) + - [`roots.yaml`](#rootsyaml) + - [`serialized.ssz_snappy`](#serializedssz_snappy) + - [`value.yaml`](#valueyaml) + - [Condition](#condition) + - [References](#references) + + + # Test format: SSZ static types The goal of this type is to provide clients with a solid reference for how the known SSZ objects should be encoded. diff --git a/tests/formats/sync/README.md b/tests/formats/sync/README.md index ff9f8168cb..be95ba765f 100644 --- a/tests/formats/sync/README.md +++ b/tests/formats/sync/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Sync tests](#sync-tests) + + + # Sync tests It re-uses the [fork choice test format](../fork_choice/README.md) to apply the test script. 
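The shuffling `core` handler above is a thin wrapper around `compute_shuffled_index`. A sketch of the check, under the assumption that `mapping[i]` in `mapping.yaml` is the shuffled position of index `i` (the format doc fixes the exact field semantics):

```python
def check_shuffling(spec, seed, count, mapping):
    # Assumption: mapping[i] is where index i lands after the swap-or-not shuffle
    computed = [spec.compute_shuffled_index(i, count, seed) for i in range(count)]
    assert list(mapping) == computed
```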
diff --git a/tests/formats/transition/README.md b/tests/formats/transition/README.md index 7f89bdd610..cd4a23f293 100644 --- a/tests/formats/transition/README.md +++ b/tests/formats/transition/README.md @@ -1,3 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Transition testing](#transition-testing) + - [Test case format](#test-case-format) + - [`meta.yaml`](#metayaml) + - [`pre.ssz_snappy`](#pressz_snappy) + - [`blocks_.ssz_snappy`](#blocks_indexssz_snappy) + - [`post.ssz_snappy`](#postssz_snappy) + - [Condition](#condition) + + + # Transition testing Transition tests to cover processing the chain across a fork boundary. diff --git a/tests/generators/README.md b/tests/generators/README.md index 0146ca35e8..0dd1a87a65 100644 --- a/tests/generators/README.md +++ b/tests/generators/README.md @@ -14,7 +14,6 @@ An automated nightly tests release system, with a config filter applied, is bein - - [How to run generators](#how-to-run-generators) - [Cleaning](#cleaning) - [Running all test generators](#running-all-test-generators) diff --git a/tests/generators/bls/README.md b/tests/generators/bls/README.md index 24013f88e7..be19386372 100644 --- a/tests/generators/bls/README.md +++ b/tests/generators/bls/README.md @@ -1,3 +1,12 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [BLS Test Generator](#bls-test-generator) + - [Resources](#resources) + + + # BLS Test Generator The [BLS Signature APIs](../../../specs/phase0/beacon-chain.md#bls-signatures) diff --git a/tests/generators/epoch_processing/README.md b/tests/generators/epoch_processing/README.md index 662b0b516d..4e7a8119c9 100644 --- a/tests/generators/epoch_processing/README.md +++ b/tests/generators/epoch_processing/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Epoch processing](#epoch-processing) + + + # Epoch processing Epoch processing covers the sub-transitions during an epoch change. diff --git a/tests/generators/finality/README.md b/tests/generators/finality/README.md index dec5819c68..8686ffc445 100644 --- a/tests/generators/finality/README.md +++ b/tests/generators/finality/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Finality tests](#finality-tests) + + + # Finality tests Finality tests cover regular state-transitions in a common block-list format to test finality rules. diff --git a/tests/generators/fork_choice/README.md b/tests/generators/fork_choice/README.md index e67b115ba1..61f837d42d 100644 --- a/tests/generators/fork_choice/README.md +++ b/tests/generators/fork_choice/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Fork choice tests](#fork-choice-tests) + + + # Fork choice tests Fork choice tests cover the different forking cases with fork choice helper functions. diff --git a/tests/generators/genesis/README.md b/tests/generators/genesis/README.md index e270f6e35e..3f218841ae 100644 --- a/tests/generators/genesis/README.md +++ b/tests/generators/genesis/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Genesis test generator](#genesis-test-generator) + + + # Genesis test generator Genesis tests cover the initialization and validity-based launch trigger for the Beacon Chain genesis state. 
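The transition format at the top of this group hinges on applying the right spec on each side of `fork_epoch` and calling the upgrade function at the boundary. A hedged sketch (block application omitted), reusing the `upgrade_to_electra` helper touched earlier in this patch:

```python
def cross_fork_boundary(pre_spec, post_spec, state, fork_epoch):
    # Advance to the first slot of the fork epoch using the pre-fork spec...
    fork_slot = pre_spec.compute_start_slot_at_epoch(fork_epoch)
    pre_spec.process_slots(state, fork_slot)
    # ...then upgrade the state in place, e.g. Deneb -> Electra
    return post_spec.upgrade_to_electra(state)
```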
diff --git a/tests/generators/kzg_4844/README.md b/tests/generators/kzg_4844/README.md index ab81a85e86..61031ac1f4 100644 --- a/tests/generators/kzg_4844/README.md +++ b/tests/generators/kzg_4844/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [KZG 4844 Test Generator](#kzg-4844-test-generator) + + + # KZG 4844 Test Generator These tests are specific to the KZG API required for implementing EIP-4844 \ No newline at end of file diff --git a/tests/generators/light_client/README.md b/tests/generators/light_client/README.md index 7eabc2520c..2751c0874e 100644 --- a/tests/generators/light_client/README.md +++ b/tests/generators/light_client/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Light client tests](#light-client-tests) + + + # Light client tests The purpose of this test-generator is to provide test-vectors for validating the correct implementation of the light client sync protocol. diff --git a/tests/generators/merkle_proof/README.md b/tests/generators/merkle_proof/README.md index fb4d05fda8..b5ee6c8b6c 100644 --- a/tests/generators/merkle_proof/README.md +++ b/tests/generators/merkle_proof/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Merkle proof tests](#merkle-proof-tests) + + + # Merkle proof tests The purpose of this test-generator is to provide test-vectors for validating the correct implementation of the Merkle proof verification. diff --git a/tests/generators/operations/README.md b/tests/generators/operations/README.md index a5d48c11b4..2eac9c9f9e 100644 --- a/tests/generators/operations/README.md +++ b/tests/generators/operations/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Operations](#operations) + + + # Operations Operations (or "transactions" in previous spec iterations), diff --git a/tests/generators/random/README.md b/tests/generators/random/README.md index fd17284412..4c49226f8e 100644 --- a/tests/generators/random/README.md +++ b/tests/generators/random/README.md @@ -1,3 +1,14 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Randomized tests](#randomized-tests) +- [To generate test sources](#to-generate-test-sources) +- [To run tests](#to-run-tests) +- [To generate spec tests (from the generated files)](#to-generate-spec-tests-from-the-generated-files) + + + # Randomized tests Randomized tests in the format of `sanity` blocks tests, with randomized operations. diff --git a/tests/generators/rewards/README.md b/tests/generators/rewards/README.md index 60f106836a..4233958924 100644 --- a/tests/generators/rewards/README.md +++ b/tests/generators/rewards/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Rewards](#rewards) + + + # Rewards Rewards covers the sub-functions of `process_rewards_and_penalties` for granular testing of components of the rewards function. 
diff --git a/tests/generators/sanity/README.md b/tests/generators/sanity/README.md index cbc6aef06d..31c59f84d3 100644 --- a/tests/generators/sanity/README.md +++ b/tests/generators/sanity/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Sanity tests](#sanity-tests) + + + # Sanity tests Sanity tests cover regular state-transitions in a common block-list format, to ensure the basics work. diff --git a/tests/generators/shuffling/README.md b/tests/generators/shuffling/README.md index 81ddaba15f..0294c1ec6f 100644 --- a/tests/generators/shuffling/README.md +++ b/tests/generators/shuffling/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Shuffling Tests](#shuffling-tests) + + + # Shuffling Tests Tests for the swap-or-not shuffling in the beacon chain. diff --git a/tests/generators/ssz_static/README.md b/tests/generators/ssz_static/README.md index 3434fe174b..b557b726ad 100644 --- a/tests/generators/ssz_static/README.md +++ b/tests/generators/ssz_static/README.md @@ -1,3 +1,11 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [SSZ-static](#ssz-static) + + + # SSZ-static The purpose of this test-generator is to provide test-vectors for the most important applications of SSZ: From 5998e744476f6af6211688e5209cb7724d8f7240 Mon Sep 17 00:00:00 2001 From: fradamt Date: Tue, 21 May 2024 18:27:32 +0200 Subject: [PATCH 41/89] typos. electra tests passing --- specs/electra/beacon-chain.md | 10 +++++----- specs/electra/fork.md | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 87a282b8f5..b8d367b5aa 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -336,7 +336,7 @@ class ExecutionPayload(Container): deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] - consolidations_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP7251] + consolidation_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP7251] ``` #### `ExecutionPayloadHeader` @@ -364,7 +364,7 @@ class ExecutionPayloadHeader(Container): excess_blob_gas: uint64 deposit_receipts_root: Root # [New in Electra:EIP6110] withdrawal_requests_root: Root # [New in Electra:EIP7002:EIP7251] - consolidations_requests_root: Root # [New in Electra:EIP7251] + consolidation_requests_root: Root # [New in Electra:EIP7251] ``` #### `BeaconState` @@ -1003,7 +1003,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi excess_blob_gas=payload.excess_blob_gas, deposit_receipts_root=hash_tree_root(payload.deposit_receipts), # [New in Electra:EIP6110] withdrawal_requests_root=hash_tree_root(payload.withdrawal_requests), # [New in Electra:EIP7002:EIP7251] - consolidations_root=hash_tree_root(payload.consolidations), # [New in Electra:EIP7251] + consolidation_requests_root=hash_tree_root(payload.consolidation_requests), # [New in Electra:EIP7251] ) ``` @@ -1036,7 +1036,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, 
process_execution_layer_withdrawal_request) for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] - for_ops(body.execution_payload.consolidations_requests, process_execution_layer_consolidation_request) # [New in Electra:EIP7251] + for_ops(body.execution_payload.consolidation_requests, process_execution_layer_consolidation_request) # [New in Electra:EIP7251] ``` ##### Attestations @@ -1318,7 +1318,7 @@ def process_execution_layer_consolidation_request( # Verify source withdrawal credentials has_correct_credential = has_execution_withdrawal_credential(source_validator) is_correct_source_address = ( - source_validator.withdrawal_credentials[12:] == consolidation.source_address + source_validator.withdrawal_credentials[12:] == execution_layer_consolidation_request.source_address ) if not (has_correct_credential and is_correct_source_address): return diff --git a/specs/electra/fork.md b/specs/electra/fork.md index a65bdebb53..0660aaf7f7 100644 --- a/specs/electra/fork.md +++ b/specs/electra/fork.md @@ -92,7 +92,7 @@ def upgrade_to_electra(pre: deneb.BeaconState) -> BeaconState: excess_blob_gas=pre.latest_execution_payload_header.excess_blob_gas, deposit_receipts_root=Root(), # [New in Electra:EIP6110] withdrawal_requests_root=Root(), # [New in Electra:EIP7002] - consolidations_requests_root=Root(), # [New in Electra:EIP7251] + consolidation_requests_root=Root(), # [New in Electra:EIP7251] ) exit_epochs = [v.exit_epoch for v in pre.validators if v.exit_epoch != FAR_FUTURE_EPOCH] From c17f22fedc0efc39cd04e6c5f4442908ebe9cd90 Mon Sep 17 00:00:00 2001 From: fradamt Date: Tue, 21 May 2024 22:46:44 +0200 Subject: [PATCH 42/89] add missing decorators --- .../test_process_execution_layer_consolidation.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py index 7f0b53ba99..ce31da6acd 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py @@ -662,12 +662,11 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, ) - set_eth1_withdrawal_credential_with_balance(spec, state, target_index) yield from run_consolidation_processing( spec, state, consolidation, success=False ) - +@with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, @@ -698,6 +697,7 @@ def test_invalid_incorrect_source_address(spec, state): ) +@with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, @@ -727,6 +727,7 @@ def test_invalid_unknown_source_pubkey(spec, state): ) +@with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( balances_fn=scaled_churn_balances_exceed_activation_exit_churn_limit, From 6b6936836acc5f06f6b0cde738e2ad361f195636 Mon Sep 17 00:00:00 2001 From: fradamt Date: Tue, 21 May 2024 22:54:25 +0200 Subject: [PATCH 43/89] Revert "minor fixes, doctoc" This 
reverts commit dc2a2bd85ae1e8c7c2e426faba3167069ef548a5. --- README.md | 18 -------- SECURITY.md | 10 ----- configs/README.md | 10 ----- docker/README.md | 8 ---- docs/docs/templates/beacon-chain-template.md | 18 -------- docs/light-client/index.md | 8 ---- presets/README.md | 10 ----- solidity_deposit_contract/README.md | 12 ------ specs/electra/beacon-chain.md | 11 ++--- tests/README.md | 17 -------- tests/core/pyspec/README.md | 17 -------- tests/core/pyspec/eth2spec/config/README.md | 9 ---- .../pyspec/eth2spec/gen_helpers/README.md | 11 ----- ...t_process_execution_layer_consolidation.py | 42 +++++++++---------- tests/formats/README.md | 29 ------------- tests/formats/bls/README.md | 8 ---- tests/formats/bls/aggregate.md | 10 ----- tests/formats/bls/aggregate_verify.md | 10 ----- tests/formats/bls/eth_aggregate_pubkeys.md | 10 ----- .../formats/bls/eth_fast_aggregate_verify.md | 10 ----- tests/formats/bls/fast_aggregate_verify.md | 10 ----- tests/formats/bls/sign.md | 10 ----- tests/formats/bls/verify.md | 9 ---- tests/formats/epoch_processing/README.md | 13 ------ tests/formats/finality/README.md | 14 ------- tests/formats/forks/README.md | 15 ------- tests/formats/genesis/README.md | 8 ---- tests/formats/genesis/initialization.md | 16 ------- tests/formats/genesis/validity.md | 14 ------- tests/formats/kzg_4844/README.md | 8 ---- .../kzg_4844/blob_to_kzg_commitment.md | 10 ----- .../kzg_4844/compute_blob_kzg_proof.md | 10 ----- tests/formats/kzg_4844/compute_kzg_proof.md | 10 ----- .../formats/kzg_4844/verify_blob_kzg_proof.md | 10 ----- .../kzg_4844/verify_blob_kzg_proof_batch.md | 10 ----- tests/formats/kzg_4844/verify_kzg_proof.md | 10 ----- tests/formats/light_client/README.md | 8 ---- .../light_client/single_merkle_proof.md | 12 ------ tests/formats/light_client/sync.md | 17 -------- tests/formats/light_client/update_ranking.md | 12 ------ tests/formats/merkle_proof/README.md | 8 ---- tests/formats/operations/README.md | 14 ------- tests/formats/random/README.md | 9 ---- tests/formats/rewards/README.md | 17 -------- tests/formats/sanity/README.md | 8 ---- tests/formats/sanity/blocks.md | 14 ------- tests/formats/sanity/slots.md | 15 ------- tests/formats/shuffling/README.md | 11 ----- tests/formats/ssz_generic/README.md | 23 ---------- tests/formats/ssz_static/README.md | 8 ---- tests/formats/ssz_static/core.md | 14 ------- tests/formats/sync/README.md | 8 ---- tests/formats/transition/README.md | 14 ------- tests/generators/README.md | 1 + tests/generators/bls/README.md | 9 ---- tests/generators/epoch_processing/README.md | 8 ---- tests/generators/finality/README.md | 8 ---- tests/generators/fork_choice/README.md | 8 ---- tests/generators/genesis/README.md | 8 ---- tests/generators/kzg_4844/README.md | 8 ---- tests/generators/light_client/README.md | 8 ---- tests/generators/merkle_proof/README.md | 8 ---- tests/generators/operations/README.md | 8 ---- tests/generators/random/README.md | 11 ----- tests/generators/rewards/README.md | 8 ---- tests/generators/sanity/README.md | 8 ---- tests/generators/shuffling/README.md | 8 ---- tests/generators/ssz_static/README.md | 8 ---- 68 files changed, 28 insertions(+), 756 deletions(-) diff --git a/README.md b/README.md index e66e79b097..58bff5b9e4 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Ethereum Proof-of-Stake Consensus Specifications](#ethereum-proof-of-stake-consensus-specifications) - - [Specs](#specs) - - [Stable 
Specifications](#stable-specifications) - - [In-development Specifications](#in-development-specifications) - - [Accompanying documents can be found in specs and include:](#accompanying-documents-can-be-found-in-specs-and-include) - - [Additional specifications for client implementers](#additional-specifications-for-client-implementers) - - [Design goals](#design-goals) - - [Useful external resources](#useful-external-resources) - - [For spec contributors](#for-spec-contributors) - - [Online viewer of the latest release (latest `master` branch)](#online-viewer-of-the-latest-release-latest-master-branch) - - [Consensus spec tests](#consensus-spec-tests) - - - # Ethereum Proof-of-Stake Consensus Specifications [![Join the chat at https://discord.gg/qGpsxSA](https://img.shields.io/badge/chat-on%20discord-blue.svg)](https://discord.gg/qGpsxSA) diff --git a/SECURITY.md b/SECURITY.md index a770f5749e..2101ea1554 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Security Policy](#security-policy) - - [Supported Versions](#supported-versions) - - [Reporting a Vulnerability](#reporting-a-vulnerability) - - - # Security Policy ## Supported Versions diff --git a/configs/README.md b/configs/README.md index 82b7783682..6ef081e4c4 100644 --- a/configs/README.md +++ b/configs/README.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Configurations](#configurations) - - [Forking](#forking) - - [Format](#format) - - - # Configurations This directory contains a set of configurations used for testing, testnets, and mainnet. diff --git a/docker/README.md b/docker/README.md index 29c7faa3f5..6d5b21e59d 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Docker related information](#docker-related-information) - - - ## Docker related information This dockerfile sets up the dependencies required to run consensus-spec tests. 
The docker image can be locally built with: diff --git a/docs/docs/templates/beacon-chain-template.md b/docs/docs/templates/beacon-chain-template.md index 04d61b071f..4d22d3908e 100644 --- a/docs/docs/templates/beacon-chain-template.md +++ b/docs/docs/templates/beacon-chain-template.md @@ -7,24 +7,6 @@ -- [Introduction](#introduction) -- [Notation](#notation) -- [Custom types](#custom-types) -- [Constants](#constants) - - [[CATEGORY OF CONSTANTS]](#category-of-constants) -- [Preset](#preset) - - [[CATEGORY OF PRESETS]](#category-of-presets) -- [Configuration](#configuration) - - [[CATEGORY OF CONFIGURATIONS]](#category-of-configurations) -- [Containers](#containers) - - [[CATEGORY OF CONTAINERS]](#category-of-containers) - - [`CONTAINER_NAME`](#container_name) -- [Helper functions](#helper-functions) - - [[CATEGORY OF HELPERS]](#category-of-helpers) - - [Epoch processing](#epoch-processing) - - [Block processing](#block-processing) -- [Testing](#testing) - diff --git a/docs/light-client/index.md b/docs/light-client/index.md index 0431126eeb..32155b1852 100644 --- a/docs/light-client/index.md +++ b/docs/light-client/index.md @@ -1,9 +1 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Light client specifications](#light-client-specifications) - - - # Light client specifications diff --git a/presets/README.md b/presets/README.md index 00709d653b..3a438cb2ca 100644 --- a/presets/README.md +++ b/presets/README.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Presets](#presets) - - [Forking](#forking) - - [Format](#format) - - - # Presets Presets are more extensive than runtime configurations, and generally only applicable during compile-time. 
diff --git a/solidity_deposit_contract/README.md b/solidity_deposit_contract/README.md index 139adf8b11..0388d7d2f5 100644 --- a/solidity_deposit_contract/README.md +++ b/solidity_deposit_contract/README.md @@ -1,15 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Deposit Contract](#deposit-contract) - - [History](#history) - - [Compiling solidity deposit contract](#compiling-solidity-deposit-contract) - - [Running web3 tests](#running-web3-tests) - - [Running randomized `dapp` tests:](#running-randomized-dapp-tests) - - - # Deposit Contract ## History diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index b8d367b5aa..9cfff26e9a 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -28,7 +28,8 @@ - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - [`ExecutionLayerWithdrawalRequest`](#executionlayerwithdrawalrequest) - - [`ExecutionLayerConsolidationRequest`](#executionlayerconsolidationrequest) + - [`Consolidation`](#consolidation) + - [`SignedConsolidation`](#signedconsolidation) - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - [`AttesterSlashing`](#attesterslashing) @@ -93,8 +94,8 @@ - [New `process_execution_layer_withdrawal_request`](#new-process_execution_layer_withdrawal_request) - [Deposit receipts](#deposit-receipts) - [New `process_deposit_receipt`](#new-process_deposit_receipt) - - [Execution layer consolidation requests](#execution-layer-consolidation-requests) - - [New `process_execution_layer_consolidation_request`](#new-process_execution_layer_consolidation_request) + - [Consolidations](#consolidations) + - [New `process_consolidation`](#new-process_consolidation) - [Testing](#testing) @@ -1285,10 +1286,10 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) ##### Execution layer consolidation requests -###### New `process_execution_layer_consolidation_request` +###### New `process_execution_layer_consolidation_requests` ```python -def process_execution_layer_consolidation_request( +def process_execution_layer_consolidation_requests( state: BeaconState, execution_layer_consolidation_request: ExecutionLayerConsolidationRequest) -> None: # If the pending consolidations queue is full, consolidation requests are ignored diff --git a/tests/README.md b/tests/README.md index 46a7bf2ddf..dbd2b31de2 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,20 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Getting Started with Consensus Spec Tests](#getting-started-with-consensus-spec-tests) - - [Getting Started](#getting-started) - - [Creating the environment](#creating-the-environment) - - [Running your first test](#running-your-first-test) - - [The "Hello, World" of Consensus Spec Tests](#the-hello-world-of-consensus-spec-tests) - - [New Tests](#new-tests) - - [Tests Designed to Fail](#tests-designed-to-fail) - - [Attestation Tests](#attestation-tests) - - [Adding an Attestation Test](#adding-an-attestation-test) - - [How are These Tests Used?](#how-are-these-tests-used) - - - # Getting Started with Consensus Spec Tests ## Getting Started diff --git a/tests/core/pyspec/README.md b/tests/core/pyspec/README.md index ca177cf772..baa1322771 100644 --- a/tests/core/pyspec/README.md +++ b/tests/core/pyspec/README.md @@ -1,20 +1,3 @@ - - -**Table of Contents** *generated with 
[DocToc](https://github.com/thlorenz/doctoc)* - -- [Executable Python Spec (PySpec)](#executable-python-spec-pyspec) - - [Dev Install](#dev-install) - - [Py-tests](#py-tests) - - [How to run tests](#how-to-run-tests) - - [Automated](#automated) - - [Manual](#manual) - - [How to view code coverage report](#how-to-view-code-coverage-report) - - [Advanced](#advanced) - - [Contributing](#contributing) - - [License](#license) - - - # Executable Python Spec (PySpec) The executable Python spec is built from the consensus specifications, diff --git a/tests/core/pyspec/eth2spec/config/README.md b/tests/core/pyspec/eth2spec/config/README.md index 7d6a044de3..c03d890c20 100644 --- a/tests/core/pyspec/eth2spec/config/README.md +++ b/tests/core/pyspec/eth2spec/config/README.md @@ -1,12 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Consensus specs config util](#consensus-specs-config-util) - - [Config usage:](#config-usage) - - - # Consensus specs config util For run-time configuration, see [Configs documentation](../../../../../configs/README.md). diff --git a/tests/core/pyspec/eth2spec/gen_helpers/README.md b/tests/core/pyspec/eth2spec/gen_helpers/README.md index 88f8fcf4b7..bf791ccfea 100644 --- a/tests/core/pyspec/eth2spec/gen_helpers/README.md +++ b/tests/core/pyspec/eth2spec/gen_helpers/README.md @@ -1,14 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Consensus test generator helpers](#consensus-test-generator-helpers) - - [`gen_base`](#gen_base) - - [`gen_from_tests`](#gen_from_tests) - - [Test-case parts](#test-case-parts) - - - # Consensus test generator helpers ## `gen_base` diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py index ce31da6acd..54e88fef4f 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py @@ -40,7 +40,7 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -89,7 +89,7 @@ def test_basic_consolidation_in_new_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -132,7 +132,7 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -179,7 +179,7 @@ def 
test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -230,7 +230,7 @@ def test_basic_consolidation_with_compounding_credentials(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -276,7 +276,7 @@ def test_consolidation_churn_limit_balance(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -324,7 +324,7 @@ def test_consolidation_balance_larger_than_churn_limit(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -371,7 +371,7 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -417,7 +417,7 @@ def test_invalid_source_equals_target(spec, state): spec, state, source_index, address=source_address ) # Make consolidation from source to source - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[source_index].pubkey, @@ -449,7 +449,7 @@ def test_invalid_exceed_pending_consolidations_limit(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -478,7 +478,7 @@ def test_invalid_not_enough_consolidation_churn_available(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -506,7 +506,7 @@ def test_invalid_exited_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, 
address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -538,7 +538,7 @@ def test_invalid_exited_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -568,7 +568,7 @@ def test_invalid_inactive_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -600,7 +600,7 @@ def test_invalid_inactive_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -629,7 +629,7 @@ def test_invalid_no_source_execution_withdrawal_credential(spec, state): source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] source_address = b"\x22" * 20 - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -657,7 +657,7 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -684,7 +684,7 @@ def test_invalid_incorrect_source_address(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -715,7 +715,7 @@ def test_invalid_unknown_source_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source pubkey - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ExecutionLayerConsolidation( source_address=source_address, source_pubkey=b"\x00" * 48, target_pubkey=state.validators[target_index].pubkey, @@ -745,7 +745,7 @@ def test_invalid_unknown_target_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different target pubkey - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = 
spec.ExecutionLayerConsolidation( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=b"\x00" * 48, @@ -783,7 +783,7 @@ def run_consolidation_processing(spec, state, consolidation, success=True): yield 'pre', state yield 'consolidation', consolidation - spec.process_execution_layer_consolidation_request(state, consolidation) + spec.process_execution_layer_consolidation(state, consolidation) yield 'post', state diff --git a/tests/formats/README.md b/tests/formats/README.md index 0fbf3cd9ea..ec495daa5b 100644 --- a/tests/formats/README.md +++ b/tests/formats/README.md @@ -1,32 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [General test format](#general-test-format) - - [Table of contents](#table-of-contents) - - [About](#about) - - [Test-case formats](#test-case-formats) - - [Glossary](#glossary) - - [Test format philosophy](#test-format-philosophy) - - [Config design](#config-design) - - [Test completeness](#test-completeness) - - [Test structure](#test-structure) - - [`/`](#config-name) - - [`/`](#fork-or-phase-name) - - [`/`](#test-runner-name) - - [`/`](#test-handler-name) - - [`/`](#test-suite-name) - - [`/`](#test-case) - - [``](#output-part) - - [Common output formats](#common-output-formats) - - [Special output parts](#special-output-parts) - - [`meta.yaml`](#metayaml) - - [`config.yaml`](#configyaml) - - [Config sourcing](#config-sourcing) - - [Note for implementers](#note-for-implementers) - - - # General test format This document defines the YAML format and structure used for consensus spec testing. diff --git a/tests/formats/bls/README.md b/tests/formats/bls/README.md index d4e3ea9035..77a9654a8d 100644 --- a/tests/formats/bls/README.md +++ b/tests/formats/bls/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [BLS tests](#bls-tests) - - - # BLS tests A test type for BLS. Primarily geared towards verifying the *integration* of any BLS library. diff --git a/tests/formats/bls/aggregate.md b/tests/formats/bls/aggregate.md index 4f86bc0d3f..7cdebcf4d9 100644 --- a/tests/formats/bls/aggregate.md +++ b/tests/formats/bls/aggregate.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: BLS signature aggregation](#test-format-bls-signature-aggregation) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: BLS signature aggregation A BLS signature aggregation combines a series of signatures into a single signature. diff --git a/tests/formats/bls/aggregate_verify.md b/tests/formats/bls/aggregate_verify.md index 0e8414c00b..9b251af46e 100644 --- a/tests/formats/bls/aggregate_verify.md +++ b/tests/formats/bls/aggregate_verify.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: BLS sign message](#test-format-bls-sign-message) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: BLS sign message Verify the signature against the given pubkeys and one messages. 
diff --git a/tests/formats/bls/eth_aggregate_pubkeys.md b/tests/formats/bls/eth_aggregate_pubkeys.md index 4a61330a30..2b72c1dcaf 100644 --- a/tests/formats/bls/eth_aggregate_pubkeys.md +++ b/tests/formats/bls/eth_aggregate_pubkeys.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Ethereum-customized BLS pubkey aggregation](#test-format-ethereum-customized-bls-pubkey-aggregation) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Ethereum-customized BLS pubkey aggregation A BLS pubkey aggregation combines a series of pubkeys into a single pubkey. diff --git a/tests/formats/bls/eth_fast_aggregate_verify.md b/tests/formats/bls/eth_fast_aggregate_verify.md index 66a3728a0d..83b5484e05 100644 --- a/tests/formats/bls/eth_fast_aggregate_verify.md +++ b/tests/formats/bls/eth_fast_aggregate_verify.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Ethereum-customized BLS fast aggregate verify](#test-format-ethereum-customized-bls-fast-aggregate-verify) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Ethereum-customized BLS fast aggregate verify Verify the signature against the given pubkeys and one message. diff --git a/tests/formats/bls/fast_aggregate_verify.md b/tests/formats/bls/fast_aggregate_verify.md index b7a94b1c00..38ea29bb5f 100644 --- a/tests/formats/bls/fast_aggregate_verify.md +++ b/tests/formats/bls/fast_aggregate_verify.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: BLS fast aggregate verify](#test-format-bls-fast-aggregate-verify) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: BLS fast aggregate verify Verify the signature against the given pubkeys and one message. diff --git a/tests/formats/bls/sign.md b/tests/formats/bls/sign.md index 96756c9e3f..09e9286148 100644 --- a/tests/formats/bls/sign.md +++ b/tests/formats/bls/sign.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: BLS sign message](#test-format-bls-sign-message) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: BLS sign message Message signing with BLS should produce a signature. diff --git a/tests/formats/bls/verify.md b/tests/formats/bls/verify.md index 0fd5f43f29..57ec8a33a7 100644 --- a/tests/formats/bls/verify.md +++ b/tests/formats/bls/verify.md @@ -1,12 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: BLS sign message](#test-format-bls-sign-message) - - [Test case format](#test-case-format) - - - # Test format: BLS sign message Verify the signature against the given one pubkey and one message. 
diff --git a/tests/formats/epoch_processing/README.md b/tests/formats/epoch_processing/README.md index 60eee01179..2951767f2c 100644 --- a/tests/formats/epoch_processing/README.md +++ b/tests/formats/epoch_processing/README.md @@ -1,16 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Epoch processing tests](#epoch-processing-tests) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`post.ssz_snappy`](#postssz_snappy) - - [Condition](#condition) - - - # Epoch processing tests The different epoch sub-transitions are tested individually with test handlers. diff --git a/tests/formats/finality/README.md b/tests/formats/finality/README.md index 2cdb820c79..af39f5c8ca 100644 --- a/tests/formats/finality/README.md +++ b/tests/formats/finality/README.md @@ -1,17 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Finality tests](#finality-tests) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`blocks_.yaml`](#blocks_indexyaml) - - [`post.ssz_snappy`](#postssz_snappy) - - [Condition](#condition) - - - # Finality tests The aim of the tests for the finality rules. diff --git a/tests/formats/forks/README.md b/tests/formats/forks/README.md index 6c685e41f6..dfbaf2df0b 100644 --- a/tests/formats/forks/README.md +++ b/tests/formats/forks/README.md @@ -1,18 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Forks](#forks) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [Fork strings](#fork-strings) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`post.ssz_snappy`](#postssz_snappy) - - [Processing](#processing) - - [Condition](#condition) - - - # Forks The aim of the fork tests is to ensure that a pre-fork state can be transformed diff --git a/tests/formats/genesis/README.md b/tests/formats/genesis/README.md index 110c3c9524..25761e2f6a 100644 --- a/tests/formats/genesis/README.md +++ b/tests/formats/genesis/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Genesis tests](#genesis-tests) - - - # Genesis tests The aim of the genesis tests is to provide a baseline to test genesis-state initialization and test diff --git a/tests/formats/genesis/initialization.md b/tests/formats/genesis/initialization.md index aebba624af..9848e157d9 100644 --- a/tests/formats/genesis/initialization.md +++ b/tests/formats/genesis/initialization.md @@ -1,19 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Genesis creation testing](#genesis-creation-testing) - - [Test case format](#test-case-format) - - [`eth1.yaml`](#eth1yaml) - - [`meta.yaml`](#metayaml) - - [`deposits_.ssz_snappy`](#deposits_indexssz_snappy) - - [`execution_payload_header.ssz_snappy`](#execution_payload_headerssz_snappy) - - [`state.ssz_snappy`](#statessz_snappy) - - [Processing](#processing) - - [Condition](#condition) - - - # Genesis creation testing Tests the initialization of a genesis state based on Eth1 data. 
diff --git a/tests/formats/genesis/validity.md b/tests/formats/genesis/validity.md index 2ad42feb67..15236c3ba3 100644 --- a/tests/formats/genesis/validity.md +++ b/tests/formats/genesis/validity.md @@ -1,17 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Genesis validity testing](#genesis-validity-testing) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`genesis.ssz_snappy`](#genesisssz_snappy) - - [`is_valid.yaml`](#is_validyaml) - - [Processing](#processing) - - [Condition](#condition) - - - # Genesis validity testing Tests if a genesis state is valid, i.e. if it counts as trigger to launch. diff --git a/tests/formats/kzg_4844/README.md b/tests/formats/kzg_4844/README.md index f5afa9ed12..b5bd720393 100644 --- a/tests/formats/kzg_4844/README.md +++ b/tests/formats/kzg_4844/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [KZG tests](#kzg-tests) - - - # KZG tests A test type for KZG libraries. Tests all the public interfaces that a KZG library required to implement EIP-4844 needs to provide, as defined in `polynomial-commitments.md`. diff --git a/tests/formats/kzg_4844/blob_to_kzg_commitment.md b/tests/formats/kzg_4844/blob_to_kzg_commitment.md index fdc710edfd..dbb1556a1d 100644 --- a/tests/formats/kzg_4844/blob_to_kzg_commitment.md +++ b/tests/formats/kzg_4844/blob_to_kzg_commitment.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Blob to KZG commitment](#test-format-blob-to-kzg-commitment) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Blob to KZG commitment Compute the KZG commitment for a given `blob`. diff --git a/tests/formats/kzg_4844/compute_blob_kzg_proof.md b/tests/formats/kzg_4844/compute_blob_kzg_proof.md index 32a9f97104..62fce37231 100644 --- a/tests/formats/kzg_4844/compute_blob_kzg_proof.md +++ b/tests/formats/kzg_4844/compute_blob_kzg_proof.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Compute blob KZG proof](#test-format-compute-blob-kzg-proof) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Compute blob KZG proof Compute the blob KZG proof for a given `blob`, that helps with quickly verifying that the KZG commitment for the blob is correct. diff --git a/tests/formats/kzg_4844/compute_kzg_proof.md b/tests/formats/kzg_4844/compute_kzg_proof.md index e85616539d..b10105129b 100644 --- a/tests/formats/kzg_4844/compute_kzg_proof.md +++ b/tests/formats/kzg_4844/compute_kzg_proof.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Compute KZG proof](#test-format-compute-kzg-proof) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Compute KZG proof Compute the KZG proof for a given `blob` and an evaluation point `z`. 
diff --git a/tests/formats/kzg_4844/verify_blob_kzg_proof.md b/tests/formats/kzg_4844/verify_blob_kzg_proof.md index 9a62ba92a4..dd0bcda5a9 100644 --- a/tests/formats/kzg_4844/verify_blob_kzg_proof.md +++ b/tests/formats/kzg_4844/verify_blob_kzg_proof.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Verify blob KZG proof](#test-format-verify-blob-kzg-proof) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Verify blob KZG proof Use the blob KZG proof to verify that the KZG commitment for a given `blob` is correct diff --git a/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md b/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md index 040446f69a..82e668497d 100644 --- a/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md +++ b/tests/formats/kzg_4844/verify_blob_kzg_proof_batch.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Verify blob KZG proof batch](#test-format-verify-blob-kzg-proof-batch) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Verify blob KZG proof batch Use the blob KZG proofs to verify that the KZG commitments for given `blobs` are correct diff --git a/tests/formats/kzg_4844/verify_kzg_proof.md b/tests/formats/kzg_4844/verify_kzg_proof.md index bb4c1547f1..18e02710c5 100644 --- a/tests/formats/kzg_4844/verify_kzg_proof.md +++ b/tests/formats/kzg_4844/verify_kzg_proof.md @@ -1,13 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: Verify KZG proof](#test-format-verify-kzg-proof) - - [Test case format](#test-case-format) - - [Condition](#condition) - - - # Test format: Verify KZG proof Verify the KZG proof for a given `blob` and an evaluation point `z` that claims to result in a value of `y`. diff --git a/tests/formats/light_client/README.md b/tests/formats/light_client/README.md index 84f06f58c1..505b416019 100644 --- a/tests/formats/light_client/README.md +++ b/tests/formats/light_client/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Light client sync protocol tests](#light-client-sync-protocol-tests) - - - # Light client sync protocol tests This series of tests provides reference test vectors for the light client sync protocol spec. 
diff --git a/tests/formats/light_client/single_merkle_proof.md b/tests/formats/light_client/single_merkle_proof.md index 789603af57..0cb4cd0d0c 100644 --- a/tests/formats/light_client/single_merkle_proof.md +++ b/tests/formats/light_client/single_merkle_proof.md @@ -1,15 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Single leaf merkle proof tests](#single-leaf-merkle-proof-tests) - - [Test case format](#test-case-format) - - [`object.ssz_snappy`](#objectssz_snappy) - - [`proof.yaml`](#proofyaml) - - [Condition](#condition) - - - # Single leaf merkle proof tests This series of tests provides reference test vectors for validating correct diff --git a/tests/formats/light_client/sync.md b/tests/formats/light_client/sync.md index f2a52f167e..1706b4c162 100644 --- a/tests/formats/light_client/sync.md +++ b/tests/formats/light_client/sync.md @@ -1,20 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Light client sync tests](#light-client-sync-tests) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`bootstrap.ssz_snappy`](#bootstrapssz_snappy) - - [`steps.yaml`](#stepsyaml) - - [Checks to run after each step](#checks-to-run-after-each-step) - - [`force_update` execution step](#force_update-execution-step) - - [`process_update` execution step](#process_update-execution-step) - - [`upgrade_store`](#upgrade_store) - - [Condition](#condition) - - - # Light client sync tests This series of tests provides reference test vectors for validating that a light client implementing the sync protocol can sync to the latest block header. diff --git a/tests/formats/light_client/update_ranking.md b/tests/formats/light_client/update_ranking.md index 4640f7860b..fe73fb9df7 100644 --- a/tests/formats/light_client/update_ranking.md +++ b/tests/formats/light_client/update_ranking.md @@ -1,15 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [`LightClientUpdate` ranking tests](#lightclientupdate-ranking-tests) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`updates_.ssz_snappy`](#updates_indexssz_snappy) - - [Condition](#condition) - - - # `LightClientUpdate` ranking tests This series of tests provides reference test vectors for validating that `LightClientUpdate` instances are ranked in a canonical order. diff --git a/tests/formats/merkle_proof/README.md b/tests/formats/merkle_proof/README.md index 5791dd5283..77822daabe 100644 --- a/tests/formats/merkle_proof/README.md +++ b/tests/formats/merkle_proof/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Merkle proof tests](#merkle-proof-tests) - - - # Merkle proof tests Handlers: diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index e3627cf30b..b020b5fd03 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -1,17 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Operations tests](#operations-tests) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`.ssz_snappy`](#input-namessz_snappy) - - [`post.ssz_snappy`](#postssz_snappy) - - [Condition](#condition) - - - # Operations tests The different kinds of operations ("transactions") are tested individually with test handlers. 
diff --git a/tests/formats/random/README.md b/tests/formats/random/README.md index b6ac4429fd..54b2c1a23e 100644 --- a/tests/formats/random/README.md +++ b/tests/formats/random/README.md @@ -1,12 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Random tests](#random-tests) - - [Test case format](#test-case-format) - - - # Random tests The random tests are generated with various randomized states and blocks. diff --git a/tests/formats/rewards/README.md b/tests/formats/rewards/README.md index 7aa36adb1b..a6682042f7 100644 --- a/tests/formats/rewards/README.md +++ b/tests/formats/rewards/README.md @@ -1,20 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Rewards tests](#rewards-tests) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`source_deltas.ssz_snappy`](#source_deltasssz_snappy) - - [`target_deltas.ssz_snappy`](#target_deltasssz_snappy) - - [`head_deltas.ssz_snappy`](#head_deltasssz_snappy) - - [`inclusion_delay_deltas.ssz_snappy`](#inclusion_delay_deltasssz_snappy) - - [`inactivity_penalty_deltas.ssz_snappy`](#inactivity_penalty_deltasssz_snappy) - - [Condition](#condition) - - - # Rewards tests All rewards deltas sub-functions are tested for each test case. diff --git a/tests/formats/sanity/README.md b/tests/formats/sanity/README.md index d2b26709ee..20b36208a4 100644 --- a/tests/formats/sanity/README.md +++ b/tests/formats/sanity/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Sanity tests](#sanity-tests) - - - # Sanity tests The aim of the sanity tests is to set a base-line on what really needs to pass, i.e. the essentials. diff --git a/tests/formats/sanity/blocks.md b/tests/formats/sanity/blocks.md index 1c1bc3c92c..7ea646b9e0 100644 --- a/tests/formats/sanity/blocks.md +++ b/tests/formats/sanity/blocks.md @@ -1,17 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Sanity blocks testing](#sanity-blocks-testing) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`blocks_.ssz_snappy`](#blocks_indexssz_snappy) - - [`post.ssz_snappy`](#postssz_snappy) - - [Condition](#condition) - - - # Sanity blocks testing Sanity tests to cover a series of one or more blocks being processed, aiming to cover common changes. diff --git a/tests/formats/sanity/slots.md b/tests/formats/sanity/slots.md index 54083f5cf0..f1b8a13219 100644 --- a/tests/formats/sanity/slots.md +++ b/tests/formats/sanity/slots.md @@ -1,18 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Sanity slots testing](#sanity-slots-testing) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`slots.yaml`](#slotsyaml) - - [`post.ssz_snappy`](#postssz_snappy) - - [Processing](#processing) - - [Condition](#condition) - - - # Sanity slots testing Sanity tests to cover a series of one or more empty-slot transitions being processed, aiming to cover common changes. 
diff --git a/tests/formats/shuffling/README.md b/tests/formats/shuffling/README.md index 1334362f49..15bfe6996b 100644 --- a/tests/formats/shuffling/README.md +++ b/tests/formats/shuffling/README.md @@ -1,14 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: shuffling](#test-format-shuffling) - - [Test case format](#test-case-format) - - [`mapping.yaml`](#mappingyaml) - - [Condition](#condition) - - - # Test format: shuffling The runner of the Shuffling test type has only one handler: `core`. diff --git a/tests/formats/ssz_generic/README.md b/tests/formats/ssz_generic/README.md index c95ef2aad6..c46025847a 100644 --- a/tests/formats/ssz_generic/README.md +++ b/tests/formats/ssz_generic/README.md @@ -1,26 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [SSZ, generic tests](#ssz-generic-tests) - - [Format](#format) - - [`valid`](#valid) - - [`meta.yaml`](#metayaml) - - [`serialized.ssz_snappy`](#serializedssz_snappy) - - [`value.yaml`](#valueyaml) - - [Conditions](#conditions) - - [`invalid`](#invalid) - - [Condition](#condition) - - [Type declarations](#type-declarations) - - [`basic_vector`](#basic_vector) - - [`bitlist`](#bitlist) - - [`bitvector`](#bitvector) - - [`boolean`](#boolean) - - [`uints`](#uints) - - [`containers`](#containers) - - - # SSZ, generic tests This set of test-suites provides general testing for SSZ: diff --git a/tests/formats/ssz_static/README.md b/tests/formats/ssz_static/README.md index 3cef2de714..ffa7373349 100644 --- a/tests/formats/ssz_static/README.md +++ b/tests/formats/ssz_static/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [SSZ, static tests](#ssz-static-tests) - - - # SSZ, static tests This set of test-suites provides static testing for SSZ: diff --git a/tests/formats/ssz_static/core.md b/tests/formats/ssz_static/core.md index 6995de9abb..09ff04e20d 100644 --- a/tests/formats/ssz_static/core.md +++ b/tests/formats/ssz_static/core.md @@ -1,17 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Test format: SSZ static types](#test-format-ssz-static-types) - - [Test case format](#test-case-format) - - [`roots.yaml`](#rootsyaml) - - [`serialized.ssz_snappy`](#serializedssz_snappy) - - [`value.yaml`](#valueyaml) - - [Condition](#condition) - - [References](#references) - - - # Test format: SSZ static types The goal of this type is to provide clients with a solid reference for how the known SSZ objects should be encoded. diff --git a/tests/formats/sync/README.md b/tests/formats/sync/README.md index be95ba765f..ff9f8168cb 100644 --- a/tests/formats/sync/README.md +++ b/tests/formats/sync/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Sync tests](#sync-tests) - - - # Sync tests It re-uses the [fork choice test format](../fork_choice/README.md) to apply the test script. 
diff --git a/tests/formats/transition/README.md b/tests/formats/transition/README.md index cd4a23f293..7f89bdd610 100644 --- a/tests/formats/transition/README.md +++ b/tests/formats/transition/README.md @@ -1,17 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Transition testing](#transition-testing) - - [Test case format](#test-case-format) - - [`meta.yaml`](#metayaml) - - [`pre.ssz_snappy`](#pressz_snappy) - - [`blocks_.ssz_snappy`](#blocks_indexssz_snappy) - - [`post.ssz_snappy`](#postssz_snappy) - - [Condition](#condition) - - - # Transition testing Transition tests to cover processing the chain across a fork boundary. diff --git a/tests/generators/README.md b/tests/generators/README.md index 0dd1a87a65..0146ca35e8 100644 --- a/tests/generators/README.md +++ b/tests/generators/README.md @@ -14,6 +14,7 @@ An automated nightly tests release system, with a config filter applied, is bein + - [How to run generators](#how-to-run-generators) - [Cleaning](#cleaning) - [Running all test generators](#running-all-test-generators) diff --git a/tests/generators/bls/README.md b/tests/generators/bls/README.md index be19386372..24013f88e7 100644 --- a/tests/generators/bls/README.md +++ b/tests/generators/bls/README.md @@ -1,12 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [BLS Test Generator](#bls-test-generator) - - [Resources](#resources) - - - # BLS Test Generator The [BLS Signature APIs](../../../specs/phase0/beacon-chain.md#bls-signatures) diff --git a/tests/generators/epoch_processing/README.md b/tests/generators/epoch_processing/README.md index 4e7a8119c9..662b0b516d 100644 --- a/tests/generators/epoch_processing/README.md +++ b/tests/generators/epoch_processing/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Epoch processing](#epoch-processing) - - - # Epoch processing Epoch processing covers the sub-transitions during an epoch change. diff --git a/tests/generators/finality/README.md b/tests/generators/finality/README.md index 8686ffc445..dec5819c68 100644 --- a/tests/generators/finality/README.md +++ b/tests/generators/finality/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Finality tests](#finality-tests) - - - # Finality tests Finality tests cover regular state-transitions in a common block-list format to test finality rules. diff --git a/tests/generators/fork_choice/README.md b/tests/generators/fork_choice/README.md index 61f837d42d..e67b115ba1 100644 --- a/tests/generators/fork_choice/README.md +++ b/tests/generators/fork_choice/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Fork choice tests](#fork-choice-tests) - - - # Fork choice tests Fork choice tests cover the different forking cases with fork choice helper functions. diff --git a/tests/generators/genesis/README.md b/tests/generators/genesis/README.md index 3f218841ae..e270f6e35e 100644 --- a/tests/generators/genesis/README.md +++ b/tests/generators/genesis/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Genesis test generator](#genesis-test-generator) - - - # Genesis test generator Genesis tests cover the initialization and validity-based launch trigger for the Beacon Chain genesis state. 
diff --git a/tests/generators/kzg_4844/README.md b/tests/generators/kzg_4844/README.md index 61031ac1f4..ab81a85e86 100644 --- a/tests/generators/kzg_4844/README.md +++ b/tests/generators/kzg_4844/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [KZG 4844 Test Generator](#kzg-4844-test-generator) - - - # KZG 4844 Test Generator These tests are specific to the KZG API required for implementing EIP-4844 \ No newline at end of file diff --git a/tests/generators/light_client/README.md b/tests/generators/light_client/README.md index 2751c0874e..7eabc2520c 100644 --- a/tests/generators/light_client/README.md +++ b/tests/generators/light_client/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Light client tests](#light-client-tests) - - - # Light client tests The purpose of this test-generator is to provide test-vectors for validating the correct implementation of the light client sync protocol. diff --git a/tests/generators/merkle_proof/README.md b/tests/generators/merkle_proof/README.md index b5ee6c8b6c..fb4d05fda8 100644 --- a/tests/generators/merkle_proof/README.md +++ b/tests/generators/merkle_proof/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Merkle proof tests](#merkle-proof-tests) - - - # Merkle proof tests The purpose of this test-generator is to provide test-vectors for validating the correct implementation of the Merkle proof verification. diff --git a/tests/generators/operations/README.md b/tests/generators/operations/README.md index 2eac9c9f9e..a5d48c11b4 100644 --- a/tests/generators/operations/README.md +++ b/tests/generators/operations/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Operations](#operations) - - - # Operations Operations (or "transactions" in previous spec iterations), diff --git a/tests/generators/random/README.md b/tests/generators/random/README.md index 4c49226f8e..fd17284412 100644 --- a/tests/generators/random/README.md +++ b/tests/generators/random/README.md @@ -1,14 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Randomized tests](#randomized-tests) -- [To generate test sources](#to-generate-test-sources) -- [To run tests](#to-run-tests) -- [To generate spec tests (from the generated files)](#to-generate-spec-tests-from-the-generated-files) - - - # Randomized tests Randomized tests in the format of `sanity` blocks tests, with randomized operations. diff --git a/tests/generators/rewards/README.md b/tests/generators/rewards/README.md index 4233958924..60f106836a 100644 --- a/tests/generators/rewards/README.md +++ b/tests/generators/rewards/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Rewards](#rewards) - - - # Rewards Rewards covers the sub-functions of `process_rewards_and_penalties` for granular testing of components of the rewards function. 
diff --git a/tests/generators/sanity/README.md b/tests/generators/sanity/README.md index 31c59f84d3..cbc6aef06d 100644 --- a/tests/generators/sanity/README.md +++ b/tests/generators/sanity/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Sanity tests](#sanity-tests) - - - # Sanity tests Sanity tests cover regular state-transitions in a common block-list format, to ensure the basics work. diff --git a/tests/generators/shuffling/README.md b/tests/generators/shuffling/README.md index 0294c1ec6f..81ddaba15f 100644 --- a/tests/generators/shuffling/README.md +++ b/tests/generators/shuffling/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Shuffling Tests](#shuffling-tests) - - - # Shuffling Tests Tests for the swap-or-not shuffling in the beacon chain. diff --git a/tests/generators/ssz_static/README.md b/tests/generators/ssz_static/README.md index b557b726ad..3434fe174b 100644 --- a/tests/generators/ssz_static/README.md +++ b/tests/generators/ssz_static/README.md @@ -1,11 +1,3 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [SSZ-static](#ssz-static) - - - # SSZ-static The purpose of this test-generator is to provide test-vectors for the most important applications of SSZ: From f0ef76ada1885ab0023ba4c4bfed9462bd8cd79e Mon Sep 17 00:00:00 2001 From: fradamt Date: Tue, 21 May 2024 23:03:34 +0200 Subject: [PATCH 44/89] minor fixes --- specs/electra/beacon-chain.md | 4 +- ..._execution_layer_consolidation_request.py} | 42 +++++++++---------- 2 files changed, 23 insertions(+), 23 deletions(-) rename tests/core/pyspec/eth2spec/test/electra/block_processing/{test_process_execution_layer_consolidation.py => test_process_execution_layer_consolidation_request.py} (96%) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 9cfff26e9a..9b1b88f33d 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1286,10 +1286,10 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) ##### Execution layer consolidation requests -###### New `process_execution_layer_consolidation_requests` +###### New `process_execution_layer_consolidation_request` ```python -def process_execution_layer_consolidation_requests( +def process_execution_layer_consolidation_request( state: BeaconState, execution_layer_consolidation_request: ExecutionLayerConsolidationRequest) -> None: # If the pending consolidations queue is full, consolidation requests are ignored diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py similarity index 96% rename from tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py rename to tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py index 54e88fef4f..ce31da6acd 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py @@ -40,7 +40,7 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with 
source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -89,7 +89,7 @@ def test_basic_consolidation_in_new_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -132,7 +132,7 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -179,7 +179,7 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -230,7 +230,7 @@ def test_basic_consolidation_with_compounding_credentials(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -276,7 +276,7 @@ def test_consolidation_churn_limit_balance(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -324,7 +324,7 @@ def test_consolidation_balance_larger_than_churn_limit(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -371,7 +371,7 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -417,7 +417,7 @@ def test_invalid_source_equals_target(spec, state): spec, state, source_index, address=source_address ) # Make consolidation from source to source - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( 
source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[source_index].pubkey, @@ -449,7 +449,7 @@ def test_invalid_exceed_pending_consolidations_limit(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -478,7 +478,7 @@ def test_invalid_not_enough_consolidation_churn_available(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -506,7 +506,7 @@ def test_invalid_exited_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -538,7 +538,7 @@ def test_invalid_exited_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -568,7 +568,7 @@ def test_invalid_inactive_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -600,7 +600,7 @@ def test_invalid_inactive_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -629,7 +629,7 @@ def test_invalid_no_source_execution_withdrawal_credential(spec, state): source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] source_address = b"\x22" * 20 - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -657,7 +657,7 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, 
target_pubkey=state.validators[target_index].pubkey, @@ -684,7 +684,7 @@ def test_invalid_incorrect_source_address(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source address - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -715,7 +715,7 @@ def test_invalid_unknown_source_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source pubkey - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=source_address, source_pubkey=b"\x00" * 48, target_pubkey=state.validators[target_index].pubkey, @@ -745,7 +745,7 @@ def test_invalid_unknown_target_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different target pubkey - consolidation = spec.ExecutionLayerConsolidation( + consolidation = spec.ExecutionLayerConsolidationRequest( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=b"\x00" * 48, @@ -783,7 +783,7 @@ def run_consolidation_processing(spec, state, consolidation, success=True): yield 'pre', state yield 'consolidation', consolidation - spec.process_execution_layer_consolidation(state, consolidation) + spec.process_execution_layer_consolidation_request(state, consolidation) yield 'post', state From 1970b56f8fe0ab5dbc9de6e9bf26646d94c0ad98 Mon Sep 17 00:00:00 2001 From: fradamt Date: Wed, 22 May 2024 09:37:16 +0200 Subject: [PATCH 45/89] add consolidation_requests_root to get_execution_payload_header test helper --- .../core/pyspec/eth2spec/test/helpers/execution_payload.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index ef6e2f6442..78bd28e35a 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -37,6 +37,7 @@ def get_execution_payload_header(spec, execution_payload): if is_post_electra(spec): payload_header.deposit_receipts_root = spec.hash_tree_root(execution_payload.deposit_receipts) payload_header.withdrawal_requests_root = spec.hash_tree_root(execution_payload.withdrawal_requests) + payload_header.consolidation_requests_root = spec.hash_tree_root(execution_payload.consolidation_requests) return payload_header @@ -59,7 +60,8 @@ def compute_el_header_block_hash(spec, transactions_trie_root, withdrawals_trie_root=None, deposit_receipts_trie_root=None, - withdrawal_requests_root=None): + withdrawal_requests_root=None, + consolidation_requests_root=None): """ Computes the RLP execution block hash described by an `ExecutionPayloadHeader`. 
""" @@ -110,6 +112,8 @@ def compute_el_header_block_hash(spec, execution_payload_header_rlp.append((Binary(32, 32), deposit_receipts_trie_root)) # withdrawal requests root execution_payload_header_rlp.append((Binary(32, 32), withdrawal_requests_root)) + # consolidation requests root + execution_payload_header_rlp.append((Binary(32, 32), consolidation_requests_root)) sedes = List([schema for schema, _ in execution_payload_header_rlp]) values = [value for _, value in execution_payload_header_rlp] From 6a731e9115760fb3894e31eb754e8605c9a87745 Mon Sep 17 00:00:00 2001 From: fradamt Date: Wed, 22 May 2024 10:03:03 +0200 Subject: [PATCH 46/89] fix lint --- specs/electra/beacon-chain.md | 11 +++++++---- ...rocess_execution_layer_consolidation_request.py | 14 +++++--------- .../eth2spec/test/helpers/execution_payload.py | 2 -- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 9b1b88f33d..101be40f60 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -337,7 +337,8 @@ class ExecutionPayload(Container): deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] - consolidation_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP7251] + # [New in Electra:EIP7251] + consolidation_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] ``` #### `ExecutionPayloadHeader` @@ -365,7 +366,7 @@ class ExecutionPayloadHeader(Container): excess_blob_gas: uint64 deposit_receipts_root: Root # [New in Electra:EIP6110] withdrawal_requests_root: Root # [New in Electra:EIP7002:EIP7251] - consolidation_requests_root: Root # [New in Electra:EIP7251] + consolidation_requests_root: Root # [New in Electra:EIP7251] ``` #### `BeaconState` @@ -1037,7 +1038,8 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, process_execution_layer_withdrawal_request) for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] - for_ops(body.execution_payload.consolidation_requests, process_execution_layer_consolidation_request) # [New in Electra:EIP7251] + # [New in Electra:EIP7251] + for_ops(body.execution_payload.consolidation_requests, process_execution_layer_consolidation_request) ``` ##### Attestations @@ -1291,7 +1293,8 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt) ```python def process_execution_layer_consolidation_request( state: BeaconState, - execution_layer_consolidation_request: ExecutionLayerConsolidationRequest) -> None: + execution_layer_consolidation_request: ExecutionLayerConsolidationRequest +) -> None: # If the pending consolidations queue is full, consolidation requests are ignored if len(state.pending_consolidations) == PENDING_CONSOLIDATIONS_LIMIT: return diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py index ce31da6acd..95386f4282 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py +++ 
b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py @@ -9,7 +9,6 @@ default_activation_threshold, spec_state_test, ) -from eth2spec.test.helpers.keys import pubkey_to_privkey from eth2spec.test.helpers.withdrawals import ( set_eth1_withdrawal_credential_with_balance, set_compounding_withdrawal_credential, @@ -255,7 +254,6 @@ def test_basic_consolidation_with_compounding_credentials(spec, state): assert state.validators[source_index].exit_epoch == expected_exit_epoch - @with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( @@ -623,7 +621,7 @@ def test_invalid_inactive_target(spec, state): @spec_test @single_phase def test_invalid_no_source_execution_withdrawal_credential(spec, state): - # Set up a correct consolidation, but source does not have + # Set up a correct consolidation, but source does not have # an execution withdrawal credential current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -639,6 +637,7 @@ def test_invalid_no_source_execution_withdrawal_credential(spec, state): spec, state, consolidation, success=False ) + @with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( @@ -648,7 +647,7 @@ def test_invalid_no_source_execution_withdrawal_credential(spec, state): @spec_test @single_phase def test_invalid_no_target_execution_withdrawal_credential(spec, state): - # Set up a correct consolidation, but target does not have + # Set up a correct consolidation, but target does not have # an execution withdrawal credential current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -666,6 +665,7 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): spec, state, consolidation, success=False ) + @with_electra_and_later @with_presets([MINIMAL], "need sufficient consolidation churn limit") @with_custom_state( @@ -691,7 +691,6 @@ def test_invalid_incorrect_source_address(spec, state): ) set_eth1_withdrawal_credential_with_balance(spec, state, target_index) - yield from run_consolidation_processing( spec, state, consolidation, success=False ) @@ -757,7 +756,6 @@ def test_invalid_unknown_target_pubkey(spec, state): ) - def run_consolidation_processing(spec, state, consolidation, success=True): """ Run ``process_consolidation``, yielding: @@ -779,7 +777,6 @@ def run_consolidation_processing(spec, state, consolidation, success=True): else: pre_state = state.copy() - yield 'pre', state yield 'consolidation', consolidation @@ -802,7 +799,7 @@ def run_consolidation_processing(spec, state, consolidation, success=True): assert state.validators[source_index].exit_epoch < spec.FAR_FUTURE_EPOCH # Check that the exit epoch matches earliest_consolidation_epoch assert state.validators[source_index].exit_epoch == state.earliest_consolidation_epoch - # Check that the correct consolidation has been appended + # Check that the correct consolidation has been appended expected_new_pending_consolidation = spec.PendingConsolidation( source_index=source_index, target_index=target_index, @@ -810,4 +807,3 @@ def run_consolidation_processing(spec, state, consolidation, success=True): assert state.pending_consolidations == pre_pending_consolidations + [expected_new_pending_consolidation] else: assert pre_state == state - diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py 
b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index 78bd28e35a..08c430b621 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -112,8 +112,6 @@ def compute_el_header_block_hash(spec, execution_payload_header_rlp.append((Binary(32, 32), deposit_receipts_trie_root)) # withdrawal requests root execution_payload_header_rlp.append((Binary(32, 32), withdrawal_requests_root)) - # consolidation requests root - execution_payload_header_rlp.append((Binary(32, 32), consolidation_requests_root)) sedes = List([schema for schema, _ in execution_payload_header_rlp]) values = [value for _, value in execution_payload_header_rlp] From 96db63ea29505e763feb40b8289e716c6c522978 Mon Sep 17 00:00:00 2001 From: fradamt Date: Wed, 22 May 2024 10:11:39 +0200 Subject: [PATCH 47/89] doctoc --- specs/electra/beacon-chain.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 101be40f60..414cc90a52 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -28,8 +28,7 @@ - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - [`ExecutionLayerWithdrawalRequest`](#executionlayerwithdrawalrequest) - - [`Consolidation`](#consolidation) - - [`SignedConsolidation`](#signedconsolidation) + - [`ExecutionLayerConsolidationRequest`](#executionlayerconsolidationrequest) - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - [`AttesterSlashing`](#attesterslashing) @@ -94,8 +93,8 @@ - [New `process_execution_layer_withdrawal_request`](#new-process_execution_layer_withdrawal_request) - [Deposit receipts](#deposit-receipts) - [New `process_deposit_receipt`](#new-process_deposit_receipt) - - [Consolidations](#consolidations) - - [New `process_consolidation`](#new-process_consolidation) + - [Execution layer consolidation requests](#execution-layer-consolidation-requests) + - [New `process_execution_layer_consolidation_request`](#new-process_execution_layer_consolidation_request) - [Testing](#testing) From 69ee35d4e39c38583c2b7c8b0224717ab5b8c41f Mon Sep 17 00:00:00 2001 From: fradamt Date: Wed, 22 May 2024 12:31:22 +0200 Subject: [PATCH 48/89] handle top-ups to exiting/exited validators --- specs/electra/beacon-chain.md | 25 +++- .../test_process_pending_balance_deposits.py | 130 ++++++++++++++++++ 2 files changed, 151 insertions(+), 4 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 62da891146..b09cf6906d 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -798,12 +798,27 @@ def process_pending_balance_deposits(state: BeaconState) -> None: available_for_processing = state.deposit_balance_to_consume + get_activation_exit_churn_limit(state) processed_amount = 0 next_deposit_index = 0 + deposits_to_postpone = [] for deposit in state.pending_balance_deposits: - if processed_amount + deposit.amount > available_for_processing: - break - increase_balance(state, deposit.index, deposit.amount) - processed_amount += deposit.amount + validator = state.validators[deposit.index] + # Validator is exiting, postpone the deposit until after withdrawable epoch + if validator.exit_epoch < FAR_FUTURE_EPOCH: + if get_current_epoch(state) <= validator.withdrawable_epoch: + deposits_to_postpone.append(deposit) + # Deposited balance will never become active. 
Increase balance but do not consume churn + else: + increase_balance(state, deposit.index, deposit.amount) + # Validator is not exiting, attempt to process deposit + else: + # Deposit does not fit in the churn, no more deposit processing in this epoch. + if processed_amount + deposit.amount > available_for_processing: + break + # Deposit fits in the churn, process it. Increase balance and consume churn. + else: + increase_balance(state, deposit.index, deposit.amount) + processed_amount += deposit.amount + # Regardless of how the deposit was handled, we move on in the queue. next_deposit_index += 1 state.pending_balance_deposits = state.pending_balance_deposits[next_deposit_index:] @@ -812,6 +827,8 @@ def process_pending_balance_deposits(state: BeaconState) -> None: state.deposit_balance_to_consume = Gwei(0) else: state.deposit_balance_to_consume = available_for_processing - processed_amount + + state.pending_balance_deposits += deposits_to_postpone ``` #### New `process_pending_consolidations` diff --git a/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py b/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py index 981851bc80..e3f8526913 100644 --- a/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py +++ b/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py @@ -132,3 +132,133 @@ def test_multiple_pending_deposits_above_churn(spec, state): assert state.pending_balance_deposits == [ spec.PendingBalanceDeposit(index=2, amount=amount) ] + + +@with_electra_and_later +@spec_state_test +def test_skipped_deposit_exiting_validator(spec, state): + index = 0 + amount = spec.MIN_ACTIVATION_BALANCE + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=index, amount=amount)) + pre_pending_balance_deposits = state.pending_balance_deposits.copy() + pre_balance = state.balances[index] + # Initiate the validator's exit + spec.initiate_validator_exit(state, index) + yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + # Deposit is skipped because validator is exiting + assert state.balances[index] == pre_balance + # All deposits either processed or postponed, no leftover deposit balance to consume + assert state.deposit_balance_to_consume == 0 + # The deposit is still in the queue + assert state.pending_balance_deposits == pre_pending_balance_deposits + + +@with_electra_and_later +@spec_state_test +def test_multiple_skipped_deposits_exiting_validators(spec, state): + amount = spec.EFFECTIVE_BALANCE_INCREMENT + for i in [0, 1, 2]: + # Append pending deposit for validator i + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=i, amount=amount)) + + # Initiate the exit of validator i + spec.initiate_validator_exit(state, i) + pre_pending_balance_deposits = state.pending_balance_deposits.copy() + pre_balances = state.balances.copy() + yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + # All deposits are postponed, no balance changes + assert state.balances == pre_balances + # All deposits are postponed, no leftover deposit balance to consume + assert state.deposit_balance_to_consume == 0 + # All deposits still in the queue, in the same order + assert state.pending_balance_deposits == pre_pending_balance_deposits + + +@with_electra_and_later +@spec_state_test +def test_multiple_pending_one_skipped(spec, state): + amount = 
spec.EFFECTIVE_BALANCE_INCREMENT + for i in [0, 1, 2]: + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=i, amount=amount)) + pre_balances = state.balances.copy() + # Initiate the second validator's exit + spec.initiate_validator_exit(state, 1) + yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + # First and last deposit are processed, second is not because of exiting + for i in [0, 2]: + assert state.balances[i] == pre_balances[i] + amount + assert state.balances[1] == pre_balances[1] + # All deposits either processed or postponed, no leftover deposit balance to consume + assert state.deposit_balance_to_consume == 0 + # second deposit is still in the queue + assert state.pending_balance_deposits == [spec.PendingBalanceDeposit(index=1, amount=amount)] + + +@with_electra_and_later +@spec_state_test +def test_mixture_of_skipped_and_above_churn(spec, state): + amount01 = spec.EFFECTIVE_BALANCE_INCREMENT + amount2 = spec.MAX_EFFECTIVE_BALANCE_ELECTRA + # First two validators have small deposit, third validators a large one + for i in [0, 1]: + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=i, amount=amount01)) + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=2, amount=amount2)) + pre_balances = state.balances.copy() + # Initiate the second validator's exit + spec.initiate_validator_exit(state, 1) + yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + # First deposit is processed + assert state.balances[0] == pre_balances[0] + amount01 + # Second deposit is postponed, third is above churn + for i in [1, 2]: + assert state.balances[i] == pre_balances[i] + # First deposit consumes some deposit balance + # Deposit balance to consume is not reset because third deposit is not processed + assert state.deposit_balance_to_consume == spec.get_activation_exit_churn_limit(state) - amount01 + # second and third deposit still in the queue, but second is appended at the end + assert state.pending_balance_deposits == [spec.PendingBalanceDeposit(index=2, amount=amount2), + spec.PendingBalanceDeposit(index=1, amount=amount01)] + + +@with_electra_and_later +@spec_state_test +def test_processing_deposit_of_withdrawable_validator(spec, state): + index = 0 + amount = spec.MIN_ACTIVATION_BALANCE + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=index, amount=amount)) + pre_balance = state.balances[index] + # Initiate the validator's exit + spec.initiate_validator_exit(state, index) + # Set epoch to withdrawable epoch + 1 to allow processing of the deposit + state.slot = spec.SLOTS_PER_EPOCH * (state.validators[index].withdrawable_epoch + 1) + yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + # Deposit is correctly processed + assert state.balances[index] == pre_balance + amount + # No leftover deposit balance to consume when there are no deposits left to process + assert state.deposit_balance_to_consume == 0 + assert state.pending_balance_deposits == [] + + +@with_electra_and_later +@spec_state_test +def test_processing_deposit_of_withdrawable_validator_does_not_get_churned(spec, state): + amount = spec.MAX_EFFECTIVE_BALANCE_ELECTRA + for i in [0, 1]: + state.pending_balance_deposits.append(spec.PendingBalanceDeposit(index=i, amount=amount)) + pre_balances = state.balances.copy() + # Initiate the first validator's exit + spec.initiate_validator_exit(state, 0) + # Set epoch to withdrawable epoch + 1 to allow 
processing of the deposit + state.slot = spec.SLOTS_PER_EPOCH * (state.validators[0].withdrawable_epoch + 1) + # Don't use run_epoch_processing_with to avoid penalties being applied + yield 'pre', state + spec.process_pending_balance_deposits(state) + yield 'post', state + # First deposit is processed though above churn limit, because validator is withdrawable + assert state.balances[0] == pre_balances[0] + amount + # Second deposit is not processed because above churn + assert state.balances[1] == pre_balances[1] + # Second deposit is not processed, so there's leftover deposit balance to consume. + # First deposit does not consume any. + assert state.deposit_balance_to_consume == spec.get_activation_exit_churn_limit(state) + assert state.pending_balance_deposits == [spec.PendingBalanceDeposit(index=1, amount=amount)] From 1d02110a68d5a8dfe1c3463228ee9ab06b3ec133 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Thu, 23 May 2024 15:30:49 -0500 Subject: [PATCH 49/89] Update compute_el_header_block_hash for EIP-7685 --- .../test/helpers/execution_payload.py | 35 +++++++++---------- .../pyspec/eth2spec/test/helpers/genesis.py | 9 ++--- 2 files changed, 19 insertions(+), 25 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index ef6e2f6442..33d7acce59 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -58,8 +58,7 @@ def compute_el_header_block_hash(spec, payload_header, transactions_trie_root, withdrawals_trie_root=None, - deposit_receipts_trie_root=None, - withdrawal_requests_root=None): + requests_trie_root=None): """ Computes the RLP execution block hash described by an `ExecutionPayloadHeader`. 
""" @@ -101,15 +100,15 @@ def compute_el_header_block_hash(spec, # withdrawals_root execution_payload_header_rlp.append((Binary(32, 32), withdrawals_trie_root)) if is_post_deneb(spec): - # excess_blob_gas + # blob_gas_used execution_payload_header_rlp.append((big_endian_int, payload_header.blob_gas_used)) + # excess_blob_gas execution_payload_header_rlp.append((big_endian_int, payload_header.excess_blob_gas)) + # parent_beacon_root + execution_payload_header_rlp.append((Binary(32, 32), bytes.fromhex("0000000000000000000000000000000000000000000000000000000000000000"))) if is_post_electra(spec): - # deposit_receipts_root - assert deposit_receipts_trie_root is not None - execution_payload_header_rlp.append((Binary(32, 32), deposit_receipts_trie_root)) - # withdrawal requests root - execution_payload_header_rlp.append((Binary(32, 32), withdrawal_requests_root)) + # requests_root + execution_payload_header_rlp.append((Binary(32, 32), requests_trie_root)) sedes = List([schema for schema, _ in execution_payload_header_rlp]) values = [value for _, value in execution_payload_header_rlp] @@ -147,7 +146,7 @@ def get_withdrawal_request_rlp(withdrawal_request): sedes = List([schema for schema, _ in withdrawal_request_rlp]) values = [value for _, value in withdrawal_request_rlp] - return encode(values, sedes) + return "\x01" + encode(values, sedes) def get_deposit_receipt_rlp(spec, deposit_receipt): @@ -166,24 +165,23 @@ def get_deposit_receipt_rlp(spec, deposit_receipt): sedes = List([schema for schema, _ in deposit_receipt_rlp]) values = [value for _, value in deposit_receipt_rlp] - return encode(values, sedes) + return "\x00" + encode(values, sedes) def compute_el_block_hash(spec, payload): transactions_trie_root = compute_trie_root_from_indexed_data(payload.transactions) withdrawals_trie_root = None - deposit_receipts_trie_root = None - withdrawal_requests_root = None + requests_trie_root = None if is_post_capella(spec): withdrawals_encoded = [get_withdrawal_rlp(withdrawal) for withdrawal in payload.withdrawals] withdrawals_trie_root = compute_trie_root_from_indexed_data(withdrawals_encoded) if is_post_electra(spec): - deposit_receipts_encoded = [get_deposit_receipt_rlp(spec, receipt) for receipt in payload.deposit_receipts] - deposit_receipts_trie_root = compute_trie_root_from_indexed_data(deposit_receipts_encoded) - withdrawal_requests_encoded = [get_withdrawal_request_rlp(request) for request in payload.withdrawal_requests] - withdrawal_requests_root = compute_trie_root_from_indexed_data(withdrawal_requests_encoded) + requests_encoded = [] + requests_encoded += [get_deposit_receipt_rlp(spec, receipt) for receipt in payload.deposit_receipts] + requests_encoded += [get_withdrawal_request_rlp(request) for request in payload.withdrawal_requests] + requests_trie_root = compute_trie_root_from_indexed_data(requests_encoded) payload_header = get_execution_payload_header(spec, payload) @@ -192,8 +190,7 @@ def compute_el_block_hash(spec, payload): payload_header, transactions_trie_root, withdrawals_trie_root, - deposit_receipts_trie_root, - withdrawal_requests_root, + requests_trie_root, ) @@ -229,8 +226,8 @@ def build_empty_execution_payload(spec, state, randao_mix=None): payload.blob_gas_used = 0 payload.excess_blob_gas = 0 if is_post_electra(spec): - # just to be clear payload.deposit_receipts = [] + payload.withdrawal_requests = [] payload.block_hash = compute_el_block_hash(spec, payload) diff --git a/tests/core/pyspec/eth2spec/test/helpers/genesis.py 
b/tests/core/pyspec/eth2spec/test/helpers/genesis.py index 3896b41731..4919509eb2 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/genesis.py +++ b/tests/core/pyspec/eth2spec/test/helpers/genesis.py @@ -50,22 +50,19 @@ def get_sample_genesis_execution_payload_header(spec, transactions_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") withdrawals_trie_root = None - deposit_receipts_trie_root = None - exits_trie_root = None + requests_trie_root = None if is_post_capella(spec): withdrawals_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") if is_post_electra(spec): - deposit_receipts_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") - exits_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") + requests_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") payload_header.block_hash = compute_el_header_block_hash( spec, payload_header, transactions_trie_root, withdrawals_trie_root, - deposit_receipts_trie_root, - exits_trie_root, + requests_trie_root, ) return payload_header From d46e4aa36b64c47e4c510e52a43ff25363fbef9f Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Thu, 23 May 2024 15:55:16 -0500 Subject: [PATCH 50/89] Split long line into two --- tests/core/pyspec/eth2spec/test/helpers/execution_payload.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index 33d7acce59..3b5ec5c0da 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -105,7 +105,8 @@ def compute_el_header_block_hash(spec, # excess_blob_gas execution_payload_header_rlp.append((big_endian_int, payload_header.excess_blob_gas)) # parent_beacon_root - execution_payload_header_rlp.append((Binary(32, 32), bytes.fromhex("0000000000000000000000000000000000000000000000000000000000000000"))) + empty_root = bytes.fromhex("0000000000000000000000000000000000000000000000000000000000000000") + execution_payload_header_rlp.append((Binary(32, 32), empty_root)) if is_post_electra(spec): # requests_root execution_payload_header_rlp.append((Binary(32, 32), requests_trie_root)) From 12cab5a864f296566066c1e07f18c17f70bd605e Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Thu, 23 May 2024 16:17:29 -0500 Subject: [PATCH 51/89] Make request type a bytes type --- tests/core/pyspec/eth2spec/test/helpers/execution_payload.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index 3b5ec5c0da..d9177dd9af 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -147,7 +147,7 @@ def get_withdrawal_request_rlp(withdrawal_request): sedes = List([schema for schema, _ in withdrawal_request_rlp]) values = [value for _, value in withdrawal_request_rlp] - return "\x01" + encode(values, sedes) + return b"\x01" + encode(values, sedes) def get_deposit_receipt_rlp(spec, deposit_receipt): @@ -166,7 +166,7 @@ def get_deposit_receipt_rlp(spec, deposit_receipt): sedes = List([schema for schema, _ in deposit_receipt_rlp]) values = [value for _, value in deposit_receipt_rlp] - return "\x00" + 
encode(values, sedes) + return b"\x00" + encode(values, sedes) def compute_el_block_hash(spec, payload): From 5c13f5c6517c1ba2d919f418453899b6be6f5729 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Mon, 27 May 2024 11:53:38 +0200 Subject: [PATCH 52/89] fix depth+extension serialization type Co-authored-by: g11tech --- specs/_features/verge/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/verge/beacon-chain.md b/specs/_features/verge/beacon-chain.md index 31bd21e79c..e01b636622 100644 --- a/specs/_features/verge/beacon-chain.md +++ b/specs/_features/verge/beacon-chain.md @@ -148,7 +148,7 @@ class IpaProof(Container): ```python class VerkleProof(Container): other_stems: List[Bytes31, MAX_STEMS] - depth_extension_present: List[uint8, MAX_STEMS] + depth_extension_present: ByteList[MAX_STEMS] commitments_by_path: List[BanderwagonGroupElement, MAX_STEMS * MAX_COMMITMENTS_PER_STEM] d: BanderwagonGroupElement ipa_proof: IpaProof From 7fbbb252d802f3a858ba1276c3ef6271b42dee2e Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Mon, 27 May 2024 12:46:30 +0200 Subject: [PATCH 53/89] rebase on top of deneb --- .../{verge => eip6800}/beacon-chain.md | 22 +++++++---- specs/_features/{verge => eip6800}/fork.md | 38 ++++++++++--------- 2 files changed, 34 insertions(+), 26 deletions(-) rename specs/_features/{verge => eip6800}/beacon-chain.md (91%) rename specs/_features/{verge => eip6800}/fork.md (81%) diff --git a/specs/_features/verge/beacon-chain.md b/specs/_features/eip6800/beacon-chain.md similarity index 91% rename from specs/_features/verge/beacon-chain.md rename to specs/_features/eip6800/beacon-chain.md index e01b636622..8b33d71732 100644 --- a/specs/_features/verge/beacon-chain.md +++ b/specs/_features/eip6800/beacon-chain.md @@ -1,4 +1,4 @@ -# The Verge -- The Beacon Chain +# eip6800 -- The Beacon Chain ## Table of contents @@ -33,7 +33,7 @@ ## Introduction -This upgrade adds transaction execution to the beacon chain as part of the Verge upgrade. +This upgrade adds transaction execution to the beacon chain as part of the eip6800 upgrade. 
## Custom types @@ -76,10 +76,11 @@ class ExecutionPayload(Container): timestamp: uint64 extra_data: ByteList[MAX_EXTRA_DATA_BYTES] base_fee_per_gas: uint256 + # Extra payload fields block_hash: Hash32 # Hash of execution block - # Extra payload field transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] - execution_witness: ExecutionWitness # [New in Verge] + withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD] + execution_witness: ExecutionWitness # [New in eip6800] ``` #### `ExecutionPayloadHeader` @@ -99,10 +100,12 @@ class ExecutionPayloadHeader(Container): timestamp: uint64 extra_data: ByteList[MAX_EXTRA_DATA_BYTES] base_fee_per_gas: uint256 + # Extra payload fields block_hash: Hash32 # Hash of execution block transactions_root: Root - # Extra payload fields - execution_witness_root: Root # [New in Verge] + withdrawals_root: Root + excess_data_gas: uint256 + execution_witness_root: Root # [New in eip6800] ``` ### New containers @@ -181,6 +184,7 @@ def process_execution_payload(state: BeaconState, payload: ExecutionPayload, exe assert payload.timestamp == compute_timestamp_at_slot(state, state.slot) # Verify the execution payload is valid assert execution_engine.notify_new_payload(payload) + # Cache execution payload header state.latest_execution_payload_header = ExecutionPayloadHeader( parent_hash=payload.parent_hash, @@ -197,10 +201,12 @@ def process_execution_payload(state: BeaconState, payload: ExecutionPayload, exe base_fee_per_gas=payload.base_fee_per_gas, block_hash=payload.block_hash, transactions_root=hash_tree_root(payload.transactions), - execution_witness=payload.execution_witness, + withdrawals_root=hash_tree_root(payload.withdrawals), + excess_data_gas=payload.excess_data_gas, + execution_witness=payload.execution_witness, # [New in eip6800] ) ``` ## Testing -TBD \ No newline at end of file +TBD diff --git a/specs/_features/verge/fork.md b/specs/_features/eip6800/fork.md similarity index 81% rename from specs/_features/verge/fork.md rename to specs/_features/eip6800/fork.md index 17fbac370e..1edbe4d600 100644 --- a/specs/_features/verge/fork.md +++ b/specs/_features/eip6800/fork.md @@ -1,4 +1,4 @@ -# The Verge -- Fork Logic +# eip6800 -- Fork Logic ## Table of contents @@ -10,7 +10,7 @@ - [Helper functions](#helper-functions) - [Misc](#misc) - [Modified `compute_fork_version`](#modified-compute_fork_version) -- [Fork to the Verge](#fork-to-capella) +- [Fork to eip6800](#fork-to-eip6800) - [Fork trigger](#fork-trigger) - [Upgrading the state](#upgrading-the-state) @@ -18,7 +18,7 @@ ## Introduction -This document describes the process of the Verge upgrade. +This document describes the process of the eip6800 upgrade. ## Configuration @@ -26,8 +26,8 @@ Warning: this configuration is not definitive. | Name | Value | | - | - | -| `VERGE_FORK_VERSION` | `Version('0x05000000')` | -| `VERGE_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** | +| `EIP6800_FORK_VERSION` | `Version('0x05000000')` | +| `EIP6800_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** | ## Helper functions @@ -41,8 +41,10 @@ def compute_fork_version(epoch: Epoch) -> Version: """ Return the fork version at the given ``epoch``. 
""" - if epoch >= VERGE_FORK_EPOCH: - return VERGE_FORK_VERSION + if epoch >= EIP6800_FORK_EPOCH: + return EIP6800_FORK_VERSION + if epoch >= DENEB_FORK_EPOCH: + return DENEB_FORK_VERSION if epoch >= CAPELLA_FORK_EPOCH: return CAPELLA_FORK_VERSION if epoch >= BELLATRIX_FORK_EPOCH: @@ -52,25 +54,25 @@ def compute_fork_version(epoch: Epoch) -> Version: return GENESIS_FORK_VERSION ``` -## Fork to the Verge +## Fork to eip6800 ### Fork trigger -The fork is triggered at epoch `VERGE_FORK_EPOCH`. +The fork is triggered at epoch `EIP6800_FORK_EPOCH`. -Note that for the pure verge networks, we don't apply `upgrade_to_verge` since it starts with the Verge version logic. +Note that for the pure eip6800 networks, we don't apply `upgrade_to_eip6800` since it starts with the eip6800 version logic. ### Upgrading the state -If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == VERGE_FORK_EPOCH`, -an irregular state change is made to upgrade to the Verge. +If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == EIP6800_FORK_EPOCH`, +an irregular state change is made to upgrade to eip6800. -The upgrade occurs after the completion of the inner loop of `process_slots` that sets `state.slot` equal to `VERGE_FORK_EPOCH * SLOTS_PER_EPOCH`. +The upgrade occurs after the completion of the inner loop of `process_slots` that sets `state.slot` equal to `EIP6800_FORK_EPOCH * SLOTS_PER_EPOCH`. Care must be taken when transitioning through the fork boundary as implementations will need a modified [state transition function](../phase0/beacon-chain.md#beacon-chain-state-transition-function) that deviates from the Phase 0 document. In particular, the outer `state_transition` function defined in the Phase 0 document will not expose the precise fork slot to execute the upgrade in the presence of skipped slots at the fork boundary. Instead, the logic must be within `process_slots`. 
```python -def upgrade_to_verge(pre: capella.BeaconState) -> BeaconState: +def upgrade_to_eip6800(pre: capella.BeaconState) -> BeaconState: epoch = capella.get_current_epoch(pre) latest_execution_payload_header = ExecutionPayloadHeader( parent_hash=pre.latest_execution_payload_header.parent_hash, @@ -85,10 +87,11 @@ def upgrade_to_verge(pre: capella.BeaconState) -> BeaconState: timestamp=pre.latest_execution_payload_header.timestamp, extra_data=pre.latest_execution_payload_header.extra_data, base_fee_per_gas=pre.latest_execution_payload_header.base_fee_per_gas, + excess_data_gas=uint256(0), block_hash=pre.latest_execution_payload_header.block_hash, transactions_root=pre.latest_execution_payload_header.transactions_root, withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, - execution_witness=ExecutionWitness([], []) # New in the Verge + execution_witness=ExecutionWitness([], []) # New in eip6800 ) post = BeaconState( # Versioning @@ -97,7 +100,7 @@ def upgrade_to_verge(pre: capella.BeaconState) -> BeaconState: slot=pre.slot, fork=Fork( previous_version=pre.fork.current_version, - current_version=VERGE_FORK_VERSION, + current_version=EIP6800_FORK_VERSION, # [Modified in eip6800] epoch=epoch, ), # History @@ -135,8 +138,7 @@ def upgrade_to_verge(pre: capella.BeaconState) -> BeaconState: next_withdrawal_index=pre.next_withdrawal_index, next_withdrawal_validator_index=pre.next_withdrawal_validator_index, # Deep history valid from Capella onwards - # FIXME most likely wrong - historical_summaries=List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT]([]), + historical_summaries=pre.historical_summaries, ) return post From 0fd49c1a323cf464848c065959efa3a9ce57f77b Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Mon, 27 May 2024 14:10:58 +0200 Subject: [PATCH 54/89] fix spelling error --- specs/_features/eip6800/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip6800/beacon-chain.md b/specs/_features/eip6800/beacon-chain.md index 8b33d71732..dd821e66b6 100644 --- a/specs/_features/eip6800/beacon-chain.md +++ b/specs/_features/eip6800/beacon-chain.md @@ -121,7 +121,7 @@ class SuffixStateDiff(Container): new_value: Optional[Bytes32] ``` -*Note*: on the Kaustinen testnet, `new_value` is ommitted from the container. +*Note*: on the Kaustinen testnet, `new_value` is omitted from the container. #### `StemStateDiff` From 744ae2e687e2effbea177d0f83e8d825ae1232de Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Tue, 28 May 2024 16:24:11 +0800 Subject: [PATCH 55/89] Add EIP-6800 linter checks. Fix the lint errors. Remove custom type `StateDiff` and then use `List[StemStateDiff, MAX_STEMS]` directly in `ExecutionWitness`. 
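Concretely, the `StateDiff` removal described above amounts to inlining the list type into the container; a minimal before/after sketch, assuming the SSZ types (`Container`, `List`) and the EIP-6800 definitions (`StemStateDiff`, `VerkleProof`, `MAX_STEMS`) from the spec (the authoritative definitions are in the diff that follows):

```python
# Before: a bare type alias, only valid if the list is sorted by stems
StateDiff = List[StemStateDiff, MAX_STEMS]

class ExecutionWitness(Container):
    state_diff: StateDiff
    verkle_proof: VerkleProof


# After: the list type is used directly inside the container
class ExecutionWitness(Container):
    state_diff: List[StemStateDiff, MAX_STEMS]
    verkle_proof: VerkleProof
```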
--- .gitignore | 1 + Makefile | 2 +- presets/mainnet/eip6800.yaml | 12 ++++ presets/minimal/eip6800.yaml | 12 ++++ pysetup/constants.py | 2 +- pysetup/helpers.py | 2 +- pysetup/md_doc_paths.py | 2 + pysetup/spec_builders/__init__.py | 3 +- pysetup/spec_builders/eip6800.py | 21 +++++++ setup.py | 8 ++- specs/_features/eip6800/beacon-chain.md | 61 +++++++++++-------- specs/_features/eip6800/fork.md | 6 +- .../pyspec/eth2spec/utils/ssz/ssz_typing.py | 1 + 13 files changed, 99 insertions(+), 34 deletions(-) create mode 100644 presets/mainnet/eip6800.yaml create mode 100644 presets/minimal/eip6800.yaml create mode 100644 pysetup/spec_builders/eip6800.py diff --git a/.gitignore b/.gitignore index 3e4413e977..3586b356ca 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,7 @@ tests/core/pyspec/eth2spec/deneb/ tests/core/pyspec/eth2spec/electra/ tests/core/pyspec/eth2spec/whisk/ tests/core/pyspec/eth2spec/eip7594/ +tests/core/pyspec/eth2spec/eip6800/ # coverage reports .htmlcov diff --git a/Makefile b/Makefile index 8fbedf2518..bdf4bdde7a 100644 --- a/Makefile +++ b/Makefile @@ -35,7 +35,7 @@ MARKDOWN_FILES = $(wildcard $(SPEC_DIR)/*/*.md) \ $(wildcard $(SPEC_DIR)/_features/*/*/*.md) \ $(wildcard $(SSZ_DIR)/*.md) -ALL_EXECUTABLE_SPEC_NAMES = phase0 altair bellatrix capella deneb electra whisk +ALL_EXECUTABLE_SPEC_NAMES = phase0 altair bellatrix capella deneb electra whisk eip6800 # The parameters for commands. Use `foreach` to avoid listing specs again. COVERAGE_SCOPE := $(foreach S,$(ALL_EXECUTABLE_SPEC_NAMES), --cov=eth2spec.$S.$(TEST_PRESET_TYPE)) PYLINT_SCOPE := $(foreach S,$(ALL_EXECUTABLE_SPEC_NAMES), ./eth2spec/$S) diff --git a/presets/mainnet/eip6800.yaml b/presets/mainnet/eip6800.yaml new file mode 100644 index 0000000000..d74ee62122 --- /dev/null +++ b/presets/mainnet/eip6800.yaml @@ -0,0 +1,12 @@ +# Mainnet preset - EIP6800 + +# Misc +# --------------------------------------------------------------- +# `uint64(2**16)` (= 65,536) +MAX_STEMS: 65536 +# `uint64(33)` +MAX_COMMITMENTS_PER_STEM: 33 +# `uint64(2**8)` (= 256) +VERKLE_WIDTH: 256 +# `uint64(2**3)` (= 8) +IPA_PROOF_DEPTH: 8 diff --git a/presets/minimal/eip6800.yaml b/presets/minimal/eip6800.yaml new file mode 100644 index 0000000000..499721e4a3 --- /dev/null +++ b/presets/minimal/eip6800.yaml @@ -0,0 +1,12 @@ +# Minimal preset - EIP6800 + +# Execution +# --------------------------------------------------------------- +# `uint64(2**16)` (= 65,536) +MAX_STEMS: 65536 +# `uint64(33)` +MAX_COMMITMENTS_PER_STEM: 33 +# `uint64(2**8)` (= 256) +VERKLE_WIDTH: 256 +# `uint64(2**3)` (= 8) +IPA_PROOF_DEPTH: 8 diff --git a/pysetup/constants.py b/pysetup/constants.py index 0078b24dce..e26efb8e06 100644 --- a/pysetup/constants.py +++ b/pysetup/constants.py @@ -6,10 +6,10 @@ DENEB = 'deneb' ELECTRA = 'electra' EIP7594 = 'eip7594' +EIP6800 = 'eip6800' WHISK = 'whisk' - # The helper functions that are used when defining constants CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = ''' def ceillog2(x: int) -> uint64: diff --git a/pysetup/helpers.py b/pysetup/helpers.py index 49c0fcafcb..589ae6ab58 100644 --- a/pysetup/helpers.py +++ b/pysetup/helpers.py @@ -178,7 +178,7 @@ def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T ignored_dependencies = [ 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', - 'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', + 'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes31', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 
'Bitvector', 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', 'bytes', 'byte', 'ByteList', 'ByteVector', 'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set', diff --git a/pysetup/md_doc_paths.py b/pysetup/md_doc_paths.py index a4a730e5a3..28ebc71379 100644 --- a/pysetup/md_doc_paths.py +++ b/pysetup/md_doc_paths.py @@ -9,6 +9,7 @@ ELECTRA, WHISK, EIP7594, + EIP6800, ) @@ -21,6 +22,7 @@ ELECTRA: DENEB, WHISK: CAPELLA, EIP7594: DENEB, + EIP6800: DENEB, } ALL_FORKS = list(PREVIOUS_FORK_OF.keys()) diff --git a/pysetup/spec_builders/__init__.py b/pysetup/spec_builders/__init__.py index ea74b50b72..922cee18b2 100644 --- a/pysetup/spec_builders/__init__.py +++ b/pysetup/spec_builders/__init__.py @@ -6,12 +6,13 @@ from .electra import ElectraSpecBuilder from .whisk import WhiskSpecBuilder from .eip7594 import EIP7594SpecBuilder +from .eip6800 import EIP6800SpecBuilder spec_builders = { builder.fork: builder for builder in ( Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder, - ElectraSpecBuilder, WhiskSpecBuilder, EIP7594SpecBuilder, + ElectraSpecBuilder, WhiskSpecBuilder, EIP7594SpecBuilder, EIP6800SpecBuilder, ) } diff --git a/pysetup/spec_builders/eip6800.py b/pysetup/spec_builders/eip6800.py new file mode 100644 index 0000000000..4ea76d6a21 --- /dev/null +++ b/pysetup/spec_builders/eip6800.py @@ -0,0 +1,21 @@ +from typing import Dict + +from .base import BaseSpecBuilder +from ..constants import EIP6800 + + +class EIP6800SpecBuilder(BaseSpecBuilder): + fork: str = EIP6800 + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.deneb import {preset_name} as deneb +from eth2spec.utils.ssz.ssz_typing import Bytes31 +''' + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + return { + 'MAX_STEMS': spec_object.preset_vars['MAX_STEMS'].value, + } diff --git a/setup.py b/setup.py index fe2250f7c0..e5c348ada5 100644 --- a/setup.py +++ b/setup.py @@ -219,7 +219,13 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr elif source.startswith("class"): class_name, parent_class = _get_class_info_from_source(source) # check consistency with spec - assert class_name == current_name + try: + assert class_name == current_name + except Exception: + print('class_name', class_name) + print('current_name', current_name) + raise + if parent_class: assert parent_class == "Container" # NOTE: trim whitespace from spec diff --git a/specs/_features/eip6800/beacon-chain.md b/specs/_features/eip6800/beacon-chain.md index dd821e66b6..8c93729b84 100644 --- a/specs/_features/eip6800/beacon-chain.md +++ b/specs/_features/eip6800/beacon-chain.md @@ -1,4 +1,4 @@ -# eip6800 -- The Beacon Chain +# EIP6800 -- The Beacon Chain ## Table of contents @@ -21,8 +21,6 @@ - [`VerkleProof`](#verkleproof) - [`ExecutionWitness`](#executionwitness) - [Beacon chain state transition function](#beacon-chain-state-transition-function) - - [Execution engine](#execution-engine) - - [`notify_new_payload`](#notify_new_payload) - [Block processing](#block-processing) - [Execution payload](#execution-payload) - [`process_execution_payload`](#process_execution_payload) @@ -39,7 +37,6 @@ This upgrade adds transaction execution to the beacon chain as part of the eip68 | Name | SSZ equivalent | Description | | - | - | - | -| `StateDiff` | `List[StemStateDiff, MAX_STEMS]` | Only valid if list is sorted by stems | | `BanderwagonGroupElement` | `Bytes32` | | | `BanderwagonFieldElement` | `Bytes32` | | | `Stem` 
| `Bytes31` | | @@ -50,10 +47,10 @@ This upgrade adds transaction execution to the beacon chain as part of the eip68 | Name | Value | | - | - | -| `MAX_STEMS` | `2**16` | -| `MAX_COMMITMENTS_PER_STEM` | `33` | -| `VERKLE_WIDTH` | `256` | -| `IPA_PROOF_DEPTH` | `8` | +| `MAX_STEMS` | `uint64(2**16)` (= 65,536) | +| `MAX_COMMITMENTS_PER_STEM` | `uint64(33)` | +| `VERKLE_WIDTH` | `uint64(2**8)` (= 256) | +| `IPA_PROOF_DEPTH` | `uint64(2**3)` (= 8) | ## Containers @@ -80,7 +77,9 @@ class ExecutionPayload(Container): block_hash: Hash32 # Hash of execution block transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD] - execution_witness: ExecutionWitness # [New in eip6800] + blob_gas_used: uint64 + excess_blob_gas: uint64 + execution_witness: ExecutionWitness # [New in EIP6800] ``` #### `ExecutionPayloadHeader` @@ -104,8 +103,9 @@ class ExecutionPayloadHeader(Container): block_hash: Hash32 # Hash of execution block transactions_root: Root withdrawals_root: Root - excess_data_gas: uint256 - execution_witness_root: Root # [New in eip6800] + blob_gas_used: uint64 + excess_data_gas: uint64 + execution_witness_root: Root # [New in EIP6800] ``` ### New containers @@ -114,7 +114,7 @@ class ExecutionPayloadHeader(Container): ```python class SuffixStateDiff(Container): - suffix: Byte + suffix: Bytes1 # Null means not currently present current_value: Optional[Bytes32] # Null means value not updated @@ -132,15 +132,10 @@ class StemStateDiff(Container): suffix_diffs: List[SuffixStateDiff, VERKLE_WIDTH] ``` -```python -# Valid only if list is sorted by stems -StateDiff = List[StemStateDiff, MAX_STEMS] -``` - #### `IPAProof` ```python -class IpaProof(Container): +class IPAProof(Container): cl: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] cr: Vector[BanderwagonGroupElement, IPA_PROOF_DEPTH] final_evaluation = BanderwagonFieldElement @@ -154,14 +149,14 @@ class VerkleProof(Container): depth_extension_present: ByteList[MAX_STEMS] commitments_by_path: List[BanderwagonGroupElement, MAX_STEMS * MAX_COMMITMENTS_PER_STEM] d: BanderwagonGroupElement - ipa_proof: IpaProof + ipa_proof: IPAProof ``` #### `ExecutionWitness` ```python -class ExecutionWitness(container): - state_diff: StateDiff +class ExecutionWitness(Container): + state_diff: List[StemStateDiff, MAX_STEMS] verkle_proof: VerkleProof ``` @@ -174,16 +169,30 @@ class ExecutionWitness(container): ##### `process_execution_payload` ```python -def process_execution_payload(state: BeaconState, payload: ExecutionPayload, execution_engine: ExecutionEngine) -> None: +def process_execution_payload(state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine) -> None: + payload = body.execution_payload + # Verify consistency of the parent hash with respect to the previous execution payload header - if is_merge_transition_complete(state): - assert payload.parent_hash == state.latest_execution_payload_header.block_hash + assert payload.parent_hash == state.latest_execution_payload_header.block_hash # Verify prev_randao assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state)) # Verify timestamp assert payload.timestamp == compute_timestamp_at_slot(state, state.slot) + + # Verify commitments are under limit + assert len(body.blob_kzg_commitments) <= MAX_BLOBS_PER_BLOCK + # Verify the execution payload is valid - assert execution_engine.notify_new_payload(payload) + # Pass `versioned_hashes` to Execution Engine + # Pass `parent_beacon_block_root` to Execution Engine + 
versioned_hashes = [kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments] + assert execution_engine.verify_and_notify_new_payload( + NewPayloadRequest( + execution_payload=payload, + versioned_hashes=versioned_hashes, + parent_beacon_block_root=state.latest_block_header.parent_root, + ) + ) # Cache execution payload header state.latest_execution_payload_header = ExecutionPayloadHeader( @@ -203,7 +212,7 @@ def process_execution_payload(state: BeaconState, payload: ExecutionPayload, exe transactions_root=hash_tree_root(payload.transactions), withdrawals_root=hash_tree_root(payload.withdrawals), excess_data_gas=payload.excess_data_gas, - execution_witness=payload.execution_witness, # [New in eip6800] + execution_witness=payload.execution_witness, # [New in EIP6800] ) ``` diff --git a/specs/_features/eip6800/fork.md b/specs/_features/eip6800/fork.md index 1edbe4d600..9c6df2f16e 100644 --- a/specs/_features/eip6800/fork.md +++ b/specs/_features/eip6800/fork.md @@ -1,4 +1,4 @@ -# eip6800 -- Fork Logic +# EIP-6800 -- Fork Logic ## Table of contents @@ -72,7 +72,7 @@ Care must be taken when transitioning through the fork boundary as implementatio In particular, the outer `state_transition` function defined in the Phase 0 document will not expose the precise fork slot to execute the upgrade in the presence of skipped slots at the fork boundary. Instead, the logic must be within `process_slots`. ```python -def upgrade_to_eip6800(pre: capella.BeaconState) -> BeaconState: +def upgrade_to_eip6800(pre: deneb.BeaconState) -> BeaconState: epoch = capella.get_current_epoch(pre) latest_execution_payload_header = ExecutionPayloadHeader( parent_hash=pre.latest_execution_payload_header.parent_hash, @@ -91,7 +91,7 @@ def upgrade_to_eip6800(pre: capella.BeaconState) -> BeaconState: block_hash=pre.latest_execution_payload_header.block_hash, transactions_root=pre.latest_execution_payload_header.transactions_root, withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, - execution_witness=ExecutionWitness([], []) # New in eip6800 + execution_witness=ExecutionWitness([], []) # New in eip6800 ) post = BeaconState( # Versioning diff --git a/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py b/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py index 5a1b61d0be..1f3db2fe00 100644 --- a/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py +++ b/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py @@ -10,3 +10,4 @@ Bytes20 = ByteVector[20] # type: ignore +Bytes31 = ByteVector[31] # type: ignore From 8737e69c212912ff5aa53bc15f3eb78fafaca6d2 Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Tue, 28 May 2024 10:39:35 +0200 Subject: [PATCH 56/89] use execution_witness_root in header --- specs/_features/eip6800/beacon-chain.md | 2 +- specs/_features/eip6800/fork.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/_features/eip6800/beacon-chain.md b/specs/_features/eip6800/beacon-chain.md index 8c93729b84..ab935cb870 100644 --- a/specs/_features/eip6800/beacon-chain.md +++ b/specs/_features/eip6800/beacon-chain.md @@ -212,7 +212,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi transactions_root=hash_tree_root(payload.transactions), withdrawals_root=hash_tree_root(payload.withdrawals), excess_data_gas=payload.excess_data_gas, - execution_witness=payload.execution_witness, # [New in EIP6800] + execution_witness_root=hash_tree_root(payload.execution_witness), # [New in 
EIP6800] ) ``` diff --git a/specs/_features/eip6800/fork.md b/specs/_features/eip6800/fork.md index 9c6df2f16e..74f143f597 100644 --- a/specs/_features/eip6800/fork.md +++ b/specs/_features/eip6800/fork.md @@ -91,7 +91,7 @@ def upgrade_to_eip6800(pre: deneb.BeaconState) -> BeaconState: block_hash=pre.latest_execution_payload_header.block_hash, transactions_root=pre.latest_execution_payload_header.transactions_root, withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, - execution_witness=ExecutionWitness([], []) # New in eip6800 + execution_witness_root=hash_tree_root(ExecutionWitness([], [])) # New in eip6800 ) post = BeaconState( # Versioning From 85adbfbc4db5a1575fce4a033cc864e1ef244df4 Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Fri, 31 May 2024 15:57:51 +0600 Subject: [PATCH 57/89] Switch spec to MAX_EFFECTIVE_BALANCE_ELECTRA --- specs/electra/beacon-chain.md | 37 +++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 44949a6408..53ef54b4ef 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -59,6 +59,7 @@ - [New `get_active_balance`](#new-get_active_balance) - [New `get_pending_balance_to_withdraw`](#new-get_pending_balance_to_withdraw) - [Modified `get_attesting_indices`](#modified-get_attesting_indices) + - [Modified `get_next_sync_committee_indices`](#modified-get_next_sync_committee_indices) - [Beacon state mutators](#beacon-state-mutators) - [Updated `initiate_validator_exit`](#updated--initiate_validator_exit) - [New `switch_to_compounding_validator`](#new-switch_to_compounding_validator) @@ -441,6 +442,8 @@ class BeaconState(Container): #### Updated `compute_proposer_index` +*Note*: The function is modified to use `MAX_EFFECTIVE_BALANCE_ELECTRA` preset. + ```python def compute_proposer_index(state: BeaconState, indices: Sequence[ValidatorIndex], seed: Bytes32) -> ValidatorIndex: """ @@ -624,6 +627,36 @@ def get_attesting_indices(state: BeaconState, attestation: Attestation) -> Set[V return output ``` +#### Modified `get_next_sync_committee_indices` + +*Note*: The function is modified to use `MAX_EFFECTIVE_BALANCE_ELECTRA` preset. + +```python +def get_next_sync_committee_indices(state: BeaconState) -> Sequence[ValidatorIndex]: + """ + Return the sync committee indices, with possible duplicates, for the next sync committee. 
+ """ + epoch = Epoch(get_current_epoch(state) + 1) + + MAX_RANDOM_BYTE = 2**8 - 1 + active_validator_indices = get_active_validator_indices(state, epoch) + active_validator_count = uint64(len(active_validator_indices)) + seed = get_seed(state, epoch, DOMAIN_SYNC_COMMITTEE) + i = 0 + sync_committee_indices: List[ValidatorIndex] = [] + while len(sync_committee_indices) < SYNC_COMMITTEE_SIZE: + shuffled_index = compute_shuffled_index(uint64(i % active_validator_count), active_validator_count, seed) + candidate_index = active_validator_indices[shuffled_index] + random_byte = hash(seed + uint_to_bytes(uint64(i // 32)))[i % 32] + effective_balance = state.validators[candidate_index].effective_balance + # [Modified in Electra:EIP7251] + if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE_ELECTRA * random_byte: + sync_committee_indices.append(candidate_index) + i += 1 + return sync_committee_indices +``` + + ### Beacon state mutators #### Updated `initiate_validator_exit` @@ -1409,8 +1442,8 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, # Process activations for index, validator in enumerate(state.validators): balance = state.balances[index] - validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE) - if validator.effective_balance == MAX_EFFECTIVE_BALANCE: + validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE_ELECTRA) + if validator.effective_balance >= MIN_ACTIVATION_BALANCE: validator.activation_eligibility_epoch = GENESIS_EPOCH validator.activation_epoch = GENESIS_EPOCH From ffebf88de533a8ee0b5274cb7b7825a642d8f550 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 31 May 2024 19:50:59 +0800 Subject: [PATCH 58/89] Fix testgen and test format --- ...s_execution_layer_consolidation_request.py | 28 +++++++++---------- tests/formats/operations/README.md | 3 +- tests/generators/operations/main.py | 2 +- 3 files changed, 17 insertions(+), 16 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py index 95386f4282..5e3f7877cf 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py @@ -405,7 +405,7 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): ) @spec_test @single_phase -def test_invalid_source_equals_target(spec, state): +def test_incorrect_source_equals_target(spec, state): current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -434,7 +434,7 @@ def test_invalid_source_equals_target(spec, state): ) @spec_test @single_phase -def test_invalid_exceed_pending_consolidations_limit(spec, state): +def test_incorrect_exceed_pending_consolidations_limit(spec, state): state.pending_consolidations = [ spec.PendingConsolidation(source_index=0, target_index=1) ] * spec.PENDING_CONSOLIDATIONS_LIMIT @@ -462,7 +462,7 @@ def test_invalid_exceed_pending_consolidations_limit(spec, state): @with_electra_and_later @spec_state_test @single_phase -def test_invalid_not_enough_consolidation_churn_available(spec, state): +def test_incorrect_not_enough_consolidation_churn_available(spec, state): state.validators = 
state.validators[0:2] state.pending_consolidations = [ spec.PendingConsolidation(source_index=0, target_index=1) @@ -495,7 +495,7 @@ def test_invalid_not_enough_consolidation_churn_available(spec, state): ) @spec_test @single_phase -def test_invalid_exited_source(spec, state): +def test_incorrect_exited_source(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -527,7 +527,7 @@ def test_invalid_exited_source(spec, state): ) @spec_test @single_phase -def test_invalid_exited_target(spec, state): +def test_incorrect_exited_target(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -557,7 +557,7 @@ def test_invalid_exited_target(spec, state): ) @spec_test @single_phase -def test_invalid_inactive_source(spec, state): +def test_incorrect_inactive_source(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -589,7 +589,7 @@ def test_invalid_inactive_source(spec, state): ) @spec_test @single_phase -def test_invalid_inactive_target(spec, state): +def test_incorrect_inactive_target(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -620,7 +620,7 @@ def test_invalid_inactive_target(spec, state): ) @spec_test @single_phase -def test_invalid_no_source_execution_withdrawal_credential(spec, state): +def test_incorrect_no_source_execution_withdrawal_credential(spec, state): # Set up a correct consolidation, but source does not have # an execution withdrawal credential current_epoch = spec.get_current_epoch(state) @@ -646,7 +646,7 @@ def test_invalid_no_source_execution_withdrawal_credential(spec, state): ) @spec_test @single_phase -def test_invalid_no_target_execution_withdrawal_credential(spec, state): +def test_incorrect_no_target_execution_withdrawal_credential(spec, state): # Set up a correct consolidation, but target does not have # an execution withdrawal credential current_epoch = spec.get_current_epoch(state) @@ -674,7 +674,7 @@ def test_invalid_no_target_execution_withdrawal_credential(spec, state): ) @spec_test @single_phase -def test_invalid_incorrect_source_address(spec, state): +def test_incorrect_incorrect_source_address(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -704,7 +704,7 @@ def test_invalid_incorrect_source_address(spec, state): ) @spec_test @single_phase -def test_invalid_unknown_source_pubkey(spec, state): +def test_incorrect_unknown_source_pubkey(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -734,7 +734,7 @@ def test_invalid_unknown_source_pubkey(spec, state): ) @spec_test @single_phase -def test_invalid_unknown_target_pubkey(spec, state): +def test_incorrect_unknown_target_pubkey(spec, state): # Set up an otherwise correct consolidation current_epoch = spec.get_current_epoch(state) source_index = spec.get_active_validator_indices(state, current_epoch)[0] @@ -760,7 +760,7 @@ def 
run_consolidation_processing(spec, state, consolidation, success=True): """ Run ``process_consolidation``, yielding: - pre-state ('pre') - - consolidation ('consolidation') + - execution_layer_consolidation_request ('execution_layer_consolidation_request') - post-state ('post'). If ``valid == False``, run expecting ``AssertionError`` """ @@ -778,7 +778,7 @@ def run_consolidation_processing(spec, state, consolidation, success=True): pre_state = state.copy() yield 'pre', state - yield 'consolidation', consolidation + yield 'execution_layer_consolidation_request', consolidation spec.process_execution_layer_consolidation_request(state, consolidation) diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index b020b5fd03..d69a704866 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -46,7 +46,8 @@ Operations: | `withdrawals` | `ExecutionPayload` | `execution_payload` | `process_withdrawals(state, execution_payload)` (new in Capella) | | `bls_to_execution_change` | `SignedBLSToExecutionChange` | `address_change` | `process_bls_to_execution_change(state, address_change)` (new in Capella) | | `deposit_receipt` | `DepositReceipt` | `deposit_receipt` | `process_deposit_receipt(state, deposit_receipt)` (new in Electra) | -| `exits` | `ExecutionLayerExit` | `execution_layer_exit` | `process_execution_layer_exit(state, execution_layer_exit)` (new in Electra) | +| `execution_layer_withdrawal_request` | `ExecutionLayerWithdrawalRequest` | `execution_layer_withdrawal_request` | `process_execution_layer_withdrawal_request(state, execution_layer_withdrawal_request)` (new in Electra) | +| `execution_layer_consolidation_request` | `ExecutionLayerConsolidationRequest` | `execution_layer_consolidation_request` | `process_execution_layer_consolidation_request(state, execution_layer_consolidation_request)` (new in Electra) | Note that `block_header` is not strictly an operation (and is a full `Block`), but processed in the same manner, and hence included here. 
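For readers consuming the operations test format documented in the table above, the following is a minimal, hypothetical sketch of how a downstream harness might dispatch the two new Electra operations to their SSZ input types and processing functions. Only the type and handler names come from the table; the `spec` object, the `decode_bytes` SSZ helper, and the vector layout are assumptions of this sketch, not part of the patch.

```python
# Sketch only: maps each operation name from the README table to its
# (input SSZ type name, processing function name) pair.
OPERATION_HANDLERS = {
    'execution_layer_withdrawal_request': (
        'ExecutionLayerWithdrawalRequest', 'process_execution_layer_withdrawal_request'),
    'execution_layer_consolidation_request': (
        'ExecutionLayerConsolidationRequest', 'process_execution_layer_consolidation_request'),
}


def run_operation_case(spec, pre_state, operation_name, operation_bytes):
    # Decode the operation input with its SSZ type, then apply the matching
    # processing function to a copy of the pre-state and return the result.
    type_name, handler_name = OPERATION_HANDLERS[operation_name]
    operation = getattr(spec, type_name).decode_bytes(operation_bytes)
    state = pre_state.copy()
    getattr(spec, handler_name)(state, operation)
    return state
```

A real consumer would additionally cover the invalid cases, where no post-state is provided and the processing call is expected to fail.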
diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 85a5b64e3c..d4ca895570 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -45,7 +45,7 @@ _new_electra_mods = {key: 'eth2spec.test.electra.block_processing.test_process_' + key for key in [ 'attestation', - 'execution_layer_consolidation_requests', + 'execution_layer_consolidation_request', 'deposit_receipt', 'execution_layer_withdrawal_request', 'voluntary_exit' From 79d67ba8e28b8ac04c682b27aaf1d85b8502138d Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 31 May 2024 21:47:50 +0800 Subject: [PATCH 59/89] minor refactor --- .../test_process_pending_balance_deposits.py | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py b/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py index e3f8526913..c5789e8090 100644 --- a/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py +++ b/tests/core/pyspec/eth2spec/test/electra/epoch_processing/test_process_pending_balance_deposits.py @@ -5,6 +5,10 @@ ) +def run_process_pending_balance_deposits(spec, state): + yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + + @with_electra_and_later @spec_state_test def test_pending_deposit_min_activation_balance(spec, state): @@ -14,9 +18,9 @@ def test_pending_deposit_min_activation_balance(spec, state): spec.PendingBalanceDeposit(index=index, amount=amount) ) pre_balance = state.balances[index] - yield from run_epoch_processing_with( - spec, state, "process_pending_balance_deposits" - ) + + yield from run_process_pending_balance_deposits(spec, state) + assert state.balances[index] == pre_balance + amount # No leftover deposit balance to consume when there are no deposits left to process assert state.deposit_balance_to_consume == 0 @@ -32,9 +36,9 @@ def test_pending_deposit_balance_equal_churn(spec, state): spec.PendingBalanceDeposit(index=index, amount=amount) ) pre_balance = state.balances[index] - yield from run_epoch_processing_with( - spec, state, "process_pending_balance_deposits" - ) + + yield from run_process_pending_balance_deposits(spec, state) + assert state.balances[index] == pre_balance + amount assert state.deposit_balance_to_consume == 0 assert state.pending_balance_deposits == [] @@ -49,9 +53,9 @@ def test_pending_deposit_balance_above_churn(spec, state): spec.PendingBalanceDeposit(index=index, amount=amount) ) pre_balance = state.balances[index] - yield from run_epoch_processing_with( - spec, state, "process_pending_balance_deposits" - ) + + yield from run_process_pending_balance_deposits(spec, state) + # deposit was above churn, balance hasn't changed assert state.balances[index] == pre_balance # deposit balance to consume is the full churn limit @@ -74,9 +78,9 @@ def test_pending_deposit_preexisting_churn(spec, state): spec.PendingBalanceDeposit(index=index, amount=amount) ) pre_balance = state.balances[index] - yield from run_epoch_processing_with( - spec, state, "process_pending_balance_deposits" - ) + + yield from run_process_pending_balance_deposits(spec, state) + # balance was deposited correctly assert state.balances[index] == pre_balance + amount # No leftover deposit balance to consume when there are no deposits left to process @@ -96,9 +100,9 @@ def 
test_multiple_pending_deposits_below_churn(spec, state): spec.PendingBalanceDeposit(index=1, amount=amount) ) pre_balances = state.balances.copy() - yield from run_epoch_processing_with( - spec, state, "process_pending_balance_deposits" - ) + + yield from run_process_pending_balance_deposits(spec, state) + for i in [0, 1]: assert state.balances[i] == pre_balances[i] + amount # No leftover deposit balance to consume when there are no deposits left to process @@ -116,9 +120,9 @@ def test_multiple_pending_deposits_above_churn(spec, state): spec.PendingBalanceDeposit(index=i, amount=amount) ) pre_balances = state.balances.copy() - yield from run_epoch_processing_with( - spec, state, "process_pending_balance_deposits" - ) + + yield from run_process_pending_balance_deposits(spec, state) + # First two deposits are processed, third is not because above churn for i in [0, 1]: assert state.balances[i] == pre_balances[i] + amount @@ -144,7 +148,9 @@ def test_skipped_deposit_exiting_validator(spec, state): pre_balance = state.balances[index] # Initiate the validator's exit spec.initiate_validator_exit(state, index) - yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + + yield from run_process_pending_balance_deposits(spec, state) + # Deposit is skipped because validator is exiting assert state.balances[index] == pre_balance # All deposits either processed or postponed, no leftover deposit balance to consume @@ -165,7 +171,9 @@ def test_multiple_skipped_deposits_exiting_validators(spec, state): spec.initiate_validator_exit(state, i) pre_pending_balance_deposits = state.pending_balance_deposits.copy() pre_balances = state.balances.copy() - yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + + yield from run_process_pending_balance_deposits(spec, state) + # All deposits are postponed, no balance changes assert state.balances == pre_balances # All deposits are postponed, no leftover deposit balance to consume @@ -183,7 +191,9 @@ def test_multiple_pending_one_skipped(spec, state): pre_balances = state.balances.copy() # Initiate the second validator's exit spec.initiate_validator_exit(state, 1) - yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + + yield from run_process_pending_balance_deposits(spec, state) + # First and last deposit are processed, second is not because of exiting for i in [0, 2]: assert state.balances[i] == pre_balances[i] + amount @@ -206,7 +216,9 @@ def test_mixture_of_skipped_and_above_churn(spec, state): pre_balances = state.balances.copy() # Initiate the second validator's exit spec.initiate_validator_exit(state, 1) - yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + + yield from run_process_pending_balance_deposits(spec, state) + # First deposit is processed assert state.balances[0] == pre_balances[0] + amount01 # Second deposit is postponed, third is above churn @@ -231,7 +243,9 @@ def test_processing_deposit_of_withdrawable_validator(spec, state): spec.initiate_validator_exit(state, index) # Set epoch to withdrawable epoch + 1 to allow processing of the deposit state.slot = spec.SLOTS_PER_EPOCH * (state.validators[index].withdrawable_epoch + 1) - yield from run_epoch_processing_with(spec, state, 'process_pending_balance_deposits') + + yield from run_process_pending_balance_deposits(spec, state) + # Deposit is correctly processed assert state.balances[index] == pre_balance + amount # No leftover deposit balance to consume when there are 
no deposits left to process From 9b15abc7983cfd87b8debb749cbd73b3754218fa Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Fri, 31 May 2024 08:53:18 -0500 Subject: [PATCH 60/89] Append "_bytes" to modified RLP functions --- .../pyspec/eth2spec/test/helpers/execution_payload.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index d9177dd9af..9b963e6034 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -137,7 +137,7 @@ def get_withdrawal_rlp(withdrawal): # https://eips.ethereum.org/EIPS/eip-7002 -def get_withdrawal_request_rlp(withdrawal_request): +def get_withdrawal_request_rlp_bytes(withdrawal_request): withdrawal_request_rlp = [ # source_address (Binary(20, 20), withdrawal_request.source_address), @@ -150,7 +150,7 @@ def get_withdrawal_request_rlp(withdrawal_request): return b"\x01" + encode(values, sedes) -def get_deposit_receipt_rlp(spec, deposit_receipt): +def get_deposit_receipt_rlp_bytes(deposit_receipt): deposit_receipt_rlp = [ # pubkey (Binary(48, 48), deposit_receipt.pubkey), @@ -180,8 +180,8 @@ def compute_el_block_hash(spec, payload): withdrawals_trie_root = compute_trie_root_from_indexed_data(withdrawals_encoded) if is_post_electra(spec): requests_encoded = [] - requests_encoded += [get_deposit_receipt_rlp(spec, receipt) for receipt in payload.deposit_receipts] - requests_encoded += [get_withdrawal_request_rlp(request) for request in payload.withdrawal_requests] + requests_encoded += [get_deposit_receipt_rlp_bytes(receipt) for receipt in payload.deposit_receipts] + requests_encoded += [get_withdrawal_request_rlp_bytes(request) for request in payload.withdrawal_requests] requests_trie_root = compute_trie_root_from_indexed_data(requests_encoded) payload_header = get_execution_payload_header(spec, payload) From eb0cdc72e87248953e3338fcb1ee60830e8b343e Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Fri, 31 May 2024 22:02:22 +0600 Subject: [PATCH 61/89] Update tests --- .../test_process_sync_aggregate.py | 9 ++++- .../test_process_sync_aggregate_random.py | 23 +++++++++---- tests/core/pyspec/eth2spec/test/context.py | 34 +++++++++++++++++-- .../pyspec/eth2spec/test/helpers/genesis.py | 21 ++++++++++-- .../phase0/genesis/test_initialization.py | 10 ++++-- 5 files changed, 83 insertions(+), 14 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate.py index 37e4439eea..f1a270092c 100644 --- a/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate.py +++ b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate.py @@ -24,6 +24,11 @@ with_presets, spec_state_test, always_bls, + single_phase, + with_custom_state, + spec_test, + default_balances_electra, + default_activation_threshold, ) @@ -143,7 +148,9 @@ def is_duplicate_sync_committee(committee_indices): @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") -@spec_state_test +@spec_test +@with_custom_state(balances_fn=default_balances_electra, threshold_fn=default_activation_threshold) +@single_phase def test_sync_committee_rewards_nonduplicate_committee(spec, state): committee_indices = 
compute_committee_indices(state) diff --git a/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate_random.py b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate_random.py index a402e3d540..792bcb0e33 100644 --- a/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate_random.py +++ b/tests/core/pyspec/eth2spec/test/altair/block_processing/sync_aggregate/test_process_sync_aggregate_random.py @@ -24,6 +24,8 @@ with_custom_state, with_presets, spec_test, + default_balances_electra, + misc_balances_electra, ) @@ -132,7 +134,9 @@ def test_random_with_exits_with_duplicates(spec, state): @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") -@spec_state_test +@spec_test +@with_custom_state(balances_fn=default_balances_electra, threshold_fn=default_activation_threshold) +@single_phase def test_random_only_one_participant_without_duplicates(spec, state): rng = random.Random(501) yield from _test_harness_for_randomized_test_case( @@ -144,7 +148,9 @@ def test_random_only_one_participant_without_duplicates(spec, state): @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") -@spec_state_test +@spec_test +@with_custom_state(balances_fn=default_balances_electra, threshold_fn=default_activation_threshold) +@single_phase def test_random_low_participation_without_duplicates(spec, state): rng = random.Random(601) yield from _test_harness_for_randomized_test_case( @@ -156,7 +162,9 @@ def test_random_low_participation_without_duplicates(spec, state): @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") -@spec_state_test +@spec_test +@with_custom_state(balances_fn=default_balances_electra, threshold_fn=default_activation_threshold) +@single_phase def test_random_high_participation_without_duplicates(spec, state): rng = random.Random(701) yield from _test_harness_for_randomized_test_case( @@ -168,7 +176,9 @@ def test_random_high_participation_without_duplicates(spec, state): @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") -@spec_state_test +@spec_test +@with_custom_state(balances_fn=default_balances_electra, threshold_fn=default_activation_threshold) +@single_phase def test_random_all_but_one_participating_without_duplicates(spec, state): rng = random.Random(801) yield from _test_harness_for_randomized_test_case( @@ -181,7 +191,7 @@ def test_random_all_but_one_participating_without_duplicates(spec, state): @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") @spec_test -@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold) +@with_custom_state(balances_fn=misc_balances_electra, threshold_fn=default_activation_threshold) @single_phase def test_random_misc_balances_and_half_participation_without_duplicates(spec, state): rng = random.Random(1501) @@ -194,7 +204,8 @@ def test_random_misc_balances_and_half_participation_without_duplicates(spec, st @with_altair_and_later @with_presets([MINIMAL], reason="to create nonduplicate committee") -@spec_state_test +@spec_test +@with_custom_state(balances_fn=default_balances_electra, threshold_fn=default_activation_threshold) @single_phase def test_random_with_exits_without_duplicates(spec, state): rng = random.Random(1502) diff --git a/tests/core/pyspec/eth2spec/test/context.py 
b/tests/core/pyspec/eth2spec/test/context.py index ff2ab80b5c..e805e1c120 100644 --- a/tests/core/pyspec/eth2spec/test/context.py +++ b/tests/core/pyspec/eth2spec/test/context.py @@ -16,7 +16,7 @@ ALLOWED_TEST_RUNNER_FORKS, LIGHT_CLIENT_TESTING_FORKS, ) -from .helpers.forks import is_post_fork +from .helpers.forks import is_post_fork, is_post_electra from .helpers.genesis import create_genesis_state from .helpers.typing import ( Spec, @@ -86,7 +86,10 @@ def default_activation_threshold(spec: Spec): Helper method to use the default balance activation threshold for state creation for tests. Usage: `@with_custom_state(threshold_fn=default_activation_threshold, ...)` """ - return spec.MAX_EFFECTIVE_BALANCE + if is_post_electra(spec): + return spec.MIN_ACTIVATION_BALANCE + else: + return spec.MAX_EFFECTIVE_BALANCE def zero_activation_threshold(spec: Spec): @@ -106,6 +109,18 @@ def default_balances(spec: Spec): return [spec.MAX_EFFECTIVE_BALANCE] * num_validators +def default_balances_electra(spec: Spec): + """ + Helper method to create a series of default balances for Electra. + Usage: `@with_custom_state(balances_fn=default_balances_electra, ...)` + """ + if not is_post_electra(spec): + return default_balances(spec) + + num_validators = spec.SLOTS_PER_EPOCH * 8 + return [spec.MAX_EFFECTIVE_BALANCE_ELECTRA] * num_validators + + def scaled_churn_balances_min_churn_limit(spec: Spec): """ Helper method to create enough validators to scale the churn limit. @@ -175,6 +190,21 @@ def misc_balances(spec: Spec): return balances +def misc_balances_electra(spec: Spec): + """ + Helper method to create a series of balances that includes some misc. balances for Electra. + Usage: `@with_custom_state(balances_fn=misc_balances, ...)` + """ + if not is_post_electra(spec): + return misc_balances(spec) + + num_validators = spec.SLOTS_PER_EPOCH * 8 + balances = [spec.MAX_EFFECTIVE_BALANCE_ELECTRA * 2 * i // num_validators for i in range(num_validators)] + rng = Random(1234) + rng.shuffle(balances) + return balances + + def misc_balances_in_default_range_with_many_validators(spec: Spec): """ Helper method to create a series of balances that includes some misc. 
balances but diff --git a/tests/core/pyspec/eth2spec/test/helpers/genesis.py b/tests/core/pyspec/eth2spec/test/helpers/genesis.py index 3896b41731..ab274b7573 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/genesis.py +++ b/tests/core/pyspec/eth2spec/test/helpers/genesis.py @@ -15,8 +15,23 @@ def build_mock_validator(spec, i: int, balance: int): active_pubkey = pubkeys[i] withdrawal_pubkey = pubkeys[-1 - i] - # insecurely use pubkey as withdrawal key as well - withdrawal_credentials = spec.BLS_WITHDRAWAL_PREFIX + spec.hash(withdrawal_pubkey)[1:] + if is_post_electra(spec): + if balance > spec.MIN_ACTIVATION_BALANCE: + # use compounding withdrawal credentials if the balance is higher than MIN_ACTIVATION_BALANCE + withdrawal_credentials = ( + spec.COMPOUNDING_WITHDRAWAL_PREFIX + + b'\x00' * 11 + + spec.hash(withdrawal_pubkey)[12:] + ) + else: + # insecurely use pubkey as withdrawal key as well + withdrawal_credentials = spec.BLS_WITHDRAWAL_PREFIX + spec.hash(withdrawal_pubkey)[1:] + max_effective_balace = spec.MAX_EFFECTIVE_BALANCE_ELECTRA + else: + # insecurely use pubkey as withdrawal key as well + withdrawal_credentials = spec.BLS_WITHDRAWAL_PREFIX + spec.hash(withdrawal_pubkey)[1:] + max_effective_balace = spec.MAX_EFFECTIVE_BALANCE + validator = spec.Validator( pubkey=active_pubkey, withdrawal_credentials=withdrawal_credentials, @@ -24,7 +39,7 @@ def build_mock_validator(spec, i: int, balance: int): activation_epoch=spec.FAR_FUTURE_EPOCH, exit_epoch=spec.FAR_FUTURE_EPOCH, withdrawable_epoch=spec.FAR_FUTURE_EPOCH, - effective_balance=min(balance - balance % spec.EFFECTIVE_BALANCE_INCREMENT, spec.MAX_EFFECTIVE_BALANCE) + effective_balance=min(balance - balance % spec.EFFECTIVE_BALANCE_INCREMENT, max_effective_balace) ) return validator diff --git a/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py b/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py index 4c7c5f28c0..f1c3064723 100644 --- a/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py +++ b/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py @@ -11,6 +11,7 @@ ) from eth2spec.test.helpers.forks import ( is_post_altair, + is_post_electra, ) @@ -69,9 +70,14 @@ def test_initialize_beacon_state_some_small_balances(spec): if is_post_altair(spec): yield 'description', 'meta', get_post_altair_description(spec) + if is_post_electra(spec): + max_effective_balance = spec.MAX_EFFECTIVE_BALANCE_ELECTRA + else: + max_effective_balance = spec.MAX_EFFECTIVE_BALANCE + main_deposit_count = spec.config.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT main_deposits, _, deposit_data_list = prepare_full_genesis_deposits( - spec, spec.MAX_EFFECTIVE_BALANCE, + spec, max_effective_balance, deposit_count=main_deposit_count, signed=True, ) # For deposits above, and for another deposit_count, add a balance of EFFECTIVE_BALANCE_INCREMENT @@ -99,7 +105,7 @@ def test_initialize_beacon_state_some_small_balances(spec): assert state.eth1_data.deposit_count == len(deposits) assert state.eth1_data.block_hash == eth1_block_hash # only main deposits participate to the active balance - assert spec.get_total_active_balance(state) == main_deposit_count * spec.MAX_EFFECTIVE_BALANCE + assert spec.get_total_active_balance(state) == main_deposit_count * max_effective_balance # yield state yield 'state', state From a0a23245f053d6a732dc3e7687c00d9a1c4e59ff Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Fri, 31 May 2024 22:16:18 +0600 Subject: [PATCH 62/89] Fix lint --- specs/electra/beacon-chain.md | 3 ++- 
1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 53ef54b4ef..e64ef75748 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1442,7 +1442,8 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, # Process activations for index, validator in enumerate(state.validators): balance = state.balances[index] - validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE_ELECTRA) + validator.effective_balance = min( + balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE_ELECTRA) if validator.effective_balance >= MIN_ACTIVATION_BALANCE: validator.activation_eligibility_epoch = GENESIS_EPOCH validator.activation_epoch = GENESIS_EPOCH From 3fd125f07802b72e84ca8e6e0ef77f18b0f1b283 Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Mon, 3 Jun 2024 14:56:39 +0600 Subject: [PATCH 63/89] Applied suggestions by @hwwhww Co-authored-by: Hsiao-Wei Wang --- specs/electra/beacon-chain.md | 1 + 1 file changed, 1 insertion(+) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index e64ef75748..512b365ae9 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1442,6 +1442,7 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, # Process activations for index, validator in enumerate(state.validators): balance = state.balances[index] + # [Modified in Electra:EIP7251] validator.effective_balance = min( balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE_ELECTRA) if validator.effective_balance >= MIN_ACTIVATION_BALANCE: From 143b9e623dce1f134b33ec70c892ac2f8eae87c8 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Tue, 4 Jun 2024 01:40:41 +0800 Subject: [PATCH 64/89] Switch the order of `process_deposit_receipt` and `process_execution_layer_withdrawal_request` --- specs/electra/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 414cc90a52..3220fea1bf 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1034,9 +1034,9 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: for_ops(body.deposits, process_deposit) # [Modified in Electra:EIP7251] for_ops(body.voluntary_exits, process_voluntary_exit) # [Modified in Electra:EIP7251] for_ops(body.bls_to_execution_changes, process_bls_to_execution_change) + for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, process_execution_layer_withdrawal_request) - for_ops(body.execution_payload.deposit_receipts, process_deposit_receipt) # [New in Electra:EIP6110] # [New in Electra:EIP7251] for_ops(body.execution_payload.consolidation_requests, process_execution_layer_consolidation_request) ``` From d0ad73bd136f3be1ffc1d0dea06eeb399f74f59a Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Wed, 5 Jun 2024 14:57:45 -0500 Subject: [PATCH 65/89] Use /bin/bash to fix `./build_run_docker_tests.sh: 13: Syntax error: "(" unexpected` --- scripts/build_run_docker_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_run_docker_tests.sh b/scripts/build_run_docker_tests.sh index a7fdfbc3dc..8b20cfae62 100755 --- a/scripts/build_run_docker_tests.sh +++ b/scripts/build_run_docker_tests.sh @@ -1,4 +1,4 @@ -#! 
/bin/sh +#! /bin/bash # Run 'consensus-specs' tests from a docker container instance. # *Be sure to launch Docker before running this script.* From 2e6c59393f47341354a562c240cca6733625ab74 Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Wed, 5 Jun 2024 14:57:45 -0500 Subject: [PATCH 66/89] Rename ExecutionLayerWithdrawalRequest->WithdrawalRequest --- specs/electra/beacon-chain.md | 24 +-- ...cess_execution_layer_withdrawal_request.py | 168 +++++++++--------- .../test/electra/sanity/blocks/test_blocks.py | 16 +- .../execution_layer_withdrawal_request.py | 4 +- tests/formats/operations/README.md | 2 +- tests/generators/operations/main.py | 2 +- 6 files changed, 108 insertions(+), 108 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 7b75667eb1..7d8a4489fa 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -27,7 +27,7 @@ - [`DepositRequest`](#depositrequest) - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - - [`ExecutionLayerWithdrawalRequest`](#executionlayerwithdrawalrequest) + - [`WithdrawalRequest`](#executionlayerwithdrawalrequest) - [`ExecutionLayerConsolidationRequest`](#executionlayerconsolidationrequest) - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) @@ -91,7 +91,7 @@ - [Voluntary exits](#voluntary-exits) - [Updated `process_voluntary_exit`](#updated-process_voluntary_exit) - [Execution layer withdrawal requests](#execution-layer-withdrawal-requests) - - [New `process_execution_layer_withdrawal_request`](#new-process_execution_layer_withdrawal_request) + - [New `process_withdrawal_request`](#new-process_withdrawal_request) - [Deposit requests](#deposit-requests) - [New `process_deposit_request`](#new-process_deposit_request) - [Execution layer consolidation requests](#execution-layer-consolidation-requests) @@ -227,12 +227,12 @@ class PendingPartialWithdrawal(Container): amount: Gwei withdrawable_epoch: Epoch ``` -#### `ExecutionLayerWithdrawalRequest` +#### `WithdrawalRequest` *Note*: The container is new in EIP7251:EIP7002. 
```python -class ExecutionLayerWithdrawalRequest(Container): +class WithdrawalRequest(Container): source_address: ExecutionAddress validator_pubkey: BLSPubkey amount: Gwei @@ -336,7 +336,7 @@ class ExecutionPayload(Container): excess_blob_gas: uint64 deposit_requests: List[DepositRequest, MAX_DEPOSIT_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] - withdrawal_requests: List[ExecutionLayerWithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] + withdrawal_requests: List[WithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP7251] consolidation_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] ``` @@ -1075,7 +1075,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: for_ops(body.bls_to_execution_changes, process_bls_to_execution_change) for_ops(body.execution_payload.deposit_requests, process_deposit_request) # [New in Electra:EIP6110] # [New in Electra:EIP7002:EIP7251] - for_ops(body.execution_payload.withdrawal_requests, process_execution_layer_withdrawal_request) + for_ops(body.execution_payload.withdrawal_requests, process_withdrawal_request) # [New in Electra:EIP7251] for_ops(body.execution_payload.consolidation_requests, process_execution_layer_consolidation_request) ``` @@ -1236,16 +1236,16 @@ def process_voluntary_exit(state: BeaconState, signed_voluntary_exit: SignedVolu ##### Execution layer withdrawal requests -###### New `process_execution_layer_withdrawal_request` +###### New `process_withdrawal_request` *Note*: This function is new in Electra following EIP-7002 and EIP-7251. ```python -def process_execution_layer_withdrawal_request( +def process_withdrawal_request( state: BeaconState, - execution_layer_withdrawal_request: ExecutionLayerWithdrawalRequest + withdrawal_request: WithdrawalRequest ) -> None: - amount = execution_layer_withdrawal_request.amount + amount = withdrawal_request.amount is_full_exit_request = amount == FULL_EXIT_REQUEST_AMOUNT # If partial withdrawal queue is full, only full exits are processed @@ -1254,7 +1254,7 @@ def process_execution_layer_withdrawal_request( validator_pubkeys = [v.pubkey for v in state.validators] # Verify pubkey exists - request_pubkey = execution_layer_withdrawal_request.validator_pubkey + request_pubkey = withdrawal_request.validator_pubkey if request_pubkey not in validator_pubkeys: return index = ValidatorIndex(validator_pubkeys.index(request_pubkey)) @@ -1263,7 +1263,7 @@ def process_execution_layer_withdrawal_request( # Verify withdrawal credentials has_correct_credential = has_execution_withdrawal_credential(validator) is_correct_source_address = ( - validator.withdrawal_credentials[12:] == execution_layer_withdrawal_request.source_address + validator.withdrawal_credentials[12:] == withdrawal_request.source_address ) if not (has_correct_credential and is_correct_source_address): return diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_withdrawal_request.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_withdrawal_request.py index 554fc53bda..53460347ad 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_withdrawal_request.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_withdrawal_request.py @@ -29,14 +29,14 @@ def test_basic_withdrawal_request(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, 
validator_index, address=address ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=spec.FULL_EXIT_REQUEST_AMOUNT, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request ) @@ -51,14 +51,14 @@ def test_basic_withdrawal_request_with_compounding_credentials(spec, state): validator_pubkey = state.validators[validator_index].pubkey address = b"\x22" * 20 set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=spec.FULL_EXIT_REQUEST_AMOUNT, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request ) @@ -74,7 +74,7 @@ def test_basic_withdrawal_request_with_full_partial_withdrawal_queue(spec, state set_eth1_withdrawal_credential_with_balance( spec, state, validator_index, address=address ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=spec.FULL_EXIT_REQUEST_AMOUNT, @@ -89,10 +89,10 @@ def test_basic_withdrawal_request_with_full_partial_withdrawal_queue(spec, state ] * spec.PENDING_PARTIAL_WITHDRAWALS_LIMIT # Exit should still be processed - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) @@ -113,14 +113,14 @@ def test_incorrect_source_address(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, validator_index, address=address ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=incorrect_address, validator_pubkey=validator_pubkey, amount=spec.FULL_EXIT_REQUEST_AMOUNT, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -142,14 +142,14 @@ def test_incorrect_withdrawal_credential_prefix(spec, state): spec.BLS_WITHDRAWAL_PREFIX + state.validators[validator_index].withdrawal_credentials[1:] ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=spec.FULL_EXIT_REQUEST_AMOUNT, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -168,14 +168,14 @@ def test_on_withdrawal_request_initiated_validator(spec, state): ) # Initiate exit earlier spec.initiate_validator_exit(state, validator_index) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, 
amount=spec.FULL_EXIT_REQUEST_AMOUNT, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -189,7 +189,7 @@ def test_activation_epoch_less_than_shard_committee_period(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, validator_index, address=address ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=spec.FULL_EXIT_REQUEST_AMOUNT, @@ -200,8 +200,8 @@ def test_activation_epoch_less_than_shard_committee_period(spec, state): + spec.config.SHARD_COMMITTEE_PERIOD ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -221,16 +221,16 @@ def test_basic_partial_withdrawal_request(spec, state): state.balances[validator_index] += amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -253,16 +253,16 @@ def test_basic_partial_withdrawal_request_higher_excess_balance(spec, state): state.balances[validator_index] += 2 * amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -286,16 +286,16 @@ def test_basic_partial_withdrawal_request_lower_than_excess_balance(spec, state) state.balances[validator_index] += excess_balance set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -316,7 +316,7 @@ def test_partial_withdrawal_request_with_pending_withdrawals(spec, state): amount = spec.EFFECTIVE_BALANCE_INCREMENT set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, @@ -331,10 +331,10 @@ def test_partial_withdrawal_request_with_pending_withdrawals(spec, state): # Set balance so that 
the validator still has excess balance even with the pending withdrawals state.balances[validator_index] += 3 * amount - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -357,7 +357,7 @@ def test_partial_withdrawal_request_with_pending_withdrawals_and_high_amount( amount = spec.UINT64_MAX set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, @@ -376,10 +376,10 @@ def test_partial_withdrawal_request_with_pending_withdrawals_and_high_amount( # Set balance so that the validator still has excess balance even with the pending withdrawals state.balances[validator_index] = spec.MAX_EFFECTIVE_BALANCE_ELECTRA - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) @@ -399,7 +399,7 @@ def test_partial_withdrawal_request_with_high_balance(spec, state): ) set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, @@ -407,10 +407,10 @@ def test_partial_withdrawal_request_with_high_balance(spec, state): churn_limit = spec.get_activation_exit_churn_limit(state) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -435,16 +435,16 @@ def test_partial_withdrawal_request_with_high_amount(spec, state): state.balances[validator_index] += 1 set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -467,16 +467,16 @@ def test_partial_withdrawal_request_with_low_amount(spec, state): state.balances[validator_index] += amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, ) # Check that the assigned exit epoch is correct @@ -501,7 +501,7 @@ def test_partial_withdrawal_queue_full(spec, state): # Ensure that the validator has sufficient excess balance state.balances[validator_index] += 2 * amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - 
execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, @@ -514,8 +514,8 @@ def test_partial_withdrawal_queue_full(spec, state): state.pending_partial_withdrawals = [ partial_withdrawal ] * spec.PENDING_PARTIAL_WITHDRAWALS_LIMIT - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -534,16 +534,16 @@ def test_no_compounding_credentials(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, validator_index, address=address ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, success=False, ) @@ -559,14 +559,14 @@ def test_no_excess_balance(spec, state): amount = spec.EFFECTIVE_BALANCE_INCREMENT set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -583,7 +583,7 @@ def test_pending_withdrawals_consume_all_excess_balance(spec, state): state.balances[validator_index] += 10 * amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, @@ -595,8 +595,8 @@ def test_pending_withdrawals_consume_all_excess_balance(spec, state): ) state.pending_partial_withdrawals = [partial_withdrawal] * 10 - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -615,16 +615,16 @@ def test_insufficient_effective_balance(spec, state): ].effective_balance -= spec.EFFECTIVE_BALANCE_INCREMENT set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( + yield from run_withdrawal_request_processing( spec, state, - execution_layer_withdrawal_request, + withdrawal_request, success=False, ) @@ -644,14 +644,14 @@ def test_partial_withdrawal_incorrect_source_address(spec, state): state.balances[validator_index] += 2 * amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( 
source_address=incorrect_address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -673,14 +673,14 @@ def test_partial_withdrawal_incorrect_withdrawal_credential_prefix(spec, state): spec.BLS_WITHDRAWAL_PREFIX + state.validators[validator_index].withdrawal_credentials[1:] ) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -699,14 +699,14 @@ def test_partial_withdrawal_on_exit_initiated_validator(spec, state): set_compounding_withdrawal_credential(spec, state, validator_index, address=address) # Initiate exit earlier spec.initiate_validator_exit(state, validator_index) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -722,7 +722,7 @@ def test_partial_withdrawal_activation_epoch_less_than_shard_committee_period( amount = spec.EFFECTIVE_BALANCE_INCREMENT state.balances[validator_index] += 2 * amount set_compounding_withdrawal_credential(spec, state, validator_index, address=address) - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, amount=amount, @@ -733,8 +733,8 @@ def test_partial_withdrawal_activation_epoch_less_than_shard_committee_period( + spec.config.SHARD_COMMITTEE_PERIOD ) - yield from run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, success=False + yield from run_withdrawal_request_processing( + spec, state, withdrawal_request, success=False ) @@ -743,28 +743,28 @@ def test_partial_withdrawal_activation_epoch_less_than_shard_committee_period( # -def run_execution_layer_withdrawal_request_processing( - spec, state, execution_layer_withdrawal_request, valid=True, success=True +def run_withdrawal_request_processing( + spec, state, withdrawal_request, valid=True, success=True ): """ - Run ``process_execution_layer_withdrawal_request``, yielding: + Run ``process_withdrawal_request``, yielding: - pre-state ('pre') - - execution_layer_withdrawal_request ('execution_layer_withdrawal_request') + - withdrawal_request ('withdrawal_request') - post-state ('post'). 
If ``valid == False``, run expecting ``AssertionError`` If ``success == False``, it doesn't initiate exit successfully """ validator_index = get_validator_index_by_pubkey( - state, execution_layer_withdrawal_request.validator_pubkey + state, withdrawal_request.validator_pubkey ) yield "pre", state - yield "execution_layer_withdrawal_request", execution_layer_withdrawal_request + yield "withdrawal_request", withdrawal_request if not valid: expect_assertion_error( - lambda: spec.process_execution_layer_withdrawal_request( - state, execution_layer_withdrawal_request + lambda: spec.process_withdrawal_request( + state, withdrawal_request ) ) yield "post", None @@ -776,11 +776,11 @@ def run_execution_layer_withdrawal_request_processing( pre_effective_balance = state.validators[validator_index].effective_balance pre_state = state.copy() expected_amount_to_withdraw = compute_amount_to_withdraw( - spec, state, validator_index, execution_layer_withdrawal_request.amount + spec, state, validator_index, withdrawal_request.amount ) - spec.process_execution_layer_withdrawal_request( - state, execution_layer_withdrawal_request + spec.process_withdrawal_request( + state, withdrawal_request ) yield "post", state @@ -794,7 +794,7 @@ def run_execution_layer_withdrawal_request_processing( state.validators[validator_index].effective_balance == pre_effective_balance ) # Full exit request - if execution_layer_withdrawal_request.amount == spec.FULL_EXIT_REQUEST_AMOUNT: + if withdrawal_request.amount == spec.FULL_EXIT_REQUEST_AMOUNT: assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH assert spec.get_pending_balance_to_withdraw(state, validator_index) == 0 diff --git a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_blocks.py b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_blocks.py index 2f8af2b265..4fc76e9c93 100644 --- a/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/electra/sanity/blocks/test_blocks.py @@ -36,12 +36,12 @@ def test_basic_el_withdrawal_request(spec, state): assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH validator_pubkey = state.validators[validator_index].pubkey - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, ) block = build_empty_block_for_next_slot(spec, state) - block.body.execution_payload.withdrawal_requests = [execution_layer_withdrawal_request] + block.body.execution_payload.withdrawal_requests = [withdrawal_request] block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) signed_block = state_transition_and_sign_block(spec, state, block) @@ -73,11 +73,11 @@ def test_basic_btec_and_el_withdrawal_request_in_same_block(spec, state): block.body.bls_to_execution_changes = [signed_address_change] validator_pubkey = state.validators[validator_index].pubkey - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, ) - block.body.execution_payload.withdrawal_requests = [execution_layer_withdrawal_request] + block.body.execution_payload.withdrawal_requests = [withdrawal_request] block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) signed_block = 
state_transition_and_sign_block(spec, state, block) @@ -125,12 +125,12 @@ def test_basic_btec_before_el_withdrawal_request(spec, state): # block_2 contains an EL-Exit operation of the given validator validator_pubkey = state.validators[validator_index].pubkey - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, ) block_2 = build_empty_block_for_next_slot(spec, state) - block_2.body.execution_payload.withdrawal_requests = [execution_layer_withdrawal_request] + block_2.body.execution_payload.withdrawal_requests = [withdrawal_request] block_2.body.execution_payload.block_hash = compute_el_block_hash(spec, block_2.body.execution_payload) signed_block_2 = state_transition_and_sign_block(spec, state, block_2) @@ -157,13 +157,13 @@ def test_cl_exit_and_el_withdrawal_request_in_same_block(spec, state): signed_voluntary_exits = prepare_signed_exits(spec, state, indices=[validator_index]) # EL-Exit validator_pubkey = state.validators[validator_index].pubkey - execution_layer_withdrawal_request = spec.ExecutionLayerWithdrawalRequest( + withdrawal_request = spec.WithdrawalRequest( source_address=address, validator_pubkey=validator_pubkey, ) block = build_empty_block_for_next_slot(spec, state) block.body.voluntary_exits = signed_voluntary_exits - block.body.execution_payload.withdrawal_requests = [execution_layer_withdrawal_request] + block.body.execution_payload.withdrawal_requests = [withdrawal_request] block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) signed_block = state_transition_and_sign_block(spec, state, block) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py b/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py index f111f89430..6741db1025 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py @@ -7,9 +7,9 @@ # -def run_execution_layer_withdrawal_request_processing(spec, state, withdrawal_request, valid=True, success=True): +def run_withdrawal_request_processing(spec, state, withdrawal_request, valid=True, success=True): """ - Run ``process_execution_layer_withdrawal_request``, yielding: + Run ``process_withdrawal_request``, yielding: - pre-state ('pre') - withdrawal_request ('withdrawal_request') - post-state ('post'). 
diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index 6a7fcbb1d5..95900f5111 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -46,7 +46,7 @@ Operations: | `withdrawals` | `ExecutionPayload` | `execution_payload` | `process_withdrawals(state, execution_payload)` (new in Capella) | | `bls_to_execution_change` | `SignedBLSToExecutionChange` | `address_change` | `process_bls_to_execution_change(state, address_change)` (new in Capella) | | `deposit_request` | `DepositRequest` | `deposit_request` | `process_deposit_request(state, deposit_request)` (new in Electra) | -| `execution_layer_withdrawal_request` | `ExecutionLayerWithdrawalRequest` | `execution_layer_withdrawal_request` | `process_execution_layer_withdrawal_request(state, execution_layer_withdrawal_request)` (new in Electra) | +| `withdrawal_request` | `WithdrawalRequest` | `withdrawal_request` | `process_withdrawal_request(state, withdrawal_request)` (new in Electra) | | `execution_layer_consolidation_request` | `ExecutionLayerConsolidationRequest` | `execution_layer_consolidation_request` | `process_execution_layer_consolidation_request(state, execution_layer_consolidation_request)` (new in Electra) | Note that `block_header` is not strictly an operation (and is a full `Block`), but processed in the same manner, and hence included here. diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 10c099bc57..4058469ae8 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -47,7 +47,7 @@ 'attestation', 'execution_layer_consolidation_request', 'deposit_request', - 'execution_layer_withdrawal_request', + 'withdrawal_request', 'voluntary_exit' ]} electra_mods = combine_mods(_new_electra_mods, deneb_mods) From 5910aeed4dd25adba36adaf5583ecfdfd5003e25 Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Wed, 5 Jun 2024 14:58:23 -0500 Subject: [PATCH 67/89] Rename ExecutionLayerConsolidationRequest->ConsolidationRequest --- specs/electra/beacon-chain.md | 24 +++++----- ...s_execution_layer_consolidation_request.py | 46 +++++++++---------- tests/formats/operations/README.md | 2 +- tests/generators/operations/main.py | 2 +- 4 files changed, 37 insertions(+), 37 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 7d8a4489fa..a5a6a5072c 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -28,7 +28,7 @@ - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - [`WithdrawalRequest`](#executionlayerwithdrawalrequest) - - [`ExecutionLayerConsolidationRequest`](#executionlayerconsolidationrequest) + - [`ConsolidationRequest`](#executionlayerconsolidationrequest) - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - [`AttesterSlashing`](#attesterslashing) @@ -95,7 +95,7 @@ - [Deposit requests](#deposit-requests) - [New `process_deposit_request`](#new-process_deposit_request) - [Execution layer consolidation requests](#execution-layer-consolidation-requests) - - [New `process_execution_layer_consolidation_request`](#new-process_execution_layer_consolidation_request) + - [New `process_consolidation_request`](#new-process_consolidation_request) - [Testing](#testing) @@ -238,12 +238,12 @@ class WithdrawalRequest(Container): amount: Gwei ``` -#### `ExecutionLayerConsolidationRequest` +#### `ConsolidationRequest` *Note*: The container is 
new in EIP7251. ```python -class ExecutionLayerConsolidationRequest(Container): +class ConsolidationRequest(Container): source_address: ExecutionAddress source_pubkey: BLSPubkey target_pubkey: BLSPubkey @@ -338,7 +338,7 @@ class ExecutionPayload(Container): # [New in Electra:EIP7002:EIP7251] withdrawal_requests: List[WithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD] # [New in Electra:EIP7251] - consolidation_requests: List[ExecutionLayerConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] + consolidation_requests: List[ConsolidationRequest, MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD] ``` #### `ExecutionPayloadHeader` @@ -1077,7 +1077,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # [New in Electra:EIP7002:EIP7251] for_ops(body.execution_payload.withdrawal_requests, process_withdrawal_request) # [New in Electra:EIP7251] - for_ops(body.execution_payload.consolidation_requests, process_execution_layer_consolidation_request) + for_ops(body.execution_payload.consolidation_requests, process_consolidation_request) ``` ##### Attestations @@ -1326,12 +1326,12 @@ def process_deposit_request(state: BeaconState, deposit_request: DepositRequest) ##### Execution layer consolidation requests -###### New `process_execution_layer_consolidation_request` +###### New `process_consolidation_request` ```python -def process_execution_layer_consolidation_request( +def process_consolidation_request( state: BeaconState, - execution_layer_consolidation_request: ExecutionLayerConsolidationRequest + consolidation_request: ConsolidationRequest ) -> None: # If the pending consolidations queue is full, consolidation requests are ignored if len(state.pending_consolidations) == PENDING_CONSOLIDATIONS_LIMIT: @@ -1342,8 +1342,8 @@ def process_execution_layer_consolidation_request( validator_pubkeys = [v.pubkey for v in state.validators] # Verify pubkeys exists - request_source_pubkey = execution_layer_consolidation_request.source_pubkey - request_target_pubkey = execution_layer_consolidation_request.target_pubkey + request_source_pubkey = consolidation_request.source_pubkey + request_target_pubkey = consolidation_request.target_pubkey if request_source_pubkey not in validator_pubkeys: return if request_target_pubkey not in validator_pubkeys: @@ -1360,7 +1360,7 @@ def process_execution_layer_consolidation_request( # Verify source withdrawal credentials has_correct_credential = has_execution_withdrawal_credential(source_validator) is_correct_source_address = ( - source_validator.withdrawal_credentials[12:] == execution_layer_consolidation_request.source_address + source_validator.withdrawal_credentials[12:] == consolidation_request.source_address ) if not (has_correct_credential and is_correct_source_address): return diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py index 5e3f7877cf..b12438d0d1 100644 --- a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py +++ b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py @@ -39,7 +39,7 @@ def test_basic_consolidation_in_current_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = 
spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -88,7 +88,7 @@ def test_basic_consolidation_in_new_consolidation_epoch(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -131,7 +131,7 @@ def test_basic_consolidation_with_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -178,7 +178,7 @@ def test_basic_consolidation_with_insufficient_preexisting_churn(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -229,7 +229,7 @@ def test_basic_consolidation_with_compounding_credentials(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -274,7 +274,7 @@ def test_consolidation_churn_limit_balance(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -322,7 +322,7 @@ def test_consolidation_balance_larger_than_churn_limit(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -369,7 +369,7 @@ def test_consolidation_balance_through_two_churn_epochs(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -415,7 +415,7 @@ def test_incorrect_source_equals_target(spec, state): spec, state, source_index, address=source_address ) # Make consolidation from source to source - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[source_index].pubkey, @@ -447,7 +447,7 @@ 
def test_incorrect_exceed_pending_consolidations_limit(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -476,7 +476,7 @@ def test_incorrect_not_enough_consolidation_churn_available(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -504,7 +504,7 @@ def test_incorrect_exited_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -536,7 +536,7 @@ def test_incorrect_exited_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -566,7 +566,7 @@ def test_incorrect_inactive_source(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -598,7 +598,7 @@ def test_incorrect_inactive_target(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -627,7 +627,7 @@ def test_incorrect_no_source_execution_withdrawal_credential(spec, state): source_index = spec.get_active_validator_indices(state, current_epoch)[0] target_index = spec.get_active_validator_indices(state, current_epoch)[1] source_address = b"\x22" * 20 - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -656,7 +656,7 @@ def test_incorrect_no_target_execution_withdrawal_credential(spec, state): set_eth1_withdrawal_credential_with_balance( spec, state, source_index, address=source_address ) - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -684,7 +684,7 @@ def test_incorrect_incorrect_source_address(spec, state): spec, state, source_index, address=source_address ) # Make 
consolidation with different source address - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=state.validators[target_index].pubkey, @@ -714,7 +714,7 @@ def test_incorrect_unknown_source_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different source pubkey - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=source_address, source_pubkey=b"\x00" * 48, target_pubkey=state.validators[target_index].pubkey, @@ -744,7 +744,7 @@ def test_incorrect_unknown_target_pubkey(spec, state): spec, state, source_index, address=source_address ) # Make consolidation with different target pubkey - consolidation = spec.ExecutionLayerConsolidationRequest( + consolidation = spec.ConsolidationRequest( source_address=b"\x33" * 20, source_pubkey=state.validators[source_index].pubkey, target_pubkey=b"\x00" * 48, @@ -760,7 +760,7 @@ def run_consolidation_processing(spec, state, consolidation, success=True): """ Run ``process_consolidation``, yielding: - pre-state ('pre') - - execution_layer_consolidation_request ('execution_layer_consolidation_request') + - consolidation_request ('consolidation_request') - post-state ('post'). If ``valid == False``, run expecting ``AssertionError`` """ @@ -778,9 +778,9 @@ def run_consolidation_processing(spec, state, consolidation, success=True): pre_state = state.copy() yield 'pre', state - yield 'execution_layer_consolidation_request', consolidation + yield 'consolidation_request', consolidation - spec.process_execution_layer_consolidation_request(state, consolidation) + spec.process_consolidation_request(state, consolidation) yield 'post', state diff --git a/tests/formats/operations/README.md b/tests/formats/operations/README.md index 95900f5111..7c3281e2c6 100644 --- a/tests/formats/operations/README.md +++ b/tests/formats/operations/README.md @@ -47,7 +47,7 @@ Operations: | `bls_to_execution_change` | `SignedBLSToExecutionChange` | `address_change` | `process_bls_to_execution_change(state, address_change)` (new in Capella) | | `deposit_request` | `DepositRequest` | `deposit_request` | `process_deposit_request(state, deposit_request)` (new in Electra) | | `withdrawal_request` | `WithdrawalRequest` | `withdrawal_request` | `process_withdrawal_request(state, withdrawal_request)` (new in Electra) | -| `execution_layer_consolidation_request` | `ExecutionLayerConsolidationRequest` | `execution_layer_consolidation_request` | `process_execution_layer_consolidation_request(state, execution_layer_consolidation_request)` (new in Electra) | +| `consolidation_request` | `ConsolidationRequest` | `consolidation_request` | `process_consolidation_request(state, consolidation_request)` (new in Electra) | Note that `block_header` is not strictly an operation (and is a full `Block`), but processed in the same manner, and hence included here. 
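As an illustrative aside (not part of the diff above), the renamed Electra rows in this table map one-to-one onto handler calls, so a test consumer can dispatch on the handler name. The sketch below is hypothetical consumer-side code under stated assumptions: only the SSZ type names and `process_*` signatures come from the table, while the `decode_bytes` step and the dispatch layout are assumptions for illustration.

```python
# Hypothetical dispatch for the Electra operation handlers named in the table
# above. Type names and handler signatures are taken from the table; the
# decode step and dispatch structure are assumptions for illustration only.
ELECTRA_OPERATION_HANDLERS = {
    'deposit_request': ('DepositRequest', 'process_deposit_request'),
    'withdrawal_request': ('WithdrawalRequest', 'process_withdrawal_request'),
    'consolidation_request': ('ConsolidationRequest', 'process_consolidation_request'),
}


def apply_operation_vector(spec, state, handler_name, operation_bytes):
    """Decode the serialized operation and apply the matching handler to ``state``."""
    type_name, process_fn_name = ELECTRA_OPERATION_HANDLERS[handler_name]
    operation = getattr(spec, type_name).decode_bytes(operation_bytes)  # assumed SSZ decode helper
    getattr(spec, process_fn_name)(state, operation)
```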
diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 4058469ae8..06b65b8b3b 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -45,7 +45,7 @@ _new_electra_mods = {key: 'eth2spec.test.electra.block_processing.test_process_' + key for key in [ 'attestation', - 'execution_layer_consolidation_request', + 'consolidation_request', 'deposit_request', 'withdrawal_request', 'voluntary_exit' From 5dec87f90f0d09c0dcadd03c4bb697947cec697e Mon Sep 17 00:00:00 2001 From: Preston Van Loon Date: Wed, 5 Jun 2024 15:12:08 -0500 Subject: [PATCH 68/89] Update TOC --- specs/electra/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index a5a6a5072c..d9e9d1f27b 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -27,8 +27,8 @@ - [`DepositRequest`](#depositrequest) - [`PendingBalanceDeposit`](#pendingbalancedeposit) - [`PendingPartialWithdrawal`](#pendingpartialwithdrawal) - - [`WithdrawalRequest`](#executionlayerwithdrawalrequest) - - [`ConsolidationRequest`](#executionlayerconsolidationrequest) + - [`WithdrawalRequest`](#withdrawalrequest) + - [`ConsolidationRequest`](#consolidationrequest) - [`PendingConsolidation`](#pendingconsolidation) - [Modified Containers](#modified-containers) - [`AttesterSlashing`](#attesterslashing) From ac105f472a6383ff1cdb4eaa308c654606721094 Mon Sep 17 00:00:00 2001 From: b-wagn Date: Thu, 6 Jun 2024 16:36:03 +0200 Subject: [PATCH 69/89] update polynomial-commitments-sampling.md -- fix shift_polynomialcoeff --- specs/_features/eip7594/polynomial-commitments-sampling.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 3a0bd8a77f..b5f7995c58 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -263,14 +263,14 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial def shift_polynomialcoeff(polynomial_coeff: PolynomialCoeff, factor: BLSFieldElement) -> PolynomialCoeff: """ Shift the evaluation of a polynomial in coefficient form by factor. - This results in a new polynomial g(x) = f(factor * x) + This returns a new polynomial g in coefficient form such that g(x) = f(factor * x). + In other words, each coefficient of f is scaled by a power of factor. """ factor_power = 1 - inv_factor = pow(int(factor), BLS_MODULUS - 2, BLS_MODULUS) o = [] for p in polynomial_coeff: o.append(int(p) * factor_power % BLS_MODULUS) - factor_power = factor_power * inv_factor % BLS_MODULUS + factor_power = factor_power * int(factor) % BLS_MODULUS return o ``` From 99dfc9ab5b0695ccce441208b4d17469769d9f9a Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 7 Jun 2024 21:53:01 +0800 Subject: [PATCH 70/89] Update file names --- ... 
=> test_process_consolidation_request.py} | 0 ....py => test_process_withdrawal_request.py} | 0 .../execution_layer_withdrawal_request.py | 39 ------------------- tests/generators/operations/main.py | 2 +- 4 files changed, 1 insertion(+), 40 deletions(-) rename tests/core/pyspec/eth2spec/test/electra/block_processing/{test_process_execution_layer_consolidation_request.py => test_process_consolidation_request.py} (100%) rename tests/core/pyspec/eth2spec/test/electra/block_processing/{test_process_execution_layer_withdrawal_request.py => test_process_withdrawal_request.py} (100%) delete mode 100644 tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation_request.py similarity index 100% rename from tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_consolidation_request.py rename to tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_consolidation_request.py diff --git a/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_withdrawal_request.py b/tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_withdrawal_request.py similarity index 100% rename from tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_execution_layer_withdrawal_request.py rename to tests/core/pyspec/eth2spec/test/electra/block_processing/test_process_withdrawal_request.py diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py b/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py deleted file mode 100644 index 6741db1025..0000000000 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_layer_withdrawal_request.py +++ /dev/null @@ -1,39 +0,0 @@ -from eth2spec.test.context import expect_assertion_error -from eth2spec.test.helpers.state import get_validator_index_by_pubkey - - -# -# Run processing -# - - -def run_withdrawal_request_processing(spec, state, withdrawal_request, valid=True, success=True): - """ - Run ``process_withdrawal_request``, yielding: - - pre-state ('pre') - - withdrawal_request ('withdrawal_request') - - post-state ('post'). 
- If ``valid == False``, run expecting ``AssertionError`` - If ``success == False``, it doesn't initiate exit successfully - """ - validator_index = get_validator_index_by_pubkey(state, withdrawal_request.validator_pubkey) - - yield 'pre', state - yield 'withdrawal_request', withdrawal_request - - if not valid: - expect_assertion_error(lambda: spec.process_withdrawal_request(state, withdrawal_request)) - yield 'post', None - return - - pre_exit_epoch = state.validators[validator_index].exit_epoch - - spec.process_withdrawal_request(state, withdrawal_request) - - yield 'post', state - - if success: - assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH - assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH - else: - assert state.validators[validator_index].exit_epoch == pre_exit_epoch diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 06b65b8b3b..204f5023a4 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -47,8 +47,8 @@ 'attestation', 'consolidation_request', 'deposit_request', - 'withdrawal_request', 'voluntary_exit' + 'withdrawal_request', ]} electra_mods = combine_mods(_new_electra_mods, deneb_mods) From 0298370af2e0876abe563691c59bb9924bbb16f2 Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Mon, 10 Jun 2024 11:28:18 -0500 Subject: [PATCH 71/89] Fix rlp_bytes methods --- .../test/helpers/execution_payload.py | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index 5047f0d2ea..73dfe99fd4 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -137,6 +137,25 @@ def get_withdrawal_rlp(withdrawal): return encode(values, sedes) +def get_deposit_request_rlp_bytes(deposit_request): + deposit_request_rlp = [ + # pubkey + (Binary(48, 48), deposit_request.pubkey), + # withdrawal_credentials + (Binary(32, 32), deposit_request.withdrawal_credentials), + # amount + (big_endian_int, deposit_request.amount), + # pubkey + (Binary(96, 96), deposit_request.signature), + # index + (big_endian_int, deposit_request.index), + ] + + sedes = List([schema for schema, _ in deposit_request_rlp]) + values = [value for _, value in deposit_request_rlp] + return b"\x00" + encode(values, sedes) + + # https://eips.ethereum.org/EIPS/eip-7002 def get_withdrawal_request_rlp_bytes(withdrawal_request): withdrawal_request_rlp = [ @@ -164,26 +183,7 @@ def get_consolidation_request_rlp_bytes(consolidation_request): sedes = List([schema for schema, _ in consolidation_request_rlp]) values = [value for _, value in consolidation_request_rlp] - return b"\x01" + encode(values, sedes) - - -def get_deposit_request_rlp_bytes(spec, deposit_request): - deposit_request_rlp = [ - # pubkey - (Binary(48, 48), deposit_request.pubkey), - # withdrawal_credentials - (Binary(32, 32), deposit_request.withdrawal_credentials), - # amount - (big_endian_int, deposit_request.amount), - # pubkey - (Binary(96, 96), deposit_request.signature), - # index - (big_endian_int, deposit_request.index), - ] - - sedes = List([schema for schema, _ in deposit_request_rlp]) - values = [value for _, value in deposit_request_rlp] - return encode(values, sedes) + return b"\x02" + encode(values, sedes) def compute_el_block_hash(spec, payload): From 83cb494e31829ea42523fda7e025e9b954c30570 Mon Sep 17 00:00:00 2001 From: 
Justin Traglia Date: Mon, 10 Jun 2024 11:28:54 -0500 Subject: [PATCH 72/89] Rename receipt to request --- tests/core/pyspec/eth2spec/test/helpers/execution_payload.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index 73dfe99fd4..dbfd29e671 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -197,7 +197,7 @@ def compute_el_block_hash(spec, payload): withdrawals_trie_root = compute_trie_root_from_indexed_data(withdrawals_encoded) if is_post_electra(spec): requests_encoded = [] - requests_encoded += [get_deposit_request_rlp_bytes(receipt) for receipt in payload.deposit_requests] + requests_encoded += [get_deposit_request_rlp_bytes(request) for request in payload.deposit_requests] requests_encoded += [get_withdrawal_request_rlp_bytes(request) for request in payload.withdrawal_requests] requests_encoded += [get_consolidation_request_rlp_bytes(request) for request in payload.consolidation_requests] From 5ace424cd83a4fab8b4a5963d4d0ce34e888739a Mon Sep 17 00:00:00 2001 From: Justin Traglia <95511699+jtraglia@users.noreply.github.com> Date: Tue, 11 Jun 2024 06:52:24 -0500 Subject: [PATCH 73/89] `recover_cells_and_kzg_proofs` & matrix refactor (#3788) * Recover cells and proofs & matrix clean up * Fix table of contents * Update reference tests generator * Update test format * Remove unused imports * Fix some minor nits * Rename MatrixEntry's proof to kzg_proof * Move RowIndex & ColumnIndex to das-core --- specs/_features/eip7594/das-core.md | 78 +++++--- .../polynomial-commitments-sampling.md | 49 +++-- .../test/eip7594/unittests/das/test_das.py | 51 +++-- .../test_polynomial_commitments.py | 14 +- tests/formats/kzg_7594/recover_all_cells.md | 23 --- .../kzg_7594/recover_cells_and_kzg_proofs.md | 24 +++ tests/generators/kzg_7594/main.py | 180 +++++++++++------- 7 files changed, 255 insertions(+), 164 deletions(-) delete mode 100644 tests/formats/kzg_7594/recover_all_cells.md create mode 100644 tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index dc50365b1e..67719ad0f3 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -17,6 +17,7 @@ - [Custody setting](#custody-setting) - [Containers](#containers) - [`DataColumnSidecar`](#datacolumnsidecar) + - [`MatrixEntry`](#matrixentry) - [Helper functions](#helper-functions) - [`get_custody_columns`](#get_custody_columns) - [`compute_extended_matrix`](#compute_extended_matrix) @@ -53,12 +54,10 @@ The following values are (non-configurable) constants used throughout the specif ## Custom types -We define the following Python custom types for type hinting and readability: - | Name | SSZ equivalent | Description | | - | - | - | -| `DataColumn` | `List[Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK]` | The data of each column in EIP-7594 | -| `ExtendedMatrix` | `List[Cell, MAX_CELLS_IN_EXTENDED_MATRIX]` | The full data of one-dimensional erasure coding extended blobs (in row major format). 
| +| `RowIndex` | `uint64` | Row identifier in the matrix of cells | +| `ColumnIndex` | `uint64` | Column identifier in the matrix of cells | ## Configuration @@ -79,7 +78,7 @@ We define the following Python custom types for type hinting and readability: | Name | Value | Description | | - | - | - | -| `SAMPLES_PER_SLOT` | `8` | Number of `DataColumn` random samples a node queries per slot | +| `SAMPLES_PER_SLOT` | `8` | Number of `DataColumnSidecar` random samples a node queries per slot | | `CUSTODY_REQUIREMENT` | `1` | Minimum number of subnets an honest node custodies and serves samples from | | `TARGET_NUMBER_OF_PEERS` | `70` | Suggested minimum peer count | @@ -90,13 +89,23 @@ We define the following Python custom types for type hinting and readability: ```python class DataColumnSidecar(Container): index: ColumnIndex # Index of column in extended matrix - column: DataColumn + column: List[Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK] kzg_commitments: List[KZGCommitment, MAX_BLOB_COMMITMENTS_PER_BLOCK] kzg_proofs: List[KZGProof, MAX_BLOB_COMMITMENTS_PER_BLOCK] signed_block_header: SignedBeaconBlockHeader kzg_commitments_inclusion_proof: Vector[Bytes32, KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH] ``` +#### `MatrixEntry` + +```python +class MatrixEntry(Container): + cell: Cell + kzg_proof: KZGProof + column_index: ColumnIndex + row_index: RowIndex +``` + ### Helper functions #### `get_custody_columns` @@ -132,7 +141,7 @@ def get_custody_columns(node_id: NodeID, custody_subnet_count: uint64) -> Sequen #### `compute_extended_matrix` ```python -def compute_extended_matrix(blobs: Sequence[Blob]) -> ExtendedMatrix: +def compute_extended_matrix(blobs: Sequence[Blob]) -> List[MatrixEntry, MAX_CELLS_IN_EXTENDED_MATRIX]: """ Return the full ``ExtendedMatrix``. @@ -140,29 +149,44 @@ def compute_extended_matrix(blobs: Sequence[Blob]) -> ExtendedMatrix: The data structure for storing cells is implementation-dependent. """ extended_matrix = [] - for blob in blobs: - extended_matrix.extend(compute_cells(blob)) - return ExtendedMatrix(extended_matrix) + for blob_index, blob in enumerate(blobs): + cells, proofs = compute_cells_and_kzg_proofs(blob) + for cell_id, (cell, proof) in enumerate(zip(cells, proofs)): + extended_matrix.append(MatrixEntry( + cell=cell, + kzg_proof=proof, + row_index=blob_index, + column_index=cell_id, + )) + return extended_matrix ``` #### `recover_matrix` ```python -def recover_matrix(cells_dict: Dict[Tuple[BlobIndex, CellID], Cell], blob_count: uint64) -> ExtendedMatrix: +def recover_matrix(partial_matrix: Sequence[MatrixEntry], + blob_count: uint64) -> List[MatrixEntry, MAX_CELLS_IN_EXTENDED_MATRIX]: """ - Return the recovered ``ExtendedMatrix``. + Return the recovered extended matrix. - This helper demonstrates how to apply ``recover_all_cells``. + This helper demonstrates how to apply ``recover_cells_and_kzg_proofs``. The data structure for storing cells is implementation-dependent. 
""" - extended_matrix: List[Cell] = [] + extended_matrix = [] for blob_index in range(blob_count): - cell_ids = [cell_id for b_index, cell_id in cells_dict.keys() if b_index == blob_index] - cells = [cells_dict[(BlobIndex(blob_index), cell_id)] for cell_id in cell_ids] - - all_cells_for_row = recover_all_cells(cell_ids, cells) - extended_matrix.extend(all_cells_for_row) - return ExtendedMatrix(extended_matrix) + cell_ids = [e.column_index for e in partial_matrix if e.row_index == blob_index] + cells = [e.cell for e in partial_matrix if e.row_index == blob_index] + proofs = [e.kzg_proof for e in partial_matrix if e.row_index == blob_index] + + recovered_cells, recovered_proofs = recover_cells_and_kzg_proofs(cell_ids, cells, proofs) + for cell_id, (cell, proof) in enumerate(zip(recovered_cells, recovered_proofs)): + extended_matrix.append(MatrixEntry( + cell=cell, + kzg_proof=proof, + row_index=blob_index, + column_index=cell_id, + )) + return extended_matrix ``` #### `get_data_column_sidecars` @@ -182,15 +206,15 @@ def get_data_column_sidecars(signed_block: SignedBeaconBlock, proofs = [cells_and_proofs[i][1] for i in range(blob_count)] sidecars = [] for column_index in range(NUMBER_OF_COLUMNS): - column = DataColumn([cells[row_index][column_index] - for row_index in range(blob_count)]) - kzg_proof_of_column = [proofs[row_index][column_index] - for row_index in range(blob_count)] + column_cells = [cells[row_index][column_index] + for row_index in range(blob_count)] + column_proofs = [proofs[row_index][column_index] + for row_index in range(blob_count)] sidecars.append(DataColumnSidecar( index=column_index, - column=column, + column=column_cells, kzg_commitments=block.body.blob_kzg_commitments, - kzg_proofs=kzg_proof_of_column, + kzg_proofs=column_proofs, signed_block_header=signed_block_header, kzg_commitments_inclusion_proof=kzg_commitments_inclusion_proof, )) @@ -283,7 +307,7 @@ Such trailing techniques and their analysis will be valuable for any DAS constru ### Row (blob) custody -In the one-dimension construction, a node samples the peers by requesting the whole `DataColumn`. In reconstruction, a node can reconstruct all the blobs by 50% of the columns. Note that nodes can still download the row via `blob_sidecar_{subnet_id}` subnets. +In the one-dimension construction, a node samples the peers by requesting the whole `DataColumnSidecar`. In reconstruction, a node can reconstruct all the blobs by 50% of the columns. Note that nodes can still download the row via `blob_sidecar_{subnet_id}` subnets. 
The potential benefits of having row custody could include: diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index b5f7995c58..acf4ffc627 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -1,4 +1,4 @@ -# EIP-7594 -- Polynomial Commitments +# EIP-7594 -- Polynomial Commitments Sampling ## Table of contents @@ -46,7 +46,7 @@ - [`construct_vanishing_polynomial`](#construct_vanishing_polynomial) - [`recover_shifted_data`](#recover_shifted_data) - [`recover_original_data`](#recover_original_data) - - [`recover_all_cells`](#recover_all_cells) + - [`recover_cells_and_kzg_proofs`](#recover_cells_and_kzg_proofs) @@ -67,9 +67,7 @@ Public functions MUST accept raw bytes as input and perform the required cryptog | `Coset` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The evaluation domain of a cell | | `CosetEvals` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The internal representation of a cell (the evaluations over its Coset) | | `Cell` | `ByteVector[BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL]` | The unit of blob data that can come with its own KZG proof | -| `CellID` | `uint64` | Cell identifier | -| `RowIndex` | `uint64` | Row identifier | -| `ColumnIndex` | `uint64` | Column identifier | +| `CellID` | `uint64` | Validation: `x < CELLS_PER_EXT_BLOB` | ## Constants @@ -660,14 +658,18 @@ def recover_original_data(eval_shifted_extended_evaluation: Sequence[BLSFieldEle return reconstructed_data ``` -### `recover_all_cells` +### `recover_cells_and_kzg_proofs` ```python -def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequence[Cell]: +def recover_cells_and_kzg_proofs(cell_ids: Sequence[CellID], + cells: Sequence[Cell], + proofs_bytes: Sequence[Bytes48]) -> Tuple[ + Vector[Cell, CELLS_PER_EXT_BLOB], + Vector[KZGProof, CELLS_PER_EXT_BLOB]]: """ - Recover all of the cells in the extended blob from FIELD_ELEMENTS_PER_EXT_BLOB evaluations, - half of which can be missing. - This algorithm uses FFTs to recover cells faster than using Lagrange implementation, as can be seen here: + Given at least 50% of cells/proofs for a blob, recover all the cells/proofs. + This algorithm uses FFTs to recover cells faster than using Lagrange + implementation, as can be seen here: https://ethresear.ch/t/reed-solomon-erasure-code-recovery-in-n-log-2-n-time-with-ffts/3039 A faster version thanks to Qi Zhou can be found here: @@ -675,17 +677,20 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequ Public method. 
""" - assert len(cell_ids) == len(cells) + assert len(cell_ids) == len(cells) == len(proofs_bytes) # Check we have enough cells to be able to perform the reconstruction assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB # Check for duplicates assert len(cell_ids) == len(set(cell_ids)) - # Check that each cell is the correct length - for cell in cells: - assert len(cell) == BYTES_PER_CELL # Check that the cell ids are within bounds for cell_id in cell_ids: assert cell_id < CELLS_PER_EXT_BLOB + # Check that each cell is the correct length + for cell in cells: + assert len(cell) == BYTES_PER_CELL + # Check that each proof is the correct length + for proof_bytes in proofs_bytes: + assert len(proof_bytes) == BYTES_PER_PROOF # Get the extended domain roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) @@ -716,9 +721,21 @@ def recover_all_cells(cell_ids: Sequence[CellID], cells: Sequence[Cell]) -> Sequ end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL assert reconstructed_data[start:end] == coset_evals - reconstructed_data_as_cells = [ + recovered_cells = [ coset_evals_to_cell(reconstructed_data[i * FIELD_ELEMENTS_PER_CELL:(i + 1) * FIELD_ELEMENTS_PER_CELL]) for i in range(CELLS_PER_EXT_BLOB)] + + polynomial_eval = reconstructed_data[:FIELD_ELEMENTS_PER_BLOB] + polynomial_coeff = polynomial_eval_to_coeff(polynomial_eval) + recovered_proofs = [None] * CELLS_PER_EXT_BLOB + for i, cell_id in enumerate(cell_ids): + recovered_proofs[cell_id] = bytes_to_kzg_proof(proofs_bytes[i]) + for i in range(CELLS_PER_EXT_BLOB): + if recovered_proofs[i] is None: + coset = coset_for_cell(CellID(i)) + proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset) + assert coset_evals_to_cell(ys) == recovered_cells[i] + recovered_proofs[i] = proof - return reconstructed_data_as_cells + return recovered_cells, recovered_proofs ``` diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py index cdbfad9ffe..b2e0a44b82 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/das/test_das.py @@ -9,6 +9,11 @@ ) +def chunks(lst, n): + """Helper that splits a list into N sized chunks.""" + return [lst[i:i + n] for i in range(0, len(lst), n)] + + @with_eip7594_and_later @spec_test @single_phase @@ -20,15 +25,15 @@ def test_compute_extended_matrix(spec): extended_matrix = spec.compute_extended_matrix(input_blobs) assert len(extended_matrix) == spec.CELLS_PER_EXT_BLOB * blob_count - rows = [extended_matrix[i:(i + spec.CELLS_PER_EXT_BLOB)] - for i in range(0, len(extended_matrix), spec.CELLS_PER_EXT_BLOB)] + rows = chunks(extended_matrix, spec.CELLS_PER_EXT_BLOB) assert len(rows) == blob_count - assert len(rows[0]) == spec.CELLS_PER_EXT_BLOB + for row in rows: + assert len(row) == spec.CELLS_PER_EXT_BLOB for blob_index, row in enumerate(rows): extended_blob = [] - for cell in row: - extended_blob.extend(spec.cell_to_coset_evals(cell)) + for entry in row: + extended_blob.extend(spec.cell_to_coset_evals(entry.cell)) blob_part = extended_blob[0:len(extended_blob) // 2] blob = b''.join([spec.bls_field_to_bytes(x) for x in blob_part]) assert blob == input_blobs[blob_index] @@ -43,27 +48,19 @@ def test_recover_matrix(spec): # Number of samples we will be recovering from N_SAMPLES = spec.CELLS_PER_EXT_BLOB // 2 + # Compute an extended matrix with two blobs blob_count = 2 - cells_dict = {} - original_cells = [] - for blob_index in 
range(blob_count): - # Get the data we will be working with - blob = get_sample_blob(spec, rng=rng) - # Extend data with Reed-Solomon and split the extended data in cells - cells = spec.compute_cells(blob) - original_cells.append(cells) - cell_ids = [] - # First figure out just the indices of the cells - for _ in range(N_SAMPLES): - cell_id = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) - while cell_id in cell_ids: - cell_id = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) - cell_ids.append(cell_id) - cell = cells[cell_id] - cells_dict[(blob_index, cell_id)] = cell - assert len(cell_ids) == N_SAMPLES + blobs = [get_sample_blob(spec, rng=rng) for _ in range(blob_count)] + extended_matrix = spec.compute_extended_matrix(blobs) + + # Construct a matrix with some entries missing + partial_matrix = [] + for blob_entries in chunks(extended_matrix, spec.CELLS_PER_EXT_BLOB): + rng.shuffle(blob_entries) + partial_matrix.extend(blob_entries[:N_SAMPLES]) + + # Given the partial matrix, recover the missing entries + recovered_matrix = spec.recover_matrix(partial_matrix, blob_count) - # Recover the matrix - recovered_matrix = spec.recover_matrix(cells_dict, blob_count) - flatten_original_cells = [cell for cells in original_cells for cell in cells] - assert recovered_matrix == flatten_original_cells + # Ensure that the recovered matrix matches the original matrix + assert recovered_matrix == extended_matrix diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index c247e0532f..215f8f3e6f 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -64,7 +64,7 @@ def test_verify_cell_kzg_proof_batch(spec): @with_eip7594_and_later @spec_test @single_phase -def test_recover_all_cells(spec): +def test_recover_cells_and_kzg_proofs(spec): rng = random.Random(5566) # Number of samples we will be recovering from @@ -74,7 +74,7 @@ def test_recover_all_cells(spec): blob = get_sample_blob(spec) # Extend data with Reed-Solomon and split the extended data in cells - cells = spec.compute_cells(blob) + cells, proofs = spec.compute_cells_and_kzg_proofs(blob) # Compute the cells we will be recovering from cell_ids = [] @@ -84,19 +84,21 @@ def test_recover_all_cells(spec): while j in cell_ids: j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) cell_ids.append(j) - # Now the cells themselves + # Now the cells/proofs themselves known_cells = [cells[cell_id] for cell_id in cell_ids] + known_proofs = [proofs[cell_id] for cell_id in cell_ids] - # Recover all of the cells - recovered_cells = spec.recover_all_cells(cell_ids, known_cells) + # Recover the missing cells and proofs + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, known_cells, known_proofs) recovered_data = [x for xs in recovered_cells for x in xs] # Check that the original data match the non-extended portion of the recovered data blob_byte_array = [b for b in blob] assert blob_byte_array == recovered_data[:len(recovered_data) // 2] - # Check that the recovered cells match the original cells + # Check that the recovered cells/proofs match the original cells/proofs assert cells == recovered_cells + assert proofs == recovered_proofs @with_eip7594_and_later diff --git 
a/tests/formats/kzg_7594/recover_all_cells.md b/tests/formats/kzg_7594/recover_all_cells.md deleted file mode 100644 index 082769627e..0000000000 --- a/tests/formats/kzg_7594/recover_all_cells.md +++ /dev/null @@ -1,23 +0,0 @@ -# Test format: Recover all cells - -Recover all cells given at least 50% of the original `cells`. - -## Test case format - -The test data is declared in a `data.yaml` file: - -```yaml -input: - cell_ids: List[CellID] -- the cell identifier for each cell - cells: List[Cell] -- the partial collection of cells -output: List[Cell] -- all cells, including recovered cells -``` - -- `CellID` is an unsigned 64-bit integer. -- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. - -All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. - -## Condition - -The `recover_all_cells` handler should recover missing cells, and the result should match the expected `output`. If any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element) or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. diff --git a/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md b/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md new file mode 100644 index 0000000000..8967664128 --- /dev/null +++ b/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md @@ -0,0 +1,24 @@ +# Test format: Recover cells and KZG proofs + +Recover all cells/proofs given at least 50% of the original `cells` and `proofs`. + +## Test case format + +The test data is declared in a `data.yaml` file: + +```yaml +input: + cell_ids: List[CellID] -- the cell identifier for each cell + cells: List[Cell] -- the partial collection of cells +output: Tuple[List[Cell], List[KZGProof]] -- all cells and proofs +``` + +- `CellID` is an unsigned 64-bit integer. +- `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. +- `KZGProof` is a 48-byte hexadecimal string, prefixed with `0x`. + +All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. + +## Condition + +The `recover_cells_and_kzg_proofs` handler should recover missing cells and proofs, and the result should match the expected `output`. If any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), any proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. 
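As an illustrative aside, a consumer of these vectors might check a single test case roughly as sketched below. This is a hedged sketch only: the `data.yaml` loading, hex decoding, and comparison details are assumptions, while the `recover_cells_and_kzg_proofs(cell_ids, cells, proofs)` call follows the spec change earlier in this commit, and the `proofs` input field matches what the updated generator (see the `main.py` changes that follow) writes alongside `cell_ids` and `cells`.

```python
# Hypothetical consumer-side check of one recover_cells_and_kzg_proofs vector.
# File layout and decoding are assumptions; the handler signature follows the
# spec change in this commit, and `proofs` is emitted by the updated generator.
import yaml


def check_recover_vector(spec, path):
    with open(path) as f:
        data = yaml.safe_load(f)

    inputs = data['input']
    cell_ids = inputs['cell_ids']
    cells = [bytes.fromhex(c[2:]) for c in inputs['cells']]
    proofs = [bytes.fromhex(p[2:]) for p in inputs['proofs']]

    try:
        cells_out, proofs_out = spec.recover_cells_and_kzg_proofs(cell_ids, cells, proofs)
    except Exception:
        # Invalid inputs are expected to correspond to a `null` output.
        assert data['output'] is None
        return

    expected_cells, expected_proofs = data['output']
    assert [bytes(c) for c in cells_out] == [bytes.fromhex(c[2:]) for c in expected_cells]
    assert [bytes(p) for p in proofs_out] == [bytes.fromhex(p[2:]) for p in expected_proofs]
```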
diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index ef412805a4..fd9d28727c 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -11,11 +11,9 @@ from eth2spec.test.helpers.constants import EIP7594 from eth2spec.test.helpers.typing import SpecForkName from eth2spec.test.utils.kzg_tests import ( - BLOB_RANDOM_VALID1, - BLOB_RANDOM_VALID2, - BLOB_RANDOM_VALID3, CELL_RANDOM_VALID1, CELL_RANDOM_VALID2, + G1, INVALID_BLOBS, INVALID_G1_POINTS, INVALID_INDIVIDUAL_CELL_BYTES, @@ -616,187 +614,237 @@ def case04_verify_cell_kzg_proof_batch(): ############################################################################### -# Test cases for recover_all_cells +# Test cases for recover_cells_and_kzg_proofs ############################################################################### -def case05_recover_all_cells(): +def case05_recover_cells_and_kzg_proofs(): # Valid: No missing cells - blob = BLOB_RANDOM_VALID1 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[0] cell_ids = list(range(spec.CELLS_PER_EXT_BLOB)) - recovered_cells = spec.recover_all_cells(cell_ids, cells) + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, cells, proofs) assert recovered_cells == cells - identifier = make_id(cell_ids, cells) - yield f'recover_all_cells_case_valid_no_missing_{identifier}', { + assert recovered_proofs == proofs + identifier = make_id(cell_ids, cells, proofs) + yield f'recover_cells_and_kzg_proofs_case_valid_no_missing_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(cells), + 'proofs': encode_hex_list(proofs), }, - 'output': encode_hex_list(recovered_cells) + 'output': (encode_hex_list(recovered_cells), encode_hex_list(recovered_proofs)) } # Valid: Half missing cells (every other cell) - blob = BLOB_RANDOM_VALID2 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[1] cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) partial_cells = [cells[cell_id] for cell_id in cell_ids] - recovered_cells = spec.recover_all_cells(cell_ids, partial_cells) + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, partial_cells, partial_proofs) assert recovered_cells == cells - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_valid_half_missing_every_other_cell_{identifier}', { + assert recovered_proofs == proofs + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_valid_half_missing_every_other_cell_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, - 'output': encode_hex_list(recovered_cells) + 'output': (encode_hex_list(recovered_cells), encode_hex_list(recovered_proofs)) } # Valid: Half missing cells (first half) - blob = BLOB_RANDOM_VALID3 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[2] cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB // 2)) partial_cells = [cells[cell_id] for cell_id in cell_ids] - recovered_cells = spec.recover_all_cells(cell_ids, partial_cells) + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, partial_cells, partial_proofs) assert recovered_cells == cells + assert recovered_proofs == proofs identifier = make_id(cell_ids, partial_cells) - yield 
f'recover_all_cells_case_valid_half_missing_first_half_{identifier}', { + yield f'recover_cells_and_kzg_proofs_case_valid_half_missing_first_half_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, - 'output': encode_hex_list(recovered_cells) + 'output': (encode_hex_list(recovered_cells), encode_hex_list(recovered_proofs)) } # Valid: Half missing cells (second half) - blob = BLOB_RANDOM_VALID1 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[3] cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2, spec.CELLS_PER_EXT_BLOB)) partial_cells = [cells[cell_id] for cell_id in cell_ids] - recovered_cells = spec.recover_all_cells(cell_ids, partial_cells) + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, partial_cells, partial_proofs) assert recovered_cells == cells + assert recovered_proofs == proofs identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_valid_half_missing_second_half_{identifier}', { + yield f'recover_cells_and_kzg_proofs_case_valid_half_missing_second_half_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, - 'output': encode_hex_list(recovered_cells) + 'output': (encode_hex_list(recovered_cells), encode_hex_list(recovered_proofs)) } # Edge case: All cells are missing cell_ids, partial_cells = [], [] - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells) identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_all_cells_are_missing_{identifier}', { + yield f'recover_cells_and_kzg_proofs_case_invalid_all_cells_are_missing_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: More than half missing - blob = BLOB_RANDOM_VALID2 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[4] cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 - 1)) partial_cells = [cells[cell_id] for cell_id in cell_ids] - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_more_than_half_missing_{identifier}', { + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_than_half_missing_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: More cells provided than CELLS_PER_EXT_BLOB - blob = BLOB_RANDOM_VALID2 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[5] cell_ids = list(range(spec.CELLS_PER_EXT_BLOB)) + [0] partial_cells = [cells[cell_id] for cell_id in cell_ids] - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_more_cells_than_cells_per_ext_blob_{identifier}', { + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + expect_exception(spec.recover_cells_and_kzg_proofs, 
cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_cells_than_cells_per_ext_blob_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: Invalid cell_id - blob = BLOB_RANDOM_VALID1 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[6] cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] # Replace first cell_id with an invalid value cell_ids[0] = spec.CELLS_PER_EXT_BLOB - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_cell_id_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_cell_id_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: Invalid cell - blob = BLOB_RANDOM_VALID2 for cell in INVALID_INDIVIDUAL_CELL_BYTES: - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[6] cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] # Replace first cell with an invalid value partial_cells[0] = cell - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_cell_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_cell_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), + }, + 'output': None + } + + # Edge case: Invalid proof + for proof in INVALID_G1_POINTS: + cells, proofs = VALID_CELLS_AND_PROOFS[0] + cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + # Replace first proof with an invalid value + partial_proofs[0] = proof + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_proof_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: More cell_ids than cells - blob = BLOB_RANDOM_VALID3 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[0] cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] # Add another cell_id cell_ids.append(spec.CELLS_PER_EXT_BLOB - 1) - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield 
f'recover_all_cells_case_invalid_more_cell_ids_than_cells_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_cell_ids_than_cells_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: More cells than cell_ids - blob = BLOB_RANDOM_VALID1 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[1] cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] # Add another cell partial_cells.append(CELL_RANDOM_VALID1) - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_more_cells_than_cell_ids_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_cells_than_cell_ids_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), + }, + 'output': None + } + + # Edge case: More proofs than cell_ids + cells, proofs = VALID_CELLS_AND_PROOFS[1] + cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + # Add another proof + partial_proofs.append(G1) + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_proofs_than_cell_ids_{identifier}', { + 'input': { + 'cell_ids': cell_ids, + 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } # Edge case: Duplicate cell_id - blob = BLOB_RANDOM_VALID2 - cells = spec.compute_cells(blob) + cells, proofs = VALID_CELLS_AND_PROOFS[2] # There will be 65 cells, where 64 are unique and 1 is a duplicate. # Depending on the implementation, 63 & 1 might not fail for the right # reason. For example, if the implementation assigns cells in an array @@ -804,14 +852,16 @@ def case05_recover_all_cells(): # to insufficient cell count, not because of a duplicate cell. 
cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 + 1)) partial_cells = [cells[cell_id] for cell_id in cell_ids] + partial_proofs = [proofs[cell_id] for cell_id in cell_ids] # Replace first cell_id with the second cell_id cell_ids[0] = cell_ids[1] - expect_exception(spec.recover_all_cells, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) - yield f'recover_all_cells_case_invalid_duplicate_cell_id_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_ids, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_duplicate_cell_id_{identifier}', { 'input': { 'cell_ids': cell_ids, 'cells': encode_hex_list(partial_cells), + 'proofs': encode_hex_list(partial_proofs), }, 'output': None } @@ -853,5 +903,5 @@ def cases_fn() -> Iterable[gen_typing.TestCase]: create_provider(EIP7594, 'compute_cells_and_kzg_proofs', case02_compute_cells_and_kzg_proofs), create_provider(EIP7594, 'verify_cell_kzg_proof', case03_verify_cell_kzg_proof), create_provider(EIP7594, 'verify_cell_kzg_proof_batch', case04_verify_cell_kzg_proof_batch), - create_provider(EIP7594, 'recover_all_cells', case05_recover_all_cells), + create_provider(EIP7594, 'recover_cells_and_kzg_proofs', case05_recover_cells_and_kzg_proofs), ]) From d137553f96c525279a595ff29ade8e617346ee3f Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 11 Jun 2024 07:41:52 -0500 Subject: [PATCH 74/89] Rename CellID to CellIndex --- specs/_features/eip7594/das-core.md | 12 +- .../polynomial-commitments-sampling.md | 81 ++--- .../test_polynomial_commitments.py | 20 +- .../kzg_7594/recover_cells_and_kzg_proofs.md | 6 +- .../formats/kzg_7594/verify_cell_kzg_proof.md | 6 +- .../kzg_7594/verify_cell_kzg_proof_batch.md | 2 +- tests/generators/kzg_7594/main.py | 282 +++++++++--------- 7 files changed, 205 insertions(+), 204 deletions(-) diff --git a/specs/_features/eip7594/das-core.md b/specs/_features/eip7594/das-core.md index 67719ad0f3..9faf77b9ab 100644 --- a/specs/_features/eip7594/das-core.md +++ b/specs/_features/eip7594/das-core.md @@ -151,12 +151,12 @@ def compute_extended_matrix(blobs: Sequence[Blob]) -> List[MatrixEntry, MAX_CELL extended_matrix = [] for blob_index, blob in enumerate(blobs): cells, proofs = compute_cells_and_kzg_proofs(blob) - for cell_id, (cell, proof) in enumerate(zip(cells, proofs)): + for cell_index, (cell, proof) in enumerate(zip(cells, proofs)): extended_matrix.append(MatrixEntry( cell=cell, kzg_proof=proof, row_index=blob_index, - column_index=cell_id, + column_index=cell_index, )) return extended_matrix ``` @@ -174,17 +174,17 @@ def recover_matrix(partial_matrix: Sequence[MatrixEntry], """ extended_matrix = [] for blob_index in range(blob_count): - cell_ids = [e.column_index for e in partial_matrix if e.row_index == blob_index] + cell_indices = [e.column_index for e in partial_matrix if e.row_index == blob_index] cells = [e.cell for e in partial_matrix if e.row_index == blob_index] proofs = [e.kzg_proof for e in partial_matrix if e.row_index == blob_index] - recovered_cells, recovered_proofs = recover_cells_and_kzg_proofs(cell_ids, cells, proofs) - for cell_id, (cell, proof) in enumerate(zip(recovered_cells, recovered_proofs)): + recovered_cells, recovered_proofs = recover_cells_and_kzg_proofs(cell_indices, cells, proofs) + for cell_index, (cell, proof) in enumerate(zip(recovered_cells, recovered_proofs)): extended_matrix.append(MatrixEntry( cell=cell, kzg_proof=proof, row_index=blob_index, - 
column_index=cell_id, + column_index=cell_index, )) return extended_matrix ``` diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index acf4ffc627..a79e5eed9a 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -67,7 +67,7 @@ Public functions MUST accept raw bytes as input and perform the required cryptog | `Coset` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The evaluation domain of a cell | | `CosetEvals` | `Vector[BLSFieldElement, FIELD_ELEMENTS_PER_CELL]` | The internal representation of a cell (the evaluations over its Coset) | | `Cell` | `ByteVector[BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL]` | The unit of blob data that can come with its own KZG proof | -| `CellID` | `uint64` | Validation: `x < CELLS_PER_EXT_BLOB` | +| `CellIndex` | `uint64` | Validation: `x < CELLS_PER_EXT_BLOB` | ## Constants @@ -402,15 +402,15 @@ def verify_kzg_proof_multi_impl(commitment: KZGCommitment, #### `coset_for_cell` ```python -def coset_for_cell(cell_id: CellID) -> Coset: +def coset_for_cell(cell_index: CellIndex) -> Coset: """ - Get the coset for a given ``cell_id`` + Get the coset for a given ``cell_index``. """ - assert cell_id < CELLS_PER_EXT_BLOB + assert cell_index < CELLS_PER_EXT_BLOB roots_of_unity_brp = bit_reversal_permutation( compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) ) - return Coset(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_id:FIELD_ELEMENTS_PER_CELL * (cell_id + 1)]) + return Coset(roots_of_unity_brp[FIELD_ELEMENTS_PER_CELL * cell_index:FIELD_ELEMENTS_PER_CELL * (cell_index + 1)]) ``` ## Cells @@ -439,7 +439,7 @@ def compute_cells_and_kzg_proofs(blob: Blob) -> Tuple[ proofs = [] for i in range(CELLS_PER_EXT_BLOB): - coset = coset_for_cell(CellID(i)) + coset = coset_for_cell(CellIndex(i)) proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset) cells.append(coset_evals_to_cell(ys)) proofs.append(proof) @@ -465,9 +465,9 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) extended_data_rbo = bit_reversal_permutation(extended_data) cells = [] - for cell_id in range(CELLS_PER_EXT_BLOB): - start = cell_id * FIELD_ELEMENTS_PER_CELL - end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL + for cell_index in range(CELLS_PER_EXT_BLOB): + start = cell_index * FIELD_ELEMENTS_PER_CELL + end = (cell_index + 1) * FIELD_ELEMENTS_PER_CELL cells.append(coset_evals_to_cell(CosetEvals(extended_data_rbo[start:end]))) return cells ``` @@ -478,7 +478,7 @@ def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: ```python def verify_cell_kzg_proof(commitment_bytes: Bytes48, - cell_id: CellID, + cell_index: CellIndex, cell: Cell, proof_bytes: Bytes48) -> bool: """ @@ -487,11 +487,11 @@ def verify_cell_kzg_proof(commitment_bytes: Bytes48, Public method. 
""" assert len(commitment_bytes) == BYTES_PER_COMMITMENT - assert cell_id < CELLS_PER_EXT_BLOB + assert cell_index < CELLS_PER_EXT_BLOB assert len(cell) == BYTES_PER_CELL assert len(proof_bytes) == BYTES_PER_PROOF - coset = coset_for_cell(cell_id) + coset = coset_for_cell(cell_index) return verify_kzg_proof_multi_impl( bytes_to_kzg_commitment(commitment_bytes), @@ -553,7 +553,7 @@ def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48], ### `construct_vanishing_polynomial` ```python -def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ +def construct_vanishing_polynomial(missing_cell_indices: Sequence[CellIndex]) -> Tuple[ Sequence[BLSFieldElement], Sequence[BLSFieldElement]]: """ @@ -565,8 +565,8 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ # Compute polynomial that vanishes at all the missing cells (over the small domain) short_zero_poly = vanishing_polynomialcoeff([ - roots_of_unity_reduced[reverse_bits(missing_cell_id, CELLS_PER_EXT_BLOB)] - for missing_cell_id in missing_cell_ids + roots_of_unity_reduced[reverse_bits(missing_cell_index, CELLS_PER_EXT_BLOB)] + for missing_cell_index in missing_cell_indices ]) # Extend vanishing polynomial to full domain using the closed form of the vanishing polynomial over a coset @@ -580,12 +580,12 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ zero_poly_eval_brp = bit_reversal_permutation(zero_poly_eval) # Sanity check - for cell_id in range(CELLS_PER_EXT_BLOB): - start = cell_id * FIELD_ELEMENTS_PER_CELL - end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL - if cell_id in missing_cell_ids: + for cell_index in range(CELLS_PER_EXT_BLOB): + start = cell_index * FIELD_ELEMENTS_PER_CELL + end = (cell_index + 1) * FIELD_ELEMENTS_PER_CELL + if cell_index in missing_cell_indices: assert all(a == 0 for a in zero_poly_eval_brp[start:end]) - else: # cell_id in cell_ids + else: # cell_index in cell_indices assert all(a != 0 for a in zero_poly_eval_brp[start:end]) return zero_poly_coeff, zero_poly_eval @@ -594,7 +594,7 @@ def construct_vanishing_polynomial(missing_cell_ids: Sequence[CellID]) -> Tuple[ ### `recover_shifted_data` ```python -def recover_shifted_data(cell_ids: Sequence[CellID], +def recover_shifted_data(cell_indices: Sequence[CellIndex], cells: Sequence[Cell], zero_poly_eval: Sequence[BLSFieldElement], zero_poly_coeff: Sequence[BLSFieldElement], @@ -609,9 +609,9 @@ def recover_shifted_data(cell_ids: Sequence[CellID], shift_inv = div(BLSFieldElement(1), shift_factor) extended_evaluation_rbo = [0] * FIELD_ELEMENTS_PER_EXT_BLOB - for cell_id, cell in zip(cell_ids, cells): - start = cell_id * FIELD_ELEMENTS_PER_CELL - end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL + for cell_index, cell in zip(cell_indices, cells): + start = cell_index * FIELD_ELEMENTS_PER_CELL + end = (cell_index + 1) * FIELD_ELEMENTS_PER_CELL extended_evaluation_rbo[start:end] = cell extended_evaluation = bit_reversal_permutation(extended_evaluation_rbo) @@ -661,7 +661,7 @@ def recover_original_data(eval_shifted_extended_evaluation: Sequence[BLSFieldEle ### `recover_cells_and_kzg_proofs` ```python -def recover_cells_and_kzg_proofs(cell_ids: Sequence[CellID], +def recover_cells_and_kzg_proofs(cell_indices: Sequence[CellIndex], cells: Sequence[Cell], proofs_bytes: Sequence[Bytes48]) -> Tuple[ Vector[Cell, CELLS_PER_EXT_BLOB], @@ -677,14 +677,14 @@ def recover_cells_and_kzg_proofs(cell_ids: Sequence[CellID], Public method. 
""" - assert len(cell_ids) == len(cells) == len(proofs_bytes) + assert len(cell_indices) == len(cells) == len(proofs_bytes) # Check we have enough cells to be able to perform the reconstruction - assert CELLS_PER_EXT_BLOB / 2 <= len(cell_ids) <= CELLS_PER_EXT_BLOB + assert CELLS_PER_EXT_BLOB / 2 <= len(cell_indices) <= CELLS_PER_EXT_BLOB # Check for duplicates - assert len(cell_ids) == len(set(cell_ids)) - # Check that the cell ids are within bounds - for cell_id in cell_ids: - assert cell_id < CELLS_PER_EXT_BLOB + assert len(cell_indices) == len(set(cell_indices)) + # Check that the cell indices are within bounds + for cell_index in cell_indices: + assert cell_index < CELLS_PER_EXT_BLOB # Check that each cell is the correct length for cell in cells: assert len(cell) == BYTES_PER_CELL @@ -698,11 +698,12 @@ def recover_cells_and_kzg_proofs(cell_ids: Sequence[CellID], # Convert cells to coset evals cosets_evals = [cell_to_coset_evals(cell) for cell in cells] - missing_cell_ids = [CellID(cell_id) for cell_id in range(CELLS_PER_EXT_BLOB) if cell_id not in cell_ids] - zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_ids) + missing_cell_indices = [CellIndex(cell_index) for cell_index in range(CELLS_PER_EXT_BLOB) + if cell_index not in cell_indices] + zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_indices) eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data( - cell_ids, + cell_indices, cosets_evals, zero_poly_eval, zero_poly_coeff, @@ -716,9 +717,9 @@ def recover_cells_and_kzg_proofs(cell_ids: Sequence[CellID], roots_of_unity_extended, ) - for cell_id, coset_evals in zip(cell_ids, cosets_evals): - start = cell_id * FIELD_ELEMENTS_PER_CELL - end = (cell_id + 1) * FIELD_ELEMENTS_PER_CELL + for cell_index, coset_evals in zip(cell_indices, cosets_evals): + start = cell_index * FIELD_ELEMENTS_PER_CELL + end = (cell_index + 1) * FIELD_ELEMENTS_PER_CELL assert reconstructed_data[start:end] == coset_evals recovered_cells = [ @@ -728,11 +729,11 @@ def recover_cells_and_kzg_proofs(cell_ids: Sequence[CellID], polynomial_eval = reconstructed_data[:FIELD_ELEMENTS_PER_BLOB] polynomial_coeff = polynomial_eval_to_coeff(polynomial_eval) recovered_proofs = [None] * CELLS_PER_EXT_BLOB - for i, cell_id in enumerate(cell_ids): - recovered_proofs[cell_id] = bytes_to_kzg_proof(proofs_bytes[i]) + for i, cell_index in enumerate(cell_indices): + recovered_proofs[cell_index] = bytes_to_kzg_proof(proofs_bytes[i]) for i in range(CELLS_PER_EXT_BLOB): if recovered_proofs[i] is None: - coset = coset_for_cell(CellID(i)) + coset = coset_for_cell(CellIndex(i)) proof, ys = compute_kzg_proof_multi_impl(polynomial_coeff, coset) assert coset_evals_to_cell(ys) == recovered_cells[i] recovered_proofs[i] = proof diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index 215f8f3e6f..ec54db661f 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -36,10 +36,10 @@ def test_verify_cell_kzg_proof(spec): commitment = spec.blob_to_kzg_commitment(blob) cells, proofs = spec.compute_cells_and_kzg_proofs(blob) - cell_id = 0 - assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) 
- cell_id = 1 - assert spec.verify_cell_kzg_proof(commitment, cell_id, cells[cell_id], proofs[cell_id]) + cell_index = 0 + assert spec.verify_cell_kzg_proof(commitment, cell_index, cells[cell_index], proofs[cell_index]) + cell_index = 1 + assert spec.verify_cell_kzg_proof(commitment, cell_index, cells[cell_index], proofs[cell_index]) @with_eip7594_and_later @@ -77,19 +77,19 @@ def test_recover_cells_and_kzg_proofs(spec): cells, proofs = spec.compute_cells_and_kzg_proofs(blob) # Compute the cells we will be recovering from - cell_ids = [] + cell_indices = [] # First figure out just the indices of the cells for i in range(N_SAMPLES): j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) - while j in cell_ids: + while j in cell_indices: j = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) - cell_ids.append(j) + cell_indices.append(j) # Now the cells/proofs themselves - known_cells = [cells[cell_id] for cell_id in cell_ids] - known_proofs = [proofs[cell_id] for cell_id in cell_ids] + known_cells = [cells[cell_index] for cell_index in cell_indices] + known_proofs = [proofs[cell_index] for cell_index in cell_indices] # Recover the missing cells and proofs - recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, known_cells, known_proofs) + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_indices, known_cells, known_proofs) recovered_data = [x for xs in recovered_cells for x in xs] # Check that the original data match the non-extended portion of the recovered data diff --git a/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md b/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md index 8967664128..4e839c8ff4 100644 --- a/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md +++ b/tests/formats/kzg_7594/recover_cells_and_kzg_proofs.md @@ -8,12 +8,12 @@ The test data is declared in a `data.yaml` file: ```yaml input: - cell_ids: List[CellID] -- the cell identifier for each cell + cell_indices: List[CellIndex] -- the cell indices cells: List[Cell] -- the partial collection of cells output: Tuple[List[Cell], List[KZGProof]] -- all cells and proofs ``` -- `CellID` is an unsigned 64-bit integer. +- `CellIndex` is an unsigned 64-bit integer. - `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. - `KZGProof` is a 48-byte hexadecimal string, prefixed with `0x`. @@ -21,4 +21,4 @@ All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with ` ## Condition -The `recover_cells_and_kzg_proofs` handler should recover missing cells and proofs, and the result should match the expected `output`. If any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), any proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. +The `recover_cells_and_kzg_proofs` handler should recover missing cells and proofs, and the result should match the expected `output`. If any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), any proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), or any `cell_index` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. 
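To make the failure modes in the Condition above concrete, the following standalone sketch restates the input checks that the `recover_cells_and_kzg_proofs` spec function performs before attempting recovery, as shown in the spec diff earlier in this patch series. The preset values below are stand-ins for illustration, not constants pulled from a spec config.

```python
# Stand-in presets for illustration only.
CELLS_PER_EXT_BLOB = 128
BYTES_PER_CELL = 2048
BYTES_PER_PROOF = 48


def check_recover_inputs(cell_indices, cells, proofs):
    # One index per provided cell and per provided proof.
    assert len(cell_indices) == len(cells) == len(proofs)
    # At least half of the extended blob is needed, and no more than all of it.
    assert CELLS_PER_EXT_BLOB // 2 <= len(cell_indices) <= CELLS_PER_EXT_BLOB
    # No duplicate indices.
    assert len(cell_indices) == len(set(cell_indices))
    # Every index is within bounds.
    assert all(index < CELLS_PER_EXT_BLOB for index in cell_indices)
    # Cells and proofs have the expected encoded lengths.
    assert all(len(cell) == BYTES_PER_CELL for cell in cells)
    assert all(len(proof) == BYTES_PER_PROOF for proof in proofs)
```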
diff --git a/tests/formats/kzg_7594/verify_cell_kzg_proof.md b/tests/formats/kzg_7594/verify_cell_kzg_proof.md index 5ab3ad0739..3d5f33b524 100644 --- a/tests/formats/kzg_7594/verify_cell_kzg_proof.md +++ b/tests/formats/kzg_7594/verify_cell_kzg_proof.md @@ -9,18 +9,18 @@ The test data is declared in a `data.yaml` file: ```yaml input: commitment: Bytes48 -- the KZG commitment - cell_id: CellID -- the identifier for the cell + cell_index: CellIndex -- the cell index cell: Cell -- the cell proof: Bytes48 -- the KZG proof for the cell output: bool -- true (correct proof) or false (incorrect proof) ``` - `Bytes48` is a 48-byte hexadecimal string, prefixed with `0x`. -- `CellID` is an unsigned 64-bit integer. +- `CellIndex` is an unsigned 64-bit integer. - `Cell` is a 2048-byte hexadecimal string, prefixed with `0x`. All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`. ## Condition -The `verify_cell_kzg_proof` handler should verify that `commitment` is a correct KZG commitment to `cell` by using the cell KZG proof `proof`, and the result should match the expected `output`. If the commitment or proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), `cell` is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. +The `verify_cell_kzg_proof` handler should verify that `commitment` is a correct KZG commitment to `cell` by using the cell KZG proof `proof`, and the result should match the expected `output`. If the commitment or proof is invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), `cell` is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or `cell_index` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. diff --git a/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md b/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md index 9761b55032..439d1e25ae 100644 --- a/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md +++ b/tests/formats/kzg_7594/verify_cell_kzg_proof_batch.md @@ -25,4 +25,4 @@ All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with ` ## Condition -The `verify_cell_kzg_proof_batch` handler should verify that `row_commitments` are correct KZG commitments to `cells` by using the cell KZG proofs `proofs`, and the result should match the expected `output`. If any of the commitments or proofs are invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or any `cell_id` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. the output should be `null`. +The `verify_cell_kzg_proof_batch` handler should verify that `row_commitments` are correct KZG commitments to `cells` by using the cell KZG proofs `proofs`, and the result should match the expected `output`. If any of the commitments or proofs are invalid (e.g. not on the curve or not in the G1 subgroup of the BLS curve), any cell is invalid (e.g. incorrect length or one of the 32-byte blocks does not represent a BLS field element), or any `cell_index` is invalid (e.g. greater than the number of cells for an extended blob), it should error, i.e. 
the output should be `null`. diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index fd9d28727c..be343795e7 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -96,15 +96,15 @@ def case03_verify_cell_kzg_proof(): for i in range(len(VALID_BLOBS)): cells, proofs = VALID_CELLS_AND_PROOFS[i] commitment = VALID_COMMITMENTS[i] - cell_id = (2 ** i - 1) % spec.CELLS_PER_EXT_BLOB - cell = cells[cell_id] - proof = proofs[cell_id] - assert spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + cell_index = (2 ** i - 1) % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_index] + proof = proofs[cell_index] + assert spec.verify_cell_kzg_proof(commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield f'verify_cell_kzg_proof_case_valid_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -115,15 +115,15 @@ def case03_verify_cell_kzg_proof(): for i in range(len(VALID_BLOBS)): cells, proofs = VALID_CELLS_AND_PROOFS[i] commitment = bls_add_one(VALID_COMMITMENTS[i]) - cell_id = 99 % spec.CELLS_PER_EXT_BLOB - cell = cells[cell_id] - proof = proofs[cell_id] - assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + cell_index = 99 % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_index] + proof = proofs[cell_index] + assert not spec.verify_cell_kzg_proof(commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield f'verify_cell_kzg_proof_case_incorrect_commitment_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -132,17 +132,17 @@ def case03_verify_cell_kzg_proof(): # Incorrect cell for i in range(len(VALID_INDIVIDUAL_RANDOM_CELL_BYTES)): - cell_id = 16 % spec.CELLS_PER_EXT_BLOB + cell_index = 16 % spec.CELLS_PER_EXT_BLOB commitment = VALID_COMMITMENTS[i] cells, proofs = VALID_CELLS_AND_PROOFS[i] cell = VALID_INDIVIDUAL_RANDOM_CELL_BYTES[i] - proof = proofs[cell_id] - assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + proof = proofs[cell_index] + assert not spec.verify_cell_kzg_proof(commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield f'verify_cell_kzg_proof_case_incorrect_cell_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -151,17 +151,17 @@ def case03_verify_cell_kzg_proof(): # Incorrect proof for i in range(len(VALID_BLOBS)): - cell_id = 91 % spec.CELLS_PER_EXT_BLOB + cell_index = 91 % spec.CELLS_PER_EXT_BLOB commitment = VALID_COMMITMENTS[i] cells, proofs = VALID_CELLS_AND_PROOFS[i] - cell = cells[cell_id] - proof = bls_add_one(proofs[cell_id]) - assert not spec.verify_cell_kzg_proof(commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + cell = cells[cell_index] + proof = bls_add_one(proofs[cell_index]) + assert not spec.verify_cell_kzg_proof(commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield 
f'verify_cell_kzg_proof_case_incorrect_proof_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -171,33 +171,33 @@ def case03_verify_cell_kzg_proof(): # Edge case: Invalid commitment for commitment in INVALID_G1_POINTS: cells, proofs = VALID_CELLS_AND_PROOFS[0] - cell_id = 81 % spec.CELLS_PER_EXT_BLOB - cell = cells[cell_id] - proof = proofs[cell_id] - expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + cell_index = 81 % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_index] + proof = proofs[cell_index] + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield f'verify_cell_kzg_proof_case_invalid_commitment_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, 'output': None } - # Edge case: Invalid cell_id - for cell_id in [spec.CELLS_PER_EXT_BLOB, spec.CELLS_PER_EXT_BLOB + 1]: + # Edge case: Invalid cell_index + for cell_index in [spec.CELLS_PER_EXT_BLOB, spec.CELLS_PER_EXT_BLOB + 1]: cells, proofs = VALID_CELLS_AND_PROOFS[1] commitment = VALID_COMMITMENTS[1] cell = cells[0] proof = proofs[0] - expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) - yield f'verify_cell_kzg_proof_case_invalid_cell_id_{identifier}', { + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) + yield f'verify_cell_kzg_proof_case_invalid_cell_index_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -206,16 +206,16 @@ def case03_verify_cell_kzg_proof(): # Edge case: Invalid cell for cell in INVALID_INDIVIDUAL_CELL_BYTES: - cell_id = 32 % spec.CELLS_PER_EXT_BLOB + cell_index = 32 % spec.CELLS_PER_EXT_BLOB commitment = VALID_COMMITMENTS[2] cells, proofs = VALID_CELLS_AND_PROOFS[2] - proof = proofs[cell_id] - expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + proof = proofs[cell_index] + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield f'verify_cell_kzg_proof_case_invalid_cell_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -226,14 +226,14 @@ def case03_verify_cell_kzg_proof(): for proof in INVALID_G1_POINTS: cells, _ = VALID_CELLS_AND_PROOFS[3] commitment = VALID_COMMITMENTS[3] - cell_id = 36 % spec.CELLS_PER_EXT_BLOB - cell = cells[cell_id] - expect_exception(spec.verify_cell_kzg_proof, commitment, cell_id, cell, proof) - identifier = make_id(commitment, cell_id, cell, proof) + cell_index = 36 % spec.CELLS_PER_EXT_BLOB + cell = cells[cell_index] + expect_exception(spec.verify_cell_kzg_proof, commitment, cell_index, cell, proof) + identifier = make_id(commitment, cell_index, cell, proof) yield f'verify_cell_kzg_proof_case_invalid_proof_{identifier}', { 'input': { 'commitment': encode_hex(commitment), - 
'cell_id': cell_id, + 'cell_index': cell_index, 'cell': encode_hex(cell), 'proof': encode_hex(proof), }, @@ -620,14 +620,14 @@ def case04_verify_cell_kzg_proof_batch(): def case05_recover_cells_and_kzg_proofs(): # Valid: No missing cells cells, proofs = VALID_CELLS_AND_PROOFS[0] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB)) - recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, cells, proofs) + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB)) + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_indices, cells, proofs) assert recovered_cells == cells assert recovered_proofs == proofs - identifier = make_id(cell_ids, cells, proofs) + identifier = make_id(cell_indices, cells, proofs) yield f'recover_cells_and_kzg_proofs_case_valid_no_missing_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(cells), 'proofs': encode_hex_list(proofs), }, @@ -636,16 +636,16 @@ def case05_recover_cells_and_kzg_proofs(): # Valid: Half missing cells (every other cell) cells, proofs = VALID_CELLS_AND_PROOFS[1] - cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, partial_cells, partial_proofs) + cell_indices = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_indices, partial_cells, partial_proofs) assert recovered_cells == cells assert recovered_proofs == proofs - identifier = make_id(cell_ids, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) yield f'recover_cells_and_kzg_proofs_case_valid_half_missing_every_other_cell_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -654,16 +654,16 @@ def case05_recover_cells_and_kzg_proofs(): # Valid: Half missing cells (first half) cells, proofs = VALID_CELLS_AND_PROOFS[2] - cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB // 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, partial_cells, partial_proofs) + cell_indices = list(range(0, spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_indices, partial_cells, partial_proofs) assert recovered_cells == cells assert recovered_proofs == proofs - identifier = make_id(cell_ids, partial_cells) + identifier = make_id(cell_indices, partial_cells) yield f'recover_cells_and_kzg_proofs_case_valid_half_missing_first_half_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -672,16 +672,16 @@ def case05_recover_cells_and_kzg_proofs(): # Valid: Half missing cells (second half) cells, proofs = VALID_CELLS_AND_PROOFS[3] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2, 
spec.CELLS_PER_EXT_BLOB)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_ids, partial_cells, partial_proofs) + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB // 2, spec.CELLS_PER_EXT_BLOB)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + recovered_cells, recovered_proofs = spec.recover_cells_and_kzg_proofs(cell_indices, partial_cells, partial_proofs) assert recovered_cells == cells assert recovered_proofs == proofs - identifier = make_id(cell_ids, partial_cells) + identifier = make_id(cell_indices, partial_cells) yield f'recover_cells_and_kzg_proofs_case_valid_half_missing_second_half_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -689,12 +689,12 @@ def case05_recover_cells_and_kzg_proofs(): } # Edge case: All cells are missing - cell_ids, partial_cells = [], [] - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells) - identifier = make_id(cell_ids, partial_cells) + cell_indices, partial_cells = [], [] + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells) + identifier = make_id(cell_indices, partial_cells) yield f'recover_cells_and_kzg_proofs_case_invalid_all_cells_are_missing_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -703,14 +703,14 @@ def case05_recover_cells_and_kzg_proofs(): # Edge case: More than half missing cells, proofs = VALID_CELLS_AND_PROOFS[4] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 - 1)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB // 2 - 1)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) yield f'recover_cells_and_kzg_proofs_case_invalid_more_than_half_missing_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -719,32 +719,32 @@ def case05_recover_cells_and_kzg_proofs(): # Edge case: More cells provided than CELLS_PER_EXT_BLOB cells, proofs = VALID_CELLS_AND_PROOFS[5] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB)) + [0] - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB)) + [0] + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, 
partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) yield f'recover_cells_and_kzg_proofs_case_invalid_more_cells_than_cells_per_ext_blob_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, 'output': None } - # Edge case: Invalid cell_id + # Edge case: Invalid cell_index cells, proofs = VALID_CELLS_AND_PROOFS[6] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - # Replace first cell_id with an invalid value - cell_ids[0] = spec.CELLS_PER_EXT_BLOB - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) - yield f'recover_cells_and_kzg_proofs_case_invalid_cell_id_{identifier}', { + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + # Replace first cell_index with an invalid value + cell_indices[0] = spec.CELLS_PER_EXT_BLOB + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_cell_index_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -754,16 +754,16 @@ def case05_recover_cells_and_kzg_proofs(): # Edge case: Invalid cell for cell in INVALID_INDIVIDUAL_CELL_BYTES: cells, proofs = VALID_CELLS_AND_PROOFS[6] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] # Replace first cell with an invalid value partial_cells[0] = cell - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) yield f'recover_cells_and_kzg_proofs_case_invalid_cell_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, @@ -773,93 +773,93 @@ def case05_recover_cells_and_kzg_proofs(): # Edge case: Invalid proof for proof in INVALID_G1_POINTS: cells, proofs = VALID_CELLS_AND_PROOFS[0] - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB // 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] # Replace first proof with an invalid value partial_proofs[0] = proof - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - 
identifier = make_id(cell_ids, partial_cells, partial_proofs) + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) yield f'recover_cells_and_kzg_proofs_case_invalid_proof_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, 'output': None } - # Edge case: More cell_ids than cells + # Edge case: More cell_indices than cells cells, proofs = VALID_CELLS_AND_PROOFS[0] - cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - # Add another cell_id - cell_ids.append(spec.CELLS_PER_EXT_BLOB - 1) - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) - yield f'recover_cells_and_kzg_proofs_case_invalid_more_cell_ids_than_cells_{identifier}', { + cell_indices = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + # Add another cell_index + cell_indices.append(spec.CELLS_PER_EXT_BLOB - 1) + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_cell_indices_than_cells_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, 'output': None } - # Edge case: More cells than cell_ids + # Edge case: More cells than cell_indices cells, proofs = VALID_CELLS_AND_PROOFS[1] - cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + cell_indices = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] # Add another cell partial_cells.append(CELL_RANDOM_VALID1) - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) - yield f'recover_cells_and_kzg_proofs_case_invalid_more_cells_than_cell_ids_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_cells_than_cell_indices_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, 'output': None } - # Edge case: More proofs than cell_ids + # Edge case: More proofs than cell_indices cells, proofs = VALID_CELLS_AND_PROOFS[1] - cell_ids = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] + cell_indices = list(range(0, spec.CELLS_PER_EXT_BLOB, 2)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in 
cell_indices] # Add another proof partial_proofs.append(G1) - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) - yield f'recover_cells_and_kzg_proofs_case_invalid_more_proofs_than_cell_ids_{identifier}', { + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_more_proofs_than_cell_indices_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, 'output': None } - # Edge case: Duplicate cell_id + # Edge case: Duplicate cell_index cells, proofs = VALID_CELLS_AND_PROOFS[2] # There will be 65 cells, where 64 are unique and 1 is a duplicate. # Depending on the implementation, 63 & 1 might not fail for the right # reason. For example, if the implementation assigns cells in an array # via index, this would result in 63 cells and the test would fail due # to insufficient cell count, not because of a duplicate cell. - cell_ids = list(range(spec.CELLS_PER_EXT_BLOB // 2 + 1)) - partial_cells = [cells[cell_id] for cell_id in cell_ids] - partial_proofs = [proofs[cell_id] for cell_id in cell_ids] - # Replace first cell_id with the second cell_id - cell_ids[0] = cell_ids[1] - expect_exception(spec.recover_cells_and_kzg_proofs, cell_ids, partial_cells, partial_proofs) - identifier = make_id(cell_ids, partial_cells, partial_proofs) - yield f'recover_cells_and_kzg_proofs_case_invalid_duplicate_cell_id_{identifier}', { + cell_indices = list(range(spec.CELLS_PER_EXT_BLOB // 2 + 1)) + partial_cells = [cells[cell_index] for cell_index in cell_indices] + partial_proofs = [proofs[cell_index] for cell_index in cell_indices] + # Replace first cell_index with the second cell_index + cell_indices[0] = cell_indices[1] + expect_exception(spec.recover_cells_and_kzg_proofs, cell_indices, partial_cells, partial_proofs) + identifier = make_id(cell_indices, partial_cells, partial_proofs) + yield f'recover_cells_and_kzg_proofs_case_invalid_duplicate_cell_index_{identifier}', { 'input': { - 'cell_ids': cell_ids, + 'cell_indices': cell_indices, 'cells': encode_hex_list(partial_cells), 'proofs': encode_hex_list(partial_proofs), }, From c5e9c3cb4e3fcf7734b46cfb9ef06c5af6b47eb0 Mon Sep 17 00:00:00 2001 From: Benedikt Wagner <113296072+b-wagn@users.noreply.github.com> Date: Tue, 11 Jun 2024 14:43:40 +0200 Subject: [PATCH 75/89] Add public methods section in KZG spec (#3795) --- .../eip7594/polynomial-commitments-sampling.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index acf4ffc627..e6e39350bc 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -7,6 +7,7 @@ - [Introduction](#introduction) +- [Public Methods](#public-methods) - [Custom types](#custom-types) - [Constants](#constants) - [Preset](#preset) @@ -55,10 +56,21 @@ This document extends [polynomial-commitments.md](polynomial-commitments.md) with the functions required for data availability sampling (DAS). It is not part of the core Deneb spec but an extension that can be optionally implemented to allow nodes to reduce their load using DAS. 
+## Public Methods + For any KZG library extended to support DAS, functions flagged as "Public method" MUST be provided by the underlying KZG library as public functions. All other functions are private functions used internally by the KZG library. Public functions MUST accept raw bytes as input and perform the required cryptographic normalization before invoking any internal functions. +The following is a list of the public methods: + +* [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs) +* [`compute_cells`](#compute_cells) +* [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) +* [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) +* [`recover_cells_and_kzg_proofs`](#recover_cells_and_kzg_proofs) + + ## Custom types | Name | SSZ equivalent | Description | From 10bac07b39bbaeb31f4aefab8f23ec4d02e1e39a Mon Sep 17 00:00:00 2001 From: George Kadianakis Date: Tue, 11 Jun 2024 19:17:43 +0300 Subject: [PATCH 76/89] Remove compute_cells method --- .../polynomial-commitments-sampling.md | 26 ----------- tests/generators/kzg_7594/main.py | 45 ++++--------------- 2 files changed, 8 insertions(+), 63 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index f2b574afe5..1d138913fc 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -39,7 +39,6 @@ - [Cells](#cells-1) - [Cell computation](#cell-computation) - [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs) - - [`compute_cells`](#compute_cells) - [Cell verification](#cell-verification) - [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) - [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) @@ -459,31 +458,6 @@ def compute_cells_and_kzg_proofs(blob: Blob) -> Tuple[ return cells, proofs ``` -#### `compute_cells` - -```python -def compute_cells(blob: Blob) -> Vector[Cell, CELLS_PER_EXT_BLOB]: - """ - Compute the cell data for an extended blob (without computing the proofs). - - Public method. 
- """ - assert len(blob) == BYTES_PER_BLOB - - polynomial = blob_to_polynomial(blob) - polynomial_coeff = polynomial_eval_to_coeff(polynomial) - - extended_data = fft_field(polynomial_coeff + [0] * FIELD_ELEMENTS_PER_BLOB, - compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) - extended_data_rbo = bit_reversal_permutation(extended_data) - cells = [] - for cell_index in range(CELLS_PER_EXT_BLOB): - start = cell_index * FIELD_ELEMENTS_PER_CELL - end = (cell_index + 1) * FIELD_ELEMENTS_PER_CELL - cells.append(coset_evals_to_cell(CosetEvals(extended_data_rbo[start:end]))) - return cells -``` - ### Cell verification #### `verify_cell_kzg_proof` diff --git a/tests/generators/kzg_7594/main.py b/tests/generators/kzg_7594/main.py index be343795e7..e39b9d64cc 100644 --- a/tests/generators/kzg_7594/main.py +++ b/tests/generators/kzg_7594/main.py @@ -29,39 +29,11 @@ from eth2spec.utils import bls -############################################################################### -# Test cases for compute_cells -############################################################################### - -def case01_compute_cells(): - # Valid cases - for blob in VALID_BLOBS: - cells = spec.compute_cells(blob) - identifier = make_id(blob) - yield f'compute_cells_case_valid_{identifier}', { - 'input': { - 'blob': encode_hex(blob), - }, - 'output': encode_hex_list(cells) - } - - # Edge case: Invalid blobs - for blob in INVALID_BLOBS: - expect_exception(spec.compute_cells, blob) - identifier = make_id(blob) - yield f'compute_cells_case_invalid_blob_{identifier}', { - 'input': { - 'blob': encode_hex(blob) - }, - 'output': None - } - - ############################################################################### # Test cases for compute_cells_and_kzg_proofs ############################################################################### -def case02_compute_cells_and_kzg_proofs(): +def case_compute_cells_and_kzg_proofs(): # Valid cases for blob in VALID_BLOBS: cells, proofs = spec.compute_cells_and_kzg_proofs(blob) @@ -91,7 +63,7 @@ def case02_compute_cells_and_kzg_proofs(): # Test cases for verify_cell_kzg_proof ############################################################################### -def case03_verify_cell_kzg_proof(): +def case_verify_cell_kzg_proof(): # Valid cases for i in range(len(VALID_BLOBS)): cells, proofs = VALID_CELLS_AND_PROOFS[i] @@ -245,7 +217,7 @@ def case03_verify_cell_kzg_proof(): # Test cases for verify_cell_kzg_proof_batch ############################################################################### -def case04_verify_cell_kzg_proof_batch(): +def case_verify_cell_kzg_proof_batch(): # Valid cases for i in range(len(VALID_BLOBS)): cells, proofs = VALID_CELLS_AND_PROOFS[i] @@ -617,7 +589,7 @@ def case04_verify_cell_kzg_proof_batch(): # Test cases for recover_cells_and_kzg_proofs ############################################################################### -def case05_recover_cells_and_kzg_proofs(): +def case_recover_cells_and_kzg_proofs(): # Valid: No missing cells cells, proofs = VALID_CELLS_AND_PROOFS[0] cell_indices = list(range(spec.CELLS_PER_EXT_BLOB)) @@ -899,9 +871,8 @@ def cases_fn() -> Iterable[gen_typing.TestCase]: bls.use_arkworks() gen_runner.run_generator("kzg_7594", [ # EIP-7594 - create_provider(EIP7594, 'compute_cells', case01_compute_cells), - create_provider(EIP7594, 'compute_cells_and_kzg_proofs', case02_compute_cells_and_kzg_proofs), - create_provider(EIP7594, 'verify_cell_kzg_proof', case03_verify_cell_kzg_proof), - create_provider(EIP7594, 'verify_cell_kzg_proof_batch', 
case04_verify_cell_kzg_proof_batch), - create_provider(EIP7594, 'recover_cells_and_kzg_proofs', case05_recover_cells_and_kzg_proofs), + create_provider(EIP7594, 'compute_cells_and_kzg_proofs', case_compute_cells_and_kzg_proofs), + create_provider(EIP7594, 'verify_cell_kzg_proof', case_verify_cell_kzg_proof), + create_provider(EIP7594, 'verify_cell_kzg_proof_batch', case_verify_cell_kzg_proof_batch), + create_provider(EIP7594, 'recover_cells_and_kzg_proofs', case_recover_cells_and_kzg_proofs), ]) From 93dfcdaf4c521a95e769f88ce96311af2423c11b Mon Sep 17 00:00:00 2001 From: Justin Traglia Date: Tue, 11 Jun 2024 08:07:26 -0500 Subject: [PATCH 77/89] Remove compute_cells from list of public methods --- specs/_features/eip7594/polynomial-commitments-sampling.md | 1 - 1 file changed, 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 1d138913fc..8ccad97f6c 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -64,7 +64,6 @@ Public functions MUST accept raw bytes as input and perform the required cryptog The following is a list of the public methods: * [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs) -* [`compute_cells`](#compute_cells) * [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) * [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) * [`recover_cells_and_kzg_proofs`](#recover_cells_and_kzg_proofs) From 6efab19ad09a92a8e326c00247102a87cb61c03a Mon Sep 17 00:00:00 2001 From: kevaundray Date: Tue, 11 Jun 2024 18:26:19 +0100 Subject: [PATCH 78/89] chore: Refactor cell recovery code (#3781) * multi: - Remove shift_polynomial_coeff - Remove recover_shifted_data - Remove recover_original_data - Move `zero_poly_eval_brp ` under sanity check comment as its only used for sanity checking * chore: remove sanity check -- this was doing a wasteful `compute_root_of_unity` operation * chore: add previous sanity check as a unit test * chore: copy values python was taking a reference, so it passes in our regular codepaths but not in isolated test * chore: add coset_fft test * Update specs/_features/eip7594/polynomial-commitments-sampling.md Co-authored-by: Justin Traglia <95511699+jtraglia@users.noreply.github.com> * Update specs/_features/eip7594/polynomial-commitments-sampling.md Co-authored-by: Justin Traglia <95511699+jtraglia@users.noreply.github.com> * chore: linter * chore: asn (switch to bls_modular_inverse) * chore: (ben) rename func to test_construct_vanishing_polynomial * chore: (ben) rename `extended_evaluations_coeffs` to `extended_evaluation_times_zero_coeffs` * chore: compute `roots_of_unity_extended` in recover_data method * chore: add more comments explaining whats happening in recover_data * chore: compute_zero_poly_coeff in recover_data * chore: make lint * chore: add doc comment to coset_fft_field * chore: (ben) add code to generate the vanishing polynomial when all cells are missing * chore: remove handling of edge case when constructing a vanishing polynomial * chore: rename H(x) to Q_3(x) * chore: remove trailing whitespace * chore: add whitespace between comments * chore: (asn) add assert that num missing cells is not 0 * chore: (justin) address comments * chore: merge resolution * chore: fixup remaining IDs -> indices * chore: use indice nomenclature in tests --------- Co-authored-by: Justin Traglia <95511699+jtraglia@users.noreply.github.com> --- 
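For readers following this refactor, the sketch below isolates the fact the new code leans on: a polynomial that vanishes on part of the original evaluation domain has no zeros on a coset of that domain, so a quotient such as (E*Z)(x) / Z(x) can always be formed pointwise over the coset. It is illustrative only, using a toy field GF(17) and naive evaluation in place of the spec's BLS scalar field and FFT routines; every name in it is hypothetical.

```python
# Toy model of the coset trick: GF(17), an 8-element "roots of unity" domain generated
# by w = 9, and a disjoint coset of it obtained by multiplying with h = 3.
p = 17
w, h = 9, 3
domain = [pow(w, i, p) for i in range(8)]    # evaluation domain (roots of unity)
coset = [(h * x) % p for x in domain]        # shifted domain, disjoint from `domain`

def evaluate(coeffs, x):
    # Naive evaluation of a polynomial given lowest-degree-first coefficients.
    return sum(c * pow(x, i, p) for i, c in enumerate(coeffs)) % p

data_poly = [5, 1, 0, 7, 2, 0, 0, 3]         # polynomial we want to recover
missing_point = domain[2]                    # pretend this evaluation is missing
vanishing = [(-missing_point) % p, 1]        # Z(x) = x - missing_point

# Z vanishes inside the original domain, so (E*Z)/Z cannot be formed pointwise there...
assert evaluate(vanishing, missing_point) == 0

# ...but Z has no zeros on the coset, so dividing there is always well defined.
for x in coset:
    z_at_x = evaluate(vanishing, x)
    e_times_z_at_x = evaluate(data_poly, x) * z_at_x % p
    assert z_at_x != 0
    assert e_times_z_at_x * pow(z_at_x, -1, p) % p == evaluate(data_poly, x)
```

In the diff below, `recover_data` performs the same move onto a coset and back using `coset_fft_field` instead of naive evaluation.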
.../polynomial-commitments-sampling.md | 197 ++++++++---------- .../test_polynomial_commitments.py | 41 ++++ 2 files changed, 130 insertions(+), 108 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 8ccad97f6c..8d97a12575 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -21,13 +21,13 @@ - [FFTs](#ffts) - [`_fft_field`](#_fft_field) - [`fft_field`](#fft_field) + - [`coset_fft_field`](#coset_fft_field) - [Polynomials in coefficient form](#polynomials-in-coefficient-form) - [`polynomial_eval_to_coeff`](#polynomial_eval_to_coeff) - [`add_polynomialcoeff`](#add_polynomialcoeff) - [`neg_polynomialcoeff`](#neg_polynomialcoeff) - [`multiply_polynomialcoeff`](#multiply_polynomialcoeff) - [`divide_polynomialcoeff`](#divide_polynomialcoeff) - - [`shift_polynomialcoeff`](#shift_polynomialcoeff) - [`interpolate_polynomialcoeff`](#interpolate_polynomialcoeff) - [`vanishing_polynomialcoeff`](#vanishing_polynomialcoeff) - [`evaluate_polynomialcoeff`](#evaluate_polynomialcoeff) @@ -44,8 +44,7 @@ - [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) - [Reconstruction](#reconstruction) - [`construct_vanishing_polynomial`](#construct_vanishing_polynomial) - - [`recover_shifted_data`](#recover_shifted_data) - - [`recover_original_data`](#recover_original_data) + - [`recover_data`](#recover_data) - [`recover_cells_and_kzg_proofs`](#recover_cells_and_kzg_proofs) @@ -63,11 +62,10 @@ Public functions MUST accept raw bytes as input and perform the required cryptog The following is a list of the public methods: -* [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs) -* [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) -* [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) -* [`recover_cells_and_kzg_proofs`](#recover_cells_and_kzg_proofs) - +- [`compute_cells_and_kzg_proofs`](#compute_cells_and_kzg_proofs) +- [`verify_cell_kzg_proof`](#verify_cell_kzg_proof) +- [`verify_cell_kzg_proof_batch`](#verify_cell_kzg_proof_batch) +- [`recover_cells_and_kzg_proofs`](#recover_cells_and_kzg_proofs) ## Custom types @@ -188,6 +186,41 @@ def fft_field(vals: Sequence[BLSFieldElement], return _fft_field(vals, roots_of_unity) ``` +#### `coset_fft_field` + +```python +def coset_fft_field(vals: Sequence[BLSFieldElement], + roots_of_unity: Sequence[BLSFieldElement], + inv: bool=False) -> Sequence[BLSFieldElement]: + """ + Computes an FFT/IFFT over a coset of the roots of unity. + This is useful for when one wants to divide by a polynomial which + vanishes on one or more elements in the domain. + """ + vals = vals.copy() + + def shift_vals(vals: Sequence[BLSFieldElement], factor: BLSFieldElement) -> Sequence[BLSFieldElement]: + """ + Multiply each entry in `vals` by succeeding powers of `factor` + i.e., [vals[0] * factor^0, vals[1] * factor^1, ..., vals[n] * factor^n] + """ + shift = 1 + for i in range(len(vals)): + vals[i] = BLSFieldElement((int(vals[i]) * shift) % BLS_MODULUS) + shift = (shift * int(factor)) % BLS_MODULUS + return vals + + # This is the coset generator; it is used to compute a FFT/IFFT over a coset of + # the roots of unity. 
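+    # Because the shifted points lie off the roots-of-unity domain, a polynomial that
+    # vanishes on (part of) that domain has no zeros at the shifted points and can be
+    # divided out pointwise there.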
+ shift_factor = BLSFieldElement(PRIMITIVE_ROOT_OF_UNITY) + if inv: + vals = fft_field(vals, roots_of_unity, inv) + shift_inv = bls_modular_inverse(shift_factor) + return shift_vals(vals, shift_inv) + else: + vals = shift_vals(vals, shift_factor) + return fft_field(vals, roots_of_unity, inv) +``` ### Polynomials in coefficient form @@ -265,23 +298,6 @@ def divide_polynomialcoeff(a: PolynomialCoeff, b: PolynomialCoeff) -> Polynomial return [x % BLS_MODULUS for x in o] ``` -#### `shift_polynomialcoeff` - -```python -def shift_polynomialcoeff(polynomial_coeff: PolynomialCoeff, factor: BLSFieldElement) -> PolynomialCoeff: - """ - Shift the evaluation of a polynomial in coefficient form by factor. - This returns a new polynomial g in coefficient form such that g(x) = f(factor * x). - In other words, each coefficient of f is scaled by a power of factor. - """ - factor_power = 1 - o = [] - for p in polynomial_coeff: - o.append(int(p) * factor_power % BLS_MODULUS) - factor_power = factor_power * int(factor) % BLS_MODULUS - return o -``` - #### `interpolate_polynomialcoeff` ```python @@ -494,7 +510,7 @@ def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48], cells: Sequence[Cell], proofs_bytes: Sequence[Bytes48]) -> bool: """ - Verify a set of cells, given their corresponding proofs and their coordinates (row_id, column_id) in the blob + Verify a set of cells, given their corresponding proofs and their coordinates (row_index, column_index) in the blob matrix. The list of all commitments is also provided in row_commitments_bytes. This function implements the naive algorithm of checking every cell @@ -519,7 +535,7 @@ def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48], for proof_bytes in proofs_bytes: assert len(proof_bytes) == BYTES_PER_PROOF - # Get commitments via row IDs + # Get commitments via row indices commitments_bytes = [row_commitments_bytes[row_index] for row_index in row_indices] # Get objects from bytes @@ -538,13 +554,20 @@ def verify_cell_kzg_proof_batch(row_commitments_bytes: Sequence[Bytes48], ### `construct_vanishing_polynomial` ```python -def construct_vanishing_polynomial(missing_cell_indices: Sequence[CellIndex]) -> Tuple[ - Sequence[BLSFieldElement], - Sequence[BLSFieldElement]]: +def construct_vanishing_polynomial(missing_cell_indices: Sequence[CellIndex]) -> Sequence[BLSFieldElement]: """ - Given the cells that are missing from the data, compute the polynomial that vanishes at every point that + Given the cells indices that are missing from the data, compute the polynomial that vanishes at every point that corresponds to a missing field element. + + This method assumes that all of the cells cannot be missing. In this case the vanishing polynomial + could be computed as Z(x) = x^n - 1, where `n` is FIELD_ELEMENTS_PER_EXT_BLOB. + + We never encounter this case however because this method is used solely for recovery and recovery only + works if at least half of the cells are available. 
""" + + assert len(missing_cell_indices) != 0 + # Get the small domain roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_EXT_BLOB) @@ -559,40 +582,24 @@ def construct_vanishing_polynomial(missing_cell_indices: Sequence[CellIndex]) -> for i, coeff in enumerate(short_zero_poly): zero_poly_coeff[i * FIELD_ELEMENTS_PER_CELL] = coeff - # Compute evaluations of the extended vanishing polynomial - zero_poly_eval = fft_field(zero_poly_coeff, - compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB)) - zero_poly_eval_brp = bit_reversal_permutation(zero_poly_eval) - - # Sanity check - for cell_index in range(CELLS_PER_EXT_BLOB): - start = cell_index * FIELD_ELEMENTS_PER_CELL - end = (cell_index + 1) * FIELD_ELEMENTS_PER_CELL - if cell_index in missing_cell_indices: - assert all(a == 0 for a in zero_poly_eval_brp[start:end]) - else: # cell_index in cell_indices - assert all(a != 0 for a in zero_poly_eval_brp[start:end]) - - return zero_poly_coeff, zero_poly_eval + return zero_poly_coeff ``` -### `recover_shifted_data` +### `recover_data` ```python -def recover_shifted_data(cell_indices: Sequence[CellIndex], - cells: Sequence[Cell], - zero_poly_eval: Sequence[BLSFieldElement], - zero_poly_coeff: Sequence[BLSFieldElement], - roots_of_unity_extended: Sequence[BLSFieldElement]) -> Tuple[ - Sequence[BLSFieldElement], - Sequence[BLSFieldElement], - BLSFieldElement]: +def recover_data(cell_indices: Sequence[CellIndex], + cells: Sequence[Cell], + ) -> Sequence[BLSFieldElement]: """ - Given Z(x), return polynomial Q_1(x)=(E*Z)(k*x) and Q_2(x)=Z(k*x) and k^{-1}. + Recover the missing evaluations for the extended blob, given at least half of the evaluations. """ - shift_factor = BLSFieldElement(PRIMITIVE_ROOT_OF_UNITY) - shift_inv = div(BLSFieldElement(1), shift_factor) + # Get the extended domain. This will be referred to as the FFT domain. + roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) + + # Flatten the cells into evaluations. + # If a cell is missing, then its evaluation is zero. 
extended_evaluation_rbo = [0] * FIELD_ELEMENTS_PER_EXT_BLOB for cell_index, cell in zip(cell_indices, cells): start = cell_index * FIELD_ELEMENTS_PER_CELL @@ -600,45 +607,39 @@ def recover_shifted_data(cell_indices: Sequence[CellIndex], extended_evaluation_rbo[start:end] = cell extended_evaluation = bit_reversal_permutation(extended_evaluation_rbo) - # Compute (E*Z)(x) - extended_evaluation_times_zero = [BLSFieldElement(int(a) * int(b) % BLS_MODULUS) - for a, b in zip(zero_poly_eval, extended_evaluation)] + # Compute Z(x) in monomial form + # Z(x) is the polynomial which vanishes on all of the evaluations which are missing + missing_cell_indices = [CellIndex(cell_index) for cell_index in range(CELLS_PER_EXT_BLOB) + if cell_index not in cell_indices] + zero_poly_coeff = construct_vanishing_polynomial(missing_cell_indices) - extended_evaluations_fft = fft_field(extended_evaluation_times_zero, roots_of_unity_extended, inv=True) + # Convert Z(x) to evaluation form over the FFT domain + zero_poly_eval = fft_field(zero_poly_coeff, roots_of_unity_extended) - # Compute (E*Z)(k*x) - shifted_extended_evaluation = shift_polynomialcoeff(extended_evaluations_fft, shift_factor) - # Compute Z(k*x) - shifted_zero_poly = shift_polynomialcoeff(zero_poly_coeff, shift_factor) + # Compute (E*Z)(x) = E(x) * Z(x) in evaluation form over the FFT domain + extended_evaluation_times_zero = [BLSFieldElement(int(a) * int(b) % BLS_MODULUS) + for a, b in zip(zero_poly_eval, extended_evaluation)] - eval_shifted_extended_evaluation = fft_field(shifted_extended_evaluation, roots_of_unity_extended) - eval_shifted_zero_poly = fft_field(shifted_zero_poly, roots_of_unity_extended) + # Convert (E*Z)(x) to monomial form + extended_evaluation_times_zero_coeffs = fft_field(extended_evaluation_times_zero, roots_of_unity_extended, inv=True) - return eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv -``` + # Convert (E*Z)(x) to evaluation form over a coset of the FFT domain + extended_evaluations_over_coset = coset_fft_field(extended_evaluation_times_zero_coeffs, roots_of_unity_extended) -### `recover_original_data` + # Convert Z(x) to evaluation form over a coset of the FFT domain + zero_poly_over_coset = coset_fft_field(zero_poly_coeff, roots_of_unity_extended) -```python -def recover_original_data(eval_shifted_extended_evaluation: Sequence[BLSFieldElement], - eval_shifted_zero_poly: Sequence[BLSFieldElement], - shift_inv: BLSFieldElement, - roots_of_unity_extended: Sequence[BLSFieldElement]) -> Sequence[BLSFieldElement]: - """ - Given Q_1, Q_2 and k^{-1}, compute P(x). 
- """ - # Compute Q_3 = Q_1(x)/Q_2(x) = P(k*x) - eval_shifted_reconstructed_poly = [ + # Compute Q_3(x) = (E*Z)(x) / Z(x) in evaluation form over a coset of the FFT domain + reconstructed_poly_over_coset = [ div(a, b) - for a, b in zip(eval_shifted_extended_evaluation, eval_shifted_zero_poly) + for a, b in zip(extended_evaluations_over_coset, zero_poly_over_coset) ] - shifted_reconstructed_poly = fft_field(eval_shifted_reconstructed_poly, roots_of_unity_extended, inv=True) - - # Unshift P(k*x) by k^{-1} to get P(x) - reconstructed_poly = shift_polynomialcoeff(shifted_reconstructed_poly, shift_inv) + # Convert Q_3(x) to monomial form + reconstructed_poly_coeff = coset_fft_field(reconstructed_poly_over_coset, roots_of_unity_extended, inv=True) - reconstructed_data = bit_reversal_permutation(fft_field(reconstructed_poly, roots_of_unity_extended)) + # Convert Q_3(x) to evaluation form over the FFT domain and bit reverse the result + reconstructed_data = bit_reversal_permutation(fft_field(reconstructed_poly_coeff, roots_of_unity_extended)) return reconstructed_data ``` @@ -677,30 +678,10 @@ def recover_cells_and_kzg_proofs(cell_indices: Sequence[CellIndex], for proof_bytes in proofs_bytes: assert len(proof_bytes) == BYTES_PER_PROOF - # Get the extended domain - roots_of_unity_extended = compute_roots_of_unity(FIELD_ELEMENTS_PER_EXT_BLOB) - # Convert cells to coset evals cosets_evals = [cell_to_coset_evals(cell) for cell in cells] - missing_cell_indices = [CellIndex(cell_index) for cell_index in range(CELLS_PER_EXT_BLOB) - if cell_index not in cell_indices] - zero_poly_coeff, zero_poly_eval = construct_vanishing_polynomial(missing_cell_indices) - - eval_shifted_extended_evaluation, eval_shifted_zero_poly, shift_inv = recover_shifted_data( - cell_indices, - cosets_evals, - zero_poly_eval, - zero_poly_coeff, - roots_of_unity_extended, - ) - - reconstructed_data = recover_original_data( - eval_shifted_extended_evaluation, - eval_shifted_zero_poly, - shift_inv, - roots_of_unity_extended, - ) + reconstructed_data = recover_data(cell_indices, cosets_evals) for cell_index, coset_evals in zip(cell_indices, cosets_evals): start = cell_index * FIELD_ELEMENTS_PER_CELL diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index ec54db661f..7741398749 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -28,6 +28,47 @@ def test_fft(spec): assert poly_coeff_inversed == poly_coeff +@with_eip7594_and_later +@spec_test +@single_phase +def test_coset_fft(spec): + rng = random.Random(5566) + + roots_of_unity = spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB) + + poly_coeff = [rng.randint(0, BLS_MODULUS - 1) for _ in range(spec.FIELD_ELEMENTS_PER_BLOB)] + + poly_eval = spec.coset_fft_field(poly_coeff, roots_of_unity) + poly_coeff_inversed = spec.coset_fft_field(poly_eval, roots_of_unity, inv=True) + + assert len(poly_eval) == len(poly_coeff) == len(poly_coeff_inversed) + assert poly_coeff_inversed == poly_coeff + + +@with_eip7594_and_later +@spec_test +@single_phase +def test_construct_vanishing_polynomial(spec): + rng = random.Random(5566) + + num_missing_cells = rng.randint(0, spec.CELLS_PER_EXT_BLOB - 1) + # Get a unique list of `num_missing_cells` cell indices + 
unique_missing_cell_indices = rng.sample(range(spec.CELLS_PER_EXT_BLOB), num_missing_cells) + + zero_poly_coeff = spec.construct_vanishing_polynomial(unique_missing_cell_indices) + roots_of_unity = spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_EXT_BLOB) + zero_poly_eval = spec.fft_field(zero_poly_coeff, roots_of_unity) + zero_poly_eval_brp = spec.bit_reversal_permutation(zero_poly_eval) + + for cell_index in range(spec.CELLS_PER_EXT_BLOB): + start = cell_index * spec.FIELD_ELEMENTS_PER_CELL + end = (cell_index + 1) * spec.FIELD_ELEMENTS_PER_CELL + if cell_index in unique_missing_cell_indices: + assert all(a == 0 for a in zero_poly_eval_brp[start:end]) + else: # cell_index in cell_indices + assert all(a != 0 for a in zero_poly_eval_brp[start:end]) + + @with_eip7594_and_later @spec_test @single_phase From d4b6c0c304f557f087c0b3e8e2d62594436dc48c Mon Sep 17 00:00:00 2001 From: Parithosh Jayanthi Date: Tue, 11 Jun 2024 22:54:45 +0200 Subject: [PATCH 79/89] Adds CI to generate test vectors (#3793) * Add support for the CI to generate the test vectors --- .github/workflows/generate_vectors.yml | 78 ++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 .github/workflows/generate_vectors.yml diff --git a/.github/workflows/generate_vectors.yml b/.github/workflows/generate_vectors.yml new file mode 100644 index 0000000000..37468d203d --- /dev/null +++ b/.github/workflows/generate_vectors.yml @@ -0,0 +1,78 @@ +name: Run test vector generation + +defaults: + run: + shell: zsh {0} + +on: + workflow_dispatch: + inputs: + ref: + description: The branch, tag or SHA to checkout and build from + default: dev + type: string + required: true + +jobs: + generate-tests: + runs-on: [self-hosted-ghr-custom, size-chungus-x64, profile-consensusSpecs] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + repository: 'ethereum/consensus-specs' + path: 'consensus-specs' + ref: ${{ inputs.source_ref }} + - name: Checkout consensus-spec-tests repository + uses: actions/checkout@v4 + with: + repository: 'ethereum/consensus-spec-tests' + path: 'consensus-spec-tests' + fetch-depth: 1 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.10' + cache: '' + - name: Clean up Spec Repository + run: | + cd consensus-specs + make clean + - name: Install dependencies and generate pyspec + run: | + cd consensus-specs + make install_test + make -B pyspec + - name: Generate tests + run: | + cd consensus-specs + make -j 16 generate_tests 2>&1 | tee ../consensustestgen.log + cp -r presets/ ../consensus-spec-tests/presets + cp -r configs/ ../consensus-spec-tests/configs + find . 
-type d -empty -delete + - name: Archive configurations + run: | + cd consensus-spec-tests + tar -czvf general.tar.gz tests/general + tar -czvf minimal.tar.gz tests/minimal + tar -czvf mainnet.tar.gz tests/mainnet + - name: Upload general.tar.gz + uses: actions/upload-artifact@v4 + with: + name: General Test Configuration + path: consensus-spec-tests/general.tar.gz + - name: Upload minimal.tar.gz + uses: actions/upload-artifact@v4 + with: + name: Minimal Test Configuration + path: consensus-spec-tests/minimal.tar.gz + - name: Upload mainnet.tar.gz + uses: actions/upload-artifact@v4 + with: + name: Mainnet Test Configuration + path: consensus-spec-tests/mainnet.tar.gz + - name: Upload consensustestgen + uses: actions/upload-artifact@v4 + with: + name: consensustestgen.log + path: consensustestgen.log \ No newline at end of file From 1c675ae4a145615229d563849b83ca6953a671ba Mon Sep 17 00:00:00 2001 From: Benedikt Wagner <113296072+b-wagn@users.noreply.github.com> Date: Thu, 13 Jun 2024 16:30:05 +0200 Subject: [PATCH 80/89] Update polynomial-commitments-sampling.md - Fix dead link --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 8d97a12575..c2f8eb93cb 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -52,7 +52,7 @@ ## Introduction -This document extends [polynomial-commitments.md](polynomial-commitments.md) with the functions required for data availability sampling (DAS). It is not part of the core Deneb spec but an extension that can be optionally implemented to allow nodes to reduce their load using DAS. +This document extends [polynomial-commitments.md](https://github.com/ethereum/consensus-specs/blob/dev/specs/deneb/polynomial-commitments.md) with the functions required for data availability sampling (DAS). It is not part of the core Deneb spec but an extension that can be optionally implemented to allow nodes to reduce their load using DAS. ## Public Methods From 77eac20df69798375d0046271d79bb7e52deb5e2 Mon Sep 17 00:00:00 2001 From: Benedikt Wagner <113296072+b-wagn@users.noreply.github.com> Date: Thu, 13 Jun 2024 17:20:04 +0200 Subject: [PATCH 81/89] Update specs/_features/eip7594/polynomial-commitments-sampling.md Co-authored-by: Hsiao-Wei Wang --- specs/_features/eip7594/polynomial-commitments-sampling.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index c2f8eb93cb..9337a912e0 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -52,7 +52,7 @@ ## Introduction -This document extends [polynomial-commitments.md](https://github.com/ethereum/consensus-specs/blob/dev/specs/deneb/polynomial-commitments.md) with the functions required for data availability sampling (DAS). It is not part of the core Deneb spec but an extension that can be optionally implemented to allow nodes to reduce their load using DAS. +This document extends [polynomial-commitments.md](../../deneb/polynomial-commitments.md) with the functions required for data availability sampling (DAS). 
It is not part of the core Deneb spec but an extension that can be optionally implemented to allow nodes to reduce their load using DAS. ## Public Methods From 490c89a3c2db6084cf13db0842ff7ea348034fac Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Thu, 13 Jun 2024 23:42:36 +0800 Subject: [PATCH 82/89] bump version to `1.5.0-alpha.3` --- tests/core/pyspec/eth2spec/VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/VERSION.txt b/tests/core/pyspec/eth2spec/VERSION.txt index ead8dd9dd9..86f9d092d6 100644 --- a/tests/core/pyspec/eth2spec/VERSION.txt +++ b/tests/core/pyspec/eth2spec/VERSION.txt @@ -1 +1 @@ -1.5.0-alpha.2 +1.5.0-alpha.3 From aa65fd782408217f188b99e008131ff4918f241c Mon Sep 17 00:00:00 2001 From: Mikhail Kalinin Date: Fri, 14 Jun 2024 13:26:47 +0700 Subject: [PATCH 83/89] Apply suggestion from @hwwhww Co-authored-by: Hsiao-Wei Wang --- specs/electra/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 512b365ae9..4975bcb373 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -1444,7 +1444,7 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, balance = state.balances[index] # [Modified in Electra:EIP7251] validator.effective_balance = min( - balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE_ELECTRA) + balance - balance % EFFECTIVE_BALANCE_INCREMENT, get_validator_max_effective_balance(validator)) if validator.effective_balance >= MIN_ACTIVATION_BALANCE: validator.activation_eligibility_epoch = GENESIS_EPOCH validator.activation_epoch = GENESIS_EPOCH From 718aadf263a101227e3a9eb465c2e1c9152aa492 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 14 Jun 2024 15:43:28 +0800 Subject: [PATCH 84/89] Fix tests and remove duplicate `_WITHDRAWAL_PREFIX` definition --- specs/electra/beacon-chain.md | 2 -- .../eth2spec/test/phase0/genesis/test_initialization.py | 4 +++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/electra/beacon-chain.md b/specs/electra/beacon-chain.md index 4975bcb373..6c6a63d89c 100644 --- a/specs/electra/beacon-chain.md +++ b/specs/electra/beacon-chain.md @@ -128,8 +128,6 @@ The following values are (non-configurable) constants used throughout the specif | Name | Value | | - | - | -| `BLS_WITHDRAWAL_PREFIX` | `Bytes1('0x00')` | -| `ETH1_ADDRESS_WITHDRAWAL_PREFIX` | `Bytes1('0x01')` | | `COMPOUNDING_WITHDRAWAL_PREFIX` | `Bytes1('0x02')` | ### Domains diff --git a/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py b/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py index f1c3064723..ed584ed612 100644 --- a/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py +++ b/tests/core/pyspec/eth2spec/test/phase0/genesis/test_initialization.py @@ -105,7 +105,9 @@ def test_initialize_beacon_state_some_small_balances(spec): assert state.eth1_data.deposit_count == len(deposits) assert state.eth1_data.block_hash == eth1_block_hash # only main deposits participate to the active balance - assert spec.get_total_active_balance(state) == main_deposit_count * max_effective_balance + # NOTE: they are pre-ELECTRA deposits with BLS_WITHDRAWAL_PREFIX, + # so `MAX_EFFECTIVE_BALANCE` is used + assert spec.get_total_active_balance(state) == main_deposit_count * spec.MAX_EFFECTIVE_BALANCE # yield state yield 'state', state From fc62e597785bfc2356fe37907426729d35c2f306 Mon Sep 17 00:00:00 2001 From: 
b-wagn Date: Fri, 14 Jun 2024 14:23:53 +0200 Subject: [PATCH 85/89] extend test for eip7594 --- .../test_polynomial_commitments.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index 7741398749..a051f14a11 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -15,18 +15,35 @@ @spec_test @single_phase def test_fft(spec): + + # in this test we sample a random polynomial in coefficient form + # then we apply an FFT to get evaluations over the roots of unity + # we then apply an inverse FFT to the evaluations to get coefficients + + # we check two things: + # 1) the original coefficients and the resulting coefficients match + # 2) the evaluations that we got are the same as if we would have evaluated individually + rng = random.Random(5566) roots_of_unity = spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB) + # sample a random polynomial poly_coeff = [rng.randint(0, BLS_MODULUS - 1) for _ in range(spec.FIELD_ELEMENTS_PER_BLOB)] + # do an FFT and then an inverse FFT poly_eval = spec.fft_field(poly_coeff, roots_of_unity) poly_coeff_inversed = spec.fft_field(poly_eval, roots_of_unity, inv=True) + # first check: inverse FFT after FFT results in original coefficients assert len(poly_eval) == len(poly_coeff) == len(poly_coeff_inversed) assert poly_coeff_inversed == poly_coeff + # second check: result of FFT are really the evaluations + for i, w in enumerate(roots_of_unity): + individual_evaluation = spec.evaluate_polynomialcoeff(poly_coeff, w) + assert individual_evaluation == poly_eval[i] + @with_eip7594_and_later @spec_test From 7095e14b60a37e069eabef64361d06f9869c4228 Mon Sep 17 00:00:00 2001 From: b-wagn Date: Fri, 14 Jun 2024 14:59:22 +0200 Subject: [PATCH 86/89] extend test for coset_fft --- .../test_polynomial_commitments.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index a051f14a11..b1b6c2d248 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -49,18 +49,40 @@ def test_fft(spec): @spec_test @single_phase def test_coset_fft(spec): + + # in this test we sample a random polynomial in coefficient form + # then we apply a Coset FFT to get evaluations over the coset of the roots of unity + # we then apply an inverse Coset FFT to the evaluations to get coefficients + + # we check two things: + # 1) the original coefficients and the resulting coefficients match + # 2) the evaluations that we got are the same as if we would have evaluated individually + rng = random.Random(5566) roots_of_unity = spec.compute_roots_of_unity(spec.FIELD_ELEMENTS_PER_BLOB) + # this is the shift that generates the coset + coset_shift = spec.PRIMITIVE_ROOT_OF_UNITY + + # sample a random polynomial poly_coeff = [rng.randint(0, BLS_MODULUS - 1) for _ in range(spec.FIELD_ELEMENTS_PER_BLOB)] + # do a coset 
FFT and then an inverse coset FFT poly_eval = spec.coset_fft_field(poly_coeff, roots_of_unity) poly_coeff_inversed = spec.coset_fft_field(poly_eval, roots_of_unity, inv=True) + # first check: inverse coset FFT after coset FFT results in original coefficients assert len(poly_eval) == len(poly_coeff) == len(poly_coeff_inversed) assert poly_coeff_inversed == poly_coeff + # second check: result of FFT are really the evaluations over the coset + for i, w in enumerate(roots_of_unity): + # the element of the coset is coset_shift * w + shifted_w = spec.BLSFieldElement((coset_shift * int(w)) % BLS_MODULUS) + individual_evaluation = spec.evaluate_polynomialcoeff(poly_coeff, shifted_w) + assert individual_evaluation == poly_eval[i] + @with_eip7594_and_later @spec_test From 2aeddf2f86a28987323d35fd2774bdfa7b0cf637 Mon Sep 17 00:00:00 2001 From: b-wagn Date: Fri, 14 Jun 2024 15:18:00 +0200 Subject: [PATCH 87/89] remove trailing whitespaces --- .../test_polynomial_commitments.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py index b1b6c2d248..5bc3a4330a 100644 --- a/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py +++ b/tests/core/pyspec/eth2spec/test/eip7594/unittests/polynomial_commitments/test_polynomial_commitments.py @@ -20,7 +20,7 @@ def test_fft(spec): # then we apply an FFT to get evaluations over the roots of unity # we then apply an inverse FFT to the evaluations to get coefficients - # we check two things: + # we check two things: # 1) the original coefficients and the resulting coefficients match # 2) the evaluations that we got are the same as if we would have evaluated individually @@ -42,19 +42,19 @@ def test_fft(spec): # second check: result of FFT are really the evaluations for i, w in enumerate(roots_of_unity): individual_evaluation = spec.evaluate_polynomialcoeff(poly_coeff, w) - assert individual_evaluation == poly_eval[i] + assert individual_evaluation == poly_eval[i] @with_eip7594_and_later @spec_test @single_phase def test_coset_fft(spec): - + # in this test we sample a random polynomial in coefficient form # then we apply a Coset FFT to get evaluations over the coset of the roots of unity # we then apply an inverse Coset FFT to the evaluations to get coefficients - # we check two things: + # we check two things: # 1) the original coefficients and the resulting coefficients match # 2) the evaluations that we got are the same as if we would have evaluated individually @@ -81,7 +81,7 @@ def test_coset_fft(spec): # the element of the coset is coset_shift * w shifted_w = spec.BLSFieldElement((coset_shift * int(w)) % BLS_MODULUS) individual_evaluation = spec.evaluate_polynomialcoeff(poly_coeff, shifted_w) - assert individual_evaluation == poly_eval[i] + assert individual_evaluation == poly_eval[i] @with_eip7594_and_later From ae654d2281bc1f6f408c4c5001eb6b33407fa9e3 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 14 Jun 2024 23:05:41 +0800 Subject: [PATCH 88/89] fix typo --- tests/generators/operations/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index 204f5023a4..51cd507066 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -47,7 +47,7 @@ 
'attestation', 'consolidation_request', 'deposit_request', - 'voluntary_exit' + 'voluntary_exit', 'withdrawal_request', ]} electra_mods = combine_mods(_new_electra_mods, deneb_mods) From 483a286ba52342dce3eb595e5ddd43fb539c7190 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Fri, 14 Jun 2024 23:57:21 +0800 Subject: [PATCH 89/89] Remove `assert len(missing_cell_indices) != 0` check --- specs/_features/eip7594/polynomial-commitments-sampling.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/specs/_features/eip7594/polynomial-commitments-sampling.md b/specs/_features/eip7594/polynomial-commitments-sampling.md index 9337a912e0..7be1a4a059 100644 --- a/specs/_features/eip7594/polynomial-commitments-sampling.md +++ b/specs/_features/eip7594/polynomial-commitments-sampling.md @@ -565,9 +565,6 @@ def construct_vanishing_polynomial(missing_cell_indices: Sequence[CellIndex]) -> We never encounter this case however because this method is used solely for recovery and recovery only works if at least half of the cells are available. """ - - assert len(missing_cell_indices) != 0 - # Get the small domain roots_of_unity_reduced = compute_roots_of_unity(CELLS_PER_EXT_BLOB)
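The docstring kept in the hunk above notes that if every cell were missing, the vanishing polynomial would simply be Z(x) = x^n - 1. As a quick sanity check of that identity (not spec code; a toy field GF(17) with n = 8 stands in for the BLS scalar field, and coefficients are listed lowest degree first):

```python
# Toy check in GF(17): the vanishing polynomial over the *full* domain of n-th roots
# of unity collapses to x^n - 1. Here n = 8 and w = 9 is an 8th root of unity mod 17.
p, n, w = 17, 8, 9

def poly_mul(a, b):
    # Schoolbook multiplication of coefficient lists modulo p.
    out = [0] * (len(a) + len(b) - 1)
    for i, ai in enumerate(a):
        for j, bj in enumerate(b):
            out[i + j] = (out[i + j] + ai * bj) % p
    return out

# Z(x) = product over all i of (x - w^i)
z = [1]
for i in range(n):
    z = poly_mul(z, [(-pow(w, i, p)) % p, 1])

# x^n - 1, i.e. coefficients [-1, 0, ..., 0, 1] modulo p
assert z == [p - 1] + [0] * (n - 1) + [1]
```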