diff --git a/.circleci/config.yml b/.circleci/config.yml index 157c56ca5f..032b8fb0b2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,26 +36,26 @@ commands: steps: - restore_cached_venv: venv_name: v24-pyspec - reqs_checksum: cache-{{ checksum "setup.py" }} + reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }} save_pyspec_cached_venv: description: Save a venv into a cache with pyspec keys" steps: - save_cached_venv: venv_name: v24-pyspec - reqs_checksum: cache-{{ checksum "setup.py" }} + reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }} venv_path: ./venv restore_deposit_contract_tester_cached_venv: description: "Restore the venv from cache for the deposit contract tester" steps: - restore_cached_venv: venv_name: v23-deposit-contract-tester - reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }} + reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }} save_deposit_contract_tester_cached_venv: description: "Save the venv to cache for later use of the deposit contract tester" steps: - save_cached_venv: venv_name: v23-deposit-contract-tester - reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }} + reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }} venv_path: ./solidity_deposit_contract/web3_tester/venv jobs: checkout_specs: @@ -168,6 +168,19 @@ jobs: command: make citest fork=eip6110 - store_test_results: path: tests/core/pyspec/test-reports + test-eip7002: + docker: + - image: circleci/python:3.9 + working_directory: ~/specs-repo + steps: + - restore_cache: + key: v3-specs-repo-{{ .Branch }}-{{ 
.Revision }} + - restore_pyspec_cached_venv + - run: + name: Run py-tests + command: make citest fork=eip7002 + - store_test_results: + path: tests/core/pyspec/test-reports table_of_contents: docker: - image: circleci/node:10.16.3 @@ -291,6 +304,9 @@ workflows: - test-eip6110: requires: - install_pyspec_test + - test-eip7002: + requires: + - install_pyspec_test - table_of_contents - codespell - lint: diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 6b24ef5eb1..998a1e0fc4 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -71,7 +71,7 @@ jobs: needs: [preclear,lint,codespell,table_of_contents] strategy: matrix: - version: ["phase0", "altair", "bellatrix", "capella", "deneb", "eip6110"] + version: ["phase0", "altair", "bellatrix", "capella", "deneb", "eip6110", "eip7002"] steps: - name: Checkout this repo uses: actions/checkout@v3.2.0 diff --git a/.gitignore b/.gitignore index 2ff10cf099..cdfddfb0c3 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,7 @@ tests/core/pyspec/eth2spec/bellatrix/ tests/core/pyspec/eth2spec/capella/ tests/core/pyspec/eth2spec/deneb/ tests/core/pyspec/eth2spec/eip6110/ +tests/core/pyspec/eth2spec/eip7002/ tests/core/pyspec/eth2spec/whisk/ # coverage reports diff --git a/Makefile b/Makefile index 6c852a1e94..16d189c05a 100644 --- a/Makefile +++ b/Makefile @@ -104,9 +104,15 @@ generate_tests: $(GENERATOR_TARGETS) pyspec: python3 -m venv venv; . venv/bin/activate; python3 setup.py pyspecdev +# check the setup tool requirements +preinstallation: + python3 -m venv venv; . venv/bin/activate; \ + python3 -m pip install -r requirements_preinstallation.txt + # installs the packages to run pyspec tests -install_test: - python3 -m venv venv; . venv/bin/activate; python3 -m pip install -e .[lint]; python3 -m pip install -e .[test] +install_test: preinstallation + python3 -m venv venv; . 
venv/bin/activate; \ + python3 -m pip install -e .[lint]; python3 -m pip install -e .[test] # Testing against `minimal` or `mainnet` config by default test: pyspec diff --git a/README.md b/README.md index 28d61ad404..aeda0712d9 100644 --- a/README.md +++ b/README.md @@ -42,6 +42,7 @@ Features are researched and developed in parallel, and then consolidated into se Additional specifications and standards outside of requisite client functionality can be found in the following repos: * [Beacon APIs](https://github.com/ethereum/beacon-apis) +* [Engine APIs](https://github.com/ethereum/execution-apis/tree/main/src/engine) * [Beacon Metrics](https://github.com/ethereum/beacon-metrics/) ## Design goals diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 9206ab77da..ed018aab1b 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -53,6 +53,9 @@ DENEB_FORK_EPOCH: 18446744073709551615 # EIP6110 EIP6110_FORK_VERSION: 0x05000000 # temporary stub EIP6110_FORK_EPOCH: 18446744073709551615 +# EIP7002 +EIP7002_FORK_VERSION: 0x05000000 # temporary stub +EIP7002_FORK_EPOCH: 18446744073709551615 # WHISK WHISK_FORK_VERSION: 0x06000000 # temporary stub WHISK_FORK_EPOCH: 18446744073709551615 diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 256a39d1c1..238598b0e0 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -52,6 +52,9 @@ DENEB_FORK_EPOCH: 18446744073709551615 # EIP6110 EIP6110_FORK_VERSION: 0x05000001 EIP6110_FORK_EPOCH: 18446744073709551615 +# EIP7002 +EIP7002_FORK_VERSION: 0x05000001 +EIP7002_FORK_EPOCH: 18446744073709551615 # WHISK WHISK_FORK_VERSION: 0x06000001 WHISK_FORK_EPOCH: 18446744073709551615 diff --git a/docs/docs/new-feature.md b/docs/docs/new-feature.md index 5e6180329f..b987e2e97d 100644 --- a/docs/docs/new-feature.md +++ b/docs/docs/new-feature.md @@ -53,17 +53,17 @@ For example, if the latest fork is Capella, use `./specs/capella` content as you ### 4. Add `fork.md` You can refer to the previous fork's `fork.md` file. 
### 5. Make it executable -- Update [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name. -- Update [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py): - - Add a new `SpecBuilder` with the new feature name constant. e.g., `EIP9999SpecBuilder` - - Add the new `SpecBuilder` to `spec_builders` list. - - Add the path of the new markdown files in `finalize_options` function. +- Update Pyspec [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name. +- Update helpers for [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py) for building the spec: + - Update [`pysetup/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/constants.py) with the new feature name as Pyspec `constants.py` defined. + - Update [`pysetup/spec_builders/__init__.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/spec_builders/__init__.py). Implement a new `SpecBuilder` in `pysetup/spec_builders/<new_feature_name>.py` with the new feature name. e.g., `EIP9999SpecBuilder`. Append it to the `spec_builders` list. + - Update [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py): add the path of the new markdown files in `get_md_doc_paths` function if needed. ## B: Make it executable for pytest and test generator -### 1. Add `light-client/*` docs if you updated the content of `BeaconBlock` +### 1. [Optional] Add `light-client/*` docs if you updated the content of `BeaconBlock` - You can refer to the previous fork's `light-client/*` file. -- Add the path of the new markdown files in `setup.py`'s `finalize_options` function. +- Add the path of the new markdown files in [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py)'s `get_md_doc_paths` function. ### 2.
Add the mainnet and minimal presets and update the configs - Add presets: `presets/mainnet/.yaml` and `presets/minimal/.yaml` diff --git a/pysetup/__init__.py b/pysetup/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pysetup/constants.py b/pysetup/constants.py new file mode 100644 index 0000000000..8d53455634 --- /dev/null +++ b/pysetup/constants.py @@ -0,0 +1,34 @@ +# Definitions in context.py +PHASE0 = 'phase0' +ALTAIR = 'altair' +BELLATRIX = 'bellatrix' +CAPELLA = 'capella' +DENEB = 'deneb' +EIP6110 = 'eip6110' +EIP7002 = 'eip7002' +WHISK = 'whisk' + + + +# The helper functions that are used when defining constants +CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = ''' +def ceillog2(x: int) -> uint64: + if x < 1: + raise ValueError(f"ceillog2 accepts only positive values, x={x}") + return uint64((x - 1).bit_length()) + + +def floorlog2(x: int) -> uint64: + if x < 1: + raise ValueError(f"floorlog2 accepts only positive values, x={x}") + return uint64(x.bit_length() - 1) +''' + + +OPTIMIZED_BLS_AGGREGATE_PUBKEYS = ''' +def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey: + return bls.AggregatePKs(pubkeys) +''' + + +ETH2_SPEC_COMMENT_PREFIX = "eth2spec:" diff --git a/pysetup/helpers.py b/pysetup/helpers.py new file mode 100644 index 0000000000..692aaa0d7e --- /dev/null +++ b/pysetup/helpers.py @@ -0,0 +1,253 @@ +import re +from typing import TypeVar, Dict +import textwrap +from functools import reduce + +from .constants import CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS +from .spec_builders import spec_builders +from .md_doc_paths import PREVIOUS_FORK_OF +from .typing import ( + ProtocolDefinition, + SpecObject, + VariableDefinition, +) + + +def collect_prev_forks(fork: str) -> list[str]: + forks = [fork] + while True: + fork = PREVIOUS_FORK_OF[fork] + if fork is None: + return forks + forks.append(fork) + + +def is_byte_vector(value: str) -> bool: + return value.startswith(('ByteVector')) + + +def 
make_function_abstract(protocol_def: ProtocolDefinition, key: str): + function = protocol_def.functions[key].split('"""') + protocol_def.functions[key] = function[0] + "..." + + +def objects_to_spec(preset_name: str, + spec_object: SpecObject, + fork: str, + ordered_class_objects: Dict[str, str]) -> str: + """ + Given all the objects that constitute a spec, combine them into a single pyfile. + """ + new_type_definitions = ( + '\n\n'.join( + [ + f"class {key}({value}):\n pass\n" if not is_byte_vector(value) else f"class {key}({value}): # type: ignore\n pass\n" + for key, value in spec_object.custom_types.items() + ] + ) + ) + + # Collect builders with the reversed previous forks + # e.g. `[bellatrix, altair, phase0]` -> `[phase0, altair, bellatrix]` + builders = [spec_builders[fork] for fork in collect_prev_forks(fork)[::-1]] + + def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str: + abstract_functions = ["verify_and_notify_new_payload"] + for key in protocol_def.functions.keys(): + if key in abstract_functions: + make_function_abstract(protocol_def, key) + + protocol = f"class {protocol_name}(Protocol):" + for fn_source in protocol_def.functions.values(): + fn_source = fn_source.replace("self: "+protocol_name, "self") + protocol += "\n\n" + textwrap.indent(fn_source, " ") + return protocol + + protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items()) + for k in list(spec_object.functions): + if k in [ + "ceillog2", + "floorlog2", + "compute_merkle_proof_for_block_body", + "compute_merkle_proof_for_state", + ]: + del spec_object.functions[k] + + functions = reduce(lambda fns, builder: builder.implement_optimizations(fns), builders, spec_object.functions) + functions_spec = '\n\n\n'.join(functions.values()) + + # Access global dict of config vars for runtime configurables + for name in spec_object.config_vars.keys(): + functions_spec = re.sub(r"\b%s\b" % name, 'config.' 
+ name, functions_spec) + + def format_config_var(name: str, vardef: VariableDefinition) -> str: + if vardef.type_name is None: + out = f'{name}={vardef.value},' + else: + out = f'{name}={vardef.type_name}({vardef.value}),' + if vardef.comment is not None: + out += f' # {vardef.comment}' + return out + + config_spec = 'class Configuration(NamedTuple):\n' + config_spec += ' PRESET_BASE: str\n' + config_spec += '\n'.join(f' {k}: {v.type_name if v.type_name is not None else "int"}' + for k, v in spec_object.config_vars.items()) + config_spec += '\n\n\nconfig = Configuration(\n' + config_spec += f' PRESET_BASE="{preset_name}",\n' + config_spec += '\n'.join(' ' + format_config_var(k, v) for k, v in spec_object.config_vars.items()) + config_spec += '\n)\n' + + def format_constant(name: str, vardef: VariableDefinition) -> str: + if vardef.type_name is None: + if vardef.type_hint is None: + out = f'{name} = {vardef.value}' + else: + out = f'{name}: {vardef.type_hint} = {vardef.value}' + else: + out = f'{name} = {vardef.type_name}({vardef.value})' + if vardef.comment is not None: + out += f' # {vardef.comment}' + return out + + # Merge all constant objects + hardcoded_ssz_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_ssz_dep_constants()}, builders, {}) + hardcoded_custom_type_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_custom_type_dep_constants(spec_object)}, builders, {}) + # Concatenate all strings + imports = reduce(lambda txt, builder: (txt + "\n\n" + builder.imports(preset_name) ).strip("\n"), builders, "") + preparations = reduce(lambda txt, builder: (txt + "\n\n" + builder.preparations() ).strip("\n"), builders, "") + sundry_functions = reduce(lambda txt, builder: (txt + "\n\n" + builder.sundry_functions() ).strip("\n"), builders, "") + # Keep engine from the most recent fork + execution_engine_cls = reduce(lambda txt, builder: builder.execution_engine_cls() or txt, builders, "") + + constant_vars_spec = '# 
Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items()) + preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items()) + ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values()) + ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_ssz_dep_constants[x]), hardcoded_ssz_dep_constants)) + ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), hardcoded_ssz_dep_constants)) + custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_custom_type_dep_constants[x]), hardcoded_custom_type_dep_constants)) + spec_strs = [ + imports, + preparations, + f"fork = \'{fork}\'\n", + # The constants that some SSZ containers require. Need to be defined before `new_type_definitions` + custom_type_dep_constants, + new_type_definitions, + CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS, + # The constants that some SSZ containers require. Need to be defined before `constants_spec` + ssz_dep_constants, + constant_vars_spec, + preset_vars_spec, + config_spec, + ordered_class_objects_spec, + protocols_spec, + functions_spec, + sundry_functions, + execution_engine_cls, + # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are + # as same as the spec definition. 
+ ssz_dep_constants_verification, + ] + return "\n\n\n".join([str.strip("\n") for str in spec_strs if str]) + "\n" + + +def combine_protocols(old_protocols: Dict[str, ProtocolDefinition], + new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]: + for key, value in new_protocols.items(): + if key not in old_protocols: + old_protocols[key] = value + else: + functions = combine_dicts(old_protocols[key].functions, value.functions) + old_protocols[key] = ProtocolDefinition(functions=functions) + return old_protocols + + +T = TypeVar('T') + + +def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]: + return {**old_dict, **new_dict} + + +ignored_dependencies = [ + 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', + 'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', + 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', + 'bytes', 'byte', 'ByteList', 'ByteVector', + 'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set', + 'Optional', 'Sequence', +] + + +def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None: + """ + Determines which SSZ Object is dependent on which other and orders them appropriately + """ + items = list(objects.items()) + for key, value in items: + dependencies = [] + for line in value.split('\n'): + if not re.match(r'\s+\w+: .+', line): + continue # skip whitespace etc. 
+ line = line[line.index(':') + 1:] # strip of field name + if '#' in line: + line = line[:line.index('#')] # strip of comment + dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies + dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants + dependencies = filter(lambda x: x not in ignored_dependencies, dependencies) + dependencies = filter(lambda x: x not in custom_types, dependencies) + for dep in dependencies: + key_list = list(objects.keys()) + for item in [dep, key] + key_list[key_list.index(dep)+1:]: + objects[item] = objects.pop(item) + + +def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: + """ + Takes in old spec and new spec ssz objects, combines them, + and returns the newer versions of the objects in dependency order. + """ + for key, value in new_objects.items(): + old_objects[key] = value + return old_objects + + +def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: + """ + Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
+ """ + protocols = combine_protocols(spec0.protocols, spec1.protocols) + functions = combine_dicts(spec0.functions, spec1.functions) + custom_types = combine_dicts(spec0.custom_types, spec1.custom_types) + constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars) + preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars) + config_vars = combine_dicts(spec0.config_vars, spec1.config_vars) + ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants) + ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types) + dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses) + return SpecObject( + functions=functions, + protocols=protocols, + custom_types=custom_types, + constant_vars=constant_vars, + preset_vars=preset_vars, + config_vars=config_vars, + ssz_dep_constants=ssz_dep_constants, + ssz_objects=ssz_objects, + dataclasses=dataclasses, + ) + + +def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]: + """ + Parses a dict of basic str/int/list types into a dict for insertion into the spec code. + """ + out: Dict[str, str] = dict() + for k, v in conf.items(): + if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'): + # Represent byte data with string, to avoid misinterpretation as big-endian int. + # Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer. 
+ out[k] = f"'{v}'" + else: + out[k] = str(int(v)) + return out diff --git a/pysetup/md_doc_paths.py b/pysetup/md_doc_paths.py new file mode 100644 index 0000000000..781ae41db3 --- /dev/null +++ b/pysetup/md_doc_paths.py @@ -0,0 +1,78 @@ +import os + +from .constants import ( + PHASE0, + ALTAIR, + BELLATRIX, + CAPELLA, + DENEB, + EIP6110, + WHISK, + EIP7002, +) + + +PREVIOUS_FORK_OF = { + PHASE0: None, + ALTAIR: PHASE0, + BELLATRIX: ALTAIR, + CAPELLA: BELLATRIX, + DENEB: CAPELLA, + EIP6110: DENEB, + WHISK: CAPELLA, + EIP7002: CAPELLA, +} + +ALL_FORKS = list(PREVIOUS_FORK_OF.keys()) + +IGNORE_SPEC_FILES = [ + "specs/phase0/deposit-contract.md" +] + +EXTRA_SPEC_FILES = { + BELLATRIX: "sync/optimistic.md" +} + + +def is_post_fork(a, b) -> bool: + """ + Returns true if fork a is after b, or if a == b + """ + if a == b: + return True + + prev_fork = PREVIOUS_FORK_OF[a] + if prev_fork == b: + return True + elif prev_fork == None: + return False + else: + return is_post_fork(prev_fork, b) + + +def get_fork_directory(fork): + dir1 = f'specs/{fork}' + if os.path.exists(dir1): + return dir1 + dir2 = f'specs/_features/{fork}' + if os.path.exists(dir2): + return dir2 + raise FileNotFoundError(f"No directory found for fork: {fork}") + + +def get_md_doc_paths(spec_fork: str) -> str: + md_doc_paths = "" + + for fork in ALL_FORKS: + if is_post_fork(spec_fork, fork): + # Append all files in fork directory recursively + for root, dirs, files in os.walk(get_fork_directory(fork)): + for filename in files: + filepath = os.path.join(root, filename) + if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES: + md_doc_paths += filepath + "\n" + # Append extra files if any + if fork in EXTRA_SPEC_FILES: + md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n" + + return md_doc_paths diff --git a/pysetup/spec_builders/__init__.py b/pysetup/spec_builders/__init__.py new file mode 100644 index 0000000000..794ae50d29 --- /dev/null +++ b/pysetup/spec_builders/__init__.py @@ -0,0 +1,17 @@ +from 
.phase0 import Phase0SpecBuilder +from .altair import AltairSpecBuilder +from .bellatrix import BellatrixSpecBuilder +from .capella import CapellaSpecBuilder +from .deneb import DenebSpecBuilder +from .eip6110 import EIP6110SpecBuilder +from .eip7002 import EIP7002SpecBuilder +from .whisk import WhiskSpecBuilder + + +spec_builders = { + builder.fork: builder + for builder in ( + Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder, + EIP6110SpecBuilder, EIP7002SpecBuilder, WhiskSpecBuilder, + ) +} diff --git a/pysetup/spec_builders/altair.py b/pysetup/spec_builders/altair.py new file mode 100644 index 0000000000..4b35380de9 --- /dev/null +++ b/pysetup/spec_builders/altair.py @@ -0,0 +1,54 @@ +from typing import Dict + +from .base import BaseSpecBuilder +from ..constants import ALTAIR, OPTIMIZED_BLS_AGGREGATE_PUBKEYS + + +class AltairSpecBuilder(BaseSpecBuilder): + fork: str = ALTAIR + + @classmethod + def imports(cls, preset_name: str) -> str: + return f''' +from typing import NewType, Union as PyUnion + +from eth2spec.phase0 import {preset_name} as phase0 +from eth2spec.test.helpers.merkle import build_proof +from eth2spec.utils.ssz.ssz_typing import Path +''' + + @classmethod + def preparations(cls): + return ''' +SSZVariableName = str +GeneralizedIndex = NewType('GeneralizedIndex', int) +''' + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex: + ssz_path = Path(ssz_class) + for item in path: + ssz_path = ssz_path / item + return GeneralizedIndex(ssz_path.gindex()) + + +def compute_merkle_proof_for_state(state: BeaconState, + index: GeneralizedIndex) -> Sequence[Bytes32]: + return build_proof(state.get_backing(), index)''' + + + @classmethod + def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: + return { + 'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)', + 'CURRENT_SYNC_COMMITTEE_INDEX': 
'GeneralizedIndex(54)', + 'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)', + } + + @classmethod + def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: + if "eth_aggregate_pubkeys" in functions: + functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip() + return functions diff --git a/pysetup/spec_builders/base.py b/pysetup/spec_builders/base.py new file mode 100644 index 0000000000..44743682ab --- /dev/null +++ b/pysetup/spec_builders/base.py @@ -0,0 +1,52 @@ +from abc import ABC, abstractmethod +from typing import Sequence, Dict +from pathlib import Path + +class BaseSpecBuilder(ABC): + @property + @abstractmethod + def fork(self) -> str: + raise NotImplementedError() + + @classmethod + def imports(cls, preset_name: str) -> str: + """ + Import objects from other libraries. + """ + return "" + + @classmethod + def preparations(cls) -> str: + """ + Define special types/constants for building pyspec or call functions. + """ + return "" + + @classmethod + def sundry_functions(cls) -> str: + """ + The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance. + """ + return "" + + @classmethod + def execution_engine_cls(cls) -> str: + return "" + + @classmethod + def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: + """ + The constants that are required for SSZ objects. + """ + return {} + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO + """ + The constants that are required for custom types. 
+ """ + return {} + + @classmethod + def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: + return functions diff --git a/pysetup/spec_builders/bellatrix.py b/pysetup/spec_builders/bellatrix.py new file mode 100644 index 0000000000..c5753d7df0 --- /dev/null +++ b/pysetup/spec_builders/bellatrix.py @@ -0,0 +1,66 @@ +from .base import BaseSpecBuilder +from ..constants import BELLATRIX + +class BellatrixSpecBuilder(BaseSpecBuilder): + fork: str = BELLATRIX + + @classmethod + def imports(cls, preset_name: str): + return f''' +from typing import Protocol +from eth2spec.altair import {preset_name} as altair +from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector +''' + + @classmethod + def sundry_functions(cls) -> str: + return """ +ExecutionState = Any + + +def get_pow_block(hash: Bytes32) -> Optional[PowBlock]: + return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0)) + + +def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState: + pass + + +def get_pow_chain_head() -> PowBlock: + pass""" + + @classmethod + def execution_engine_cls(cls) -> str: + return """ +class NoopExecutionEngine(ExecutionEngine): + + def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: + return True + + def notify_forkchoice_updated(self: ExecutionEngine, + head_block_hash: Hash32, + safe_block_hash: Hash32, + finalized_block_hash: Hash32, + payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: + pass + + def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: + # pylint: disable=unused-argument + raise NotImplementedError("no default block production") + + def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: + return True + + def verify_and_notify_new_payload(self: ExecutionEngine, + new_payload_request: NewPayloadRequest) -> bool: + return True + + +EXECUTION_ENGINE = 
NoopExecutionEngine()""" + + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + return { + 'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value, + } diff --git a/pysetup/spec_builders/capella.py b/pysetup/spec_builders/capella.py new file mode 100644 index 0000000000..03b619b66a --- /dev/null +++ b/pysetup/spec_builders/capella.py @@ -0,0 +1,29 @@ +from typing import Dict + +from .base import BaseSpecBuilder +from ..constants import CAPELLA + + +class CapellaSpecBuilder(BaseSpecBuilder): + fork: str = CAPELLA + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.bellatrix import {preset_name} as bellatrix +''' + + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def compute_merkle_proof_for_block_body(body: BeaconBlockBody, + index: GeneralizedIndex) -> Sequence[Bytes32]: + return build_proof(body.get_backing(), index)''' + + + @classmethod + def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: + return { + 'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)', + } diff --git a/pysetup/spec_builders/deneb.py b/pysetup/spec_builders/deneb.py new file mode 100644 index 0000000000..b4e180c2ae --- /dev/null +++ b/pysetup/spec_builders/deneb.py @@ -0,0 +1,71 @@ +from .base import BaseSpecBuilder +from ..constants import DENEB + + +class DenebSpecBuilder(BaseSpecBuilder): + fork: str = DENEB + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.capella import {preset_name} as capella +''' + + + @classmethod + def preparations(cls): + return ''' +T = TypeVar('T') # For generic function +''' + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]: + # pylint: disable=unused-argument + return ("TEST", "TEST")''' + + @classmethod + def execution_engine_cls(cls) -> str: + return """ +class 
NoopExecutionEngine(ExecutionEngine): + + def notify_new_payload(self: ExecutionEngine, + execution_payload: ExecutionPayload, + parent_beacon_block_root: Root) -> bool: + return True + + def notify_forkchoice_updated(self: ExecutionEngine, + head_block_hash: Hash32, + safe_block_hash: Hash32, + finalized_block_hash: Hash32, + payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: + pass + + def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: + # pylint: disable=unused-argument + raise NotImplementedError("no default block production") + + def is_valid_block_hash(self: ExecutionEngine, + execution_payload: ExecutionPayload, + parent_beacon_block_root: Root) -> bool: + return True + + def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool: + return True + + def verify_and_notify_new_payload(self: ExecutionEngine, + new_payload_request: NewPayloadRequest) -> bool: + return True + + +EXECUTION_ENGINE = NoopExecutionEngine()""" + + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + return { + 'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value, + 'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value, + 'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value, + } diff --git a/pysetup/spec_builders/eip6110.py b/pysetup/spec_builders/eip6110.py new file mode 100644 index 0000000000..e0fd253f19 --- /dev/null +++ b/pysetup/spec_builders/eip6110.py @@ -0,0 +1,12 @@ +from .base import BaseSpecBuilder +from ..constants import EIP6110 + + +class EIP6110SpecBuilder(BaseSpecBuilder): + fork: str = EIP6110 + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.deneb import {preset_name} as deneb +''' diff --git a/pysetup/spec_builders/eip7002.py b/pysetup/spec_builders/eip7002.py new file mode 100644 index 0000000000..a9f6ebc9b1 --- 
/dev/null +++ b/pysetup/spec_builders/eip7002.py @@ -0,0 +1,12 @@ +from .base import BaseSpecBuilder +from ..constants import EIP7002 + + +class EIP7002SpecBuilder(BaseSpecBuilder): + fork: str = EIP7002 + + @classmethod + def imports(cls, preset_name: str): + return super().imports(preset_name) + f''' +from eth2spec.capella import {preset_name} as capella +''' diff --git a/pysetup/spec_builders/phase0.py b/pysetup/spec_builders/phase0.py new file mode 100644 index 0000000000..6b3d826179 --- /dev/null +++ b/pysetup/spec_builders/phase0.py @@ -0,0 +1,105 @@ +from .base import BaseSpecBuilder +from ..constants import PHASE0 + + +class Phase0SpecBuilder(BaseSpecBuilder): + fork: str = PHASE0 + + @classmethod + def imports(cls, preset_name: str) -> str: + return '''from lru import LRU +from dataclasses import ( + dataclass, + field, +) +from typing import ( + Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final +) + +from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes +from eth2spec.utils.ssz.ssz_typing import ( + View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256, + Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist) +from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401 +from eth2spec.utils import bls +from eth2spec.utils.hash_function import hash +''' + + @classmethod + def preparations(cls) -> str: + return ''' +SSZObject = TypeVar('SSZObject', bound=View) +''' + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def get_eth1_data(block: Eth1Block) -> Eth1Data: + """ + A stub function return mocking Eth1Data. 
+ """ + return Eth1Data( + deposit_root=block.deposit_root, + deposit_count=block.deposit_count, + block_hash=hash_tree_root(block)) + + +def cache_this(key_fn, value_fn, lru_size): # type: ignore + cache_dict = LRU(size=lru_size) + + def wrapper(*args, **kw): # type: ignore + key = key_fn(*args, **kw) + nonlocal cache_dict + if key not in cache_dict: + cache_dict[key] = value_fn(*args, **kw) + return cache_dict[key] + return wrapper + + +_compute_shuffled_index = compute_shuffled_index +compute_shuffled_index = cache_this( + lambda index, index_count, seed: (index, index_count, seed), + _compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3) + +_get_total_active_balance = get_total_active_balance +get_total_active_balance = cache_this( + lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)), + _get_total_active_balance, lru_size=10) + +_get_base_reward = get_base_reward +get_base_reward = cache_this( + lambda state, index: (state.validators.hash_tree_root(), state.slot, index), + _get_base_reward, lru_size=2048) + +_get_committee_count_per_slot = get_committee_count_per_slot +get_committee_count_per_slot = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + _get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3) + +_get_active_validator_indices = get_active_validator_indices +get_active_validator_indices = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + _get_active_validator_indices, lru_size=3) + +_get_beacon_committee = get_beacon_committee +get_beacon_committee = cache_this( + lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), + _get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3) + +_get_matching_target_attestations = get_matching_target_attestations +get_matching_target_attestations = cache_this( + lambda state, epoch: (state.hash_tree_root(), epoch), + 
_get_matching_target_attestations, lru_size=10) + +_get_matching_head_attestations = get_matching_head_attestations +get_matching_head_attestations = cache_this( + lambda state, epoch: (state.hash_tree_root(), epoch), + _get_matching_head_attestations, lru_size=10) + +_get_attesting_indices = get_attesting_indices +get_attesting_indices = cache_this( + lambda state, data, bits: ( + state.randao_mixes.hash_tree_root(), + state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root() + ), + _get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)''' diff --git a/pysetup/spec_builders/whisk.py b/pysetup/spec_builders/whisk.py new file mode 100644 index 0000000000..e9cd4a67da --- /dev/null +++ b/pysetup/spec_builders/whisk.py @@ -0,0 +1,20 @@ +from .base import BaseSpecBuilder +from ..constants import WHISK + + +class WhiskSpecBuilder(BaseSpecBuilder): + fork: str = WHISK + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.capella import {preset_name} as capella +''' + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + # Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof` + return { + 'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value, + 'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value, + } diff --git a/pysetup/typing.py b/pysetup/typing.py new file mode 100644 index 0000000000..56f0cf3b10 --- /dev/null +++ b/pysetup/typing.py @@ -0,0 +1,32 @@ +from pathlib import Path +from typing import Dict, NamedTuple, Optional, List + + +class ProtocolDefinition(NamedTuple): + # just function definitions currently. May expand with configuration vars in future. + functions: Dict[str, str] + + +class VariableDefinition(NamedTuple): + type_name: Optional[str] + value: str + comment: Optional[str] # e.g. 
"noqa: E501" + type_hint: Optional[str] # e.g., "Final" + + +class SpecObject(NamedTuple): + functions: Dict[str, str] + protocols: Dict[str, ProtocolDefinition] + custom_types: Dict[str, str] + constant_vars: Dict[str, VariableDefinition] + preset_vars: Dict[str, VariableDefinition] + config_vars: Dict[str, VariableDefinition] + ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects + ssz_objects: Dict[str, str] + dataclasses: Dict[str, str] + + +class BuildTarget(NamedTuple): + name: str + preset_paths: List[Path] + config_path: Path diff --git a/requirements_preinstallation.txt b/requirements_preinstallation.txt new file mode 100644 index 0000000000..69d9a66607 --- /dev/null +++ b/requirements_preinstallation.txt @@ -0,0 +1,3 @@ +pip>=23.1.2 +wheel>=0.40.0 +setuptools>=68.0.0 diff --git a/setup.py b/setup.py index 5dfe29c2ae..efa57ac2b1 100644 --- a/setup.py +++ b/setup.py @@ -4,17 +4,36 @@ from distutils.util import convert_path from pathlib import Path import os -import re import string -import textwrap -from typing import Dict, NamedTuple, List, Sequence, Optional, TypeVar, Tuple -from abc import ABC, abstractmethod +from typing import Dict, List, Sequence, Optional, Tuple import ast import subprocess import sys import copy from collections import OrderedDict import json +from functools import reduce + +from pysetup.constants import ( + # code names + PHASE0, + # misc + ETH2_SPEC_COMMENT_PREFIX, +) +from pysetup.spec_builders import spec_builders +from pysetup.typing import ( + BuildTarget, + ProtocolDefinition, + SpecObject, + VariableDefinition, +) +from pysetup.helpers import ( + combine_spec_objects, + dependency_order_class_objects, + objects_to_spec, + parse_config_vars, +) +from pysetup.md_doc_paths import get_md_doc_paths # NOTE: have to programmatically include third-party dependencies in `setup.py`. 
@@ -41,104 +60,6 @@ def installPackage(package: str): from marko.ext.gfm.elements import Table -# Definitions in context.py -PHASE0 = 'phase0' -ALTAIR = 'altair' -BELLATRIX = 'bellatrix' -CAPELLA = 'capella' -DENEB = 'deneb' -EIP6110 = 'eip6110' -WHISK = 'whisk' - -PREVIOUS_FORK_OF = { - PHASE0: None, - ALTAIR: PHASE0, - BELLATRIX: ALTAIR, - CAPELLA: BELLATRIX, - DENEB: CAPELLA, - EIP6110: DENEB, - WHISK: CAPELLA, -} - -ALL_FORKS = list(PREVIOUS_FORK_OF.keys()) - -IGNORE_SPEC_FILES = [ - "specs/phase0/deposit-contract.md" -] - -EXTRA_SPEC_FILES = { - BELLATRIX: "sync/optimistic.md" -} - -# The helper functions that are used when defining constants -CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = ''' -def ceillog2(x: int) -> uint64: - if x < 1: - raise ValueError(f"ceillog2 accepts only positive values, x={x}") - return uint64((x - 1).bit_length()) - - -def floorlog2(x: int) -> uint64: - if x < 1: - raise ValueError(f"floorlog2 accepts only positive values, x={x}") - return uint64(x.bit_length() - 1) -''' - - -OPTIMIZED_BLS_AGGREGATE_PUBKEYS = ''' -def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey: - return bls.AggregatePKs(pubkeys) -''' - - -class ProtocolDefinition(NamedTuple): - # just function definitions currently. May expand with configuration vars in future. - functions: Dict[str, str] - - -class VariableDefinition(NamedTuple): - type_name: Optional[str] - value: str - comment: Optional[str] # e.g. 
"noqa: E501" - type_hint: Optional[str] # e.g., "Final" - - -class SpecObject(NamedTuple): - functions: Dict[str, str] - protocols: Dict[str, ProtocolDefinition] - custom_types: Dict[str, str] - constant_vars: Dict[str, VariableDefinition] - preset_vars: Dict[str, VariableDefinition] - config_vars: Dict[str, VariableDefinition] - ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects - ssz_objects: Dict[str, str] - dataclasses: Dict[str, str] - - -def is_post_fork(a, b) -> bool: - """ - Returns true if fork a is after b, or if a == b - """ - if a == b: - return True - - prev_fork = PREVIOUS_FORK_OF[a] - if prev_fork == b: - return True - elif prev_fork == None: - return False - else: - return is_post_fork(prev_fork, b) - -def get_fork_directory(fork): - dir1 = f'specs/{fork}' - if os.path.exists(dir1): - return dir1 - dir2 = f'specs/_features/{fork}' - if os.path.exists(dir2): - return dir2 - raise FileNotFoundError(f"No directory found for fork: {fork}") - def _get_name_from_heading(heading: Heading) -> Optional[str]: last_child = heading.children[-1] if isinstance(last_child, CodeSpan): @@ -203,13 +124,12 @@ def _load_kzg_trusted_setups(preset_name): return trusted_setup_G1, trusted_setup_G2, trusted_setup_G1_lagrange, roots_of_unity + ALL_KZG_SETUPS = { 'minimal': _load_kzg_trusted_setups('minimal'), 'mainnet': _load_kzg_trusted_setups('mainnet') } -ETH2_SPEC_COMMENT_PREFIX = "eth2spec:" - def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]: _, _, title = child._parse_info @@ -221,7 +141,7 @@ def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]: return title[len(ETH2_SPEC_COMMENT_PREFIX):].strip() -def _parse_value(name: str, typed_value: str, type_hint: Optional[str]=None) -> VariableDefinition: +def _parse_value(name: str, typed_value: str, type_hint: Optional[str] = None) -> VariableDefinition: comment = None if name == "BLS12_381_Q": comment = "noqa: E501" @@ -350,683 +270,6 @@ def get_spec(file_name: Path, preset: 
Dict[str, str], config: Dict[str, str], pr ) -class SpecBuilder(ABC): - @property - @abstractmethod - def fork(self) -> str: - raise NotImplementedError() - - @classmethod - @abstractmethod - def imports(cls, preset_name: str) -> str: - """ - Import objects from other libraries. - """ - raise NotImplementedError() - - @classmethod - @abstractmethod - def preparations(cls) -> str: - """ - Define special types/constants for building pyspec or call functions. - """ - raise NotImplementedError() - - @classmethod - @abstractmethod - def sundry_functions(cls) -> str: - """ - The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance. - """ - raise NotImplementedError() - - @classmethod - def execution_engine_cls(cls) -> str: - raise NotImplementedError() - - @classmethod - @abstractmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - """ - The constants that are required for SSZ objects. - """ - raise NotImplementedError() - - @classmethod - @abstractmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO - """ - The constants that are required for custom types. 
- """ - raise NotImplementedError() - - @classmethod - @abstractmethod - def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: - raise NotImplementedError() - - @classmethod - @abstractmethod - def build_spec(cls, preset_name: str, - source_files: List[Path], preset_files: Sequence[Path], config_file: Path) -> str: - raise NotImplementedError() - - -# -# Phase0SpecBuilder -# -class Phase0SpecBuilder(SpecBuilder): - fork: str = PHASE0 - - @classmethod - def imports(cls, preset_name: str) -> str: - return '''from lru import LRU -from dataclasses import ( - dataclass, - field, -) -from typing import ( - Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final -) - -from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes -from eth2spec.utils.ssz.ssz_typing import ( - View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256, - Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist) -from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401 -from eth2spec.utils import bls -from eth2spec.utils.hash_function import hash -''' - - @classmethod - def preparations(cls) -> str: - return ''' -SSZObject = TypeVar('SSZObject', bound=View) -''' - - @classmethod - def sundry_functions(cls) -> str: - return ''' -def get_eth1_data(block: Eth1Block) -> Eth1Data: - """ - A stub function return mocking Eth1Data. 
- """ - return Eth1Data( - deposit_root=block.deposit_root, - deposit_count=block.deposit_count, - block_hash=hash_tree_root(block)) - - -def cache_this(key_fn, value_fn, lru_size): # type: ignore - cache_dict = LRU(size=lru_size) - - def wrapper(*args, **kw): # type: ignore - key = key_fn(*args, **kw) - nonlocal cache_dict - if key not in cache_dict: - cache_dict[key] = value_fn(*args, **kw) - return cache_dict[key] - return wrapper - - -_compute_shuffled_index = compute_shuffled_index -compute_shuffled_index = cache_this( - lambda index, index_count, seed: (index, index_count, seed), - _compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3) - -_get_total_active_balance = get_total_active_balance -get_total_active_balance = cache_this( - lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)), - _get_total_active_balance, lru_size=10) - -_get_base_reward = get_base_reward -get_base_reward = cache_this( - lambda state, index: (state.validators.hash_tree_root(), state.slot, index), - _get_base_reward, lru_size=2048) - -_get_committee_count_per_slot = get_committee_count_per_slot -get_committee_count_per_slot = cache_this( - lambda state, epoch: (state.validators.hash_tree_root(), epoch), - _get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3) - -_get_active_validator_indices = get_active_validator_indices -get_active_validator_indices = cache_this( - lambda state, epoch: (state.validators.hash_tree_root(), epoch), - _get_active_validator_indices, lru_size=3) - -_get_beacon_committee = get_beacon_committee -get_beacon_committee = cache_this( - lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), - _get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3) - -_get_matching_target_attestations = get_matching_target_attestations -get_matching_target_attestations = cache_this( - lambda state, epoch: (state.hash_tree_root(), epoch), - 
_get_matching_target_attestations, lru_size=10) - -_get_matching_head_attestations = get_matching_head_attestations -get_matching_head_attestations = cache_this( - lambda state, epoch: (state.hash_tree_root(), epoch), - _get_matching_head_attestations, lru_size=10) - -_get_attesting_indices = get_attesting_indices -get_attesting_indices = cache_this( - lambda state, data, bits: ( - state.randao_mixes.hash_tree_root(), - state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root() - ), - _get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)''' - - - @classmethod - def execution_engine_cls(cls) -> str: - return "" - - - @classmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - return {} - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: - return {} - - @classmethod - def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: - return functions - - @classmethod - def build_spec(cls, preset_name: str, - source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str: - return _build_spec(preset_name, cls.fork, source_files, preset_files, config_file) - - -# -# AltairSpecBuilder -# -class AltairSpecBuilder(Phase0SpecBuilder): - fork: str = ALTAIR - - @classmethod - def imports(cls, preset_name: str) -> str: - return super().imports(preset_name) + '\n' + f''' -from typing import NewType, Union as PyUnion - -from eth2spec.phase0 import {preset_name} as phase0 -from eth2spec.test.helpers.merkle import build_proof -from eth2spec.utils.ssz.ssz_typing import Path -''' - - @classmethod - def preparations(cls): - return super().preparations() + '\n' + ''' -SSZVariableName = str -GeneralizedIndex = NewType('GeneralizedIndex', int) -''' - - @classmethod - def sundry_functions(cls) -> str: - return super().sundry_functions() + '\n\n' + ''' -def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> 
GeneralizedIndex: - ssz_path = Path(ssz_class) - for item in path: - ssz_path = ssz_path / item - return GeneralizedIndex(ssz_path.gindex()) - - -def compute_merkle_proof_for_state(state: BeaconState, - index: GeneralizedIndex) -> Sequence[Bytes32]: - return build_proof(state.get_backing(), index)''' - - - @classmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - constants = { - 'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)', - 'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)', - 'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)', - } - return {**super().hardcoded_ssz_dep_constants(), **constants} - - @classmethod - def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: - if "eth_aggregate_pubkeys" in functions: - functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip() - return super().implement_optimizations(functions) - -# -# BellatrixSpecBuilder -# -class BellatrixSpecBuilder(AltairSpecBuilder): - fork: str = BELLATRIX - - @classmethod - def imports(cls, preset_name: str): - return super().imports(preset_name) + f''' -from typing import Protocol -from eth2spec.altair import {preset_name} as altair -from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector -''' - - @classmethod - def preparations(cls): - return super().preparations() - - @classmethod - def sundry_functions(cls) -> str: - return super().sundry_functions() + '\n\n' + """ -ExecutionState = Any - - -def get_pow_block(hash: Bytes32) -> Optional[PowBlock]: - return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0)) - - -def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState: - pass - - -def get_pow_chain_head() -> PowBlock: - pass""" - - @classmethod - def execution_engine_cls(cls) -> str: - return "\n\n" + """ -class NoopExecutionEngine(ExecutionEngine): - - def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: - return True - - 
def notify_forkchoice_updated(self: ExecutionEngine, - head_block_hash: Hash32, - safe_block_hash: Hash32, - finalized_block_hash: Hash32, - payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: - pass - - def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: - # pylint: disable=unused-argument - raise NotImplementedError("no default block production") - - def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: - return True - - def verify_and_notify_new_payload(self: ExecutionEngine, - new_payload_request: NewPayloadRequest) -> bool: - return True - - -EXECUTION_ENGINE = NoopExecutionEngine()""" - - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: - constants = { - 'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value, - } - return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants} - - -# -# CapellaSpecBuilder -# -class CapellaSpecBuilder(BellatrixSpecBuilder): - fork: str = CAPELLA - - @classmethod - def imports(cls, preset_name: str): - return super().imports(preset_name) + f''' -from eth2spec.bellatrix import {preset_name} as bellatrix -''' - - - @classmethod - def sundry_functions(cls) -> str: - return super().sundry_functions() + '\n\n' + ''' -def compute_merkle_proof_for_block_body(body: BeaconBlockBody, - index: GeneralizedIndex) -> Sequence[Bytes32]: - return build_proof(body.get_backing(), index)''' - - - @classmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - constants = { - 'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)', - } - return {**super().hardcoded_ssz_dep_constants(), **constants} - -# -# DenebSpecBuilder -# -class DenebSpecBuilder(CapellaSpecBuilder): - fork: str = DENEB - - @classmethod - def imports(cls, preset_name: str): - return super().imports(preset_name) + f''' -from eth2spec.capella import {preset_name} as capella -''' - - - @classmethod - 
def preparations(cls): - return super().preparations() + '\n' + ''' -T = TypeVar('T') # For generic function -''' - - @classmethod - def sundry_functions(cls) -> str: - return super().sundry_functions() + '\n\n' + ''' -def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]: - # pylint: disable=unused-argument - return ("TEST", "TEST")''' - - @classmethod - def execution_engine_cls(cls) -> str: - return "\n\n" + """ -class NoopExecutionEngine(ExecutionEngine): - - def notify_new_payload(self: ExecutionEngine, - execution_payload: ExecutionPayload, - parent_beacon_block_root: Root) -> bool: - return True - - def notify_forkchoice_updated(self: ExecutionEngine, - head_block_hash: Hash32, - safe_block_hash: Hash32, - finalized_block_hash: Hash32, - payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: - pass - - def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: - # pylint: disable=unused-argument - raise NotImplementedError("no default block production") - - def is_valid_block_hash(self: ExecutionEngine, - execution_payload: ExecutionPayload, - parent_beacon_block_root: Root) -> bool: - return True - - def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool: - return True - - def verify_and_notify_new_payload(self: ExecutionEngine, - new_payload_request: NewPayloadRequest) -> bool: - return True - - -EXECUTION_ENGINE = NoopExecutionEngine()""" - - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: - constants = { - 'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value, - 'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value, - 'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value, - } - return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants} - - -# -# EIP6110SpecBuilder -# -class 
EIP6110SpecBuilder(DenebSpecBuilder): - fork: str = EIP6110 - - @classmethod - def imports(cls, preset_name: str): - return super().imports(preset_name) + f''' -from eth2spec.deneb import {preset_name} as deneb -''' - -# -# WhiskSpecBuilder -# -class WhiskSpecBuilder(CapellaSpecBuilder): - fork: str = WHISK - - @classmethod - def imports(cls, preset_name: str): - return super().imports(preset_name) + f''' -from eth2spec.capella import {preset_name} as capella -''' - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: - # Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof` - constants = { - 'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value, - 'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value, - } - return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants} - - -spec_builders = { - builder.fork: builder - for builder in ( - Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder, - EIP6110SpecBuilder, WhiskSpecBuilder, - ) -} - - -def is_byte_vector(value: str) -> bool: - return value.startswith(('ByteVector')) - - -def make_function_abstract(protocol_def: ProtocolDefinition, key: str): - function = protocol_def.functions[key].split('"""') - protocol_def.functions[key] = function[0] + "..." - - -def objects_to_spec(preset_name: str, - spec_object: SpecObject, - builder: SpecBuilder, - ordered_class_objects: Dict[str, str]) -> str: - """ - Given all the objects that constitute a spec, combine them into a single pyfile. 
- """ - new_type_definitions = ( - '\n\n'.join( - [ - f"class {key}({value}):\n pass\n" if not is_byte_vector(value) else f"class {key}({value}): # type: ignore\n pass\n" - for key, value in spec_object.custom_types.items() - ] - ) - ) - - def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str: - abstract_functions = ["verify_and_notify_new_payload"] - for key in protocol_def.functions.keys(): - if key in abstract_functions: - make_function_abstract(protocol_def, key) - - protocol = f"class {protocol_name}(Protocol):" - for fn_source in protocol_def.functions.values(): - fn_source = fn_source.replace("self: "+protocol_name, "self") - protocol += "\n\n" + textwrap.indent(fn_source, " ") - return protocol - - protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items()) - for k in list(spec_object.functions): - if k in [ - "ceillog2", - "floorlog2", - "compute_merkle_proof_for_block_body", - "compute_merkle_proof_for_state", - ]: - del spec_object.functions[k] - functions = builder.implement_optimizations(spec_object.functions) - functions_spec = '\n\n\n'.join(functions.values()) - - # Access global dict of config vars for runtime configurables - for name in spec_object.config_vars.keys(): - functions_spec = re.sub(r"\b%s\b" % name, 'config.' 
+ name, functions_spec) - - def format_config_var(name: str, vardef: VariableDefinition) -> str: - if vardef.type_name is None: - out = f'{name}={vardef.value},' - else: - out = f'{name}={vardef.type_name}({vardef.value}),' - if vardef.comment is not None: - out += f' # {vardef.comment}' - return out - - config_spec = 'class Configuration(NamedTuple):\n' - config_spec += ' PRESET_BASE: str\n' - config_spec += '\n'.join(f' {k}: {v.type_name if v.type_name is not None else "int"}' - for k, v in spec_object.config_vars.items()) - config_spec += '\n\n\nconfig = Configuration(\n' - config_spec += f' PRESET_BASE="{preset_name}",\n' - config_spec += '\n'.join(' ' + format_config_var(k, v) for k, v in spec_object.config_vars.items()) - config_spec += '\n)\n' - - def format_constant(name: str, vardef: VariableDefinition) -> str: - if vardef.type_name is None: - if vardef.type_hint is None: - out = f'{name} = {vardef.value}' - else: - out = f'{name}: {vardef.type_hint} = {vardef.value}' - else: - out = f'{name} = {vardef.type_name}({vardef.value})' - if vardef.comment is not None: - out += f' # {vardef.comment}' - return out - - constant_vars_spec = '# Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items()) - preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items()) - ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values()) - ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_ssz_dep_constants()[x]), builder.hardcoded_ssz_dep_constants())) - ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), builder.hardcoded_ssz_dep_constants())) - custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_custom_type_dep_constants(spec_object)[x]), builder.hardcoded_custom_type_dep_constants(spec_object))) - spec = ( - builder.imports(preset_name) - + 
builder.preparations() - + '\n\n' + f"fork = \'{builder.fork}\'\n" - # The constants that some SSZ containers require. Need to be defined before `new_type_definitions` - + ('\n\n' + custom_type_dep_constants + '\n' if custom_type_dep_constants != '' else '') - + '\n\n' + new_type_definitions - + '\n' + CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS - # The constants that some SSZ containers require. Need to be defined before `constants_spec` - + ('\n\n' + ssz_dep_constants if ssz_dep_constants != '' else '') - + '\n\n' + constant_vars_spec - + '\n\n' + preset_vars_spec - + '\n\n\n' + config_spec - + '\n\n' + ordered_class_objects_spec - + ('\n\n\n' + protocols_spec if protocols_spec != '' else '') - + '\n\n\n' + functions_spec - + '\n\n' + builder.sundry_functions() - + builder.execution_engine_cls() - # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are - # as same as the spec definition. - + ('\n\n\n' + ssz_dep_constants_verification if ssz_dep_constants_verification != '' else '') - + '\n' - ) - return spec - - -def combine_protocols(old_protocols: Dict[str, ProtocolDefinition], - new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]: - for key, value in new_protocols.items(): - if key not in old_protocols: - old_protocols[key] = value - else: - functions = combine_dicts(old_protocols[key].functions, value.functions) - old_protocols[key] = ProtocolDefinition(functions=functions) - return old_protocols - - -T = TypeVar('T') - - -def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]: - return {**old_dict, **new_dict} - - -ignored_dependencies = [ - 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', - 'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', - 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', - 'bytes', 'byte', 'ByteList', 'ByteVector', - 'Dict', 'dict', 'field', 
'ceillog2', 'floorlog2', 'Set', - 'Optional', 'Sequence', -] - - -def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None: - """ - Determines which SSZ Object is dependent on which other and orders them appropriately - """ - items = list(objects.items()) - for key, value in items: - dependencies = [] - for line in value.split('\n'): - if not re.match(r'\s+\w+: .+', line): - continue # skip whitespace etc. - line = line[line.index(':') + 1:] # strip of field name - if '#' in line: - line = line[:line.index('#')] # strip of comment - dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies - dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants - dependencies = filter(lambda x: x not in ignored_dependencies, dependencies) - dependencies = filter(lambda x: x not in custom_types, dependencies) - for dep in dependencies: - key_list = list(objects.keys()) - for item in [dep, key] + key_list[key_list.index(dep)+1:]: - objects[item] = objects.pop(item) - -def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: - """ - Takes in old spec and new spec ssz objects, combines them, - and returns the newer versions of the objects in dependency order. - """ - for key, value in new_objects.items(): - old_objects[key] = value - return old_objects - - -def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: - """ - Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
- """ - protocols = combine_protocols(spec0.protocols, spec1.protocols) - functions = combine_dicts(spec0.functions, spec1.functions) - custom_types = combine_dicts(spec0.custom_types, spec1.custom_types) - constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars) - preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars) - config_vars = combine_dicts(spec0.config_vars, spec1.config_vars) - ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants) - ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types) - dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses) - return SpecObject( - functions=functions, - protocols=protocols, - custom_types=custom_types, - constant_vars=constant_vars, - preset_vars=preset_vars, - config_vars=config_vars, - ssz_dep_constants=ssz_dep_constants, - ssz_objects=ssz_objects, - dataclasses=dataclasses, - ) - - -def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]: - """ - Parses a dict of basic str/int/list types into a dict for insertion into the spec code. - """ - out: Dict[str, str] = dict() - for k, v in conf.items(): - if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'): - # Represent byte data with string, to avoid misinterpretation as big-endian int. - # Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer. - out[k] = f"'{v}'" - else: - out[k] = str(int(v)) - return out - - def load_preset(preset_files: Sequence[Path]) -> Dict[str, str]: """ Loads the a directory of preset files, merges the result into one preset. 
@@ -1054,8 +297,11 @@ def load_config(config_path: Path) -> Dict[str, str]: return parse_config_vars(config_data) -def _build_spec(preset_name: str, fork: str, - source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str: +def build_spec(fork: str, + preset_name: str, + source_files: Sequence[Path], + preset_files: Sequence[Path], + config_file: Path) -> str: preset = load_preset(preset_files) config = load_config(config_file) all_specs = [get_spec(spec, preset, config, preset_name) for spec in source_files] @@ -1072,13 +318,7 @@ def _build_spec(preset_name: str, fork: str, new_objects = copy.deepcopy(class_objects) dependency_order_class_objects(class_objects, spec_object.custom_types) - return objects_to_spec(preset_name, spec_object, spec_builders[fork], class_objects) - - -class BuildTarget(NamedTuple): - name: str - preset_paths: List[Path] - config_path: Path + return objects_to_spec(preset_name, spec_object, fork, class_objects) class PySpecCommand(Command): @@ -1117,20 +357,7 @@ def finalize_options(self): if len(self.md_doc_paths) == 0: print("no paths were specified, using default markdown file paths for pyspec" " build (spec fork: %s)" % self.spec_fork) - self.md_doc_paths = "" - - for fork in ALL_FORKS: - if is_post_fork(self.spec_fork, fork): - # Append all files in fork directory recursively - for root, dirs, files in os.walk(get_fork_directory(fork)): - for filename in files: - filepath = os.path.join(root, filename) - if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES: - self.md_doc_paths += filepath + "\n" - # Append extra files if any - if fork in EXTRA_SPEC_FILES: - self.md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n" - + self.md_doc_paths = get_md_doc_paths(self.spec_fork) if len(self.md_doc_paths) == 0: raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork) @@ -1163,8 +390,13 @@ def run(self): dir_util.mkpath(self.out_dir) for (name, preset_paths, config_path) in 
self.parsed_build_targets: - spec_str = spec_builders[self.spec_fork].build_spec( - name, self.parsed_md_doc_paths, preset_paths, config_path) + spec_str = build_spec( + spec_builders[self.spec_fork].fork, + name, + self.parsed_md_doc_paths, + preset_paths, + config_path, + ) if self.dry_run: self.announce('dry run successfully prepared contents for spec.' f' out dir: "{self.out_dir}", spec fork: "{self.spec_fork}", build target: "{name}"') @@ -1225,6 +457,7 @@ def run(self): for spec_fork in spec_builders: self.run_pyspec_cmd(spec_fork=spec_fork) + commands = { 'pyspec': PySpecCommand, 'build_py': BuildPyCommand, diff --git a/specs/_features/eip6110/beacon-chain.md b/specs/_features/eip6110/beacon-chain.md index 44980685c4..e2964267d9 100644 --- a/specs/_features/eip6110/beacon-chain.md +++ b/specs/_features/eip6110/beacon-chain.md @@ -91,8 +91,8 @@ class ExecutionPayload(Container): block_hash: Hash32 transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD] - data_gas_used: uint64 - excess_data_gas: uint64 + blob_gas_used: uint64 + excess_blob_gas: uint64 deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD] # [New in EIP6110] ``` @@ -117,8 +117,8 @@ class ExecutionPayloadHeader(Container): block_hash: Hash32 transactions_root: Root withdrawals_root: Root - data_gas_used: uint64 - excess_data_gas: uint64 + blob_gas_used: uint64 + excess_blob_gas: uint64 deposit_receipts_root: Root # [New in EIP6110] ``` @@ -274,8 +274,8 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi block_hash=payload.block_hash, transactions_root=hash_tree_root(payload.transactions), withdrawals_root=hash_tree_root(payload.withdrawals), - data_gas_used=payload.data_gas_used, - excess_data_gas=payload.excess_data_gas, + blob_gas_used=payload.blob_gas_used, + excess_blob_gas=payload.excess_blob_gas, deposit_receipts_root=hash_tree_root(payload.deposit_receipts), # [New in 
EIP6110] ) ``` diff --git a/specs/_features/eip6110/fork.md b/specs/_features/eip6110/fork.md index b3b14a3f8c..6b5ab04319 100644 --- a/specs/_features/eip6110/fork.md +++ b/specs/_features/eip6110/fork.md @@ -88,8 +88,8 @@ def upgrade_to_eip6110(pre: deneb.BeaconState) -> BeaconState: block_hash=pre.latest_execution_payload_header.block_hash, transactions_root=pre.latest_execution_payload_header.transactions_root, withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, - data_gas_used=uint64(0), - excess_data_gas=uint64(0), + blob_gas_used=uint64(0), + excess_blob_gas=uint64(0), deposit_receipts_root=Root(), # [New in EIP-6110] ) post = BeaconState( diff --git a/specs/_features/eip6914/beacon-chain.md b/specs/_features/eip6914/beacon-chain.md index 2c60c9bdb4..1e0b20747e 100644 --- a/specs/_features/eip6914/beacon-chain.md +++ b/specs/_features/eip6914/beacon-chain.md @@ -1,4 +1,4 @@ -EIP-6914 -- The Beacon Chain +# EIP-6914 -- The Beacon Chain ## Table of contents diff --git a/specs/_features/eip6914/fork-choice.md b/specs/_features/eip6914/fork-choice.md new file mode 100644 index 0000000000..25adc82d61 --- /dev/null +++ b/specs/_features/eip6914/fork-choice.md @@ -0,0 +1,36 @@ +# EIP-6914 -- Fork Choice + +## Table of contents + + + + + +- [Introduction](#introduction) +- [Fork choice](#fork-choice) + - [Handlers](#handlers) + - [`on_reused_index`](#on_reused_index) + + + + +## Introduction + +This is the modification of the fork choice according to EIP-6914. + +## Fork choice + +A new handler is added with this upgrade: + +- `on_reused_index(store, index)` whenever a validator index `index: ValidatorIndex` is reused. That is, [`get_index_for_new_validator()`](./beacon-chain.md#get_index_for_new_validator) provides an index due to a return value of `True` from [`is_reusable_validator()`](./beacon-chain.md#is_reusable_validator). + +This new handler is used to update the list of equivocating indices to be synchronized with the canonical chain. 
+ +### Handlers + +#### `on_reused_index` + +```python +def on_reused_index(store: Store, index: ValidatorIndex) -> None: + store.equivocating_indices.discard(index) +``` diff --git a/specs/_features/eip7002/beacon-chain.md b/specs/_features/eip7002/beacon-chain.md new file mode 100644 index 0000000000..39b03e878a --- /dev/null +++ b/specs/_features/eip7002/beacon-chain.md @@ -0,0 +1,300 @@ +# EIP-7002 -- The Beacon Chain + +## Table of contents + + + + + +- [Introduction](#introduction) +- [Preset](#preset) + - [Max operations per block](#max-operations-per-block) +- [Containers](#containers) + - [New containers](#new-containers) + - [`ExecutionLayerExit`](#executionlayerexit) + - [Extended Containers](#extended-containers) + - [`ExecutionPayload`](#executionpayload) + - [`ExecutionPayloadHeader`](#executionpayloadheader) + - [`BeaconState`](#beaconstate) +- [Beacon chain state transition function](#beacon-chain-state-transition-function) + - [Block processing](#block-processing) + - [Execution payload](#execution-payload) + - [Modified `process_execution_payload`](#modified-process_execution_payload) + - [Operations](#operations) + - [Modified `process_operations`](#modified-process_operations) + - [New `process_execution_layer_exit`](#new-process_execution_layer_exit) +- [Testing](#testing) + + + + +## Introduction + +This is the beacon chain specification of the execution layer triggerable exits feature. + +This mechanism relies on the changes proposed by [EIP-7002](http://eips.ethereum.org/EIPS/eip-7002). + +*Note:* This specification is built upon [Capella](../../capella/beacon-chain.md) and is under active development. 
+ +## Preset + +### Max operations per block + +| Name | Value | +| - | - | +| `MAX_EXECUTION_LAYER_EXITS` | `2**4` (= 16) | + +## Containers + +### New containers + +#### `ExecutionLayerExit` + +```python +class ExecutionLayerExit(Container): + source_address: ExecutionAddress + validator_pubkey: BLSPubkey +``` + +### Extended Containers + +#### `ExecutionPayload` + +```python +class ExecutionPayload(Container): + # Execution block header fields + parent_hash: Hash32 + fee_recipient: ExecutionAddress + state_root: Bytes32 + receipts_root: Bytes32 + logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM] + prev_randao: Bytes32 + block_number: uint64 + gas_limit: uint64 + gas_used: uint64 + timestamp: uint64 + extra_data: ByteList[MAX_EXTRA_DATA_BYTES] + base_fee_per_gas: uint256 + # Extra payload fields + block_hash: Hash32 + transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] + withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD] + exits: List[ExecutionLayerExit, MAX_EXECUTION_LAYER_EXITS] # [New in EIP7002] +``` + +#### `ExecutionPayloadHeader` + +```python +class ExecutionPayloadHeader(Container): + # Execution block header fields + parent_hash: Hash32 + fee_recipient: ExecutionAddress + state_root: Bytes32 + receipts_root: Bytes32 + logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM] + prev_randao: Bytes32 + block_number: uint64 + gas_limit: uint64 + gas_used: uint64 + timestamp: uint64 + extra_data: ByteList[MAX_EXTRA_DATA_BYTES] + base_fee_per_gas: uint256 + # Extra payload fields + block_hash: Hash32 + transactions_root: Root + withdrawals_root: Root + exits_root: Root # [New in EIP7002] +``` + +#### `BeaconState` + +```python +class BeaconState(Container): + # Versioning + genesis_time: uint64 + genesis_validators_root: Root + slot: Slot + fork: Fork + # History + latest_block_header: BeaconBlockHeader + block_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT] + state_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT] + historical_roots: List[Root, 
HISTORICAL_ROOTS_LIMIT] + # Eth1 + eth1_data: Eth1Data + eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH] + eth1_deposit_index: uint64 + # Registry + validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] + balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT] + # Randomness + randao_mixes: Vector[Bytes32, EPOCHS_PER_HISTORICAL_VECTOR] + # Slashings + slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR] # Per-epoch sums of slashed effective balances + # Participation + previous_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT] + current_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT] + # Finality + justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH] # Bit set for every recent justified epoch + previous_justified_checkpoint: Checkpoint + current_justified_checkpoint: Checkpoint + finalized_checkpoint: Checkpoint + # Inactivity + inactivity_scores: List[uint64, VALIDATOR_REGISTRY_LIMIT] + # Sync + current_sync_committee: SyncCommittee + next_sync_committee: SyncCommittee + # Execution + latest_execution_payload_header: ExecutionPayloadHeader # [Modified in EIP7002] + # Withdrawals + next_withdrawal_index: WithdrawalIndex + next_withdrawal_validator_index: ValidatorIndex + # Deep history valid from Capella onwards + historical_summaries: List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT] +``` + +## Beacon chain state transition function + +### Block processing + +#### Execution payload + +##### Modified `process_execution_payload` + +```python +def process_execution_payload(state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine) -> None: + payload = body.execution_payload + # Verify consistency of the parent hash with respect to the previous execution payload header + assert payload.parent_hash == state.latest_execution_payload_header.block_hash + # Verify prev_randao + assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state)) + # Verify timestamp + assert 
payload.timestamp == compute_timestamp_at_slot(state, state.slot) + # Verify the execution payload is valid + assert execution_engine.verify_and_notify_new_payload(NewPayloadRequest(execution_payload=payload)) + # Cache execution payload header + state.latest_execution_payload_header = ExecutionPayloadHeader( + parent_hash=payload.parent_hash, + fee_recipient=payload.fee_recipient, + state_root=payload.state_root, + receipts_root=payload.receipts_root, + logs_bloom=payload.logs_bloom, + prev_randao=payload.prev_randao, + block_number=payload.block_number, + gas_limit=payload.gas_limit, + gas_used=payload.gas_used, + timestamp=payload.timestamp, + extra_data=payload.extra_data, + base_fee_per_gas=payload.base_fee_per_gas, + block_hash=payload.block_hash, + transactions_root=hash_tree_root(payload.transactions), + withdrawals_root=hash_tree_root(payload.withdrawals), + exits_root=hash_tree_root(payload.exits), # [New in EIP7002] + ) +``` + +#### Operations + +##### Modified `process_operations` + +*Note*: The function `process_operations` is modified to process `ExecutionLayerExit` operations included in the block. 
+ +```python +def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: + # Verify that outstanding deposits are processed up to the maximum number of deposits + assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) + + def for_ops(operations: Sequence[Any], fn: Callable[[BeaconState, Any], None]) -> None: + for operation in operations: + fn(state, operation) + + for_ops(body.proposer_slashings, process_proposer_slashing) + for_ops(body.attester_slashings, process_attester_slashing) + for_ops(body.attestations, process_attestation) + for_ops(body.deposits, process_deposit) + for_ops(body.voluntary_exits, process_voluntary_exit) + for_ops(body.execution_payload.exits, process_execution_layer_exit) # [New in EIP7002] + for_ops(body.bls_to_execution_changes, process_bls_to_execution_change) +``` + +##### New `process_execution_layer_exit` + +```python +def process_execution_layer_exit(state: BeaconState, execution_layer_exit: ExecutionLayerExit) -> None: + validator_pubkeys = [v.pubkey for v in state.validators] + validator_index = ValidatorIndex(validator_pubkeys.index(execution_layer_exit.validator_pubkey)) + validator = state.validators[validator_index] + + # Verify withdrawal credentials + is_execution_address = validator.withdrawal_credentials[:1] == ETH1_ADDRESS_WITHDRAWAL_PREFIX + is_correct_source_address = validator.withdrawal_credentials[12:] == execution_layer_exit.source_address + if not (is_execution_address and is_correct_source_address): + return + # Verify the validator is active + if not is_active_validator(validator, get_current_epoch(state)): + return + # Verify exit has not been initiated + if validator.exit_epoch != FAR_FUTURE_EPOCH: + return + # Verify the validator has been active long enough + if get_current_epoch(state) < validator.activation_epoch + SHARD_COMMITTEE_PERIOD: + return + + # Initiate exit + initiate_validator_exit(state, validator_index) +``` + +## Testing + 
+*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure EIP-7002 testing only. +Modifications include: +1. Use `EIP7002_FORK_VERSION` as the previous and current fork version. +2. Utilize the EIP-7002 `BeaconBlockBody` when constructing the initial `latest_block_header`. + +```python +def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, + eth1_timestamp: uint64, + deposits: Sequence[Deposit], + execution_payload_header: ExecutionPayloadHeader=ExecutionPayloadHeader() + ) -> BeaconState: + fork = Fork( + previous_version=EIP7002_FORK_VERSION, # [Modified in EIP7002] for testing only + current_version=EIP7002_FORK_VERSION, # [Modified in EIP7002] + epoch=GENESIS_EPOCH, + ) + state = BeaconState( + genesis_time=eth1_timestamp + GENESIS_DELAY, + fork=fork, + eth1_data=Eth1Data(block_hash=eth1_block_hash, deposit_count=uint64(len(deposits))), + latest_block_header=BeaconBlockHeader(body_root=hash_tree_root(BeaconBlockBody())), + randao_mixes=[eth1_block_hash] * EPOCHS_PER_HISTORICAL_VECTOR, # Seed RANDAO with Eth1 entropy + ) + + # Process deposits + leaves = list(map(lambda deposit: deposit.data, deposits)) + for index, deposit in enumerate(deposits): + deposit_data_list = List[DepositData, 2**DEPOSIT_CONTRACT_TREE_DEPTH](*leaves[:index + 1]) + state.eth1_data.deposit_root = hash_tree_root(deposit_data_list) + process_deposit(state, deposit) + + # Process activations + for index, validator in enumerate(state.validators): + balance = state.balances[index] + validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE) + if validator.effective_balance == MAX_EFFECTIVE_BALANCE: + validator.activation_eligibility_epoch = GENESIS_EPOCH + validator.activation_epoch = GENESIS_EPOCH + + # Set genesis validators root for domain separation and chain versioning + state.genesis_validators_root = hash_tree_root(state.validators) + + # Fill in sync committees + # Note: A duplicate committee is assigned for 
the current and next committee at genesis + state.current_sync_committee = get_next_sync_committee(state) + state.next_sync_committee = get_next_sync_committee(state) + + # Initialize the execution payload header + state.latest_execution_payload_header = execution_payload_header + + return state +``` diff --git a/specs/_features/eip7002/fork.md b/specs/_features/eip7002/fork.md new file mode 100644 index 0000000000..87008de77b --- /dev/null +++ b/specs/_features/eip7002/fork.md @@ -0,0 +1,140 @@ +# EIP-7002 -- Fork Logic + +**Notice**: This document is a work-in-progress for researchers and implementers. + +## Table of contents + + + + +- [Introduction](#introduction) +- [Configuration](#configuration) +- [Helper functions](#helper-functions) + - [Misc](#misc) + - [Modified `compute_fork_version`](#modified-compute_fork_version) +- [Fork to EIP-7002](#fork-to-eip-7002) + - [Fork trigger](#fork-trigger) + - [Upgrading the state](#upgrading-the-state) + + + +## Introduction + +This document describes the process of EIP-7002 upgrade. + +## Configuration + +Warning: this configuration is not definitive. + +| Name | Value | +| - | - | +| `EIP7002_FORK_VERSION` | `Version('0x05000000')` | +| `EIP7002_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** | + +## Helper functions + +### Misc + +#### Modified `compute_fork_version` + +```python +def compute_fork_version(epoch: Epoch) -> Version: + """ + Return the fork version at the given ``epoch``. + """ + if epoch >= EIP7002_FORK_EPOCH: + return EIP7002_FORK_VERSION + if epoch >= CAPELLA_FORK_EPOCH: + return CAPELLA_FORK_VERSION + if epoch >= BELLATRIX_FORK_EPOCH: + return BELLATRIX_FORK_VERSION + if epoch >= ALTAIR_FORK_EPOCH: + return ALTAIR_FORK_VERSION + return GENESIS_FORK_VERSION +``` + +## Fork to EIP-7002 + +### Fork trigger + +TBD. This fork is defined for testing purposes, the EIP may be combined with other consensus-layer upgrade. +For now, we assume the condition will be triggered at epoch `EIP7002_FORK_EPOCH`. 
+ +Note that for the pure EIP-7002 networks, we don't apply `upgrade_to_eip7002` since it starts with EIP-7002 version logic. + +### Upgrading the state + +If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == EIP7002_FORK_EPOCH`, +an irregular state change is made to upgrade to EIP-7002. + +```python +def upgrade_to_eip7002(pre: capella.BeaconState) -> BeaconState: + epoch = capella.get_current_epoch(pre) + latest_execution_payload_header = ExecutionPayloadHeader( + parent_hash=pre.latest_execution_payload_header.parent_hash, + fee_recipient=pre.latest_execution_payload_header.fee_recipient, + state_root=pre.latest_execution_payload_header.state_root, + receipts_root=pre.latest_execution_payload_header.receipts_root, + logs_bloom=pre.latest_execution_payload_header.logs_bloom, + prev_randao=pre.latest_execution_payload_header.prev_randao, + block_number=pre.latest_execution_payload_header.block_number, + gas_limit=pre.latest_execution_payload_header.gas_limit, + gas_used=pre.latest_execution_payload_header.gas_used, + timestamp=pre.latest_execution_payload_header.timestamp, + extra_data=pre.latest_execution_payload_header.extra_data, + base_fee_per_gas=pre.latest_execution_payload_header.base_fee_per_gas, + block_hash=pre.latest_execution_payload_header.block_hash, + transactions_root=pre.latest_execution_payload_header.transactions_root, + withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, + exits_root=Root(), # [New in EIP-7002] + ) + post = BeaconState( + # Versioning + genesis_time=pre.genesis_time, + genesis_validators_root=pre.genesis_validators_root, + slot=pre.slot, + fork=Fork( + previous_version=pre.fork.current_version, + current_version=EIP7002_FORK_VERSION, # [Modified in EIP-7002] + epoch=epoch, + ), + # History + latest_block_header=pre.latest_block_header, + block_roots=pre.block_roots, + state_roots=pre.state_roots, + historical_roots=pre.historical_roots, + # Eth1 + eth1_data=pre.eth1_data, + 
eth1_data_votes=pre.eth1_data_votes, + eth1_deposit_index=pre.eth1_deposit_index, + # Registry + validators=pre.validators, + balances=pre.balances, + # Randomness + randao_mixes=pre.randao_mixes, + # Slashings + slashings=pre.slashings, + # Participation + previous_epoch_participation=pre.previous_epoch_participation, + current_epoch_participation=pre.current_epoch_participation, + # Finality + justification_bits=pre.justification_bits, + previous_justified_checkpoint=pre.previous_justified_checkpoint, + current_justified_checkpoint=pre.current_justified_checkpoint, + finalized_checkpoint=pre.finalized_checkpoint, + # Inactivity + inactivity_scores=pre.inactivity_scores, + # Sync + current_sync_committee=pre.current_sync_committee, + next_sync_committee=pre.next_sync_committee, + # Execution-layer + latest_execution_payload_header=latest_execution_payload_header, # [Modified in EIP-7002] + # Withdrawals + next_withdrawal_index=pre.next_withdrawal_index, + next_withdrawal_validator_index=pre.next_withdrawal_validator_index, + # Deep history valid from Capella onwards + historical_summaries=pre.historical_summaries, + ) + + return post +``` diff --git a/specs/_features/whisk/beacon-chain.md b/specs/_features/whisk/beacon-chain.md index f2a51e6223..c955585903 100644 --- a/specs/_features/whisk/beacon-chain.md +++ b/specs/_features/whisk/beacon-chain.md @@ -31,7 +31,7 @@ ## Introduction -This document details the beacon chain additions and changes of to support the Whisk SSLE, +This document details the beacon chain additions and changes of to support the Whisk SSLE. *Note:* This specification is built upon [Capella](../../capella/beacon-chain.md) and is under active development. 
@@ -94,7 +94,7 @@ def bytes_to_bls_field(b: Bytes32) -> BLSFieldElement: ### Curdleproofs and opening proofs -Note that Curdleproofs (Whisk Shuffle Proofs), the tracker opening proofs and all related data structures and verifier code (along with tests) is specified in [curdleproofs.pie](https://github.com/nalinbhardwaj/curdleproofs.pie/tree/verifier-only) repository. +Note that Curdleproofs (Whisk Shuffle Proofs), the tracker opening proofs and all related data structures and verifier code (along with tests) is specified in [curdleproofs.pie](https://github.com/nalinbhardwaj/curdleproofs.pie/tree/dev) repository. ```python def IsValidWhiskShuffleProof(pre_shuffle_trackers: Sequence[WhiskTracker], @@ -103,7 +103,7 @@ def IsValidWhiskShuffleProof(pre_shuffle_trackers: Sequence[WhiskTracker], shuffle_proof: WhiskShuffleProof) -> bool: """ Verify `post_shuffle_trackers` is a permutation of `pre_shuffle_trackers`. - Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/tree/verifier-only. + Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/blob/dev/curdleproofs/curdleproofs/whisk_interface.py. """ # pylint: disable=unused-argument return True @@ -115,7 +115,7 @@ def IsValidWhiskOpeningProof(tracker: WhiskTracker, tracker_proof: WhiskTrackerProof) -> bool: """ Verify knowledge of `k` such that `tracker.k_r_G == k * tracker.r_G` and `k_commitment == k * BLS_G1_GENERATOR`. - Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/tree/verifier-only. + Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/blob/dev/curdleproofs/curdleproofs/whisk_interface.py. 
""" # pylint: disable=unused-argument return True @@ -150,7 +150,7 @@ class BeaconState(Container): eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH] eth1_deposit_index: uint64 # Registry - validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] # [Modified in Whisk] + validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT] # Randomness randao_mixes: Vector[Bytes32, EPOCHS_PER_HISTORICAL_VECTOR] @@ -304,7 +304,7 @@ class BeaconBlockBody(capella.BeaconBlockBody): ```python def get_shuffle_indices(randao_reveal: BLSSignature) -> Sequence[uint64]: """ - Given a `randao_reveal` return the list of indices that got shuffled from the entire candidate set + Given a `randao_reveal` return the list of indices that got shuffled from the entire candidate set. """ indices = [] for i in range(0, WHISK_VALIDATORS_PER_SHUFFLE): @@ -461,7 +461,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex: ## Testing -*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure Whisk testing only. +*Note*: The function `initialize_beacon_state_from_eth1` is modified purely for Whisk testing. ```python def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32, diff --git a/specs/deneb/beacon-chain.md b/specs/deneb/beacon-chain.md index 321dfb25e3..2328b20e03 100644 --- a/specs/deneb/beacon-chain.md +++ b/specs/deneb/beacon-chain.md @@ -85,7 +85,7 @@ Deneb is a consensus-layer upgrade containing a number of features. Including: | `MAX_BLOBS_PER_BLOCK` | `uint64(6)` | *[New in Deneb:EIP4844]* maximum number of blobs in a single block limited by `MAX_BLOB_COMMITMENTS_PER_BLOCK` | *Note*: The blob transactions are packed into the execution payload by the EL/builder with their corresponding blobs being independently transmitted -and are limited by `MAX_DATA_GAS_PER_BLOCK // DATA_GAS_PER_BLOB`. However the CL limit is independently defined by `MAX_BLOBS_PER_BLOCK`. 
+and are limited by `MAX_BLOB_GAS_PER_BLOCK // GAS_PER_BLOB`. However the CL limit is independently defined by `MAX_BLOBS_PER_BLOCK`. ## Configuration @@ -136,8 +136,8 @@ class ExecutionPayload(Container): block_hash: Hash32 # Hash of execution block transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD] withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD] - data_gas_used: uint64 # [New in Deneb:EIP4844] - excess_data_gas: uint64 # [New in Deneb:EIP4844] + blob_gas_used: uint64 # [New in Deneb:EIP4844] + excess_blob_gas: uint64 # [New in Deneb:EIP4844] ``` #### `ExecutionPayloadHeader` @@ -161,8 +161,8 @@ class ExecutionPayloadHeader(Container): block_hash: Hash32 # Hash of execution block transactions_root: Root withdrawals_root: Root - data_gas_used: uint64 # [New in Deneb:EIP4844] - excess_data_gas: uint64 # [New in Deneb:EIP4844] + blob_gas_used: uint64 # [New in Deneb:EIP4844] + excess_blob_gas: uint64 # [New in Deneb:EIP4844] ``` ## Helper functions @@ -385,8 +385,8 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi block_hash=payload.block_hash, transactions_root=hash_tree_root(payload.transactions), withdrawals_root=hash_tree_root(payload.withdrawals), - data_gas_used=payload.data_gas_used, # [New in Deneb:EIP4844] - excess_data_gas=payload.excess_data_gas, # [New in Deneb:EIP4844] + blob_gas_used=payload.blob_gas_used, # [New in Deneb:EIP4844] + excess_blob_gas=payload.excess_blob_gas, # [New in Deneb:EIP4844] ) ``` diff --git a/specs/deneb/fork.md b/specs/deneb/fork.md index 08af2fd351..9c314052b1 100644 --- a/specs/deneb/fork.md +++ b/specs/deneb/fork.md @@ -83,8 +83,8 @@ def upgrade_to_deneb(pre: capella.BeaconState) -> BeaconState: block_hash=pre.latest_execution_payload_header.block_hash, transactions_root=pre.latest_execution_payload_header.transactions_root, withdrawals_root=pre.latest_execution_payload_header.withdrawals_root, - data_gas_used=uint64(0), # [New in Deneb:EIP4844] - 
excess_data_gas=uint64(0), # [New in Deneb:EIP4844] + blob_gas_used=uint64(0), # [New in Deneb:EIP4844] + excess_blob_gas=uint64(0), # [New in Deneb:EIP4844] ) post = BeaconState( # Versioning diff --git a/specs/deneb/light-client/fork.md b/specs/deneb/light-client/fork.md index f4fd1b396b..2dce4778ed 100644 --- a/specs/deneb/light-client/fork.md +++ b/specs/deneb/light-client/fork.md @@ -41,8 +41,8 @@ def upgrade_lc_header_to_deneb(pre: capella.LightClientHeader) -> LightClientHea block_hash=pre.execution.block_hash, transactions_root=pre.execution.transactions_root, withdrawals_root=pre.execution.withdrawals_root, - data_gas_used=uint64(0), # [New in Deneb:EIP4844] - excess_data_gas=uint64(0), # [New in Deneb:EIP4844] + blob_gas_used=uint64(0), # [New in Deneb:EIP4844] + excess_blob_gas=uint64(0), # [New in Deneb:EIP4844] ), execution_branch=pre.execution_branch, ) diff --git a/specs/deneb/light-client/full-node.md b/specs/deneb/light-client/full-node.md index 876a6c2580..281348167a 100644 --- a/specs/deneb/light-client/full-node.md +++ b/specs/deneb/light-client/full-node.md @@ -49,8 +49,8 @@ def block_to_light_client_header(block: SignedBeaconBlock) -> LightClientHeader: # [New in Deneb:EIP4844] if epoch >= DENEB_FORK_EPOCH: - execution_header.data_gas_used = payload.data_gas_used - execution_header.excess_data_gas = payload.excess_data_gas + execution_header.blob_gas_used = payload.blob_gas_used + execution_header.excess_blob_gas = payload.excess_blob_gas execution_branch = compute_merkle_proof_for_block_body(block.message.body, EXECUTION_PAYLOAD_INDEX) else: diff --git a/specs/deneb/light-client/sync-protocol.md b/specs/deneb/light-client/sync-protocol.md index 38909ddbf5..3b5663fa52 100644 --- a/specs/deneb/light-client/sync-protocol.md +++ b/specs/deneb/light-client/sync-protocol.md @@ -68,7 +68,7 @@ def is_valid_light_client_header(header: LightClientHeader) -> bool: # [New in Deneb:EIP4844] if epoch < DENEB_FORK_EPOCH: - if header.execution.data_gas_used 
!= uint64(0) or header.execution.excess_data_gas != uint64(0): + if header.execution.blob_gas_used != uint64(0) or header.execution.excess_blob_gas != uint64(0): return False if epoch < CAPELLA_FORK_EPOCH: diff --git a/tests/core/pyspec/eth2spec/VERSION.txt b/tests/core/pyspec/eth2spec/VERSION.txt index c431216bf2..1b4859bc51 100644 --- a/tests/core/pyspec/eth2spec/VERSION.txt +++ b/tests/core/pyspec/eth2spec/VERSION.txt @@ -1 +1 @@ -1.4.0-beta.0 +1.4.0-beta.1 diff --git a/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py index 2562c7fad9..3ab2e9eea8 100644 --- a/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py +++ b/tests/core/pyspec/eth2spec/gen_helpers/gen_base/gen_runner.py @@ -171,7 +171,6 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]): help="if set re-generate and overwrite test files if they already exist", ) parser.add_argument( - "-l", "--preset-list", dest="preset_list", nargs='*', @@ -179,6 +178,14 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]): required=False, help="specify presets to run with. Allows all if no preset names are specified.", ) + parser.add_argument( + "--fork-list", + dest="fork_list", + nargs='*', + type=str, + required=False, + help="specify forks to run with. 
Allows all if no fork names are specified.",
+    )
     parser.add_argument(
         "-c",
         "--collect-only",
@@ -199,6 +206,7 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
     print(f"Generating tests into {output_dir}")
     print(f'Error log file: {log_file}')
 
+    # preset_list arg
     presets = args.preset_list
     if presets is None:
         presets = []
@@ -206,6 +214,14 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
     if len(presets) != 0:
         print(f"Filtering test-generator runs to only include presets: {', '.join(presets)}")
 
+    # fork_list arg
+    forks = args.fork_list
+    if forks is None:
+        forks = []
+
+    if len(forks) != 0:
+        print(f"Filtering test-generator runs to only include forks: {', '.join(forks)}")
+
     collect_only = args.collect_only
     diagnostics_obj = Diagnostics()
@@ -224,6 +240,10 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
         if len(presets) != 0 and test_case.preset_name not in presets:
             continue
 
+        # If fork list is assigned, filter by forks.
+ if len(forks) != 0 and test_case.fork_name not in forks: + continue + case_dir = get_test_case_dir(test_case, output_dir) print(f"Collected test at: {case_dir}") diagnostics_obj.collected_test_count += 1 diff --git a/tests/core/pyspec/eth2spec/test/bellatrix/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/bellatrix/sanity/test_blocks.py index 75dfa0c9cf..6ac76ca596 100644 --- a/tests/core/pyspec/eth2spec/test/bellatrix/sanity/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/bellatrix/sanity/test_blocks.py @@ -33,7 +33,7 @@ def test_empty_block_transition_no_tx(spec, state): @with_bellatrix_and_later @spec_state_test -def test_empty_block_transition_randomized_payload(spec, state): +def test_block_transition_randomized_payload(spec, state): yield 'pre', state block = build_empty_block_for_next_slot(spec, state) diff --git a/tests/core/pyspec/eth2spec/test/context.py b/tests/core/pyspec/eth2spec/test/context.py index 48f6857f64..0c9d4a1ec5 100644 --- a/tests/core/pyspec/eth2spec/test/context.py +++ b/tests/core/pyspec/eth2spec/test/context.py @@ -9,12 +9,13 @@ from eth2spec.capella import mainnet as spec_capella_mainnet, minimal as spec_capella_minimal from eth2spec.deneb import mainnet as spec_deneb_mainnet, minimal as spec_deneb_minimal from eth2spec.eip6110 import mainnet as spec_eip6110_mainnet, minimal as spec_eip6110_minimal +from eth2spec.eip7002 import mainnet as spec_eip7002_mainnet, minimal as spec_eip7002_minimal from eth2spec.utils import bls from .exceptions import SkippedTest from .helpers.constants import ( PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, - EIP6110, + EIP6110, EIP7002, MINIMAL, MAINNET, ALL_PHASES, ALL_FORK_UPGRADES, @@ -83,6 +84,7 @@ class ForkMeta: CAPELLA: spec_capella_minimal, DENEB: spec_deneb_minimal, EIP6110: spec_eip6110_minimal, + EIP7002: spec_eip7002_minimal, }, MAINNET: { PHASE0: spec_phase0_mainnet, @@ -91,6 +93,7 @@ class ForkMeta: CAPELLA: spec_capella_mainnet, DENEB: spec_deneb_mainnet, EIP6110: 
spec_eip6110_mainnet, + EIP7002: spec_eip7002_mainnet, }, } @@ -541,6 +544,7 @@ def wrapper(*args, spec: Spec, **kw): with_capella_and_later = with_all_phases_from(CAPELLA) with_deneb_and_later = with_all_phases_from(DENEB) with_eip6110_and_later = with_all_phases_from(EIP6110) +with_eip7002_and_later = with_all_phases_from(EIP7002) class quoted_str(str): diff --git a/tests/core/pyspec/eth2spec/test/deneb/block_processing/test_process_execution_payload.py b/tests/core/pyspec/eth2spec/test/deneb/block_processing/test_process_execution_payload.py index 988070278d..b0937aac96 100644 --- a/tests/core/pyspec/eth2spec/test/deneb/block_processing/test_process_execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/deneb/block_processing/test_process_execution_payload.py @@ -59,6 +59,13 @@ def verify_and_notify_new_payload(self, new_payload_request) -> bool: assert state.latest_execution_payload_header == get_execution_payload_header(spec, body.execution_payload) +""" +Tests with incorrect blob transactions in the execution payload, but the execution client returns +VALID, and the purpose of these tests is that the beacon client must not reject the block by +attempting to do a validation of its own. +""" + + @with_deneb_and_later @spec_state_test def test_incorrect_blob_tx_type(spec, state): @@ -78,14 +85,14 @@ def test_incorrect_blob_tx_type(spec, state): @with_deneb_and_later @spec_state_test -def test_incorrect_transaction_length_1_byte(spec, state): +def test_incorrect_transaction_length_1_extra_byte(spec, state): """ The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default. 
""" execution_payload = build_empty_execution_payload(spec, state) opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec) - opaque_tx = opaque_tx + b'\x12' # incorrect tx length + opaque_tx = opaque_tx + b'\x12' # incorrect tx length, longer execution_payload.transactions = [opaque_tx] execution_payload.block_hash = compute_el_block_hash(spec, execution_payload) @@ -95,7 +102,41 @@ def test_incorrect_transaction_length_1_byte(spec, state): @with_deneb_and_later @spec_state_test -def test_incorrect_transaction_length_32_bytes(spec, state): +def test_incorrect_transaction_length_1_byte_short(spec, state): + """ + The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default. + """ + execution_payload = build_empty_execution_payload(spec, state) + + opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec) + opaque_tx = opaque_tx[:-1] # incorrect tx length, shorter + + execution_payload.transactions = [opaque_tx] + execution_payload.block_hash = compute_el_block_hash(spec, execution_payload) + + yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments) + + +@with_deneb_and_later +@spec_state_test +def test_incorrect_transaction_length_empty(spec, state): + """ + The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default. + """ + execution_payload = build_empty_execution_payload(spec, state) + + opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec) + opaque_tx = opaque_tx[0:0] # incorrect tx length, empty + + execution_payload.transactions = [opaque_tx] + execution_payload.block_hash = compute_el_block_hash(spec, execution_payload) + + yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments) + + +@with_deneb_and_later +@spec_state_test +def test_incorrect_transaction_length_32_extra_bytes(spec, state): """ The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default. 
""" @@ -110,6 +151,22 @@ def test_incorrect_transaction_length_32_bytes(spec, state): yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments) +@with_deneb_and_later +@spec_state_test +def test_no_transactions_with_commitments(spec, state): + """ + The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default. + """ + execution_payload = build_empty_execution_payload(spec, state) + + _, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec) + + execution_payload.transactions = [] + execution_payload.block_hash = compute_el_block_hash(spec, execution_payload) + + yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments) + + @with_deneb_and_later @spec_state_test def test_incorrect_commitment(spec, state): diff --git a/tests/core/pyspec/eth2spec/test/deneb/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/deneb/sanity/test_blocks.py index c64efe747b..8598530d9a 100644 --- a/tests/core/pyspec/eth2spec/test/deneb/sanity/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/deneb/sanity/test_blocks.py @@ -1,38 +1,45 @@ +import random + from eth2spec.test.helpers.state import ( state_transition_and_sign_block, - next_epoch_via_block, - transition_to, ) from eth2spec.test.helpers.block import ( build_empty_block_for_next_slot, ) from eth2spec.test.context import ( - DENEB, spec_state_test, - spec_configured_state_test, with_deneb_and_later, - with_phases, ) from eth2spec.test.helpers.execution_payload import ( compute_el_block_hash, -) -from eth2spec.test.helpers.attestations import ( - get_valid_attestation, + get_random_tx, ) from eth2spec.test.helpers.sharding import ( get_sample_opaque_tx, ) -def run_block_with_blobs(spec, state, blob_count, data_gas_used=1, excess_data_gas=1, valid=True): +def run_block_with_blobs(spec, state, blob_count, tx_count=1, blob_gas_used=1, excess_blob_gas=1, + non_blob_tx_count=0, rng=random.Random(7777), valid=True): yield 
'pre', state block = build_empty_block_for_next_slot(spec, state) - opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec, blob_count=blob_count) + txs = [] + blob_kzg_commitments = [] + for _ in range(tx_count): + opaque_tx, _, commits, _ = get_sample_opaque_tx(spec, blob_count=blob_count) + txs.append(opaque_tx) + blob_kzg_commitments += commits + + for _ in range(non_blob_tx_count): + txs.append(get_random_tx(rng)) + + rng.shuffle(txs) + block.body.blob_kzg_commitments = blob_kzg_commitments - block.body.execution_payload.transactions = [opaque_tx] - block.body.execution_payload.data_gas_used = data_gas_used - block.body.execution_payload.excess_data_gas = excess_data_gas + block.body.execution_payload.transactions = txs + block.body.execution_payload.blob_gas_used = blob_gas_used + block.body.execution_payload.excess_blob_gas = excess_blob_gas block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) if valid: @@ -58,43 +65,41 @@ def test_one_blob(spec, state): @with_deneb_and_later @spec_state_test -def test_max_blobs_per_block(spec, state): - yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK) +def test_one_blob_two_txs(spec, state): + yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=2) @with_deneb_and_later @spec_state_test -def test_invalid_exceed_max_blobs_per_block(spec, state): - yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK + 1, valid=False) +def test_one_blob_max_txs(spec, state): + yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=spec.MAX_BLOBS_PER_BLOCK) -@with_phases([DENEB]) -@spec_configured_state_test({ - 'DENEB_FORK_EPOCH': 2, -}) -def test_include_attestation_from_previous_fork_with_new_range(spec, state): - # Transition to the epoch prior to the fork epoch - next_epoch_via_block(spec, state) +@with_deneb_and_later +@spec_state_test +def test_invalid_one_blob_max_plus_one_txs(spec, state): 
+ yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=spec.MAX_BLOBS_PER_BLOCK + 1, valid=False) - # Generate an attestation for slot 0 of this epoch - attestation = get_valid_attestation(spec, state, signed=True) - # Transition to second to last slot in `DENEB_FORK_EPOCH` - next_epoch_via_block(spec, state) - current_epoch = spec.get_current_epoch(state) - assert current_epoch == spec.config.DENEB_FORK_EPOCH - penultimate_slot = spec.compute_start_slot_at_epoch(current_epoch + 1) - 2 - transition_to(spec, state, penultimate_slot) +@with_deneb_and_later +@spec_state_test +def test_max_blobs_per_block(spec, state): + yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK) - # Ensure the new state is in the increased EIP-7045 slot inclusion range - assert penultimate_slot - attestation.data.slot > spec.SLOTS_PER_EPOCH - block = build_empty_block_for_next_slot(spec, state) - block.body.attestations.append(attestation) +@with_deneb_and_later +@spec_state_test +def test_invalid_max_blobs_per_block_two_txs(spec, state): + yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK, tx_count=2, valid=False) - yield 'pre', state - signed_block = state_transition_and_sign_block(spec, state, block) +@with_deneb_and_later +@spec_state_test +def test_invalid_exceed_max_blobs_per_block(spec, state): + yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK + 1, valid=False) - yield 'blocks', [signed_block] - yield 'post', state + +@with_deneb_and_later +@spec_state_test +def test_mix_blob_tx_and_non_blob_tx(spec, state): + yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=1, non_blob_tx_count=1) diff --git a/tests/core/pyspec/eth2spec/test/deneb/transition/test_operations.py b/tests/core/pyspec/eth2spec/test/deneb/transition/test_operations.py index f945afa8f2..27c27c8c6d 100644 --- a/tests/core/pyspec/eth2spec/test/deneb/transition/test_operations.py +++ 
b/tests/core/pyspec/eth2spec/test/deneb/transition/test_operations.py @@ -3,12 +3,25 @@ always_bls, with_fork_metas, ) +from eth2spec.test.helpers.attestations import ( + get_valid_attestation, +) +from eth2spec.test.helpers.block import ( + build_empty_block_for_next_slot, +) from eth2spec.test.helpers.constants import ( AFTER_DENEB_PRE_POST_FORKS, ) +from eth2spec.test.helpers.state import ( + next_epoch_via_block, + state_transition_and_sign_block, + transition_to, +) from eth2spec.test.helpers.fork_transition import ( OperationType, + do_fork, run_transition_with_operation, + transition_until_fork, ) @@ -52,3 +65,38 @@ def test_transition_with_btec_right_before_fork(state, fork_epoch, spec, post_sp operation_type=OperationType.BLS_TO_EXECUTION_CHANGE, operation_at_slot=fork_epoch * spec.SLOTS_PER_EPOCH - 1, ) + + +@with_fork_metas([ForkMeta(pre_fork_name=pre, post_fork_name=post, fork_epoch=2) + for pre, post in AFTER_DENEB_PRE_POST_FORKS]) +def test_transition_attestation_from_previous_fork_with_new_range( + state, fork_epoch, spec, post_spec, pre_tag, post_tag): + """ + [EIP-7045] test + """ + # Transition to the epoch prior to the fork epoch + next_epoch_via_block(spec, state) + + # Generate an attestation for slot 0 of this epoch + attestation = get_valid_attestation(spec, state, signed=True) + + yield 'pre', state + + # Transition to the fork epoch with a block + transition_until_fork(spec, state, fork_epoch) + state, fork_block = do_fork(state, spec, post_spec, fork_epoch) + current_epoch = spec.get_current_epoch(state) + assert current_epoch == fork_epoch + # Transition to second to last slot in `fork_epoch` + penultimate_slot = post_spec.compute_start_slot_at_epoch(current_epoch + 1) - 2 + transition_to(post_spec, state, penultimate_slot) + + # Ensure the new state is in the increased EIP-7045 slot inclusion range + assert penultimate_slot - attestation.data.slot > post_spec.SLOTS_PER_EPOCH + + block = build_empty_block_for_next_slot(post_spec, state) + 
block.body.attestations.append(attestation) + signed_block = state_transition_and_sign_block(post_spec, state, block) + + yield 'blocks', [post_tag(fork_block), post_tag(signed_block)] + yield 'post', state diff --git a/tests/core/pyspec/eth2spec/test/eip7002/__init__.py b/tests/core/pyspec/eth2spec/test/eip7002/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/pyspec/eth2spec/test/eip7002/block_processing/__init__.py b/tests/core/pyspec/eth2spec/test/eip7002/block_processing/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/pyspec/eth2spec/test/eip7002/block_processing/test_process_execution_layer_exit.py b/tests/core/pyspec/eth2spec/test/eip7002/block_processing/test_process_execution_layer_exit.py new file mode 100644 index 0000000000..bd944a1fa3 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/eip7002/block_processing/test_process_execution_layer_exit.py @@ -0,0 +1,107 @@ +from eth2spec.test.context import spec_state_test, with_eip7002_and_later +from eth2spec.test.helpers.execution_layer_exits import run_execution_layer_exit_processing +from eth2spec.test.helpers.withdrawals import set_eth1_withdrawal_credential_with_balance + + +@with_eip7002_and_later +@spec_state_test +def test_basic_exit(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + current_epoch = spec.get_current_epoch(state) + validator_index = spec.get_active_validator_indices(state, current_epoch)[0] + validator_pubkey = state.validators[validator_index].pubkey + address = b'\x22' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + + yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit) + + +@with_eip7002_and_later 
+@spec_state_test +def test_incorrect_source_address(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + current_epoch = spec.get_current_epoch(state) + validator_index = spec.get_active_validator_indices(state, current_epoch)[0] + validator_pubkey = state.validators[validator_index].pubkey + address = b'\x22' * 20 + incorrect_address = b'\x33' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + execution_layer_exit = spec.ExecutionLayerExit( + source_address=incorrect_address, + validator_pubkey=validator_pubkey, + ) + + yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False) + + +@with_eip7002_and_later +@spec_state_test +def test_incorrect_withdrawal_credential_prefix(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + current_epoch = spec.get_current_epoch(state) + validator_index = spec.get_active_validator_indices(state, current_epoch)[0] + validator_pubkey = state.validators[validator_index].pubkey + address = b'\x22' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + # Set incorrect prefix + state.validators[validator_index].withdrawal_credentials = ( + spec.BLS_WITHDRAWAL_PREFIX + + state.validators[validator_index].withdrawal_credentials[1:] + ) + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + + yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False) + + +@with_eip7002_and_later +@spec_state_test +def test_on_exit_initiated_validator(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + 
current_epoch = spec.get_current_epoch(state) + validator_index = spec.get_active_validator_indices(state, current_epoch)[0] + validator_pubkey = state.validators[validator_index].pubkey + address = b'\x22' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + # Initiate exit earlier + spec.initiate_validator_exit(state, validator_index) + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + + yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False) + + +@with_eip7002_and_later +@spec_state_test +def test_activation_epoch_less_than_shard_committee_period(spec, state): + current_epoch = spec.get_current_epoch(state) + validator_index = spec.get_active_validator_indices(state, current_epoch)[0] + validator_pubkey = state.validators[validator_index].pubkey + address = b'\x22' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + + assert spec.get_current_epoch(state) < ( + state.validators[validator_index].activation_epoch + spec.config.SHARD_COMMITTEE_PERIOD + ) + + yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False) diff --git a/tests/core/pyspec/eth2spec/test/eip7002/sanity/__init__.py b/tests/core/pyspec/eth2spec/test/eip7002/sanity/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/pyspec/eth2spec/test/eip7002/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/eip7002/sanity/test_blocks.py new file mode 100644 index 0000000000..29a03fee01 --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/eip7002/sanity/test_blocks.py @@ -0,0 +1,174 @@ +from eth2spec.test.helpers.block import ( + build_empty_block_for_next_slot +) +from eth2spec.test.context import ( + spec_state_test, + 
with_eip7002_and_later, +) +from eth2spec.test.helpers.bls_to_execution_changes import ( + get_signed_address_change, +) +from eth2spec.test.helpers.execution_payload import ( + compute_el_block_hash, +) +from eth2spec.test.helpers.voluntary_exits import ( + prepare_signed_exits, +) +from eth2spec.test.helpers.state import ( + state_transition_and_sign_block, +) +from eth2spec.test.helpers.withdrawals import ( + set_eth1_withdrawal_credential_with_balance, +) + + +@with_eip7002_and_later +@spec_state_test +def test_basic_el_exit(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + yield 'pre', state + + validator_index = 0 + address = b'\x22' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH + + validator_pubkey = state.validators[validator_index].pubkey + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.execution_payload.exits = [execution_layer_exit] + block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) + signed_block = state_transition_and_sign_block(spec, state, block) + + yield 'blocks', [signed_block] + yield 'post', state + + assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH + + +@with_eip7002_and_later +@spec_state_test +def test_basic_btec_and_el_exit_in_same_block(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + yield 'pre', state + validator_index = 0 + assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH + + block = build_empty_block_for_next_slot(spec, state) + + address = b'\x22' 
* 20 + signed_address_change = get_signed_address_change( + spec, + state, + validator_index=validator_index, + to_execution_address=address, + ) + block.body.bls_to_execution_changes = [signed_address_change] + + validator_pubkey = state.validators[validator_index].pubkey + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + block.body.execution_payload.exits = [execution_layer_exit] + + block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) + signed_block = state_transition_and_sign_block(spec, state, block) + + yield 'blocks', [signed_block] + yield 'post', state + + # BTEC is executed after EL-Exit, so it doesn't take effect. `initiate_validator_exit` is not called. + validator = state.validators[validator_index] + assert validator.exit_epoch == spec.FAR_FUTURE_EPOCH + # Check if BTEC is effect + is_execution_address = validator.withdrawal_credentials[:1] == spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + is_correct_source_address = validator.withdrawal_credentials[12:] == address + assert is_execution_address and is_correct_source_address + + +@with_eip7002_and_later +@spec_state_test +def test_basic_btec_before_el_exit(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + yield 'pre', state + + validator_index = 0 + assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH + + # block_1 contains a BTEC operation of the given validator + address = b'\x22' * 20 + signed_address_change = get_signed_address_change( + spec, + state, + validator_index=validator_index, + to_execution_address=address, + ) + block_1 = build_empty_block_for_next_slot(spec, state) + block_1.body.bls_to_execution_changes = [signed_address_change] + signed_block_1 = state_transition_and_sign_block(spec, state, block_1) + + validator = 
state.validators[validator_index] + assert validator.exit_epoch == spec.FAR_FUTURE_EPOCH + # Check if BTEC is effect + is_execution_address = validator.withdrawal_credentials[:1] == spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + is_correct_source_address = validator.withdrawal_credentials[12:] == address + assert is_execution_address and is_correct_source_address + + # block_2 contains an EL-Exit operation of the given validator + validator_pubkey = state.validators[validator_index].pubkey + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + block_2 = build_empty_block_for_next_slot(spec, state) + block_2.body.execution_payload.exits = [execution_layer_exit] + block_2.body.execution_payload.block_hash = compute_el_block_hash(spec, block_2.body.execution_payload) + signed_block_2 = state_transition_and_sign_block(spec, state, block_2) + + yield 'blocks', [signed_block_1, signed_block_2] + yield 'post', state + + assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH + + +@with_eip7002_and_later +@spec_state_test +def test_cl_exit_and_el_exit_in_same_block(spec, state): + # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit + state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH + + yield 'pre', state + + validator_index = 0 + address = b'\x22' * 20 + set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address) + assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH + + # CL-Exit + signed_voluntary_exits = prepare_signed_exits(spec, state, indices=[validator_index]) + # EL-Exit + validator_pubkey = state.validators[validator_index].pubkey + execution_layer_exit = spec.ExecutionLayerExit( + source_address=address, + validator_pubkey=validator_pubkey, + ) + block = build_empty_block_for_next_slot(spec, state) + block.body.voluntary_exits = signed_voluntary_exits + block.body.execution_payload.exits = 
[execution_layer_exit] + block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) + signed_block = state_transition_and_sign_block(spec, state, block) + + yield 'blocks', [signed_block] + yield 'post', state + + assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH diff --git a/tests/core/pyspec/eth2spec/test/helpers/constants.py b/tests/core/pyspec/eth2spec/test/helpers/constants.py index 049c354caf..82e4f9d0a5 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/constants.py +++ b/tests/core/pyspec/eth2spec/test/helpers/constants.py @@ -17,6 +17,7 @@ CUSTODY_GAME = SpecForkName('custody_game') DAS = SpecForkName('das') EIP6110 = SpecForkName('eip6110') +EIP7002 = SpecForkName('eip7002') # # SpecFork settings @@ -32,6 +33,7 @@ DENEB, # Experimental patches EIP6110, + EIP7002, ) # The forks that have light client specs LIGHT_CLIENT_TESTING_FORKS = (*[item for item in MAINNET_FORKS if item != PHASE0], DENEB) diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_layer_exits.py b/tests/core/pyspec/eth2spec/test/helpers/execution_layer_exits.py new file mode 100644 index 0000000000..e0dda75d1d --- /dev/null +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_layer_exits.py @@ -0,0 +1,39 @@ +from eth2spec.test.context import expect_assertion_error +from eth2spec.test.helpers.state import get_validator_index_by_pubkey + + +# +# Run processing +# + + +def run_execution_layer_exit_processing(spec, state, execution_layer_exit, valid=True, success=True): + """ + Run ``process_execution_layer_exit``, yielding: + - pre-state ('pre') + - execution_layer_exit ('execution_layer_exit') + - post-state ('post'). 
+ If ``valid == False``, run expecting ``AssertionError`` + If ``success == False``, it doesn't initiate exit successfully + """ + validator_index = get_validator_index_by_pubkey(state, execution_layer_exit.validator_pubkey) + + yield 'pre', state + yield 'execution_layer_exit', execution_layer_exit + + if not valid: + expect_assertion_error(lambda: spec.process_execution_layer_exit(state, execution_layer_exit)) + yield 'post', None + return + + pre_exit_epoch = state.validators[validator_index].exit_epoch + + spec.process_execution_layer_exit(state, execution_layer_exit) + + yield 'post', state + + if success: + assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH + assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH + else: + assert state.validators[validator_index].exit_epoch == pre_exit_epoch diff --git a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py index b08b1975eb..adec1d781c 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py +++ b/tests/core/pyspec/eth2spec/test/helpers/execution_payload.py @@ -8,6 +8,7 @@ is_post_capella, is_post_deneb, is_post_eip6110, + is_post_eip7002, ) @@ -31,10 +32,12 @@ def get_execution_payload_header(spec, execution_payload): if is_post_capella(spec): payload_header.withdrawals_root = spec.hash_tree_root(execution_payload.withdrawals) if is_post_deneb(spec): - payload_header.data_gas_used = execution_payload.data_gas_used - payload_header.excess_data_gas = execution_payload.excess_data_gas + payload_header.blob_gas_used = execution_payload.blob_gas_used + payload_header.excess_blob_gas = execution_payload.excess_blob_gas if is_post_eip6110(spec): payload_header.deposit_receipts_root = spec.hash_tree_root(execution_payload.deposit_receipts) + if is_post_eip7002(spec): + payload_header.exits_root = spec.hash_tree_root(execution_payload.exits) return payload_header @@ -56,7 +59,8 @@ def 
compute_el_header_block_hash(spec, payload_header, transactions_trie_root, withdrawals_trie_root=None, - deposit_receipts_trie_root=None): + deposit_receipts_trie_root=None, + exits_trie_root=None): """ Computes the RLP execution block hash described by an `ExecutionPayloadHeader`. """ @@ -98,13 +102,16 @@ def compute_el_header_block_hash(spec, # withdrawals_root execution_payload_header_rlp.append((Binary(32, 32), withdrawals_trie_root)) if is_post_deneb(spec): - # excess_data_gas - execution_payload_header_rlp.append((big_endian_int, payload_header.data_gas_used)) - execution_payload_header_rlp.append((big_endian_int, payload_header.excess_data_gas)) + # excess_blob_gas + execution_payload_header_rlp.append((big_endian_int, payload_header.blob_gas_used)) + execution_payload_header_rlp.append((big_endian_int, payload_header.excess_blob_gas)) if is_post_eip6110(spec): # deposit_receipts_root assert deposit_receipts_trie_root is not None execution_payload_header_rlp.append((Binary(32, 32), deposit_receipts_trie_root)) + if is_post_eip7002(spec): + # exits_trie_root + execution_payload_header_rlp.append((Binary(32, 32), exits_trie_root)) sedes = List([schema for schema, _ in execution_payload_header_rlp]) values = [value for _, value in execution_payload_header_rlp] @@ -114,7 +121,7 @@ def compute_el_header_block_hash(spec, # https://eips.ethereum.org/EIPS/eip-4895 -def get_withdrawal_rlp(spec, withdrawal): +def get_withdrawal_rlp(withdrawal): withdrawal_rlp = [ # index (big_endian_int, withdrawal.index), @@ -131,6 +138,20 @@ def get_withdrawal_rlp(spec, withdrawal): return encode(values, sedes) +# https://eips.ethereum.org/EIPS/eip-7002 +def get_exit_rlp(exit): + exit_rlp = [ + # source_address + (Binary(20, 20), exit.source_address), + # validator_pubkey + (Binary(48, 48), exit.validator_pubkey), + ] + + sedes = List([schema for schema, _ in exit_rlp]) + values = [value for _, value in exit_rlp] + return encode(values, sedes) + + def get_deposit_receipt_rlp(spec, 
deposit_receipt): deposit_receipt_rlp = [ # pubkey @@ -155,13 +176,17 @@ def compute_el_block_hash(spec, payload): withdrawals_trie_root = None deposit_receipts_trie_root = None + exits_trie_root = None if is_post_capella(spec): - withdrawals_encoded = [get_withdrawal_rlp(spec, withdrawal) for withdrawal in payload.withdrawals] + withdrawals_encoded = [get_withdrawal_rlp(withdrawal) for withdrawal in payload.withdrawals] withdrawals_trie_root = compute_trie_root_from_indexed_data(withdrawals_encoded) if is_post_eip6110(spec): deposit_receipts_encoded = [get_deposit_receipt_rlp(spec, receipt) for receipt in payload.deposit_receipts] deposit_receipts_trie_root = compute_trie_root_from_indexed_data(deposit_receipts_encoded) + if is_post_eip7002(spec): + exits_encoded = [get_exit_rlp(exit) for exit in payload.exits] + exits_trie_root = compute_trie_root_from_indexed_data(exits_encoded) payload_header = get_execution_payload_header(spec, payload) @@ -171,6 +196,7 @@ def compute_el_block_hash(spec, payload): transactions_trie_root, withdrawals_trie_root, deposit_receipts_trie_root, + exits_trie_root, ) @@ -203,8 +229,8 @@ def build_empty_execution_payload(spec, state, randao_mix=None): if is_post_capella(spec): payload.withdrawals = spec.get_expected_withdrawals(state) if is_post_deneb(spec): - payload.data_gas_used = 0 - payload.excess_data_gas = 0 + payload.blob_gas_used = 0 + payload.excess_blob_gas = 0 if is_post_eip6110(spec): # just to be clear payload.deposit_receipts = [] @@ -233,7 +259,7 @@ def build_randomized_execution_payload(spec, state, rng): num_transactions = rng.randint(0, 100) execution_payload.transactions = [ - spec.Transaction(get_random_bytes_list(rng, rng.randint(0, 1000))) + get_random_tx(rng) for _ in range(num_transactions) ] @@ -264,3 +290,7 @@ def build_state_with_execution_payload_header(spec, state, execution_payload_hea pre_state.latest_execution_payload_header = execution_payload_header return pre_state + + +def get_random_tx(rng): + 
return get_random_bytes_list(rng, rng.randint(0, 1000)) diff --git a/tests/core/pyspec/eth2spec/test/helpers/fork_transition.py b/tests/core/pyspec/eth2spec/test/helpers/fork_transition.py index 68444c4726..80c54d9c1f 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/fork_transition.py +++ b/tests/core/pyspec/eth2spec/test/helpers/fork_transition.py @@ -16,6 +16,7 @@ CAPELLA, DENEB, EIP6110, + EIP7002, ) from eth2spec.test.helpers.deposits import ( prepare_state_and_deposit, @@ -161,6 +162,8 @@ def do_fork(state, spec, post_spec, fork_epoch, with_block=True, sync_aggregate= state = post_spec.upgrade_to_deneb(state) elif post_spec.fork == EIP6110: state = post_spec.upgrade_to_eip6110(state) + elif post_spec.fork == EIP7002: + state = post_spec.upgrade_to_eip7002(state) assert state.fork.epoch == fork_epoch @@ -179,6 +182,9 @@ def do_fork(state, spec, post_spec, fork_epoch, with_block=True, sync_aggregate= elif post_spec.fork == EIP6110: assert state.fork.previous_version == post_spec.config.DENEB_FORK_VERSION assert state.fork.current_version == post_spec.config.EIP6110_FORK_VERSION + elif post_spec.fork == EIP7002: + assert state.fork.previous_version == post_spec.config.CAPELLA_FORK_VERSION + assert state.fork.current_version == post_spec.config.EIP7002_FORK_VERSION if with_block: return state, _state_transition_and_sign_block_at_slot( diff --git a/tests/core/pyspec/eth2spec/test/helpers/forks.py b/tests/core/pyspec/eth2spec/test/helpers/forks.py index 5e97522dbb..492af47fe3 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/forks.py +++ b/tests/core/pyspec/eth2spec/test/helpers/forks.py @@ -1,10 +1,12 @@ from .constants import ( PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, - EIP6110, + EIP6110, EIP7002, ) def is_post_fork(a, b): + if a == EIP7002: + return b in [PHASE0, ALTAIR, BELLATRIX, CAPELLA, EIP7002] if a == EIP6110: return b in [PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110] if a == DENEB: @@ -38,3 +40,7 @@ def is_post_deneb(spec): def 
is_post_eip6110(spec): return is_post_fork(spec.fork, EIP6110) + + +def is_post_eip7002(spec): + return is_post_fork(spec.fork, EIP7002) diff --git a/tests/core/pyspec/eth2spec/test/helpers/genesis.py b/tests/core/pyspec/eth2spec/test/helpers/genesis.py index fea259013b..e55bdef5ce 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/genesis.py +++ b/tests/core/pyspec/eth2spec/test/helpers/genesis.py @@ -1,11 +1,11 @@ from eth2spec.test.helpers.constants import ( - ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110, + ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110, EIP7002, ) from eth2spec.test.helpers.execution_payload import ( compute_el_header_block_hash, ) from eth2spec.test.helpers.forks import ( - is_post_altair, is_post_bellatrix, is_post_capella, is_post_eip6110, + is_post_altair, is_post_bellatrix, is_post_capella, is_post_eip6110, is_post_eip7002, ) from eth2spec.test.helpers.keys import pubkeys @@ -49,11 +49,14 @@ def get_sample_genesis_execution_payload_header(spec, transactions_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") withdrawals_trie_root = None deposit_receipts_trie_root = None + exits_trie_root = None if is_post_capella(spec): withdrawals_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") if is_post_eip6110(spec): deposit_receipts_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") + if is_post_eip7002(spec): + exits_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") payload_header.block_hash = compute_el_header_block_hash( spec, @@ -61,6 +64,7 @@ def get_sample_genesis_execution_payload_header(spec, transactions_trie_root, withdrawals_trie_root, deposit_receipts_trie_root, + exits_trie_root, ) return payload_header @@ -86,6 +90,9 @@ def create_genesis_state(spec, validator_balances, activation_threshold): elif spec.fork == EIP6110: previous_version = spec.config.DENEB_FORK_VERSION 
current_version = spec.config.EIP6110_FORK_VERSION + elif spec.fork == EIP7002: + previous_version = spec.config.CAPELLA_FORK_VERSION + current_version = spec.config.EIP7002_FORK_VERSION state = spec.BeaconState( genesis_time=0, diff --git a/tests/core/pyspec/eth2spec/test/helpers/sharding.py b/tests/core/pyspec/eth2spec/test/helpers/sharding.py index 32e1d4021d..9393f40c50 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/sharding.py +++ b/tests/core/pyspec/eth2spec/test/helpers/sharding.py @@ -41,7 +41,7 @@ class BlobTransaction(Container): value: uint256 data: ByteList[MAX_CALLDATA_SIZE] access_list: List[AccessTuple, MAX_ACCESS_LIST_SIZE] - max_fee_per_data_gas: uint256 + max_fee_per_blob_gas: uint256 blob_versioned_hashes: List[Bytes32, MAX_VERSIONED_HASHES_LIST_SIZE] diff --git a/tests/core/pyspec/eth2spec/test/helpers/state.py b/tests/core/pyspec/eth2spec/test/helpers/state.py index 0dc17b00f1..1e64bd4db2 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/state.py +++ b/tests/core/pyspec/eth2spec/test/helpers/state.py @@ -166,3 +166,8 @@ def has_active_balance_differential(spec, state): active_balance = spec.get_total_active_balance(state) total_balance = spec.get_total_balance(state, set(range(len(state.validators)))) return active_balance // spec.EFFECTIVE_BALANCE_INCREMENT != total_balance // spec.EFFECTIVE_BALANCE_INCREMENT + + +def get_validator_index_by_pubkey(state, pubkey): + index = next((i for i, validator in enumerate(state.validators) if validator.pubkey == pubkey), None) + return index diff --git a/tests/core/pyspec/eth2spec/test/helpers/withdrawals.py b/tests/core/pyspec/eth2spec/test/helpers/withdrawals.py index aebe49f26e..9ebb05ba0e 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/withdrawals.py +++ b/tests/core/pyspec/eth2spec/test/helpers/withdrawals.py @@ -20,9 +20,14 @@ def set_validator_fully_withdrawable(spec, state, index, withdrawable_epoch=None assert spec.is_fully_withdrawable_validator(validator, state.balances[index], 
withdrawable_epoch) -def set_eth1_withdrawal_credential_with_balance(spec, state, index, balance): +def set_eth1_withdrawal_credential_with_balance(spec, state, index, balance=None, address=None): + if balance is None: + balance = spec.MAX_EFFECTIVE_BALANCE + if address is None: + address = b'\x11' * 20 + validator = state.validators[index] - validator.withdrawal_credentials = spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + validator.withdrawal_credentials[1:] + validator.withdrawal_credentials = spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + b'\x00' * 11 + address validator.effective_balance = min(balance, spec.MAX_EFFECTIVE_BALANCE) state.balances[index] = balance diff --git a/tests/core/pyspec/eth2spec/test/utils/randomized_block_tests.py b/tests/core/pyspec/eth2spec/test/utils/randomized_block_tests.py index c164515103..3255de87a6 100644 --- a/tests/core/pyspec/eth2spec/test/utils/randomized_block_tests.py +++ b/tests/core/pyspec/eth2spec/test/utils/randomized_block_tests.py @@ -9,6 +9,7 @@ from eth2spec.test.helpers.execution_payload import ( compute_el_block_hash, + build_randomized_execution_payload, ) from eth2spec.test.helpers.multi_operations import ( build_random_block_from_state_for_next_slot, @@ -216,14 +217,17 @@ def random_block_altair_with_cycling_sync_committee_participation(spec, return block -def random_block_bellatrix(spec, state, signed_blocks, scenario_state): +def random_block_bellatrix(spec, state, signed_blocks, scenario_state, rng=Random(3456)): block = random_block_altair_with_cycling_sync_committee_participation(spec, state, signed_blocks, scenario_state) - # TODO: return randomized execution payload + # build execution_payload at the next slot + state = state.copy() + next_slot(spec, state) + block.body.execution_payload = build_randomized_execution_payload(spec, state, rng=rng) return block def random_block_capella(spec, state, signed_blocks, scenario_state, rng=Random(3456)): - block = random_block_bellatrix(spec, state, signed_blocks, scenario_state) + 
block = random_block_bellatrix(spec, state, signed_blocks, scenario_state, rng=rng) block.body.bls_to_execution_changes = get_random_bls_to_execution_changes( spec, state, @@ -233,10 +237,11 @@ def random_block_capella(spec, state, signed_blocks, scenario_state, rng=Random( def random_block_deneb(spec, state, signed_blocks, scenario_state, rng=Random(3456)): - block = random_block_capella(spec, state, signed_blocks, scenario_state) + block = random_block_capella(spec, state, signed_blocks, scenario_state, rng=rng) # TODO: more commitments. blob_kzg_commitments: List[KZGCommitment, MAX_BLOBS_PER_BLOCK] - opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec, blob_count=1) - block.body.execution_payload.transactions = [opaque_tx] + opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx( + spec, blob_count=rng.randint(0, spec.MAX_BLOBS_PER_BLOCK), rng=rng) + block.body.execution_payload.transactions.append(opaque_tx) block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload) block.body.blob_kzg_commitments = blob_kzg_commitments