Merge branch 'dev' into pr3433
commit ff9aa13462
.circleci/config.yml
@@ -36,26 +36,26 @@ commands:
     steps:
       - restore_cached_venv:
          venv_name: v24-pyspec
-         reqs_checksum: cache-{{ checksum "setup.py" }}
+         reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }}
   save_pyspec_cached_venv:
     description: Save a venv into a cache with pyspec keys"
     steps:
       - save_cached_venv:
          venv_name: v24-pyspec
-         reqs_checksum: cache-{{ checksum "setup.py" }}
+         reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }}
          venv_path: ./venv
   restore_deposit_contract_tester_cached_venv:
     description: "Restore the venv from cache for the deposit contract tester"
     steps:
      - restore_cached_venv:
          venv_name: v23-deposit-contract-tester
-         reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }}
+         reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }}
   save_deposit_contract_tester_cached_venv:
     description: "Save the venv to cache for later use of the deposit contract tester"
     steps:
      - save_cached_venv:
          venv_name: v23-deposit-contract-tester
-         reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }}
+         reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "requirements_preinstallation.txt" }}-{{ checksum "solidity_deposit_contract/web3_tester/requirements.txt" }}
          venv_path: ./solidity_deposit_contract/web3_tester/venv
 jobs:
   checkout_specs:
@@ -168,6 +168,19 @@ jobs:
           command: make citest fork=eip6110
       - store_test_results:
           path: tests/core/pyspec/test-reports
+  test-eip7002:
+    docker:
+      - image: circleci/python:3.9
+    working_directory: ~/specs-repo
+    steps:
+      - restore_cache:
+          key: v3-specs-repo-{{ .Branch }}-{{ .Revision }}
+      - restore_pyspec_cached_venv
+      - run:
+          name: Run py-tests
+          command: make citest fork=eip7002
+      - store_test_results:
+          path: tests/core/pyspec/test-reports
   table_of_contents:
     docker:
       - image: circleci/node:10.16.3
@@ -291,6 +304,9 @@ workflows:
         - test-eip6110:
             requires:
               - install_pyspec_test
+        - test-eip7002:
+            requires:
+              - install_pyspec_test
         - table_of_contents
         - codespell
        - lint:
.github/workflows/run-tests.yml
@@ -6,10 +6,8 @@ defaults:
 
 env:
   TEST_PRESET_TYPE: "minimal"
   DEFAULT_BRANCH: "dev"
-
-# Run tests on workflow_Dispatch
-on:
+on:
   push:
     branches:
       - dev
@@ -22,10 +20,6 @@ on:
         description: Type of test to run, either mainnet or minimal
         type: string
         required: true
-      commitRef:
-        description: The branch, tag or SHA to checkout and build from
-        default: dev
-        required: true
   schedule:
     - cron: '0 0 * * *'
 
@@ -47,8 +41,6 @@ jobs:
     steps:
       - name: Checkout this repo
         uses: actions/checkout@v3.2.0
-        with:
-          ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
       - name: Check table of contents
         run: sudo npm install -g doctoc@2.2.0 && make check_toc
 
@@ -58,8 +50,6 @@ jobs:
     steps:
       - name: Checkout this repo
         uses: actions/checkout@v3.2.0
-        with:
-          ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
       - name: Check codespell
         run: pip install 'codespell<3.0.0,>=2.0.0' --user && make codespell
 
@@ -69,8 +59,6 @@ jobs:
     steps:
       - name: Checkout this repo
         uses: actions/checkout@v3.2.0
-        with:
-          ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
       - name: Install pyspec requirements
         run: make install_test
       - name: Run linter for pyspec
@@ -83,12 +71,10 @@ jobs:
     needs: [preclear,lint,codespell,table_of_contents]
     strategy:
       matrix:
-        version: ["phase0", "altair", "bellatrix", "capella", "deneb", "eip6110"]
+        version: ["phase0", "altair", "bellatrix", "capella", "deneb", "eip6110", "eip7002"]
     steps:
       - name: Checkout this repo
         uses: actions/checkout@v3.2.0
-        with:
-          ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
       - name: set TEST_PRESET_TYPE
         if: github.event.inputs.test_preset_type != ''
         run: |
.gitignore
@@ -4,6 +4,7 @@ venv
 .venvs
+.venv
 /.pytest_cache
 *.swp
 
 build/
 output/
@@ -21,6 +22,7 @@ tests/core/pyspec/eth2spec/bellatrix/
 tests/core/pyspec/eth2spec/capella/
 tests/core/pyspec/eth2spec/deneb/
 tests/core/pyspec/eth2spec/eip6110/
+tests/core/pyspec/eth2spec/eip7002/
 tests/core/pyspec/eth2spec/whisk/
 
 # coverage reports
Makefile
@@ -104,9 +104,15 @@ generate_tests: $(GENERATOR_TARGETS)
 pyspec:
 	python3 -m venv venv; . venv/bin/activate; python3 setup.py pyspecdev
 
+# check the setup tool requirements
+preinstallation:
+	python3 -m venv venv; . venv/bin/activate; \
+	python3 -m pip install -r requirements_preinstallation.txt
+
 # installs the packages to run pyspec tests
-install_test:
-	python3 -m venv venv; . venv/bin/activate; python3 -m pip install -e .[lint]; python3 -m pip install -e .[test]
+install_test: preinstallation
+	python3 -m venv venv; . venv/bin/activate; \
+	python3 -m pip install -e .[lint]; python3 -m pip install -e .[test]
 
 # Testing against `minimal` or `mainnet` config by default
 test: pyspec
README.md
@@ -42,6 +42,7 @@ Features are researched and developed in parallel, and then consolidated into se
 Additional specifications and standards outside of requisite client functionality can be found in the following repos:
 
 * [Beacon APIs](https://github.com/ethereum/beacon-apis)
 * [Engine APIs](https://github.com/ethereum/execution-apis/tree/main/src/engine)
 * [Beacon Metrics](https://github.com/ethereum/beacon-metrics/)
 
 ## Design goals
configs/mainnet.yaml
@@ -53,6 +53,9 @@ DENEB_FORK_EPOCH: 18446744073709551615
 # EIP6110
 EIP6110_FORK_VERSION: 0x05000000 # temporary stub
 EIP6110_FORK_EPOCH: 18446744073709551615
+# EIP7002
+EIP7002_FORK_VERSION: 0x05000000 # temporary stub
+EIP7002_FORK_EPOCH: 18446744073709551615
 # WHISK
 WHISK_FORK_VERSION: 0x06000000 # temporary stub
 WHISK_FORK_EPOCH: 18446744073709551615
@@ -137,3 +140,9 @@ MAX_REQUEST_BLOB_SIDECARS: 768
 MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096
 # `6`
 BLOB_SIDECAR_SUBNET_COUNT: 6
+
+# Whisk
+# `Epoch(2**8)`
+WHISK_EPOCHS_PER_SHUFFLING_PHASE: 256
+# `Epoch(2)`
+WHISK_PROPOSER_SELECTION_GAP: 2
configs/minimal.yaml
@@ -52,6 +52,9 @@ DENEB_FORK_EPOCH: 18446744073709551615
 # EIP6110
 EIP6110_FORK_VERSION: 0x05000001
 EIP6110_FORK_EPOCH: 18446744073709551615
+# EIP7002
+EIP7002_FORK_VERSION: 0x05000001
+EIP7002_FORK_EPOCH: 18446744073709551615
 # WHISK
 WHISK_FORK_VERSION: 0x06000001
 WHISK_FORK_EPOCH: 18446744073709551615
@@ -138,3 +141,7 @@ MAX_REQUEST_BLOB_SIDECARS: 768
 MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096
 # `6`
 BLOB_SIDECAR_SUBNET_COUNT: 6
+
+# Whisk
+WHISK_EPOCHS_PER_SHUFFLING_PHASE: 4
+WHISK_PROPOSER_SELECTION_GAP: 1
docs/docs/new-feature.md
@@ -53,17 +53,17 @@ For example, if the latest fork is Capella, use `./specs/capella` content as you
 ### 4. Add `fork.md`
 You can refer to the previous fork's `fork.md` file.
 ### 5. Make it executable
-- Update [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name.
-- Update [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py):
-    - Add a new `SpecBuilder` with the new feature name constant. e.g., `EIP9999SpecBuilder`
-    - Add the new `SpecBuilder` to `spec_builders` list.
-    - Add the path of the new markdown files in `finalize_options` function.
+- Update Pyspec [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name.
+- Update helpers for [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py) for building the spec:
+    - Update [`pysetup/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/constants.py) with the new feature name as Pyspec `constants.py` defined.
+    - Update [`pysetup/spec_builders/__init__.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/spec_builders/__init__.py). Implement a new `<FEATURE_NAME>SpecBuilder` in `pysetup/spec_builders/<FEATURE_NAME>.py` with the new feature name. e.g., `EIP9999SpecBuilder`. Append it to the `spec_builders` list.
+    - Update [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py): add the path of the new markdown files in `get_md_doc_paths` function if needed.
 
 ## B: Make it executable for pytest and test generator
 
-### 1. Add `light-client/*` docs if you updated the content of `BeaconBlock`
+### 1. [Optional] Add `light-client/*` docs if you updated the content of `BeaconBlock`
 - You can refer to the previous fork's `light-client/*` file.
-- Add the path of the new markdown files in `setup.py`'s `finalize_options` function.
+- Add the path of the new markdown files in [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py)'s `get_md_doc_paths` function.
 
 ### 2. Add the mainnet and minimal presets and update the configs
 - Add presets: `presets/mainnet/<new-feature-name>.yaml` and `presets/minimal/<new-feature-name>.yaml`
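To make the updated builder instructions above concrete, here is a minimal sketch of what a new feature module could look like under the restructured layout. `EIP9999` is the doc's own placeholder name, and basing it on Deneb is an arbitrary assumption for the example:

```python
# Hypothetical pysetup/spec_builders/eip9999.py; the EIP9999 constant would
# also be added to pysetup/constants.py alongside the existing fork names.
from .base import BaseSpecBuilder
from ..constants import EIP9999


class EIP9999SpecBuilder(BaseSpecBuilder):
    fork: str = EIP9999

    @classmethod
    def imports(cls, preset_name: str):
        # Pull in the previous fork's compiled spec; Deneb as the base
        # fork is an assumption made only for this sketch.
        return f'''
from eth2spec.deneb import {preset_name} as deneb
'''
```

The class would then be appended to the `spec_builders` registry in `pysetup/spec_builders/__init__.py`, and `PREVIOUS_FORK_OF` in `pysetup/md_doc_paths.py` would gain an `EIP9999` entry pointing at its base fork.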
presets/mainnet/whisk.yaml
@@ -0,0 +1,16 @@
# Mainnet preset - Whisk

# Misc
# ---------------------------------------------------------------
# `uint64(4)`
CURDLEPROOFS_N_BLINDERS: 4
# `uint64(2**14)`
WHISK_CANDIDATE_TRACKERS_COUNT: 16384
# `uint64(2**13)` must be < WHISK_CANDIDATE_TRACKERS_COUNT
WHISK_PROPOSER_TRACKERS_COUNT: 8192
# `uint64(2**7 - CURDLEPROOFS_N_BLINDERS)`
WHISK_VALIDATORS_PER_SHUFFLE: 124
# `uint64(2**15)` TODO: will be replaced by a fix format once there's a serialized format
WHISK_MAX_SHUFFLE_PROOF_SIZE: 32768
# `uint64(2**10)` TODO: will be replaced by a fix format once there's a serialized format
WHISK_MAX_OPENING_PROOF_SIZE: 1024
presets/minimal/whisk.yaml
@@ -0,0 +1,16 @@
# Minimal preset - Whisk

# Misc
# ---------------------------------------------------------------
# [customized]
CURDLEPROOFS_N_BLINDERS: 4
# [customized]
WHISK_CANDIDATE_TRACKERS_COUNT: 32
# [customized]
WHISK_PROPOSER_TRACKERS_COUNT: 16
# [customized]
WHISK_VALIDATORS_PER_SHUFFLE: 4
# `uint64(2**15)` TODO: will be replaced by a fix format once there's a serialized format
WHISK_MAX_SHUFFLE_PROOF_SIZE: 32768
# `uint64(2**10)` TODO: will be replaced by a fix format once there's a serialized format
WHISK_MAX_OPENING_PROOF_SIZE: 1024
pysetup/constants.py
@@ -0,0 +1,34 @@
# Definitions in context.py
PHASE0 = 'phase0'
ALTAIR = 'altair'
BELLATRIX = 'bellatrix'
CAPELLA = 'capella'
DENEB = 'deneb'
EIP6110 = 'eip6110'
EIP7002 = 'eip7002'
WHISK = 'whisk'


# The helper functions that are used when defining constants
CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = '''
def ceillog2(x: int) -> uint64:
    if x < 1:
        raise ValueError(f"ceillog2 accepts only positive values, x={x}")
    return uint64((x - 1).bit_length())


def floorlog2(x: int) -> uint64:
    if x < 1:
        raise ValueError(f"floorlog2 accepts only positive values, x={x}")
    return uint64(x.bit_length() - 1)
'''


OPTIMIZED_BLS_AGGREGATE_PUBKEYS = '''
def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey:
    return bls.AggregatePKs(pubkeys)
'''


ETH2_SPEC_COMMENT_PREFIX = "eth2spec:"
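As a quick sanity check of the log helpers embedded in `CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS`, here is the same logic with plain `int` standing in for the pyspec's `uint64`:

```python
# Standalone copy of the ceillog2/floorlog2 source above, minus the uint64
# wrapper, so the edge cases can be checked directly.
def ceillog2(x: int) -> int:
    if x < 1:
        raise ValueError(f"ceillog2 accepts only positive values, x={x}")
    return (x - 1).bit_length()


def floorlog2(x: int) -> int:
    if x < 1:
        raise ValueError(f"floorlog2 accepts only positive values, x={x}")
    return x.bit_length() - 1


assert ceillog2(1) == 0 and floorlog2(1) == 0
assert ceillog2(16) == 4 and ceillog2(17) == 5    # rounds up between powers of two
assert floorlog2(16) == 4 and floorlog2(17) == 4  # rounds down between powers of two
```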
pysetup/helpers.py
@@ -0,0 +1,253 @@
import re
from typing import TypeVar, Dict
import textwrap
from functools import reduce

from .constants import CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS
from .spec_builders import spec_builders
from .md_doc_paths import PREVIOUS_FORK_OF
from .typing import (
    ProtocolDefinition,
    SpecObject,
    VariableDefinition,
)


def collect_prev_forks(fork: str) -> list[str]:
    forks = [fork]
    while True:
        fork = PREVIOUS_FORK_OF[fork]
        if fork is None:
            return forks
        forks.append(fork)


def is_byte_vector(value: str) -> bool:
    return value.startswith(('ByteVector'))


def make_function_abstract(protocol_def: ProtocolDefinition, key: str):
    function = protocol_def.functions[key].split('"""')
    protocol_def.functions[key] = function[0] + "..."


def objects_to_spec(preset_name: str,
                    spec_object: SpecObject,
                    fork: str,
                    ordered_class_objects: Dict[str, str]) -> str:
    """
    Given all the objects that constitute a spec, combine them into a single pyfile.
    """
    new_type_definitions = (
        '\n\n'.join(
            [
                f"class {key}({value}):\n    pass\n" if not is_byte_vector(value) else f"class {key}({value}):  # type: ignore\n    pass\n"
                for key, value in spec_object.custom_types.items()
            ]
        )
    )

    # Collect builders with the reversed previous forks
    # e.g. `[bellatrix, altair, phase0]` -> `[phase0, altair, bellatrix]`
    builders = [spec_builders[fork] for fork in collect_prev_forks(fork)[::-1]]

    def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str:
        abstract_functions = ["verify_and_notify_new_payload"]
        for key in protocol_def.functions.keys():
            if key in abstract_functions:
                make_function_abstract(protocol_def, key)

        protocol = f"class {protocol_name}(Protocol):"
        for fn_source in protocol_def.functions.values():
            fn_source = fn_source.replace("self: "+protocol_name, "self")
            protocol += "\n\n" + textwrap.indent(fn_source, "    ")
        return protocol

    protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items())
    for k in list(spec_object.functions):
        if k in [
            "ceillog2",
            "floorlog2",
            "compute_merkle_proof_for_block_body",
            "compute_merkle_proof_for_state",
        ]:
            del spec_object.functions[k]
    functions = reduce(lambda fns, builder: builder.implement_optimizations(fns), builders, spec_object.functions)
    functions_spec = '\n\n\n'.join(functions.values())

    # Access global dict of config vars for runtime configurables
    for name in spec_object.config_vars.keys():
        functions_spec = re.sub(r"\b%s\b" % name, 'config.' + name, functions_spec)

    def format_config_var(name: str, vardef: VariableDefinition) -> str:
        if vardef.type_name is None:
            out = f'{name}={vardef.value},'
        else:
            out = f'{name}={vardef.type_name}({vardef.value}),'
        if vardef.comment is not None:
            out += f'  # {vardef.comment}'
        return out

    config_spec = 'class Configuration(NamedTuple):\n'
    config_spec += '    PRESET_BASE: str\n'
    config_spec += '\n'.join(f'    {k}: {v.type_name if v.type_name is not None else "int"}'
                             for k, v in spec_object.config_vars.items())
    config_spec += '\n\n\nconfig = Configuration(\n'
    config_spec += f'    PRESET_BASE="{preset_name}",\n'
    config_spec += '\n'.join('    ' + format_config_var(k, v) for k, v in spec_object.config_vars.items())
    config_spec += '\n)\n'

    def format_constant(name: str, vardef: VariableDefinition) -> str:
        if vardef.type_name is None:
            if vardef.type_hint is None:
                out = f'{name} = {vardef.value}'
            else:
                out = f'{name}: {vardef.type_hint} = {vardef.value}'
        else:
            out = f'{name} = {vardef.type_name}({vardef.value})'
        if vardef.comment is not None:
            out += f'  # {vardef.comment}'
        return out

    # Merge all constant objects
    hardcoded_ssz_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_ssz_dep_constants()}, builders, {})
    hardcoded_custom_type_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_custom_type_dep_constants(spec_object)}, builders, {})
    # Concatenate all strings
    imports = reduce(lambda txt, builder: (txt + "\n\n" + builder.imports(preset_name) ).strip("\n"), builders, "")
    preparations = reduce(lambda txt, builder: (txt + "\n\n" + builder.preparations() ).strip("\n"), builders, "")
    sundry_functions = reduce(lambda txt, builder: (txt + "\n\n" + builder.sundry_functions() ).strip("\n"), builders, "")
    # Keep engine from the most recent fork
    execution_engine_cls = reduce(lambda txt, builder: builder.execution_engine_cls() or txt, builders, "")

    constant_vars_spec = '# Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items())
    preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items())
    ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values())
    ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_ssz_dep_constants[x]), hardcoded_ssz_dep_constants))
    ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), hardcoded_ssz_dep_constants))
    custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_custom_type_dep_constants[x]), hardcoded_custom_type_dep_constants))
    spec_strs = [
        imports,
        preparations,
        f"fork = \'{fork}\'\n",
        # The constants that some SSZ containers require. Need to be defined before `new_type_definitions`
        custom_type_dep_constants,
        new_type_definitions,
        CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS,
        # The constants that some SSZ containers require. Need to be defined before `constants_spec`
        ssz_dep_constants,
        constant_vars_spec,
        preset_vars_spec,
        config_spec,
        ordered_class_objects_spec,
        protocols_spec,
        functions_spec,
        sundry_functions,
        execution_engine_cls,
        # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
        # as same as the spec definition.
        ssz_dep_constants_verification,
    ]
    return "\n\n\n".join([str.strip("\n") for str in spec_strs if str]) + "\n"


def combine_protocols(old_protocols: Dict[str, ProtocolDefinition],
                      new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]:
    for key, value in new_protocols.items():
        if key not in old_protocols:
            old_protocols[key] = value
        else:
            functions = combine_dicts(old_protocols[key].functions, value.functions)
            old_protocols[key] = ProtocolDefinition(functions=functions)
    return old_protocols


T = TypeVar('T')


def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]:
    return {**old_dict, **new_dict}


ignored_dependencies = [
    'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature',
    'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
    'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
    'bytes', 'byte', 'ByteList', 'ByteVector',
    'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set',
    'Optional', 'Sequence',
]


def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None:
    """
    Determines which SSZ Object is dependent on which other and orders them appropriately
    """
    items = list(objects.items())
    for key, value in items:
        dependencies = []
        for line in value.split('\n'):
            if not re.match(r'\s+\w+: .+', line):
                continue  # skip whitespace etc.
            line = line[line.index(':') + 1:]  # strip of field name
            if '#' in line:
                line = line[:line.index('#')]  # strip of comment
            dependencies.extend(re.findall(r'(\w+)', line))  # catch all legible words, potential dependencies
        dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies)  # filter out constants
        dependencies = filter(lambda x: x not in ignored_dependencies, dependencies)
        dependencies = filter(lambda x: x not in custom_types, dependencies)
        for dep in dependencies:
            key_list = list(objects.keys())
            for item in [dep, key] + key_list[key_list.index(dep)+1:]:
                objects[item] = objects.pop(item)


def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
    """
    Takes in old spec and new spec ssz objects, combines them,
    and returns the newer versions of the objects in dependency order.
    """
    for key, value in new_objects.items():
        old_objects[key] = value
    return old_objects


def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
    """
    Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
    """
    protocols = combine_protocols(spec0.protocols, spec1.protocols)
    functions = combine_dicts(spec0.functions, spec1.functions)
    custom_types = combine_dicts(spec0.custom_types, spec1.custom_types)
    constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars)
    preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars)
    config_vars = combine_dicts(spec0.config_vars, spec1.config_vars)
    ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants)
    ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types)
    dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses)
    return SpecObject(
        functions=functions,
        protocols=protocols,
        custom_types=custom_types,
        constant_vars=constant_vars,
        preset_vars=preset_vars,
        config_vars=config_vars,
        ssz_dep_constants=ssz_dep_constants,
        ssz_objects=ssz_objects,
        dataclasses=dataclasses,
    )


def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]:
    """
    Parses a dict of basic str/int/list types into a dict for insertion into the spec code.
    """
    out: Dict[str, str] = dict()
    for k, v in conf.items():
        if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'):
            # Represent byte data with string, to avoid misinterpretation as big-endian int.
            # Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer.
            out[k] = f"'{v}'"
        else:
            out[k] = str(int(v))
    return out
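Two of the helpers above are easy to exercise in isolation. This is a hedged sketch, assuming it runs from the repo root so that `pysetup` is importable; the config values fed to `parse_config_vars` are made up for illustration:

```python
from pysetup.helpers import collect_prev_forks, parse_config_vars

# Fork ancestry is collected newest-first by walking PREVIOUS_FORK_OF.
assert collect_prev_forks('bellatrix') == ['bellatrix', 'altair', 'phase0']

# Hex-like values and PRESET_BASE stay quoted strings; everything else is
# normalized to a decimal integer string.
parsed = parse_config_vars({
    'PRESET_BASE': 'mainnet',
    'ALTAIR_FORK_VERSION': '0x01000000',
    'SECONDS_PER_SLOT': '12',
})
assert parsed['PRESET_BASE'] == "'mainnet'"
assert parsed['ALTAIR_FORK_VERSION'] == "'0x01000000'"
assert parsed['SECONDS_PER_SLOT'] == '12'
```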
pysetup/md_doc_paths.py
@@ -0,0 +1,78 @@
import os

from .constants import (
    PHASE0,
    ALTAIR,
    BELLATRIX,
    CAPELLA,
    DENEB,
    EIP6110,
    WHISK,
    EIP7002,
)


PREVIOUS_FORK_OF = {
    PHASE0: None,
    ALTAIR: PHASE0,
    BELLATRIX: ALTAIR,
    CAPELLA: BELLATRIX,
    DENEB: CAPELLA,
    EIP6110: DENEB,
    WHISK: CAPELLA,
    EIP7002: CAPELLA,
}

ALL_FORKS = list(PREVIOUS_FORK_OF.keys())

IGNORE_SPEC_FILES = [
    "specs/phase0/deposit-contract.md"
]

EXTRA_SPEC_FILES = {
    BELLATRIX: "sync/optimistic.md"
}


def is_post_fork(a, b) -> bool:
    """
    Returns true if fork a is after b, or if a == b
    """
    if a == b:
        return True

    prev_fork = PREVIOUS_FORK_OF[a]
    if prev_fork == b:
        return True
    elif prev_fork == None:
        return False
    else:
        return is_post_fork(prev_fork, b)


def get_fork_directory(fork):
    dir1 = f'specs/{fork}'
    if os.path.exists(dir1):
        return dir1
    dir2 = f'specs/_features/{fork}'
    if os.path.exists(dir2):
        return dir2
    raise FileNotFoundError(f"No directory found for fork: {fork}")


def get_md_doc_paths(spec_fork: str) -> str:
    md_doc_paths = ""

    for fork in ALL_FORKS:
        if is_post_fork(spec_fork, fork):
            # Append all files in fork directory recursively
            for root, dirs, files in os.walk(get_fork_directory(fork)):
                for filename in files:
                    filepath = os.path.join(root, filename)
                    if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
                        md_doc_paths += filepath + "\n"
            # Append extra files if any
            if fork in EXTRA_SPEC_FILES:
                md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n"

    return md_doc_paths
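`is_post_fork` simply walks the `PREVIOUS_FORK_OF` chain, so feature forks that branch off Capella in parallel (Whisk, EIP-7002) count as post-Capella but not post-Deneb. A few spot checks derived from the table above:

```python
from pysetup.md_doc_paths import is_post_fork

assert is_post_fork('deneb', 'phase0')       # deneb descends from phase0
assert is_post_fork('eip7002', 'capella')    # eip7002 branches off capella...
assert not is_post_fork('eip7002', 'deneb')  # ...so it is not post-deneb
assert not is_post_fork('capella', 'deneb')  # the relation is directional
```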
pysetup/spec_builders/__init__.py
@@ -0,0 +1,17 @@
from .phase0 import Phase0SpecBuilder
from .altair import AltairSpecBuilder
from .bellatrix import BellatrixSpecBuilder
from .capella import CapellaSpecBuilder
from .deneb import DenebSpecBuilder
from .eip6110 import EIP6110SpecBuilder
from .eip7002 import EIP7002SpecBuilder
from .whisk import WhiskSpecBuilder


spec_builders = {
    builder.fork: builder
    for builder in (
        Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder,
        EIP6110SpecBuilder, EIP7002SpecBuilder, WhiskSpecBuilder,
    )
}
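The registry keys each builder by its `fork` string, so callers can resolve one without referencing class names. A small usage sketch, assuming the repo root is on the import path:

```python
from pysetup.constants import DENEB
from pysetup.spec_builders import spec_builders

builder = spec_builders[DENEB]     # -> DenebSpecBuilder
assert builder.fork == 'deneb'
print(builder.imports('mainnet'))  # this fork's import preamble for the pyspec
```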
pysetup/spec_builders/altair.py
@@ -0,0 +1,54 @@
from typing import Dict

from .base import BaseSpecBuilder
from ..constants import ALTAIR, OPTIMIZED_BLS_AGGREGATE_PUBKEYS


class AltairSpecBuilder(BaseSpecBuilder):
    fork: str = ALTAIR

    @classmethod
    def imports(cls, preset_name: str) -> str:
        return f'''
from typing import NewType, Union as PyUnion

from eth2spec.phase0 import {preset_name} as phase0
from eth2spec.test.helpers.merkle import build_proof
from eth2spec.utils.ssz.ssz_typing import Path
'''

    @classmethod
    def preparations(cls):
        return '''
SSZVariableName = str
GeneralizedIndex = NewType('GeneralizedIndex', int)
'''

    @classmethod
    def sundry_functions(cls) -> str:
        return '''
def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex:
    ssz_path = Path(ssz_class)
    for item in path:
        ssz_path = ssz_path / item
    return GeneralizedIndex(ssz_path.gindex())


def compute_merkle_proof_for_state(state: BeaconState,
                                   index: GeneralizedIndex) -> Sequence[Bytes32]:
    return build_proof(state.get_backing(), index)'''


    @classmethod
    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
        return {
            'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
            'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
            'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
        }

    @classmethod
    def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
        if "eth_aggregate_pubkeys" in functions:
            functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip()
        return functions
pysetup/spec_builders/base.py
@@ -0,0 +1,52 @@
from abc import ABC, abstractmethod
from typing import Sequence, Dict
from pathlib import Path

class BaseSpecBuilder(ABC):
    @property
    @abstractmethod
    def fork(self) -> str:
        raise NotImplementedError()

    @classmethod
    def imports(cls, preset_name: str) -> str:
        """
        Import objects from other libraries.
        """
        return ""

    @classmethod
    def preparations(cls) -> str:
        """
        Define special types/constants for building pyspec or call functions.
        """
        return ""

    @classmethod
    def sundry_functions(cls) -> str:
        """
        The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance.
        """
        return ""

    @classmethod
    def execution_engine_cls(cls) -> str:
        return ""

    @classmethod
    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
        """
        The constants that are required for SSZ objects.
        """
        return {}

    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:  # TODO
        """
        The constants that are required for custom types.
        """
        return {}

    @classmethod
    def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
        return functions
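Unlike the old `SpecBuilder` inheritance chain being removed from `setup.py` below, each hook here returns only its own fork's delta, and `pysetup/helpers.py` folds the hooks together with `reduce`. A hedged sketch of that composition, using `'capella'` and `'mainnet'` purely as example inputs:

```python
from functools import reduce
from pysetup.helpers import collect_prev_forks
from pysetup.spec_builders import spec_builders

# Oldest fork first, mirroring objects_to_spec() in pysetup/helpers.py.
builders = [spec_builders[f] for f in collect_prev_forks('capella')[::-1]]
imports = reduce(
    lambda txt, b: (txt + "\n\n" + b.imports('mainnet')).strip("\n"),
    builders, "")
print(imports)  # phase0's preamble, then the altair/bellatrix/capella deltas
```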
pysetup/spec_builders/bellatrix.py
@@ -0,0 +1,66 @@
from .base import BaseSpecBuilder
from ..constants import BELLATRIX

class BellatrixSpecBuilder(BaseSpecBuilder):
    fork: str = BELLATRIX

    @classmethod
    def imports(cls, preset_name: str):
        return f'''
from typing import Protocol
from eth2spec.altair import {preset_name} as altair
from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector
'''

    @classmethod
    def sundry_functions(cls) -> str:
        return """
ExecutionState = Any


def get_pow_block(hash: Bytes32) -> Optional[PowBlock]:
    return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0))


def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState:
    pass


def get_pow_chain_head() -> PowBlock:
    pass"""

    @classmethod
    def execution_engine_cls(cls) -> str:
        return """
class NoopExecutionEngine(ExecutionEngine):

    def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
        return True

    def notify_forkchoice_updated(self: ExecutionEngine,
                                  head_block_hash: Hash32,
                                  safe_block_hash: Hash32,
                                  finalized_block_hash: Hash32,
                                  payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
        pass

    def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
        # pylint: disable=unused-argument
        raise NotImplementedError("no default block production")

    def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
        return True

    def verify_and_notify_new_payload(self: ExecutionEngine,
                                      new_payload_request: NewPayloadRequest) -> bool:
        return True


EXECUTION_ENGINE = NoopExecutionEngine()"""


    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
        return {
            'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value,
        }
pysetup/spec_builders/capella.py
@@ -0,0 +1,29 @@
from typing import Dict

from .base import BaseSpecBuilder
from ..constants import CAPELLA


class CapellaSpecBuilder(BaseSpecBuilder):
    fork: str = CAPELLA

    @classmethod
    def imports(cls, preset_name: str):
        return f'''
from eth2spec.bellatrix import {preset_name} as bellatrix
'''


    @classmethod
    def sundry_functions(cls) -> str:
        return '''
def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
                                        index: GeneralizedIndex) -> Sequence[Bytes32]:
    return build_proof(body.get_backing(), index)'''


    @classmethod
    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
        return {
            'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)',
        }
pysetup/spec_builders/deneb.py
@@ -0,0 +1,71 @@
from .base import BaseSpecBuilder
from ..constants import DENEB


class DenebSpecBuilder(BaseSpecBuilder):
    fork: str = DENEB

    @classmethod
    def imports(cls, preset_name: str):
        return f'''
from eth2spec.capella import {preset_name} as capella
'''


    @classmethod
    def preparations(cls):
        return '''
T = TypeVar('T')  # For generic function
'''

    @classmethod
    def sundry_functions(cls) -> str:
        return '''
def retrieve_blobs_and_proofs(beacon_block_root: Root) -> Tuple[Sequence[Blob], Sequence[KZGProof]]:
    # pylint: disable=unused-argument
    return [], []'''

    @classmethod
    def execution_engine_cls(cls) -> str:
        return """
class NoopExecutionEngine(ExecutionEngine):

    def notify_new_payload(self: ExecutionEngine,
                           execution_payload: ExecutionPayload,
                           parent_beacon_block_root: Root) -> bool:
        return True

    def notify_forkchoice_updated(self: ExecutionEngine,
                                  head_block_hash: Hash32,
                                  safe_block_hash: Hash32,
                                  finalized_block_hash: Hash32,
                                  payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
        pass

    def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
        # pylint: disable=unused-argument
        raise NotImplementedError("no default block production")

    def is_valid_block_hash(self: ExecutionEngine,
                            execution_payload: ExecutionPayload,
                            parent_beacon_block_root: Root) -> bool:
        return True

    def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool:
        return True

    def verify_and_notify_new_payload(self: ExecutionEngine,
                                      new_payload_request: NewPayloadRequest) -> bool:
        return True


EXECUTION_ENGINE = NoopExecutionEngine()"""


    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
        return {
            'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value,
            'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value,
            'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value,
        }
pysetup/spec_builders/eip6110.py
@@ -0,0 +1,12 @@
from .base import BaseSpecBuilder
from ..constants import EIP6110


class EIP6110SpecBuilder(BaseSpecBuilder):
    fork: str = EIP6110

    @classmethod
    def imports(cls, preset_name: str):
        return f'''
from eth2spec.deneb import {preset_name} as deneb
'''
pysetup/spec_builders/eip7002.py
@@ -0,0 +1,12 @@
from .base import BaseSpecBuilder
from ..constants import EIP7002


class EIP7002SpecBuilder(BaseSpecBuilder):
    fork: str = EIP7002

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''
pysetup/spec_builders/phase0.py
@@ -0,0 +1,105 @@
from .base import BaseSpecBuilder
from ..constants import PHASE0


class Phase0SpecBuilder(BaseSpecBuilder):
    fork: str = PHASE0

    @classmethod
    def imports(cls, preset_name: str) -> str:
        return '''from lru import LRU
from dataclasses import (
    dataclass,
    field,
)
from typing import (
    Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final
)

from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
from eth2spec.utils.ssz.ssz_typing import (
    View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256,
    Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist)
from eth2spec.utils.ssz.ssz_typing import Bitvector  # noqa: F401
from eth2spec.utils import bls
from eth2spec.utils.hash_function import hash
'''

    @classmethod
    def preparations(cls) -> str:
        return '''
SSZObject = TypeVar('SSZObject', bound=View)
'''

    @classmethod
    def sundry_functions(cls) -> str:
        return '''
def get_eth1_data(block: Eth1Block) -> Eth1Data:
    """
    A stub function return mocking Eth1Data.
    """
    return Eth1Data(
        deposit_root=block.deposit_root,
        deposit_count=block.deposit_count,
        block_hash=hash_tree_root(block))


def cache_this(key_fn, value_fn, lru_size):  # type: ignore
    cache_dict = LRU(size=lru_size)

    def wrapper(*args, **kw):  # type: ignore
        key = key_fn(*args, **kw)
        nonlocal cache_dict
        if key not in cache_dict:
            cache_dict[key] = value_fn(*args, **kw)
        return cache_dict[key]
    return wrapper


_compute_shuffled_index = compute_shuffled_index
compute_shuffled_index = cache_this(
    lambda index, index_count, seed: (index, index_count, seed),
    _compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3)

_get_total_active_balance = get_total_active_balance
get_total_active_balance = cache_this(
    lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)),
    _get_total_active_balance, lru_size=10)

_get_base_reward = get_base_reward
get_base_reward = cache_this(
    lambda state, index: (state.validators.hash_tree_root(), state.slot, index),
    _get_base_reward, lru_size=2048)

_get_committee_count_per_slot = get_committee_count_per_slot
get_committee_count_per_slot = cache_this(
    lambda state, epoch: (state.validators.hash_tree_root(), epoch),
    _get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3)

_get_active_validator_indices = get_active_validator_indices
get_active_validator_indices = cache_this(
    lambda state, epoch: (state.validators.hash_tree_root(), epoch),
    _get_active_validator_indices, lru_size=3)

_get_beacon_committee = get_beacon_committee
get_beacon_committee = cache_this(
    lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index),
    _get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)

_get_matching_target_attestations = get_matching_target_attestations
get_matching_target_attestations = cache_this(
    lambda state, epoch: (state.hash_tree_root(), epoch),
    _get_matching_target_attestations, lru_size=10)

_get_matching_head_attestations = get_matching_head_attestations
get_matching_head_attestations = cache_this(
    lambda state, epoch: (state.hash_tree_root(), epoch),
    _get_matching_head_attestations, lru_size=10)

_get_attesting_indices = get_attesting_indices
get_attesting_indices = cache_this(
    lambda state, data, bits: (
        state.randao_mixes.hash_tree_root(),
        state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root()
    ),
    _get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)'''
pysetup/spec_builders/whisk.py
@@ -0,0 +1,20 @@
from .base import BaseSpecBuilder
from ..constants import WHISK


class WhiskSpecBuilder(BaseSpecBuilder):
    fork: str = WHISK

    @classmethod
    def imports(cls, preset_name: str):
        return f'''
from eth2spec.capella import {preset_name} as capella
'''

    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
        # Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof`
        return {
            'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value,
            'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value,
        }
pysetup/typing.py
@@ -0,0 +1,32 @@
from pathlib import Path
from typing import Dict, NamedTuple, Optional, List


class ProtocolDefinition(NamedTuple):
    # just function definitions currently. May expand with configuration vars in future.
    functions: Dict[str, str]


class VariableDefinition(NamedTuple):
    type_name: Optional[str]
    value: str
    comment: Optional[str]  # e.g. "noqa: E501"
    type_hint: Optional[str]  # e.g., "Final"


class SpecObject(NamedTuple):
    functions: Dict[str, str]
    protocols: Dict[str, ProtocolDefinition]
    custom_types: Dict[str, str]
    constant_vars: Dict[str, VariableDefinition]
    preset_vars: Dict[str, VariableDefinition]
    config_vars: Dict[str, VariableDefinition]
    ssz_dep_constants: Dict[str, str]  # the constants that depend on ssz_objects
    ssz_objects: Dict[str, str]
    dataclasses: Dict[str, str]


class BuildTarget(NamedTuple):
    name: str
    preset_paths: List[Path]
    config_path: Path
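These NamedTuples are the contract between setup.py's markdown parser and the builders. For instance, a preset constant parsed from a spec table might land in a `VariableDefinition` roughly like this; the concrete values are invented for illustration:

```python
from pysetup.typing import VariableDefinition

# e.g. a spec row such as `MAX_BLOBS_PER_BLOCK | uint64(2**2)` could parse to:
var = VariableDefinition(
    type_name='uint64',
    value='2**2',
    comment=None,
    type_hint=None,
)
assert f'{var.type_name}({var.value})' == 'uint64(2**2)'
```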
requirements_preinstallation.txt
@@ -0,0 +1,3 @@
pip>=23.1.2
wheel>=0.40.0
setuptools>=68.0.0
setup.py
@@ -4,17 +4,36 @@ from distutils import dir_util
 from distutils.util import convert_path
 from pathlib import Path
 import os
 import re
 import string
 import textwrap
-from typing import Dict, NamedTuple, List, Sequence, Optional, TypeVar, Tuple
-from abc import ABC, abstractmethod
+from typing import Dict, List, Sequence, Optional, Tuple
 import ast
 import subprocess
 import sys
 import copy
 from collections import OrderedDict
 import json
 from functools import reduce
+
+from pysetup.constants import (
+    # code names
+    PHASE0,
+    # misc
+    ETH2_SPEC_COMMENT_PREFIX,
+)
+from pysetup.spec_builders import spec_builders
+from pysetup.typing import (
+    BuildTarget,
+    ProtocolDefinition,
+    SpecObject,
+    VariableDefinition,
+)
+from pysetup.helpers import (
+    combine_spec_objects,
+    dependency_order_class_objects,
+    objects_to_spec,
+    parse_config_vars,
+)
+from pysetup.md_doc_paths import get_md_doc_paths
 
 
 # NOTE: have to programmatically include third-party dependencies in `setup.py`.
@@ -41,104 +60,6 @@ from marko.ext.gfm import gfm
 from marko.ext.gfm.elements import Table
 
 
-# Definitions in context.py
-PHASE0 = 'phase0'
-ALTAIR = 'altair'
-BELLATRIX = 'bellatrix'
-CAPELLA = 'capella'
-DENEB = 'deneb'
-EIP6110 = 'eip6110'
-WHISK = 'whisk'
-
-PREVIOUS_FORK_OF = {
-    PHASE0: None,
-    ALTAIR: PHASE0,
-    BELLATRIX: ALTAIR,
-    CAPELLA: BELLATRIX,
-    DENEB: CAPELLA,
-    EIP6110: DENEB,
-    WHISK: CAPELLA,
-}
-
-ALL_FORKS = list(PREVIOUS_FORK_OF.keys())
-
-IGNORE_SPEC_FILES = [
-    "specs/phase0/deposit-contract.md"
-]
-
-EXTRA_SPEC_FILES = {
-    BELLATRIX: "sync/optimistic.md"
-}
-
-# The helper functions that are used when defining constants
-CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = '''
-def ceillog2(x: int) -> uint64:
-    if x < 1:
-        raise ValueError(f"ceillog2 accepts only positive values, x={x}")
-    return uint64((x - 1).bit_length())
-
-
-def floorlog2(x: int) -> uint64:
-    if x < 1:
-        raise ValueError(f"floorlog2 accepts only positive values, x={x}")
-    return uint64(x.bit_length() - 1)
-'''
-
-
-OPTIMIZED_BLS_AGGREGATE_PUBKEYS = '''
-def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey:
-    return bls.AggregatePKs(pubkeys)
-'''
-
-
-class ProtocolDefinition(NamedTuple):
-    # just function definitions currently. May expand with configuration vars in future.
-    functions: Dict[str, str]
-
-
-class VariableDefinition(NamedTuple):
-    type_name: Optional[str]
-    value: str
-    comment: Optional[str]  # e.g. "noqa: E501"
-    type_hint: Optional[str]  # e.g., "Final"
-
-
-class SpecObject(NamedTuple):
-    functions: Dict[str, str]
-    protocols: Dict[str, ProtocolDefinition]
-    custom_types: Dict[str, str]
-    constant_vars: Dict[str, VariableDefinition]
-    preset_vars: Dict[str, VariableDefinition]
-    config_vars: Dict[str, VariableDefinition]
-    ssz_dep_constants: Dict[str, str]  # the constants that depend on ssz_objects
-    ssz_objects: Dict[str, str]
-    dataclasses: Dict[str, str]
-
-
-def is_post_fork(a, b) -> bool:
-    """
-    Returns true if fork a is after b, or if a == b
-    """
-    if a == b:
-        return True
-
-    prev_fork = PREVIOUS_FORK_OF[a]
-    if prev_fork == b:
-        return True
-    elif prev_fork == None:
-        return False
-    else:
-        return is_post_fork(prev_fork, b)
-
-
-def get_fork_directory(fork):
-    dir1 = f'specs/{fork}'
-    if os.path.exists(dir1):
-        return dir1
-    dir2 = f'specs/_features/{fork}'
-    if os.path.exists(dir2):
-        return dir2
-    raise FileNotFoundError(f"No directory found for fork: {fork}")
-
-
 def _get_name_from_heading(heading: Heading) -> Optional[str]:
     last_child = heading.children[-1]
     if isinstance(last_child, CodeSpan):
@@ -203,13 +124,12 @@ def _load_kzg_trusted_setups(preset_name):
 
     return trusted_setup_G1, trusted_setup_G2, trusted_setup_G1_lagrange, roots_of_unity
 
 
 ALL_KZG_SETUPS = {
     'minimal': _load_kzg_trusted_setups('minimal'),
     'mainnet': _load_kzg_trusted_setups('mainnet')
 }
 
-ETH2_SPEC_COMMENT_PREFIX = "eth2spec:"
 
 
 def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]:
     _, _, title = child._parse_info
@@ -221,7 +141,7 @@ def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]:
     return title[len(ETH2_SPEC_COMMENT_PREFIX):].strip()
 
 
-def _parse_value(name: str, typed_value: str, type_hint: Optional[str]=None) -> VariableDefinition:
+def _parse_value(name: str, typed_value: str, type_hint: Optional[str] = None) -> VariableDefinition:
     comment = None
     if name == "BLS12_381_Q":
         comment = "noqa: E501"
@ -350,677 +270,6 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr
|
|||
)
|
||||
|
||||
|
||||
class SpecBuilder(ABC):
|
||||
@property
|
||||
@abstractmethod
|
||||
def fork(self) -> str:
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def imports(cls, preset_name: str) -> str:
|
||||
"""
|
||||
Import objects from other libraries.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def preparations(cls) -> str:
|
||||
"""
|
||||
Define special types/constants for building pyspec or call functions.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def sundry_functions(cls) -> str:
|
||||
"""
|
||||
The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def execution_engine_cls(cls) -> str:
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
|
||||
"""
|
||||
The constants that are required for SSZ objects.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO
|
||||
"""
|
||||
The constants that are required for custom types.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def build_spec(cls, preset_name: str,
|
||||
source_files: List[Path], preset_files: Sequence[Path], config_file: Path) -> str:
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
#
|
||||
# Phase0SpecBuilder
|
||||
#
|
||||
class Phase0SpecBuilder(SpecBuilder):
|
||||
fork: str = PHASE0
|
||||
|
||||
@classmethod
|
||||
def imports(cls, preset_name: str) -> str:
|
||||
return '''from lru import LRU
|
||||
from dataclasses import (
|
||||
dataclass,
|
||||
field,
|
||||
)
|
||||
from typing import (
|
||||
Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final
|
||||
)
|
||||
|
||||
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
|
||||
from eth2spec.utils.ssz.ssz_typing import (
|
||||
View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256,
|
||||
Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist)
|
||||
from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401
|
||||
from eth2spec.utils import bls
|
||||
from eth2spec.utils.hash_function import hash
|
||||
'''
|
||||
|
||||
@classmethod
|
||||
def preparations(cls) -> str:
|
||||
return '''
|
||||
SSZObject = TypeVar('SSZObject', bound=View)
|
||||
'''
|
||||
|
||||
@classmethod
|
||||
def sundry_functions(cls) -> str:
|
||||
return '''
|
||||
def get_eth1_data(block: Eth1Block) -> Eth1Data:
|
||||
"""
|
||||
A stub function return mocking Eth1Data.
|
||||
"""
|
||||
return Eth1Data(
|
||||
deposit_root=block.deposit_root,
|
||||
deposit_count=block.deposit_count,
|
||||
block_hash=hash_tree_root(block))
|
||||
|
||||
|
||||
def cache_this(key_fn, value_fn, lru_size): # type: ignore
|
||||
cache_dict = LRU(size=lru_size)
|
||||
|
||||
def wrapper(*args, **kw): # type: ignore
|
||||
key = key_fn(*args, **kw)
|
||||
nonlocal cache_dict
|
||||
if key not in cache_dict:
|
||||
cache_dict[key] = value_fn(*args, **kw)
|
||||
return cache_dict[key]
|
||||
return wrapper
|
||||
|
||||
|
||||
_compute_shuffled_index = compute_shuffled_index
|
||||
compute_shuffled_index = cache_this(
|
||||
lambda index, index_count, seed: (index, index_count, seed),
|
||||
_compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3)
|
||||
|
||||
_get_total_active_balance = get_total_active_balance
|
||||
get_total_active_balance = cache_this(
|
||||
lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)),
|
||||
_get_total_active_balance, lru_size=10)
|
||||
|
||||
_get_base_reward = get_base_reward
|
||||
get_base_reward = cache_this(
|
||||
lambda state, index: (state.validators.hash_tree_root(), state.slot, index),
|
||||
_get_base_reward, lru_size=2048)
|
||||
|
||||
_get_committee_count_per_slot = get_committee_count_per_slot
|
||||
get_committee_count_per_slot = cache_this(
|
||||
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
|
||||
_get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3)
|
||||
|
||||
_get_active_validator_indices = get_active_validator_indices
|
||||
get_active_validator_indices = cache_this(
|
||||
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
|
||||
_get_active_validator_indices, lru_size=3)
|
||||
|
||||
_get_beacon_committee = get_beacon_committee
|
||||
get_beacon_committee = cache_this(
|
||||
lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index),
|
||||
_get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)
|
||||
|
||||
_get_matching_target_attestations = get_matching_target_attestations
|
||||
get_matching_target_attestations = cache_this(
|
||||
lambda state, epoch: (state.hash_tree_root(), epoch),
|
||||
_get_matching_target_attestations, lru_size=10)
|
||||
|
||||
_get_matching_head_attestations = get_matching_head_attestations
|
||||
get_matching_head_attestations = cache_this(
|
||||
lambda state, epoch: (state.hash_tree_root(), epoch),
|
||||
_get_matching_head_attestations, lru_size=10)
|
||||
|
||||
_get_attesting_indices = get_attesting_indices
|
||||
get_attesting_indices = cache_this(
|
||||
lambda state, data, bits: (
|
||||
state.randao_mixes.hash_tree_root(),
|
||||
state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root()
|
||||
),
|
||||
_get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)'''
|
||||
|
||||
|
||||
@classmethod
|
||||
def execution_engine_cls(cls) -> str:
|
||||
return ""
|
||||
|
||||
|
||||
@classmethod
|
||||
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
|
||||
return {}
|
||||
|
||||
@classmethod
|
||||
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:
|
||||
return {}
|
||||
|
||||
@classmethod
|
||||
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
|
||||
return functions
|
||||
|
||||
@classmethod
|
||||
def build_spec(cls, preset_name: str,
|
||||
source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str:
|
||||
return _build_spec(preset_name, cls.fork, source_files, preset_files, config_file)
|
||||
|
||||
|
||||
#
|
||||
# AltairSpecBuilder
|
||||
#
|
||||
class AltairSpecBuilder(Phase0SpecBuilder):
|
||||
fork: str = ALTAIR
|
||||
|
||||
@classmethod
|
||||
def imports(cls, preset_name: str) -> str:
|
||||
return super().imports(preset_name) + '\n' + f'''
|
||||
from typing import NewType, Union as PyUnion
|
||||
|
||||
from eth2spec.phase0 import {preset_name} as phase0
|
||||
from eth2spec.test.helpers.merkle import build_proof
|
||||
from eth2spec.utils.ssz.ssz_typing import Path
|
||||
'''
|
||||
|
||||
@classmethod
|
||||
def preparations(cls):
|
||||
return super().preparations() + '\n' + '''
|
||||
SSZVariableName = str
|
||||
GeneralizedIndex = NewType('GeneralizedIndex', int)
|
||||
'''
|
||||
|
||||
@classmethod
|
||||
def sundry_functions(cls) -> str:
|
||||
return super().sundry_functions() + '\n\n' + '''
|
||||
def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex:
|
||||
ssz_path = Path(ssz_class)
|
||||
for item in path:
|
||||
ssz_path = ssz_path / item
|
||||
return GeneralizedIndex(ssz_path.gindex())
|
||||
|
||||
|
||||
def compute_merkle_proof_for_state(state: BeaconState,
|
||||
index: GeneralizedIndex) -> Sequence[Bytes32]:
|
||||
return build_proof(state.get_backing(), index)'''
|
||||
|
||||
|
||||
@classmethod
|
||||
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
|
||||
constants = {
|
||||
'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
|
||||
'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
|
||||
'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
|
||||
}
|
||||
return {**super().hardcoded_ssz_dep_constants(), **constants}
|
||||
|
||||
@classmethod
|
||||
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
|
||||
if "eth_aggregate_pubkeys" in functions:
|
||||
functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip()
|
||||
return super().implement_optimizations(functions)
|
||||
|
||||
#
# BellatrixSpecBuilder
#
class BellatrixSpecBuilder(AltairSpecBuilder):
    fork: str = BELLATRIX

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from typing import Protocol
from eth2spec.altair import {preset_name} as altair
from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector
'''

    @classmethod
    def preparations(cls):
        return super().preparations()

    @classmethod
    def sundry_functions(cls) -> str:
        return super().sundry_functions() + '\n\n' + """
ExecutionState = Any


def get_pow_block(hash: Bytes32) -> Optional[PowBlock]:
    return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0))


def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState:
    pass


def get_pow_chain_head() -> PowBlock:
    pass"""

    @classmethod
    def execution_engine_cls(cls) -> str:
        return "\n\n" + """
class NoopExecutionEngine(ExecutionEngine):

    def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
        return True

    def notify_forkchoice_updated(self: ExecutionEngine,
                                  head_block_hash: Hash32,
                                  safe_block_hash: Hash32,
                                  finalized_block_hash: Hash32,
                                  payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
        pass

    def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
        # pylint: disable=unused-argument
        raise NotImplementedError("no default block production")

    def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
        return True

    def verify_and_notify_new_payload(self: ExecutionEngine,
                                      new_payload_request: NewPayloadRequest) -> bool:
        return True


EXECUTION_ENGINE = NoopExecutionEngine()"""

    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
        constants = {
            'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value,
        }
        return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}


#
# CapellaSpecBuilder
#
class CapellaSpecBuilder(BellatrixSpecBuilder):
    fork: str = CAPELLA

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from eth2spec.bellatrix import {preset_name} as bellatrix
'''

    @classmethod
    def sundry_functions(cls) -> str:
        return super().sundry_functions() + '\n\n' + '''
def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
                                        index: GeneralizedIndex) -> Sequence[Bytes32]:
    return build_proof(body.get_backing(), index)'''

    @classmethod
    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
        constants = {
            'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)',
        }
        return {**super().hardcoded_ssz_dep_constants(), **constants}


#
# DenebSpecBuilder
#
class DenebSpecBuilder(CapellaSpecBuilder):
    fork: str = DENEB

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''

    @classmethod
    def preparations(cls):
        return super().preparations() + '\n' + '''
T = TypeVar('T')  # For generic function
'''

    @classmethod
    def sundry_functions(cls) -> str:
        return super().sundry_functions() + '\n\n' + '''
def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]:
    # pylint: disable=unused-argument
    return ("TEST", "TEST")'''

    @classmethod
    def execution_engine_cls(cls) -> str:
        return "\n\n" + """
class NoopExecutionEngine(ExecutionEngine):

    def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
        return True

    def notify_forkchoice_updated(self: ExecutionEngine,
                                  head_block_hash: Hash32,
                                  safe_block_hash: Hash32,
                                  finalized_block_hash: Hash32,
                                  payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
        pass

    def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
        # pylint: disable=unused-argument
        raise NotImplementedError("no default block production")

    def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
        return True

    def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool:
        return True

    def verify_and_notify_new_payload(self: ExecutionEngine,
                                      new_payload_request: NewPayloadRequest) -> bool:
        return True


EXECUTION_ENGINE = NoopExecutionEngine()"""

    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
        constants = {
            'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value,
            'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value,
            'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value,
        }
        return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}


#
# EIP6110SpecBuilder
#
class EIP6110SpecBuilder(DenebSpecBuilder):
    fork: str = EIP6110

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from eth2spec.deneb import {preset_name} as deneb
'''


#
# WhiskSpecBuilder
#
class WhiskSpecBuilder(CapellaSpecBuilder):
    fork: str = WHISK

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''

    @classmethod
    def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
        # Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof`
        constants = {
            'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.constant_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value,
            'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.constant_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value,
        }
        return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}


spec_builders = {
    builder.fork: builder
    for builder in (Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder, EIP6110SpecBuilder, WhiskSpecBuilder)
}


def is_byte_vector(value: str) -> bool:
    return value.startswith(('ByteVector'))


def make_function_abstract(protocol_def: ProtocolDefinition, key: str):
    function = protocol_def.functions[key].split('"""')
    protocol_def.functions[key] = function[0] + "..."


def objects_to_spec(preset_name: str,
                    spec_object: SpecObject,
                    builder: SpecBuilder,
                    ordered_class_objects: Dict[str, str]) -> str:
    """
    Given all the objects that constitute a spec, combine them into a single pyfile.
    """
    new_type_definitions = (
        '\n\n'.join(
            [
                f"class {key}({value}):\n    pass\n" if not is_byte_vector(value) else f"class {key}({value}):  # type: ignore\n    pass\n"
                for key, value in spec_object.custom_types.items()
            ]
        )
    )

    def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str:
        abstract_functions = ["verify_and_notify_new_payload"]
        for key in protocol_def.functions.keys():
            if key in abstract_functions:
                make_function_abstract(protocol_def, key)

        protocol = f"class {protocol_name}(Protocol):"
        for fn_source in protocol_def.functions.values():
            fn_source = fn_source.replace("self: "+protocol_name, "self")
            protocol += "\n\n" + textwrap.indent(fn_source, "    ")
        return protocol

    protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items())
    for k in list(spec_object.functions):
        if k in [
            "ceillog2",
            "floorlog2",
            "compute_merkle_proof_for_block_body",
            "compute_merkle_proof_for_state",
        ]:
            del spec_object.functions[k]
    functions = builder.implement_optimizations(spec_object.functions)
    functions_spec = '\n\n\n'.join(functions.values())

    # Access global dict of config vars for runtime configurables
    for name in spec_object.config_vars.keys():
        functions_spec = re.sub(r"\b%s\b" % name, 'config.' + name, functions_spec)

    def format_config_var(name: str, vardef: VariableDefinition) -> str:
        if vardef.type_name is None:
            out = f'{name}={vardef.value},'
        else:
            out = f'{name}={vardef.type_name}({vardef.value}),'
        if vardef.comment is not None:
            out += f'  # {vardef.comment}'
        return out

    config_spec = 'class Configuration(NamedTuple):\n'
    config_spec += '    PRESET_BASE: str\n'
    config_spec += '\n'.join(f'    {k}: {v.type_name if v.type_name is not None else "int"}'
                             for k, v in spec_object.config_vars.items())
    config_spec += '\n\n\nconfig = Configuration(\n'
    config_spec += f'    PRESET_BASE="{preset_name}",\n'
    config_spec += '\n'.join('    ' + format_config_var(k, v) for k, v in spec_object.config_vars.items())
    config_spec += '\n)\n'

    def format_constant(name: str, vardef: VariableDefinition) -> str:
        if vardef.type_name is None:
            if vardef.type_hint is None:
                out = f'{name} = {vardef.value}'
            else:
                out = f'{name}: {vardef.type_hint} = {vardef.value}'
        else:
            out = f'{name} = {vardef.type_name}({vardef.value})'
        if vardef.comment is not None:
            out += f'  # {vardef.comment}'
        return out

    constant_vars_spec = '# Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items())
    preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items())
    ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values())
    ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_ssz_dep_constants()[x]), builder.hardcoded_ssz_dep_constants()))
    ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), builder.hardcoded_ssz_dep_constants()))
    custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_custom_type_dep_constants(spec_object)[x]), builder.hardcoded_custom_type_dep_constants(spec_object)))
    spec = (
        builder.imports(preset_name)
        + builder.preparations()
        + '\n\n' + f"fork = \'{builder.fork}\'\n"
        # The constants that some SSZ containers require. Need to be defined before `new_type_definitions`
        + ('\n\n' + custom_type_dep_constants + '\n' if custom_type_dep_constants != '' else '')
        + '\n\n' + new_type_definitions
        + '\n' + CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS
        # The constants that some SSZ containers require. Need to be defined before `constants_spec`
        + ('\n\n' + ssz_dep_constants if ssz_dep_constants != '' else '')
        + '\n\n' + constant_vars_spec
        + '\n\n' + preset_vars_spec
        + '\n\n\n' + config_spec
        + '\n\n' + ordered_class_objects_spec
        + ('\n\n\n' + protocols_spec if protocols_spec != '' else '')
        + '\n\n\n' + functions_spec
        + '\n\n' + builder.sundry_functions()
        + builder.execution_engine_cls()
        # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
        # the same as the spec definition.
        + ('\n\n\n' + ssz_dep_constants_verification if ssz_dep_constants_verification != '' else '')
        + '\n'
    )
    return spec


def combine_protocols(old_protocols: Dict[str, ProtocolDefinition],
                      new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]:
    for key, value in new_protocols.items():
        if key not in old_protocols:
            old_protocols[key] = value
        else:
            functions = combine_dicts(old_protocols[key].functions, value.functions)
            old_protocols[key] = ProtocolDefinition(functions=functions)
    return old_protocols


T = TypeVar('T')


def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]:
    return {**old_dict, **new_dict}


ignored_dependencies = [
    'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature',
    'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
    'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
    'bytes', 'byte', 'ByteList', 'ByteVector',
    'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set',
    'Optional', 'Sequence',
]


def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None:
    """
    Determines which SSZ Object is dependent on which other and orders them appropriately
    """
    items = list(objects.items())
    for key, value in items:
        dependencies = []
        for line in value.split('\n'):
            if not re.match(r'\s+\w+: .+', line):
                continue  # skip whitespace etc.
            line = line[line.index(':') + 1:]  # strip of field name
            if '#' in line:
                line = line[:line.index('#')]  # strip of comment
            dependencies.extend(re.findall(r'(\w+)', line))  # catch all legible words, potential dependencies
        dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies)  # filter out constants
        dependencies = filter(lambda x: x not in ignored_dependencies, dependencies)
        dependencies = filter(lambda x: x not in custom_types, dependencies)
        for dep in dependencies:
            key_list = list(objects.keys())
            for item in [dep, key] + key_list[key_list.index(dep)+1:]:
                objects[item] = objects.pop(item)


def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
    """
    Takes in old spec and new spec ssz objects, combines them,
    and returns the newer versions of the objects in dependency order.
    """
    for key, value in new_objects.items():
        old_objects[key] = value
    return old_objects


def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
    """
    Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
    """
    protocols = combine_protocols(spec0.protocols, spec1.protocols)
    functions = combine_dicts(spec0.functions, spec1.functions)
    custom_types = combine_dicts(spec0.custom_types, spec1.custom_types)
    constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars)
    preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars)
    config_vars = combine_dicts(spec0.config_vars, spec1.config_vars)
    ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants)
    ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types)
    dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses)
    return SpecObject(
        functions=functions,
        protocols=protocols,
        custom_types=custom_types,
        constant_vars=constant_vars,
        preset_vars=preset_vars,
        config_vars=config_vars,
        ssz_dep_constants=ssz_dep_constants,
        ssz_objects=ssz_objects,
        dataclasses=dataclasses,
    )


def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]:
    """
    Parses a dict of basic str/int/list types into a dict for insertion into the spec code.
    """
    out: Dict[str, str] = dict()
    for k, v in conf.items():
        if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'):
            # Represent byte data with string, to avoid misinterpretation as big-endian int.
            # Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer.
            out[k] = f"'{v}'"
        else:
            out[k] = str(int(v))
    return out

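
# A usage sketch (illustrative values, not part of this file): hex strings and the
# PRESET_BASE/CONFIG_NAME entries stay quoted, while numeric strings are normalized to ints.
#   parse_config_vars({'PRESET_BASE': 'mainnet', 'GENESIS_FORK_VERSION': '0x00000000', 'MIN_GENESIS_ACTIVE_VALIDATOR_COUNT': '16384'})
#   -> {'PRESET_BASE': "'mainnet'", 'GENESIS_FORK_VERSION': "'0x00000000'", 'MIN_GENESIS_ACTIVE_VALIDATOR_COUNT': '16384'}
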

def load_preset(preset_files: Sequence[Path]) -> Dict[str, str]:
    """
    Loads a directory of preset files and merges the result into one preset.
@ -1048,8 +297,11 @@ def load_config(config_path: Path) -> Dict[str, str]:
    return parse_config_vars(config_data)


def _build_spec(preset_name: str, fork: str,
                source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str:
def build_spec(fork: str,
               preset_name: str,
               source_files: Sequence[Path],
               preset_files: Sequence[Path],
               config_file: Path) -> str:
    preset = load_preset(preset_files)
    config = load_config(config_file)
    all_specs = [get_spec(spec, preset, config, preset_name) for spec in source_files]

@ -1066,13 +318,7 @@ def _build_spec(preset_name: str, fork: str,
    new_objects = copy.deepcopy(class_objects)
    dependency_order_class_objects(class_objects, spec_object.custom_types)

    return objects_to_spec(preset_name, spec_object, spec_builders[fork], class_objects)


class BuildTarget(NamedTuple):
    name: str
    preset_paths: List[Path]
    config_path: Path
    return objects_to_spec(preset_name, spec_object, fork, class_objects)


class PySpecCommand(Command):

@ -1111,20 +357,7 @@ class PySpecCommand(Command):
        if len(self.md_doc_paths) == 0:
            print("no paths were specified, using default markdown file paths for pyspec"
                  " build (spec fork: %s)" % self.spec_fork)
            self.md_doc_paths = ""

            for fork in ALL_FORKS:
                if is_post_fork(self.spec_fork, fork):
                    # Append all files in fork directory recursively
                    for root, dirs, files in os.walk(get_fork_directory(fork)):
                        for filename in files:
                            filepath = os.path.join(root, filename)
                            if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
                                self.md_doc_paths += filepath + "\n"
                    # Append extra files if any
                    if fork in EXTRA_SPEC_FILES:
                        self.md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n"

            self.md_doc_paths = get_md_doc_paths(self.spec_fork)
            if len(self.md_doc_paths) == 0:
                raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork)

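# A plausible sketch of the new get_md_doc_paths helper (an assumption -- the helper
# is defined elsewhere in this commit), mirroring the inlined loop it replaces above:
#
# def get_md_doc_paths(spec_fork: str) -> str:
#     md_doc_paths = ""
#     for fork in ALL_FORKS:
#         if is_post_fork(spec_fork, fork):
#             for root, dirs, files in os.walk(get_fork_directory(fork)):
#                 for filename in files:
#                     filepath = os.path.join(root, filename)
#                     if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
#                         md_doc_paths += filepath + "\n"
#             if fork in EXTRA_SPEC_FILES:
#                 md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n"
#     return md_doc_paths
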
@ -1157,8 +390,13 @@ class PySpecCommand(Command):
        dir_util.mkpath(self.out_dir)

        for (name, preset_paths, config_path) in self.parsed_build_targets:
            spec_str = spec_builders[self.spec_fork].build_spec(
                name, self.parsed_md_doc_paths, preset_paths, config_path)
            spec_str = build_spec(
                spec_builders[self.spec_fork].fork,
                name,
                self.parsed_md_doc_paths,
                preset_paths,
                config_path,
            )
            if self.dry_run:
                self.announce('dry run successfully prepared contents for spec.'
                              f' out dir: "{self.out_dir}", spec fork: "{self.spec_fork}", build target: "{name}"')

@ -1219,6 +457,7 @@ class PyspecDevCommand(Command):
        for spec_fork in spec_builders:
            self.run_pyspec_cmd(spec_fork=spec_fork)


commands = {
    'pyspec': PySpecCommand,
    'build_py': BuildPyCommand,

@ -1280,6 +519,6 @@ setup(
        "lru-dict==1.2.0",
        MARKO_VERSION,
        "py_arkworks_bls12381==0.3.4",
        "curdleproofs @ git+https://github.com/nalinbhardwaj/curdleproofs.pie@805d06785b6ff35fde7148762277dd1ae678beeb#egg=curdleproofs&subdirectory=curdleproofs",
        "curdleproofs==0.1.1",
    ]
)
@ -1,72 +0,0 @@
# EIP-4788 -- The Beacon Chain

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Containers](#containers)
  - [Extended Containers](#extended-containers)
    - [`ExecutionPayload`](#executionpayload)
    - [`ExecutionPayloadHeader`](#executionpayloadheader)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

TODO

## Containers

### Extended Containers

#### `ExecutionPayload`

```python
class ExecutionPayload(Container):
    # Execution block header fields
    parent_hash: Hash32
    fee_recipient: ExecutionAddress  # 'beneficiary' in the yellow paper
    state_root: Bytes32
    receipts_root: Bytes32
    logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
    prev_randao: Bytes32  # 'difficulty' in the yellow paper
    block_number: uint64  # 'number' in the yellow paper
    gas_limit: uint64
    gas_used: uint64
    timestamp: uint64
    extra_data: ByteList[MAX_EXTRA_DATA_BYTES]
    base_fee_per_gas: uint256
    # Extra payload fields
    block_hash: Hash32  # Hash of execution block
    transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD]
    withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD]
    parent_beacon_block_root: Root  # [New in EIP-4788]
```

#### `ExecutionPayloadHeader`

```python
class ExecutionPayloadHeader(Container):
    # Execution block header fields
    parent_hash: Hash32
    fee_recipient: ExecutionAddress
    state_root: Bytes32
    receipts_root: Bytes32
    logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
    prev_randao: Bytes32
    block_number: uint64
    gas_limit: uint64
    gas_used: uint64
    timestamp: uint64
    extra_data: ByteList[MAX_EXTRA_DATA_BYTES]
    base_fee_per_gas: uint256
    # Extra payload fields
    block_hash: Hash32  # Hash of execution block
    transactions_root: Root
    withdrawals_root: Root
    parent_beacon_block_root: Root  # [New in EIP-4788]
```
@ -1,88 +0,0 @@
# EIP-4788 -- Honest Validator

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Prerequisites](#prerequisites)
- [Helpers](#helpers)
- [Protocols](#protocols)
  - [`ExecutionEngine`](#executionengine)
    - [Modified `get_payload`](#modified-get_payload)
- [Beacon chain responsibilities](#beacon-chain-responsibilities)
  - [Block proposal](#block-proposal)
    - [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody)
      - [ExecutionPayload](#executionpayload)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This document represents the changes to be made in the code of an "honest validator" to implement the EIP-4788 feature.

## Prerequisites

This document is an extension of the [Capella -- Honest Validator](../capella/validator.md) guide.
All behaviors and definitions defined in this document, and documents it extends, carry over unless explicitly noted or overridden.

All terminology, constants, functions, and protocol mechanics defined in the updated Beacon Chain doc of [Capella](../capella/beacon-chain.md) are requisite for this document and used throughout.
Please see the related Beacon Chain doc before continuing and use it as a reference throughout.

## Helpers

## Protocols

### `ExecutionEngine`

#### Modified `get_payload`

`get_payload` returns the upgraded EIP-4788 `ExecutionPayload` type.
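
A hedged sketch of the modified method (the `GetPayloadResponse` and `PayloadId` names follow the pyspec's Bellatrix `ExecutionEngine` protocol conventions; this sketch is illustrative, not the literal spec text):

```python
def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
    """
    Return the EIP-4788 ``ExecutionPayload`` for the given ``payload_id``,
    which now carries ``parent_beacon_block_root`` alongside the Capella fields.
    """
    ...
```
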
## Beacon chain responsibilities

All validator responsibilities remain unchanged other than those noted below.

### Block proposal

#### Constructing the `BeaconBlockBody`

##### ExecutionPayload

`ExecutionPayload`s are constructed as they were in Capella, except that the parent beacon block root is also supplied.

*Note*: In this section, `state` is the state of the slot for the block proposal _without_ the block yet applied.
That is, `state` is the `previous_state` processed through any empty slots up to the assigned slot using `process_slots(previous_state, slot)`.
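
As a concrete sketch of that note (illustrative; assumes the pyspec's `copy()` helper on SSZ objects):

```python
# Derive the proposal pre-state: advance the post-parent state through any empty slots.
state = previous_state.copy()
process_slots(state, slot)
```
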
*Note*: The only change made to `prepare_execution_payload` is to add the parent beacon block root as an additional
parameter to the `PayloadAttributes`.

```python
def prepare_execution_payload(state: BeaconState,
                              safe_block_hash: Hash32,
                              finalized_block_hash: Hash32,
                              suggested_fee_recipient: ExecutionAddress,
                              execution_engine: ExecutionEngine) -> Optional[PayloadId]:
    # Verify consistency of the parent hash with respect to the previous execution payload header
    parent_hash = state.latest_execution_payload_header.block_hash

    # Set the forkchoice head and initiate the payload build process
    payload_attributes = PayloadAttributes(
        timestamp=compute_timestamp_at_slot(state, state.slot),
        prev_randao=get_randao_mix(state, get_current_epoch(state)),
        suggested_fee_recipient=suggested_fee_recipient,
        withdrawals=get_expected_withdrawals(state),
        parent_beacon_block_root=hash_tree_root(state.latest_block_header),  # [New in EIP-4788]
    )
    return execution_engine.notify_forkchoice_updated(
        head_block_hash=parent_hash,
        safe_block_hash=safe_block_hash,
        finalized_block_hash=finalized_block_hash,
        payload_attributes=payload_attributes,
    )
```
@ -91,8 +91,8 @@ class ExecutionPayload(Container):
    block_hash: Hash32
    transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD]
    withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD]
    data_gas_used: uint64
    excess_data_gas: uint64
    blob_gas_used: uint64
    excess_blob_gas: uint64
    deposit_receipts: List[DepositReceipt, MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD]  # [New in EIP6110]
```

@ -117,8 +117,8 @@ class ExecutionPayloadHeader(Container):
    block_hash: Hash32
    transactions_root: Root
    withdrawals_root: Root
    data_gas_used: uint64
    excess_data_gas: uint64
    blob_gas_used: uint64
    excess_blob_gas: uint64
    deposit_receipts_root: Root  # [New in EIP6110]
```

@ -224,7 +224,7 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt)
        state.deposit_receipts_start_index = deposit_receipt.index

    apply_deposit(
        state=state,
        state=state,
        pubkey=deposit_receipt.pubkey,
        withdrawal_credentials=deposit_receipt.withdrawal_credentials,
        amount=deposit_receipt.amount,

@ -251,7 +251,11 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
    # Verify the execution payload is valid
    versioned_hashes = [kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments]
    assert execution_engine.verify_and_notify_new_payload(
        NewPayloadRequest(execution_payload=payload, versioned_hashes=versioned_hashes)
        NewPayloadRequest(
            execution_payload=payload,
            versioned_hashes=versioned_hashes,
            parent_beacon_block_root=state.latest_block_header.parent_root,
        )
    )
    # Cache execution payload header
    state.latest_execution_payload_header = ExecutionPayloadHeader(

@ -270,8 +274,8 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
        block_hash=payload.block_hash,
        transactions_root=hash_tree_root(payload.transactions),
        withdrawals_root=hash_tree_root(payload.withdrawals),
        data_gas_used=payload.data_gas_used,
        excess_data_gas=payload.excess_data_gas,
        blob_gas_used=payload.blob_gas_used,
        excess_blob_gas=payload.excess_blob_gas,
        deposit_receipts_root=hash_tree_root(payload.deposit_receipts),  # [New in EIP6110]
    )
```
@ -88,8 +88,8 @@ def upgrade_to_eip6110(pre: deneb.BeaconState) -> BeaconState:
        block_hash=pre.latest_execution_payload_header.block_hash,
        transactions_root=pre.latest_execution_payload_header.transactions_root,
        withdrawals_root=pre.latest_execution_payload_header.withdrawals_root,
        data_gas_used=uint64(0),
        excess_data_gas=uint64(0),
        blob_gas_used=uint64(0),
        excess_blob_gas=uint64(0),
        deposit_receipts_root=Root(),  # [New in EIP-6110]
    )
    post = BeaconState(

@ -1,4 +1,4 @@
EIP-6914 -- The Beacon Chain
# EIP-6914 -- The Beacon Chain

## Table of contents

@ -0,0 +1,36 @@
# EIP-6914 -- Fork Choice

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Fork choice](#fork-choice)
  - [Handlers](#handlers)
    - [`on_reused_index`](#on_reused_index)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This document specifies the fork choice modifications introduced by EIP-6914.

## Fork choice

A new handler is added with this upgrade:

- `on_reused_index(store, index)` whenever a validator index `index: ValidatorIndex` is reused. That is, [`get_index_for_new_validator()`](./beacon-chain.md#get_index_for_new_validator) provides an index due to a return value of `True` from [`is_reusable_validator()`](./beacon-chain.md#is_reusable_validator).

This new handler is used to update the list of equivocating indices to be synchronized with the canonical chain. A usage sketch follows the handler definition below.

### Handlers

#### `on_reused_index`

```python
def on_reused_index(store: Store, index: ValidatorIndex) -> None:
    store.equivocating_indices.discard(index)
```
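
A brief usage sketch of the handler above (the index value is illustrative):

```python
# A reused index no longer marks an equivocating validator.
store.equivocating_indices.add(ValidatorIndex(7))
on_reused_index(store, ValidatorIndex(7))
assert ValidatorIndex(7) not in store.equivocating_indices
```
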
@ -0,0 +1,300 @@
# EIP-7002 -- The Beacon Chain

## Table of contents

<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Preset](#preset)
  - [Max operations per block](#max-operations-per-block)
- [Containers](#containers)
  - [New containers](#new-containers)
    - [`ExecutionLayerExit`](#executionlayerexit)
  - [Extended Containers](#extended-containers)
    - [`ExecutionPayload`](#executionpayload)
    - [`ExecutionPayloadHeader`](#executionpayloadheader)
    - [`BeaconState`](#beaconstate)
- [Beacon chain state transition function](#beacon-chain-state-transition-function)
  - [Block processing](#block-processing)
    - [Execution payload](#execution-payload)
      - [Modified `process_execution_payload`](#modified-process_execution_payload)
    - [Operations](#operations)
      - [Modified `process_operations`](#modified-process_operations)
      - [New `process_execution_layer_exit`](#new-process_execution_layer_exit)
- [Testing](#testing)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Introduction

This is the beacon chain specification of the execution layer triggerable exits feature.

This mechanism relies on the changes proposed by [EIP-7002](http://eips.ethereum.org/EIPS/eip-7002).

*Note:* This specification is built upon [Capella](../../capella/beacon-chain.md) and is under active development.

## Preset

### Max operations per block

| Name | Value |
| - | - |
| `MAX_EXECUTION_LAYER_EXITS` | `2**4` (= 16) |

## Containers

### New containers

#### `ExecutionLayerExit`

```python
class ExecutionLayerExit(Container):
    source_address: ExecutionAddress
    validator_pubkey: BLSPubkey
```

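For illustration, a minimal construction of this container (the byte values are placeholders, not meaningful keys or addresses):

```python
execution_layer_exit = ExecutionLayerExit(
    source_address=ExecutionAddress(b'\x11' * 20),  # 20-byte execution address
    validator_pubkey=BLSPubkey(b'\x22' * 48),       # 48-byte BLS public key
)
```
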
### Extended Containers

#### `ExecutionPayload`

```python
class ExecutionPayload(Container):
    # Execution block header fields
    parent_hash: Hash32
    fee_recipient: ExecutionAddress
    state_root: Bytes32
    receipts_root: Bytes32
    logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
    prev_randao: Bytes32
    block_number: uint64
    gas_limit: uint64
    gas_used: uint64
    timestamp: uint64
    extra_data: ByteList[MAX_EXTRA_DATA_BYTES]
    base_fee_per_gas: uint256
    # Extra payload fields
    block_hash: Hash32
    transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD]
    withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD]
    exits: List[ExecutionLayerExit, MAX_EXECUTION_LAYER_EXITS]  # [New in EIP7002]
```

#### `ExecutionPayloadHeader`

```python
class ExecutionPayloadHeader(Container):
    # Execution block header fields
    parent_hash: Hash32
    fee_recipient: ExecutionAddress
    state_root: Bytes32
    receipts_root: Bytes32
    logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
    prev_randao: Bytes32
    block_number: uint64
    gas_limit: uint64
    gas_used: uint64
    timestamp: uint64
    extra_data: ByteList[MAX_EXTRA_DATA_BYTES]
    base_fee_per_gas: uint256
    # Extra payload fields
    block_hash: Hash32
    transactions_root: Root
    withdrawals_root: Root
    exits_root: Root  # [New in EIP7002]
```

#### `BeaconState`

```python
class BeaconState(Container):
    # Versioning
    genesis_time: uint64
    genesis_validators_root: Root
    slot: Slot
    fork: Fork
    # History
    latest_block_header: BeaconBlockHeader
    block_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT]
    state_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT]
    historical_roots: List[Root, HISTORICAL_ROOTS_LIMIT]
    # Eth1
    eth1_data: Eth1Data
    eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH]
    eth1_deposit_index: uint64
    # Registry
    validators: List[Validator, VALIDATOR_REGISTRY_LIMIT]
    balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT]
    # Randomness
    randao_mixes: Vector[Bytes32, EPOCHS_PER_HISTORICAL_VECTOR]
    # Slashings
    slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR]  # Per-epoch sums of slashed effective balances
    # Participation
    previous_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT]
    current_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT]
    # Finality
    justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH]  # Bit set for every recent justified epoch
    previous_justified_checkpoint: Checkpoint
    current_justified_checkpoint: Checkpoint
    finalized_checkpoint: Checkpoint
    # Inactivity
    inactivity_scores: List[uint64, VALIDATOR_REGISTRY_LIMIT]
    # Sync
    current_sync_committee: SyncCommittee
    next_sync_committee: SyncCommittee
    # Execution
    latest_execution_payload_header: ExecutionPayloadHeader  # [Modified in EIP7002]
    # Withdrawals
    next_withdrawal_index: WithdrawalIndex
    next_withdrawal_validator_index: ValidatorIndex
    # Deep history valid from Capella onwards
    historical_summaries: List[HistoricalSummary, HISTORICAL_ROOTS_LIMIT]
```

## Beacon chain state transition function

### Block processing

#### Execution payload

##### Modified `process_execution_payload`

```python
def process_execution_payload(state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine) -> None:
    payload = body.execution_payload
    # Verify consistency of the parent hash with respect to the previous execution payload header
    assert payload.parent_hash == state.latest_execution_payload_header.block_hash
    # Verify prev_randao
    assert payload.prev_randao == get_randao_mix(state, get_current_epoch(state))
    # Verify timestamp
    assert payload.timestamp == compute_timestamp_at_slot(state, state.slot)
    # Verify the execution payload is valid
    assert execution_engine.verify_and_notify_new_payload(NewPayloadRequest(execution_payload=payload))
    # Cache execution payload header
    state.latest_execution_payload_header = ExecutionPayloadHeader(
        parent_hash=payload.parent_hash,
        fee_recipient=payload.fee_recipient,
        state_root=payload.state_root,
        receipts_root=payload.receipts_root,
        logs_bloom=payload.logs_bloom,
        prev_randao=payload.prev_randao,
        block_number=payload.block_number,
        gas_limit=payload.gas_limit,
        gas_used=payload.gas_used,
        timestamp=payload.timestamp,
        extra_data=payload.extra_data,
        base_fee_per_gas=payload.base_fee_per_gas,
        block_hash=payload.block_hash,
        transactions_root=hash_tree_root(payload.transactions),
        withdrawals_root=hash_tree_root(payload.withdrawals),
        exits_root=hash_tree_root(payload.exits),  # [New in EIP7002]
    )
```

#### Operations

##### Modified `process_operations`

*Note*: The function `process_operations` is modified to process `ExecutionLayerExit` operations included in the block.

```python
def process_operations(state: BeaconState, body: BeaconBlockBody) -> None:
    # Verify that outstanding deposits are processed up to the maximum number of deposits
    assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index)

    def for_ops(operations: Sequence[Any], fn: Callable[[BeaconState, Any], None]) -> None:
        for operation in operations:
            fn(state, operation)

    for_ops(body.proposer_slashings, process_proposer_slashing)
    for_ops(body.attester_slashings, process_attester_slashing)
    for_ops(body.attestations, process_attestation)
    for_ops(body.deposits, process_deposit)
    for_ops(body.voluntary_exits, process_voluntary_exit)
    for_ops(body.execution_payload.exits, process_execution_layer_exit)  # [New in EIP7002]
    for_ops(body.bls_to_execution_changes, process_bls_to_execution_change)
```

##### New `process_execution_layer_exit`

```python
def process_execution_layer_exit(state: BeaconState, execution_layer_exit: ExecutionLayerExit) -> None:
    validator_pubkeys = [v.pubkey for v in state.validators]
    validator_index = ValidatorIndex(validator_pubkeys.index(execution_layer_exit.validator_pubkey))
    validator = state.validators[validator_index]

    # Verify withdrawal credentials
    is_execution_address = validator.withdrawal_credentials[:1] == ETH1_ADDRESS_WITHDRAWAL_PREFIX
    is_correct_source_address = validator.withdrawal_credentials[12:] == execution_layer_exit.source_address
    if not (is_execution_address and is_correct_source_address):
        return
    # Verify the validator is active
    if not is_active_validator(validator, get_current_epoch(state)):
        return
    # Verify exit has not been initiated
    if validator.exit_epoch != FAR_FUTURE_EPOCH:
        return
    # Verify the validator has been active long enough
    if get_current_epoch(state) < validator.activation_epoch + SHARD_COMMITTEE_PERIOD:
        return

    # Initiate exit
    initiate_validator_exit(state, validator_index)
```

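For intuition, the credential check above corresponds to the 0x01 withdrawal-credential layout (a sketch with placeholder bytes; `0x01` is the value of `ETH1_ADDRESS_WITHDRAWAL_PREFIX` defined in phase0):

```python
# 0x01 credentials: [0x01 prefix][11 zero bytes][20-byte execution address]
prefix = b'\x01'  # value of ETH1_ADDRESS_WITHDRAWAL_PREFIX
source_address = b'\x11' * 20  # placeholder address
withdrawal_credentials = prefix + b'\x00' * 11 + source_address
assert withdrawal_credentials[:1] == prefix
assert withdrawal_credentials[12:] == source_address
```
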
## Testing

*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure EIP-7002 testing only.
Modifications include:
1. Use `EIP7002_FORK_VERSION` as the previous and current fork version.
2. Utilize the EIP-7002 `BeaconBlockBody` when constructing the initial `latest_block_header`.

```python
def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32,
                                      eth1_timestamp: uint64,
                                      deposits: Sequence[Deposit],
                                      execution_payload_header: ExecutionPayloadHeader=ExecutionPayloadHeader()
                                      ) -> BeaconState:
    fork = Fork(
        previous_version=EIP7002_FORK_VERSION,  # [Modified in EIP7002] for testing only
        current_version=EIP7002_FORK_VERSION,  # [Modified in EIP7002]
        epoch=GENESIS_EPOCH,
    )
    state = BeaconState(
        genesis_time=eth1_timestamp + GENESIS_DELAY,
        fork=fork,
        eth1_data=Eth1Data(block_hash=eth1_block_hash, deposit_count=uint64(len(deposits))),
        latest_block_header=BeaconBlockHeader(body_root=hash_tree_root(BeaconBlockBody())),
        randao_mixes=[eth1_block_hash] * EPOCHS_PER_HISTORICAL_VECTOR,  # Seed RANDAO with Eth1 entropy
    )

    # Process deposits
    leaves = list(map(lambda deposit: deposit.data, deposits))
    for index, deposit in enumerate(deposits):
        deposit_data_list = List[DepositData, 2**DEPOSIT_CONTRACT_TREE_DEPTH](*leaves[:index + 1])
        state.eth1_data.deposit_root = hash_tree_root(deposit_data_list)
        process_deposit(state, deposit)

    # Process activations
    for index, validator in enumerate(state.validators):
        balance = state.balances[index]
        validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE)
        if validator.effective_balance == MAX_EFFECTIVE_BALANCE:
            validator.activation_eligibility_epoch = GENESIS_EPOCH
            validator.activation_epoch = GENESIS_EPOCH

    # Set genesis validators root for domain separation and chain versioning
    state.genesis_validators_root = hash_tree_root(state.validators)

    # Fill in sync committees
    # Note: A duplicate committee is assigned for the current and next committee at genesis
    state.current_sync_committee = get_next_sync_committee(state)
    state.next_sync_committee = get_next_sync_committee(state)

    # Initialize the execution payload header
    state.latest_execution_payload_header = execution_payload_header

    return state
```
@ -0,0 +1,140 @@
# EIP-7002 -- Fork Logic

**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Introduction](#introduction)
- [Configuration](#configuration)
- [Helper functions](#helper-functions)
  - [Misc](#misc)
    - [Modified `compute_fork_version`](#modified-compute_fork_version)
- [Fork to EIP-7002](#fork-to-eip-7002)
  - [Fork trigger](#fork-trigger)
  - [Upgrading the state](#upgrading-the-state)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

## Introduction

This document describes the process of the EIP-7002 upgrade.

## Configuration

Warning: this configuration is not definitive.

| Name | Value |
| - | - |
| `EIP7002_FORK_VERSION` | `Version('0x05000000')` |
| `EIP7002_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** |

## Helper functions

### Misc

#### Modified `compute_fork_version`

```python
def compute_fork_version(epoch: Epoch) -> Version:
    """
    Return the fork version at the given ``epoch``.
    """
    if epoch >= EIP7002_FORK_EPOCH:
        return EIP7002_FORK_VERSION
    if epoch >= CAPELLA_FORK_EPOCH:
        return CAPELLA_FORK_VERSION
    if epoch >= BELLATRIX_FORK_EPOCH:
        return BELLATRIX_FORK_VERSION
    if epoch >= ALTAIR_FORK_EPOCH:
        return ALTAIR_FORK_VERSION
    return GENESIS_FORK_VERSION
```

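A quick sanity check of the ordering above (illustrative; assumes the fork epochs are configured in increasing order):

```python
assert compute_fork_version(EIP7002_FORK_EPOCH) == EIP7002_FORK_VERSION
assert compute_fork_version(CAPELLA_FORK_EPOCH) == CAPELLA_FORK_VERSION  # assuming CAPELLA_FORK_EPOCH < EIP7002_FORK_EPOCH
```
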
## Fork to EIP-7002

### Fork trigger

TBD. This fork is defined for testing purposes; the EIP may be combined with another consensus-layer upgrade.
For now, we assume the condition will be triggered at epoch `EIP7002_FORK_EPOCH`.

Note that for pure EIP-7002 networks, we don't apply `upgrade_to_eip7002` since they start with EIP-7002 version logic.

### Upgrading the state

If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == EIP7002_FORK_EPOCH`,
an irregular state change is made to upgrade to EIP-7002. A sketch of this trigger precedes the upgrade function below.

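A hedged sketch of that trigger (illustrative client-side logic, not spec text):

```python
if state.slot % SLOTS_PER_EPOCH == 0 and compute_epoch_at_slot(state.slot) == EIP7002_FORK_EPOCH:
    state = upgrade_to_eip7002(state)
```
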
```python
def upgrade_to_eip7002(pre: capella.BeaconState) -> BeaconState:
    epoch = capella.get_current_epoch(pre)
    latest_execution_payload_header = ExecutionPayloadHeader(
        parent_hash=pre.latest_execution_payload_header.parent_hash,
        fee_recipient=pre.latest_execution_payload_header.fee_recipient,
        state_root=pre.latest_execution_payload_header.state_root,
        receipts_root=pre.latest_execution_payload_header.receipts_root,
        logs_bloom=pre.latest_execution_payload_header.logs_bloom,
        prev_randao=pre.latest_execution_payload_header.prev_randao,
        block_number=pre.latest_execution_payload_header.block_number,
        gas_limit=pre.latest_execution_payload_header.gas_limit,
        gas_used=pre.latest_execution_payload_header.gas_used,
        timestamp=pre.latest_execution_payload_header.timestamp,
        extra_data=pre.latest_execution_payload_header.extra_data,
        base_fee_per_gas=pre.latest_execution_payload_header.base_fee_per_gas,
        block_hash=pre.latest_execution_payload_header.block_hash,
        transactions_root=pre.latest_execution_payload_header.transactions_root,
        withdrawals_root=pre.latest_execution_payload_header.withdrawals_root,
        exits_root=Root(),  # [New in EIP-7002]
    )
    post = BeaconState(
        # Versioning
        genesis_time=pre.genesis_time,
        genesis_validators_root=pre.genesis_validators_root,
        slot=pre.slot,
        fork=Fork(
            previous_version=pre.fork.current_version,
            current_version=EIP7002_FORK_VERSION,  # [Modified in EIP-7002]
            epoch=epoch,
        ),
        # History
        latest_block_header=pre.latest_block_header,
        block_roots=pre.block_roots,
        state_roots=pre.state_roots,
        historical_roots=pre.historical_roots,
        # Eth1
        eth1_data=pre.eth1_data,
        eth1_data_votes=pre.eth1_data_votes,
        eth1_deposit_index=pre.eth1_deposit_index,
        # Registry
        validators=pre.validators,
        balances=pre.balances,
        # Randomness
        randao_mixes=pre.randao_mixes,
        # Slashings
        slashings=pre.slashings,
        # Participation
        previous_epoch_participation=pre.previous_epoch_participation,
        current_epoch_participation=pre.current_epoch_participation,
        # Finality
        justification_bits=pre.justification_bits,
        previous_justified_checkpoint=pre.previous_justified_checkpoint,
        current_justified_checkpoint=pre.current_justified_checkpoint,
        finalized_checkpoint=pre.finalized_checkpoint,
        # Inactivity
        inactivity_scores=pre.inactivity_scores,
        # Sync
        current_sync_committee=pre.current_sync_committee,
        next_sync_committee=pre.next_sync_committee,
        # Execution-layer
        latest_execution_payload_header=latest_execution_payload_header,  # [Modified in EIP-7002]
        # Withdrawals
        next_withdrawal_index=pre.next_withdrawal_index,
        next_withdrawal_validator_index=pre.next_withdrawal_validator_index,
        # Deep history valid from Capella onwards
        historical_summaries=pre.historical_summaries,
    )

    return post
```
@ -10,6 +10,9 @@

- [Introduction](#introduction)
- [Constants](#constants)
  - [Domain types](#domain-types)
- [Preset](#preset)
- [Configuration](#configuration)
- [Cryptography](#cryptography)
  - [BLS](#bls)
  - [Curdleproofs and opening proofs](#curdleproofs-and-opening-proofs)

@ -29,21 +32,13 @@

## Introduction

This document details the beacon chain additions and changes to support the Whisk SSLE,
This document details the beacon chain additions and changes to support the Whisk SSLE.

*Note:* This specification is built upon [Capella](../../capella/beacon-chain.md) and is under active development.

## Constants

| Name | Value | Description |
| ---------------------------------- | -------------------------- | ----------------------------------------------------------- |
| `WHISK_CANDIDATE_TRACKERS_COUNT` | `uint64(2**14)` (= 16,384) | number of candidate trackers |
| `WHISK_PROPOSER_TRACKERS_COUNT` | `uint64(2**13)` (= 8,192) | number of proposer trackers |
| `WHISK_EPOCHS_PER_SHUFFLING_PHASE` | `Epoch(2**8)` (= 256) | epochs per shuffling phase |
| `WHISK_VALIDATORS_PER_SHUFFLE` | `uint64(2**7)` (= 128) | number of validators shuffled per shuffle step |
| `WHISK_PROPOSER_SELECTION_GAP` | `Epoch(2)` | gap between proposer selection and the block proposal phase |
| `WHISK_MAX_SHUFFLE_PROOF_SIZE` | `uint64(2**15)` | max size of a shuffle proof |
| `WHISK_MAX_OPENING_PROOF_SIZE` | `uint64(2**10)` | max size of an opening proof |
### Domain types

| Name | Value |
| ---------------------------------- | -------------------------- |

@ -51,6 +46,24 @@ This document details the beacon chain additions and changes of to support the W
| `DOMAIN_WHISK_SHUFFLE` | `DomainType('0x07100000')` |
| `DOMAIN_WHISK_PROPOSER_SELECTION` | `DomainType('0x07200000')` |

## Preset

| Name | Value | Description |
| ---------------------------------- | -------------------------- | ----------------------------------------------------------- |
| `CURDLEPROOFS_N_BLINDERS` | `uint64(4)` | number of blinders for curdleproofs |
| `WHISK_CANDIDATE_TRACKERS_COUNT` | `uint64(2**14)` (= 16,384) | number of candidate trackers |
| `WHISK_PROPOSER_TRACKERS_COUNT` | `uint64(2**13)` (= 8,192) | number of proposer trackers |
| `WHISK_VALIDATORS_PER_SHUFFLE` | `uint64(2**7 - 4)` (= 124) | number of validators shuffled per shuffle step |
| `WHISK_MAX_SHUFFLE_PROOF_SIZE` | `uint64(2**15)` | max size of a shuffle proof |
| `WHISK_MAX_OPENING_PROOF_SIZE` | `uint64(2**10)` | max size of an opening proof |

## Configuration

| Name | Value | Description |
| ---------------------------------- | -------------------------- | ----------------------------------------------------------- |
| `WHISK_EPOCHS_PER_SHUFFLING_PHASE` | `Epoch(2**8)` (= 256) | epochs per shuffling phase |
| `WHISK_PROPOSER_SELECTION_GAP` | `Epoch(2)` | gap between proposer selection and the block proposal phase |

## Cryptography

### BLS

@ -87,7 +100,7 @@ def bytes_to_bls_field(b: Bytes32) -> BLSFieldElement:

### Curdleproofs and opening proofs

Note that Curdleproofs (Whisk Shuffle Proofs), the tracker opening proofs and all related data structures and verifier code (along with tests) are specified in the [curdleproofs.pie](https://github.com/nalinbhardwaj/curdleproofs.pie/tree/verifier-only) repository.
Note that Curdleproofs (Whisk Shuffle Proofs), the tracker opening proofs and all related data structures and verifier code (along with tests) are specified in the [curdleproofs.pie](https://github.com/nalinbhardwaj/curdleproofs.pie/tree/dev) repository.

```python
def IsValidWhiskShuffleProof(pre_shuffle_trackers: Sequence[WhiskTracker],

@ -96,7 +109,7 @@ def IsValidWhiskShuffleProof(pre_shuffle_trackers: Sequence[WhiskTracker],
                             shuffle_proof: WhiskShuffleProof) -> bool:
    """
    Verify `post_shuffle_trackers` is a permutation of `pre_shuffle_trackers`.
    Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/tree/verifier-only.
    Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/blob/dev/curdleproofs/curdleproofs/whisk_interface.py.
    """
    # pylint: disable=unused-argument
    return True

@ -108,7 +121,7 @@ def IsValidWhiskOpeningProof(tracker: WhiskTracker,
                             tracker_proof: WhiskTrackerProof) -> bool:
    """
    Verify knowledge of `k` such that `tracker.k_r_G == k * tracker.r_G` and `k_commitment == k * BLS_G1_GENERATOR`.
    Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/tree/verifier-only.
    Defined in https://github.com/nalinbhardwaj/curdleproofs.pie/blob/dev/curdleproofs/curdleproofs/whisk_interface.py.
    """
    # pylint: disable=unused-argument
    return True

@ -143,7 +156,7 @@ class BeaconState(Container):
    eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH]
    eth1_deposit_index: uint64
    # Registry
    validators: List[Validator, VALIDATOR_REGISTRY_LIMIT]  # [Modified in Whisk]
    validators: List[Validator, VALIDATOR_REGISTRY_LIMIT]
    balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT]
    # Randomness
    randao_mixes: Vector[Bytes32, EPOCHS_PER_HISTORICAL_VECTOR]

@ -177,17 +190,24 @@ class BeaconState(Container):
```

```python
def select_whisk_trackers(state: BeaconState, epoch: Epoch) -> None:
def select_whisk_proposer_trackers(state: BeaconState, epoch: Epoch) -> None:
    # Select proposer trackers from candidate trackers
    proposer_seed = get_seed(state, epoch - WHISK_PROPOSER_SELECTION_GAP, DOMAIN_WHISK_PROPOSER_SELECTION)
    proposer_seed = get_seed(
        state,
        Epoch(saturating_sub(epoch, WHISK_PROPOSER_SELECTION_GAP)),
        DOMAIN_WHISK_PROPOSER_SELECTION
    )
    for i in range(WHISK_PROPOSER_TRACKERS_COUNT):
        index = compute_shuffled_index(uint64(i), uint64(len(state.whisk_candidate_trackers)), proposer_seed)
        state.whisk_proposer_trackers[i] = state.whisk_candidate_trackers[index]
```

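`saturating_sub` is referenced above but not shown in this hunk; a plausible definition (an assumption inferred from its use, clamping at zero so the epoch cannot underflow):

```python
def saturating_sub(a: int, b: int) -> int:
    """Return ``a - b``, saturating at zero instead of underflowing."""
    return a - b if a > b else 0
```
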
|
||||
```python
|
||||
def select_whisk_candidate_trackers(state: BeaconState, epoch: Epoch) -> None:
|
||||
# Select candidate trackers from active validator trackers
|
||||
active_validator_indices = get_active_validator_indices(state, epoch)
|
||||
for i in range(WHISK_CANDIDATE_TRACKERS_COUNT):
|
||||
seed = hash(get_seed(state, epoch, DOMAIN_WHISK_CANDIDATE_SELECTION) + uint_to_bytes(i))
|
||||
seed = hash(get_seed(state, epoch, DOMAIN_WHISK_CANDIDATE_SELECTION) + uint_to_bytes(uint64(i)))
|
||||
candidate_index = compute_proposer_index(state, active_validator_indices, seed) # sample by effective balance
|
||||
state.whisk_candidate_trackers[i] = state.whisk_trackers[candidate_index]
|
||||
```
|
||||
|
@@ -196,7 +216,8 @@ def select_whisk_trackers(state: BeaconState, epoch: Epoch) -> None:

def process_whisk_updates(state: BeaconState) -> None:
    next_epoch = Epoch(get_current_epoch(state) + 1)
    if next_epoch % WHISK_EPOCHS_PER_SHUFFLING_PHASE == 0:  # select trackers at the start of shuffling phases
        select_whisk_trackers(state, next_epoch)
        select_whisk_proposer_trackers(state, next_epoch)
        select_whisk_candidate_trackers(state, next_epoch)
```

```python

@@ -289,7 +310,7 @@ class BeaconBlockBody(capella.BeaconBlockBody):

```python
def get_shuffle_indices(randao_reveal: BLSSignature) -> Sequence[uint64]:
    """
    Given a `randao_reveal` return the list of indices that got shuffled from the entire candidate set
    Given a `randao_reveal` return the list of indices that got shuffled from the entire candidate set.
    """
    indices = []
    for i in range(0, WHISK_VALIDATORS_PER_SHUFFLE):

@@ -303,30 +324,25 @@ def get_shuffle_indices(randao_reveal: BLSSignature) -> Sequence[uint64]:

```python
def process_shuffled_trackers(state: BeaconState, body: BeaconBlockBody) -> None:
    # Check the shuffle proof
    shuffle_indices = get_shuffle_indices(body.randao_reveal)
    pre_shuffle_trackers = [state.whisk_candidate_trackers[i] for i in shuffle_indices]

    shuffle_epoch = get_current_epoch(state) % WHISK_EPOCHS_PER_SHUFFLING_PHASE
    if shuffle_epoch + WHISK_PROPOSER_SELECTION_GAP + 1 >= WHISK_EPOCHS_PER_SHUFFLING_PHASE:
        # Require trackers set to zero during cooldown
        assert body.whisk_post_shuffle_trackers == Vector[WhiskTracker, WHISK_VALIDATORS_PER_SHUFFLE]()
        assert body.whisk_shuffle_proof_M_commitment == BLSG1Point()
        assert body.whisk_shuffle_proof == WhiskShuffleProof()
        post_shuffle_trackers = pre_shuffle_trackers
    else:
        # Require shuffled trackers during shuffle
        shuffle_indices = get_shuffle_indices(body.randao_reveal)
        pre_shuffle_trackers = [state.whisk_candidate_trackers[i] for i in shuffle_indices]
        assert IsValidWhiskShuffleProof(
            pre_shuffle_trackers,
            body.whisk_post_shuffle_trackers,
            body.whisk_shuffle_proof_M_commitment,
            body.whisk_shuffle_proof,
        )
        post_shuffle_trackers = body.whisk_post_shuffle_trackers

    # Shuffle candidate trackers
    for i, shuffle_index in enumerate(shuffle_indices):
        state.whisk_candidate_trackers[shuffle_index] = post_shuffle_trackers[i]
    # Shuffle candidate trackers
    for i, shuffle_index in enumerate(shuffle_indices):
        state.whisk_candidate_trackers[shuffle_index] = body.whisk_post_shuffle_trackers[i]
```

```python

@@ -446,7 +462,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex:

## Testing

*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure Whisk testing only.
*Note*: The function `initialize_beacon_state_from_eth1` is modified purely for Whisk testing.

```python
def initialize_beacon_state_from_eth1(eth1_block_hash: Hash32,

@@ -53,27 +53,13 @@ The upgrade occurs after the completion of the inner loop of `process_slots` tha

This ensures that we drop right into the beginning of the shuffling phase but without `process_whisk_epoch()` triggering for this Whisk run. Hence we handle all the setup ourselves in `upgrade_to_whisk()` below.

```python
def whisk_candidate_selection(state: BeaconState, epoch: Epoch) -> None:
    # TODO
    # pylint: disable=unused-argument
    pass
```

```python
def whisk_proposer_selection(state: BeaconState, epoch: Epoch) -> None:
    # TODO
    # pylint: disable=unused-argument
    pass
```

```python
def upgrade_to_whisk(pre: bellatrix.BeaconState) -> BeaconState:
def upgrade_to_whisk(pre: capella.BeaconState) -> BeaconState:
    # Compute initial unsafe trackers for all validators
    ks = [get_initial_whisk_k(ValidatorIndex(validator_index), 0) for validator_index in range(len(pre.validators))]
    whisk_k_commitments = [get_k_commitment(k) for k in ks]
    whisk_trackers = [get_initial_tracker(k) for k in ks]

    epoch = bellatrix.get_current_epoch(pre)
    epoch = get_current_epoch(pre)
    post = BeaconState(
        # Versioning
        genesis_time=pre.genesis_time,

@@ -115,6 +101,11 @@ def upgrade_to_whisk(pre: bellatrix.BeaconState) -> BeaconState:

        next_sync_committee=pre.next_sync_committee,
        # Execution-layer
        latest_execution_payload_header=pre.latest_execution_payload_header,
        # Withdrawals
        next_withdrawal_index=pre.next_withdrawal_index,
        next_withdrawal_validator_index=pre.next_withdrawal_validator_index,
        # Deep history valid from Capella onwards
        historical_summaries=pre.historical_summaries,
        # Whisk
        whisk_proposer_trackers=[WhiskTracker() for _ in range(WHISK_PROPOSER_TRACKERS_COUNT)],  # [New in Whisk]
        whisk_candidate_trackers=[WhiskTracker() for _ in range(WHISK_CANDIDATE_TRACKERS_COUNT)],  # [New in Whisk]

@@ -124,12 +115,12 @@ def upgrade_to_whisk(pre: bellatrix.BeaconState) -> BeaconState:

    # Do a candidate selection followed by a proposer selection so that we have proposers for the upcoming day
    # Use an old epoch when selecting candidates so that we don't get the same seed as in the next candidate selection
    whisk_candidate_selection(post, epoch - WHISK_PROPOSER_SELECTION_GAP - 1)
    whisk_proposer_selection(post, epoch)
    select_whisk_candidate_trackers(post, Epoch(saturating_sub(epoch, WHISK_PROPOSER_SELECTION_GAP + 1)))
    select_whisk_proposer_trackers(post, epoch)

    # Do a final round of candidate selection.
    # We need it so that we have something to shuffle over the upcoming shuffling phase.
    whisk_candidate_selection(post, epoch)
    select_whisk_candidate_trackers(post, epoch)

    return post
```
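
For orientation, a toy calculation (my own illustration, using a hypothetical gap value; the real `WHISK_PROPOSER_SELECTION_GAP` is defined elsewhere in the Whisk spec) of why the first candidate selection above uses an older epoch:

```python
# Hypothetical value, for illustration only.
WHISK_PROPOSER_SELECTION_GAP = 2

def saturating_sub(a: int, b: int) -> int:
    return a - b if a > b else 0

epoch = 10  # epoch at which the upgrade runs
# The first candidate selection is seeded from an older epoch...
assert saturating_sub(epoch, WHISK_PROPOSER_SELECTION_GAP + 1) == 7
# ...so its seed cannot collide with the final candidate selection seeded at `epoch` itself.
```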

@@ -45,7 +45,7 @@

- [Modified `slash_validator`](#modified-slash_validator)
- [Block processing](#block-processing)
- [Modified `process_attestation`](#modified-process_attestation)
- [Modified `apply_deposit`](#modified-apply_deposit)
- [Modified `add_validator_to_registry`](#modified-add_validator_to_registry)
- [Sync aggregate processing](#sync-aggregate-processing)
- [Epoch processing](#epoch-processing)
- [Justification and finalization](#justification-and-finalization)

@@ -508,40 +508,23 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None:

    increase_balance(state, get_beacon_proposer_index(state), proposer_reward)
```

#### Modified `apply_deposit`
#### Modified `add_validator_to_registry`

*Note*: The function `apply_deposit` is modified to initialize `inactivity_scores`, `previous_epoch_participation`, and `current_epoch_participation`.
*Note*: The function `add_validator_to_registry` is modified to initialize `inactivity_scores`, `previous_epoch_participation`, and `current_epoch_participation`.

```python
def apply_deposit(state: BeaconState,
                  pubkey: BLSPubkey,
                  withdrawal_credentials: Bytes32,
                  amount: uint64,
                  signature: BLSSignature) -> None:
    validator_pubkeys = [validator.pubkey for validator in state.validators]
    if pubkey not in validator_pubkeys:
        # Verify the deposit signature (proof of possession) which is not checked by the deposit contract
        deposit_message = DepositMessage(
            pubkey=pubkey,
            withdrawal_credentials=withdrawal_credentials,
            amount=amount,
        )
        domain = compute_domain(DOMAIN_DEPOSIT)  # Fork-agnostic domain since deposits are valid across forks
        signing_root = compute_signing_root(deposit_message, domain)
        # Initialize validator if the deposit signature is valid
        if bls.Verify(pubkey, signing_root, signature):
            index = get_index_for_new_validator(state)
            validator = get_validator_from_deposit(pubkey, withdrawal_credentials, amount)
            set_or_append_list(state.validators, index, validator)
            set_or_append_list(state.balances, index, amount)
            # [New in Altair]
            set_or_append_list(state.previous_epoch_participation, index, ParticipationFlags(0b0000_0000))
            set_or_append_list(state.current_epoch_participation, index, ParticipationFlags(0b0000_0000))
            set_or_append_list(state.inactivity_scores, index, uint64(0))
        else:
            # Increase balance by deposit amount
            index = ValidatorIndex(validator_pubkeys.index(pubkey))
            increase_balance(state, index, amount)
def add_validator_to_registry(state: BeaconState,
                              pubkey: BLSPubkey,
                              withdrawal_credentials: Bytes32,
                              amount: uint64) -> None:
    index = get_index_for_new_validator(state)
    validator = get_validator_from_deposit(pubkey, withdrawal_credentials, amount)
    set_or_append_list(state.validators, index, validator)
    set_or_append_list(state.balances, index, amount)
    # [New in Altair]
    set_or_append_list(state.previous_epoch_participation, index, ParticipationFlags(0b0000_0000))
    set_or_append_list(state.current_epoch_participation, index, ParticipationFlags(0b0000_0000))
    set_or_append_list(state.inactivity_scores, index, uint64(0))
```

#### Sync aggregate processing

@@ -22,8 +22,6 @@ This document describes the process of the first upgrade of the beacon chain: th

## Configuration

Warning: this configuration is not definitive.

| Name | Value |
| - | - |
| `ALTAIR_FORK_VERSION` | `Version('0x01000000')` |

@@ -13,7 +13,6 @@ Altair adds new messages, topics and data to the Req-Resp, Gossip and Discovery

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Warning](#warning)
- [Modifications in Altair](#modifications-in-altair)
- [MetaData](#metadata)
- [The gossip domain: gossipsub](#the-gossip-domain-gossipsub)

@@ -38,11 +37,6 @@ Altair adds new messages, topics and data to the Req-Resp, Gossip and Discovery

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Warning

This document is currently illustrative for early Altair testnets and some parts are subject to change.
Refer to the note in the [validator guide](./validator.md) for further details.

## Modifications in Altair

### MetaData

@@ -10,7 +10,6 @@ This is an accompanying document to [Altair -- The Beacon Chain](./beacon-chain.

- [Introduction](#introduction)
- [Prerequisites](#prerequisites)
- [Warning](#warning)
- [Constants](#constants)
- [Misc](#misc)
- [Containers](#containers)

@@ -63,10 +62,6 @@ Block proposers incorporate the (aggregated) sync committee signatures into each

All terminology, constants, functions, and protocol mechanics defined in the [Altair -- The Beacon Chain](./beacon-chain.md) doc are requisite for this document and used throughout.
Please see this document before continuing and use it as a reference throughout.

## Warning

This document is currently illustrative for early Altair testnets and some parts are subject to change, especially pending implementation and profiling of Altair testnets.

## Constants

### Misc

@@ -194,7 +194,8 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:

    # Add proposer score boost if the block is timely
    time_into_slot = (store.time - store.genesis_time) % SECONDS_PER_SLOT
    is_before_attesting_interval = time_into_slot < SECONDS_PER_SLOT // INTERVALS_PER_SLOT
    if get_current_slot(store) == block.slot and is_before_attesting_interval:
    is_first_block = store.proposer_boost_root == Root()
    if get_current_slot(store) == block.slot and is_before_attesting_interval and is_first_block:
        store.proposer_boost_root = hash_tree_root(block)

# Update checkpoints in store if necessary
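
A worked scenario (my own illustration, not text from the spec): with the added `is_first_block` guard, only the first timely block seen for a slot claims the proposer boost:

```python
# Toy model (illustrative only) of the guard added above.
ROOT_EMPTY = b"\x00" * 32  # stands in for Root()

def maybe_set_boost(current_boost_root: bytes, block_root: bytes) -> bytes:
    is_first_block = current_boost_root == ROOT_EMPTY
    return block_root if is_first_block else current_boost_root

boost = ROOT_EMPTY
boost = maybe_set_boost(boost, b"\x01" * 32)  # first timely block: boosted
boost = maybe_set_boost(boost, b"\x02" * 32)  # second timely block: no overwrite
assert boost == b"\x01" * 32
```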

@@ -22,8 +22,6 @@ This document describes the process of Bellatrix upgrade.

## Configuration

Warning: this configuration is not definitive.

| Name | Value |
| - | - |
| `BELLATRIX_FORK_VERSION` | `Version('0x02000000')` |

@@ -12,7 +12,6 @@ Readers should understand the Phase 0 and Altair documents and use them as a bas

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Warning](#warning)
- [Modifications in Bellatrix](#modifications-in-bellatrix)
- [The gossip domain: gossipsub](#the-gossip-domain-gossipsub)
- [Topics and messages](#topics-and-messages)

@@ -33,11 +32,6 @@ Readers should understand the Phase 0 and Altair documents and use them as a bas

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

## Warning

This document is currently illustrative for early Bellatrix testnets and some parts are subject to change.
Refer to the note in the [validator guide](./validator.md) for further details.

## Modifications in Bellatrix

### The gossip domain: gossipsub

@@ -106,7 +106,8 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:

    # Add proposer score boost if the block is timely
    time_into_slot = (store.time - store.genesis_time) % SECONDS_PER_SLOT
    is_before_attesting_interval = time_into_slot < SECONDS_PER_SLOT // INTERVALS_PER_SLOT
    if get_current_slot(store) == block.slot and is_before_attesting_interval:
    is_first_block = store.proposer_boost_root == Root()
    if get_current_slot(store) == block.slot and is_before_attesting_interval and is_first_block:
        store.proposer_boost_root = hash_tree_root(block)

    # Update checkpoints in store if necessary

@@ -22,8 +22,6 @@ This document describes the process of the Capella upgrade.

## Configuration

Warning: this configuration is not definitive.

| Name | Value |
| - | - |
| `CAPELLA_FORK_VERSION` | `Version('0x03000000')` |

@@ -24,14 +24,19 @@

- [Helper functions](#helper-functions)
- [Misc](#misc)
- [`kzg_commitment_to_versioned_hash`](#kzg_commitment_to_versioned_hash)
- [Beacon state accessors](#beacon-state-accessors)
- [Modified `get_attestation_participation_flag_indices`](#modified-get_attestation_participation_flag_indices)
- [Beacon chain state transition function](#beacon-chain-state-transition-function)
- [Execution engine](#execution-engine)
- [Request data](#request-data)
- [Modified `NewPayloadRequest`](#modified-newpayloadrequest)
- [Engine APIs](#engine-apis)
- [`is_valid_block_hash`](#is_valid_block_hash)
- [`is_valid_versioned_hashes`](#is_valid_versioned_hashes)
- [Modified `notify_new_payload`](#modified-notify_new_payload)
- [Modified `verify_and_notify_new_payload`](#modified-verify_and_notify_new_payload)
- [Block processing](#block-processing)
- [Modified `process_attestation`](#modified-process_attestation)
- [Execution payload](#execution-payload)
- [Modified `process_execution_payload`](#modified-process_execution_payload)
- [Modified `process_voluntary_exit`](#modified-process_voluntary_exit)

@@ -43,8 +48,10 @@

## Introduction

Deneb is a consensus-layer upgrade containing a number of features, including:
* [EIP-4788](https://eips.ethereum.org/EIPS/eip-4788): Beacon block root in the EVM
* [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844): Shard Blob Transactions scale data-availability of Ethereum in a simple, forwards-compatible manner
* [EIP-7044](https://github.com/ethereum/EIPs/pull/7044): Perpetually Valid Signed Voluntary Exits
* [EIP-7045](https://eips.ethereum.org/EIPS/eip-7045): Increase Max Attestation Inclusion Slot

## Custom types

@@ -78,11 +85,10 @@ Deneb is a consensus-layer upgrade containing a number of features. Including:

| `MAX_BLOBS_PER_BLOCK` | `uint64(6)` | *[New in Deneb:EIP4844]* maximum number of blobs in a single block limited by `MAX_BLOB_COMMITMENTS_PER_BLOCK` |

*Note*: The blob transactions are packed into the execution payload by the EL/builder with their corresponding blobs being independently transmitted
and are limited by `MAX_DATA_GAS_PER_BLOCK // DATA_GAS_PER_BLOB`. However the CL limit is independently defined by `MAX_BLOBS_PER_BLOCK`.
and are limited by `MAX_BLOB_GAS_PER_BLOCK // GAS_PER_BLOB`. However the CL limit is independently defined by `MAX_BLOBS_PER_BLOCK`.
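
For scale, my own arithmetic using the EIP-4844 execution-layer parameters at the time of writing (these constants are not defined in this document and are assumptions here):

```python
# Assumed EL constants from EIP-4844, for illustration only.
MAX_BLOB_GAS_PER_BLOCK = 786432
GAS_PER_BLOB = 131072
# The EL-side bound works out to the same 6 blobs as the CL's MAX_BLOBS_PER_BLOCK.
assert MAX_BLOB_GAS_PER_BLOCK // GAS_PER_BLOB == 6
```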

## Configuration

## Containers

### Extended containers

@@ -130,8 +136,8 @@ class ExecutionPayload(Container):

    block_hash: Hash32  # Hash of execution block
    transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD]
    withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD]
    data_gas_used: uint64  # [New in Deneb:EIP4844]
    excess_data_gas: uint64  # [New in Deneb:EIP4844]
    blob_gas_used: uint64  # [New in Deneb:EIP4844]
    excess_blob_gas: uint64  # [New in Deneb:EIP4844]
```

#### `ExecutionPayloadHeader`

@@ -155,8 +161,8 @@ class ExecutionPayloadHeader(Container):

    block_hash: Hash32  # Hash of execution block
    transactions_root: Root
    withdrawals_root: Root
    data_gas_used: uint64  # [New in Deneb:EIP4844]
    excess_data_gas: uint64  # [New in Deneb:EIP4844]
    blob_gas_used: uint64  # [New in Deneb:EIP4844]
    excess_blob_gas: uint64  # [New in Deneb:EIP4844]
```

## Helper functions

@@ -170,6 +176,41 @@ def kzg_commitment_to_versioned_hash(kzg_commitment: KZGCommitment) -> Versioned

    return VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]
```
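
A concrete sketch (mine; it assumes `hash` is SHA-256 and `VERSIONED_HASH_VERSION_KZG` is the byte `0x01`, as in the published Deneb spec):

```python
import hashlib

VERSIONED_HASH_VERSION_KZG = b"\x01"  # assumed version byte

def kzg_commitment_to_versioned_hash(kzg_commitment: bytes) -> bytes:
    # Replace the first byte of the SHA-256 digest with the version byte.
    return VERSIONED_HASH_VERSION_KZG + hashlib.sha256(kzg_commitment).digest()[1:]

commitment = b"\xc0" + b"\x00" * 47  # 48-byte compressed point at infinity
versioned_hash = kzg_commitment_to_versioned_hash(commitment)
assert len(versioned_hash) == 32 and versioned_hash[0] == 0x01
```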

### Beacon state accessors

#### Modified `get_attestation_participation_flag_indices`

*Note:* The function `get_attestation_participation_flag_indices` is modified for EIP-7045 to set the `TIMELY_TARGET_FLAG` for any correct target attestation, regardless of `inclusion_delay`, as a baseline reward for any speed of inclusion of an attestation that contributes to justification of the contained chain.

```python
def get_attestation_participation_flag_indices(state: BeaconState,
                                               data: AttestationData,
                                               inclusion_delay: uint64) -> Sequence[int]:
    """
    Return the flag indices that are satisfied by an attestation.
    """
    if data.target.epoch == get_current_epoch(state):
        justified_checkpoint = state.current_justified_checkpoint
    else:
        justified_checkpoint = state.previous_justified_checkpoint

    # Matching roots
    is_matching_source = data.source == justified_checkpoint
    is_matching_target = is_matching_source and data.target.root == get_block_root(state, data.target.epoch)
    is_matching_head = is_matching_target and data.beacon_block_root == get_block_root_at_slot(state, data.slot)
    assert is_matching_source

    participation_flag_indices = []
    if is_matching_source and inclusion_delay <= integer_squareroot(SLOTS_PER_EPOCH):
        participation_flag_indices.append(TIMELY_SOURCE_FLAG_INDEX)
    if is_matching_target:  # [Modified in Deneb:EIP7045]
        participation_flag_indices.append(TIMELY_TARGET_FLAG_INDEX)
    if is_matching_head and inclusion_delay == MIN_ATTESTATION_INCLUSION_DELAY:
        participation_flag_indices.append(TIMELY_HEAD_FLAG_INDEX)

    return participation_flag_indices
```

## Beacon chain state transition function

### Execution engine

@@ -183,10 +224,25 @@ def kzg_commitment_to_versioned_hash(kzg_commitment: KZGCommitment) -> Versioned

class NewPayloadRequest(object):
    execution_payload: ExecutionPayload
    versioned_hashes: Sequence[VersionedHash]
    parent_beacon_block_root: Root
```

#### Engine APIs

##### `is_valid_block_hash`

*Note*: The function `is_valid_block_hash` is modified to include the additional `parent_beacon_block_root` parameter for EIP-4788.

```python
def is_valid_block_hash(self: ExecutionEngine,
                        execution_payload: ExecutionPayload,
                        parent_beacon_block_root: Root) -> bool:
    """
    Return ``True`` if and only if ``execution_payload.block_hash`` is computed correctly.
    """
    ...
```

##### `is_valid_versioned_hashes`

```python
@@ -198,6 +254,20 @@ def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPay

    ...
```

##### Modified `notify_new_payload`

*Note*: The function `notify_new_payload` is modified to include the additional `parent_beacon_block_root` parameter for EIP-4788.

```python
def notify_new_payload(self: ExecutionEngine,
                       execution_payload: ExecutionPayload,
                       parent_beacon_block_root: Root) -> bool:
    """
    Return ``True`` if and only if ``execution_payload`` is valid with respect to ``self.execution_state``.
    """
    ...
```

##### Modified `verify_and_notify_new_payload`

```python

@@ -206,14 +276,19 @@ def verify_and_notify_new_payload(self: ExecutionEngine,

    """
    Return ``True`` if and only if ``new_payload_request`` is valid with respect to ``self.execution_state``.
    """
    if not self.is_valid_block_hash(new_payload_request.execution_payload):
    execution_payload = new_payload_request.execution_payload
    parent_beacon_block_root = new_payload_request.parent_beacon_block_root

    # [Modified in Deneb:EIP4788]
    if not self.is_valid_block_hash(execution_payload, parent_beacon_block_root):
        return False

    # [New in Deneb:EIP4844]
    if not self.is_valid_versioned_hashes(new_payload_request):
        return False

    if not self.notify_new_payload(new_payload_request.execution_payload):
    # [Modified in Deneb:EIP4788]
    if not self.notify_new_payload(execution_payload, parent_beacon_block_root):
        return False

    return True

@@ -221,10 +296,52 @@ def verify_and_notify_new_payload(self: ExecutionEngine,

### Block processing

#### Modified `process_attestation`

*Note*: The function `process_attestation` is modified for EIP-7045 to expand the valid inclusion slots to those in both epoch `target.epoch` and epoch `target.epoch + 1`. It also uses the updated `get_attestation_participation_flag_indices` so that rewards remain available across the extended attestation inclusion range.

```python
def process_attestation(state: BeaconState, attestation: Attestation) -> None:
    data = attestation.data
    assert data.target.epoch in (get_previous_epoch(state), get_current_epoch(state))
    assert data.target.epoch == compute_epoch_at_slot(data.slot)
    assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot  # [Modified in Deneb:EIP7045]
    assert data.index < get_committee_count_per_slot(state, data.target.epoch)

    committee = get_beacon_committee(state, data.slot, data.index)
    assert len(attestation.aggregation_bits) == len(committee)

    # Participation flag indices
    participation_flag_indices = get_attestation_participation_flag_indices(state, data, state.slot - data.slot)

    # Verify signature
    assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation))

    # Update epoch participation flags
    if data.target.epoch == get_current_epoch(state):
        epoch_participation = state.current_epoch_participation
    else:
        epoch_participation = state.previous_epoch_participation

    proposer_reward_numerator = 0
    for index in get_attesting_indices(state, data, attestation.aggregation_bits):
        for flag_index, weight in enumerate(PARTICIPATION_FLAG_WEIGHTS):
            if flag_index in participation_flag_indices and not has_flag(epoch_participation[index], flag_index):
                epoch_participation[index] = add_flag(epoch_participation[index], flag_index)
                proposer_reward_numerator += get_base_reward(state, index) * weight

    # Reward proposer
    proposer_reward_denominator = (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT) * WEIGHT_DENOMINATOR // PROPOSER_WEIGHT
    proposer_reward = Gwei(proposer_reward_numerator // proposer_reward_denominator)
    increase_balance(state, get_beacon_proposer_index(state), proposer_reward)
```

#### Execution payload

##### Modified `process_execution_payload`

*Note*: The function `process_execution_payload` is modified to pass `versioned_hashes` into `execution_engine.verify_and_notify_new_payload` and to assign the new fields in `ExecutionPayloadHeader` for EIP-4844. It is also modified to pass in the parent beacon block root to support EIP-4788.

```python
def process_execution_payload(state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine) -> None:
    payload = body.execution_payload

@@ -241,9 +358,14 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi

    # Verify the execution payload is valid
    # [Modified in Deneb:EIP4844] Pass `versioned_hashes` to Execution Engine
    # [Modified in Deneb:EIP4788] Pass `parent_beacon_block_root` to Execution Engine
    versioned_hashes = [kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments]
    assert execution_engine.verify_and_notify_new_payload(
        NewPayloadRequest(execution_payload=payload, versioned_hashes=versioned_hashes)
        NewPayloadRequest(
            execution_payload=payload,
            versioned_hashes=versioned_hashes,
            parent_beacon_block_root=state.latest_block_header.parent_root,
        )
    )

    # Cache execution payload header

@@ -263,14 +385,14 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi

        block_hash=payload.block_hash,
        transactions_root=hash_tree_root(payload.transactions),
        withdrawals_root=hash_tree_root(payload.withdrawals),
        data_gas_used=payload.data_gas_used,  # [New in Deneb:EIP4844]
        excess_data_gas=payload.excess_data_gas,  # [New in Deneb:EIP4844]
        blob_gas_used=payload.blob_gas_used,  # [New in Deneb:EIP4844]
        excess_blob_gas=payload.excess_blob_gas,  # [New in Deneb:EIP4844]
    )
```

#### Modified `process_voluntary_exit`

*Note*: The function `process_voluntary_exit` is modified to use a fixed fork version -- `CAPELLA_FORK_VERSION` -- for EIP-7044
*Note*: The function `process_voluntary_exit` is modified to use a fixed fork version -- `CAPELLA_FORK_VERSION` -- for EIP-7044.

```python
def process_voluntary_exit(state: BeaconState, signed_voluntary_exit: SignedVoluntaryExit) -> None:

@@ -9,7 +9,8 @@

- [Containers](#containers)
- [Constants](#constants)
- [Helpers](#helpers)
- [`is_data_available`](#is_data_available)
- [Extended `PayloadAttributes`](#extended-payloadattributes)
- [`is_data_available`](#is_data_available)
- [Updated fork-choice handlers](#updated-fork-choice-handlers)
- [`on_block`](#on_block)

@@ -30,7 +31,21 @@ This is the modification of the fork choice accompanying the Deneb upgrade.

## Helpers

#### `is_data_available`
### Extended `PayloadAttributes`

`PayloadAttributes` is extended with the parent beacon block root for EIP-4788.

```python
@dataclass
class PayloadAttributes(object):
    timestamp: uint64
    prev_randao: Bytes32
    suggested_fee_recipient: ExecutionAddress
    withdrawals: Sequence[Withdrawal]
    parent_beacon_block_root: Root  # [New in Deneb:EIP4788]
```

### `is_data_available`

*[New in Deneb:EIP4844]*

@@ -47,11 +62,6 @@ def is_data_available(beacon_block_root: Root, blob_kzg_commitments: Sequence[KZ

    # `MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS`
    blobs, proofs = retrieve_blobs_and_proofs(beacon_block_root)

    # For testing, `retrieve_blobs_and_proofs` returns ("TEST", "TEST").
    # TODO: Remove it once we have a way to inject `BlobSidecar` into tests.
    if isinstance(blobs, str) or isinstance(proofs, str):
        return True

    return verify_blob_kzg_proof_batch(blobs, blob_kzg_commitments, proofs)
```
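
For context, a sketch of the intended call site (simplified by me from the `on_block` handler; not new normative text):

```python
# Inside on_block (sketch): a block is only imported once its blob data is available.
assert is_data_available(hash_tree_root(block), block.body.blob_kzg_commitments)
```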

@@ -103,7 +113,8 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:

    # Add proposer score boost if the block is timely
    time_into_slot = (store.time - store.genesis_time) % SECONDS_PER_SLOT
    is_before_late_block_cutoff = time_into_slot * 1000 < LATE_BLOCK_CUTOFF_MS
    if get_current_slot(store) == block.slot and is_before_late_block_cutoff:
    is_first_block = store.proposer_boost_root == Root()
    if get_current_slot(store) == block.slot and is_before_late_block_cutoff and is_first_block:
        store.proposer_boost_root = hash_tree_root(block)

    # Update checkpoints in store if necessary

@@ -57,7 +57,7 @@ def compute_fork_version(epoch: Epoch) -> Version:

### Fork trigger

TBD. This fork is defined for testing purposes, the EIP may be combined with other consensus-layer upgrade.
TBD. This fork is defined for testing purposes.
For now, we assume the condition will be triggered at epoch `DENEB_FORK_EPOCH`.

Note that for the pure Deneb networks, we don't apply `upgrade_to_deneb` since it starts with Deneb version logic.

@@ -83,8 +83,8 @@ def upgrade_to_deneb(pre: capella.BeaconState) -> BeaconState:

        block_hash=pre.latest_execution_payload_header.block_hash,
        transactions_root=pre.latest_execution_payload_header.transactions_root,
        withdrawals_root=pre.latest_execution_payload_header.withdrawals_root,
        data_gas_used=uint64(0),  # [New in Deneb:EIP4844]
        excess_data_gas=uint64(0),  # [New in Deneb:EIP4844]
        blob_gas_used=uint64(0),  # [New in Deneb:EIP4844]
        excess_blob_gas=uint64(0),  # [New in Deneb:EIP4844]
    )
    post = BeaconState(
        # Versioning

@@ -41,8 +41,8 @@ def upgrade_lc_header_to_deneb(pre: capella.LightClientHeader) -> LightClientHea

            block_hash=pre.execution.block_hash,
            transactions_root=pre.execution.transactions_root,
            withdrawals_root=pre.execution.withdrawals_root,
            data_gas_used=uint64(0),  # [New in Deneb:EIP4844]
            excess_data_gas=uint64(0),  # [New in Deneb:EIP4844]
            blob_gas_used=uint64(0),  # [New in Deneb:EIP4844]
            excess_blob_gas=uint64(0),  # [New in Deneb:EIP4844]
        ),
        execution_branch=pre.execution_branch,
    )

@@ -49,8 +49,8 @@ def block_to_light_client_header(block: SignedBeaconBlock) -> LightClientHeader:

        # [New in Deneb:EIP4844]
        if epoch >= DENEB_FORK_EPOCH:
            execution_header.data_gas_used = payload.data_gas_used
            execution_header.excess_data_gas = payload.excess_data_gas
            execution_header.blob_gas_used = payload.blob_gas_used
            execution_header.excess_blob_gas = payload.excess_blob_gas

        execution_branch = compute_merkle_proof_for_block_body(block.message.body, EXECUTION_PAYLOAD_INDEX)
    else:

@@ -68,7 +68,7 @@ def is_valid_light_client_header(header: LightClientHeader) -> bool:

    # [New in Deneb:EIP4844]
    if epoch < DENEB_FORK_EPOCH:
        if header.execution.data_gas_used != uint64(0) or header.execution.excess_data_gas != uint64(0):
        if header.execution.blob_gas_used != uint64(0) or header.execution.excess_blob_gas != uint64(0):
            return False

    if epoch < CAPELLA_FORK_EPOCH:

@@ -23,6 +23,9 @@ The specification of these changes continues in the same format as the network s

- [Global topics](#global-topics)
- [`beacon_block`](#beacon_block)
- [`blob_sidecar_{subnet_id}`](#blob_sidecar_subnet_id)
- [`beacon_aggregate_and_proof`](#beacon_aggregate_and_proof)
- [Attestation subnets](#attestation-subnets)
- [`beacon_attestation_{subnet_id}`](#beacon_attestation_subnet_id)
- [Transitioning the gossip](#transitioning-the-gossip)
- [The Req/Resp domain](#the-reqresp-domain)
- [Messages](#messages)

@@ -106,7 +109,11 @@ Some gossip meshes are upgraded in the fork of Deneb to support upgraded types.

Topics follow the same specification as in prior upgrades.

The `beacon_block` topic is modified to also support deneb blocks and new topics are added per table below. All other topics remain stable.
The `beacon_block` topic is modified to also support Deneb blocks, and new topics are added per the table below.

The `voluntary_exit` topic is implicitly modified because `CAPELLA_FORK_VERSION` is locked in for the signature validation of this message for EIP-7044.

The `beacon_aggregate_and_proof` and `beacon_attestation_{subnet_id}` topics are modified for EIP-7045 so that attestations created in epoch `N` can be gossiped through the entire range of slots in epoch `N+1`, rather than only through one epoch of slots.

The specification around the creation, validation, and dissemination of messages has not changed from the Capella document unless explicitly noted here.
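
To make the `voluntary_exit` lock-in concrete, a minimal sketch (mine, using helper names from the phase0 spec; not the normative function) of computing the pinned signing root:

```python
# Sketch: the exit domain is pinned to CAPELLA_FORK_VERSION (EIP-7044), so a
# signed exit remains valid under any later fork's active version.
def voluntary_exit_signing_root(state: BeaconState, voluntary_exit: VoluntaryExit) -> Root:
    domain = compute_domain(DOMAIN_VOLUNTARY_EXIT, CAPELLA_FORK_VERSION, state.genesis_validators_root)
    return compute_signing_root(voluntary_exit, domain)
```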

@@ -124,7 +131,9 @@ Deneb introduces new global topics for blob sidecars.

###### `beacon_block`

The *type* of the payload of this topic changes to the (modified) `SignedBeaconBlock` found in deneb.
The *type* of the payload of this topic changes to the (modified) `SignedBeaconBlock` found in Deneb.

*[Modified in Deneb:EIP4844]*

New validation:

@@ -150,6 +159,41 @@ The following validations MUST pass before forwarding the `signed_blob_sidecar`

- _[REJECT]_ The sidecar is proposed by the expected `proposer_index` for the block's slot in the context of the current shuffling (defined by `block_parent_root`/`slot`).
If the `proposer_index` cannot immediately be verified against the expected shuffling, the sidecar MAY be queued for later processing while proposers for the block's branch are calculated -- in such a case _do not_ `REJECT`, instead `IGNORE` this message.

###### `beacon_aggregate_and_proof`

*[Modified in Deneb:EIP7045]*

The following validation is removed:
* _[IGNORE]_ `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot`
(a client MAY queue future aggregates for processing at the appropriate slot).

The following validations are added in its place:
* _[IGNORE]_ `aggregate.data.slot` is equal to or earlier than the `current_slot` (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `aggregate.data.slot <= current_slot`
(a client MAY queue future aggregates for processing at the appropriate slot).
* _[IGNORE]_ the epoch of `aggregate.data.slot` is either the current or previous epoch
(with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `compute_epoch_at_slot(aggregate.data.slot) in (get_previous_epoch(state), get_current_epoch(state))`
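
Putting the two replacement `IGNORE` conditions together, a non-normative sketch (mine) of the new timeliness window:

```python
# Sketch: an aggregate is within the EIP-7045 gossip window if its slot is not in
# the future and its epoch is the current or previous one (clock disparity omitted).
def is_within_gossip_window(aggregate_slot: Slot, current_slot: Slot) -> bool:
    if aggregate_slot > current_slot:
        return False  # future aggregates may be queued instead
    return compute_epoch_at_slot(aggregate_slot) + 1 >= compute_epoch_at_slot(current_slot)
```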

##### Attestation subnets

###### `beacon_attestation_{subnet_id}`

*[Modified in Deneb:EIP7045]*

The following validation is removed:
* _[IGNORE]_ `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot`
(a client MAY queue future attestations for processing at the appropriate slot).

The following validations are added in its place:
* _[IGNORE]_ `attestation.data.slot` is equal to or earlier than the `current_slot` (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `attestation.data.slot <= current_slot`
(a client MAY queue future attestations for processing at the appropriate slot).
* _[IGNORE]_ the epoch of `attestation.data.slot` is either the current or previous epoch
(with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `compute_epoch_at_slot(attestation.data.slot) in (get_previous_epoch(state), get_current_epoch(state))`

#### Transitioning the gossip

@@ -20,6 +20,7 @@

- [Beacon chain responsibilities](#beacon-chain-responsibilities)
- [Block and sidecar proposal](#block-and-sidecar-proposal)
- [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody)
- [ExecutionPayload](#executionpayload)
- [Blob KZG commitments](#blob-kzg-commitments)
- [Constructing the `SignedBlobSidecar`s](#constructing-the-signedblobsidecars)
- [Sidecar](#sidecar)

@@ -103,11 +104,46 @@ All validator responsibilities remain unchanged other than those noted below.

#### Constructing the `BeaconBlockBody`

##### ExecutionPayload

`prepare_execution_payload` is updated from the Capella specs to provide the parent beacon block root.

*Note*: In this section, `state` is the state of the slot for the block proposal _without_ the block yet applied.
That is, `state` is the `previous_state` processed through any empty slots up to the assigned slot using `process_slots(previous_state, slot)`.

*Note*: The only change made to `prepare_execution_payload` is to add the parent beacon block root as an additional
parameter to the `PayloadAttributes`.

```python
def prepare_execution_payload(state: BeaconState,
                              safe_block_hash: Hash32,
                              finalized_block_hash: Hash32,
                              suggested_fee_recipient: ExecutionAddress,
                              execution_engine: ExecutionEngine) -> Optional[PayloadId]:
    # Verify consistency of the parent hash with respect to the previous execution payload header
    parent_hash = state.latest_execution_payload_header.block_hash

    # Set the forkchoice head and initiate the payload build process
    payload_attributes = PayloadAttributes(
        timestamp=compute_timestamp_at_slot(state, state.slot),
        prev_randao=get_randao_mix(state, get_current_epoch(state)),
        suggested_fee_recipient=suggested_fee_recipient,
        withdrawals=get_expected_withdrawals(state),
        parent_beacon_block_root=hash_tree_root(state.latest_block_header),  # [New in Deneb:EIP4788]
    )
    return execution_engine.notify_forkchoice_updated(
        head_block_hash=parent_hash,
        safe_block_hash=safe_block_hash,
        finalized_block_hash=finalized_block_hash,
        payload_attributes=payload_attributes,
    )
```

##### Blob KZG commitments

*[New in Deneb:EIP4844]*

1. After retrieving the execution payload from the execution engine as specified in Capella,
1. After retrieving the execution payload from the execution engine as specified above,
use the `payload_id` to retrieve `blobs`, `blob_kzg_commitments`, and `blob_kzg_proofs`
via `get_payload(payload_id).blobs_bundle`.
2. Set `block.body.blob_kzg_commitments = blob_kzg_commitments`.
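
A non-normative sketch (mine) of steps 1-2; the `blobs_bundle` field names below are assumptions for illustration, not definitions from this document:

```python
# Sketch: wire the payload build and blob commitments together (names assumed).
payload_id = prepare_execution_payload(state, safe_block_hash, finalized_block_hash,
                                       suggested_fee_recipient, execution_engine)
bundle = execution_engine.get_payload(payload_id).blobs_bundle
block.body.blob_kzg_commitments = bundle.blob_kzg_commitments  # hypothetical field name
```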

@@ -59,6 +59,7 @@

- [`xor`](#xor)
- [`uint_to_bytes`](#uint_to_bytes)
- [`bytes_to_uint64`](#bytes_to_uint64)
- [`saturating_sub`](#saturating_sub)
- [Crypto](#crypto)
- [`hash`](#hash)
- [`hash_tree_root`](#hash_tree_root)

@@ -630,6 +631,16 @@ def bytes_to_uint64(data: bytes) -> uint64:

    return uint64(int.from_bytes(data, ENDIANNESS))
```

#### `saturating_sub`

```python
def saturating_sub(a: int, b: int) -> int:
    """
    Computes a - b, saturating at numeric bounds.
    """
    return a - b if a > b else 0
```
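
For illustration (my own examples, not part of the diff), the saturation means the result never goes below zero:

```python
assert saturating_sub(10, 3) == 7  # ordinary subtraction
assert saturating_sub(3, 10) == 0  # saturates at zero instead of going negative
assert saturating_sub(5, 5) == 0   # a == b also yields zero, since `a > b` is False
```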

### Crypto

#### `hash`

@@ -1849,6 +1860,15 @@ def get_validator_from_deposit(pubkey: BLSPubkey, withdrawal_credentials: Bytes3

    )
```

```python
def add_validator_to_registry(state: BeaconState,
                              pubkey: BLSPubkey,
                              withdrawal_credentials: Bytes32,
                              amount: uint64) -> None:
    state.validators.append(get_validator_from_deposit(pubkey, withdrawal_credentials, amount))
    state.balances.append(amount)
```

```python
def apply_deposit(state: BeaconState,
                  pubkey: BLSPubkey,

@@ -1865,12 +1885,8 @@ def apply_deposit(state: BeaconState,

    )
    domain = compute_domain(DOMAIN_DEPOSIT)  # Fork-agnostic domain since deposits are valid across forks
    signing_root = compute_signing_root(deposit_message, domain)
    if not bls.Verify(pubkey, signing_root, signature):
        return

    # Add validator and balance entries
    state.validators.append(get_validator_from_deposit(pubkey, withdrawal_credentials, amount))
    state.balances.append(amount)
    if bls.Verify(pubkey, signing_root, signature):
        add_validator_to_registry(state, pubkey, withdrawal_credentials, amount)
    else:
        # Increase balance by deposit amount
        index = ValidatorIndex(validator_pubkeys.index(pubkey))

@@ -539,7 +539,8 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:

    # Add proposer score boost if the block is timely
    time_into_slot = (store.time - store.genesis_time) % SECONDS_PER_SLOT
    is_before_attesting_interval = time_into_slot < SECONDS_PER_SLOT // INTERVALS_PER_SLOT
    if get_current_slot(store) == block.slot and is_before_attesting_interval:
    is_first_block = store.proposer_boost_root == Root()
    if get_current_slot(store) == block.slot and is_before_attesting_interval and is_first_block:
        store.proposer_boost_root = hash_tree_root(block)

# Update checkpoints in store if necessary

@@ -55,7 +55,7 @@ It consists of four main sections:

- [ENR structure](#enr-structure)
- [Attestation subnet bitfield](#attestation-subnet-bitfield)
- [`eth2` field](#eth2-field)
- [Attestation subnet subcription](#attestation-subnet-subcription)
- [Attestation subnet subscription](#attestation-subnet-subscription)
- [Design decision rationale](#design-decision-rationale)
- [Transport](#transport-1)
- [Why are we defining specific transports?](#why-are-we-defining-specific-transports)

@@ -1002,7 +1002,7 @@ Clients MAY connect to peers with the same `fork_digest` but a different `next_f

Unless `ENRForkID` is manually updated to match prior to the earlier `next_fork_epoch` of the two clients,
these connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`.

### Attestation subnet subcription
### Attestation subnet subscription

Because Phase 0 does not have shards and thus does not have Shard Committees, there is no stable backbone to the attestation subnets (`beacon_attestation_{subnet_id}`). To provide this stability, each beacon node should:

@@ -606,7 +606,7 @@ def get_aggregate_and_proof_signature(state: BeaconState,

"Slashing" is the burning of some amount of validator funds and immediate ejection from the active validator set. In Phase 0, there are two ways in which funds can be slashed: [proposer slashing](#proposer-slashing) and [attester slashing](#attester-slashing). Although being slashed has serious repercussions, it is simple enough to avoid being slashed altogether by remaining _consistent_ with respect to the messages a validator has previously signed.

*Note*: Signed data must be within a sequential `Fork` context to conflict. Messages cannot be slashed across diverging forks. If the previous fork version is 1 and the chain splits into fork 2 and 102, messages from 1 can slashable against messages in forks 1, 2, and 102. Messages in 2 cannot be slashable against messages in 102, and vice versa.
*Note*: Signed data must be within a sequential `Fork` context to conflict. Messages cannot be slashed across diverging forks. If the previous fork version is 1 and the chain splits into fork 2 and 102, messages from 1 can be slashable against messages in forks 1, 2, and 102. Messages in 2 cannot be slashable against messages in 102, and vice versa.

### Proposer slashing

@@ -1 +1 @@

1.4.0-alpha.3
1.4.0-beta.1

@@ -171,7 +171,6 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):

        help="if set re-generate and overwrite test files if they already exist",
    )
    parser.add_argument(
        "-l",
        "--preset-list",
        dest="preset_list",
        nargs='*',

@@ -179,6 +178,14 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):

        required=False,
        help="specify presets to run with. Allows all if no preset names are specified.",
    )
    parser.add_argument(
        "--fork-list",
        dest="fork_list",
        nargs='*',
        type=str,
        required=False,
        help="specify forks to run with. Allows all if no fork names are specified.",
    )
    parser.add_argument(
        "-c",
        "--collect-only",

@@ -199,6 +206,7 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):

    print(f"Generating tests into {output_dir}")
    print(f'Error log file: {log_file}')

    # preset_list arg
    presets = args.preset_list
    if presets is None:
        presets = []

@@ -206,6 +214,14 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):

    if len(presets) != 0:
        print(f"Filtering test-generator runs to only include presets: {', '.join(presets)}")

    # fork_list arg
    forks = args.fork_list
    if forks is None:
        forks = []

    if len(forks) != 0:
        print(f"Filtering test-generator runs to only include forks: {', '.join(forks)}")

    collect_only = args.collect_only

    diagnostics_obj = Diagnostics()

@@ -224,6 +240,10 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):

        if len(presets) != 0 and test_case.preset_name not in presets:
            continue

        # If fork list is assigned, filter by forks.
        if len(forks) != 0 and test_case.fork_name not in forks:
            continue

        case_dir = get_test_case_dir(test_case, output_dir)
        print(f"Collected test at: {case_dir}")
        diagnostics_obj.collected_test_count += 1

@@ -10,6 +10,10 @@ from eth2spec.gen_helpers.gen_base import gen_runner

from eth2spec.gen_helpers.gen_base.gen_typing import TestCase, TestProvider


def generate_case_fn(tfn, generator_mode, phase, preset, bls_active):
    return lambda: tfn(generator_mode=generator_mode, phase=phase, preset=preset, bls_active=bls_active)


def generate_from_tests(runner_name: str, handler_name: str, src: Any,
                        fork_name: SpecForkName, preset_name: PresetBaseName,
                        bls_active: bool = True,

@@ -52,7 +56,7 @@ def generate_from_tests(runner_name: str, handler_name: str, src: Any,

        suite_name=getattr(tfn, 'suite_name', 'pyspec_tests'),
        case_name=case_name,
        # TODO: with_all_phases and other per-phase tooling, should be replaced with per-fork equivalent.
        case_fn=lambda: tfn(generator_mode=True, phase=phase, preset=preset_name, bls_active=bls_active)
        case_fn=generate_case_fn(tfn, generator_mode=True, phase=phase, preset=preset_name, bls_active=bls_active)
)
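
The point of `generate_case_fn` is to avoid Python's late binding of loop variables in closures; a minimal illustration (mine, not from the repository):

```python
# Closures capture variables, not values: every lambda below sees the final i.
fns = [lambda: i for i in range(3)]
assert [f() for f in fns] == [2, 2, 2]

# Passing the values through function parameters (as generate_case_fn does) fixes this.
def make_fn(i):
    return lambda: i

fns = [make_fn(i) for i in range(3)]
assert [f() for f in fns] == [0, 1, 2]
```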

@@ -33,7 +33,7 @@ def test_empty_block_transition_no_tx(spec, state):

@with_bellatrix_and_later
@spec_state_test
def test_empty_block_transition_randomized_payload(spec, state):
def test_block_transition_randomized_payload(spec, state):
    yield 'pre', state

    block = build_empty_block_for_next_slot(spec, state)

@@ -9,12 +9,13 @@ from eth2spec.bellatrix import mainnet as spec_bellatrix_mainnet, minimal as spe

from eth2spec.capella import mainnet as spec_capella_mainnet, minimal as spec_capella_minimal
from eth2spec.deneb import mainnet as spec_deneb_mainnet, minimal as spec_deneb_minimal
from eth2spec.eip6110 import mainnet as spec_eip6110_mainnet, minimal as spec_eip6110_minimal
from eth2spec.eip7002 import mainnet as spec_eip7002_mainnet, minimal as spec_eip7002_minimal
from eth2spec.utils import bls

from .exceptions import SkippedTest
from .helpers.constants import (
    PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB,
    EIP6110,
    EIP6110, EIP7002,
    MINIMAL, MAINNET,
    ALL_PHASES,
    ALL_FORK_UPGRADES,

@@ -83,6 +84,7 @@ spec_targets: Dict[PresetBaseName, Dict[SpecForkName, Spec]] = {

        CAPELLA: spec_capella_minimal,
        DENEB: spec_deneb_minimal,
        EIP6110: spec_eip6110_minimal,
        EIP7002: spec_eip7002_minimal,
    },
    MAINNET: {
        PHASE0: spec_phase0_mainnet,

@@ -91,6 +93,7 @@ spec_targets: Dict[PresetBaseName, Dict[SpecForkName, Spec]] = {

        CAPELLA: spec_capella_mainnet,
        DENEB: spec_deneb_mainnet,
        EIP6110: spec_eip6110_mainnet,
        EIP7002: spec_eip7002_mainnet,
    },
}

@@ -534,12 +537,14 @@ def with_presets(preset_bases, reason=None):

    return decorator


with_light_client = with_phases(LIGHT_CLIENT_TESTING_FORKS)

with_altair_and_later = with_all_phases_from(ALTAIR)
with_bellatrix_and_later = with_all_phases_from(BELLATRIX)
with_capella_and_later = with_all_phases_from(CAPELLA)
with_deneb_and_later = with_all_phases_from(DENEB)
with_eip6110_and_later = with_all_phases_from(EIP6110)
with_light_client = with_phases(LIGHT_CLIENT_TESTING_FORKS)
with_eip7002_and_later = with_all_phases_from(EIP7002)


class quoted_str(str):

@@ -59,6 +59,13 @@ def run_execution_payload_processing(spec, state, execution_payload, blob_kzg_co

    assert state.latest_execution_payload_header == get_execution_payload_header(spec, body.execution_payload)


"""
Tests with incorrect blob transactions in the execution payload, but the execution client returns
VALID, and the purpose of these tests is that the beacon client must not reject the block by
attempting to do a validation of its own.
"""


@with_deneb_and_later
@spec_state_test
def test_incorrect_blob_tx_type(spec, state):

@@ -78,14 +85,14 @@ def test_incorrect_blob_tx_type(spec, state):

@with_deneb_and_later
@spec_state_test
def test_incorrect_transaction_length_1_byte(spec, state):
def test_incorrect_transaction_length_1_extra_byte(spec, state):
    """
    The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default.
    """
    execution_payload = build_empty_execution_payload(spec, state)

    opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec)
    opaque_tx = opaque_tx + b'\x12'  # incorrect tx length
    opaque_tx = opaque_tx + b'\x12'  # incorrect tx length, longer

    execution_payload.transactions = [opaque_tx]
    execution_payload.block_hash = compute_el_block_hash(spec, execution_payload)

@@ -95,7 +102,41 @@ def test_incorrect_transaction_length_1_byte(spec, state):

@with_deneb_and_later
@spec_state_test
def test_incorrect_transaction_length_32_bytes(spec, state):
def test_incorrect_transaction_length_1_byte_short(spec, state):
    """
    The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default.
    """
    execution_payload = build_empty_execution_payload(spec, state)

    opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec)
    opaque_tx = opaque_tx[:-1]  # incorrect tx length, shorter

    execution_payload.transactions = [opaque_tx]
    execution_payload.block_hash = compute_el_block_hash(spec, execution_payload)

    yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments)


@with_deneb_and_later
@spec_state_test
def test_incorrect_transaction_length_empty(spec, state):
    """
    The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default.
    """
    execution_payload = build_empty_execution_payload(spec, state)

    opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec)
    opaque_tx = opaque_tx[0:0]  # incorrect tx length, empty

    execution_payload.transactions = [opaque_tx]
    execution_payload.block_hash = compute_el_block_hash(spec, execution_payload)

    yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments)


@with_deneb_and_later
@spec_state_test
def test_incorrect_transaction_length_32_extra_bytes(spec, state):
    """
    The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default.
    """

@@ -110,6 +151,22 @@ def test_incorrect_transaction_length_32_bytes(spec, state):

    yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments)


@with_deneb_and_later
@spec_state_test
def test_no_transactions_with_commitments(spec, state):
    """
    The versioned hashes are wrong, but the testing ExecutionEngine returns VALID by default.
    """
    execution_payload = build_empty_execution_payload(spec, state)

    _, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec)

    execution_payload.transactions = []
    execution_payload.block_hash = compute_el_block_hash(spec, execution_payload)

    yield from run_execution_payload_processing(spec, state, execution_payload, blob_kzg_commitments)


@with_deneb_and_later
@spec_state_test
def test_incorrect_commitment(spec, state):

@ -0,0 +1,182 @@
from random import Random

from eth2spec.test.context import (
    spec_state_test,
    with_deneb_and_later,
)
from eth2spec.test.helpers.block import (
    build_empty_block_for_next_slot,
)
from eth2spec.test.helpers.execution_payload import (
    compute_el_block_hash,
)
from eth2spec.test.helpers.fork_choice import (
    BlobData,
    get_genesis_forkchoice_store_and_block,
    on_tick_and_append_step,
    tick_and_add_block_with_data,
)
from eth2spec.test.helpers.state import (
    state_transition_and_sign_block,
)
from eth2spec.test.helpers.sharding import (
    get_sample_opaque_tx,
)


def get_block_with_blob(spec, state, rng=None):
    block = build_empty_block_for_next_slot(spec, state)
    opaque_tx, blobs, blob_kzg_commitments, blob_kzg_proofs = get_sample_opaque_tx(spec, blob_count=1, rng=rng)
    block.body.execution_payload.transactions = [opaque_tx]
    block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload)
    block.body.blob_kzg_commitments = blob_kzg_commitments
    return block, blobs, blob_kzg_proofs


@with_deneb_and_later
@spec_state_test
def test_simple_blob_data(spec, state):
    rng = Random(1234)

    test_steps = []
    # Initialization
    store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
    yield 'anchor_state', state
    yield 'anchor_block', anchor_block
    current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
    on_tick_and_append_step(spec, store, current_time, test_steps)
    assert store.time == current_time

    # On receiving a block of `GENESIS_SLOT + 1` slot
    block, blobs, blob_kzg_proofs = get_block_with_blob(spec, state, rng=rng)
    signed_block = state_transition_and_sign_block(spec, state, block)
    blob_data = BlobData(blobs, blob_kzg_proofs)

    yield from tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data)

    assert spec.get_head(store) == signed_block.message.hash_tree_root()

    # On receiving a block of next epoch
    store.time = current_time + spec.config.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH
    block, blobs, blob_kzg_proofs = get_block_with_blob(spec, state, rng=rng)
    signed_block = state_transition_and_sign_block(spec, state, block)
    blob_data = BlobData(blobs, blob_kzg_proofs)

    yield from tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data)

    assert spec.get_head(store) == signed_block.message.hash_tree_root()

    yield 'steps', test_steps


@with_deneb_and_later
@spec_state_test
def test_invalid_incorrect_proof(spec, state):
    rng = Random(1234)

    test_steps = []
    # Initialization
    store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
    yield 'anchor_state', state
    yield 'anchor_block', anchor_block
    current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
    on_tick_and_append_step(spec, store, current_time, test_steps)
    assert store.time == current_time

    # On receiving a block of `GENESIS_SLOT + 1` slot
    block, blobs, _ = get_block_with_blob(spec, state, rng=rng)
    signed_block = state_transition_and_sign_block(spec, state, block)
    # Insert incorrect proof
    blob_kzg_proofs = [b'\xc0' + b'\x00' * 47]
    blob_data = BlobData(blobs, blob_kzg_proofs)

    yield from tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data, valid=False)

    assert spec.get_head(store) != signed_block.message.hash_tree_root()

    yield 'steps', test_steps


@with_deneb_and_later
@spec_state_test
def test_invalid_data_unavailable(spec, state):
    rng = Random(1234)

    test_steps = []
    # Initialization
    store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
    yield 'anchor_state', state
    yield 'anchor_block', anchor_block
    current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
    on_tick_and_append_step(spec, store, current_time, test_steps)
    assert store.time == current_time

    # On receiving a block of `GENESIS_SLOT + 1` slot
    block, _, _ = get_block_with_blob(spec, state, rng=rng)
    signed_block = state_transition_and_sign_block(spec, state, block)

    # data unavailable
    blob_data = BlobData([], [])

    yield from tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data, valid=False)

    assert spec.get_head(store) != signed_block.message.hash_tree_root()

    yield 'steps', test_steps


@with_deneb_and_later
@spec_state_test
def test_invalid_wrong_proofs_length(spec, state):
    rng = Random(1234)

    test_steps = []
    # Initialization
    store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
    yield 'anchor_state', state
    yield 'anchor_block', anchor_block
    current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
    on_tick_and_append_step(spec, store, current_time, test_steps)
    assert store.time == current_time

    # On receiving a block of `GENESIS_SLOT + 1` slot
    block, blobs, _ = get_block_with_blob(spec, state, rng=rng)
    signed_block = state_transition_and_sign_block(spec, state, block)

    # unavailable proofs
    blob_data = BlobData(blobs, [])

    yield from tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data, valid=False)

    assert spec.get_head(store) != signed_block.message.hash_tree_root()

    yield 'steps', test_steps


@with_deneb_and_later
@spec_state_test
def test_invalid_wrong_blobs_length(spec, state):
    rng = Random(1234)

    test_steps = []
    # Initialization
    store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
    yield 'anchor_state', state
    yield 'anchor_block', anchor_block
    current_time = state.slot * spec.config.SECONDS_PER_SLOT + store.genesis_time
    on_tick_and_append_step(spec, store, current_time, test_steps)
    assert store.time == current_time

    # On receiving a block of `GENESIS_SLOT + 1` slot
    block, _, blob_kzg_proofs = get_block_with_blob(spec, state, rng=rng)
    signed_block = state_transition_and_sign_block(spec, state, block)

    # unavailable blobs
    blob_data = BlobData([], blob_kzg_proofs)

    yield from tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data, valid=False)

    assert spec.get_head(store) != signed_block.message.hash_tree_root()

    yield 'steps', test_steps
@ -1,8 +1,10 @@
import random

from eth2spec.test.helpers.state import (
    state_transition_and_sign_block
    state_transition_and_sign_block,
)
from eth2spec.test.helpers.block import (
    build_empty_block_for_next_slot
    build_empty_block_for_next_slot,
)
from eth2spec.test.context import (
    spec_state_test,

@ -10,21 +12,34 @@ from eth2spec.test.context import (
)
from eth2spec.test.helpers.execution_payload import (
    compute_el_block_hash,
    get_random_tx,
)
from eth2spec.test.helpers.sharding import (
    get_sample_opaque_tx,
)


def run_block_with_blobs(spec, state, blob_count, data_gas_used=1, excess_data_gas=1, valid=True):
def run_block_with_blobs(spec, state, blob_count, tx_count=1, blob_gas_used=1, excess_blob_gas=1,
                         non_blob_tx_count=0, rng=random.Random(7777), valid=True):
    yield 'pre', state

    block = build_empty_block_for_next_slot(spec, state)
    opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec, blob_count=blob_count)
    txs = []
    blob_kzg_commitments = []
    for _ in range(tx_count):
        opaque_tx, _, commits, _ = get_sample_opaque_tx(spec, blob_count=blob_count)
        txs.append(opaque_tx)
        blob_kzg_commitments += commits

    for _ in range(non_blob_tx_count):
        txs.append(get_random_tx(rng))

    rng.shuffle(txs)

    block.body.blob_kzg_commitments = blob_kzg_commitments
    block.body.execution_payload.transactions = [opaque_tx]
    block.body.execution_payload.data_gas_used = data_gas_used
    block.body.execution_payload.excess_data_gas = excess_data_gas
    block.body.execution_payload.transactions = txs
    block.body.execution_payload.blob_gas_used = blob_gas_used
    block.body.execution_payload.excess_blob_gas = excess_blob_gas
    block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload)

    if valid:

@ -48,13 +63,43 @@ def test_one_blob(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=1)


@with_deneb_and_later
@spec_state_test
def test_one_blob_two_txs(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=2)


@with_deneb_and_later
@spec_state_test
def test_one_blob_max_txs(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=spec.MAX_BLOBS_PER_BLOCK)


@with_deneb_and_later
@spec_state_test
def test_invalid_one_blob_max_plus_one_txs(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=spec.MAX_BLOBS_PER_BLOCK + 1, valid=False)


@with_deneb_and_later
@spec_state_test
def test_max_blobs_per_block(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK)


@with_deneb_and_later
@spec_state_test
def test_invalid_max_blobs_per_block_two_txs(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK, tx_count=2, valid=False)


@with_deneb_and_later
@spec_state_test
def test_invalid_exceed_max_blobs_per_block(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK + 1, valid=False)


@with_deneb_and_later
@spec_state_test
def test_mix_blob_tx_and_non_blob_tx(spec, state):
    yield from run_block_with_blobs(spec, state, blob_count=1, tx_count=1, non_blob_tx_count=1)
@ -3,12 +3,25 @@ from eth2spec.test.context import (
    always_bls,
    with_fork_metas,
)
from eth2spec.test.helpers.attestations import (
    get_valid_attestation,
)
from eth2spec.test.helpers.block import (
    build_empty_block_for_next_slot,
)
from eth2spec.test.helpers.constants import (
    AFTER_DENEB_PRE_POST_FORKS,
)
from eth2spec.test.helpers.state import (
    next_epoch_via_block,
    state_transition_and_sign_block,
    transition_to,
)
from eth2spec.test.helpers.fork_transition import (
    OperationType,
    do_fork,
    run_transition_with_operation,
    transition_until_fork,
)


@ -52,3 +65,38 @@ def test_transition_with_btec_right_before_fork(state, fork_epoch, spec, post_sp
        operation_type=OperationType.BLS_TO_EXECUTION_CHANGE,
        operation_at_slot=fork_epoch * spec.SLOTS_PER_EPOCH - 1,
    )


@with_fork_metas([ForkMeta(pre_fork_name=pre, post_fork_name=post, fork_epoch=2)
                  for pre, post in AFTER_DENEB_PRE_POST_FORKS])
def test_transition_attestation_from_previous_fork_with_new_range(
        state, fork_epoch, spec, post_spec, pre_tag, post_tag):
    """
    [EIP-7045] test
    """
    # Transition to the epoch prior to the fork epoch
    next_epoch_via_block(spec, state)

    # Generate an attestation for slot 0 of this epoch
    attestation = get_valid_attestation(spec, state, signed=True)

    yield 'pre', state

    # Transition to the fork epoch with a block
    transition_until_fork(spec, state, fork_epoch)
    state, fork_block = do_fork(state, spec, post_spec, fork_epoch)
    current_epoch = spec.get_current_epoch(state)
    assert current_epoch == fork_epoch
    # Transition to second to last slot in `fork_epoch`
    penultimate_slot = post_spec.compute_start_slot_at_epoch(current_epoch + 1) - 2
    transition_to(post_spec, state, penultimate_slot)

    # Ensure the new state is in the increased EIP-7045 slot inclusion range
    assert penultimate_slot - attestation.data.slot > post_spec.SLOTS_PER_EPOCH

    block = build_empty_block_for_next_slot(post_spec, state)
    block.body.attestations.append(attestation)
    signed_block = state_transition_and_sign_block(post_spec, state, block)

    yield 'blocks', [post_tag(fork_block), post_tag(signed_block)]
    yield 'post', state
@ -0,0 +1,107 @@
from eth2spec.test.context import spec_state_test, with_eip7002_and_later
from eth2spec.test.helpers.execution_layer_exits import run_execution_layer_exit_processing
from eth2spec.test.helpers.withdrawals import set_eth1_withdrawal_credential_with_balance


@with_eip7002_and_later
@spec_state_test
def test_basic_exit(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    validator_pubkey = state.validators[validator_index].pubkey
    address = b'\x22' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )

    yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit)


@with_eip7002_and_later
@spec_state_test
def test_incorrect_source_address(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    validator_pubkey = state.validators[validator_index].pubkey
    address = b'\x22' * 20
    incorrect_address = b'\x33' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=incorrect_address,
        validator_pubkey=validator_pubkey,
    )

    yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False)


@with_eip7002_and_later
@spec_state_test
def test_incorrect_withdrawal_credential_prefix(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    validator_pubkey = state.validators[validator_index].pubkey
    address = b'\x22' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    # Set incorrect prefix
    state.validators[validator_index].withdrawal_credentials = (
        spec.BLS_WITHDRAWAL_PREFIX
        + state.validators[validator_index].withdrawal_credentials[1:]
    )
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )

    yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False)


@with_eip7002_and_later
@spec_state_test
def test_on_exit_initiated_validator(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    validator_pubkey = state.validators[validator_index].pubkey
    address = b'\x22' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    # Initiate exit earlier
    spec.initiate_validator_exit(state, validator_index)
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )

    yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False)


@with_eip7002_and_later
@spec_state_test
def test_activation_epoch_less_than_shard_committee_period(spec, state):
    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    validator_pubkey = state.validators[validator_index].pubkey
    address = b'\x22' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )

    assert spec.get_current_epoch(state) < (
        state.validators[validator_index].activation_epoch + spec.config.SHARD_COMMITTEE_PERIOD
    )

    yield from run_execution_layer_exit_processing(spec, state, execution_layer_exit, success=False)
@ -0,0 +1,174 @@
from eth2spec.test.helpers.block import (
    build_empty_block_for_next_slot
)
from eth2spec.test.context import (
    spec_state_test,
    with_eip7002_and_later,
)
from eth2spec.test.helpers.bls_to_execution_changes import (
    get_signed_address_change,
)
from eth2spec.test.helpers.execution_payload import (
    compute_el_block_hash,
)
from eth2spec.test.helpers.voluntary_exits import (
    prepare_signed_exits,
)
from eth2spec.test.helpers.state import (
    state_transition_and_sign_block,
)
from eth2spec.test.helpers.withdrawals import (
    set_eth1_withdrawal_credential_with_balance,
)


@with_eip7002_and_later
@spec_state_test
def test_basic_el_exit(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    yield 'pre', state

    validator_index = 0
    address = b'\x22' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH

    validator_pubkey = state.validators[validator_index].pubkey
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )
    block = build_empty_block_for_next_slot(spec, state)
    block.body.execution_payload.exits = [execution_layer_exit]
    block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload)
    signed_block = state_transition_and_sign_block(spec, state, block)

    yield 'blocks', [signed_block]
    yield 'post', state

    assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH


@with_eip7002_and_later
@spec_state_test
def test_basic_btec_and_el_exit_in_same_block(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    yield 'pre', state
    validator_index = 0
    assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH

    block = build_empty_block_for_next_slot(spec, state)

    address = b'\x22' * 20
    signed_address_change = get_signed_address_change(
        spec,
        state,
        validator_index=validator_index,
        to_execution_address=address,
    )
    block.body.bls_to_execution_changes = [signed_address_change]

    validator_pubkey = state.validators[validator_index].pubkey
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )
    block.body.execution_payload.exits = [execution_layer_exit]

    block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload)
    signed_block = state_transition_and_sign_block(spec, state, block)

    yield 'blocks', [signed_block]
    yield 'post', state

    # The BTEC is processed after the EL exit, so the exit does not take effect
    # and `initiate_validator_exit` is not called.
    validator = state.validators[validator_index]
    assert validator.exit_epoch == spec.FAR_FUTURE_EPOCH
    # Check that the BTEC took effect
    is_execution_address = validator.withdrawal_credentials[:1] == spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX
    is_correct_source_address = validator.withdrawal_credentials[12:] == address
    assert is_execution_address and is_correct_source_address


@with_eip7002_and_later
@spec_state_test
def test_basic_btec_before_el_exit(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    yield 'pre', state

    validator_index = 0
    assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH

    # block_1 contains a BTEC operation of the given validator
    address = b'\x22' * 20
    signed_address_change = get_signed_address_change(
        spec,
        state,
        validator_index=validator_index,
        to_execution_address=address,
    )
    block_1 = build_empty_block_for_next_slot(spec, state)
    block_1.body.bls_to_execution_changes = [signed_address_change]
    signed_block_1 = state_transition_and_sign_block(spec, state, block_1)

    validator = state.validators[validator_index]
    assert validator.exit_epoch == spec.FAR_FUTURE_EPOCH
    # Check that the BTEC took effect
    is_execution_address = validator.withdrawal_credentials[:1] == spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX
    is_correct_source_address = validator.withdrawal_credentials[12:] == address
    assert is_execution_address and is_correct_source_address

    # block_2 contains an EL-Exit operation of the given validator
    validator_pubkey = state.validators[validator_index].pubkey
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )
    block_2 = build_empty_block_for_next_slot(spec, state)
    block_2.body.execution_payload.exits = [execution_layer_exit]
    block_2.body.execution_payload.block_hash = compute_el_block_hash(spec, block_2.body.execution_payload)
    signed_block_2 = state_transition_and_sign_block(spec, state, block_2)

    yield 'blocks', [signed_block_1, signed_block_2]
    yield 'post', state

    assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH


@with_eip7002_and_later
@spec_state_test
def test_cl_exit_and_el_exit_in_same_block(spec, state):
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

    yield 'pre', state

    validator_index = 0
    address = b'\x22' * 20
    set_eth1_withdrawal_credential_with_balance(spec, state, validator_index, address=address)
    assert state.validators[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH

    # CL-Exit
    signed_voluntary_exits = prepare_signed_exits(spec, state, indices=[validator_index])
    # EL-Exit
    validator_pubkey = state.validators[validator_index].pubkey
    execution_layer_exit = spec.ExecutionLayerExit(
        source_address=address,
        validator_pubkey=validator_pubkey,
    )
    block = build_empty_block_for_next_slot(spec, state)
    block.body.voluntary_exits = signed_voluntary_exits
    block.body.execution_payload.exits = [execution_layer_exit]
    block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload)
    signed_block = state_transition_and_sign_block(spec, state, block)

    yield 'blocks', [signed_block]
    yield 'post', state

    assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
@ -5,7 +5,7 @@ from typing import List
from eth2spec.test.context import expect_assertion_error
from eth2spec.test.helpers.state import state_transition_and_sign_block, next_epoch, next_slot
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.forks import is_post_altair
from eth2spec.test.helpers.forks import is_post_altair, is_post_deneb
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils import bls
from eth2spec.utils.ssz.ssz_typing import Bitlist

@ -158,6 +158,14 @@ def get_attestation_signature(spec, state, attestation_data, privkey):
    return bls.Sign(privkey, signing_root)


def compute_max_inclusion_slot(spec, attestation):
    if is_post_deneb(spec):
        next_epoch = spec.compute_epoch_at_slot(attestation.data.slot) + 1
        end_of_next_epoch = spec.compute_start_slot_at_epoch(next_epoch + 1) - 1
        return end_of_next_epoch
    return attestation.data.slot + spec.SLOTS_PER_EPOCH
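Concretely, post-Deneb the cutoff is the last slot of the epoch after the attestation's epoch (the EIP-7045 range), while earlier forks allow exactly `SLOTS_PER_EPOCH` slots after the attestation slot. A standalone check of the arithmetic, assuming the minimal-preset value `SLOTS_PER_EPOCH = 8` (the numbers are illustrative, not taken from the diff):

```python
SLOTS_PER_EPOCH = 8  # minimal-preset value, assumed for this sketch


def max_inclusion_slot(slot, post_deneb):
    if post_deneb:
        next_epoch = slot // SLOTS_PER_EPOCH + 1
        # last slot of the epoch after the attestation's epoch (EIP-7045 range)
        return (next_epoch + 1) * SLOTS_PER_EPOCH - 1
    # pre-Deneb: exactly one epoch of slots after the attestation slot
    return slot + SLOTS_PER_EPOCH


assert max_inclusion_slot(5, post_deneb=False) == 13  # 5 + 8
assert max_inclusion_slot(5, post_deneb=True) == 15   # end of epoch 1
```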


def fill_aggregate_attestation(spec, state, attestation, signed=False, filter_participant_set=None):
    """
    `signed`: Signing is optional.
@ -17,6 +17,7 @@ SHARDING = SpecForkName('sharding')
CUSTODY_GAME = SpecForkName('custody_game')
DAS = SpecForkName('das')
EIP6110 = SpecForkName('eip6110')
EIP7002 = SpecForkName('eip7002')

#
# SpecFork settings

@ -30,8 +31,9 @@ ALL_PHASES = (
    # Formal forks
    *MAINNET_FORKS,
    DENEB,
    # Experimental features
    # Experimental patches
    EIP6110,
    EIP7002,
)
# The forks that have light client specs
LIGHT_CLIENT_TESTING_FORKS = (*[item for item in MAINNET_FORKS if item != PHASE0], DENEB)
@ -0,0 +1,39 @@
from eth2spec.test.context import expect_assertion_error
from eth2spec.test.helpers.state import get_validator_index_by_pubkey


#
# Run processing
#


def run_execution_layer_exit_processing(spec, state, execution_layer_exit, valid=True, success=True):
    """
    Run ``process_execution_layer_exit``, yielding:
      - pre-state ('pre')
      - execution_layer_exit ('execution_layer_exit')
      - post-state ('post').
    If ``valid == False``, run expecting ``AssertionError``.
    If ``success == False``, the exit is processed but does not successfully initiate a validator exit.
    """
    validator_index = get_validator_index_by_pubkey(state, execution_layer_exit.validator_pubkey)

    yield 'pre', state
    yield 'execution_layer_exit', execution_layer_exit

    if not valid:
        expect_assertion_error(lambda: spec.process_execution_layer_exit(state, execution_layer_exit))
        yield 'post', None
        return

    pre_exit_epoch = state.validators[validator_index].exit_epoch

    spec.process_execution_layer_exit(state, execution_layer_exit)

    yield 'post', state

    if success:
        assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH
        assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
    else:
        assert state.validators[validator_index].exit_epoch == pre_exit_epoch
@ -8,6 +8,7 @@ from eth2spec.test.helpers.forks import (
    is_post_capella,
    is_post_deneb,
    is_post_eip6110,
    is_post_eip7002,
)


@ -31,10 +32,12 @@ def get_execution_payload_header(spec, execution_payload):
    if is_post_capella(spec):
        payload_header.withdrawals_root = spec.hash_tree_root(execution_payload.withdrawals)
    if is_post_deneb(spec):
        payload_header.data_gas_used = execution_payload.data_gas_used
        payload_header.excess_data_gas = execution_payload.excess_data_gas
        payload_header.blob_gas_used = execution_payload.blob_gas_used
        payload_header.excess_blob_gas = execution_payload.excess_blob_gas
    if is_post_eip6110(spec):
        payload_header.deposit_receipts_root = spec.hash_tree_root(execution_payload.deposit_receipts)
    if is_post_eip7002(spec):
        payload_header.exits_root = spec.hash_tree_root(execution_payload.exits)
    return payload_header


@ -56,7 +59,8 @@ def compute_el_header_block_hash(spec,
                                 payload_header,
                                 transactions_trie_root,
                                 withdrawals_trie_root=None,
                                 deposit_receipts_trie_root=None):
                                 deposit_receipts_trie_root=None,
                                 exits_trie_root=None):
    """
    Computes the RLP execution block hash described by an `ExecutionPayloadHeader`.
    """

@ -98,13 +102,16 @@ def compute_el_header_block_hash(spec,
        # withdrawals_root
        execution_payload_header_rlp.append((Binary(32, 32), withdrawals_trie_root))
    if is_post_deneb(spec):
        # excess_data_gas
        execution_payload_header_rlp.append((big_endian_int, payload_header.data_gas_used))
        execution_payload_header_rlp.append((big_endian_int, payload_header.excess_data_gas))
        # excess_blob_gas
        execution_payload_header_rlp.append((big_endian_int, payload_header.blob_gas_used))
        execution_payload_header_rlp.append((big_endian_int, payload_header.excess_blob_gas))
    if is_post_eip6110(spec):
        # deposit_receipts_root
        assert deposit_receipts_trie_root is not None
        execution_payload_header_rlp.append((Binary(32, 32), deposit_receipts_trie_root))
    if is_post_eip7002(spec):
        # exits_trie_root
        execution_payload_header_rlp.append((Binary(32, 32), exits_trie_root))

    sedes = List([schema for schema, _ in execution_payload_header_rlp])
    values = [value for _, value in execution_payload_header_rlp]

@ -114,7 +121,7 @@ def compute_el_header_block_hash(spec,


# https://eips.ethereum.org/EIPS/eip-4895
def get_withdrawal_rlp(spec, withdrawal):
def get_withdrawal_rlp(withdrawal):
    withdrawal_rlp = [
        # index
        (big_endian_int, withdrawal.index),
@ -131,6 +138,20 @@ def get_withdrawal_rlp(spec, withdrawal):
    return encode(values, sedes)


# https://eips.ethereum.org/EIPS/eip-7002
def get_exit_rlp(exit):
    exit_rlp = [
        # source_address
        (Binary(20, 20), exit.source_address),
        # validator_pubkey
        (Binary(48, 48), exit.validator_pubkey),
    ]

    sedes = List([schema for schema, _ in exit_rlp])
    values = [value for _, value in exit_rlp]
    return encode(values, sedes)
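For reference, the encoding above can be reproduced with the `rlp` package alone; the sketch below uses a made-up stand-in for `spec.ExecutionLayerExit` and illustrative byte values:

```python
from collections import namedtuple

from rlp import encode
from rlp.sedes import Binary, List

# Hypothetical stand-in for spec.ExecutionLayerExit, for illustration only
Exit = namedtuple('Exit', ['source_address', 'validator_pubkey'])

exit = Exit(source_address=b'\x22' * 20, validator_pubkey=b'\xaa' * 48)
sedes = List([Binary(20, 20), Binary(48, 48)])
encoded = encode([exit.source_address, exit.validator_pubkey], sedes)

# 2-byte list header + 21 bytes (address item) + 49 bytes (pubkey item) = 72 bytes
assert len(encoded) == 72
```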


def get_deposit_receipt_rlp(spec, deposit_receipt):
    deposit_receipt_rlp = [
        # pubkey
@ -155,13 +176,17 @@ def compute_el_block_hash(spec, payload):

    withdrawals_trie_root = None
    deposit_receipts_trie_root = None
    exits_trie_root = None

    if is_post_capella(spec):
        withdrawals_encoded = [get_withdrawal_rlp(spec, withdrawal) for withdrawal in payload.withdrawals]
        withdrawals_encoded = [get_withdrawal_rlp(withdrawal) for withdrawal in payload.withdrawals]
        withdrawals_trie_root = compute_trie_root_from_indexed_data(withdrawals_encoded)
    if is_post_eip6110(spec):
        deposit_receipts_encoded = [get_deposit_receipt_rlp(spec, receipt) for receipt in payload.deposit_receipts]
        deposit_receipts_trie_root = compute_trie_root_from_indexed_data(deposit_receipts_encoded)
    if is_post_eip7002(spec):
        exits_encoded = [get_exit_rlp(exit) for exit in payload.exits]
        exits_trie_root = compute_trie_root_from_indexed_data(exits_encoded)

    payload_header = get_execution_payload_header(spec, payload)


@ -171,6 +196,7 @@ def compute_el_block_hash(spec, payload):
        transactions_trie_root,
        withdrawals_trie_root,
        deposit_receipts_trie_root,
        exits_trie_root,
    )


@ -203,8 +229,8 @@ def build_empty_execution_payload(spec, state, randao_mix=None):
    if is_post_capella(spec):
        payload.withdrawals = spec.get_expected_withdrawals(state)
    if is_post_deneb(spec):
        payload.data_gas_used = 0
        payload.excess_data_gas = 0
        payload.blob_gas_used = 0
        payload.excess_blob_gas = 0
    if is_post_eip6110(spec):
        # just to be clear
        payload.deposit_receipts = []

@ -233,7 +259,7 @@ def build_randomized_execution_payload(spec, state, rng):

    num_transactions = rng.randint(0, 100)
    execution_payload.transactions = [
        spec.Transaction(get_random_bytes_list(rng, rng.randint(0, 1000)))
        get_random_tx(rng)
        for _ in range(num_transactions)
    ]


@ -264,3 +290,7 @@ def build_state_with_execution_payload_header(spec, state, execution_payload_hea
    pre_state.latest_execution_payload_header = execution_payload_header

    return pre_state


def get_random_tx(rng):
    return get_random_bytes_list(rng, rng.randint(0, 1000))
@ -1,3 +1,5 @@
from typing import NamedTuple, Sequence, Any

from eth_utils import encode_hex
from eth2spec.test.exceptions import BlockNotFoundException
from eth2spec.test.helpers.attestations import (

@ -7,6 +9,40 @@ from eth2spec.test.helpers.attestations import (
)


class BlobData(NamedTuple):
    """
    The return values of ``retrieve_blobs_and_proofs`` helper.
    """
    blobs: Sequence[Any]
    proofs: Sequence[bytes]


def with_blob_data(spec, blob_data, func):
    """
    This helper runs the given ``func`` with monkeypatched ``retrieve_blobs_and_proofs``
    that returns ``blob_data.blobs, blob_data.proofs``.
    """
    def retrieve_blobs_and_proofs(beacon_block_root):
        return blob_data.blobs, blob_data.proofs

    retrieve_blobs_and_proofs_backup = spec.retrieve_blobs_and_proofs
    spec.retrieve_blobs_and_proofs = retrieve_blobs_and_proofs

    class AtomicBoolean():
        value = False
    is_called = AtomicBoolean()

    def wrap(flag: AtomicBoolean):
        yield from func()
        flag.value = True

    try:
        yield from wrap(is_called)
    finally:
        spec.retrieve_blobs_and_proofs = retrieve_blobs_and_proofs_backup
    assert is_called.value
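The wrap-and-flag pattern above is what guarantees the patched function is restored even if the wrapped generator raises, while the final assert ensures the generator was actually driven to completion. A self-contained sketch of the same idea, with made-up module and function names:

```python
import types

module = types.SimpleNamespace(fetch=lambda key: 'real:' + key)  # stand-in "module"


def with_patched_fetch(replacement, func):
    backup = module.fetch
    module.fetch = replacement
    done = [False]  # mutable flag, mirroring AtomicBoolean above

    def wrap():
        yield from func()
        done[0] = True

    try:
        yield from wrap()
    finally:
        module.fetch = backup  # restored even on error
    assert done[0]  # the wrapped generator must run to completion


def uses_fetch():
    yield module.fetch('a')


assert list(with_patched_fetch(lambda key: 'fake:' + key, uses_fetch)) == ['fake:a']
assert module.fetch('a') == 'real:a'  # patch is gone afterwards
```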


def get_anchor_root(spec, state):
    anchor_block_header = state.latest_block_header.copy()
    if anchor_block_header.state_root == spec.Bytes32():

@ -15,7 +51,8 @@ def get_anchor_root(spec, state):


def tick_and_add_block(spec, store, signed_block, test_steps, valid=True,
                       merge_block=False, block_not_found=False, is_optimistic=False):
                       merge_block=False, block_not_found=False, is_optimistic=False,
                       blob_data=None):
    pre_state = store.block_states[signed_block.message.parent_root]
    if merge_block:
        assert spec.is_merge_transition_block(pre_state, signed_block.message.body)

@ -30,11 +67,19 @@ def tick_and_add_block(spec, store, signed_block, test_steps, valid=True,
        valid=valid,
        block_not_found=block_not_found,
        is_optimistic=is_optimistic,
        blob_data=blob_data,
    )

    return post_state


def tick_and_add_block_with_data(spec, store, signed_block, test_steps, blob_data, valid=True):
    def run_func():
        yield from tick_and_add_block(spec, store, signed_block, test_steps, blob_data=blob_data, valid=valid)

    yield from with_blob_data(spec, blob_data, run_func)


def add_attestation(spec, store, attestation, test_steps, is_from_block=False):
    spec.on_attestation(store, attestation, is_from_block=is_from_block)
    yield get_attestation_file_name(attestation), attestation

@ -94,6 +139,13 @@ def get_attester_slashing_file_name(attester_slashing):
    return f"attester_slashing_{encode_hex(attester_slashing.hash_tree_root())}"


def get_blobs_file_name(blobs=None, blobs_root=None):
    if blobs:
        return f"blobs_{encode_hex(blobs.hash_tree_root())}"
    else:
        return f"blobs_{encode_hex(blobs_root)}"


def on_tick_and_append_step(spec, store, time, test_steps):
    spec.on_tick(store, time)
    test_steps.append({'tick': int(time)})

@ -119,35 +171,52 @@ def add_block(spec,
              test_steps,
              valid=True,
              block_not_found=False,
              is_optimistic=False):
              is_optimistic=False,
              blob_data=None):
    """
    Run on_block and on_attestation
    """
    yield get_block_file_name(signed_block), signed_block

    # Check blob_data
    if blob_data is not None:
        blobs = spec.List[spec.Blob, spec.MAX_BLOBS_PER_BLOCK](blob_data.blobs)
        blobs_root = blobs.hash_tree_root()
        yield get_blobs_file_name(blobs_root=blobs_root), blobs

    is_blob_data_test = blob_data is not None

    def _append_step(is_blob_data_test, valid=True):
        if is_blob_data_test:
            test_steps.append({
                'block': get_block_file_name(signed_block),
                'blobs': get_blobs_file_name(blobs_root=blobs_root),
                'proofs': [encode_hex(proof) for proof in blob_data.proofs],
                'valid': valid,
            })
        else:
            test_steps.append({
                'block': get_block_file_name(signed_block),
                'valid': valid,
            })

    if not valid:
        if is_optimistic:
            run_on_block(spec, store, signed_block, valid=True)
            test_steps.append({
                'block': get_block_file_name(signed_block),
                'valid': False,
            })
            _append_step(is_blob_data_test, valid=False)
        else:
            try:
                run_on_block(spec, store, signed_block, valid=True)
            except (AssertionError, BlockNotFoundException) as e:
                if isinstance(e, BlockNotFoundException) and not block_not_found:
                    assert False
                test_steps.append({
                    'block': get_block_file_name(signed_block),
                    'valid': False,
                })
                _append_step(is_blob_data_test, valid=False)
                return
            else:
                assert False
    else:
        run_on_block(spec, store, signed_block, valid=True)
        test_steps.append({'block': get_block_file_name(signed_block)})
        _append_step(is_blob_data_test)

    # An on_block step implies receiving block's attestations
    for attestation in signed_block.message.body.attestations:
@ -16,6 +16,7 @@ from eth2spec.test.helpers.constants import (
    CAPELLA,
    DENEB,
    EIP6110,
    EIP7002,
)
from eth2spec.test.helpers.deposits import (
    prepare_state_and_deposit,

@ -161,6 +162,8 @@ def do_fork(state, spec, post_spec, fork_epoch, with_block=True, sync_aggregate=
        state = post_spec.upgrade_to_deneb(state)
    elif post_spec.fork == EIP6110:
        state = post_spec.upgrade_to_eip6110(state)
    elif post_spec.fork == EIP7002:
        state = post_spec.upgrade_to_eip7002(state)

    assert state.fork.epoch == fork_epoch


@ -179,6 +182,9 @@ def do_fork(state, spec, post_spec, fork_epoch, with_block=True, sync_aggregate=
    elif post_spec.fork == EIP6110:
        assert state.fork.previous_version == post_spec.config.DENEB_FORK_VERSION
        assert state.fork.current_version == post_spec.config.EIP6110_FORK_VERSION
    elif post_spec.fork == EIP7002:
        assert state.fork.previous_version == post_spec.config.CAPELLA_FORK_VERSION
        assert state.fork.current_version == post_spec.config.EIP7002_FORK_VERSION

    if with_block:
        return state, _state_transition_and_sign_block_at_slot(
@ -1,10 +1,12 @@
from .constants import (
    PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB,
    EIP6110,
    EIP6110, EIP7002,
)


def is_post_fork(a, b):
    if a == EIP7002:
        return b in [PHASE0, ALTAIR, BELLATRIX, CAPELLA, EIP7002]
    if a == EIP6110:
        return b in [PHASE0, ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110]
    if a == DENEB:

@ -38,3 +40,7 @@ def is_post_deneb(spec):

def is_post_eip6110(spec):
    return is_post_fork(spec.fork, EIP6110)


def is_post_eip7002(spec):
    return is_post_fork(spec.fork, EIP7002)
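Note the ancestor list for `EIP7002` deliberately omits `DENEB`: this patch is staged on top of Capella, not Deneb. Illustrative expectations, derived directly from the lists above (assumes the fork names from `eth2spec.test.helpers.constants` are in scope):

```python
# Derived from the ancestor lists in is_post_fork above
assert is_post_fork(EIP7002, CAPELLA)      # EIP-7002 builds on Capella
assert not is_post_fork(EIP7002, DENEB)    # ...but does not include Deneb
assert not is_post_fork(CAPELLA, EIP7002)  # Capella predates the patch
```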

@ -1,11 +1,11 @@
from eth2spec.test.helpers.constants import (
    ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110,
    ALTAIR, BELLATRIX, CAPELLA, DENEB, EIP6110, EIP7002,
)
from eth2spec.test.helpers.execution_payload import (
    compute_el_header_block_hash,
)
from eth2spec.test.helpers.forks import (
    is_post_altair, is_post_bellatrix, is_post_capella, is_post_eip6110,
    is_post_altair, is_post_bellatrix, is_post_capella, is_post_eip6110, is_post_eip7002,
)
from eth2spec.test.helpers.keys import pubkeys


@ -49,11 +49,14 @@ def get_sample_genesis_execution_payload_header(spec,
    transactions_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")
    withdrawals_trie_root = None
    deposit_receipts_trie_root = None
    exits_trie_root = None

    if is_post_capella(spec):
        withdrawals_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")
    if is_post_eip6110(spec):
        deposit_receipts_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")
    if is_post_eip7002(spec):
        exits_trie_root = bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")

    payload_header.block_hash = compute_el_header_block_hash(
        spec,

@ -61,6 +64,7 @@ def get_sample_genesis_execution_payload_header(spec,
        transactions_trie_root,
        withdrawals_trie_root,
        deposit_receipts_trie_root,
        exits_trie_root,
    )
    return payload_header


@ -86,6 +90,9 @@ def create_genesis_state(spec, validator_balances, activation_threshold):
    elif spec.fork == EIP6110:
        previous_version = spec.config.DENEB_FORK_VERSION
        current_version = spec.config.EIP6110_FORK_VERSION
    elif spec.fork == EIP7002:
        previous_version = spec.config.CAPELLA_FORK_VERSION
        current_version = spec.config.EIP7002_FORK_VERSION

    state = spec.BeaconState(
        genesis_time=0,
@ -41,7 +41,7 @@ class BlobTransaction(Container):
    value: uint256
    data: ByteList[MAX_CALLDATA_SIZE]
    access_list: List[AccessTuple, MAX_ACCESS_LIST_SIZE]
    max_fee_per_data_gas: uint256
    max_fee_per_blob_gas: uint256
    blob_versioned_hashes: List[Bytes32, MAX_VERSIONED_HASHES_LIST_SIZE]
@ -166,3 +166,8 @@ def has_active_balance_differential(spec, state):
    active_balance = spec.get_total_active_balance(state)
    total_balance = spec.get_total_balance(state, set(range(len(state.validators))))
    return active_balance // spec.EFFECTIVE_BALANCE_INCREMENT != total_balance // spec.EFFECTIVE_BALANCE_INCREMENT


def get_validator_index_by_pubkey(state, pubkey):
    index = next((i for i, validator in enumerate(state.validators) if validator.pubkey == pubkey), None)
    return index
@ -20,9 +20,14 @@ def set_validator_fully_withdrawable(spec, state, index, withdrawable_epoch=None
    assert spec.is_fully_withdrawable_validator(validator, state.balances[index], withdrawable_epoch)


def set_eth1_withdrawal_credential_with_balance(spec, state, index, balance):
def set_eth1_withdrawal_credential_with_balance(spec, state, index, balance=None, address=None):
    if balance is None:
        balance = spec.MAX_EFFECTIVE_BALANCE
    if address is None:
        address = b'\x11' * 20

    validator = state.validators[index]
    validator.withdrawal_credentials = spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + validator.withdrawal_credentials[1:]
    validator.withdrawal_credentials = spec.ETH1_ADDRESS_WITHDRAWAL_PREFIX + b'\x00' * 11 + address
    validator.effective_balance = min(balance, spec.MAX_EFFECTIVE_BALANCE)
    state.balances[index] = balance
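The rebuilt credential is the standard 0x01 layout: one prefix byte, eleven zero bytes of padding, then the 20-byte execution address. A quick shape check (the prefix value `0x01` is assumed here, matching the spec constant):

```python
ETH1_ADDRESS_WITHDRAWAL_PREFIX = b'\x01'  # assumed value of the spec constant
address = b'\x22' * 20

credentials = ETH1_ADDRESS_WITHDRAWAL_PREFIX + b'\x00' * 11 + address
assert len(credentials) == 32       # 1 + 11 + 20
assert credentials[:1] == b'\x01'   # prefix checked by the exit tests
assert credentials[12:] == address  # source address checked by the exit tests
```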

@ -12,6 +12,7 @@ from eth2spec.test.helpers.attestations import (
    get_valid_attestation,
    sign_aggregate_attestation,
    sign_attestation,
    compute_max_inclusion_slot,
)
from eth2spec.test.helpers.state import (
    next_slots,

@ -95,11 +96,22 @@ def test_invalid_before_inclusion_delay(spec, state):

@with_all_phases
@spec_state_test
def test_invalid_after_epoch_slots(spec, state):
def test_at_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=True)

    # increment past latest inclusion slot
    transition_to_slot_via_block(spec, state, state.slot + spec.SLOTS_PER_EPOCH + 1)
    transition_to_slot_via_block(spec, state, compute_max_inclusion_slot(spec, attestation))

    yield from run_attestation_processing(spec, state, attestation)


@with_all_phases
@spec_state_test
def test_invalid_after_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=True)

    # increment past latest inclusion slot
    transition_to_slot_via_block(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)

    yield from run_attestation_processing(spec, state, attestation, valid=False)
@ -361,7 +373,7 @@ def test_invalid_too_few_aggregation_bits(spec, state):


#
# Full correct atttestation contents at different slot inclusions
# Full correct attestation contents at different slot inclusions
#

@with_all_phases
@ -393,11 +405,20 @@ def test_correct_attestation_included_at_one_epoch_delay(spec, state):

@with_all_phases
@spec_state_test
def test_invalid_correct_attestation_included_after_epoch_delay(spec, state):
def test_correct_attestation_included_at_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=True)
    next_slots(spec, state, compute_max_inclusion_slot(spec, attestation))

    yield from run_attestation_processing(spec, state, attestation)


@with_all_phases
@spec_state_test
def test_invalid_correct_attestation_included_after_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=True)

    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)

    yield from run_attestation_processing(spec, state, attestation, valid=False)
@ -432,9 +453,9 @@ def test_incorrect_head_included_at_sqrt_epoch_delay(spec, state):

@with_all_phases
@spec_state_test
def test_incorrect_head_included_at_epoch_delay(spec, state):
def test_incorrect_head_included_at_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=False)
    next_slots(spec, state, spec.SLOTS_PER_EPOCH)
    next_slots(spec, state, compute_max_inclusion_slot(spec, attestation))

    attestation.data.beacon_block_root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
@ -444,11 +465,11 @@ def test_incorrect_head_included_at_epoch_delay(spec, state):

@with_all_phases
@spec_state_test
def test_invalid_incorrect_head_included_after_epoch_delay(spec, state):
def test_invalid_incorrect_head_included_after_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=False)

    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)

    attestation.data.beacon_block_root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
@ -501,10 +522,10 @@ def test_incorrect_head_and_target_included_at_epoch_delay(spec, state):

@with_all_phases
@spec_state_test
def test_invalid_incorrect_head_and_target_included_after_epoch_delay(spec, state):
def test_invalid_incorrect_head_and_target_included_after_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=False)
    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)

    attestation.data.beacon_block_root = b'\x42' * 32
    attestation.data.target.root = b'\x42' * 32
@ -555,10 +576,10 @@ def test_incorrect_target_included_at_epoch_delay(spec, state):

@with_all_phases
@spec_state_test
def test_invalid_incorrect_target_included_after_epoch_delay(spec, state):
def test_invalid_incorrect_target_included_after_max_inclusion_slot(spec, state):
    attestation = get_valid_attestation(spec, state, signed=False)
    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)

    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
@ -34,9 +34,6 @@ from eth2spec.test.helpers.state import (
)


rng = random.Random(1001)


@with_altair_and_later
@spec_state_test
def test_genesis(spec, state):
@ -271,6 +268,7 @@ def test_proposer_boost_correct_head(spec, state):
    next_slots(spec, state_2, 2)
    block_2 = build_empty_block_for_next_slot(spec, state_2)
    signed_block_2 = state_transition_and_sign_block(spec, state_2.copy(), block_2)
    rng = random.Random(1001)
    while spec.hash_tree_root(block_1) >= spec.hash_tree_root(block_2):
        block_2.body.graffiti = spec.Bytes32(hex(rng.getrandbits(8 * 32))[2:].zfill(64))
        signed_block_2 = state_transition_and_sign_block(spec, state_2.copy(), block_2)
@ -339,6 +337,7 @@ def test_discard_equivocations_on_attester_slashing(spec, state):
    next_slots(spec, state_2, 2)
    block_2 = build_empty_block_for_next_slot(spec, state_2)
    signed_block_2 = state_transition_and_sign_block(spec, state_2.copy(), block_2)
    rng = random.Random(1001)
    while spec.hash_tree_root(block_1) >= spec.hash_tree_root(block_2):
        block_2.body.graffiti = spec.Bytes32(hex(rng.getrandbits(8 * 32))[2:].zfill(64))
        signed_block_2 = state_transition_and_sign_block(spec, state_2.copy(), block_2)
@ -539,6 +539,56 @@ def test_proposer_boost_root_same_slot_untimely_block(spec, state):
    yield 'steps', test_steps


@with_altair_and_later
@spec_state_test
def test_proposer_boost_is_first_block(spec, state):
    test_steps = []
    genesis_state = state.copy()

    # Initialization
    store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
    yield 'anchor_state', state
    yield 'anchor_block', anchor_block

    # Build block that serves as head ONLY on timely arrival, and ONLY in that slot
    state = genesis_state.copy()
    next_slots(spec, state, 3)
    pre_state = state.copy()
    block_a = build_empty_block_for_next_slot(spec, state)
    signed_block_a = state_transition_and_sign_block(spec, state, block_a)

    # Process block on timely arrival just before end of boost interval
    time = (store.genesis_time + block_a.slot * spec.config.SECONDS_PER_SLOT +
            spec.config.SECONDS_PER_SLOT // spec.INTERVALS_PER_SLOT - 1)
    on_tick_and_append_step(spec, store, time, test_steps)
    yield from add_block(spec, store, signed_block_a, test_steps)
    # `proposer_boost_root` is now `block_a`
    assert store.proposer_boost_root == spec.hash_tree_root(block_a)
    assert spec.get_weight(store, spec.hash_tree_root(block_a)) > 0
    test_steps.append({
        'checks': {
            'proposer_boost_root': encode_hex(store.proposer_boost_root),
        }
    })

    # make a different block at the same slot
    state = pre_state.copy()
    block_b = block_a.copy()
    block_b.body.graffiti = b'\x34' * 32
    signed_block_b = state_transition_and_sign_block(spec, state, block_b)
    yield from add_block(spec, store, signed_block_b, test_steps)
    # `proposer_boost_root` is still `block_a`
    assert store.proposer_boost_root == spec.hash_tree_root(block_a)
    assert spec.get_weight(store, spec.hash_tree_root(block_b)) == 0
    test_steps.append({
        'checks': {
            'proposer_boost_root': encode_hex(store.proposer_boost_root),
        }
    })

    yield 'steps', test_steps


@with_altair_and_later
@spec_state_test
@with_presets([MINIMAL], reason="too slow")
@ -485,7 +485,7 @@ def test_get_aggregate_and_proof_signature(spec, state):


def run_compute_subscribed_subnets_arguments(spec, rng=random.Random(1111)):
    node_id = rng.randint(0, 2**40 - 1)  # try VALIDATOR_REGISTRY_LIMIT
    node_id = rng.randint(0, 2**256 - 1)
    epoch = rng.randint(0, 2**64 - 1)
    subnets = spec.compute_subscribed_subnets(node_id, epoch)
    assert len(subnets) == spec.config.SUBNETS_PER_NODE
@ -9,6 +9,7 @@ from typing import Callable

from eth2spec.test.helpers.execution_payload import (
    compute_el_block_hash,
    build_randomized_execution_payload,
)
from eth2spec.test.helpers.multi_operations import (
    build_random_block_from_state_for_next_slot,

@@ -216,14 +217,17 @@ def random_block_altair_with_cycling_sync_committee_participation(spec,
     return block


-def random_block_bellatrix(spec, state, signed_blocks, scenario_state):
+def random_block_bellatrix(spec, state, signed_blocks, scenario_state, rng=Random(3456)):
     block = random_block_altair_with_cycling_sync_committee_participation(spec, state, signed_blocks, scenario_state)
-    # TODO: return randomized execution payload
+    # build execution_payload at the next slot
+    state = state.copy()
+    next_slot(spec, state)
+    block.body.execution_payload = build_randomized_execution_payload(spec, state, rng=rng)
     return block


 def random_block_capella(spec, state, signed_blocks, scenario_state, rng=Random(3456)):
-    block = random_block_bellatrix(spec, state, signed_blocks, scenario_state)
+    block = random_block_bellatrix(spec, state, signed_blocks, scenario_state, rng=rng)
     block.body.bls_to_execution_changes = get_random_bls_to_execution_changes(
         spec,
         state,
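
The common thread in this hunk and the next is threading a single seeded `Random` instance through the fork-specific builders, so randomized blocks stay reproducible while callers can still inject their own source. Note also that a `rng=Random(3456)` default argument is evaluated once at definition time in Python, so repeated default calls continue one shared stream rather than restarting it. A minimal standalone sketch of the pattern, with illustrative names rather than pyspec APIs:

```python
from random import Random

def build_payload(rng: Random) -> dict:
    # stand-in for build_randomized_execution_payload
    return {'tx_count': rng.randint(0, 10)}

def build_block(rng: Random) -> dict:
    # accepts the caller's rng instead of creating its own,
    # so a single seed determines the whole scenario
    return {'payload': build_payload(rng)}

def build_scenario(seed: int = 3456) -> list:
    rng = Random(seed)
    return [build_block(rng) for _ in range(3)]

assert build_scenario() == build_scenario()  # same seed, identical blocks
```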

@@ -233,10 +237,11 @@ def random_block_capella(spec, state, signed_blocks, scenario_state, rng=Random(


 def random_block_deneb(spec, state, signed_blocks, scenario_state, rng=Random(3456)):
-    block = random_block_capella(spec, state, signed_blocks, scenario_state)
+    block = random_block_capella(spec, state, signed_blocks, scenario_state, rng=rng)
     # TODO: more commitments. blob_kzg_commitments: List[KZGCommitment, MAX_BLOBS_PER_BLOCK]
-    opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(spec, blob_count=1)
-    block.body.execution_payload.transactions = [opaque_tx]
+    opaque_tx, _, blob_kzg_commitments, _ = get_sample_opaque_tx(
+        spec, blob_count=rng.randint(0, spec.MAX_BLOBS_PER_BLOCK), rng=rng)
+    block.body.execution_payload.transactions.append(opaque_tx)
     block.body.execution_payload.block_hash = compute_el_block_hash(spec, block.body.execution_payload)
     block.body.blob_kzg_commitments = blob_kzg_commitments
     return block
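
One detail worth calling out in the `blob_count` change: `rng.randint(a, b)` is inclusive at both ends, so generated blocks range from blob-free up to the full maximum. A quick check, using 6 for `MAX_BLOBS_PER_BLOCK` (the Deneb mainnet preset value; other presets may differ):

```python
from random import Random

MAX_BLOBS_PER_BLOCK = 6  # illustrative; taken from the Deneb mainnet preset
rng = Random(3456)
counts = [rng.randint(0, MAX_BLOBS_PER_BLOCK) for _ in range(1000)]
# both endpoints are reachable: some blocks carry no blobs, some the maximum
assert min(counts) == 0 and max(counts) == MAX_BLOBS_PER_BLOCK
```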

@@ -2,6 +2,30 @@

 The aim of the fork choice tests is to provide test coverage of the various components of the fork choice.

+## Table of contents
+<!-- TOC -->
+<!-- START doctoc generated TOC please keep comment here to allow auto update -->
+<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
+
+- [Test case format](#test-case-format)
+  - [`meta.yaml`](#metayaml)
+  - [`anchor_state.ssz_snappy`](#anchor_statessz_snappy)
+  - [`anchor_block.ssz_snappy`](#anchor_blockssz_snappy)
+  - [`steps.yaml`](#stepsyaml)
+    - [`on_tick` execution step](#on_tick-execution-step)
+    - [`on_attestation` execution step](#on_attestation-execution-step)
+    - [`on_block` execution step](#on_block-execution-step)
+    - [`on_merge_block` execution step](#on_merge_block-execution-step)
+    - [`on_attester_slashing` execution step](#on_attester_slashing-execution-step)
+    - [`on_payload_info` execution step](#on_payload_info-execution-step)
+  - [Checks step](#checks-step)
+  - [`attestation_<32-byte-root>.ssz_snappy`](#attestation_32-byte-rootssz_snappy)
+  - [`block_<32-byte-root>.ssz_snappy`](#block_32-byte-rootssz_snappy)
+- [Condition](#condition)
+
+<!-- END doctoc generated TOC please keep comment here to allow auto update -->
+<!-- /TOC -->
+
 ## Test case format

 ### `meta.yaml`

@@ -59,14 +83,20 @@ The parameter that is required for executing `on_block(store, block)`.

 ```yaml
 {
-    block: string -- the name of the `block_<32-byte-root>.ssz_snappy` file.
-        To execute `on_block(store, block)` with the given attestation.
-    valid: bool -- optional, default to `true`.
-        If it's `false`, this execution step is expected to be invalid.
+    block: string -- the name of the `block_<32-byte-root>.ssz_snappy` file.
+        To execute `on_block(store, block)` with the given block.
+    blobs: string -- optional, the name of the `blobs_<32-byte-root>.ssz_snappy` file.
+        The blobs file content is a `List[Blob, MAX_BLOBS_PER_BLOCK]` SSZ object.
+    proofs: array of byte48 hex string -- optional, the proofs of blob commitments.
+    valid: bool -- optional, default to `true`.
+        If it's `false`, this execution step is expected to be invalid.
 }
 ```

 The file is located in the same folder (see below).

+`blobs` and `proofs` are new fields in Deneb (EIP-4844). They carry the values that the `retrieve_blobs_and_proofs()` helper is expected to return inside `is_data_available()`. If the two fields are absent, `retrieve_blobs_and_proofs()` returns empty lists.
+
 After this step, the `store` object may have been updated.

 #### `on_merge_block` execution step
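
To make the `on_block` step format above concrete, here is a minimal consumer sketch. It is a sketch under stated assumptions, not the reference runner: `read_file` is a hypothetical callback for fetching test-case files, wiring `blobs`/`proofs` into a `retrieve_blobs_and_proofs()` stub is omitted, and validity checking is reduced to the `valid` flag:

```python
import snappy  # python-snappy; test fixtures are snappy-compressed SSZ

def run_on_block_step(spec, store, step, read_file):
    """Execute one `on_block` step from steps.yaml.

    `read_file(name)` is a hypothetical helper returning the raw bytes of
    `<name>.ssz_snappy` from the test case folder.
    """
    block_bytes = snappy.decompress(read_file(step['block']))
    signed_block = spec.SignedBeaconBlock.decode_bytes(block_bytes)

    # Deneb: `blobs`/`proofs` pin what retrieve_blobs_and_proofs() should
    # return inside is_data_available(); absent fields mean empty lists.
    blobs_file = step.get('blobs')    # stubbing the retrieval helper with
    proofs = step.get('proofs', [])   # these is left out of this sketch

    should_be_valid = step.get('valid', True)
    try:
        spec.on_block(store, signed_block)
    except AssertionError:
        assert not should_be_valid, "on_block rejected a step marked valid"
    else:
        assert should_be_valid, "on_block accepted a step marked invalid"
```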

@@ -1,6 +1,6 @@
 # Test format: Verify blob KZG proof batch

-Use the blob KZG proofs to verify that the KZG commitments for given `blob`s are correct
+Use the blob KZG proofs to verify that the KZG commitments for the given `blobs` are correct

 ## Test case format

@@ -8,13 +8,13 @@ The test data is declared in a `data.yaml` file:

 ```yaml
 input:
-  blob: List[Blob] -- the data blob
-  commitment: List[KZGCommitment] -- the KZG commitment to the data blob
-  proof: List[KZGProof] -- The KZG proof
+  blobs: List[Blob] -- the data blobs
+  commitments: List[KZGCommitment] -- the KZG commitments to the data blobs
+  proofs: List[KZGProof] -- the KZG proofs
 output: bool -- true (all proofs are valid) or false (some proofs incorrect)
 ```

-- `blob`s here are encoded as a string: hexadecimal encoding of `4096 * 32 = 131072` bytes, prefixed with `0x`.
+- Each of the `blobs` is encoded as a string: hexadecimal encoding of `4096 * 32 = 131072` bytes, prefixed with `0x`.

 All byte(s) fields are encoded as strings, hexadecimal encoding, prefixed with `0x`.
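
For completeness, a rough consumer sketch for this format: load `data.yaml`, strip the `0x` prefixes, and compare the verifier's result against `output`. It assumes PyYAML and a pyspec `spec` exposing `verify_blob_kzg_proof_batch`, per the Deneb polynomial-commitments spec:

```python
import yaml  # PyYAML, assumed available to the consumer

def run_verify_blob_kzg_proof_batch_case(spec, path):
    with open(path) as f:
        data = yaml.safe_load(f)

    def unhex(s):
        return bytes.fromhex(s[2:])  # drop the `0x` prefix

    blobs = [spec.Blob(unhex(b)) for b in data['input']['blobs']]
    commitments = [spec.KZGCommitment(unhex(c)) for c in data['input']['commitments']]
    proofs = [spec.KZGProof(unhex(p)) for p in data['input']['proofs']]

    # `output` is a bool; a conforming implementation must match it exactly
    assert spec.verify_blob_kzg_proof_batch(blobs, commitments, proofs) == data['output']
```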

@@ -19,7 +19,13 @@ if __name__ == "__main__":
     ]}
     bellatrix_mods = combine_mods(_new_bellatrix_mods, altair_mods)
     capella_mods = bellatrix_mods  # No additional Capella specific fork choice tests
-    deneb_mods = capella_mods  # No additional Deneb specific fork choice tests
+
+    # Deneb adds `is_data_available` tests
+    _new_deneb_mods = {key: 'eth2spec.test.deneb.fork_choice.test_' + key for key in [
+        'on_block',
+    ]}
+    deneb_mods = combine_mods(_new_deneb_mods, capella_mods)
+
     eip6110_mods = deneb_mods  # No additional EIP6110 specific fork choice tests

     all_mods = {
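
For readers new to the generator plumbing: `combine_mods` folds a fork's new test modules into everything inherited from the previous fork, which is why Deneb starts from `capella_mods` here. The sketch below is a deliberate simplification of that idea, not the real helper (which lives in the pyspec generator utilities and may treat keys present in both dicts differently, e.g. by keeping both forks' modules):

```python
def combine_mods_simplified(new_mods: dict, previous_mods: dict) -> dict:
    # simplification: a fork inherits all earlier modules, and a key that
    # appears in both dicts is taken from the newer fork
    return {**previous_mods, **new_mods}

capella_mods = {'get_head': 'eth2spec.test.phase0.fork_choice.test_get_head'}
_new_deneb_mods = {'on_block': 'eth2spec.test.deneb.fork_choice.test_on_block'}
deneb_mods = combine_mods_simplified(_new_deneb_mods, capella_mods)
assert set(deneb_mods) == {'get_head', 'on_block'}  # inherited plus new
```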

@@ -46,11 +46,11 @@ class BitsStruct(Container):
     E: Bitvector[8]


-def container_case_fn(rng: Random, mode: RandomizationMode, typ: Type[View]):
+def container_case_fn(rng: Random, mode: RandomizationMode, typ: Type[View], chaos: bool=False):
     return get_random_ssz_object(rng, typ,
                                  max_bytes_length=2000,
                                  max_list_length=2000,
-                                 mode=mode, chaos=False)
+                                 mode=mode, chaos=chaos)


 PRESET_CONTAINERS: Dict[str, Tuple[Type[View], Sequence[int]]] = {

@@ -68,17 +68,23 @@ def valid_cases():
     for (name, (typ, offsets)) in PRESET_CONTAINERS.items():
         for mode in [RandomizationMode.mode_zero, RandomizationMode.mode_max]:
             yield f'{name}_{mode.to_name()}', valid_test_case(lambda: container_case_fn(rng, mode, typ))
-        random_modes = [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]
-        if len(offsets) != 0:
-            random_modes.extend([RandomizationMode.mode_nil_count,
-                                 RandomizationMode.mode_one_count,
-                                 RandomizationMode.mode_max_count])
-        for mode in random_modes:
-            for variation in range(10):
-                yield f'{name}_{mode.to_name()}_{variation}', \
-                    valid_test_case(lambda: container_case_fn(rng, mode, typ))
+        if len(offsets) == 0:
+            modes = [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]
+        else:
+            modes = list(RandomizationMode)
+
+        for mode in modes:
+            for variation in range(3):
+                yield f'{name}_{mode.to_name()}_chaos_{variation}', \
+                    valid_test_case(lambda: container_case_fn(rng, mode, typ, chaos=True))
+        # Note: below is the second wave of iteration; only the random mode is selected
+        # for containers without offsets, since ``RandomizationMode.mode_zero`` and ``RandomizationMode.mode_max``
+        # are deterministic.
+        modes = [RandomizationMode.mode_random] if len(offsets) == 0 else list(RandomizationMode)
+        for mode in modes:
+            for variation in range(10):
+                yield f'{name}_{mode.to_name()}_{variation}', \
+                    valid_test_case(lambda: container_case_fn(rng, mode, typ))

@@ -18,13 +18,16 @@ UINT_TYPES = [uint8, uint16, uint32, uint64, uint128, uint256]
 def valid_cases():
     rng = Random(1234)
     for uint_type in UINT_TYPES:
-        mode = RandomizationMode.mode_random
         byte_len = uint_type.type_byte_length()
         yield f'uint_{byte_len * 8}_last_byte_empty', \
             valid_test_case(lambda: uint_type((2 ** ((byte_len - 1) * 8)) - 1))
         for variation in range(5):
-            yield f'uint_{byte_len * 8}_{mode.to_name()}_{variation}', \
-                valid_test_case(lambda: uint_case_fn(rng, mode, uint_type))
+            for mode in [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]:
+                yield f'uint_{byte_len * 8}_{mode.to_name()}_{variation}', \
+                    valid_test_case(lambda: uint_case_fn(rng, mode, uint_type))
+        for mode in [RandomizationMode.mode_zero, RandomizationMode.mode_max]:
+            yield f'uint_{byte_len * 8}_{mode.to_name()}', \
+                valid_test_case(lambda: uint_case_fn(rng, mode, uint_type))


 def invalid_cases():
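
One case name above merits a gloss: SSZ uints serialize little-endian, so `(2 ** ((byte_len - 1) * 8)) - 1` fills every byte except the final (most significant) serialized byte. A quick worked check for `uint16`:

```python
# uint16: byte_len = 2, value = 2**8 - 1 = 255
byte_len = 2
value = (2 ** ((byte_len - 1) * 8)) - 1
# little-endian serialization gives ff 00 -- the last byte is "empty"
assert value.to_bytes(byte_len, 'little') == b'\xff\x00'
```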