Merge branch 'dev' into eip7002

Hsiao-Wei Wang 2023-06-26 18:07:48 +08:00
commit 7b771ccb13
No known key found for this signature in database
GPG Key ID: AE3D6B174F971DE4
38 changed files with 1296 additions and 1063 deletions


@@ -6,10 +6,8 @@ defaults:
env:
TEST_PRESET_TYPE: "minimal"
DEFAULT_BRANCH: "dev"
# Run tests on workflow_dispatch
on:
on:
push:
branches:
- dev
@@ -22,10 +20,6 @@ on:
description: Type of test to run, either mainnet or minimal
type: string
required: true
commitRef:
description: The branch, tag or SHA to checkout and build from
default: dev
required: true
schedule:
- cron: '0 0 * * *'
@@ -47,8 +41,6 @@ jobs:
steps:
- name: Checkout this repo
uses: actions/checkout@v3.2.0
with:
ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
- name: Check table of contents
run: sudo npm install -g doctoc@2.2.0 && make check_toc
@@ -58,8 +50,6 @@ jobs:
steps:
- name: Checkout this repo
uses: actions/checkout@v3.2.0
with:
ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
- name: Check codespell
run: pip install 'codespell<3.0.0,>=2.0.0' --user && make codespell
@@ -69,8 +59,6 @@ jobs:
steps:
- name: Checkout this repo
uses: actions/checkout@v3.2.0
with:
ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
- name: Install pyspec requirements
run: make install_test
- name: Run linter for pyspec
@@ -87,8 +75,6 @@ jobs:
steps:
- name: Checkout this repo
uses: actions/checkout@v3.2.0
with:
ref: ${{ github.event.inputs.commitRef || env.DEFAULT_BRANCH }}
- name: set TEST_PRESET_TYPE
if: github.event.inputs.test_preset_type != ''
run: |

1
.gitignore vendored

@@ -4,6 +4,7 @@ venv
.venvs
.venv
/.pytest_cache
*.swp
build/
output/


@@ -53,17 +53,17 @@ For example, if the latest fork is Capella, use `./specs/capella` content as your
### 4. Add `fork.md`
You can refer to the previous fork's `fork.md` file.
### 5. Make it executable
- Update [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name.
- Update [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py):
- Add a new `SpecBuilder` with the new feature name constant. e.g., `EIP9999SpecBuilder`
- Add the new `SpecBuilder` to `spec_builders` list.
- Add the path of the new markdown files in `finalize_options` function.
- Update Pyspec [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name.
- Update the helpers in [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py) that build the spec:
  - Update [`pysetup/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/constants.py) with the new feature name, matching the definition in the Pyspec `constants.py`.
  - Update [`pysetup/spec_builders/__init__.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/spec_builders/__init__.py): implement a new `<FEATURE_NAME>SpecBuilder` in `pysetup/spec_builders/<FEATURE_NAME>.py` (e.g., `EIP9999SpecBuilder`; see the sketch below) and append it to the `spec_builders` list.
  - Update [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py): add the path of the new markdown files in the `get_md_doc_paths` function if needed.
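As a concrete illustration, a minimal builder for the hypothetical `EIP9999` feature used in the examples above might look like the sketch below. It is modeled on the `EIP7002SpecBuilder` added in this commit; the choice of Capella as the parent fork is an assumption for the example:

```python
# pysetup/spec_builders/eip9999.py -- hypothetical sketch, not part of this commit
from .base import BaseSpecBuilder
from ..constants import EIP9999  # assumes EIP9999 = 'eip9999' was added to pysetup/constants.py


class EIP9999SpecBuilder(BaseSpecBuilder):
    fork: str = EIP9999

    @classmethod
    def imports(cls, preset_name: str):
        # Layer on top of the assumed parent fork's spec module.
        return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''
```

The builder is then registered by appending it to `spec_builders` in `pysetup/spec_builders/__init__.py`, and its ancestry is declared by adding `EIP9999: CAPELLA` to `PREVIOUS_FORK_OF` in `pysetup/md_doc_paths.py`.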
## B: Make it executable for pytest and test generator
### 1. Add `light-client/*` docs if you updated the content of `BeaconBlock`
### 1. [Optional] Add `light-client/*` docs if you updated the content of `BeaconBlock`
- You can refer to the previous fork's `light-client/*` file.
- Add the path of the new markdown files in `setup.py`'s `finalize_options` function.
- Add the path of the new markdown files in [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py)'s `get_md_doc_paths` function.
### 2. Add the mainnet and minimal presets and update the configs
- Add presets: `presets/mainnet/<new-feature-name>.yaml` and `presets/minimal/<new-feature-name>.yaml`


@@ -0,0 +1,20 @@
# Mainnet preset - Whisk
# Misc
# ---------------------------------------------------------------
# `uint64(4)`
CURDLEPROOFS_N_BLINDERS: 4
# `uint64(2**14)`
WHISK_CANDIDATE_TRACKERS_COUNT: 16384
# `uint64(2**13)` must be < WHISK_CANDIDATE_TRACKERS_COUNT
WHISK_PROPOSER_TRACKERS_COUNT: 8192
# `Epoch(2**8)`
WHISK_EPOCHS_PER_SHUFFLING_PHASE: 256
# `uint64(2**7 - CURDLEPROOFS_N_BLINDERS)`
WHISK_VALIDATORS_PER_SHUFFLE: 124
# `Epoch(2)`
WHISK_PROPOSER_SELECTION_GAP: 2
# `uint64(2**15)` TODO: will be replaced by a fixed format once there's a serialized format
WHISK_MAX_SHUFFLE_PROOF_SIZE: 32768
# `uint64(2**10)` TODO: will be replaced by a fixed format once there's a serialized format
WHISK_MAX_OPENING_PROOF_SIZE: 1024
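The literal values above can be cross-checked against their commented formulas with a few lines of plain Python (a sanity check, not part of the commit):

```python
# Sanity-check the mainnet Whisk preset values against their commented formulas.
CURDLEPROOFS_N_BLINDERS = 4
assert 2**14 == 16384                         # WHISK_CANDIDATE_TRACKERS_COUNT
assert 2**13 == 8192 < 16384                  # WHISK_PROPOSER_TRACKERS_COUNT < candidates
assert 2**8 == 256                            # WHISK_EPOCHS_PER_SHUFFLING_PHASE
assert 2**7 - CURDLEPROOFS_N_BLINDERS == 124  # WHISK_VALIDATORS_PER_SHUFFLE
assert 2**15 == 32768 and 2**10 == 1024       # proof-size placeholders
```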


@@ -0,0 +1,20 @@
# Minimal preset - Whisk
# Misc
# ---------------------------------------------------------------
# [customized]
CURDLEPROOFS_N_BLINDERS: 4
# [customized]
WHISK_CANDIDATE_TRACKERS_COUNT: 32
# [customized]
WHISK_PROPOSER_TRACKERS_COUNT: 16
# [customized]
WHISK_EPOCHS_PER_SHUFFLING_PHASE: 4
# [customized]
WHISK_VALIDATORS_PER_SHUFFLE: 4
# [customized]
WHISK_PROPOSER_SELECTION_GAP: 1
# `uint64(2**15)` TODO: will be replaced by a fixed format once there's a serialized format
WHISK_MAX_SHUFFLE_PROOF_SIZE: 32768
# `uint64(2**10)` TODO: will be replaced by a fixed format once there's a serialized format
WHISK_MAX_OPENING_PROOF_SIZE: 1024

0
pysetup/__init__.py Normal file

34
pysetup/constants.py Normal file

@@ -0,0 +1,34 @@
# Definitions in context.py
PHASE0 = 'phase0'
ALTAIR = 'altair'
BELLATRIX = 'bellatrix'
CAPELLA = 'capella'
DENEB = 'deneb'
EIP6110 = 'eip6110'
EIP7002 = 'eip7002'
WHISK = 'whisk'
# The helper functions that are used when defining constants
CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = '''
def ceillog2(x: int) -> uint64:
if x < 1:
raise ValueError(f"ceillog2 accepts only positive values, x={x}")
return uint64((x - 1).bit_length())
def floorlog2(x: int) -> uint64:
if x < 1:
raise ValueError(f"floorlog2 accepts only positive values, x={x}")
return uint64(x.bit_length() - 1)
'''
OPTIMIZED_BLS_AGGREGATE_PUBKEYS = '''
def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey:
return bls.AggregatePKs(pubkeys)
'''
ETH2_SPEC_COMMENT_PREFIX = "eth2spec:"
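For intuition, the two helpers bracket a value between powers of two. An int-only restatement (dropping the `uint64` wrapper, purely for illustration) behaves as follows:

```python
# Plain-int restatement of the helpers above, for illustration only.
def ceillog2(x: int) -> int:
    return (x - 1).bit_length()   # smallest n with 2**n >= x

def floorlog2(x: int) -> int:
    return x.bit_length() - 1     # largest n with 2**n <= x

assert ceillog2(8) == floorlog2(8) == 3       # exact powers of two agree
assert ceillog2(5) == 3 and floorlog2(5) == 2  # non-powers are bracketed
```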

253
pysetup/helpers.py Normal file

@@ -0,0 +1,253 @@
import re
from typing import TypeVar, Dict
import textwrap
from functools import reduce
from .constants import CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS
from .spec_builders import spec_builders
from .md_doc_paths import PREVIOUS_FORK_OF
from .typing import (
ProtocolDefinition,
SpecObject,
VariableDefinition,
)
def collect_prev_forks(fork: str) -> list[str]:
forks = [fork]
while True:
fork = PREVIOUS_FORK_OF[fork]
if fork is None:
return forks
forks.append(fork)
def is_byte_vector(value: str) -> bool:
return value.startswith(('ByteVector'))
def make_function_abstract(protocol_def: ProtocolDefinition, key: str):
function = protocol_def.functions[key].split('"""')
protocol_def.functions[key] = function[0] + "..."
def objects_to_spec(preset_name: str,
spec_object: SpecObject,
fork: str,
ordered_class_objects: Dict[str, str]) -> str:
"""
Given all the objects that constitute a spec, combine them into a single pyfile.
"""
new_type_definitions = (
'\n\n'.join(
[
f"class {key}({value}):\n pass\n" if not is_byte_vector(value) else f"class {key}({value}): # type: ignore\n pass\n"
for key, value in spec_object.custom_types.items()
]
)
)
# Collect builders with the reversed previous forks
# e.g. `[bellatrix, altair, phase0]` -> `[phase0, altair, bellatrix]`
builders = [spec_builders[fork] for fork in collect_prev_forks(fork)[::-1]]
def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str:
abstract_functions = ["verify_and_notify_new_payload"]
for key in protocol_def.functions.keys():
if key in abstract_functions:
make_function_abstract(protocol_def, key)
protocol = f"class {protocol_name}(Protocol):"
for fn_source in protocol_def.functions.values():
fn_source = fn_source.replace("self: "+protocol_name, "self")
protocol += "\n\n" + textwrap.indent(fn_source, " ")
return protocol
protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items())
for k in list(spec_object.functions):
if k in [
"ceillog2",
"floorlog2",
"compute_merkle_proof_for_block_body",
"compute_merkle_proof_for_state",
]:
del spec_object.functions[k]
functions = reduce(lambda fns, builder: builder.implement_optimizations(fns), builders, spec_object.functions)
functions_spec = '\n\n\n'.join(functions.values())
# Access global dict of config vars for runtime configurables
for name in spec_object.config_vars.keys():
functions_spec = re.sub(r"\b%s\b" % name, 'config.' + name, functions_spec)
def format_config_var(name: str, vardef: VariableDefinition) -> str:
if vardef.type_name is None:
out = f'{name}={vardef.value},'
else:
out = f'{name}={vardef.type_name}({vardef.value}),'
if vardef.comment is not None:
out += f' # {vardef.comment}'
return out
config_spec = 'class Configuration(NamedTuple):\n'
config_spec += ' PRESET_BASE: str\n'
config_spec += '\n'.join(f' {k}: {v.type_name if v.type_name is not None else "int"}'
for k, v in spec_object.config_vars.items())
config_spec += '\n\n\nconfig = Configuration(\n'
config_spec += f' PRESET_BASE="{preset_name}",\n'
config_spec += '\n'.join(' ' + format_config_var(k, v) for k, v in spec_object.config_vars.items())
config_spec += '\n)\n'
def format_constant(name: str, vardef: VariableDefinition) -> str:
if vardef.type_name is None:
if vardef.type_hint is None:
out = f'{name} = {vardef.value}'
else:
out = f'{name}: {vardef.type_hint} = {vardef.value}'
else:
out = f'{name} = {vardef.type_name}({vardef.value})'
if vardef.comment is not None:
out += f' # {vardef.comment}'
return out
# Merge all constant objects
hardcoded_ssz_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_ssz_dep_constants()}, builders, {})
hardcoded_custom_type_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_custom_type_dep_constants(spec_object)}, builders, {})
# Concatenate all strings
imports = reduce(lambda txt, builder: (txt + "\n\n" + builder.imports(preset_name) ).strip("\n"), builders, "")
preparations = reduce(lambda txt, builder: (txt + "\n\n" + builder.preparations() ).strip("\n"), builders, "")
sundry_functions = reduce(lambda txt, builder: (txt + "\n\n" + builder.sundry_functions() ).strip("\n"), builders, "")
# Keep engine from the most recent fork
execution_engine_cls = reduce(lambda txt, builder: builder.execution_engine_cls() or txt, builders, "")
constant_vars_spec = '# Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items())
preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items())
ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values())
ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_ssz_dep_constants[x]), hardcoded_ssz_dep_constants))
ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), hardcoded_ssz_dep_constants))
custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_custom_type_dep_constants[x]), hardcoded_custom_type_dep_constants))
spec_strs = [
imports,
preparations,
f"fork = \'{fork}\'\n",
# The constants that some SSZ containers require. Need to be defined before `new_type_definitions`
custom_type_dep_constants,
new_type_definitions,
CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS,
# The constants that some SSZ containers require. Need to be defined before `constants_spec`
ssz_dep_constants,
constant_vars_spec,
preset_vars_spec,
config_spec,
ordered_class_objects_spec,
protocols_spec,
functions_spec,
sundry_functions,
execution_engine_cls,
# Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
# the same as the spec definition.
ssz_dep_constants_verification,
]
return "\n\n\n".join([str.strip("\n") for str in spec_strs if str]) + "\n"
def combine_protocols(old_protocols: Dict[str, ProtocolDefinition],
new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]:
for key, value in new_protocols.items():
if key not in old_protocols:
old_protocols[key] = value
else:
functions = combine_dicts(old_protocols[key].functions, value.functions)
old_protocols[key] = ProtocolDefinition(functions=functions)
return old_protocols
T = TypeVar('T')
def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]:
return {**old_dict, **new_dict}
ignored_dependencies = [
'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature',
'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
'bytes', 'byte', 'ByteList', 'ByteVector',
'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set',
'Optional', 'Sequence',
]
def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None:
"""
Determines which SSZ Object is dependent on which other and orders them appropriately
"""
items = list(objects.items())
for key, value in items:
dependencies = []
for line in value.split('\n'):
if not re.match(r'\s+\w+: .+', line):
continue # skip whitespace etc.
line = line[line.index(':') + 1:] # strip off field name
if '#' in line:
line = line[:line.index('#')] # strip off comment
dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies
dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants
dependencies = filter(lambda x: x not in ignored_dependencies, dependencies)
dependencies = filter(lambda x: x not in custom_types, dependencies)
for dep in dependencies:
key_list = list(objects.keys())
for item in [dep, key] + key_list[key_list.index(dep)+1:]:
objects[item] = objects.pop(item)
def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
"""
Takes in old spec and new spec ssz objects, combines them,
and returns the newer versions of the objects in dependency order.
"""
for key, value in new_objects.items():
old_objects[key] = value
return old_objects
def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
"""
Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
"""
protocols = combine_protocols(spec0.protocols, spec1.protocols)
functions = combine_dicts(spec0.functions, spec1.functions)
custom_types = combine_dicts(spec0.custom_types, spec1.custom_types)
constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars)
preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars)
config_vars = combine_dicts(spec0.config_vars, spec1.config_vars)
ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants)
ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types)
dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses)
return SpecObject(
functions=functions,
protocols=protocols,
custom_types=custom_types,
constant_vars=constant_vars,
preset_vars=preset_vars,
config_vars=config_vars,
ssz_dep_constants=ssz_dep_constants,
ssz_objects=ssz_objects,
dataclasses=dataclasses,
)
def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]:
"""
Parses a dict of basic str/int/list types into a dict for insertion into the spec code.
"""
out: Dict[str, str] = dict()
for k, v in conf.items():
if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'):
# Represent byte data with string, to avoid misinterpretation as big-endian int.
# Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer.
out[k] = f"'{v}'"
else:
out[k] = str(int(v))
return out
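To see how fork ancestry drives spec assembly, here is a self-contained sketch of `collect_prev_forks` run against an abridged `PREVIOUS_FORK_OF` (the full mapping lives in `pysetup/md_doc_paths.py`):

```python
# Abridged: each fork points at its parent, phase0 at None.
PREVIOUS_FORK_OF = {'phase0': None, 'altair': 'phase0', 'bellatrix': 'altair',
                    'capella': 'bellatrix', 'whisk': 'capella'}

def collect_prev_forks(fork):
    forks = [fork]
    while True:
        fork = PREVIOUS_FORK_OF[fork]
        if fork is None:
            return forks
        forks.append(fork)

# Newest-first ancestry...
assert collect_prev_forks('whisk') == ['whisk', 'capella', 'bellatrix', 'altair', 'phase0']
# ...which objects_to_spec reverses, so reduce() applies phase0 first and each
# later fork's imports, constants, and optimizations layer on top.
assert collect_prev_forks('whisk')[::-1][0] == 'phase0'
```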

78
pysetup/md_doc_paths.py Normal file

@@ -0,0 +1,78 @@
import os
from .constants import (
PHASE0,
ALTAIR,
BELLATRIX,
CAPELLA,
DENEB,
EIP6110,
WHISK,
EIP7002,
)
PREVIOUS_FORK_OF = {
PHASE0: None,
ALTAIR: PHASE0,
BELLATRIX: ALTAIR,
CAPELLA: BELLATRIX,
DENEB: CAPELLA,
EIP6110: DENEB,
WHISK: CAPELLA,
EIP7002: CAPELLA,
}
ALL_FORKS = list(PREVIOUS_FORK_OF.keys())
IGNORE_SPEC_FILES = [
"specs/phase0/deposit-contract.md"
]
EXTRA_SPEC_FILES = {
BELLATRIX: "sync/optimistic.md"
}
def is_post_fork(a, b) -> bool:
"""
Returns true if fork a is after b, or if a == b
"""
if a == b:
return True
prev_fork = PREVIOUS_FORK_OF[a]
if prev_fork == b:
return True
elif prev_fork == None:
return False
else:
return is_post_fork(prev_fork, b)
def get_fork_directory(fork):
dir1 = f'specs/{fork}'
if os.path.exists(dir1):
return dir1
dir2 = f'specs/_features/{fork}'
if os.path.exists(dir2):
return dir2
raise FileNotFoundError(f"No directory found for fork: {fork}")
def get_md_doc_paths(spec_fork: str) -> str:
md_doc_paths = ""
for fork in ALL_FORKS:
if is_post_fork(spec_fork, fork):
# Append all files in fork directory recursively
for root, dirs, files in os.walk(get_fork_directory(fork)):
for filename in files:
filepath = os.path.join(root, filename)
if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
md_doc_paths += filepath + "\n"
# Append extra files if any
if fork in EXTRA_SPEC_FILES:
md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n"
return md_doc_paths
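Note that `is_post_fork` encodes a tree rather than a total order, since `whisk` and `eip7002` branch from `capella`. Reusing the mapping and function defined in this file:

```python
# Assumes PREVIOUS_FORK_OF and is_post_fork as defined above.
assert is_post_fork('deneb', 'altair')       # deneb descends from altair
assert is_post_fork('eip7002', 'capella')    # eip7002 branches off capella...
assert not is_post_fork('eip7002', 'deneb')  # ...so it is NOT post-deneb
```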


@@ -0,0 +1,17 @@
from .phase0 import Phase0SpecBuilder
from .altair import AltairSpecBuilder
from .bellatrix import BellatrixSpecBuilder
from .capella import CapellaSpecBuilder
from .deneb import DenebSpecBuilder
from .eip6110 import EIP6110SpecBuilder
from .eip7002 import EIP7002SpecBuilder
from .whisk import WhiskSpecBuilder
spec_builders = {
builder.fork: builder
for builder in (
Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder,
EIP6110SpecBuilder, EIP7002SpecBuilder, WhiskSpecBuilder,
)
}
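The resulting dict is keyed by fork name, so callers such as `objects_to_spec` can look a builder up directly. For example (assuming the repo root is on `sys.path`):

```python
from pysetup.spec_builders import spec_builders

assert spec_builders['eip7002'].fork == 'eip7002'
# Insertion order follows fork history: phase0, altair, ..., eip7002, whisk.
assert list(spec_builders)[0] == 'phase0'
```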


@@ -0,0 +1,54 @@
from typing import Dict
from .base import BaseSpecBuilder
from ..constants import ALTAIR, OPTIMIZED_BLS_AGGREGATE_PUBKEYS
class AltairSpecBuilder(BaseSpecBuilder):
fork: str = ALTAIR
@classmethod
def imports(cls, preset_name: str) -> str:
return f'''
from typing import NewType, Union as PyUnion
from eth2spec.phase0 import {preset_name} as phase0
from eth2spec.test.helpers.merkle import build_proof
from eth2spec.utils.ssz.ssz_typing import Path
'''
@classmethod
def preparations(cls):
return '''
SSZVariableName = str
GeneralizedIndex = NewType('GeneralizedIndex', int)
'''
@classmethod
def sundry_functions(cls) -> str:
return '''
def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex:
ssz_path = Path(ssz_class)
for item in path:
ssz_path = ssz_path / item
return GeneralizedIndex(ssz_path.gindex())
def compute_merkle_proof_for_state(state: BeaconState,
index: GeneralizedIndex) -> Sequence[Bytes32]:
return build_proof(state.get_backing(), index)'''
@classmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
return {
'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
}
@classmethod
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
if "eth_aggregate_pubkeys" in functions:
functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip()
return functions
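The hardcoded generalized indices can be re-derived by hand. The sketch below assumes the Altair `BeaconState` has 24 fields (padded to 32 chunks in the SSZ Merkle tree), with `finalized_checkpoint` at field index 20 and the current/next sync committees at 22 and 23, and that `Checkpoint` is a two-field container with `root` at index 1:

```python
# Hand derivation of the hardcoded SSZ generalized indices above.
STATE_LEAVES = 32  # next power of two >= 24 BeaconState fields (assumption)

finalized_checkpoint_gindex = STATE_LEAVES + 20            # gindex 52
FINALIZED_ROOT_INDEX = finalized_checkpoint_gindex * 2 + 1  # descend into Checkpoint.root
assert FINALIZED_ROOT_INDEX == 105

assert STATE_LEAVES + 22 == 54  # CURRENT_SYNC_COMMITTEE_INDEX
assert STATE_LEAVES + 23 == 55  # NEXT_SYNC_COMMITTEE_INDEX
```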


@@ -0,0 +1,52 @@
from abc import ABC, abstractmethod
from typing import Sequence, Dict
from pathlib import Path
class BaseSpecBuilder(ABC):
@property
@abstractmethod
def fork(self) -> str:
raise NotImplementedError()
@classmethod
def imports(cls, preset_name: str) -> str:
"""
Import objects from other libraries.
"""
return ""
@classmethod
def preparations(cls) -> str:
"""
Define special types/constants for building pyspec or call functions.
"""
return ""
@classmethod
def sundry_functions(cls) -> str:
"""
The functions that are (1) defined abstractly in the specs or (2) adjusted for better performance.
"""
return ""
@classmethod
def execution_engine_cls(cls) -> str:
return ""
@classmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
"""
The constants that are required for SSZ objects.
"""
return {}
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO
"""
The constants that are required for custom types.
"""
return {}
@classmethod
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
return functions


@@ -0,0 +1,66 @@
from typing import Dict
from .base import BaseSpecBuilder
from ..constants import BELLATRIX
class BellatrixSpecBuilder(BaseSpecBuilder):
fork: str = BELLATRIX
@classmethod
def imports(cls, preset_name: str):
return f'''
from typing import Protocol
from eth2spec.altair import {preset_name} as altair
from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector
'''
@classmethod
def sundry_functions(cls) -> str:
return """
ExecutionState = Any
def get_pow_block(hash: Bytes32) -> Optional[PowBlock]:
return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0))
def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState:
pass
def get_pow_chain_head() -> PowBlock:
pass"""
@classmethod
def execution_engine_cls(cls) -> str:
return """
class NoopExecutionEngine(ExecutionEngine):
def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
return True
def notify_forkchoice_updated(self: ExecutionEngine,
head_block_hash: Hash32,
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
pass
def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
# pylint: disable=unused-argument
raise NotImplementedError("no default block production")
def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
return True
def verify_and_notify_new_payload(self: ExecutionEngine,
new_payload_request: NewPayloadRequest) -> bool:
return True
EXECUTION_ENGINE = NoopExecutionEngine()"""
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:
return {
'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value,
}


@@ -0,0 +1,29 @@
from typing import Dict
from .base import BaseSpecBuilder
from ..constants import CAPELLA
class CapellaSpecBuilder(BaseSpecBuilder):
fork: str = CAPELLA
@classmethod
def imports(cls, preset_name: str):
return f'''
from eth2spec.bellatrix import {preset_name} as bellatrix
'''
@classmethod
def sundry_functions(cls) -> str:
return '''
def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
index: GeneralizedIndex) -> Sequence[Bytes32]:
return build_proof(body.get_backing(), index)'''
@classmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
return {
'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)',
}


@@ -0,0 +1,71 @@
from typing import Dict
from .base import BaseSpecBuilder
from ..constants import DENEB
class DenebSpecBuilder(BaseSpecBuilder):
fork: str = DENEB
@classmethod
def imports(cls, preset_name: str):
return f'''
from eth2spec.capella import {preset_name} as capella
'''
@classmethod
def preparations(cls):
return '''
T = TypeVar('T') # For generic function
'''
@classmethod
def sundry_functions(cls) -> str:
return '''
def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]:
# pylint: disable=unused-argument
return ("TEST", "TEST")'''
@classmethod
def execution_engine_cls(cls) -> str:
return """
class NoopExecutionEngine(ExecutionEngine):
def notify_new_payload(self: ExecutionEngine,
execution_payload: ExecutionPayload,
parent_beacon_block_root: Root) -> bool:
return True
def notify_forkchoice_updated(self: ExecutionEngine,
head_block_hash: Hash32,
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
pass
def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
# pylint: disable=unused-argument
raise NotImplementedError("no default block production")
def is_valid_block_hash(self: ExecutionEngine,
execution_payload: ExecutionPayload,
parent_beacon_block_root: Root) -> bool:
return True
def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool:
return True
def verify_and_notify_new_payload(self: ExecutionEngine,
new_payload_request: NewPayloadRequest) -> bool:
return True
EXECUTION_ENGINE = NoopExecutionEngine()"""
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:
return {
'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value,
'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value,
'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value,
}


@@ -0,0 +1,12 @@
from .base import BaseSpecBuilder
from ..constants import EIP6110
class EIP6110SpecBuilder(BaseSpecBuilder):
fork: str = EIP6110
@classmethod
def imports(cls, preset_name: str):
return f'''
from eth2spec.deneb import {preset_name} as deneb
'''


@@ -0,0 +1,12 @@
from .base import BaseSpecBuilder
from ..constants import EIP7002
class EIP7002SpecBuilder(BaseSpecBuilder):
fork: str = EIP7002
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''


@@ -0,0 +1,105 @@
from .base import BaseSpecBuilder
from ..constants import PHASE0
class Phase0SpecBuilder(BaseSpecBuilder):
fork: str = PHASE0
@classmethod
def imports(cls, preset_name: str) -> str:
return '''from lru import LRU
from dataclasses import (
dataclass,
field,
)
from typing import (
Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final
)
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
from eth2spec.utils.ssz.ssz_typing import (
View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256,
Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist)
from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401
from eth2spec.utils import bls
from eth2spec.utils.hash_function import hash
'''
@classmethod
def preparations(cls) -> str:
return '''
SSZObject = TypeVar('SSZObject', bound=View)
'''
@classmethod
def sundry_functions(cls) -> str:
return '''
def get_eth1_data(block: Eth1Block) -> Eth1Data:
"""
A stub function returning mock Eth1Data.
"""
return Eth1Data(
deposit_root=block.deposit_root,
deposit_count=block.deposit_count,
block_hash=hash_tree_root(block))
def cache_this(key_fn, value_fn, lru_size): # type: ignore
cache_dict = LRU(size=lru_size)
def wrapper(*args, **kw): # type: ignore
key = key_fn(*args, **kw)
nonlocal cache_dict
if key not in cache_dict:
cache_dict[key] = value_fn(*args, **kw)
return cache_dict[key]
return wrapper
_compute_shuffled_index = compute_shuffled_index
compute_shuffled_index = cache_this(
lambda index, index_count, seed: (index, index_count, seed),
_compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3)
_get_total_active_balance = get_total_active_balance
get_total_active_balance = cache_this(
lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)),
_get_total_active_balance, lru_size=10)
_get_base_reward = get_base_reward
get_base_reward = cache_this(
lambda state, index: (state.validators.hash_tree_root(), state.slot, index),
_get_base_reward, lru_size=2048)
_get_committee_count_per_slot = get_committee_count_per_slot
get_committee_count_per_slot = cache_this(
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
_get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3)
_get_active_validator_indices = get_active_validator_indices
get_active_validator_indices = cache_this(
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
_get_active_validator_indices, lru_size=3)
_get_beacon_committee = get_beacon_committee
get_beacon_committee = cache_this(
lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index),
_get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)
_get_matching_target_attestations = get_matching_target_attestations
get_matching_target_attestations = cache_this(
lambda state, epoch: (state.hash_tree_root(), epoch),
_get_matching_target_attestations, lru_size=10)
_get_matching_head_attestations = get_matching_head_attestations
get_matching_head_attestations = cache_this(
lambda state, epoch: (state.hash_tree_root(), epoch),
_get_matching_head_attestations, lru_size=10)
_get_attesting_indices = get_attesting_indices
get_attesting_indices = cache_this(
lambda state, data, bits: (
state.randao_mixes.hash_tree_root(),
state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root()
),
_get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)'''
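The `cache_this` wrapper above memoizes pure spec functions behind hand-written cache keys. A dependency-free sketch of the same pattern (a plain dict instead of the `LRU` class from the `lru` package, so no eviction):

```python
# Minimal sketch of the cache_this pattern, without the lru dependency.
def cache_this(key_fn, value_fn):
    cache_dict = {}

    def wrapper(*args, **kw):
        key = key_fn(*args, **kw)
        if key not in cache_dict:
            cache_dict[key] = value_fn(*args, **kw)  # compute once per key
        return cache_dict[key]

    return wrapper

calls = []
def slow_square(x):
    calls.append(x)
    return x * x

square = cache_this(lambda x: x, slow_square)
assert square(4) == 16 and square(4) == 16
assert calls == [4]  # the second call was served from the cache
```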


@@ -0,0 +1,20 @@
from typing import Dict
from .base import BaseSpecBuilder
from ..constants import WHISK
class WhiskSpecBuilder(BaseSpecBuilder):
fork: str = WHISK
@classmethod
def imports(cls, preset_name: str):
return f'''
from eth2spec.capella import {preset_name} as capella
'''
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:
# Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof`
return {
'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value,
'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value,
}

32
pysetup/typing.py Normal file

@@ -0,0 +1,32 @@
from pathlib import Path
from typing import Dict, NamedTuple, Optional, List
class ProtocolDefinition(NamedTuple):
# just function definitions currently. May expand with configuration vars in future.
functions: Dict[str, str]
class VariableDefinition(NamedTuple):
type_name: Optional[str]
value: str
comment: Optional[str] # e.g. "noqa: E501"
type_hint: Optional[str] # e.g., "Final"
class SpecObject(NamedTuple):
functions: Dict[str, str]
protocols: Dict[str, ProtocolDefinition]
custom_types: Dict[str, str]
constant_vars: Dict[str, VariableDefinition]
preset_vars: Dict[str, VariableDefinition]
config_vars: Dict[str, VariableDefinition]
ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects
ssz_objects: Dict[str, str]
dataclasses: Dict[str, str]
class BuildTarget(NamedTuple):
name: str
preset_paths: List[Path]
config_path: Path

857
setup.py

@@ -4,17 +4,36 @@ from distutils import dir_util
from distutils.util import convert_path
from pathlib import Path
import os
import re
import string
import textwrap
from typing import Dict, NamedTuple, List, Sequence, Optional, TypeVar, Tuple
from abc import ABC, abstractmethod
from typing import Dict, List, Sequence, Optional, Tuple
import ast
import subprocess
import sys
import copy
from collections import OrderedDict
import json
from functools import reduce
from pysetup.constants import (
# code names
PHASE0,
# misc
ETH2_SPEC_COMMENT_PREFIX,
)
from pysetup.spec_builders import spec_builders
from pysetup.typing import (
BuildTarget,
ProtocolDefinition,
SpecObject,
VariableDefinition,
)
from pysetup.helpers import (
combine_spec_objects,
dependency_order_class_objects,
objects_to_spec,
parse_config_vars,
)
from pysetup.md_doc_paths import get_md_doc_paths
# NOTE: have to programmatically include third-party dependencies in `setup.py`.
@@ -41,106 +60,6 @@ from marko.ext.gfm import gfm
from marko.ext.gfm.elements import Table
# Definitions in context.py
PHASE0 = 'phase0'
ALTAIR = 'altair'
BELLATRIX = 'bellatrix'
CAPELLA = 'capella'
DENEB = 'deneb'
EIP6110 = 'eip6110'
EIP7002 = 'eip7002'
WHISK = 'whisk'
PREVIOUS_FORK_OF = {
PHASE0: None,
ALTAIR: PHASE0,
BELLATRIX: ALTAIR,
CAPELLA: BELLATRIX,
DENEB: CAPELLA,
EIP6110: DENEB,
EIP7002: CAPELLA,
WHISK: CAPELLA,
}
ALL_FORKS = list(PREVIOUS_FORK_OF.keys())
IGNORE_SPEC_FILES = [
"specs/phase0/deposit-contract.md"
]
EXTRA_SPEC_FILES = {
BELLATRIX: "sync/optimistic.md"
}
# The helper functions that are used when defining constants
CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = '''
def ceillog2(x: int) -> uint64:
if x < 1:
raise ValueError(f"ceillog2 accepts only positive values, x={x}")
return uint64((x - 1).bit_length())
def floorlog2(x: int) -> uint64:
if x < 1:
raise ValueError(f"floorlog2 accepts only positive values, x={x}")
return uint64(x.bit_length() - 1)
'''
OPTIMIZED_BLS_AGGREGATE_PUBKEYS = '''
def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey:
return bls.AggregatePKs(pubkeys)
'''
class ProtocolDefinition(NamedTuple):
# just function definitions currently. May expand with configuration vars in future.
functions: Dict[str, str]
class VariableDefinition(NamedTuple):
type_name: Optional[str]
value: str
comment: Optional[str] # e.g. "noqa: E501"
type_hint: Optional[str] # e.g., "Final"
class SpecObject(NamedTuple):
functions: Dict[str, str]
protocols: Dict[str, ProtocolDefinition]
custom_types: Dict[str, str]
constant_vars: Dict[str, VariableDefinition]
preset_vars: Dict[str, VariableDefinition]
config_vars: Dict[str, VariableDefinition]
ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects
ssz_objects: Dict[str, str]
dataclasses: Dict[str, str]
def is_post_fork(a, b) -> bool:
"""
Returns true if fork a is after b, or if a == b
"""
if a == b:
return True
prev_fork = PREVIOUS_FORK_OF[a]
if prev_fork == b:
return True
elif prev_fork == None:
return False
else:
return is_post_fork(prev_fork, b)
def get_fork_directory(fork):
dir1 = f'specs/{fork}'
if os.path.exists(dir1):
return dir1
dir2 = f'specs/_features/{fork}'
if os.path.exists(dir2):
return dir2
raise FileNotFoundError(f"No directory found for fork: {fork}")
def _get_name_from_heading(heading: Heading) -> Optional[str]:
last_child = heading.children[-1]
if isinstance(last_child, CodeSpan):
@@ -205,13 +124,12 @@ def _load_kzg_trusted_setups(preset_name):
return trusted_setup_G1, trusted_setup_G2, trusted_setup_G1_lagrange, roots_of_unity
ALL_KZG_SETUPS = {
'minimal': _load_kzg_trusted_setups('minimal'),
'mainnet': _load_kzg_trusted_setups('mainnet')
}
ETH2_SPEC_COMMENT_PREFIX = "eth2spec:"
def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]:
_, _, title = child._parse_info
@@ -223,7 +141,7 @@ def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]:
return title[len(ETH2_SPEC_COMMENT_PREFIX):].strip()
def _parse_value(name: str, typed_value: str, type_hint: Optional[str]=None) -> VariableDefinition:
def _parse_value(name: str, typed_value: str, type_hint: Optional[str] = None) -> VariableDefinition:
comment = None
if name == "BLS12_381_Q":
comment = "noqa: E501"
@@ -352,691 +270,6 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr
)
class SpecBuilder(ABC):
@property
@abstractmethod
def fork(self) -> str:
raise NotImplementedError()
@classmethod
@abstractmethod
def imports(cls, preset_name: str) -> str:
"""
Import objects from other libraries.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def preparations(cls) -> str:
"""
Define special types/constants for building pyspec or call functions.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def sundry_functions(cls) -> str:
"""
The functions that are (1) defined abstractly in the specs or (2) adjusted for better performance.
"""
raise NotImplementedError()
@classmethod
def execution_engine_cls(cls) -> str:
raise NotImplementedError()
@classmethod
@abstractmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
"""
The constants that are required for SSZ objects.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO
"""
The constants that are required for custom types.
"""
raise NotImplementedError()
@classmethod
@abstractmethod
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
raise NotImplementedError()
@classmethod
@abstractmethod
def build_spec(cls, preset_name: str,
source_files: List[Path], preset_files: Sequence[Path], config_file: Path) -> str:
raise NotImplementedError()
#
# Phase0SpecBuilder
#
class Phase0SpecBuilder(SpecBuilder):
fork: str = PHASE0
@classmethod
def imports(cls, preset_name: str) -> str:
return '''from lru import LRU
from dataclasses import (
dataclass,
field,
)
from typing import (
Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final
)
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
from eth2spec.utils.ssz.ssz_typing import (
View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256,
Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist)
from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401
from eth2spec.utils import bls
from eth2spec.utils.hash_function import hash
'''
@classmethod
def preparations(cls) -> str:
return '''
SSZObject = TypeVar('SSZObject', bound=View)
'''
@classmethod
def sundry_functions(cls) -> str:
return '''
def get_eth1_data(block: Eth1Block) -> Eth1Data:
"""
A stub function returning mock Eth1Data.
"""
return Eth1Data(
deposit_root=block.deposit_root,
deposit_count=block.deposit_count,
block_hash=hash_tree_root(block))
def cache_this(key_fn, value_fn, lru_size): # type: ignore
cache_dict = LRU(size=lru_size)
def wrapper(*args, **kw): # type: ignore
key = key_fn(*args, **kw)
nonlocal cache_dict
if key not in cache_dict:
cache_dict[key] = value_fn(*args, **kw)
return cache_dict[key]
return wrapper
_compute_shuffled_index = compute_shuffled_index
compute_shuffled_index = cache_this(
lambda index, index_count, seed: (index, index_count, seed),
_compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3)
_get_total_active_balance = get_total_active_balance
get_total_active_balance = cache_this(
lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)),
_get_total_active_balance, lru_size=10)
_get_base_reward = get_base_reward
get_base_reward = cache_this(
lambda state, index: (state.validators.hash_tree_root(), state.slot, index),
_get_base_reward, lru_size=2048)
_get_committee_count_per_slot = get_committee_count_per_slot
get_committee_count_per_slot = cache_this(
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
_get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3)
_get_active_validator_indices = get_active_validator_indices
get_active_validator_indices = cache_this(
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
_get_active_validator_indices, lru_size=3)
_get_beacon_committee = get_beacon_committee
get_beacon_committee = cache_this(
lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index),
_get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)
_get_matching_target_attestations = get_matching_target_attestations
get_matching_target_attestations = cache_this(
lambda state, epoch: (state.hash_tree_root(), epoch),
_get_matching_target_attestations, lru_size=10)
_get_matching_head_attestations = get_matching_head_attestations
get_matching_head_attestations = cache_this(
lambda state, epoch: (state.hash_tree_root(), epoch),
_get_matching_head_attestations, lru_size=10)
_get_attesting_indices = get_attesting_indices
get_attesting_indices = cache_this(
lambda state, data, bits: (
state.randao_mixes.hash_tree_root(),
state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root()
),
_get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)'''
@classmethod
def execution_engine_cls(cls) -> str:
return ""
@classmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
return {}
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]:
return {}
@classmethod
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
return functions
@classmethod
def build_spec(cls, preset_name: str,
source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str:
return _build_spec(preset_name, cls.fork, source_files, preset_files, config_file)
#
# AltairSpecBuilder
#
class AltairSpecBuilder(Phase0SpecBuilder):
fork: str = ALTAIR
@classmethod
def imports(cls, preset_name: str) -> str:
return super().imports(preset_name) + '\n' + f'''
from typing import NewType, Union as PyUnion
from eth2spec.phase0 import {preset_name} as phase0
from eth2spec.test.helpers.merkle import build_proof
from eth2spec.utils.ssz.ssz_typing import Path
'''
@classmethod
def preparations(cls):
return super().preparations() + '\n' + '''
SSZVariableName = str
GeneralizedIndex = NewType('GeneralizedIndex', int)
'''
@classmethod
def sundry_functions(cls) -> str:
return super().sundry_functions() + '\n\n' + '''
def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex:
ssz_path = Path(ssz_class)
for item in path:
ssz_path = ssz_path / item
return GeneralizedIndex(ssz_path.gindex())
def compute_merkle_proof_for_state(state: BeaconState,
index: GeneralizedIndex) -> Sequence[Bytes32]:
return build_proof(state.get_backing(), index)'''
@classmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
constants = {
'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
}
return {**super().hardcoded_ssz_dep_constants(), **constants}
@classmethod
def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]:
if "eth_aggregate_pubkeys" in functions:
functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip()
return super().implement_optimizations(functions)
#
# BellatrixSpecBuilder
#
class BellatrixSpecBuilder(AltairSpecBuilder):
fork: str = BELLATRIX
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from typing import Protocol
from eth2spec.altair import {preset_name} as altair
from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector
'''
@classmethod
def preparations(cls):
return super().preparations()
@classmethod
def sundry_functions(cls) -> str:
return super().sundry_functions() + '\n\n' + """
ExecutionState = Any
def get_pow_block(hash: Bytes32) -> Optional[PowBlock]:
return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0))
def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState:
pass
def get_pow_chain_head() -> PowBlock:
pass"""
@classmethod
def execution_engine_cls(cls) -> str:
return "\n\n" + """
class NoopExecutionEngine(ExecutionEngine):
def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
return True
def notify_forkchoice_updated(self: ExecutionEngine,
head_block_hash: Hash32,
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
pass
def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
# pylint: disable=unused-argument
raise NotImplementedError("no default block production")
def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
return True
def verify_and_notify_new_payload(self: ExecutionEngine,
new_payload_request: NewPayloadRequest) -> bool:
return True
EXECUTION_ENGINE = NoopExecutionEngine()"""
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
constants = {
'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value,
}
return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}
#
# CapellaSpecBuilder
#
class CapellaSpecBuilder(BellatrixSpecBuilder):
fork: str = CAPELLA
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from eth2spec.bellatrix import {preset_name} as bellatrix
'''
@classmethod
def sundry_functions(cls) -> str:
return super().sundry_functions() + '\n\n' + '''
def compute_merkle_proof_for_block_body(body: BeaconBlockBody,
index: GeneralizedIndex) -> Sequence[Bytes32]:
return build_proof(body.get_backing(), index)'''
@classmethod
def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
constants = {
'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)',
}
return {**super().hardcoded_ssz_dep_constants(), **constants}
#
# DenebSpecBuilder
#
class DenebSpecBuilder(CapellaSpecBuilder):
fork: str = DENEB
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''
@classmethod
def preparations(cls):
return super().preparations() + '\n' + '''
T = TypeVar('T') # For generic function
'''
@classmethod
def sundry_functions(cls) -> str:
return super().sundry_functions() + '\n\n' + '''
def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]:
# pylint: disable=unused-argument
return ("TEST", "TEST")'''
@classmethod
def execution_engine_cls(cls) -> str:
return "\n\n" + """
class NoopExecutionEngine(ExecutionEngine):
def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
return True
def notify_forkchoice_updated(self: ExecutionEngine,
head_block_hash: Hash32,
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]:
pass
def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse:
# pylint: disable=unused-argument
raise NotImplementedError("no default block production")
def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool:
return True
def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool:
return True
def verify_and_notify_new_payload(self: ExecutionEngine,
new_payload_request: NewPayloadRequest) -> bool:
return True
EXECUTION_ENGINE = NoopExecutionEngine()"""
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
constants = {
'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value,
'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value,
'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value,
}
return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}
#
# EIP6110SpecBuilder
#
class EIP6110SpecBuilder(DenebSpecBuilder):
fork: str = EIP6110
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from eth2spec.deneb import {preset_name} as deneb
'''
#
# EIP7002SpecBuilder
#
class EIP7002SpecBuilder(CapellaSpecBuilder):
fork: str = EIP7002
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''
#
# WhiskSpecBuilder
#
class WhiskSpecBuilder(CapellaSpecBuilder):
fork: str = WHISK
@classmethod
def imports(cls, preset_name: str):
return super().imports(preset_name) + f'''
from eth2spec.capella import {preset_name} as capella
'''
@classmethod
def hardcoded_custom_type_dep_constants(cls, spec_object) -> str:
# Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof`
constants = {
'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.constant_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value,
'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.constant_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value,
}
return {**super().hardcoded_custom_type_dep_constants(spec_object), **constants}
spec_builders = {
builder.fork: builder
for builder in (Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder,
EIP6110SpecBuilder, EIP7002SpecBuilder, WhiskSpecBuilder)
}
def is_byte_vector(value: str) -> bool:
return value.startswith(('ByteVector'))
def make_function_abstract(protocol_def: ProtocolDefinition, key: str):
function = protocol_def.functions[key].split('"""')
protocol_def.functions[key] = function[0] + "..."
def objects_to_spec(preset_name: str,
spec_object: SpecObject,
builder: SpecBuilder,
ordered_class_objects: Dict[str, str]) -> str:
"""
Given all the objects that constitute a spec, combine them into a single pyfile.
"""
new_type_definitions = (
'\n\n'.join(
[
f"class {key}({value}):\n pass\n" if not is_byte_vector(value) else f"class {key}({value}): # type: ignore\n pass\n"
for key, value in spec_object.custom_types.items()
]
)
)
def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str:
abstract_functions = ["verify_and_notify_new_payload"]
for key in protocol_def.functions.keys():
if key in abstract_functions:
make_function_abstract(protocol_def, key)
protocol = f"class {protocol_name}(Protocol):"
for fn_source in protocol_def.functions.values():
fn_source = fn_source.replace("self: "+protocol_name, "self")
protocol += "\n\n" + textwrap.indent(fn_source, " ")
return protocol
protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items())
for k in list(spec_object.functions):
if k in [
"ceillog2",
"floorlog2",
"compute_merkle_proof_for_block_body",
"compute_merkle_proof_for_state",
]:
del spec_object.functions[k]
functions = builder.implement_optimizations(spec_object.functions)
functions_spec = '\n\n\n'.join(functions.values())
# Access global dict of config vars for runtime configurables
for name in spec_object.config_vars.keys():
functions_spec = re.sub(r"\b%s\b" % name, 'config.' + name, functions_spec)
def format_config_var(name: str, vardef: VariableDefinition) -> str:
if vardef.type_name is None:
out = f'{name}={vardef.value},'
else:
out = f'{name}={vardef.type_name}({vardef.value}),'
if vardef.comment is not None:
out += f' # {vardef.comment}'
return out
config_spec = 'class Configuration(NamedTuple):\n'
config_spec += ' PRESET_BASE: str\n'
config_spec += '\n'.join(f' {k}: {v.type_name if v.type_name is not None else "int"}'
for k, v in spec_object.config_vars.items())
config_spec += '\n\n\nconfig = Configuration(\n'
config_spec += f' PRESET_BASE="{preset_name}",\n'
config_spec += '\n'.join(' ' + format_config_var(k, v) for k, v in spec_object.config_vars.items())
config_spec += '\n)\n'
def format_constant(name: str, vardef: VariableDefinition) -> str:
if vardef.type_name is None:
if vardef.type_hint is None:
out = f'{name} = {vardef.value}'
else:
out = f'{name}: {vardef.type_hint} = {vardef.value}'
else:
out = f'{name} = {vardef.type_name}({vardef.value})'
if vardef.comment is not None:
out += f' # {vardef.comment}'
return out
constant_vars_spec = '# Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items())
preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items())
ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values())
ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_ssz_dep_constants()[x]), builder.hardcoded_ssz_dep_constants()))
ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), builder.hardcoded_ssz_dep_constants()))
custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_custom_type_dep_constants(spec_object)[x]), builder.hardcoded_custom_type_dep_constants(spec_object)))
spec = (
builder.imports(preset_name)
+ builder.preparations()
+ '\n\n' + f"fork = \'{builder.fork}\'\n"
# The constants that some SSZ containers require. Need to be defined before `new_type_definitions`
+ ('\n\n' + custom_type_dep_constants + '\n' if custom_type_dep_constants != '' else '')
+ '\n\n' + new_type_definitions
+ '\n' + CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS
# The constants that some SSZ containers require. Need to be defined before `constants_spec`
+ ('\n\n' + ssz_dep_constants if ssz_dep_constants != '' else '')
+ '\n\n' + constant_vars_spec
+ '\n\n' + preset_vars_spec
+ '\n\n\n' + config_spec
+ '\n\n' + ordered_class_objects_spec
+ ('\n\n\n' + protocols_spec if protocols_spec != '' else '')
+ '\n\n\n' + functions_spec
+ '\n\n' + builder.sundry_functions()
+ builder.execution_engine_cls()
# Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
# the same as the spec definition.
+ ('\n\n\n' + ssz_dep_constants_verification if ssz_dep_constants_verification != '' else '')
+ '\n'
)
return spec
def combine_protocols(old_protocols: Dict[str, ProtocolDefinition],
new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]:
for key, value in new_protocols.items():
if key not in old_protocols:
old_protocols[key] = value
else:
functions = combine_dicts(old_protocols[key].functions, value.functions)
old_protocols[key] = ProtocolDefinition(functions=functions)
return old_protocols
T = TypeVar('T')
def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]:
return {**old_dict, **new_dict}
ignored_dependencies = [
'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature',
'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
'bytes', 'byte', 'ByteList', 'ByteVector',
'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set',
'Optional', 'Sequence',
]
def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None:
"""
Determines which SSZ Object is dependent on which other and orders them appropriately
"""
items = list(objects.items())
for key, value in items:
dependencies = []
for line in value.split('\n'):
if not re.match(r'\s+\w+: .+', line):
continue # skip whitespace etc.
line = line[line.index(':') + 1:] # strip off field name
if '#' in line:
line = line[:line.index('#')] # strip off comment
dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies
dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants
dependencies = filter(lambda x: x not in ignored_dependencies, dependencies)
dependencies = filter(lambda x: x not in custom_types, dependencies)
for dep in dependencies:
key_list = list(objects.keys())
for item in [dep, key] + key_list[key_list.index(dep)+1:]:
objects[item] = objects.pop(item)
def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
"""
Takes in old spec and new spec ssz objects, combines them,
and returns the newer versions of the objects in dependency order.
"""
for key, value in new_objects.items():
old_objects[key] = value
return old_objects
def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
"""
Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
"""
protocols = combine_protocols(spec0.protocols, spec1.protocols)
functions = combine_dicts(spec0.functions, spec1.functions)
custom_types = combine_dicts(spec0.custom_types, spec1.custom_types)
constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars)
preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars)
config_vars = combine_dicts(spec0.config_vars, spec1.config_vars)
ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants)
ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types)
dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses)
return SpecObject(
functions=functions,
protocols=protocols,
custom_types=custom_types,
constant_vars=constant_vars,
preset_vars=preset_vars,
config_vars=config_vars,
ssz_dep_constants=ssz_dep_constants,
ssz_objects=ssz_objects,
dataclasses=dataclasses,
)
def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]:
"""
Parses a dict of basic str/int/list types into a dict for insertion into the spec code.
"""
out: Dict[str, str] = dict()
for k, v in conf.items():
if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'):
# Represent byte data with string, to avoid misinterpretation as big-endian int.
# Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer.
out[k] = f"'{v}'"
else:
out[k] = str(int(v))
return out
def load_preset(preset_files: Sequence[Path]) -> Dict[str, str]:
"""
Loads a directory of preset files and merges the result into one preset.
@@ -1064,8 +297,11 @@ def load_config(config_path: Path) -> Dict[str, str]:
return parse_config_vars(config_data)
def _build_spec(preset_name: str, fork: str,
source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str:
def build_spec(fork: str,
preset_name: str,
source_files: Sequence[Path],
preset_files: Sequence[Path],
config_file: Path) -> str:
preset = load_preset(preset_files)
config = load_config(config_file)
all_specs = [get_spec(spec, preset, config, preset_name) for spec in source_files]
@ -1082,13 +318,7 @@ def _build_spec(preset_name: str, fork: str,
new_objects = copy.deepcopy(class_objects)
dependency_order_class_objects(class_objects, spec_object.custom_types)
return objects_to_spec(preset_name, spec_object, spec_builders[fork], class_objects)
class BuildTarget(NamedTuple):
name: str
preset_paths: List[Path]
config_path: Path
return objects_to_spec(preset_name, spec_object, fork, class_objects)
class PySpecCommand(Command):
@ -1127,20 +357,7 @@ class PySpecCommand(Command):
if len(self.md_doc_paths) == 0:
print("no paths were specified, using default markdown file paths for pyspec"
" build (spec fork: %s)" % self.spec_fork)
self.md_doc_paths = ""
for fork in ALL_FORKS:
if is_post_fork(self.spec_fork, fork):
# Append all files in fork directory recursively
for root, dirs, files in os.walk(get_fork_directory(fork)):
for filename in files:
filepath = os.path.join(root, filename)
if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES:
self.md_doc_paths += filepath + "\n"
# Append extra files if any
if fork in EXTRA_SPEC_FILES:
self.md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n"
self.md_doc_paths = get_md_doc_paths(self.spec_fork)
if len(self.md_doc_paths) == 0:
raise Exception('no markdown files specified, and spec fork "%s" is unknown' % self.spec_fork)
@ -1173,8 +390,13 @@ class PySpecCommand(Command):
dir_util.mkpath(self.out_dir)
for (name, preset_paths, config_path) in self.parsed_build_targets:
spec_str = spec_builders[self.spec_fork].build_spec(
name, self.parsed_md_doc_paths, preset_paths, config_path)
spec_str = build_spec(
spec_builders[self.spec_fork].fork,
name,
self.parsed_md_doc_paths,
preset_paths,
config_path,
)
if self.dry_run:
self.announce('dry run successfully prepared contents for spec.'
f' out dir: "{self.out_dir}", spec fork: "{self.spec_fork}", build target: "{name}"')
@ -1235,6 +457,7 @@ class PyspecDevCommand(Command):
for spec_fork in spec_builders:
self.run_pyspec_cmd(spec_fork=spec_fork)
commands = {
'pyspec': PySpecCommand,
'build_py': BuildPyCommand,

View File

@ -1,72 +0,0 @@
# EIP-4788 -- The Beacon Chain
## Table of contents
<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Introduction](#introduction)
- [Containers](#containers)
- [Extended Containers](#extended-containers)
- [`ExecutionPayload`](#executionpayload)
- [`ExecutionPayloadHeader`](#executionpayloadheader)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
## Introduction
TODO
## Containers
### Extended Containers
#### `ExecutionPayload`
```python
class ExecutionPayload(Container):
# Execution block header fields
parent_hash: Hash32
fee_recipient: ExecutionAddress # 'beneficiary' in the yellow paper
state_root: Bytes32
receipts_root: Bytes32
logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
prev_randao: Bytes32 # 'difficulty' in the yellow paper
block_number: uint64 # 'number' in the yellow paper
gas_limit: uint64
gas_used: uint64
timestamp: uint64
extra_data: ByteList[MAX_EXTRA_DATA_BYTES]
base_fee_per_gas: uint256
# Extra payload fields
block_hash: Hash32 # Hash of execution block
transactions: List[Transaction, MAX_TRANSACTIONS_PER_PAYLOAD]
withdrawals: List[Withdrawal, MAX_WITHDRAWALS_PER_PAYLOAD]
parent_beacon_block_root: Root # [New in EIP-4788]
```
#### `ExecutionPayloadHeader`
```python
class ExecutionPayloadHeader(Container):
# Execution block header fields
parent_hash: Hash32
fee_recipient: ExecutionAddress
state_root: Bytes32
receipts_root: Bytes32
logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
prev_randao: Bytes32
block_number: uint64
gas_limit: uint64
gas_used: uint64
timestamp: uint64
extra_data: ByteList[MAX_EXTRA_DATA_BYTES]
base_fee_per_gas: uint256
# Extra payload fields
block_hash: Hash32 # Hash of execution block
transactions_root: Root
withdrawals_root: Root
parent_beacon_block_root: Root # [New in EIP-4788]
```

View File

@ -1,88 +0,0 @@
# EIP-4788 -- Honest Validator
**Notice**: This document is a work-in-progress for researchers and implementers.
## Table of contents
<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Introduction](#introduction)
- [Prerequisites](#prerequisites)
- [Helpers](#helpers)
- [Protocols](#protocols)
- [`ExecutionEngine`](#executionengine)
- [Modified `get_payload`](#modified-get_payload)
- [Beacon chain responsibilities](#beacon-chain-responsibilities)
- [Block proposal](#block-proposal)
- [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody)
- [ExecutionPayload](#executionpayload)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
## Introduction
This document represents the changes to be made in the code of an "honest validator" to implement the EIP-4788 feature.
## Prerequisites
This document is an extension of the [Capella -- Honest Validator](../capella/validator.md) guide.
All behaviors and definitions defined in this document, and documents it extends, carry over unless explicitly noted or overridden.
All terminology, constants, functions, and protocol mechanics defined in the updated Beacon Chain doc of [Capella](../capella/beacon-chain.md) are requisite for this document and used throughout.
Please see the related Beacon Chain doc before continuing and use it as a reference throughout.
## Helpers
## Protocols
### `ExecutionEngine`
#### Modified `get_payload`
`get_payload` returns the upgraded EIP-4788 `ExecutionPayload` type.
## Beacon chain responsibilities
All validator responsibilities remain unchanged other than those noted below.
### Block proposal
#### Constructing the `BeaconBlockBody`
##### ExecutionPayload
`ExecutionPayload`s are constructed as they were in Capella, except that the parent beacon block root is also supplied.
*Note*: In this section, `state` is the state of the slot for the block proposal _without_ the block yet applied.
That is, `state` is the `previous_state` processed through any empty slots up to the assigned slot using `process_slots(previous_state, slot)`.
*Note*: The only change made to `prepare_execution_payload` is to add the parent beacon block root as an additional
parameter to the `PayloadAttributes`.
```python
def prepare_execution_payload(state: BeaconState,
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
suggested_fee_recipient: ExecutionAddress,
execution_engine: ExecutionEngine) -> Optional[PayloadId]:
# Verify consistency of the parent hash with respect to the previous execution payload header
parent_hash = state.latest_execution_payload_header.block_hash
# Set the forkchoice head and initiate the payload build process
payload_attributes = PayloadAttributes(
timestamp=compute_timestamp_at_slot(state, state.slot),
prev_randao=get_randao_mix(state, get_current_epoch(state)),
suggested_fee_recipient=suggested_fee_recipient,
withdrawals=get_expected_withdrawals(state),
parent_beacon_block_root=hash_tree_root(state.latest_block_header), # [New in EIP-4788]
)
return execution_engine.notify_forkchoice_updated(
head_block_hash=parent_hash,
safe_block_hash=safe_block_hash,
finalized_block_hash=finalized_block_hash,
payload_attributes=payload_attributes,
)
```

View File

@ -224,7 +224,7 @@ def process_deposit_receipt(state: BeaconState, deposit_receipt: DepositReceipt)
state.deposit_receipts_start_index = deposit_receipt.index
apply_deposit(
state=state,
state=state,
pubkey=deposit_receipt.pubkey,
withdrawal_credentials=deposit_receipt.withdrawal_credentials,
amount=deposit_receipt.amount,
@ -251,7 +251,11 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
# Verify the execution payload is valid
versioned_hashes = [kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments]
assert execution_engine.verify_and_notify_new_payload(
NewPayloadRequest(execution_payload=payload, versioned_hashes=versioned_hashes)
NewPayloadRequest(
execution_payload=payload,
versioned_hashes=versioned_hashes,
parent_beacon_block_root=state.latest_block_header.parent_root,
)
)
# Cache execution payload header
state.latest_execution_payload_header = ExecutionPayloadHeader(

View File

@ -10,6 +10,8 @@
- [Introduction](#introduction)
- [Constants](#constants)
- [Domain types](#domain-types)
- [Preset](#preset)
- [Cryptography](#cryptography)
- [BLS](#bls)
- [Curdleproofs and opening proofs](#curdleproofs-and-opening-proofs)
@ -35,15 +37,7 @@ This document details the beacon chain additions and changes to support the W
## Constants
| Name | Value | Description |
| ---------------------------------- | -------------------------- | ----------------------------------------------------------- |
| `WHISK_CANDIDATE_TRACKERS_COUNT` | `uint64(2**14)` (= 16,384) | number of candidate trackers |
| `WHISK_PROPOSER_TRACKERS_COUNT` | `uint64(2**13)` (= 8,192) | number of proposer trackers |
| `WHISK_EPOCHS_PER_SHUFFLING_PHASE` | `Epoch(2**8)` (= 256) | epochs per shuffling phase |
| `WHISK_VALIDATORS_PER_SHUFFLE` | `uint64(2**7)` (= 128) | number of validators shuffled per shuffle step |
| `WHISK_PROPOSER_SELECTION_GAP` | `Epoch(2)` | gap between proposer selection and the block proposal phase |
| `WHISK_MAX_SHUFFLE_PROOF_SIZE` | `uint64(2**15)` | max size of a shuffle proof |
| `WHISK_MAX_OPENING_PROOF_SIZE` | `uint64(2**10)` | max size of an opening proof |
### Domain types
| Name | Value |
| ---------------------------------- | -------------------------- |
@ -51,6 +45,19 @@ This document details the beacon chain additions and changes to support the W
| `DOMAIN_WHISK_SHUFFLE` | `DomainType('0x07100000')` |
| `DOMAIN_WHISK_PROPOSER_SELECTION` | `DomainType('0x07200000')` |
## Preset
| Name | Value | Description |
| ---------------------------------- | -------------------------- | ----------------------------------------------------------- |
| `CURDLEPROOFS_N_BLINDERS` | `uint64(4)` | number of blinders for curdleproofs |
| `WHISK_CANDIDATE_TRACKERS_COUNT` | `uint64(2**14)` (= 16,384) | number of candidate trackers |
| `WHISK_PROPOSER_TRACKERS_COUNT` | `uint64(2**13)` (= 8,192) | number of proposer trackers |
| `WHISK_EPOCHS_PER_SHUFFLING_PHASE` | `Epoch(2**8)` (= 256) | epochs per shuffling phase |
| `WHISK_VALIDATORS_PER_SHUFFLE` | `uint64(2**7 - 4)` (= 124) | number of validators shuffled per shuffle step |
| `WHISK_PROPOSER_SELECTION_GAP` | `Epoch(2)` | gap between proposer selection and the block proposal phase |
| `WHISK_MAX_SHUFFLE_PROOF_SIZE` | `uint64(2**15)` | max size of a shuffle proof |
| `WHISK_MAX_OPENING_PROOF_SIZE` | `uint64(2**10)` | max size of an opening proof |
## Cryptography
### BLS
@ -177,17 +184,24 @@ class BeaconState(Container):
```
```python
def select_whisk_trackers(state: BeaconState, epoch: Epoch) -> None:
def select_whisk_proposer_trackers(state: BeaconState, epoch: Epoch) -> None:
# Select proposer trackers from candidate trackers
proposer_seed = get_seed(state, epoch - WHISK_PROPOSER_SELECTION_GAP, DOMAIN_WHISK_PROPOSER_SELECTION)
proposer_seed = get_seed(
state,
Epoch(saturating_sub(epoch, WHISK_PROPOSER_SELECTION_GAP)),
DOMAIN_WHISK_PROPOSER_SELECTION
)
for i in range(WHISK_PROPOSER_TRACKERS_COUNT):
index = compute_shuffled_index(uint64(i), uint64(len(state.whisk_candidate_trackers)), proposer_seed)
state.whisk_proposer_trackers[i] = state.whisk_candidate_trackers[index]
```
```python
def select_whisk_candidate_trackers(state: BeaconState, epoch: Epoch) -> None:
# Select candidate trackers from active validator trackers
active_validator_indices = get_active_validator_indices(state, epoch)
for i in range(WHISK_CANDIDATE_TRACKERS_COUNT):
seed = hash(get_seed(state, epoch, DOMAIN_WHISK_CANDIDATE_SELECTION) + uint_to_bytes(i))
seed = hash(get_seed(state, epoch, DOMAIN_WHISK_CANDIDATE_SELECTION) + uint_to_bytes(uint64(i)))
candidate_index = compute_proposer_index(state, active_validator_indices, seed) # sample by effective balance
state.whisk_candidate_trackers[i] = state.whisk_trackers[candidate_index]
```
@ -196,7 +210,8 @@ def select_whisk_trackers(state: BeaconState, epoch: Epoch) -> None:
def process_whisk_updates(state: BeaconState) -> None:
next_epoch = Epoch(get_current_epoch(state) + 1)
if next_epoch % WHISK_EPOCHS_PER_SHUFFLING_PHASE == 0: # select trackers at the start of shuffling phases
select_whisk_trackers(state, next_epoch)
select_whisk_proposer_trackers(state, next_epoch)
select_whisk_candidate_trackers(state, next_epoch)
```
```python

View File

@ -53,27 +53,13 @@ The upgrade occurs after the completion of the inner loop of `process_slots` tha
This ensures that we drop right into the beginning of the shuffling phase but without `process_whisk_epoch()` triggering for this Whisk run. Hence we handle all the setup ourselves in `upgrade_to_whisk()` below.
```python
def whisk_candidate_selection(state: BeaconState, epoch: Epoch) -> None:
# TODO
# pylint: disable=unused-argument
pass
```
```python
def whisk_proposer_selection(state: BeaconState, epoch: Epoch) -> None:
# TODO
# pylint: disable=unused-argument
pass
```
```python
def upgrade_to_whisk(pre: bellatrix.BeaconState) -> BeaconState:
def upgrade_to_whisk(pre: capella.BeaconState) -> BeaconState:
# Compute initial unsafe trackers for all validators
ks = [get_initial_whisk_k(ValidatorIndex(validator_index), 0) for validator_index in range(len(pre.validators))]
whisk_k_commitments = [get_k_commitment(k) for k in ks]
whisk_trackers = [get_initial_tracker(k) for k in ks]
epoch = bellatrix.get_current_epoch(pre)
epoch = get_current_epoch(pre)
post = BeaconState(
# Versioning
genesis_time=pre.genesis_time,
@ -115,6 +101,11 @@ def upgrade_to_whisk(pre: bellatrix.BeaconState) -> BeaconState:
next_sync_committee=pre.next_sync_committee,
# Execution-layer
latest_execution_payload_header=pre.latest_execution_payload_header,
# Withdrawals
next_withdrawal_index=pre.next_withdrawal_index,
next_withdrawal_validator_index=pre.next_withdrawal_validator_index,
# Deep history valid from Capella onwards
historical_summaries=pre.historical_summaries,
# Whisk
whisk_proposer_trackers=[WhiskTracker() for _ in range(WHISK_PROPOSER_TRACKERS_COUNT)], # [New in Whisk]
whisk_candidate_trackers=[WhiskTracker() for _ in range(WHISK_CANDIDATE_TRACKERS_COUNT)], # [New in Whisk]
@ -124,12 +115,12 @@ def upgrade_to_whisk(pre: bellatrix.BeaconState) -> BeaconState:
# Do a candidate selection followed by a proposer selection so that we have proposers for the upcoming day
# Use an old epoch when selecting candidates so that we don't get the same seed as in the next candidate selection
whisk_candidate_selection(post, epoch - WHISK_PROPOSER_SELECTION_GAP - 1)
whisk_proposer_selection(post, epoch)
select_whisk_candidate_trackers(post, Epoch(saturating_sub(epoch, WHISK_PROPOSER_SELECTION_GAP + 1)))
select_whisk_proposer_trackers(post, epoch)
# Do a final round of candidate selection.
# We need it so that we have something to shuffle over the upcoming shuffling phase.
whisk_candidate_selection(post, epoch)
select_whisk_candidate_trackers(post, epoch)
return post
```

View File

@ -24,14 +24,19 @@
- [Helper functions](#helper-functions)
- [Misc](#misc)
- [`kzg_commitment_to_versioned_hash`](#kzg_commitment_to_versioned_hash)
- [Beacon state accessors](#beacon-state-accessors)
- [Modified `get_attestation_participation_flag_indices`](#modified-get_attestation_participation_flag_indices)
- [Beacon chain state transition function](#beacon-chain-state-transition-function)
- [Execution engine](#execution-engine)
- [Request data](#request-data)
- [Modified `NewPayloadRequest`](#modified-newpayloadrequest)
- [Engine APIs](#engine-apis)
- [`is_valid_block_hash`](#is_valid_block_hash)
- [`is_valid_versioned_hashes`](#is_valid_versioned_hashes)
- [Modified `notify_new_payload`](#modified-notify_new_payload)
- [Modified `verify_and_notify_new_payload`](#modified-verify_and_notify_new_payload)
- [Block processing](#block-processing)
- [Modified `process_attestation`](#modified-process_attestation)
- [Execution payload](#execution-payload)
- [Modified `process_execution_payload`](#modified-process_execution_payload)
- [Modified `process_voluntary_exit`](#modified-process_voluntary_exit)
@ -43,8 +48,10 @@
## Introduction
Deneb is a consensus-layer upgrade containing a number of features, including:
* [EIP-4788](https://eips.ethereum.org/EIPS/eip-4788): Beacon block root in the EVM
* [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844): Shard Blob Transactions scale data-availability of Ethereum in a simple, forwards-compatible manner
* [EIP-7044](https://github.com/ethereum/EIPs/pull/7044): Perpetually Valid Signed Voluntary Exits
* [EIP-7045](https://eips.ethereum.org/EIPS/eip-7045): Increase Max Attestation Inclusion Slot
## Custom types
@ -82,7 +89,6 @@ and are limited by `MAX_DATA_GAS_PER_BLOCK // DATA_GAS_PER_BLOB`. However the CL
## Configuration
## Containers
### Extended containers
@ -170,6 +176,41 @@ def kzg_commitment_to_versioned_hash(kzg_commitment: KZGCommitment) -> Versioned
return VERSIONED_HASH_VERSION_KZG + hash(kzg_commitment)[1:]
```
### Beacon state accessors
#### Modified `get_attestation_participation_flag_indices`
*Note:* The function `get_attestation_participation_flag_indices` is modified to set the `TIMELY_TARGET_FLAG` for any correct target attestation, regardless of `inclusion_delay`, as a baseline reward for any speed of inclusion of an attestation that contributes to justification of the contained chain (EIP-7045).
```python
def get_attestation_participation_flag_indices(state: BeaconState,
data: AttestationData,
inclusion_delay: uint64) -> Sequence[int]:
"""
Return the flag indices that are satisfied by an attestation.
"""
if data.target.epoch == get_current_epoch(state):
justified_checkpoint = state.current_justified_checkpoint
else:
justified_checkpoint = state.previous_justified_checkpoint
# Matching roots
is_matching_source = data.source == justified_checkpoint
is_matching_target = is_matching_source and data.target.root == get_block_root(state, data.target.epoch)
is_matching_head = is_matching_target and data.beacon_block_root == get_block_root_at_slot(state, data.slot)
assert is_matching_source
participation_flag_indices = []
if is_matching_source and inclusion_delay <= integer_squareroot(SLOTS_PER_EPOCH):
participation_flag_indices.append(TIMELY_SOURCE_FLAG_INDEX)
if is_matching_target: # [Modified in Deneb:EIP7045]
participation_flag_indices.append(TIMELY_TARGET_FLAG_INDEX)
if is_matching_head and inclusion_delay == MIN_ATTESTATION_INCLUSION_DELAY:
participation_flag_indices.append(TIMELY_HEAD_FLAG_INDEX)
return participation_flag_indices
```
## Beacon chain state transition function
### Execution engine
@ -183,10 +224,25 @@ def kzg_commitment_to_versioned_hash(kzg_commitment: KZGCommitment) -> Versioned
class NewPayloadRequest(object):
execution_payload: ExecutionPayload
versioned_hashes: Sequence[VersionedHash]
parent_beacon_block_root: Root
```
#### Engine APIs
##### `is_valid_block_hash`
*Note*: The function `is_valid_block_hash` is modified to include the additional `parent_beacon_block_root` parameter for EIP-4788.
```python
def is_valid_block_hash(self: ExecutionEngine,
execution_payload: ExecutionPayload,
parent_beacon_block_root: Root) -> bool:
"""
Return ``True`` if and only if ``execution_payload.block_hash`` is computed correctly.
"""
...
```
##### `is_valid_versioned_hashes`
```python
@ -198,6 +254,20 @@ def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPay
...
```
##### Modified `notify_new_payload`
*Note*: The function `notify_new_payload` is modified to include the additional `parent_beacon_block_root` parameter for EIP-4788.
```python
def notify_new_payload(self: ExecutionEngine,
execution_payload: ExecutionPayload,
parent_beacon_block_root: Root) -> bool:
"""
Return ``True`` if and only if ``execution_payload`` is valid with respect to ``self.execution_state``.
"""
...
```
##### Modified `verify_and_notify_new_payload`
```python
@ -206,14 +276,19 @@ def verify_and_notify_new_payload(self: ExecutionEngine,
"""
Return ``True`` if and only if ``new_payload_request`` is valid with respect to ``self.execution_state``.
"""
if not self.is_valid_block_hash(new_payload_request.execution_payload):
execution_payload = new_payload_request.execution_payload
parent_beacon_block_root = new_payload_request.parent_beacon_block_root
# [Modified in Deneb:EIP4788]
if not self.is_valid_block_hash(execution_payload, parent_beacon_block_root):
return False
# [New in Deneb:EIP4844]
if not self.is_valid_versioned_hashes(new_payload_request):
return False
if not self.notify_new_payload(new_payload_request.execution_payload):
# [Modified in Deneb:EIP4788]
if not self.notify_new_payload(execution_payload, parent_beacon_block_root):
return False
return True
@ -221,10 +296,52 @@ def verify_and_notify_new_payload(self: ExecutionEngine,
### Block processing
#### Modified `process_attestation`
*Note*: The function `process_attestation` is modified to expand valid slots for inclusion to those in both `target.epoch` epoch and `target.epoch + 1` epoch for EIP-7045. Additionally, it utilizes an updated version of `get_attestation_participation_flag_indices` to ensure rewards are available for the extended attestation inclusion range for EIP-7045.
```python
def process_attestation(state: BeaconState, attestation: Attestation) -> None:
data = attestation.data
assert data.target.epoch in (get_previous_epoch(state), get_current_epoch(state))
assert data.target.epoch == compute_epoch_at_slot(data.slot)
assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot # [Modified in Deneb:EIP7045]
assert data.index < get_committee_count_per_slot(state, data.target.epoch)
committee = get_beacon_committee(state, data.slot, data.index)
assert len(attestation.aggregation_bits) == len(committee)
# Participation flag indices
participation_flag_indices = get_attestation_participation_flag_indices(state, data, state.slot - data.slot)
# Verify signature
assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation))
# Update epoch participation flags
if data.target.epoch == get_current_epoch(state):
epoch_participation = state.current_epoch_participation
else:
epoch_participation = state.previous_epoch_participation
proposer_reward_numerator = 0
for index in get_attesting_indices(state, data, attestation.aggregation_bits):
for flag_index, weight in enumerate(PARTICIPATION_FLAG_WEIGHTS):
if flag_index in participation_flag_indices and not has_flag(epoch_participation[index], flag_index):
epoch_participation[index] = add_flag(epoch_participation[index], flag_index)
proposer_reward_numerator += get_base_reward(state, index) * weight
# Reward proposer
proposer_reward_denominator = (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT) * WEIGHT_DENOMINATOR // PROPOSER_WEIGHT
proposer_reward = Gwei(proposer_reward_numerator // proposer_reward_denominator)
increase_balance(state, get_beacon_proposer_index(state), proposer_reward)
```
#### Execution payload
##### Modified `process_execution_payload`
*Note*: The function `process_execution_payload` is modified to pass `versioned_hashes` into `execution_engine.verify_and_notify_new_payload` and to assign the new fields in `ExecutionPayloadHeader` for EIP-4844. It is also modified to pass in the parent beacon block root to support EIP-4788.
```python
def process_execution_payload(state: BeaconState, body: BeaconBlockBody, execution_engine: ExecutionEngine) -> None:
payload = body.execution_payload
@ -241,9 +358,14 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
# Verify the execution payload is valid
# [Modified in Deneb:EIP4844] Pass `versioned_hashes` to Execution Engine
# [Modified in Deneb:EIP4788] Pass `parent_beacon_block_root` to Execution Engine
versioned_hashes = [kzg_commitment_to_versioned_hash(commitment) for commitment in body.blob_kzg_commitments]
assert execution_engine.verify_and_notify_new_payload(
NewPayloadRequest(execution_payload=payload, versioned_hashes=versioned_hashes)
NewPayloadRequest(
execution_payload=payload,
versioned_hashes=versioned_hashes,
parent_beacon_block_root=state.latest_block_header.parent_root,
)
)
# Cache execution payload header
@ -270,7 +392,7 @@ def process_execution_payload(state: BeaconState, body: BeaconBlockBody, executi
#### Modified `process_voluntary_exit`
*Note*: The function `process_voluntary_exit` is modified to use the a fixed fork version -- `CAPELLA_FORK_VERSION` -- for EIP-7044
*Note*: The function `process_voluntary_exit` is modified to use a fixed fork version -- `CAPELLA_FORK_VERSION` -- for EIP-7044.
```python
def process_voluntary_exit(state: BeaconState, signed_voluntary_exit: SignedVoluntaryExit) -> None:

View File

@ -8,7 +8,8 @@
- [Introduction](#introduction)
- [Containers](#containers)
- [Helpers](#helpers)
- [`is_data_available`](#is_data_available)
- [Extended `PayloadAttributes`](#extended-payloadattributes)
- [`is_data_available`](#is_data_available)
- [Updated fork-choice handlers](#updated-fork-choice-handlers)
- [`on_block`](#on_block)
@ -23,7 +24,21 @@ This is the modification of the fork choice accompanying the Deneb upgrade.
## Helpers
#### `is_data_available`
### Extended `PayloadAttributes`
`PayloadAttributes` is extended with the parent beacon block root for EIP-4788.
```python
@dataclass
class PayloadAttributes(object):
timestamp: uint64
prev_randao: Bytes32
suggested_fee_recipient: ExecutionAddress
withdrawals: Sequence[Withdrawal]
parent_beacon_block_root: Root # [New in Deneb:EIP4788]
```
### `is_data_available`
*[New in Deneb:EIP4844]*

View File

@ -57,7 +57,7 @@ def compute_fork_version(epoch: Epoch) -> Version:
### Fork trigger
TBD. This fork is defined for testing purposes, the EIP may be combined with other consensus-layer upgrade.
TBD. This fork is defined for testing purposes.
For now, we assume the condition will be triggered at epoch `DENEB_FORK_EPOCH`.
Note that for the pure Deneb networks, we don't apply `upgrade_to_deneb` since it starts with Deneb version logic.
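For reference, the `compute_fork_version` shown in the hunk context above follows the usual cascading shape across forks. The body below is an editor's sketch consistent with that shape; the authoritative definition lives in this fork document.

```python
def compute_fork_version(epoch: Epoch) -> Version:
    """
    Return the fork version at the given ``epoch``.
    """
    if epoch >= DENEB_FORK_EPOCH:
        return DENEB_FORK_VERSION
    if epoch >= CAPELLA_FORK_EPOCH:
        return CAPELLA_FORK_VERSION
    if epoch >= BELLATRIX_FORK_EPOCH:
        return BELLATRIX_FORK_VERSION
    if epoch >= ALTAIR_FORK_EPOCH:
        return ALTAIR_FORK_VERSION
    return GENESIS_FORK_VERSION
```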

View File

@ -23,6 +23,9 @@ The specification of these changes continues in the same format as the network s
- [Global topics](#global-topics)
- [`beacon_block`](#beacon_block)
- [`blob_sidecar_{subnet_id}`](#blob_sidecar_subnet_id)
- [`beacon_aggregate_and_proof`](#beacon_aggregate_and_proof)
- [Attestation subnets](#attestation-subnets)
- [`beacon_attestation_{subnet_id}`](#beacon_attestation_subnet_id)
- [Transitioning the gossip](#transitioning-the-gossip)
- [The Req/Resp domain](#the-reqresp-domain)
- [Messages](#messages)
@ -106,7 +109,11 @@ Some gossip meshes are upgraded in the fork of Deneb to support upgraded types.
Topics follow the same specification as in prior upgrades.
The `beacon_block` topic is modified to also support deneb blocks and new topics are added per table below. All other topics remain stable.
The `beacon_block` topic is modified to also support Deneb blocks and new topics are added per table below.
The `voluntary_exit` topic is implicitly modified due to the locked-in use of `CAPELLA_FORK_VERSION` for this message's signature validation under EIP-7044.
The `beacon_aggregate_and_proof` and `beacon_attestation_{subnet_id}` topics are modified to support the gossip of attestations created in epoch `N` to be gossiped through the entire range of slots in epoch `N+1` rather than only through one epoch of slots for EIP-7045.
The specification around the creation, validation, and dissemination of messages has not changed from the Capella document unless explicitly noted here.
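The EIP-7044 lock-in can be illustrated with a short sketch mirroring the modified `process_voluntary_exit` (an editor's sketch, not new normative text; all names are from the beacon chain spec):

```python
# Voluntary exit signatures are verified against CAPELLA_FORK_VERSION,
# regardless of which fork the chain is currently in (EIP-7044).
domain = compute_domain(DOMAIN_VOLUNTARY_EXIT, CAPELLA_FORK_VERSION, state.genesis_validators_root)
signing_root = compute_signing_root(signed_voluntary_exit.message, domain)
assert bls.Verify(validator.pubkey, signing_root, signed_voluntary_exit.signature)
```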
@ -124,7 +131,9 @@ Deneb introduces new global topics for blob sidecars.
###### `beacon_block`
The *type* of the payload of this topic changes to the (modified) `SignedBeaconBlock` found in deneb.
The *type* of the payload of this topic changes to the (modified) `SignedBeaconBlock` found in Deneb.
*[Modified in Deneb:EIP4844]*
New validation:
@ -150,6 +159,41 @@ The following validations MUST pass before forwarding the `signed_blob_sidecar`
- _[REJECT]_ The sidecar is proposed by the expected `proposer_index` for the block's slot in the context of the current shuffling (defined by `block_parent_root`/`slot`).
If the `proposer_index` cannot immediately be verified against the expected shuffling, the sidecar MAY be queued for later processing while proposers for the block's branch are calculated -- in such a case _do not_ `REJECT`, instead `IGNORE` this message.
###### `beacon_aggregate_and_proof`
*[Modified in Deneb:EIP7045]*
The following validation is removed:
* _[IGNORE]_ `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot`
(a client MAY queue future aggregates for processing at the appropriate slot).
The following validations are added in its place:
* _[IGNORE]_ `aggregate.data.slot` is equal to or earlier than the `current_slot` (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `aggregate.data.slot <= current_slot`
(a client MAY queue future aggregates for processing at the appropriate slot).
* _[IGNORE]_ the epoch of `aggregate.data.slot` is either the current or previous epoch
(with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `compute_epoch_at_slot(aggregate.data.slot) in (get_previous_epoch(state), get_current_epoch(state))`
##### Attestation subnets
###### `beacon_attestation_{subnet_id}`
*[Modified in Deneb:EIP7045]*
The following validation is removed:
* _[IGNORE]_ `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot`
(a client MAY queue future attestations for processing at the appropriate slot).
The following validations are added in its place:
* _[IGNORE]_ `attestation.data.slot` is equal to or earlier than the `current_slot` (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `attestation.data.slot <= current_slot`
(a client MAY queue future attestations for processing at the appropriate slot).
* _[IGNORE]_ the epoch of `attestation.data.slot` is either the current or previous epoch
(with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) --
i.e. `compute_epoch_at_slot(attestation.data.slot) in (get_previous_epoch(state), get_current_epoch(state))`
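Taken together, the added `[IGNORE]` conditions for both topics reduce to a single slot-range check. The helper below is an editor's sketch (the name `is_within_attestation_propagation_range` is hypothetical, and the `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance is omitted):

```python
def is_within_attestation_propagation_range(message_slot: Slot, current_slot: Slot) -> bool:
    # The message must not be from a future slot...
    if message_slot > current_slot:
        return False
    # ...and its epoch must be the current or previous epoch.
    return compute_epoch_at_slot(message_slot) + 1 >= compute_epoch_at_slot(current_slot)
```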
#### Transitioning the gossip

View File

@ -19,6 +19,7 @@
- [Beacon chain responsibilities](#beacon-chain-responsibilities)
- [Block and sidecar proposal](#block-and-sidecar-proposal)
- [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody)
- [ExecutionPayload](#executionpayload)
- [Blob KZG commitments](#blob-kzg-commitments)
- [Constructing the `SignedBlobSidecar`s](#constructing-the-signedblobsidecars)
- [Sidecar](#sidecar)
@ -88,11 +89,46 @@ All validator responsibilities remain unchanged other than those noted below.
#### Constructing the `BeaconBlockBody`
##### ExecutionPayload
`prepare_execution_payload` is updated from the Capella specs to provide the parent beacon block root.
*Note*: In this section, `state` is the state of the slot for the block proposal _without_ the block yet applied.
That is, `state` is the `previous_state` processed through any empty slots up to the assigned slot using `process_slots(previous_state, slot)`.
*Note*: The only change made to `prepare_execution_payload` is to add the parent beacon block root as an additional
parameter to the `PayloadAttributes`.
```python
def prepare_execution_payload(state: BeaconState,
safe_block_hash: Hash32,
finalized_block_hash: Hash32,
suggested_fee_recipient: ExecutionAddress,
execution_engine: ExecutionEngine) -> Optional[PayloadId]:
# Verify consistency of the parent hash with respect to the previous execution payload header
parent_hash = state.latest_execution_payload_header.block_hash
# Set the forkchoice head and initiate the payload build process
payload_attributes = PayloadAttributes(
timestamp=compute_timestamp_at_slot(state, state.slot),
prev_randao=get_randao_mix(state, get_current_epoch(state)),
suggested_fee_recipient=suggested_fee_recipient,
withdrawals=get_expected_withdrawals(state),
parent_beacon_block_root=hash_tree_root(state.latest_block_header), # [New in Deneb:EIP4788]
)
return execution_engine.notify_forkchoice_updated(
head_block_hash=parent_hash,
safe_block_hash=safe_block_hash,
finalized_block_hash=finalized_block_hash,
payload_attributes=payload_attributes,
)
```
##### Blob KZG commitments
*[New in Deneb:EIP4844]*
1. After retrieving the execution payload from the execution engine as specified in Capella,
1. After retrieving the execution payload from the execution engine as specified above,
use the `payload_id` to retrieve `blobs`, `blob_kzg_commitments`, and `blob_kzg_proofs`
via `get_payload(payload_id).blobs_bundle`.
2. Set `block.body.blob_kzg_commitments = blob_kzg_commitments`.
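A short sketch of these two steps (editor's sketch: the blobs bundle field names `blobs`, `commitments`, and `proofs` are assumed here, and `payload_id` comes from the earlier `prepare_execution_payload` call):

```python
blobs_bundle = execution_engine.get_payload(payload_id).blobs_bundle
blobs = blobs_bundle.blobs
blob_kzg_commitments = blobs_bundle.commitments
blob_kzg_proofs = blobs_bundle.proofs
block.body.blob_kzg_commitments = blob_kzg_commitments
```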

View File

@ -59,6 +59,7 @@
- [`xor`](#xor)
- [`uint_to_bytes`](#uint_to_bytes)
- [`bytes_to_uint64`](#bytes_to_uint64)
- [`saturating_sub`](#saturating_sub)
- [Crypto](#crypto)
- [`hash`](#hash)
- [`hash_tree_root`](#hash_tree_root)
@ -630,6 +631,16 @@ def bytes_to_uint64(data: bytes) -> uint64:
return uint64(int.from_bytes(data, ENDIANNESS))
```
#### `saturating_sub`
```python
def saturating_sub(a: int, b: int) -> int:
"""
Computes a - b, saturating at numeric bounds.
"""
return a - b if a > b else 0
```
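For instance (illustrative):

```python
assert saturating_sub(5, 3) == 2
assert saturating_sub(3, 5) == 0  # a plain uint64 subtraction would underflow here
```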
### Crypto
#### `hash`

View File

@ -1 +1 @@
1.4.0-alpha.3
1.4.0-beta.0

View File

@ -537,13 +537,14 @@ def with_presets(preset_bases, reason=None):
return decorator
with_light_client = with_phases(LIGHT_CLIENT_TESTING_FORKS)
with_altair_and_later = with_all_phases_from(ALTAIR)
with_bellatrix_and_later = with_all_phases_from(BELLATRIX)
with_capella_and_later = with_all_phases_from(CAPELLA)
with_deneb_and_later = with_all_phases_from(DENEB)
with_eip6110_and_later = with_all_phases_from(EIP6110)
with_eip7002_and_later = with_all_phases_from(EIP7002)
with_light_client = with_phases(LIGHT_CLIENT_TESTING_FORKS)
class quoted_str(str):

View File

@ -1,16 +1,24 @@
from eth2spec.test.helpers.state import (
state_transition_and_sign_block
state_transition_and_sign_block,
next_epoch_via_block,
transition_to,
)
from eth2spec.test.helpers.block import (
build_empty_block_for_next_slot
build_empty_block_for_next_slot,
)
from eth2spec.test.context import (
DENEB,
spec_state_test,
spec_configured_state_test,
with_deneb_and_later,
with_phases,
)
from eth2spec.test.helpers.execution_payload import (
compute_el_block_hash,
)
from eth2spec.test.helpers.attestations import (
get_valid_attestation,
)
from eth2spec.test.helpers.sharding import (
get_sample_opaque_tx,
)
@ -58,3 +66,35 @@ def test_max_blobs_per_block(spec, state):
@spec_state_test
def test_invalid_exceed_max_blobs_per_block(spec, state):
yield from run_block_with_blobs(spec, state, blob_count=spec.MAX_BLOBS_PER_BLOCK + 1, valid=False)
@with_phases([DENEB])
@spec_configured_state_test({
'DENEB_FORK_EPOCH': 2,
})
def test_include_attestation_from_previous_fork_with_new_range(spec, state):
# Transition to the epoch prior to the fork epoch
next_epoch_via_block(spec, state)
# Generate an attestation for slot 0 of this epoch
attestation = get_valid_attestation(spec, state, signed=True)
# Transition to second to last slot in `DENEB_FORK_EPOCH`
next_epoch_via_block(spec, state)
current_epoch = spec.get_current_epoch(state)
assert current_epoch == spec.config.DENEB_FORK_EPOCH
penultimate_slot = spec.compute_start_slot_at_epoch(current_epoch + 1) - 2
transition_to(spec, state, penultimate_slot)
# Ensure the new state is in the increased EIP-7045 slot inclusion range
assert penultimate_slot - attestation.data.slot > spec.SLOTS_PER_EPOCH
block = build_empty_block_for_next_slot(spec, state)
block.body.attestations.append(attestation)
yield 'pre', state
signed_block = state_transition_and_sign_block(spec, state, block)
yield 'blocks', [signed_block]
yield 'post', state

View File

@ -5,7 +5,7 @@ from typing import List
from eth2spec.test.context import expect_assertion_error
from eth2spec.test.helpers.state import state_transition_and_sign_block, next_epoch, next_slot
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.forks import is_post_altair
from eth2spec.test.helpers.forks import is_post_altair, is_post_deneb
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils import bls
from eth2spec.utils.ssz.ssz_typing import Bitlist
@ -158,6 +158,14 @@ def get_attestation_signature(spec, state, attestation_data, privkey):
return bls.Sign(privkey, signing_root)
def compute_max_inclusion_slot(spec, attestation):
if is_post_deneb(spec):
next_epoch = spec.compute_epoch_at_slot(attestation.data.slot) + 1
end_of_next_epoch = spec.compute_start_slot_at_epoch(next_epoch + 1) - 1
return end_of_next_epoch
return attestation.data.slot + spec.SLOTS_PER_EPOCH
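# Editor's note (illustration): pre-Deneb, an attestation from slot N remains
# includable through slot N + SLOTS_PER_EPOCH; post-Deneb (EIP-7045), it remains
# includable through the last slot of the epoch after the attestation's epoch.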
def fill_aggregate_attestation(spec, state, attestation, signed=False, filter_participant_set=None):
"""
`signed`: Signing is optional.

View File

@ -31,7 +31,7 @@ ALL_PHASES = (
# Formal forks
*MAINNET_FORKS,
DENEB,
# Experimental features
# Experimental patches
EIP6110,
EIP7002,
)

View File

@ -12,6 +12,7 @@ from eth2spec.test.helpers.attestations import (
get_valid_attestation,
sign_aggregate_attestation,
sign_attestation,
compute_max_inclusion_slot,
)
from eth2spec.test.helpers.state import (
next_slots,
@ -95,11 +96,22 @@ def test_invalid_before_inclusion_delay(spec, state):
@with_all_phases
@spec_state_test
def test_invalid_after_epoch_slots(spec, state):
def test_at_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=True)
# increment past latest inclusion slot
transition_to_slot_via_block(spec, state, state.slot + spec.SLOTS_PER_EPOCH + 1)
transition_to_slot_via_block(spec, state, compute_max_inclusion_slot(spec, attestation))
yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_invalid_after_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=True)
# increment past latest inclusion slot
transition_to_slot_via_block(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)
yield from run_attestation_processing(spec, state, attestation, valid=False)
@ -361,7 +373,7 @@ def test_invalid_too_few_aggregation_bits(spec, state):
#
# Full correct atttestation contents at different slot inclusions
# Full correct attestation contents at different slot inclusions
#
@with_all_phases
@ -393,11 +405,20 @@ def test_correct_attestation_included_at_one_epoch_delay(spec, state):
@with_all_phases
@spec_state_test
def test_invalid_correct_attestation_included_after_epoch_delay(spec, state):
def test_correct_attestation_included_at_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=True)
next_slots(spec, state, compute_max_inclusion_slot(spec, attestation))
yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_invalid_correct_attestation_included_after_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=True)
# increment past latest inclusion slot
next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)
yield from run_attestation_processing(spec, state, attestation, valid=False)
@ -432,9 +453,9 @@ def test_incorrect_head_included_at_sqrt_epoch_delay(spec, state):
@with_all_phases
@spec_state_test
def test_incorrect_head_included_at_epoch_delay(spec, state):
def test_incorrect_head_included_at_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=False)
next_slots(spec, state, spec.SLOTS_PER_EPOCH)
next_slots(spec, state, compute_max_inclusion_slot(spec, attestation))
attestation.data.beacon_block_root = b'\x42' * 32
sign_attestation(spec, state, attestation)
@ -444,11 +465,11 @@ def test_incorrect_head_included_at_epoch_delay(spec, state):
@with_all_phases
@spec_state_test
def test_invalid_incorrect_head_included_after_epoch_delay(spec, state):
def test_invalid_incorrect_head_included_after_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=False)
# increment past latest inclusion slot
next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)
attestation.data.beacon_block_root = b'\x42' * 32
sign_attestation(spec, state, attestation)
@ -501,10 +522,10 @@ def test_incorrect_head_and_target_included_at_epoch_delay(spec, state):
@with_all_phases
@spec_state_test
def test_invalid_incorrect_head_and_target_included_after_epoch_delay(spec, state):
def test_invalid_incorrect_head_and_target_included_after_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=False)
# increment past latest inclusion slot
next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)
attestation.data.beacon_block_root = b'\x42' * 32
attestation.data.target.root = b'\x42' * 32
@ -555,10 +576,10 @@ def test_incorrect_target_included_at_epoch_delay(spec, state):
@with_all_phases
@spec_state_test
def test_invalid_incorrect_target_included_after_epoch_delay(spec, state):
def test_invalid_incorrect_target_included_after_max_inclusion_slot(spec, state):
attestation = get_valid_attestation(spec, state, signed=False)
# increment past latest inclusion slot
next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
next_slots(spec, state, compute_max_inclusion_slot(spec, attestation) + 1)
attestation.data.target.root = b'\x42' * 32
sign_attestation(spec, state, attestation)