Merge branch 'dev' into prepare_state

commit 2063c96c80
@@ -15,7 +15,7 @@ Over time, the need to sync an older state may be deprecated.
 In this case, the prefix on the new constant may be removed, and the old constant will keep a special name before completely being removed.

 A previous iteration of forking made use of "timelines", but this collides with the definitions used in the spec (constants for special forking slots, etc.), and was not integrated sufficiently in any of the spec tools or implementations.
-Instead, the config essentially doubles as fork definition now, e.g. changing the value for `ALTAIR_FORK_SLOT` changes the fork.
+Instead, the config essentially doubles as fork definition now, e.g. changing the value for `ALTAIR_FORK_EPOCH` changes the fork.

 Another reason to prefer forking through constants is the ability to program a forking moment based on context, instead of being limited to a static slot number.

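To make the idea concrete, here is a minimal sketch (not part of the spec or its tooling; the helper name and constant values are illustrative) of how fork logic can be keyed on a single configurable epoch constant:

```python
# Illustrative only: a fork is (re)scheduled by editing one config constant.
GENESIS_FORK_VERSION = bytes.fromhex("00000000")
ALTAIR_FORK_VERSION = bytes.fromhex("01000000")
ALTAIR_FORK_EPOCH = 2**64 - 1  # placeholder: fork not scheduled yet


def fork_version_at_epoch(epoch: int) -> bytes:
    # No separate "timeline" object is needed; the config value alone decides.
    if epoch >= ALTAIR_FORK_EPOCH:
        return ALTAIR_FORK_VERSION
    return GENESIS_FORK_VERSION


assert fork_version_at_epoch(0) == GENESIS_FORK_VERSION
```
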
@@ -38,7 +38,7 @@ DOMAIN_CONTRIBUTION_AND_PROOF: 0x09000000
 # 0x01000000
 ALTAIR_FORK_VERSION: 0x01000000
 # TBD
-ALTAIR_FORK_SLOT: 18446744073709551615
+ALTAIR_FORK_EPOCH: 18446744073709551615


 # Sync protocol

@@ -4,4 +4,4 @@
 # ---------------------------------------------------------------
 MERGE_FORK_VERSION: 0x02000000
 # TBD, temporarily max uint64 value: 2**64 - 1
-MERGE_FORK_SLOT: 18446744073709551615
+MERGE_FORK_EPOCH: 18446744073709551615

@@ -4,7 +4,7 @@
 # ---------------------------------------------------------------
 SHARDING_FORK_VERSION: 0x03000000
 # TBD, temporarily max uint64 value: 2**64 - 1
-SHARDING_FORK_SLOT: 18446744073709551615
+SHARDING_FORK_EPOCH: 18446744073709551615


 # Beacon-chain

@@ -38,7 +38,7 @@ DOMAIN_CONTRIBUTION_AND_PROOF: 0x09000000
 # [customized] Highest byte set to 0x01 to avoid collisions with mainnet versioning
 ALTAIR_FORK_VERSION: 0x01000001
 # [customized]
-ALTAIR_FORK_SLOT: 18446744073709551615
+ALTAIR_FORK_EPOCH: 18446744073709551615


 # Sync protocol

@@ -4,4 +4,4 @@
 # ---------------------------------------------------------------
 MERGE_FORK_VERSION: 0x02000001
 # TBD, temporarily max uint64 value: 2**64 - 1
-MERGE_FORK_SLOT: 18446744073709551615
+MERGE_FORK_EPOCH: 18446744073709551615

@@ -4,7 +4,7 @@
 # ---------------------------------------------------------------
 SHARDING_FORK_VERSION: 0x03000001
 # TBD, temporarily max uint64 value: 2**64 - 1
-MERGE_FORK_SLOT: 18446744073709551615
+MERGE_FORK_EPOCH: 18446744073709551615


 # Beacon-chain

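The `18446744073709551615` value in these configs is the maximum `uint64` (2**64 - 1), the same sentinel the spec calls `FAR_FUTURE_EPOCH`. A minimal sketch of how a config consumer might treat it (the helper name is illustrative, not part of the spec tooling):

```python
FAR_FUTURE_EPOCH = 2**64 - 1  # sentinel used while a fork epoch is still TBD


def fork_is_scheduled(fork_epoch: int) -> bool:
    # A fork parked at the max uint64 value is treated as "not scheduled yet".
    return fork_epoch != FAR_FUTURE_EPOCH


assert not fork_is_scheduled(18446744073709551615)
assert fork_is_scheduled(36660)  # any concrete epoch counts as scheduled
```
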
setup.py
@@ -5,16 +5,37 @@ from distutils import dir_util
 from distutils.util import convert_path
 import os
 import re
-from typing import Dict, NamedTuple, List
+import string
+from typing import Dict, NamedTuple, List, Sequence
+from abc import ABC, abstractmethod


 FUNCTION_REGEX = r'^def [\w_]*'


+# Definitions in context.py
+PHASE0 = 'phase0'
+ALTAIR = 'altair'
+MERGE = 'merge'
+
+CONFIG_LOADER = '''
+apply_constants_config(globals())
+'''
+
+# The helper functions that are used when defining constants
+CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = '''
+def ceillog2(x: int) -> uint64:
+    if x < 1:
+        raise ValueError(f"ceillog2 accepts only positive values, x={x}")
+    return uint64((x - 1).bit_length())
+
+
+def floorlog2(x: int) -> uint64:
+    if x < 1:
+        raise ValueError(f"floorlog2 accepts only positive values, x={x}")
+    return uint64(x.bit_length() - 1)
+'''
+
+
 class SpecObject(NamedTuple):
     functions: Dict[str, str]
     custom_types: Dict[str, str]

@@ -89,10 +110,10 @@ def get_spec(file_name: str) -> SpecObject:
                 if '`' in row[i]:
                     row[i] = row[i][:row[i].find('`')]
             is_constant_def = True
-            if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':
+            if row[0][0] not in string.ascii_uppercase + '_':
                 is_constant_def = False
             for c in row[0]:
-                if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
+                if c not in string.ascii_uppercase + '_' + string.digits:
                     is_constant_def = False
             if is_constant_def:
                 if row[1].startswith('get_generalized_index'):

@@ -111,112 +132,103 @@ def get_spec(file_name: str) -> SpecObject:
     )


-CONFIG_LOADER = '''
-apply_constants_config(globals())
-'''
+class SpecBuilder(ABC):
+    @property
+    @abstractmethod
+    def fork(self) -> str:
+        raise NotImplementedError()

-PHASE0_IMPORTS = '''from eth2spec.config.config_util import apply_constants_config
+    @classmethod
+    @abstractmethod
+    def imports(cls) -> str:
+        """
+        Import objects from other libraries.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    @abstractmethod
+    def preparations(cls) -> str:
+        """
+        Define special types/constants for building pyspec or call functions.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    @abstractmethod
+    def sundry_functions(cls) -> str:
+        """
+        The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    @abstractmethod
+    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
+        """
+        The constants that are required for SSZ objects.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    @abstractmethod
+    def hardcoded_custom_type_dep_constants(cls) -> Dict[str, str]:
+        """
+        The constants that are required for custom types.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    @abstractmethod
+    def invariant_checks(cls) -> str:
+        """
+        The invariant checks
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    @abstractmethod
+    def build_spec(cls, source_files: List[str]) -> str:
+        raise NotImplementedError()
+
+
+#
+# Phase0SpecBuilder
+#
+class Phase0SpecBuilder(SpecBuilder):
+    fork: str = PHASE0
+
+    @classmethod
+    def imports(cls) -> str:
+        return '''from lru import LRU
+from dataclasses import (
+    dataclass,
+    field,
+)
 from typing import (
     Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar
 )

-from dataclasses import (
-    dataclass,
-    field,
-)
-
-from lru import LRU
-
+from eth2spec.config.config_util import apply_constants_config
 from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
 from eth2spec.utils.ssz.ssz_typing import (
     View, boolean, Container, List, Vector, uint8, uint32, uint64,
-    Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
-)
+    Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist)
+from eth2spec.utils.ssz.ssz_typing import Bitvector  # noqa: F401
 from eth2spec.utils import bls

 from eth2spec.utils.hash_function import hash
+'''

+    @classmethod
+    def preparations(cls) -> str:
+        return '''
 SSZObject = TypeVar('SSZObject', bound=View)

 CONFIG_NAME = 'mainnet'
 '''

-ALTAIR_IMPORTS = '''from eth2spec.phase0 import spec as phase0
-from eth2spec.config.config_util import apply_constants_config
-from typing import (
-    Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional, Union
-)
-
-from dataclasses import (
-    dataclass,
-    field,
-)
-
-from lru import LRU
-
-from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
-from eth2spec.utils.ssz.ssz_typing import (
-    View, boolean, Container, List, Vector, uint8, uint32, uint64,
-    Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
-    Path,
-)
-from eth2spec.utils import bls
-
-from eth2spec.utils.hash_function import hash
-
-# Whenever altair is loaded, make sure we have the latest phase0
-from importlib import reload
-reload(phase0)
-
-
-SSZVariableName = str
-GeneralizedIndex = NewType('GeneralizedIndex', int)
-SSZObject = TypeVar('SSZObject', bound=View)
-
-CONFIG_NAME = 'mainnet'
-'''

-MERGE_IMPORTS = '''from eth2spec.phase0 import spec as phase0
-from eth2spec.config.config_util import apply_constants_config
-from typing import (
-    Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar
-)
-
-from dataclasses import (
-    dataclass,
-    field,
-)
-
-from lru import LRU
-
-from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
-from eth2spec.utils.ssz.ssz_typing import (
-    View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256,
-    Bytes1, Bytes4, Bytes20, Bytes32, Bytes48, Bytes96, Bitlist,
-    ByteList, ByteVector
-)
-from eth2spec.utils import bls
-
-from eth2spec.utils.hash_function import hash
-
-SSZObject = TypeVar('SSZObject', bound=View)
-
-CONFIG_NAME = 'mainnet'
-'''

-SUNDRY_CONSTANTS_FUNCTIONS = '''
-def ceillog2(x: int) -> uint64:
-    if x < 1:
-        raise ValueError(f"ceillog2 accepts only positive values, x={x}")
-    return uint64((x - 1).bit_length())
-
-
-def floorlog2(x: int) -> uint64:
-    if x < 1:
-        raise ValueError(f"floorlog2 accepts only positive values, x={x}")
-    return uint64(x.bit_length() - 1)
-'''

-PHASE0_SUNDRY_FUNCTIONS = '''
+    @classmethod
+    def sundry_functions(cls) -> str:
+        return '''
 def get_eth1_data(block: Eth1Block) -> Eth1Data:
     """
     A stub function return mocking Eth1Data.

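The class hierarchy above replaces fork-specific module constants with overridable builder hooks. The following stand-alone sketch mirrors the same pattern with simplified names (it is not the actual `setup.py` code), showing how a subclass composes its parent's output via `super()` and how a registry keyed by fork name is derived from the classes:

```python
from abc import ABC, abstractmethod


class Builder(ABC):
    fork: str = ""

    @classmethod
    @abstractmethod
    def imports(cls) -> str:
        raise NotImplementedError()


class Phase0Builder(Builder):
    fork = "phase0"

    @classmethod
    def imports(cls) -> str:
        return "from lru import LRU"


class AltairBuilder(Phase0Builder):
    fork = "altair"

    @classmethod
    def imports(cls) -> str:
        # Extend, rather than repeat, the parent's import block.
        return super().imports() + "\nfrom importlib import reload"


# Registry keyed by fork name, analogous to spec_builders below.
builders = {b.fork: b for b in (Phase0Builder, AltairBuilder)}
assert "from lru import LRU" in builders["altair"].imports()
```
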
@@ -287,9 +299,52 @@ get_attesting_indices = cache_this(
     ),
     _get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)'''

+    @classmethod
+    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
+        return {}

-ALTAIR_SUNDRY_FUNCTIONS = '''
+    @classmethod
+    def hardcoded_custom_type_dep_constants(cls) -> Dict[str, str]:
+        return {}
+
+    @classmethod
+    def invariant_checks(cls) -> str:
+        return ''
+
+    @classmethod
+    def build_spec(cls, source_files: Sequence[str]) -> str:
+        return _build_spec(cls.fork, source_files)
+
+
+#
+# AltairSpecBuilder
+#
+class AltairSpecBuilder(Phase0SpecBuilder):
+    fork: str = ALTAIR
+
+    @classmethod
+    def imports(cls) -> str:
+        return super().imports() + '\n' + '''
+from typing import NewType, Union
+from importlib import reload
+
+from eth2spec.phase0 import spec as phase0
+from eth2spec.utils.ssz.ssz_typing import Path
+'''
+
+    @classmethod
+    def preparations(cls):
+        return super().preparations() + '\n' + '''
+# Whenever this spec version is loaded, make sure we have the latest phase0
+reload(phase0)
+
+
+SSZVariableName = str
+GeneralizedIndex = NewType('GeneralizedIndex', int)
+'''
+
+    @classmethod
+    def sundry_functions(cls) -> str:
+        return super().sundry_functions() + '\n\n' + '''
 def get_generalized_index(ssz_class: Any, *path: Sequence[Union[int, SSZVariableName]]) -> GeneralizedIndex:
     ssz_path = Path(ssz_class)
     for item in path:

@@ -297,7 +352,45 @@ def get_generalized_index(ssz_class: Any, *path: Sequence[Union[int, SSZVariableName]]) -> GeneralizedIndex:
     return GeneralizedIndex(ssz_path.gindex())'''


-MERGE_SUNDRY_FUNCTIONS = """
+    @classmethod
+    def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]:
+        constants = {
+            'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
+            'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
+        }
+        return {**super().hardcoded_ssz_dep_constants(), **constants}
+
+    @classmethod
+    def invariant_checks(cls) -> str:
+        return '''
+assert (
+    TIMELY_HEAD_WEIGHT + TIMELY_SOURCE_WEIGHT + TIMELY_TARGET_WEIGHT + SYNC_REWARD_WEIGHT + PROPOSER_WEIGHT
+) == WEIGHT_DENOMINATOR'''
+
+
+#
+# MergeSpecBuilder
+#
+class MergeSpecBuilder(Phase0SpecBuilder):
+    fork: str = MERGE
+
+    @classmethod
+    def imports(cls):
+        return super().imports() + '\n' + '''
+from eth2spec.phase0 import spec as phase0
+from eth2spec.utils.ssz.ssz_typing import Bytes20, ByteList, ByteVector, uint256
+from importlib import reload
+'''
+
+    @classmethod
+    def preparations(cls):
+        return super().preparations() + '\n' + '''
+reload(phase0)
+'''
+
+    @classmethod
+    def sundry_functions(cls) -> str:
+        return super().sundry_functions() + '\n\n' + """
 ExecutionState = Any

@@ -321,38 +414,21 @@ def produce_execution_payload(parent_hash: Hash32, timestamp: uint64) -> ExecutionPayload:
     pass"""


-# The constants that depend on SSZ objects
-# Will verify the value at the end of the spec
-ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS = {
-    'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
-    'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
-}
+    @classmethod
+    def hardcoded_custom_type_dep_constants(cls) -> str:
+        constants = {
+            'MAX_BYTES_PER_OPAQUE_TRANSACTION': 'uint64(2**20)',
+        }
+        return {**super().hardcoded_custom_type_dep_constants(), **constants}


+spec_builders = {
+    builder.fork: builder
+    for builder in (Phase0SpecBuilder, AltairSpecBuilder, MergeSpecBuilder)
+}


-ALTAIR_INVARIANT_CHECKS = '''
-assert (
-    TIMELY_HEAD_WEIGHT + TIMELY_SOURCE_WEIGHT + TIMELY_TARGET_WEIGHT + SYNC_REWARD_WEIGHT + PROPOSER_WEIGHT
-) == WEIGHT_DENOMINATOR'''
-
-
-MERGE_HARDCODED_CUSTOM_TYPE_DEP_CONSTANTS = {
-    'MAX_BYTES_PER_OPAQUE_TRANSACTION': 'uint64(2**20)',
-}
-
-
-def is_phase0(fork):
-    return fork == PHASE0
-
-
-def is_altair(fork):
-    return fork == ALTAIR
-
-
-def is_merge(fork):
-    return fork == MERGE
-
-
-def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_class_objects: Dict[str, str]) -> str:
+def objects_to_spec(spec_object: SpecObject, builder: SpecBuilder, ordered_class_objects: Dict[str, str]) -> str:
     """
     Given all the objects that constitute a spec, combine them into a single pyfile.
     """

@@ -382,41 +458,30 @@ def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_class_objects: Dict[str, str]) -> str:
             spec_object.constants[k] += "  # noqa: E501"
     constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, spec_object.constants[x]), spec_object.constants))
     ordered_class_objects_spec = '\n\n'.join(ordered_class_objects.values())

-    if is_altair(fork):
-        altair_ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS[x]), ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS))
-
-    if is_merge(fork):
-        merge_custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, MERGE_HARDCODED_CUSTOM_TYPE_DEP_CONSTANTS[x]), MERGE_HARDCODED_CUSTOM_TYPE_DEP_CONSTANTS))
-
-
+    ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_ssz_dep_constants()[x]), builder.hardcoded_ssz_dep_constants()))
+    ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), builder.hardcoded_ssz_dep_constants()))
+    custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, builder.hardcoded_custom_type_dep_constants()[x]), builder.hardcoded_custom_type_dep_constants()))
     spec = (
-        imports
-        + '\n\n' + f"fork = \'{fork}\'\n"
+        builder.imports()
+        + builder.preparations()
+        + '\n\n' + f"fork = \'{builder.fork}\'\n"
         # The constants that some SSZ containers require. Need to be defined before `new_type_definitions`
-        + ('\n\n' + merge_custom_type_dep_constants + '\n' if is_merge(fork) else '')
+        + ('\n\n' + custom_type_dep_constants + '\n' if custom_type_dep_constants != '' else '')
         + '\n\n' + new_type_definitions
-        + '\n' + SUNDRY_CONSTANTS_FUNCTIONS
+        + '\n' + CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS
         # The constants that some SSZ containers require. Need to be defined before `constants_spec`
-        + ('\n\n' + altair_ssz_dep_constants if is_altair(fork) else '')
+        + ('\n\n' + ssz_dep_constants if ssz_dep_constants != '' else '')
         + '\n\n' + constants_spec
         + '\n\n' + CONFIG_LOADER
         + '\n\n' + ordered_class_objects_spec
         + '\n\n' + functions_spec
         # Functions to make pyspec work
-        + '\n' + PHASE0_SUNDRY_FUNCTIONS
-        + ('\n' + ALTAIR_SUNDRY_FUNCTIONS if is_altair(fork) else '')
-        + ('\n' + MERGE_SUNDRY_FUNCTIONS if is_merge(fork) else '')
+        + '\n' + builder.sundry_functions()
+        # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
+        # as same as the spec definition.
+        + ('\n\n\n' + ssz_dep_constants_verification if ssz_dep_constants_verification != '' else '')
+        + ('\n' + builder.invariant_checks() if builder.invariant_checks() != '' else '')
+        + '\n'
     )

-    # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
-    # as same as the spec definition.
-    if is_altair(fork):
-        altair_ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS))
-        spec += '\n\n\n' + altair_ssz_dep_constants_verification
-        spec += '\n' + ALTAIR_INVARIANT_CHECKS
-
-    spec += '\n'
     return spec

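The `'\n'.join(map(...))` calls above render the builder's hardcoded dicts into definition lines and matching `assert` lines. A tiny self-contained illustration of that rendering (toy values, not the real constants pipeline):

```python
# Toy illustration of how hardcoded constants become code lines plus verification asserts.
hardcoded = {
    'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
    'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)',
}
# Values as they were parsed out of the markdown spec (pretend they match).
parsed_from_spec = dict(hardcoded)

definitions = '\n'.join('%s = %s' % (name, value) for name, value in hardcoded.items())
verification = '\n'.join('assert %s == %s' % (name, parsed_from_spec[name]) for name in hardcoded)

print(definitions)
print(verification)
```
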
@@ -496,14 +561,7 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
     )


-fork_imports = {
-    'phase0': PHASE0_IMPORTS,
-    'altair': ALTAIR_IMPORTS,
-    'merge': MERGE_IMPORTS,
-}
-
-
-def build_spec(fork: str, source_files: List[str]) -> str:
+def _build_spec(fork: str, source_files: Sequence[str]) -> str:
     all_specs = [get_spec(spec) for spec in source_files]

     spec_object = all_specs[0]

@@ -513,7 +571,7 @@ def build_spec(fork: str, source_files: List[str]) -> str:
     class_objects = {**spec_object.ssz_objects, **spec_object.dataclasses}
     dependency_order_class_objects(class_objects, spec_object.custom_types)

-    return objects_to_spec(spec_object, fork_imports[fork], fork, class_objects)
+    return objects_to_spec(spec_object, spec_builders[fork], class_objects)


 class PySpecCommand(Command):

@@ -545,14 +603,14 @@ class PySpecCommand(Command):
         if len(self.md_doc_paths) == 0:
             print("no paths were specified, using default markdown file paths for pyspec"
                   " build (spec fork: %s)" % self.spec_fork)
-            if is_phase0(self.spec_fork):
+            if self.spec_fork == PHASE0:
                 self.md_doc_paths = """
                     specs/phase0/beacon-chain.md
                     specs/phase0/fork-choice.md
                     specs/phase0/validator.md
                     specs/phase0/weak-subjectivity.md
                 """
-            elif is_altair(self.spec_fork):
+            elif self.spec_fork == ALTAIR:
                 self.md_doc_paths = """
                     specs/phase0/beacon-chain.md
                     specs/phase0/fork-choice.md

@@ -563,7 +621,7 @@ class PySpecCommand(Command):
                     specs/altair/validator.md
                     specs/altair/sync-protocol.md
                 """
-            elif is_merge(self.spec_fork):
+            elif self.spec_fork == MERGE:
                 self.md_doc_paths = """
                     specs/phase0/beacon-chain.md
                     specs/phase0/fork-choice.md

@@ -583,7 +641,7 @@ class PySpecCommand(Command):
                 raise Exception('Pyspec markdown input file "%s" does not exist.' % filename)

     def run(self):
-        spec_str = build_spec(self.spec_fork, self.parsed_md_doc_paths)
+        spec_str = spec_builders[self.spec_fork].build_spec(self.parsed_md_doc_paths)
         if self.dry_run:
             self.announce('dry run successfully prepared contents for spec.'
                           f' out dir: "{self.out_dir}", spec fork: "{self.spec_fork}"')

@@ -611,7 +669,7 @@ class BuildPyCommand(build_py):
         self.run_command('pyspec')

     def run(self):
-        for spec_fork in fork_imports:
+        for spec_fork in spec_builders:
             self.run_pyspec_cmd(spec_fork=spec_fork)

         super(BuildPyCommand, self).run()

@@ -639,7 +697,7 @@ class PyspecDevCommand(Command):

     def run(self):
         print("running build_py command")
-        for spec_fork in fork_imports:
+        for spec_fork in spec_builders:
             self.run_pyspec_cmd(spec_fork=spec_fork)

 commands = {

@@ -26,19 +26,19 @@ Warning: this configuration is not definitive.
 | Name | Value |
 | - | - |
 | `ALTAIR_FORK_VERSION` | `Version('0x01000000')` |
-| `ALTAIR_FORK_SLOT` | `Slot(18446744073709551615)` **TBD** |
+| `ALTAIR_FORK_EPOCH` | `Epoch(18446744073709551615)` **TBD** |

 ## Fork to Altair

 ### Fork trigger

-TBD. Social consensus, along with state conditions such as epoch boundary, finality, deposits, active validator count, etc. may be part of the decision process to trigger the fork. For now we assume the condition will be triggered at slot `ALTAIR_FORK_SLOT`, where `ALTAIR_FORK_SLOT % SLOTS_PER_EPOCH == 0`.
+TBD. Social consensus, along with state conditions such as epoch boundary, finality, deposits, active validator count, etc. may be part of the decision process to trigger the fork. For now we assume the condition will be triggered at epoch `ALTAIR_FORK_EPOCH`.

 Note that for the pure Altair testnets, we don't apply `upgrade_to_altair` since it starts with Altair version logic.

 ### Upgrading the state

-After `process_slots` of Phase 0 finishes, if `state.slot == ALTAIR_FORK_SLOT`, an irregular state change is made to upgrade to Altair.
+After `process_slots` of Phase 0 finishes, if `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == ALTAIR_FORK_EPOCH`, an irregular state change is made to upgrade to Altair.

 ```python
 def upgrade_to_altair(pre: phase0.BeaconState) -> BeaconState:

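# Editor's illustration (not spec code): a minimal sketch of where a client could
# apply the epoch-boundary upgrade check described above, after `process_slots`.
# The wrapper name and the fork epoch value are illustrative assumptions only.
SLOTS_PER_EPOCH = 32
ALTAIR_FORK_EPOCH = 74240  # illustrative; the actual mainnet value was decided later


def is_altair_upgrade_slot(slot: int) -> bool:
    # Upgrade only on the epoch-boundary slot whose epoch equals the fork epoch.
    return slot % SLOTS_PER_EPOCH == 0 and slot // SLOTS_PER_EPOCH == ALTAIR_FORK_EPOCH


assert is_altair_upgrade_slot(ALTAIR_FORK_EPOCH * SLOTS_PER_EPOCH)
assert not is_altair_upgrade_slot(ALTAIR_FORK_EPOCH * SLOTS_PER_EPOCH + 1)
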
@@ -37,7 +37,7 @@ def get_new_dependencies(state: BeaconState) -> Set[DataCommitment]:

 ```python
 def get_all_dependencies(store: Store, block: BeaconBlock) -> Set[DataCommitment]:
-    if block.slot < SHARDING_FORK_SLOT:
+    if compute_epoch_at_slot(block.slot) < SHARDING_FORK_EPOCH:
         return set()
     else:
         latest = get_new_dependencies(store.block_states[hash_tree_root(block)])

@@ -61,7 +61,7 @@ We define the following Python custom types for type hinting and readability:
 | Name | Value |
 | - | - |
 | `MAX_BYTES_PER_OPAQUE_TRANSACTION` | `uint64(2**20)` (= 1,048,576) |
-| `MAX_APPLICATION_TRANSACTIONS` | `uint64(2**14)` (= 16,384) |
+| `MAX_EXECUTION_TRANSACTIONS` | `uint64(2**14)` (= 16,384) |
 | `BYTES_PER_LOGS_BLOOM` | `uint64(2**8)` (= 256) |

 ## Containers

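A quick arithmetic check of the table's derived values (verification only, nothing new):

```python
# The parenthesized decimal values in the table follow directly from the powers of two.
assert 2**20 == 1_048_576  # MAX_BYTES_PER_OPAQUE_TRANSACTION
assert 2**14 == 16_384     # MAX_EXECUTION_TRANSACTIONS
assert 2**8 == 256         # BYTES_PER_LOGS_BLOOM
```
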
@@ -108,7 +108,7 @@ class ExecutionPayload(Container):
     timestamp: uint64
     receipt_root: Bytes32
     logs_bloom: ByteVector[BYTES_PER_LOGS_BLOOM]
-    transactions: List[OpaqueTransaction, MAX_APPLICATION_TRANSACTIONS]
+    transactions: List[OpaqueTransaction, MAX_EXECUTION_TRANSACTIONS]
 ```

 #### `ExecutionPayloadHeader`

@@ -466,7 +466,7 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None:
     process_randao(state, block.body)
     process_eth1_data(state, block.body)
     process_operations(state, block.body)  # [Modified in Sharding]
-    process_application_payload(state, block.body)  # [New in Merge]
+    process_execution_payload(state, block.body)  # [New in Merge]
 ```

 #### Operations

@@ -75,7 +75,8 @@ For convenience we alias:

 * `bit` to `boolean`
 * `byte` to `uint8` (this is a basic type)
-* `BytesN` to `Vector[byte, N]` (this is *not* a basic type)
+* `BytesN` and `ByteVector[N]` to `Vector[byte, N]` (this is *not* a basic type)
+* `ByteList[N]` to `List[byte, N]`
 * `null`: `{}`

 ### Default values

@@ -1,7 +1,12 @@
 from eth2spec.test.context import (
+    always_bls,
     spec_state_test,
+    spec_test,
     with_all_phases_except,
     with_configs,
+    with_custom_state,
+    single_phase,
+    misc_balances,
 )
 from eth2spec.test.helpers.constants import (
     PHASE0,

@@ -13,20 +18,21 @@ from eth2spec.test.helpers.epoch_processing import (
 )


-@with_all_phases_except([PHASE0])
-@spec_state_test
-@with_configs([MINIMAL], reason="too slow")
-def test_sync_committees_progress(spec, state):
-    current_epoch = spec.get_current_epoch(state)
-    # NOTE: if not in the genesis epoch, period math below needs to be
-    # adjusted relative to the current epoch
-    assert current_epoch == 0
+#
+# Note:
+# Calculating sync committees requires pubkey aggregation, thus all tests are generated with `always_bls`
+#

+def run_sync_committees_progress_test(spec, state):
     first_sync_committee = state.current_sync_committee
     second_sync_committee = state.next_sync_committee

-    slot_at_end_of_current_period = spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH - 1
-    transition_to(spec, state, slot_at_end_of_current_period)
+    current_period = spec.get_current_epoch(state) // spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
+    next_period = current_period + 1
+    next_period_start_epoch = next_period * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
+    next_period_start_slot = next_period_start_epoch * spec.SLOTS_PER_EPOCH
+    end_slot_of_current_period = next_period_start_slot - 1
+    transition_to(spec, state, end_slot_of_current_period)

     # Ensure assignments have not changed:
     assert state.current_sync_committee == first_sync_committee

@@ -36,7 +42,42 @@ def test_sync_committees_progress(spec, state):

     # Can compute the third committee having computed final balances in the last epoch
     # of this `EPOCHS_PER_SYNC_COMMITTEE_PERIOD`
-    third_sync_committee = spec.get_sync_committee(state, 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD)
+    current_epoch = spec.get_current_epoch(state)
+    third_sync_committee = spec.get_sync_committee(state, current_epoch + 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD)

     assert state.current_sync_committee == second_sync_committee
     assert state.next_sync_committee == third_sync_committee
+
+
+@with_all_phases_except([PHASE0])
+@spec_state_test
+@always_bls
+@with_configs([MINIMAL], reason="too slow")
+def test_sync_committees_progress_genesis(spec, state):
+    # Genesis epoch period has an exceptional case
+    assert spec.get_current_epoch(state) == spec.GENESIS_EPOCH
+
+    yield from run_sync_committees_progress_test(spec, state)
+
+
+@with_all_phases_except([PHASE0])
+@spec_state_test
+@always_bls
+@with_configs([MINIMAL], reason="too slow")
+def test_sync_committees_progress_not_genesis(spec, state):
+    # Transition out of the genesis epoch period to test non-exceptional case
+    assert spec.get_current_epoch(state) == spec.GENESIS_EPOCH
+    slot_in_next_period = state.slot + spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
+    transition_to(spec, state, slot_in_next_period)
+
+    yield from run_sync_committees_progress_test(spec, state)
+
+
+@with_all_phases_except([PHASE0])
+@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.EJECTION_BALANCE)
+@spec_test
+@single_phase
+@always_bls
+@with_configs([MINIMAL], reason="too slow")
+def test_sync_committees_progress_misc_balances(spec, state):
+    yield from run_sync_committees_progress_test(spec, state)

@@ -42,9 +42,10 @@ def transition_to_slot_via_block(spec, state, slot):

 def transition_to_valid_shard_slot(spec, state):
     """
-    Transition to slot `spec.SHARDING_FORK_SLOT + 1` and fork at `spec.SHARDING_FORK_SLOT`.
+    Transition to slot `compute_epoch_at_slot(spec.SHARDING_FORK_EPOCH) + 1`
+    and fork at `compute_epoch_at_slot(spec.SHARDING_FORK_EPOCH)`.
     """
-    transition_to(spec, state, spec.SHARDING_FORK_SLOT)
+    transition_to(spec, state, spec.compute_epoch_at_slot(spec.SHARDING_FORK_EPOCH))
     next_slot(spec, state)