diff --git a/docs/docs/new-feature.md b/docs/docs/new-feature.md index 5e6180329..b987e2e97 100644 --- a/docs/docs/new-feature.md +++ b/docs/docs/new-feature.md @@ -53,17 +53,17 @@ For example, if the latest fork is Capella, use `./specs/capella` content as you ### 4. Add `fork.md` You can refer to the previous fork's `fork.md` file. ### 5. Make it executable -- Update [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name. -- Update [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py): - - Add a new `SpecBuilder` with the new feature name constant. e.g., `EIP9999SpecBuilder` - - Add the new `SpecBuilder` to `spec_builders` list. - - Add the path of the new markdown files in `finalize_options` function. +- Update Pyspec [`constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/tests/core/pyspec/eth2spec/test/helpers/constants.py) with the new feature name. +- Update helpers for [`setup.py`](https://github.com/ethereum/consensus-specs/blob/dev/setup.py) for building the spec: + - Update [`pysetup/constants.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/constants.py) with the new feature name, as defined in the Pyspec `constants.py`. + - Update [`pysetup/spec_builders/__init__.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/spec_builders/__init__.py). Implement a new `SpecBuilder` in `pysetup/spec_builders/<new-feature-name>.py` with the new feature name. e.g., `EIP9999SpecBuilder`. Append it to the `spec_builders` list. + - Update [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py): add the path of the new markdown files in `get_md_doc_paths` function if needed. ## B: Make it executable for pytest and test generator -### 1. Add `light-client/*` docs if you updated the content of `BeaconBlock` +### 1. 
[Optional] Add `light-client/*` docs if you updated the content of `BeaconBlock` - You can refer to the previous fork's `light-client/*` file. -- Add the path of the new markdown files in `setup.py`'s `finalize_options` function. +- Add the path of the new markdown files in [`pysetup/md_doc_paths.py`](https://github.com/ethereum/consensus-specs/blob/dev/pysetup/md_doc_paths.py)'s `get_md_doc_paths` function. ### 2. Add the mainnet and minimal presets and update the configs - Add presets: `presets/mainnet/.yaml` and `presets/minimal/.yaml` diff --git a/pysetup/__init__.py b/pysetup/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pysetup/constants.py b/pysetup/constants.py new file mode 100644 index 000000000..1f29117f7 --- /dev/null +++ b/pysetup/constants.py @@ -0,0 +1,32 @@ +# Definitions in context.py +PHASE0 = 'phase0' +ALTAIR = 'altair' +BELLATRIX = 'bellatrix' +CAPELLA = 'capella' +DENEB = 'deneb' +EIP6110 = 'eip6110' +WHISK = 'whisk' + + +# The helper functions that are used when defining constants +CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = ''' +def ceillog2(x: int) -> uint64: + if x < 1: + raise ValueError(f"ceillog2 accepts only positive values, x={x}") + return uint64((x - 1).bit_length()) + + +def floorlog2(x: int) -> uint64: + if x < 1: + raise ValueError(f"floorlog2 accepts only positive values, x={x}") + return uint64(x.bit_length() - 1) +''' + + +OPTIMIZED_BLS_AGGREGATE_PUBKEYS = ''' +def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey: + return bls.AggregatePKs(pubkeys) +''' + + +ETH2_SPEC_COMMENT_PREFIX = "eth2spec:" diff --git a/pysetup/helpers.py b/pysetup/helpers.py new file mode 100644 index 000000000..692aaa0d7 --- /dev/null +++ b/pysetup/helpers.py @@ -0,0 +1,253 @@ +import re +from typing import TypeVar, Dict +import textwrap +from functools import reduce + +from .constants import CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS +from .spec_builders import spec_builders +from .md_doc_paths import 
PREVIOUS_FORK_OF +from .typing import ( + ProtocolDefinition, + SpecObject, + VariableDefinition, +) + + +def collect_prev_forks(fork: str) -> list[str]: + forks = [fork] + while True: + fork = PREVIOUS_FORK_OF[fork] + if fork is None: + return forks + forks.append(fork) + + +def is_byte_vector(value: str) -> bool: + return value.startswith(('ByteVector')) + + +def make_function_abstract(protocol_def: ProtocolDefinition, key: str): + function = protocol_def.functions[key].split('"""') + protocol_def.functions[key] = function[0] + "..." + + +def objects_to_spec(preset_name: str, + spec_object: SpecObject, + fork: str, + ordered_class_objects: Dict[str, str]) -> str: + """ + Given all the objects that constitute a spec, combine them into a single pyfile. + """ + new_type_definitions = ( + '\n\n'.join( + [ + f"class {key}({value}):\n pass\n" if not is_byte_vector(value) else f"class {key}({value}): # type: ignore\n pass\n" + for key, value in spec_object.custom_types.items() + ] + ) + ) + + # Collect builders with the reversed previous forks + # e.g. 
`[bellatrix, altair, phase0]` -> `[phase0, altair, bellatrix]` + builders = [spec_builders[fork] for fork in collect_prev_forks(fork)[::-1]] + + def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str: + abstract_functions = ["verify_and_notify_new_payload"] + for key in protocol_def.functions.keys(): + if key in abstract_functions: + make_function_abstract(protocol_def, key) + + protocol = f"class {protocol_name}(Protocol):" + for fn_source in protocol_def.functions.values(): + fn_source = fn_source.replace("self: "+protocol_name, "self") + protocol += "\n\n" + textwrap.indent(fn_source, " ") + return protocol + + protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items()) + for k in list(spec_object.functions): + if k in [ + "ceillog2", + "floorlog2", + "compute_merkle_proof_for_block_body", + "compute_merkle_proof_for_state", + ]: + del spec_object.functions[k] + + functions = reduce(lambda fns, builder: builder.implement_optimizations(fns), builders, spec_object.functions) + functions_spec = '\n\n\n'.join(functions.values()) + + # Access global dict of config vars for runtime configurables + for name in spec_object.config_vars.keys(): + functions_spec = re.sub(r"\b%s\b" % name, 'config.' 
+ name, functions_spec) + + def format_config_var(name: str, vardef: VariableDefinition) -> str: + if vardef.type_name is None: + out = f'{name}={vardef.value},' + else: + out = f'{name}={vardef.type_name}({vardef.value}),' + if vardef.comment is not None: + out += f' # {vardef.comment}' + return out + + config_spec = 'class Configuration(NamedTuple):\n' + config_spec += ' PRESET_BASE: str\n' + config_spec += '\n'.join(f' {k}: {v.type_name if v.type_name is not None else "int"}' + for k, v in spec_object.config_vars.items()) + config_spec += '\n\n\nconfig = Configuration(\n' + config_spec += f' PRESET_BASE="{preset_name}",\n' + config_spec += '\n'.join(' ' + format_config_var(k, v) for k, v in spec_object.config_vars.items()) + config_spec += '\n)\n' + + def format_constant(name: str, vardef: VariableDefinition) -> str: + if vardef.type_name is None: + if vardef.type_hint is None: + out = f'{name} = {vardef.value}' + else: + out = f'{name}: {vardef.type_hint} = {vardef.value}' + else: + out = f'{name} = {vardef.type_name}({vardef.value})' + if vardef.comment is not None: + out += f' # {vardef.comment}' + return out + + # Merge all constant objects + hardcoded_ssz_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_ssz_dep_constants()}, builders, {}) + hardcoded_custom_type_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_custom_type_dep_constants(spec_object)}, builders, {}) + # Concatenate all strings + imports = reduce(lambda txt, builder: (txt + "\n\n" + builder.imports(preset_name) ).strip("\n"), builders, "") + preparations = reduce(lambda txt, builder: (txt + "\n\n" + builder.preparations() ).strip("\n"), builders, "") + sundry_functions = reduce(lambda txt, builder: (txt + "\n\n" + builder.sundry_functions() ).strip("\n"), builders, "") + # Keep engine from the most recent fork + execution_engine_cls = reduce(lambda txt, builder: builder.execution_engine_cls() or txt, builders, "") + + constant_vars_spec = '# 
Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items()) + preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items()) + ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values()) + ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_ssz_dep_constants[x]), hardcoded_ssz_dep_constants)) + ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), hardcoded_ssz_dep_constants)) + custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_custom_type_dep_constants[x]), hardcoded_custom_type_dep_constants)) + spec_strs = [ + imports, + preparations, + f"fork = \'{fork}\'\n", + # The constants that some SSZ containers require. Need to be defined before `new_type_definitions` + custom_type_dep_constants, + new_type_definitions, + CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS, + # The constants that some SSZ containers require. Need to be defined before `constants_spec` + ssz_dep_constants, + constant_vars_spec, + preset_vars_spec, + config_spec, + ordered_class_objects_spec, + protocols_spec, + functions_spec, + sundry_functions, + execution_engine_cls, + # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are + # as same as the spec definition. 
+ ssz_dep_constants_verification, + ] + return "\n\n\n".join([str.strip("\n") for str in spec_strs if str]) + "\n" + + +def combine_protocols(old_protocols: Dict[str, ProtocolDefinition], + new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]: + for key, value in new_protocols.items(): + if key not in old_protocols: + old_protocols[key] = value + else: + functions = combine_dicts(old_protocols[key].functions, value.functions) + old_protocols[key] = ProtocolDefinition(functions=functions) + return old_protocols + + +T = TypeVar('T') + + +def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]: + return {**old_dict, **new_dict} + + +ignored_dependencies = [ + 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', + 'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', + 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', + 'bytes', 'byte', 'ByteList', 'ByteVector', + 'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set', + 'Optional', 'Sequence', +] + + +def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None: + """ + Determines which SSZ Object is dependent on which other and orders them appropriately + """ + items = list(objects.items()) + for key, value in items: + dependencies = [] + for line in value.split('\n'): + if not re.match(r'\s+\w+: .+', line): + continue # skip whitespace etc. 
+ line = line[line.index(':') + 1:] # strip of field name + if '#' in line: + line = line[:line.index('#')] # strip of comment + dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies + dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants + dependencies = filter(lambda x: x not in ignored_dependencies, dependencies) + dependencies = filter(lambda x: x not in custom_types, dependencies) + for dep in dependencies: + key_list = list(objects.keys()) + for item in [dep, key] + key_list[key_list.index(dep)+1:]: + objects[item] = objects.pop(item) + + +def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: + """ + Takes in old spec and new spec ssz objects, combines them, + and returns the newer versions of the objects in dependency order. + """ + for key, value in new_objects.items(): + old_objects[key] = value + return old_objects + + +def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: + """ + Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
+ """ + protocols = combine_protocols(spec0.protocols, spec1.protocols) + functions = combine_dicts(spec0.functions, spec1.functions) + custom_types = combine_dicts(spec0.custom_types, spec1.custom_types) + constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars) + preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars) + config_vars = combine_dicts(spec0.config_vars, spec1.config_vars) + ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants) + ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types) + dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses) + return SpecObject( + functions=functions, + protocols=protocols, + custom_types=custom_types, + constant_vars=constant_vars, + preset_vars=preset_vars, + config_vars=config_vars, + ssz_dep_constants=ssz_dep_constants, + ssz_objects=ssz_objects, + dataclasses=dataclasses, + ) + + +def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]: + """ + Parses a dict of basic str/int/list types into a dict for insertion into the spec code. + """ + out: Dict[str, str] = dict() + for k, v in conf.items(): + if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'): + # Represent byte data with string, to avoid misinterpretation as big-endian int. + # Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer. 
+ out[k] = f"'{v}'" + else: + out[k] = str(int(v)) + return out diff --git a/pysetup/md_doc_paths.py b/pysetup/md_doc_paths.py new file mode 100644 index 000000000..d25022324 --- /dev/null +++ b/pysetup/md_doc_paths.py @@ -0,0 +1,76 @@ +import os + +from .constants import ( + PHASE0, + ALTAIR, + BELLATRIX, + CAPELLA, + DENEB, + EIP6110, + WHISK, +) + + +PREVIOUS_FORK_OF = { + PHASE0: None, + ALTAIR: PHASE0, + BELLATRIX: ALTAIR, + CAPELLA: BELLATRIX, + DENEB: CAPELLA, + EIP6110: DENEB, + WHISK: CAPELLA, +} + +ALL_FORKS = list(PREVIOUS_FORK_OF.keys()) + +IGNORE_SPEC_FILES = [ + "specs/phase0/deposit-contract.md" +] + +EXTRA_SPEC_FILES = { + BELLATRIX: "sync/optimistic.md" +} + + +def is_post_fork(a, b) -> bool: + """ + Returns true if fork a is after b, or if a == b + """ + if a == b: + return True + + prev_fork = PREVIOUS_FORK_OF[a] + if prev_fork == b: + return True + elif prev_fork == None: + return False + else: + return is_post_fork(prev_fork, b) + + +def get_fork_directory(fork): + dir1 = f'specs/{fork}' + if os.path.exists(dir1): + return dir1 + dir2 = f'specs/_features/{fork}' + if os.path.exists(dir2): + return dir2 + raise FileNotFoundError(f"No directory found for fork: {fork}") + + +def get_md_doc_paths(spec_fork: str) -> str: + md_doc_paths = "" + + for fork in ALL_FORKS: + if is_post_fork(spec_fork, fork): + # Append all files in fork directory recursively + for root, dirs, files in os.walk(get_fork_directory(fork)): + for filename in files: + filepath = os.path.join(root, filename) + if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES: + md_doc_paths += filepath + "\n" + # Append extra files if any + if fork in EXTRA_SPEC_FILES: + md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n" + + return md_doc_paths diff --git a/pysetup/spec_builders/__init__.py b/pysetup/spec_builders/__init__.py new file mode 100644 index 000000000..123939b98 --- /dev/null +++ b/pysetup/spec_builders/__init__.py @@ -0,0 +1,16 @@ +from .phase0 import Phase0SpecBuilder 
+from .altair import AltairSpecBuilder +from .bellatrix import BellatrixSpecBuilder +from .capella import CapellaSpecBuilder +from .deneb import DenebSpecBuilder +from .eip6110 import EIP6110SpecBuilder +from .whisk import WhiskSpecBuilder + + +spec_builders = { + builder.fork: builder + for builder in ( + Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder, + EIP6110SpecBuilder, WhiskSpecBuilder, + ) +} diff --git a/pysetup/spec_builders/altair.py b/pysetup/spec_builders/altair.py new file mode 100644 index 000000000..4b35380de --- /dev/null +++ b/pysetup/spec_builders/altair.py @@ -0,0 +1,54 @@ +from typing import Dict + +from .base import BaseSpecBuilder +from ..constants import ALTAIR, OPTIMIZED_BLS_AGGREGATE_PUBKEYS + + +class AltairSpecBuilder(BaseSpecBuilder): + fork: str = ALTAIR + + @classmethod + def imports(cls, preset_name: str) -> str: + return f''' +from typing import NewType, Union as PyUnion + +from eth2spec.phase0 import {preset_name} as phase0 +from eth2spec.test.helpers.merkle import build_proof +from eth2spec.utils.ssz.ssz_typing import Path +''' + + @classmethod + def preparations(cls): + return ''' +SSZVariableName = str +GeneralizedIndex = NewType('GeneralizedIndex', int) +''' + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex: + ssz_path = Path(ssz_class) + for item in path: + ssz_path = ssz_path / item + return GeneralizedIndex(ssz_path.gindex()) + + +def compute_merkle_proof_for_state(state: BeaconState, + index: GeneralizedIndex) -> Sequence[Bytes32]: + return build_proof(state.get_backing(), index)''' + + + @classmethod + def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: + return { + 'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)', + 'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)', + 'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)', + } + + @classmethod 
+ def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: + if "eth_aggregate_pubkeys" in functions: + functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip() + return functions diff --git a/pysetup/spec_builders/base.py b/pysetup/spec_builders/base.py new file mode 100644 index 000000000..44743682a --- /dev/null +++ b/pysetup/spec_builders/base.py @@ -0,0 +1,52 @@ +from abc import ABC, abstractmethod +from typing import Sequence, Dict +from pathlib import Path + +class BaseSpecBuilder(ABC): + @property + @abstractmethod + def fork(self) -> str: + raise NotImplementedError() + + @classmethod + def imports(cls, preset_name: str) -> str: + """ + Import objects from other libraries. + """ + return "" + + @classmethod + def preparations(cls) -> str: + """ + Define special types/constants for building pyspec or call functions. + """ + return "" + + @classmethod + def sundry_functions(cls) -> str: + """ + The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance. + """ + return "" + + @classmethod + def execution_engine_cls(cls) -> str: + return "" + + @classmethod + def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: + """ + The constants that are required for SSZ objects. + """ + return {} + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO + """ + The constants that are required for custom types. 
+ """ + return {} + + @classmethod + def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: + return functions diff --git a/pysetup/spec_builders/bellatrix.py b/pysetup/spec_builders/bellatrix.py new file mode 100644 index 000000000..c5753d7df --- /dev/null +++ b/pysetup/spec_builders/bellatrix.py @@ -0,0 +1,66 @@ +from .base import BaseSpecBuilder +from ..constants import BELLATRIX + +class BellatrixSpecBuilder(BaseSpecBuilder): + fork: str = BELLATRIX + + @classmethod + def imports(cls, preset_name: str): + return f''' +from typing import Protocol +from eth2spec.altair import {preset_name} as altair +from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector +''' + + @classmethod + def sundry_functions(cls) -> str: + return """ +ExecutionState = Any + + +def get_pow_block(hash: Bytes32) -> Optional[PowBlock]: + return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0)) + + +def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState: + pass + + +def get_pow_chain_head() -> PowBlock: + pass""" + + @classmethod + def execution_engine_cls(cls) -> str: + return """ +class NoopExecutionEngine(ExecutionEngine): + + def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: + return True + + def notify_forkchoice_updated(self: ExecutionEngine, + head_block_hash: Hash32, + safe_block_hash: Hash32, + finalized_block_hash: Hash32, + payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: + pass + + def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: + # pylint: disable=unused-argument + raise NotImplementedError("no default block production") + + def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: + return True + + def verify_and_notify_new_payload(self: ExecutionEngine, + new_payload_request: NewPayloadRequest) -> bool: + return True + + +EXECUTION_ENGINE = 
NoopExecutionEngine()""" + + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + return { + 'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value, + } diff --git a/pysetup/spec_builders/capella.py b/pysetup/spec_builders/capella.py new file mode 100644 index 000000000..03b619b66 --- /dev/null +++ b/pysetup/spec_builders/capella.py @@ -0,0 +1,29 @@ +from typing import Dict + +from .base import BaseSpecBuilder +from ..constants import CAPELLA + + +class CapellaSpecBuilder(BaseSpecBuilder): + fork: str = CAPELLA + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.bellatrix import {preset_name} as bellatrix +''' + + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def compute_merkle_proof_for_block_body(body: BeaconBlockBody, + index: GeneralizedIndex) -> Sequence[Bytes32]: + return build_proof(body.get_backing(), index)''' + + + @classmethod + def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: + return { + 'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)', + } diff --git a/pysetup/spec_builders/deneb.py b/pysetup/spec_builders/deneb.py new file mode 100644 index 000000000..b4e180c2a --- /dev/null +++ b/pysetup/spec_builders/deneb.py @@ -0,0 +1,71 @@ +from .base import BaseSpecBuilder +from ..constants import DENEB + + +class DenebSpecBuilder(BaseSpecBuilder): + fork: str = DENEB + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.capella import {preset_name} as capella +''' + + + @classmethod + def preparations(cls): + return ''' +T = TypeVar('T') # For generic function +''' + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]: + # pylint: disable=unused-argument + return ("TEST", "TEST")''' + + @classmethod + def execution_engine_cls(cls) -> str: + return """ +class 
NoopExecutionEngine(ExecutionEngine): + + def notify_new_payload(self: ExecutionEngine, + execution_payload: ExecutionPayload, + parent_beacon_block_root: Root) -> bool: + return True + + def notify_forkchoice_updated(self: ExecutionEngine, + head_block_hash: Hash32, + safe_block_hash: Hash32, + finalized_block_hash: Hash32, + payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: + pass + + def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: + # pylint: disable=unused-argument + raise NotImplementedError("no default block production") + + def is_valid_block_hash(self: ExecutionEngine, + execution_payload: ExecutionPayload, + parent_beacon_block_root: Root) -> bool: + return True + + def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> bool: + return True + + def verify_and_notify_new_payload(self: ExecutionEngine, + new_payload_request: NewPayloadRequest) -> bool: + return True + + +EXECUTION_ENGINE = NoopExecutionEngine()""" + + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + return { + 'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value, + 'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value, + 'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value, + } diff --git a/pysetup/spec_builders/eip6110.py b/pysetup/spec_builders/eip6110.py new file mode 100644 index 000000000..e0fd253f1 --- /dev/null +++ b/pysetup/spec_builders/eip6110.py @@ -0,0 +1,12 @@ +from .base import BaseSpecBuilder +from ..constants import EIP6110 + + +class EIP6110SpecBuilder(BaseSpecBuilder): + fork: str = EIP6110 + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.deneb import {preset_name} as deneb +''' diff --git a/pysetup/spec_builders/phase0.py b/pysetup/spec_builders/phase0.py new file mode 100644 index 000000000..6b3d82617 --- /dev/null +++ 
b/pysetup/spec_builders/phase0.py @@ -0,0 +1,105 @@ +from .base import BaseSpecBuilder +from ..constants import PHASE0 + + +class Phase0SpecBuilder(BaseSpecBuilder): + fork: str = PHASE0 + + @classmethod + def imports(cls, preset_name: str) -> str: + return '''from lru import LRU +from dataclasses import ( + dataclass, + field, +) +from typing import ( + Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final +) + +from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes +from eth2spec.utils.ssz.ssz_typing import ( + View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256, + Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist) +from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401 +from eth2spec.utils import bls +from eth2spec.utils.hash_function import hash +''' + + @classmethod + def preparations(cls) -> str: + return ''' +SSZObject = TypeVar('SSZObject', bound=View) +''' + + @classmethod + def sundry_functions(cls) -> str: + return ''' +def get_eth1_data(block: Eth1Block) -> Eth1Data: + """ + A stub function return mocking Eth1Data. 
+ """ + return Eth1Data( + deposit_root=block.deposit_root, + deposit_count=block.deposit_count, + block_hash=hash_tree_root(block)) + + +def cache_this(key_fn, value_fn, lru_size): # type: ignore + cache_dict = LRU(size=lru_size) + + def wrapper(*args, **kw): # type: ignore + key = key_fn(*args, **kw) + nonlocal cache_dict + if key not in cache_dict: + cache_dict[key] = value_fn(*args, **kw) + return cache_dict[key] + return wrapper + + +_compute_shuffled_index = compute_shuffled_index +compute_shuffled_index = cache_this( + lambda index, index_count, seed: (index, index_count, seed), + _compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3) + +_get_total_active_balance = get_total_active_balance +get_total_active_balance = cache_this( + lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)), + _get_total_active_balance, lru_size=10) + +_get_base_reward = get_base_reward +get_base_reward = cache_this( + lambda state, index: (state.validators.hash_tree_root(), state.slot, index), + _get_base_reward, lru_size=2048) + +_get_committee_count_per_slot = get_committee_count_per_slot +get_committee_count_per_slot = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + _get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3) + +_get_active_validator_indices = get_active_validator_indices +get_active_validator_indices = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + _get_active_validator_indices, lru_size=3) + +_get_beacon_committee = get_beacon_committee +get_beacon_committee = cache_this( + lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), + _get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3) + +_get_matching_target_attestations = get_matching_target_attestations +get_matching_target_attestations = cache_this( + lambda state, epoch: (state.hash_tree_root(), epoch), + 
_get_matching_target_attestations, lru_size=10) + +_get_matching_head_attestations = get_matching_head_attestations +get_matching_head_attestations = cache_this( + lambda state, epoch: (state.hash_tree_root(), epoch), + _get_matching_head_attestations, lru_size=10) + +_get_attesting_indices = get_attesting_indices +get_attesting_indices = cache_this( + lambda state, data, bits: ( + state.randao_mixes.hash_tree_root(), + state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root() + ), + _get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)''' diff --git a/pysetup/spec_builders/whisk.py b/pysetup/spec_builders/whisk.py new file mode 100644 index 000000000..e9cd4a67d --- /dev/null +++ b/pysetup/spec_builders/whisk.py @@ -0,0 +1,20 @@ +from .base import BaseSpecBuilder +from ..constants import WHISK + + +class WhiskSpecBuilder(BaseSpecBuilder): + fork: str = WHISK + + @classmethod + def imports(cls, preset_name: str): + return f''' +from eth2spec.capella import {preset_name} as capella +''' + + @classmethod + def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: + # Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof` + return { + 'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value, + 'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value, + } diff --git a/pysetup/typing.py b/pysetup/typing.py new file mode 100644 index 000000000..56f0cf3b1 --- /dev/null +++ b/pysetup/typing.py @@ -0,0 +1,32 @@ +from pathlib import Path +from typing import Dict, NamedTuple, Optional, List + + +class ProtocolDefinition(NamedTuple): + # just function definitions currently. May expand with configuration vars in future. + functions: Dict[str, str] + + +class VariableDefinition(NamedTuple): + type_name: Optional[str] + value: str + comment: Optional[str] # e.g. 
"noqa: E501" + type_hint: Optional[str] # e.g., "Final" + + +class SpecObject(NamedTuple): + functions: Dict[str, str] + protocols: Dict[str, ProtocolDefinition] + custom_types: Dict[str, str] + constant_vars: Dict[str, VariableDefinition] + preset_vars: Dict[str, VariableDefinition] + config_vars: Dict[str, VariableDefinition] + ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects + ssz_objects: Dict[str, str] + dataclasses: Dict[str, str] + + +class BuildTarget(NamedTuple): + name: str + preset_paths: List[Path] + config_path: Path diff --git a/setup.py b/setup.py index 550908802..efa57ac2b 100644 --- a/setup.py +++ b/setup.py @@ -4,11 +4,8 @@ from distutils import dir_util from distutils.util import convert_path from pathlib import Path import os -import re import string -import textwrap -from typing import Dict, NamedTuple, List, Sequence, Optional, TypeVar, Tuple -from abc import ABC, abstractmethod +from typing import Dict, List, Sequence, Optional, Tuple import ast import subprocess import sys @@ -17,6 +14,27 @@ from collections import OrderedDict import json from functools import reduce +from pysetup.constants import ( + # code names + PHASE0, + # misc + ETH2_SPEC_COMMENT_PREFIX, +) +from pysetup.spec_builders import spec_builders +from pysetup.typing import ( + BuildTarget, + ProtocolDefinition, + SpecObject, + VariableDefinition, +) +from pysetup.helpers import ( + combine_spec_objects, + dependency_order_class_objects, + objects_to_spec, + parse_config_vars, +) +from pysetup.md_doc_paths import get_md_doc_paths + # NOTE: have to programmatically include third-party dependencies in `setup.py`. 
def installPackage(package: str): @@ -42,112 +60,6 @@ from marko.ext.gfm import gfm from marko.ext.gfm.elements import Table -# Definitions in context.py -PHASE0 = 'phase0' -ALTAIR = 'altair' -BELLATRIX = 'bellatrix' -CAPELLA = 'capella' -DENEB = 'deneb' -EIP6110 = 'eip6110' -WHISK = 'whisk' - -PREVIOUS_FORK_OF = { - PHASE0: None, - ALTAIR: PHASE0, - BELLATRIX: ALTAIR, - CAPELLA: BELLATRIX, - DENEB: CAPELLA, - EIP6110: DENEB, - WHISK: CAPELLA, -} - -ALL_FORKS = list(PREVIOUS_FORK_OF.keys()) - -IGNORE_SPEC_FILES = [ - "specs/phase0/deposit-contract.md" -] - -EXTRA_SPEC_FILES = { - BELLATRIX: "sync/optimistic.md" -} - -# The helper functions that are used when defining constants -CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS = ''' -def ceillog2(x: int) -> uint64: - if x < 1: - raise ValueError(f"ceillog2 accepts only positive values, x={x}") - return uint64((x - 1).bit_length()) - - -def floorlog2(x: int) -> uint64: - if x < 1: - raise ValueError(f"floorlog2 accepts only positive values, x={x}") - return uint64(x.bit_length() - 1) -''' - - -OPTIMIZED_BLS_AGGREGATE_PUBKEYS = ''' -def eth_aggregate_pubkeys(pubkeys: Sequence[BLSPubkey]) -> BLSPubkey: - return bls.AggregatePKs(pubkeys) -''' - - -class ProtocolDefinition(NamedTuple): - # just function definitions currently. May expand with configuration vars in future. - functions: Dict[str, str] - - -class VariableDefinition(NamedTuple): - type_name: Optional[str] - value: str - comment: Optional[str] # e.g. 
"noqa: E501" - type_hint: Optional[str] # e.g., "Final" - - -class SpecObject(NamedTuple): - functions: Dict[str, str] - protocols: Dict[str, ProtocolDefinition] - custom_types: Dict[str, str] - constant_vars: Dict[str, VariableDefinition] - preset_vars: Dict[str, VariableDefinition] - config_vars: Dict[str, VariableDefinition] - ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects - ssz_objects: Dict[str, str] - dataclasses: Dict[str, str] - - -def is_post_fork(a, b) -> bool: - """ - Returns true if fork a is after b, or if a == b - """ - if a == b: - return True - - prev_fork = PREVIOUS_FORK_OF[a] - if prev_fork == b: - return True - elif prev_fork == None: - return False - else: - return is_post_fork(prev_fork, b) - -def collect_prev_forks(fork: str) -> List[str]: - forks = [fork] - while True: - fork = PREVIOUS_FORK_OF[fork] - if fork is None: - return forks - forks.append(fork) - -def get_fork_directory(fork): - dir1 = f'specs/{fork}' - if os.path.exists(dir1): - return dir1 - dir2 = f'specs/_features/{fork}' - if os.path.exists(dir2): - return dir2 - raise FileNotFoundError(f"No directory found for fork: {fork}") - def _get_name_from_heading(heading: Heading) -> Optional[str]: last_child = heading.children[-1] if isinstance(last_child, CodeSpan): @@ -212,13 +124,12 @@ def _load_kzg_trusted_setups(preset_name): return trusted_setup_G1, trusted_setup_G2, trusted_setup_G1_lagrange, roots_of_unity + ALL_KZG_SETUPS = { 'minimal': _load_kzg_trusted_setups('minimal'), 'mainnet': _load_kzg_trusted_setups('mainnet') } -ETH2_SPEC_COMMENT_PREFIX = "eth2spec:" - def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]: _, _, title = child._parse_info @@ -230,7 +141,7 @@ def _get_eth2_spec_comment(child: LinkRefDef) -> Optional[str]: return title[len(ETH2_SPEC_COMMENT_PREFIX):].strip() -def _parse_value(name: str, typed_value: str, type_hint: Optional[str]=None) -> VariableDefinition: +def _parse_value(name: str, typed_value: str, type_hint: 
Optional[str] = None) -> VariableDefinition: comment = None if name == "BLS12_381_Q": comment = "noqa: E501" @@ -359,659 +270,6 @@ def get_spec(file_name: Path, preset: Dict[str, str], config: Dict[str, str], pr ) -class SpecBuilder(ABC): - @property - @abstractmethod - def fork(self) -> str: - raise NotImplementedError() - - @classmethod - def imports(cls, preset_name: str) -> str: - """ - Import objects from other libraries. - """ - return "" - - @classmethod - def preparations(cls) -> str: - """ - Define special types/constants for building pyspec or call functions. - """ - return "" - - @classmethod - def sundry_functions(cls) -> str: - """ - The functions that are (1) defined abstractly in specs or (2) adjusted for getting better performance. - """ - return "" - - @classmethod - def execution_engine_cls(cls) -> str: - return "" - - @classmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - """ - The constants that are required for SSZ objects. - """ - return {} - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> Dict[str, str]: # TODO - """ - The constants that are required for custom types. 
- """ - return {} - - @classmethod - def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: - return functions - - @classmethod - def build_spec(cls, preset_name: str, - source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str: - return _build_spec(preset_name, cls.fork, source_files, preset_files, config_file) - - -# -# Phase0SpecBuilder -# -class Phase0SpecBuilder(SpecBuilder): - fork: str = PHASE0 - - @classmethod - def imports(cls, preset_name: str) -> str: - return '''from lru import LRU -from dataclasses import ( - dataclass, - field, -) -from typing import ( - Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar, NamedTuple, Final -) - -from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes -from eth2spec.utils.ssz.ssz_typing import ( - View, boolean, Container, List, Vector, uint8, uint32, uint64, uint256, - Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist) -from eth2spec.utils.ssz.ssz_typing import Bitvector # noqa: F401 -from eth2spec.utils import bls -from eth2spec.utils.hash_function import hash -''' - - @classmethod - def preparations(cls) -> str: - return ''' -SSZObject = TypeVar('SSZObject', bound=View) -''' - - @classmethod - def sundry_functions(cls) -> str: - return ''' -def get_eth1_data(block: Eth1Block) -> Eth1Data: - """ - A stub function return mocking Eth1Data. 
- """ - return Eth1Data( - deposit_root=block.deposit_root, - deposit_count=block.deposit_count, - block_hash=hash_tree_root(block)) - - -def cache_this(key_fn, value_fn, lru_size): # type: ignore - cache_dict = LRU(size=lru_size) - - def wrapper(*args, **kw): # type: ignore - key = key_fn(*args, **kw) - nonlocal cache_dict - if key not in cache_dict: - cache_dict[key] = value_fn(*args, **kw) - return cache_dict[key] - return wrapper - - -_compute_shuffled_index = compute_shuffled_index -compute_shuffled_index = cache_this( - lambda index, index_count, seed: (index, index_count, seed), - _compute_shuffled_index, lru_size=SLOTS_PER_EPOCH * 3) - -_get_total_active_balance = get_total_active_balance -get_total_active_balance = cache_this( - lambda state: (state.validators.hash_tree_root(), compute_epoch_at_slot(state.slot)), - _get_total_active_balance, lru_size=10) - -_get_base_reward = get_base_reward -get_base_reward = cache_this( - lambda state, index: (state.validators.hash_tree_root(), state.slot, index), - _get_base_reward, lru_size=2048) - -_get_committee_count_per_slot = get_committee_count_per_slot -get_committee_count_per_slot = cache_this( - lambda state, epoch: (state.validators.hash_tree_root(), epoch), - _get_committee_count_per_slot, lru_size=SLOTS_PER_EPOCH * 3) - -_get_active_validator_indices = get_active_validator_indices -get_active_validator_indices = cache_this( - lambda state, epoch: (state.validators.hash_tree_root(), epoch), - _get_active_validator_indices, lru_size=3) - -_get_beacon_committee = get_beacon_committee -get_beacon_committee = cache_this( - lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), - _get_beacon_committee, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3) - -_get_matching_target_attestations = get_matching_target_attestations -get_matching_target_attestations = cache_this( - lambda state, epoch: (state.hash_tree_root(), epoch), - 
_get_matching_target_attestations, lru_size=10) - -_get_matching_head_attestations = get_matching_head_attestations -get_matching_head_attestations = cache_this( - lambda state, epoch: (state.hash_tree_root(), epoch), - _get_matching_head_attestations, lru_size=10) - -_get_attesting_indices = get_attesting_indices -get_attesting_indices = cache_this( - lambda state, data, bits: ( - state.randao_mixes.hash_tree_root(), - state.validators.hash_tree_root(), data.hash_tree_root(), bits.hash_tree_root() - ), - _get_attesting_indices, lru_size=SLOTS_PER_EPOCH * MAX_COMMITTEES_PER_SLOT * 3)''' - - -# -# AltairSpecBuilder -# -class AltairSpecBuilder(SpecBuilder): - fork: str = ALTAIR - - @classmethod - def imports(cls, preset_name: str) -> str: - return f''' -from typing import NewType, Union as PyUnion - -from eth2spec.phase0 import {preset_name} as phase0 -from eth2spec.test.helpers.merkle import build_proof -from eth2spec.utils.ssz.ssz_typing import Path -''' - - @classmethod - def preparations(cls): - return ''' -SSZVariableName = str -GeneralizedIndex = NewType('GeneralizedIndex', int) -''' - - @classmethod - def sundry_functions(cls) -> str: - return ''' -def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex: - ssz_path = Path(ssz_class) - for item in path: - ssz_path = ssz_path / item - return GeneralizedIndex(ssz_path.gindex()) - - -def compute_merkle_proof_for_state(state: BeaconState, - index: GeneralizedIndex) -> Sequence[Bytes32]: - return build_proof(state.get_backing(), index)''' - - - @classmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - return { - 'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)', - 'CURRENT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)', - 'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(55)', - } - - @classmethod - def implement_optimizations(cls, functions: Dict[str, str]) -> Dict[str, str]: - if "eth_aggregate_pubkeys" in functions: - 
functions["eth_aggregate_pubkeys"] = OPTIMIZED_BLS_AGGREGATE_PUBKEYS.strip() - return functions - -# -# BellatrixSpecBuilder -# -class BellatrixSpecBuilder(SpecBuilder): - fork: str = BELLATRIX - - @classmethod - def imports(cls, preset_name: str): - return f''' -from typing import Protocol -from eth2spec.altair import {preset_name} as altair -from eth2spec.utils.ssz.ssz_typing import Bytes8, Bytes20, ByteList, ByteVector -''' - - @classmethod - def sundry_functions(cls) -> str: - return """ -ExecutionState = Any - - -def get_pow_block(hash: Bytes32) -> Optional[PowBlock]: - return PowBlock(block_hash=hash, parent_hash=Bytes32(), total_difficulty=uint256(0)) - - -def get_execution_state(_execution_state_root: Bytes32) -> ExecutionState: - pass - - -def get_pow_chain_head() -> PowBlock: - pass""" - - @classmethod - def execution_engine_cls(cls) -> str: - return """ -class NoopExecutionEngine(ExecutionEngine): - - def notify_new_payload(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: - return True - - def notify_forkchoice_updated(self: ExecutionEngine, - head_block_hash: Hash32, - safe_block_hash: Hash32, - finalized_block_hash: Hash32, - payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: - pass - - def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: - # pylint: disable=unused-argument - raise NotImplementedError("no default block production") - - def is_valid_block_hash(self: ExecutionEngine, execution_payload: ExecutionPayload) -> bool: - return True - - def verify_and_notify_new_payload(self: ExecutionEngine, - new_payload_request: NewPayloadRequest) -> bool: - return True - - -EXECUTION_ENGINE = NoopExecutionEngine()""" - - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: - return { - 'MAX_BYTES_PER_TRANSACTION': spec_object.preset_vars['MAX_BYTES_PER_TRANSACTION'].value, - } - - -# -# CapellaSpecBuilder -# -class CapellaSpecBuilder(SpecBuilder): - fork: 
str = CAPELLA - - @classmethod - def imports(cls, preset_name: str): - return f''' -from eth2spec.bellatrix import {preset_name} as bellatrix -''' - - - @classmethod - def sundry_functions(cls) -> str: - return ''' -def compute_merkle_proof_for_block_body(body: BeaconBlockBody, - index: GeneralizedIndex) -> Sequence[Bytes32]: - return build_proof(body.get_backing(), index)''' - - - @classmethod - def hardcoded_ssz_dep_constants(cls) -> Dict[str, str]: - return { - 'EXECUTION_PAYLOAD_INDEX': 'GeneralizedIndex(25)', - } - - -# -# DenebSpecBuilder -# -class DenebSpecBuilder(SpecBuilder): - fork: str = DENEB - - @classmethod - def imports(cls, preset_name: str): - return f''' -from eth2spec.capella import {preset_name} as capella -''' - - - @classmethod - def preparations(cls): - return ''' -T = TypeVar('T') # For generic function -''' - - @classmethod - def sundry_functions(cls) -> str: - return ''' -def retrieve_blobs_and_proofs(beacon_block_root: Root) -> PyUnion[Tuple[Blob, KZGProof], Tuple[str, str]]: - # pylint: disable=unused-argument - return ("TEST", "TEST")''' - - @classmethod - def execution_engine_cls(cls) -> str: - return """ -class NoopExecutionEngine(ExecutionEngine): - - def notify_new_payload(self: ExecutionEngine, - execution_payload: ExecutionPayload, - parent_beacon_block_root: Root) -> bool: - return True - - def notify_forkchoice_updated(self: ExecutionEngine, - head_block_hash: Hash32, - safe_block_hash: Hash32, - finalized_block_hash: Hash32, - payload_attributes: Optional[PayloadAttributes]) -> Optional[PayloadId]: - pass - - def get_payload(self: ExecutionEngine, payload_id: PayloadId) -> GetPayloadResponse: - # pylint: disable=unused-argument - raise NotImplementedError("no default block production") - - def is_valid_block_hash(self: ExecutionEngine, - execution_payload: ExecutionPayload, - parent_beacon_block_root: Root) -> bool: - return True - - def is_valid_versioned_hashes(self: ExecutionEngine, new_payload_request: NewPayloadRequest) -> 
bool: - return True - - def verify_and_notify_new_payload(self: ExecutionEngine, - new_payload_request: NewPayloadRequest) -> bool: - return True - - -EXECUTION_ENGINE = NoopExecutionEngine()""" - - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: - return { - 'BYTES_PER_FIELD_ELEMENT': spec_object.constant_vars['BYTES_PER_FIELD_ELEMENT'].value, - 'FIELD_ELEMENTS_PER_BLOB': spec_object.preset_vars['FIELD_ELEMENTS_PER_BLOB'].value, - 'MAX_BLOBS_PER_BLOCK': spec_object.preset_vars['MAX_BLOBS_PER_BLOCK'].value, - } - - -# -# EIP6110SpecBuilder -# -class EIP6110SpecBuilder(SpecBuilder): - fork: str = EIP6110 - - @classmethod - def imports(cls, preset_name: str): - return f''' -from eth2spec.deneb import {preset_name} as deneb -''' - -# -# WhiskSpecBuilder -# -class WhiskSpecBuilder(SpecBuilder): - fork: str = WHISK - - @classmethod - def imports(cls, preset_name: str): - return f''' -from eth2spec.capella import {preset_name} as capella -''' - - @classmethod - def hardcoded_custom_type_dep_constants(cls, spec_object) -> str: - # Necessary for custom types `WhiskShuffleProof` and `WhiskTrackerProof` - return { - 'WHISK_MAX_SHUFFLE_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_SHUFFLE_PROOF_SIZE'].value, - 'WHISK_MAX_OPENING_PROOF_SIZE': spec_object.preset_vars['WHISK_MAX_OPENING_PROOF_SIZE'].value, - } - - -spec_builders = { - builder.fork: builder - for builder in ( - Phase0SpecBuilder, AltairSpecBuilder, BellatrixSpecBuilder, CapellaSpecBuilder, DenebSpecBuilder, - EIP6110SpecBuilder, WhiskSpecBuilder, - ) -} - - -def is_byte_vector(value: str) -> bool: - return value.startswith(('ByteVector')) - - -def make_function_abstract(protocol_def: ProtocolDefinition, key: str): - function = protocol_def.functions[key].split('"""') - protocol_def.functions[key] = function[0] + "..." 
- - -def objects_to_spec(preset_name: str, - spec_object: SpecObject, - fork: str, - ordered_class_objects: Dict[str, str]) -> str: - """ - Given all the objects that constitute a spec, combine them into a single pyfile. - """ - new_type_definitions = ( - '\n\n'.join( - [ - f"class {key}({value}):\n pass\n" if not is_byte_vector(value) else f"class {key}({value}): # type: ignore\n pass\n" - for key, value in spec_object.custom_types.items() - ] - ) - ) - - # Collect builders with the reversed previous forks - # e.g. `[bellatrix, altair, phase0]` -> `[phase0, altair, bellatrix]` - builders = [spec_builders[fork] for fork in collect_prev_forks(fork)[::-1]] - - def format_protocol(protocol_name: str, protocol_def: ProtocolDefinition) -> str: - abstract_functions = ["verify_and_notify_new_payload"] - for key in protocol_def.functions.keys(): - if key in abstract_functions: - make_function_abstract(protocol_def, key) - - protocol = f"class {protocol_name}(Protocol):" - for fn_source in protocol_def.functions.values(): - fn_source = fn_source.replace("self: "+protocol_name, "self") - protocol += "\n\n" + textwrap.indent(fn_source, " ") - return protocol - - protocols_spec = '\n\n\n'.join(format_protocol(k, v) for k, v in spec_object.protocols.items()) - for k in list(spec_object.functions): - if k in [ - "ceillog2", - "floorlog2", - "compute_merkle_proof_for_block_body", - "compute_merkle_proof_for_state", - ]: - del spec_object.functions[k] - - functions = reduce(lambda fns, builder: builder.implement_optimizations(fns), builders, spec_object.functions) - functions_spec = '\n\n\n'.join(functions.values()) - - # Access global dict of config vars for runtime configurables - for name in spec_object.config_vars.keys(): - functions_spec = re.sub(r"\b%s\b" % name, 'config.' 
+ name, functions_spec) - - def format_config_var(name: str, vardef: VariableDefinition) -> str: - if vardef.type_name is None: - out = f'{name}={vardef.value},' - else: - out = f'{name}={vardef.type_name}({vardef.value}),' - if vardef.comment is not None: - out += f' # {vardef.comment}' - return out - - config_spec = 'class Configuration(NamedTuple):\n' - config_spec += ' PRESET_BASE: str\n' - config_spec += '\n'.join(f' {k}: {v.type_name if v.type_name is not None else "int"}' - for k, v in spec_object.config_vars.items()) - config_spec += '\n\n\nconfig = Configuration(\n' - config_spec += f' PRESET_BASE="{preset_name}",\n' - config_spec += '\n'.join(' ' + format_config_var(k, v) for k, v in spec_object.config_vars.items()) - config_spec += '\n)\n' - - def format_constant(name: str, vardef: VariableDefinition) -> str: - if vardef.type_name is None: - if vardef.type_hint is None: - out = f'{name} = {vardef.value}' - else: - out = f'{name}: {vardef.type_hint} = {vardef.value}' - else: - out = f'{name} = {vardef.type_name}({vardef.value})' - if vardef.comment is not None: - out += f' # {vardef.comment}' - return out - - # Merge all constant objects - hardcoded_ssz_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_ssz_dep_constants()}, builders, {}) - hardcoded_custom_type_dep_constants = reduce(lambda obj, builder: {**obj, **builder.hardcoded_custom_type_dep_constants(spec_object)}, builders, {}) - # Concatenate all strings - imports = reduce(lambda txt, builder: (txt + "\n\n" + builder.imports(preset_name) ).strip("\n"), builders, "") - preparations = reduce(lambda txt, builder: (txt + "\n\n" + builder.preparations() ).strip("\n"), builders, "") - sundry_functions = reduce(lambda txt, builder: (txt + "\n\n" + builder.sundry_functions() ).strip("\n"), builders, "") - # Keep engine from the most recent fork - execution_engine_cls = reduce(lambda txt, builder: builder.execution_engine_cls() or txt, builders, "") - - constant_vars_spec = '# 
Constant vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.constant_vars.items()) - preset_vars_spec = '# Preset vars\n' + '\n'.join(format_constant(k, v) for k, v in spec_object.preset_vars.items()) - ordered_class_objects_spec = '\n\n\n'.join(ordered_class_objects.values()) - ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_ssz_dep_constants[x]), hardcoded_ssz_dep_constants)) - ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), hardcoded_ssz_dep_constants)) - custom_type_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, hardcoded_custom_type_dep_constants[x]), hardcoded_custom_type_dep_constants)) - spec_strs = [ - imports, - preparations, - f"fork = \'{fork}\'\n", - # The constants that some SSZ containers require. Need to be defined before `new_type_definitions` - custom_type_dep_constants, - new_type_definitions, - CONSTANT_DEP_SUNDRY_CONSTANTS_FUNCTIONS, - # The constants that some SSZ containers require. Need to be defined before `constants_spec` - ssz_dep_constants, - constant_vars_spec, - preset_vars_spec, - config_spec, - ordered_class_objects_spec, - protocols_spec, - functions_spec, - sundry_functions, - execution_engine_cls, - # Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are - # as same as the spec definition. 
- ssz_dep_constants_verification, - ] - return "\n\n\n".join([str.strip("\n") for str in spec_strs if str]) + "\n" - - -def combine_protocols(old_protocols: Dict[str, ProtocolDefinition], - new_protocols: Dict[str, ProtocolDefinition]) -> Dict[str, ProtocolDefinition]: - for key, value in new_protocols.items(): - if key not in old_protocols: - old_protocols[key] = value - else: - functions = combine_dicts(old_protocols[key].functions, value.functions) - old_protocols[key] = ProtocolDefinition(functions=functions) - return old_protocols - - -T = TypeVar('T') - - -def combine_dicts(old_dict: Dict[str, T], new_dict: Dict[str, T]) -> Dict[str, T]: - return {**old_dict, **new_dict} - - -ignored_dependencies = [ - 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', - 'Bytes1', 'Bytes4', 'Bytes8', 'Bytes20', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', - 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', - 'bytes', 'byte', 'ByteList', 'ByteVector', - 'Dict', 'dict', 'field', 'ceillog2', 'floorlog2', 'Set', - 'Optional', 'Sequence', -] - - -def dependency_order_class_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None: - """ - Determines which SSZ Object is dependent on which other and orders them appropriately - """ - items = list(objects.items()) - for key, value in items: - dependencies = [] - for line in value.split('\n'): - if not re.match(r'\s+\w+: .+', line): - continue # skip whitespace etc. 
- line = line[line.index(':') + 1:] # strip of field name - if '#' in line: - line = line[:line.index('#')] # strip of comment - dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies - dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants - dependencies = filter(lambda x: x not in ignored_dependencies, dependencies) - dependencies = filter(lambda x: x not in custom_types, dependencies) - for dep in dependencies: - key_list = list(objects.keys()) - for item in [dep, key] + key_list[key_list.index(dep)+1:]: - objects[item] = objects.pop(item) - -def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: - """ - Takes in old spec and new spec ssz objects, combines them, - and returns the newer versions of the objects in dependency order. - """ - for key, value in new_objects.items(): - old_objects[key] = value - return old_objects - - -def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: - """ - Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
- """ - protocols = combine_protocols(spec0.protocols, spec1.protocols) - functions = combine_dicts(spec0.functions, spec1.functions) - custom_types = combine_dicts(spec0.custom_types, spec1.custom_types) - constant_vars = combine_dicts(spec0.constant_vars, spec1.constant_vars) - preset_vars = combine_dicts(spec0.preset_vars, spec1.preset_vars) - config_vars = combine_dicts(spec0.config_vars, spec1.config_vars) - ssz_dep_constants = combine_dicts(spec0.ssz_dep_constants, spec1.ssz_dep_constants) - ssz_objects = combine_ssz_objects(spec0.ssz_objects, spec1.ssz_objects, custom_types) - dataclasses = combine_dicts(spec0.dataclasses, spec1.dataclasses) - return SpecObject( - functions=functions, - protocols=protocols, - custom_types=custom_types, - constant_vars=constant_vars, - preset_vars=preset_vars, - config_vars=config_vars, - ssz_dep_constants=ssz_dep_constants, - ssz_objects=ssz_objects, - dataclasses=dataclasses, - ) - - -def parse_config_vars(conf: Dict[str, str]) -> Dict[str, str]: - """ - Parses a dict of basic str/int/list types into a dict for insertion into the spec code. - """ - out: Dict[str, str] = dict() - for k, v in conf.items(): - if isinstance(v, str) and (v.startswith("0x") or k == 'PRESET_BASE' or k == 'CONFIG_NAME'): - # Represent byte data with string, to avoid misinterpretation as big-endian int. - # Everything except PRESET_BASE and CONFIG_NAME is either byte data or an integer. - out[k] = f"'{v}'" - else: - out[k] = str(int(v)) - return out - - def load_preset(preset_files: Sequence[Path]) -> Dict[str, str]: """ Loads the a directory of preset files, merges the result into one preset. 
@@ -1039,8 +297,11 @@ def load_config(config_path: Path) -> Dict[str, str]: return parse_config_vars(config_data) -def _build_spec(preset_name: str, fork: str, - source_files: Sequence[Path], preset_files: Sequence[Path], config_file: Path) -> str: +def build_spec(fork: str, + preset_name: str, + source_files: Sequence[Path], + preset_files: Sequence[Path], + config_file: Path) -> str: preset = load_preset(preset_files) config = load_config(config_file) all_specs = [get_spec(spec, preset, config, preset_name) for spec in source_files] @@ -1060,12 +321,6 @@ def _build_spec(preset_name: str, fork: str, return objects_to_spec(preset_name, spec_object, fork, class_objects) -class BuildTarget(NamedTuple): - name: str - preset_paths: List[Path] - config_path: Path - - class PySpecCommand(Command): """Convert spec markdown files to a spec python file""" @@ -1102,20 +357,7 @@ class PySpecCommand(Command): if len(self.md_doc_paths) == 0: print("no paths were specified, using default markdown file paths for pyspec" " build (spec fork: %s)" % self.spec_fork) - self.md_doc_paths = "" - - for fork in ALL_FORKS: - if is_post_fork(self.spec_fork, fork): - # Append all files in fork directory recursively - for root, dirs, files in os.walk(get_fork_directory(fork)): - for filename in files: - filepath = os.path.join(root, filename) - if filepath.endswith('.md') and filepath not in IGNORE_SPEC_FILES: - self.md_doc_paths += filepath + "\n" - # Append extra files if any - if fork in EXTRA_SPEC_FILES: - self.md_doc_paths += EXTRA_SPEC_FILES[fork] + "\n" - + self.md_doc_paths = get_md_doc_paths(self.spec_fork) if len(self.md_doc_paths) == 0: raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork) @@ -1148,8 +390,13 @@ class PySpecCommand(Command): dir_util.mkpath(self.out_dir) for (name, preset_paths, config_path) in self.parsed_build_targets: - spec_str = spec_builders[self.spec_fork].build_spec( - name, self.parsed_md_doc_paths, preset_paths, 
config_path) + spec_str = build_spec( + spec_builders[self.spec_fork].fork, + name, + self.parsed_md_doc_paths, + preset_paths, + config_path, + ) if self.dry_run: self.announce('dry run successfully prepared contents for spec.' f' out dir: "{self.out_dir}", spec fork: "{self.spec_fork}", build target: "{name}"') @@ -1210,6 +457,7 @@ class PyspecDevCommand(Command): for spec_fork in spec_builders: self.run_pyspec_cmd(spec_fork=spec_fork) + commands = { 'pyspec': PySpecCommand, 'build_py': BuildPyCommand,