Make sync-protocol.md pass the linter

Hsiao-Wei Wang 2020-12-09 17:35:22 +08:00
parent acfe49e3f3
commit bcde37c39f
3 changed files with 80 additions and 22 deletions

File 1 of 3

@@ -14,6 +14,7 @@ class SpecObject(NamedTuple):
     functions: Dict[str, str]
     custom_types: Dict[str, str]
     constants: Dict[str, str]
+    ssz_dep_constants: Dict[str, str]  # the constants that depend on ssz_objects
     ssz_objects: Dict[str, str]
     dataclasses: Dict[str, str]
@@ -35,6 +36,7 @@ def get_spec(file_name: str) -> SpecObject:
     current_name = None  # most recent section title
     functions: Dict[str, str] = {}
     constants: Dict[str, str] = {}
+    ssz_dep_constants: Dict[str, str] = {}
     ssz_objects: Dict[str, str] = {}
     dataclasses: Dict[str, str] = {}
     function_matcher = re.compile(FUNCTION_REGEX)
@@ -88,10 +90,20 @@ def get_spec(file_name: str) -> SpecObject:
                         if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
                             is_constant_def = False
                     if is_constant_def:
-                        constants[row[0]] = row[1].replace('**TBD**', '2**32')
+                        if row[1].startswith('get_generalized_index'):
+                            ssz_dep_constants[row[0]] = row[1]
+                        else:
+                            constants[row[0]] = row[1].replace('**TBD**', '2**32')
                     elif row[1].startswith('uint') or row[1].startswith('Bytes'):
                         custom_types[row[0]] = row[1]
-    return SpecObject(functions, custom_types, constants, ssz_objects, dataclasses)
+    return SpecObject(
+        functions=functions,
+        custom_types=custom_types,
+        constants=constants,
+        ssz_dep_constants=ssz_dep_constants,
+        ssz_objects=ssz_objects,
+        dataclasses=dataclasses,
+    )
 
 
 CONFIG_LOADER = '''
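For orientation, the new branch only changes where a parsed constants-table row is stored: a value written as a get_generalized_index(...) expression depends on the SSZ classes, so it is parked in ssz_dep_constants, while everything else keeps flowing through the **TBD** replacement into constants. A minimal sketch of that routing follows; the rows are illustrative (only FINALIZED_ROOT_INDEX comes from this diff, GENESIS_SLOT is a phase 0 constant, and HYPOTHETICAL_LIMIT is a made-up name):

ssz_dep_constants: dict = {}
constants: dict = {}

rows = [
    ('FINALIZED_ROOT_INDEX', "get_generalized_index(BeaconState, 'finalized_checkpoint', 'root')"),
    ('GENESIS_SLOT', 'Slot(0)'),
    ('HYPOTHETICAL_LIMIT', '**TBD**'),  # made-up name, just to exercise the TBD path
]
for name, value in rows:
    if value.startswith('get_generalized_index'):
        # Depends on SSZ objects: emitted and re-checked separately by the generator.
        ssz_dep_constants[name] = value
    else:
        constants[name] = value.replace('**TBD**', '2**32')

assert 'FINALIZED_ROOT_INDEX' in ssz_dep_constants
assert constants['HYPOTHETICAL_LIMIT'] == '2**32'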
@@ -160,7 +172,7 @@ CONFIG_NAME = 'mainnet'
 LIGHTCLIENT_IMPORT = '''from eth2spec.phase0 import spec as phase0
 from eth2spec.config.config_util import apply_constants_config
 from typing import (
-    Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional
+    Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional, Union
 )
 
 from dataclasses import (
@@ -174,6 +186,7 @@ from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
 from eth2spec.utils.ssz.ssz_typing import (
     View, boolean, Container, List, Vector, uint8, uint32, uint64,
     Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
+    Path,
 )
 from eth2spec.utils import bls
@@ -277,6 +290,24 @@ get_start_shard = cache_this(
     _get_start_shard, lru_size=SLOTS_PER_EPOCH * 3)'''
 
+
+LIGHTCLIENT_PATCH_SUNDRY_FUNCTIONS = '''
+def get_generalized_index(ssz_class: Any, *path: Sequence[Union[int, SSZVariableName]]) -> GeneralizedIndex:
+    ssz_path = Path(ssz_class)
+    for item in path:
+        ssz_path = ssz_path / item
+    return GeneralizedIndex(ssz_path.gindex())
+'''
+
+
+# The constants that depend on SSZ objects
+# Will verify the value at the end of the spec
+LIGHTCLIENT_PATCH_HARDCODED_SSZ_DEP_CONSTANTS = {
+    'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
+    'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
+}
 
 
 def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_class_objects: Dict[str, str]) -> str:
     """
     Given all the objects that constitute a spec, combine them into a single pyfile.
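As a sanity check on the two hardcoded values above, the generalized-index arithmetic can be reproduced by hand. The sketch below assumes the lightclient-patch BeaconState has 23 fields (phase 0's 21 plus current_sync_committee and next_sync_committee appended at indices 21 and 22, so finalized_checkpoint stays at index 20) and that Checkpoint is (epoch, root); those assumptions are not stated in the diff itself.

def next_power_of_two(n: int) -> int:
    p = 1
    while p < n:
        p *= 2
    return p


def field_gindex(field_count: int, field_index: int) -> int:
    # In an SSZ container, field i sits at generalized index
    # next_power_of_two(field_count) + i.
    return next_power_of_two(field_count) + field_index


def concat_gindices(outer: int, inner: int) -> int:
    # Descend from gindex `outer` into the child subtree addressed by `inner`:
    # replace the leading 1-bit of `inner` with `outer`.
    anchor = 1 << (inner.bit_length() - 1)
    return outer * anchor + (inner - anchor)


# BeaconState.finalized_checkpoint.root -> GeneralizedIndex(105)
assert concat_gindices(field_gindex(23, 20), field_gindex(2, 1)) == 105
# BeaconState.next_sync_committee -> GeneralizedIndex(54)
assert field_gindex(23, 22) == 54

These are the values that get_generalized_index(BeaconState, 'finalized_checkpoint', 'root') and get_generalized_index(BeaconState, 'next_sync_committee') are expected to produce, which the assertion block appended to the generated spec re-checks.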
@@ -303,14 +334,30 @@ def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_cl
         + '\n\n' + f"fork = \'{fork}\'\n"
         + '\n\n' + new_type_definitions
         + '\n' + SUNDRY_CONSTANTS_FUNCTIONS
-        + '\n\n' + constants_spec
+    )
+    if fork == 'lightclient_patch':
+        lightclient_patch_ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, LIGHTCLIENT_PATCH_HARDCODED_SSZ_DEP_CONSTANTS[x]), LIGHTCLIENT_PATCH_HARDCODED_SSZ_DEP_CONSTANTS))
+        spec += (
+            LIGHTCLIENT_PATCH_SUNDRY_FUNCTIONS
+            + '\n\n' + lightclient_patch_ssz_dep_constants
+        )
+    spec += (
+        '\n\n' + constants_spec
         + '\n\n' + CONFIG_LOADER
         + '\n\n' + ordered_class_objects_spec
         + '\n\n' + functions_spec
         + '\n' + PHASE0_SUNDRY_FUNCTIONS
     )
     if fork == 'phase1':
         spec += '\n' + PHASE1_SUNDRY_FUNCTIONS
+    if fork == 'lightclient_patch':
+        lightclient_patch_ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), LIGHTCLIENT_PATCH_HARDCODED_SSZ_DEP_CONSTANTS))
+        spec += '\n\n' + lightclient_patch_ssz_dep_constants_verification
     spec += '\n'
     return spec
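Concretely, for the lightclient_patch fork this builds two small text snippets: constant definitions from the hardcoded table go in before the rest of the spec body, and verification lines built from spec_object.ssz_dep_constants are appended at the end. A sketch of what those strings expand to, assuming get_spec() collected exactly the two entries visible in sync-protocol.md:

HARDCODED = {
    'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
    'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
}
# As collected from the markdown constants table:
ssz_dep_constants = {
    'FINALIZED_ROOT_INDEX': "get_generalized_index(BeaconState, 'finalized_checkpoint', 'root')",
    'NEXT_SYNC_COMMITTEE_INDEX': "get_generalized_index(BeaconState, 'next_sync_committee')",
}

definitions = '\n'.join('%s = %s' % (k, HARDCODED[k]) for k in HARDCODED)
verification = '\n'.join('assert %s == %s' % (k, ssz_dep_constants[k]) for k in HARDCODED)

print(definitions)
# FINALIZED_ROOT_INDEX = GeneralizedIndex(105)
# NEXT_SYNC_COMMITTEE_INDEX = GeneralizedIndex(54)
print(verification)
# assert FINALIZED_ROOT_INDEX == get_generalized_index(BeaconState, 'finalized_checkpoint', 'root')
# assert NEXT_SYNC_COMMITTEE_INDEX == get_generalized_index(BeaconState, 'next_sync_committee')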
@@ -332,7 +379,7 @@ ignored_dependencies = [
     'Bytes1', 'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
     'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
     'bytes', 'byte', 'ByteList', 'ByteVector',
-    'Dict', 'dict', 'field',
+    'Dict', 'dict', 'field', 'ceillog2',
 ]
@@ -373,14 +420,22 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
     """
     Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
     """
-    functions0, custom_types0, constants0, ssz_objects0, dataclasses0 = spec0
-    functions1, custom_types1, constants1, ssz_objects1, dataclasses1 = spec1
+    functions0, custom_types0, constants0, ssz_dep_constants0, ssz_objects0, dataclasses0 = spec0
+    functions1, custom_types1, constants1, ssz_dep_constants1, ssz_objects1, dataclasses1 = spec1
     functions = combine_functions(functions0, functions1)
     custom_types = combine_constants(custom_types0, custom_types1)
     constants = combine_constants(constants0, constants1)
+    ssz_dep_constants = combine_constants(ssz_dep_constants0, ssz_dep_constants1)
     ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
     dataclasses = combine_functions(dataclasses0, dataclasses1)
-    return SpecObject(functions, custom_types, constants, ssz_objects, dataclasses)
+    return SpecObject(
+        functions=functions,
+        custom_types=custom_types,
+        constants=constants,
+        ssz_dep_constants=ssz_dep_constants,
+        ssz_objects=ssz_objects,
+        dataclasses=dataclasses,
+    )
 
 
 fork_imports = {
@@ -461,8 +516,8 @@ class PySpecCommand(Command):
                     specs/phase0/weak-subjectivity.md
                     specs/lightclient/beacon-chain.md
                     specs/lightclient/lightclient-fork.md
+                    specs/lightclient/sync-protocol.md
                 """
-                # TODO: add specs/lightclient/sync-protocol.md back when the GeneralizedIndex helpers are included.
             else:
                 raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork)

File 2 of 3

@@ -39,8 +39,8 @@ uses sync committees introduced in [this beacon chain extension](./beacon-chain.
 
 | Name | Value |
 | - | - |
-| `FINALIZED_ROOT_INDEX` | `Index(BeaconState, 'finalized_checkpoint', 'root')` |
-| `NEXT_SYNC_COMMITTEE_INDEX` | `Index(BeaconState, 'next_sync_committee')` |
+| `FINALIZED_ROOT_INDEX` | `get_generalized_index(BeaconState, 'finalized_checkpoint', 'root')` |
+| `NEXT_SYNC_COMMITTEE_INDEX` | `get_generalized_index(BeaconState, 'next_sync_committee')` |
 
 ## Configuration
@@ -78,10 +78,10 @@ class LightClientUpdate(Container):
     header: BeaconBlockHeader
     # Next sync committee corresponding to the header
     next_sync_committee: SyncCommittee
-    next_sync_committee_branch: Vector[Bytes32, log2(NEXT_SYNC_COMMITTEE_INDEX)]
+    next_sync_committee_branch: Vector[Bytes32, ceillog2(NEXT_SYNC_COMMITTEE_INDEX)]
     # Finality proof for the update header
     finality_header: BeaconBlockHeader
-    finality_branch: Vector[Bytes32, log2(FINALIZED_ROOT_INDEX)]
+    finality_branch: Vector[Bytes32, ceillog2(FINALIZED_ROOT_INDEX)]
     # Sync committee aggregate signature
     sync_committee_bits: Bitvector[SYNC_COMMITTEE_SIZE]
     sync_committee_signature: BLSSignature
@@ -116,28 +116,28 @@ def is_valid_light_client_update(snapshot: LightClientSnapshot, update: LightCli
 
     # Verify update header root is the finalized root of the finality header, if specified
     if update.finality_header == BeaconBlockHeader():
         signed_header = update.header
-        assert update.finality_branch == [ZERO_HASH for _ in range(log2(FINALIZED_ROOT_INDEX))]
+        assert update.finality_branch == [Bytes32() for _ in range(ceillog2(FINALIZED_ROOT_INDEX))]
     else:
         signed_header = update.finality_header
         assert is_valid_merkle_branch(
             leaf=hash_tree_root(update.header),
             branch=update.finality_branch,
-            depth=log2(FINALIZED_ROOT_INDEX),
-            index=FINALIZED_ROOT_INDEX % 2**log2(FINALIZED_ROOT_INDEX),
+            depth=ceillog2(FINALIZED_ROOT_INDEX),
+            index=FINALIZED_ROOT_INDEX % 2**ceillog2(FINALIZED_ROOT_INDEX),
             root=update.finality_header.state_root,
         )
 
     # Verify update next sync committee if the update period incremented
     if update_period == snapshot_period:
         sync_committee = snapshot.current_sync_committee
-        assert update.next_sync_committee_branch == [ZERO_HASH for _ in range(log2(NEXT_SYNC_COMMITTEE_INDEX))]
+        assert update.next_sync_committee_branch == [Bytes32() for _ in range(ceillog2(NEXT_SYNC_COMMITTEE_INDEX))]
     else:
         sync_committee = snapshot.next_sync_committee
         assert is_valid_merkle_branch(
             leaf=hash_tree_root(update.next_sync_committee),
             branch=update.next_sync_committee_branch,
-            depth=log2(NEXT_SYNC_COMMITTEE_INDEX),
-            index=NEXT_SYNC_COMMITTEE_INDEX % 2**log2(NEXT_SYNC_COMMITTEE_INDEX),
+            depth=ceillog2(NEXT_SYNC_COMMITTEE_INDEX),
+            index=NEXT_SYNC_COMMITTEE_INDEX % 2**ceillog2(NEXT_SYNC_COMMITTEE_INDEX),
             root=update.header.state_root,
         )
@@ -173,11 +173,14 @@ def process_light_client_update(store: LightClientStore, update: LightClientUpda
     assert is_valid_light_client_update(store.snapshot, update)
     store.valid_updates.append(update)
 
-    if sum(update.sync_committee_bits) * 3 > len(update.sync_committee_bits) * 2 and update.header != update.finality_header:
+    if (
+        sum(update.sync_committee_bits) * 3 > len(update.sync_committee_bits) * 2
+        and update.header != update.finality_header
+    ):
         # Apply update if 2/3 quorum is reached and we have a finality proof
         apply_light_client_update(store, update)
         store.valid_updates = []
-    elif current_slot > snapshot.header.slot + LIGHT_CLIENT_UPDATE_TIMEOUT:
+    elif current_slot > store.snapshot.header.slot + LIGHT_CLIENT_UPDATE_TIMEOUT:
         # Forced best update when the update timeout has elapsed
         apply_light_client_update(store, max(store.valid_updates, key=lambda update: sum(update.sync_committee_bits)))
         store.valid_updates = []
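The reflowed condition above is the usual integer form of a two-thirds participation check. A quick sketch with a hypothetical 512-bit sync committee (the size is an assumption for illustration only) shows where the threshold falls:

committee_size = 512  # hypothetical, for illustration only
for participants in (341, 342):
    has_quorum = participants * 3 > committee_size * 2
    print(participants, has_quorum)
# 341 False  (341 * 3 = 1023 <= 1024)
# 342 True   (342 * 3 = 1026 >  1024)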

File 3 of 3

@@ -5,4 +5,4 @@ from remerkleable.complex import Container, Vector, List
 from remerkleable.basic import boolean, bit, uint, byte, uint8, uint16, uint32, uint64, uint128, uint256
 from remerkleable.bitfields import Bitvector, Bitlist
 from remerkleable.byte_arrays import ByteVector, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, ByteList
-from remerkleable.core import BasicView, View
+from remerkleable.core import BasicView, View, Path