Merge pull request #2147 from ethereum/lightclient-sync-exe

Make `sync-protocol.md` pass the linter and add basic tests
Danny Ryan, 2021-03-12 07:52:45 -07:00, committed via GitHub (commit 65dcf897c2)
10 changed files with 433 additions and 74 deletions


@ -31,3 +31,9 @@ EPOCHS_PER_SYNC_COMMITTEE_PERIOD: 256
# Signature domains
# ---------------------------------------------------------------
DOMAIN_SYNC_COMMITTEE: 0x07000000
# Sync protocol
# ---------------------------------------------------------------
# 2**13 (=8192)
LIGHT_CLIENT_UPDATE_TIMEOUT: 8192
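As a side note (not part of the diff): with the standard mainnet parameters `SLOTS_PER_EPOCH = 32` and `SECONDS_PER_SLOT = 12`, the timeout of `2**13` slots is exactly one sync committee period, a bit over a day of wall-clock time.

```python
# Sanity check of the mainnet value (assumed mainnet constants, for illustration only)
SLOTS_PER_EPOCH = 32
SECONDS_PER_SLOT = 12
EPOCHS_PER_SYNC_COMMITTEE_PERIOD = 256

LIGHT_CLIENT_UPDATE_TIMEOUT = 2**13
assert LIGHT_CLIENT_UPDATE_TIMEOUT == 8192
# The timeout equals one sync committee period, measured in slots
assert LIGHT_CLIENT_UPDATE_TIMEOUT == EPOCHS_PER_SYNC_COMMITTEE_PERIOD * SLOTS_PER_EPOCH
print(LIGHT_CLIENT_UPDATE_TIMEOUT * SECONDS_PER_SLOT / 3600)  # ≈ 27.3 hours
```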


@ -31,3 +31,9 @@ EPOCHS_PER_SYNC_COMMITTEE_PERIOD: 8
# Signature domains
# ---------------------------------------------------------------
DOMAIN_SYNC_COMMITTEE: 0x07000000
# Sync protocol
# ---------------------------------------------------------------
# [customized]
LIGHT_CLIENT_UPDATE_TIMEOUT: 32

setup.py

@ -10,10 +10,17 @@ from typing import Dict, NamedTuple, List
FUNCTION_REGEX = r'^def [\w_]*'
# Definitions in context.py
PHASE0 = 'phase0'
ALTAIR = 'altair'
PHASE1 = 'phase1'
class SpecObject(NamedTuple):
functions: Dict[str, str]
custom_types: Dict[str, str]
constants: Dict[str, str]
ssz_dep_constants: Dict[str, str] # the constants that depend on ssz_objects
ssz_objects: Dict[str, str]
dataclasses: Dict[str, str]
@ -35,6 +42,7 @@ def get_spec(file_name: str) -> SpecObject:
current_name = None # most recent section title
functions: Dict[str, str] = {}
constants: Dict[str, str] = {}
ssz_dep_constants: Dict[str, str] = {}
ssz_objects: Dict[str, str] = {}
dataclasses: Dict[str, str] = {}
function_matcher = re.compile(FUNCTION_REGEX)
@ -88,10 +96,20 @@ def get_spec(file_name: str) -> SpecObject:
if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
is_constant_def = False
if is_constant_def:
constants[row[0]] = row[1].replace('**TBD**', '2**32')
if row[1].startswith('get_generalized_index'):
ssz_dep_constants[row[0]] = row[1]
else:
constants[row[0]] = row[1].replace('**TBD**', '2**32')
elif row[1].startswith('uint') or row[1].startswith('Bytes'):
custom_types[row[0]] = row[1]
return SpecObject(functions, custom_types, constants, ssz_objects, dataclasses)
return SpecObject(
functions=functions,
custom_types=custom_types,
constants=constants,
ssz_dep_constants=ssz_dep_constants,
ssz_objects=ssz_objects,
dataclasses=dataclasses,
)
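Roughly, the new branch in `get_spec` defers any constant whose definition calls `get_generalized_index`, since such values cannot be evaluated until the SSZ classes exist. A small illustration of the routing (the `row` value is a made-up example of a parsed constants-table row):

```python
# Made-up example of a parsed constants-table row and how the branch above routes it
row = ["FINALIZED_ROOT_INDEX", "get_generalized_index(BeaconState, 'finalized_checkpoint', 'root')"]

constants: dict = {}
ssz_dep_constants: dict = {}

if row[1].startswith('get_generalized_index'):
    # Depends on SSZ objects: keep the expression for later verification
    ssz_dep_constants[row[0]] = row[1]
else:
    constants[row[0]] = row[1].replace('**TBD**', '2**32')

assert 'FINALIZED_ROOT_INDEX' in ssz_dep_constants and not constants
```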
CONFIG_LOADER = '''
@ -157,10 +175,10 @@ SSZObject = TypeVar('SSZObject', bound=View)
CONFIG_NAME = 'mainnet'
'''
LIGHTCLIENT_IMPORT = '''from eth2spec.phase0 import spec as phase0
ALTAIR_IMPORTS = '''from eth2spec.phase0 import spec as phase0
from eth2spec.config.config_util import apply_constants_config
from typing import (
Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional
Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional, Union
)
from dataclasses import (
@ -174,6 +192,7 @@ from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
from eth2spec.utils.ssz.ssz_typing import (
View, boolean, Container, List, Vector, uint8, uint32, uint64,
Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
Path,
)
from eth2spec.utils import bls
@ -196,6 +215,12 @@ def ceillog2(x: int) -> uint64:
if x < 1:
raise ValueError(f"ceillog2 accepts only positive values, x={x}")
return uint64((x - 1).bit_length())
def floorlog2(x: int) -> uint64:
if x < 1:
raise ValueError(f"floorlog2 accepts only positive values, x={x}")
return uint64(x.bit_length() - 1)
'''
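As a quick illustration of the new helper (a sketch using plain `int`s rather than `uint64`): `floorlog2` of a generalized index gives the depth of the corresponding Merkle branch, while the existing `ceillog2` rounds up.

```python
def floorlog2(x: int) -> int:
    return x.bit_length() - 1

def ceillog2(x: int) -> int:
    return (x - 1).bit_length()

assert floorlog2(105) == 6 and ceillog2(105) == 7  # gindex 105 sits at depth 6
assert floorlog2(54) == 5 and ceillog2(54) == 6    # gindex 54 sits at depth 5
assert floorlog2(8) == ceillog2(8) == 3            # the two agree exactly at powers of two
```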
PHASE0_SUNDRY_FUNCTIONS = '''
def get_eth1_data(block: Eth1Block) -> Eth1Data:
@ -277,6 +302,35 @@ get_start_shard = cache_this(
_get_start_shard, lru_size=SLOTS_PER_EPOCH * 3)'''
ALTAIR_SUNDRY_FUNCTIONS = '''
def get_generalized_index(ssz_class: Any, *path: Sequence[Union[int, SSZVariableName]]) -> GeneralizedIndex:
ssz_path = Path(ssz_class)
for item in path:
ssz_path = ssz_path / item
return GeneralizedIndex(ssz_path.gindex())'''
# The constants that depend on SSZ objects
# Their values are verified at the end of the generated spec
ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS = {
'FINALIZED_ROOT_INDEX': 'GeneralizedIndex(105)',
'NEXT_SYNC_COMMITTEE_INDEX': 'GeneralizedIndex(54)',
}
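The two hardcoded gindices can be reproduced by hand. A sketch of the arithmetic, assuming the Altair `BeaconState` at this point has 23 fields, with `finalized_checkpoint` at position 20 and `next_sync_committee` at position 22 (only the final numbers are asserted by the generated spec; the field positions here are for illustration):

```python
# A container's fields are padded to the next power of two of chunks: 23 fields -> 32
# chunks (depth 5), so field i has generalized index 32 + i. Descending into a child
# container multiplies by the child's chunk count and adds the child field index.
FINALIZED_CHECKPOINT_POSITION = 20  # assumed field position of `finalized_checkpoint`
NEXT_SYNC_COMMITTEE_POSITION = 22   # assumed field position of `next_sync_committee`

finalized_checkpoint_gindex = 32 + FINALIZED_CHECKPOINT_POSITION   # 52
finalized_root_gindex = finalized_checkpoint_gindex * 2 + 1        # Checkpoint.root is field 1 of 2
next_sync_committee_gindex = 32 + NEXT_SYNC_COMMITTEE_POSITION

assert finalized_root_gindex == 105
assert next_sync_committee_gindex == 54
```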
def is_phase0(fork):
return fork == PHASE0
def is_altair(fork):
return fork == ALTAIR
def is_phase1(fork):
return fork == PHASE1
def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_class_objects: Dict[str, str]) -> str:
"""
Given all the objects that constitute a spec, combine them into a single pyfile.
@ -290,7 +344,7 @@ def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_cl
)
)
for k in list(spec_object.functions):
if "ceillog2" in k:
if "ceillog2" in k or "floorlog2" in k:
del spec_object.functions[k]
functions_spec = '\n\n'.join(spec_object.functions.values())
for k in list(spec_object.constants.keys()):
@ -298,19 +352,33 @@ def objects_to_spec(spec_object: SpecObject, imports: str, fork: str, ordered_cl
spec_object.constants[k] += " # noqa: E501"
constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, spec_object.constants[x]), spec_object.constants))
ordered_class_objects_spec = '\n\n'.join(ordered_class_objects.values())
if is_altair(fork):
altair_ssz_dep_constants = '\n'.join(map(lambda x: '%s = %s' % (x, ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS[x]), ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS))
spec = (
imports
+ '\n\n' + f"fork = \'{fork}\'\n"
+ '\n\n' + new_type_definitions
+ '\n' + SUNDRY_CONSTANTS_FUNCTIONS
# The constants that some SSZ containers require; they need to be defined before `constants_spec`
+ ('\n\n' + altair_ssz_dep_constants if is_altair(fork) else '')
+ '\n\n' + constants_spec
+ '\n\n' + CONFIG_LOADER
+ '\n\n' + ordered_class_objects_spec
+ '\n\n' + functions_spec
# Functions to make pyspec work
+ '\n' + PHASE0_SUNDRY_FUNCTIONS
+ ('\n' + ALTAIR_SUNDRY_FUNCTIONS if is_altair(fork) else '')
+ ('\n' + PHASE1_SUNDRY_FUNCTIONS if is_phase1(fork) else '')
)
if fork == 'phase1':
spec += '\n' + PHASE1_SUNDRY_FUNCTIONS
# Since some constants are hardcoded in setup.py, the following assertions verify that the hardcoded constants are
# the same as the spec definitions.
if is_altair(fork):
altair_ssz_dep_constants_verification = '\n'.join(map(lambda x: 'assert %s == %s' % (x, spec_object.ssz_dep_constants[x]), ALTAIR_HARDCODED_SSZ_DEP_CONSTANTS))
spec += '\n\n\n' + altair_ssz_dep_constants_verification
spec += '\n'
return spec
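After `make pyspec`, the effect can be checked directly against the generated module, roughly like this (a sketch; it assumes the `altair` pyspec has been built and is importable as shown):

```python
# Sketch: verify the hardcoded gindices against the generated altair spec
from eth2spec.altair import spec

assert spec.FINALIZED_ROOT_INDEX == spec.get_generalized_index(
    spec.BeaconState, 'finalized_checkpoint', 'root')
assert spec.NEXT_SYNC_COMMITTEE_INDEX == spec.get_generalized_index(
    spec.BeaconState, 'next_sync_committee')
```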
@ -332,7 +400,7 @@ ignored_dependencies = [
'Bytes1', 'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
'bytes', 'byte', 'ByteList', 'ByteVector',
'Dict', 'dict', 'field',
'Dict', 'dict', 'field', 'ceillog2', 'floorlog2',
]
@ -373,20 +441,28 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
"""
Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
"""
functions0, custom_types0, constants0, ssz_objects0, dataclasses0 = spec0
functions1, custom_types1, constants1, ssz_objects1, dataclasses1 = spec1
functions0, custom_types0, constants0, ssz_dep_constants0, ssz_objects0, dataclasses0 = spec0
functions1, custom_types1, constants1, ssz_dep_constants1, ssz_objects1, dataclasses1 = spec1
functions = combine_functions(functions0, functions1)
custom_types = combine_constants(custom_types0, custom_types1)
constants = combine_constants(constants0, constants1)
ssz_dep_constants = combine_constants(ssz_dep_constants0, ssz_dep_constants1)
ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
dataclasses = combine_functions(dataclasses0, dataclasses1)
return SpecObject(functions, custom_types, constants, ssz_objects, dataclasses)
return SpecObject(
functions=functions,
custom_types=custom_types,
constants=constants,
ssz_dep_constants=ssz_dep_constants,
ssz_objects=ssz_objects,
dataclasses=dataclasses,
)
fork_imports = {
'phase0': PHASE0_IMPORTS,
'phase1': PHASE1_IMPORTS,
'altair': LIGHTCLIENT_IMPORT,
'altair': ALTAIR_IMPORTS,
}
@ -423,7 +499,7 @@ class PySpecCommand(Command):
def initialize_options(self):
"""Set default values for options."""
# Each user option must be listed here with their default value.
self.spec_fork = 'phase0'
self.spec_fork = PHASE0
self.md_doc_paths = ''
self.out_dir = 'pyspec_output'
@ -432,14 +508,14 @@ class PySpecCommand(Command):
if len(self.md_doc_paths) == 0:
print("no paths were specified, using default markdown file paths for pyspec"
" build (spec fork: %s)" % self.spec_fork)
if self.spec_fork == "phase0":
if is_phase0(self.spec_fork):
self.md_doc_paths = """
specs/phase0/beacon-chain.md
specs/phase0/fork-choice.md
specs/phase0/validator.md
specs/phase0/weak-subjectivity.md
"""
elif self.spec_fork == "phase1":
elif is_phase1(self.spec_fork):
self.md_doc_paths = """
specs/phase0/beacon-chain.md
specs/phase0/fork-choice.md
@ -453,7 +529,7 @@ class PySpecCommand(Command):
specs/phase1/shard-fork-choice.md
specs/phase1/validator.md
"""
elif self.spec_fork == "altair":
elif is_altair(self.spec_fork):
self.md_doc_paths = """
specs/phase0/beacon-chain.md
specs/phase0/fork-choice.md
@ -461,6 +537,7 @@ class PySpecCommand(Command):
specs/phase0/weak-subjectivity.md
specs/altair/beacon-chain.md
specs/altair/fork.md
specs/altair/sync-protocol.md
"""
# TODO: add specs/altair/sync-protocol.md back when the GeneralizedIndex helpers are included.
else:


@ -14,11 +14,13 @@
- [Misc](#misc)
- [Time parameters](#time-parameters)
- [Containers](#containers)
- [`LightClientSnapshot`](#lightclientsnapshot)
- [`LightClientUpdate`](#lightclientupdate)
- [`LightClientStore`](#lightclientstore)
- [`LightClientSnapshot`](#lightclientsnapshot)
- [`LightClientUpdate`](#lightclientupdate)
- [`LightClientStore`](#lightclientstore)
- [Helper functions](#helper-functions)
- [`get_subtree_index`](#get_subtree_index)
- [Light client state updates](#light-client-state-updates)
- [`is_valid_light_client_update`](#is_valid_light_client_update)
- [`validate_light_client_update`](#validate_light_client_update)
- [`apply_light_client_update`](#apply_light_client_update)
- [`process_light_client_update`](#process_light_client_update)
@ -39,8 +41,8 @@ uses sync committees introduced in [this beacon chain extension](./beacon-chain.
| Name | Value |
| - | - |
| `FINALIZED_ROOT_INDEX` | `Index(BeaconState, 'finalized_checkpoint', 'root')` |
| `NEXT_SYNC_COMMITTEE_INDEX` | `Index(BeaconState, 'next_sync_committee')` |
| `FINALIZED_ROOT_INDEX` | `get_generalized_index(BeaconState, 'finalized_checkpoint', 'root')` |
| `NEXT_SYNC_COMMITTEE_INDEX` | `get_generalized_index(BeaconState, 'next_sync_committee')` |
## Configuration
@ -59,7 +61,7 @@ uses sync committees introduced in [this beacon chain extension](./beacon-chain.
## Containers
#### `LightClientSnapshot`
### `LightClientSnapshot`
```python
class LightClientSnapshot(Container):
@ -70,7 +72,7 @@ class LightClientSnapshot(Container):
next_sync_committee: SyncCommittee
```
#### `LightClientUpdate`
### `LightClientUpdate`
```python
class LightClientUpdate(Container):
@ -78,10 +80,10 @@ class LightClientUpdate(Container):
header: BeaconBlockHeader
# Next sync committee corresponding to the header
next_sync_committee: SyncCommittee
next_sync_committee_branch: Vector[Bytes32, log2(NEXT_SYNC_COMMITTEE_INDEX)]
next_sync_committee_branch: Vector[Bytes32, floorlog2(NEXT_SYNC_COMMITTEE_INDEX)]
# Finality proof for the update header
finality_header: BeaconBlockHeader
finality_branch: Vector[Bytes32, log2(FINALIZED_ROOT_INDEX)]
finality_branch: Vector[Bytes32, floorlog2(FINALIZED_ROOT_INDEX)]
# Sync committee aggregate signature
sync_committee_bits: Bitvector[SYNC_COMMITTEE_SIZE]
sync_committee_signature: BLSSignature
@ -89,7 +91,7 @@ class LightClientUpdate(Container):
fork_version: Version
```
#### `LightClientStore`
### `LightClientStore`
```python
class LightClientStore(Container):
@ -97,14 +99,24 @@ class LightClientStore(Container):
valid_updates: List[LightClientUpdate, MAX_VALID_LIGHT_CLIENT_UPDATES]
```
## Helper functions
### `get_subtree_index`
```python
def get_subtree_index(generalized_index: GeneralizedIndex) -> uint64:
return uint64(generalized_index % 2**(floorlog2(generalized_index)))
```
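A worked example (illustrative, not part of the spec): `get_subtree_index` strips the leading bit of a generalized index, leaving the leaf's position within its Merkle subtree, which is exactly the `index` expected by `is_valid_merkle_branch`.

```python
# Plain-int re-statement of get_subtree_index, with the two gindices defined above
def get_subtree_index(generalized_index: int) -> int:
    return generalized_index % 2**(generalized_index.bit_length() - 1)

assert get_subtree_index(105) == 41  # FINALIZED_ROOT_INDEX: 105 % 2**6
assert get_subtree_index(54) == 22   # NEXT_SYNC_COMMITTEE_INDEX: 54 % 2**5
```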
## Light client state updates
A light client maintains its state in a `store` object of type `LightClientStore` and receives `update` objects of type `LightClientUpdate`. Every `update` triggers `process_light_client_update(store, update, current_slot, genesis_validators_root)`, where `current_slot` is the current slot based on some local clock.
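As a rough sketch of how a client might drive this (not part of the spec; `fetch_update` and `get_current_slot` are hypothetical callables supplied by the client):

```python
def light_client_loop(spec, store, genesis_validators_root, fetch_update, get_current_slot):
    """Hypothetical driver loop; `spec` is the generated altair pyspec module."""
    while True:
        update = fetch_update()            # e.g. received from a light-client server (hypothetical)
        current_slot = get_current_slot()  # slot derived from the local clock (hypothetical)
        try:
            spec.process_light_client_update(store, update, current_slot, genesis_validators_root)
        except AssertionError:
            pass  # update failed validate_light_client_update(); discard it
```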
#### `is_valid_light_client_update`
#### `validate_light_client_update`
```python
def is_valid_light_client_update(snapshot: LightClientSnapshot, update: LightClientUpdate) -> bool:
def validate_light_client_update(snapshot: LightClientSnapshot, update: LightClientUpdate,
genesis_validators_root: Root) -> None:
# Verify update slot is larger than snapshot slot
assert update.header.slot > snapshot.header.slot
@ -116,28 +128,28 @@ def is_valid_light_client_update(snapshot: LightClientSnapshot, update: LightCli
# Verify update header root is the finalized root of the finality header, if specified
if update.finality_header == BeaconBlockHeader():
signed_header = update.header
assert update.finality_branch == [ZERO_HASH for _ in range(log2(FINALIZED_ROOT_INDEX))]
assert update.finality_branch == [Bytes32() for _ in range(floorlog2(FINALIZED_ROOT_INDEX))]
else:
signed_header = update.finality_header
assert is_valid_merkle_branch(
leaf=hash_tree_root(update.header),
branch=update.finality_branch,
depth=log2(FINALIZED_ROOT_INDEX),
index=FINALIZED_ROOT_INDEX % 2**log2(FINALIZED_ROOT_INDEX),
depth=floorlog2(FINALIZED_ROOT_INDEX),
index=get_subtree_index(FINALIZED_ROOT_INDEX),
root=update.finality_header.state_root,
)
# Verify update next sync committee if the update period incremented
if update_period == snapshot_period:
sync_committee = snapshot.current_sync_committee
assert update.next_sync_committee_branch == [ZERO_HASH for _ in range(log2(NEXT_SYNC_COMMITTEE_INDEX))]
assert update.next_sync_committee_branch == [Bytes32() for _ in range(floorlog2(NEXT_SYNC_COMMITTEE_INDEX))]
else:
sync_committee = snapshot.next_sync_committee
assert is_valid_merkle_branch(
leaf=hash_tree_root(update.next_sync_committee),
branch=update.next_sync_committee_branch,
depth=log2(NEXT_SYNC_COMMITTEE_INDEX),
index=NEXT_SYNC_COMMITTEE_INDEX % 2**log2(NEXT_SYNC_COMMITTEE_INDEX),
depth=floorlog2(NEXT_SYNC_COMMITTEE_INDEX),
index=get_subtree_index(NEXT_SYNC_COMMITTEE_INDEX),
root=update.header.state_root,
)
@ -146,11 +158,9 @@ def is_valid_light_client_update(snapshot: LightClientSnapshot, update: LightCli
# Verify sync committee aggregate signature
participant_pubkeys = [pubkey for (bit, pubkey) in zip(update.sync_committee_bits, sync_committee.pubkeys) if bit]
domain = compute_domain(DOMAIN_SYNC_COMMITTEE, update.fork_version)
domain = compute_domain(DOMAIN_SYNC_COMMITTEE, update.fork_version, genesis_validators_root)
signing_root = compute_signing_root(signed_header, domain)
assert bls.FastAggregateVerify(participant_pubkeys, signing_root, update.sync_committee_signature)
return True
```
#### `apply_light_client_update`
@ -168,17 +178,23 @@ def apply_light_client_update(snapshot: LightClientSnapshot, update: LightClient
#### `process_light_client_update`
```python
def process_light_client_update(store: LightClientStore, update: LightClientUpdate, current_slot: Slot) -> None:
# Validate update
assert is_valid_light_client_update(store.snapshot, update)
def process_light_client_update(store: LightClientStore, update: LightClientUpdate, current_slot: Slot,
genesis_validators_root: Root) -> None:
validate_light_client_update(store.snapshot, update, genesis_validators_root)
store.valid_updates.append(update)
if sum(update.sync_committee_bits) * 3 > len(update.sync_committee_bits) * 2 and update.header != update.finality_header:
# Apply update if 2/3 quorum is reached and we have a finality proof
apply_light_client_update(store, update)
if (
sum(update.sync_committee_bits) * 3 > len(update.sync_committee_bits) * 2
and update.finality_header != BeaconBlockHeader()
):
# Apply update if (1) 2/3 quorum is reached and (2) we have a finality proof.
# Note that (2) means that the current light client design needs finality.
# It may be changed to a re-organizable light client design. See the ongoing issue eth2.0-specs#2182.
apply_light_client_update(store.snapshot, update)
store.valid_updates = []
elif current_slot > snapshot.header.slot + LIGHT_CLIENT_UPDATE_TIMEOUT:
elif current_slot > store.snapshot.header.slot + LIGHT_CLIENT_UPDATE_TIMEOUT:
# Forced best update when the update timeout has elapsed
apply_light_client_update(store, max(store.valid_updates, key=lambda update: sum(update.sync_committee_bits)))
apply_light_client_update(store.snapshot,
max(store.valid_updates, key=lambda update: sum(update.sync_committee_bits)))
store.valid_updates = []
```
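For intuition on the quorum condition (a side note, not part of the spec): `sum(bits) * 3 > len(bits) * 2` is a strict two-thirds participation check. Using a committee of 512 members purely as an example:

```python
# Strict-2/3 participation threshold for an example committee size of 512
# (the real SYNC_COMMITTEE_SIZE is defined in the Altair beacon-chain spec)
committee_size = 512
quorum = next(n for n in range(committee_size + 1) if n * 3 > committee_size * 2)
assert quorum == 342  # 342 * 3 = 1026 > 1024, while 341 * 3 = 1023 is not enough
```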


@ -0,0 +1,35 @@
from eth2spec.test.context import (
spec_state_test,
with_phases,
ALTAIR,
)
from eth2spec.test.helpers.merkle import build_proof
@with_phases([ALTAIR])
@spec_state_test
def test_next_sync_committee_tree(spec, state):
state.next_sync_committee: object = spec.SyncCommittee(
pubkeys=[state.validators[i] for i in range(spec.SYNC_COMMITTEE_SIZE)]
)
next_sync_committee_branch = build_proof(state.get_backing(), spec.NEXT_SYNC_COMMITTEE_INDEX)
assert spec.is_valid_merkle_branch(
leaf=state.next_sync_committee.hash_tree_root(),
branch=next_sync_committee_branch,
depth=spec.floorlog2(spec.NEXT_SYNC_COMMITTEE_INDEX),
index=spec.get_subtree_index(spec.NEXT_SYNC_COMMITTEE_INDEX),
root=state.hash_tree_root(),
)
@with_phases([ALTAIR])
@spec_state_test
def test_finality_root_tree(spec, state):
finality_branch = build_proof(state.get_backing(), spec.FINALIZED_ROOT_INDEX)
assert spec.is_valid_merkle_branch(
leaf=state.finalized_checkpoint.root,
branch=finality_branch,
depth=spec.floorlog2(spec.FINALIZED_ROOT_INDEX),
index=spec.get_subtree_index(spec.FINALIZED_ROOT_INDEX),
root=state.hash_tree_root(),
)


@ -0,0 +1,216 @@
from eth2spec.test.context import (
ALTAIR,
MINIMAL,
spec_state_test,
with_configs,
with_phases,
)
from eth2spec.test.helpers.attestations import next_epoch_with_attestations
from eth2spec.test.helpers.block import (
build_empty_block,
build_empty_block_for_next_slot,
)
from eth2spec.test.helpers.state import (
next_slots,
state_transition_and_sign_block,
)
from eth2spec.test.helpers.sync_committee import (
compute_aggregate_sync_committee_signature,
)
from eth2spec.test.helpers.merkle import build_proof
@with_phases([ALTAIR])
@spec_state_test
def test_process_light_client_update_not_updated(spec, state):
pre_snapshot = spec.LightClientSnapshot(
header=spec.BeaconBlockHeader(),
current_sync_committee=state.current_sync_committee,
next_sync_committee=state.next_sync_committee,
)
store = spec.LightClientStore(
snapshot=pre_snapshot,
valid_updates=[]
)
# Block at slot 1 doesn't increase sync committee period, so it won't update snapshot
block = build_empty_block_for_next_slot(spec, state)
signed_block = state_transition_and_sign_block(spec, state, block)
block_header = spec.BeaconBlockHeader(
slot=signed_block.message.slot,
proposer_index=signed_block.message.proposer_index,
parent_root=signed_block.message.parent_root,
state_root=signed_block.message.state_root,
body_root=signed_block.message.body.hash_tree_root(),
)
# Sync committee signing the header
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
sync_committee_bits = [True] * len(committee)
sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot,
committee,
)
next_sync_committee_branch = [spec.Bytes32() for _ in range(spec.floorlog2(spec.NEXT_SYNC_COMMITTEE_INDEX))]
# Ensure that finality checkpoint is genesis
assert state.finalized_checkpoint.epoch == 0
# Finality is unchanged
finality_header = spec.BeaconBlockHeader()
finality_branch = [spec.Bytes32() for _ in range(spec.floorlog2(spec.FINALIZED_ROOT_INDEX))]
update = spec.LightClientUpdate(
header=block_header,
next_sync_committee=state.next_sync_committee,
next_sync_committee_branch=next_sync_committee_branch,
finality_header=finality_header,
finality_branch=finality_branch,
sync_committee_bits=sync_committee_bits,
sync_committee_signature=sync_committee_signature,
fork_version=state.fork.current_version,
)
spec.process_light_client_update(store, update, state.slot, state.genesis_validators_root)
assert len(store.valid_updates) == 1
assert store.valid_updates[0] == update
assert store.snapshot == pre_snapshot
@with_phases([ALTAIR])
@spec_state_test
@with_configs([MINIMAL], reason="too slow")
def test_process_light_client_update_timeout(spec, state):
pre_snapshot = spec.LightClientSnapshot(
header=spec.BeaconBlockHeader(),
current_sync_committee=state.current_sync_committee,
next_sync_committee=state.next_sync_committee,
)
store = spec.LightClientStore(
snapshot=pre_snapshot,
valid_updates=[]
)
# Forward to next sync committee period
next_slots(spec, state, spec.SLOTS_PER_EPOCH * (spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD))
snapshot_period = spec.compute_epoch_at_slot(pre_snapshot.header.slot) // spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
update_period = spec.compute_epoch_at_slot(state.slot) // spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
assert snapshot_period + 1 == update_period
block = build_empty_block_for_next_slot(spec, state)
signed_block = state_transition_and_sign_block(spec, state, block)
block_header = spec.BeaconBlockHeader(
slot=signed_block.message.slot,
proposer_index=signed_block.message.proposer_index,
parent_root=signed_block.message.parent_root,
state_root=signed_block.message.state_root,
body_root=signed_block.message.body.hash_tree_root(),
)
# Sync committee signing the block_header
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
sync_committee_bits = [True] * len(committee)
sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block_header.slot,
committee,
block_root=spec.Root(block_header.hash_tree_root()),
)
# Sync committee is updated
next_sync_committee_branch = build_proof(state.get_backing(), spec.NEXT_SYNC_COMMITTEE_INDEX)
# Finality is unchanged
finality_header = spec.BeaconBlockHeader()
finality_branch = [spec.Bytes32() for _ in range(spec.floorlog2(spec.FINALIZED_ROOT_INDEX))]
update = spec.LightClientUpdate(
header=block_header,
next_sync_committee=state.next_sync_committee,
next_sync_committee_branch=next_sync_committee_branch,
finality_header=finality_header,
finality_branch=finality_branch,
sync_committee_bits=sync_committee_bits,
sync_committee_signature=sync_committee_signature,
fork_version=state.fork.current_version,
)
spec.process_light_client_update(store, update, state.slot, state.genesis_validators_root)
# snapshot has been updated
assert len(store.valid_updates) == 0
assert store.snapshot.header == update.header
@with_phases([ALTAIR])
@spec_state_test
@with_configs([MINIMAL], reason="too slow")
def test_process_light_client_update_finality_updated(spec, state):
pre_snapshot = spec.LightClientSnapshot(
header=spec.BeaconBlockHeader(),
current_sync_committee=state.current_sync_committee,
next_sync_committee=state.next_sync_committee,
)
store = spec.LightClientStore(
snapshot=pre_snapshot,
valid_updates=[]
)
# Change finality
blocks = []
next_slots(spec, state, spec.SLOTS_PER_EPOCH * 2)
for epoch in range(3):
prev_state, new_blocks, state = next_epoch_with_attestations(spec, state, True, True)
blocks += new_blocks
# Ensure that finality checkpoint has changed
assert state.finalized_checkpoint.epoch == 3
# Ensure that it's the same period
snapshot_period = spec.compute_epoch_at_slot(pre_snapshot.header.slot) // spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
update_period = spec.compute_epoch_at_slot(state.slot) // spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
assert snapshot_period == update_period
# Updated sync_committee and finality
next_sync_committee_branch = [spec.Bytes32() for _ in range(spec.floorlog2(spec.NEXT_SYNC_COMMITTEE_INDEX))]
finalized_block_header = blocks[spec.SLOTS_PER_EPOCH - 1].message
assert finalized_block_header.slot == spec.compute_start_slot_at_epoch(state.finalized_checkpoint.epoch)
assert finalized_block_header.hash_tree_root() == state.finalized_checkpoint.root
finality_branch = build_proof(state.get_backing(), spec.FINALIZED_ROOT_INDEX)
# Build block header
block = build_empty_block(spec, state)
block_header = spec.BeaconBlockHeader(
slot=block.slot,
proposer_index=block.proposer_index,
parent_root=block.parent_root,
state_root=state.hash_tree_root(),
body_root=block.body.hash_tree_root(),
)
# Sync committee signing the block_header (the finality_header of the update)
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
sync_committee_bits = [True] * len(committee)
sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block_header.slot,
committee,
block_root=spec.Root(block_header.hash_tree_root()),
)
update = spec.LightClientUpdate(
header=finalized_block_header,
next_sync_committee=state.next_sync_committee,
next_sync_committee_branch=next_sync_committee_branch,
finality_header=block_header, # block_header is the signed header
finality_branch=finality_branch,
sync_committee_bits=sync_committee_bits,
sync_committee_signature=sync_committee_signature,
fork_version=state.fork.current_version,
)
spec.process_light_client_update(store, update, state.slot, state.genesis_validators_root)
# snapshot has been updated
assert len(store.valid_updates) == 0
assert store.snapshot.header == update.header


@ -1,7 +1,7 @@
from eth2spec.test.helpers.keys import privkeys
from eth2spec.test.helpers.merkle import build_proof
from eth2spec.utils import bls
from eth2spec.utils.ssz.ssz_typing import Bitlist, ByteVector, ByteList
from remerkleable.tree import gindex_bit_iter
BYTES_PER_CHUNK = 32
@ -116,26 +116,6 @@ def custody_chunkify(spec, x):
return [ByteVector[spec.BYTES_PER_CUSTODY_CHUNK](c) for c in chunks]
def build_proof(anchor, leaf_index):
if leaf_index <= 1:
return [] # Nothing to prove / invalid index
node = anchor
proof = []
# Walk down, top to bottom to the leaf
bit_iter, _ = gindex_bit_iter(leaf_index)
for bit in bit_iter:
# Always take the opposite hand for the proof.
# 1 = right as leaf, thus get left
if bit:
proof.append(node.get_left().merkle_root())
node = node.get_right()
else:
proof.append(node.get_right().merkle_root())
node = node.get_left()
return list(reversed(proof))
def get_valid_custody_chunk_response(spec, state, chunk_challenge, challenge_index,
block_length_or_custody_data,
invalid_chunk_data=False):


@ -0,0 +1,21 @@
from remerkleable.tree import gindex_bit_iter
def build_proof(anchor, leaf_index):
if leaf_index <= 1:
return [] # Nothing to prove / invalid index
node = anchor
proof = []
# Walk down, top to bottom to the leaf
bit_iter, _ = gindex_bit_iter(leaf_index)
for bit in bit_iter:
# Always take the opposite hand for the proof.
# 1 = right as leaf, thus get left
if bit:
proof.append(node.get_left().merkle_root())
node = node.get_right()
else:
proof.append(node.get_right().merkle_root())
node = node.get_left()
return list(reversed(proof))


@ -5,17 +5,18 @@ from eth2spec.test.helpers.block import (
from eth2spec.utils import bls
def compute_sync_committee_signature(spec, state, slot, privkey):
def compute_sync_committee_signature(spec, state, slot, privkey, block_root=None):
domain = spec.get_domain(state, spec.DOMAIN_SYNC_COMMITTEE, spec.compute_epoch_at_slot(slot))
if slot == state.slot:
block_root = build_empty_block_for_next_slot(spec, state).parent_root
else:
block_root = spec.get_block_root_at_slot(state, slot)
if block_root is None:
if slot == state.slot:
block_root = build_empty_block_for_next_slot(spec, state).parent_root
else:
block_root = spec.get_block_root_at_slot(state, slot)
signing_root = spec.compute_signing_root(block_root, domain)
return bls.Sign(privkey, signing_root)
def compute_aggregate_sync_committee_signature(spec, state, slot, participants):
def compute_aggregate_sync_committee_signature(spec, state, slot, participants, block_root=None):
if len(participants) == 0:
return spec.G2_POINT_AT_INFINITY
@ -28,6 +29,7 @@ def compute_aggregate_sync_committee_signature(spec, state, slot, participants):
state,
slot,
privkey,
block_root=block_root,
)
)
return bls.Aggregate(signatures)


@ -5,4 +5,4 @@ from remerkleable.complex import Container, Vector, List
from remerkleable.basic import boolean, bit, uint, byte, uint8, uint16, uint32, uint64, uint128, uint256
from remerkleable.bitfields import Bitvector, Bitlist
from remerkleable.byte_arrays import ByteVector, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, ByteList
from remerkleable.core import BasicView, View
from remerkleable.core import BasicView, View, Path