Merge pull request #2982 from ethereum/optimistic-sync-tests
Add optimistic sync tests
commit f2c26560d5
@@ -88,8 +88,8 @@ Let `current_slot: Slot` be `(time - genesis_time) // SECONDS_PER_SLOT` where
 class OptimisticStore(object):
     optimistic_roots: Set[Root]
     head_block_root: Root
-    blocks: Dict[Root, BeaconBlock]
-    block_states: Dict[Root, BeaconState]
+    blocks: Dict[Root, BeaconBlock] = field(default_factory=dict)
+    block_states: Dict[Root, BeaconState] = field(default_factory=dict)
 ```
 
 ```python
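Note: the `field(default_factory=dict)` defaults let an `OptimisticStore` be constructed without passing `blocks` / `block_states` explicitly (as `get_optimistic_store` in the new helper below does). A minimal sketch of why mutable dataclass fields need a factory (toy class, not spec code):

```python
from dataclasses import dataclass, field
from typing import Dict


@dataclass
class ToyStore:
    # A bare `= {}` default is rejected by dataclasses (shared mutable default);
    # `default_factory=dict` builds a fresh dict for each instance instead.
    blocks: Dict[str, str] = field(default_factory=dict)


a, b = ToyStore(), ToyStore()
a.blocks["root"] = "block"
assert b.blocks == {}  # instances do not share state
```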
@@ -9,7 +9,6 @@ import sys
 import json
 from typing import Iterable, AnyStr, Any, Callable
 import traceback
 
 from ruamel.yaml import (
     YAML,
 )
@@ -98,6 +97,11 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
     yaml = YAML(pure=True)
     yaml.default_flow_style = None
 
+    def _represent_none(self, _):
+        return self.represent_scalar('tag:yaml.org,2002:null', 'null')
+
+    yaml.representer.add_representer(type(None), _represent_none)
+
     # Spec config is using a YAML subset
     cfg_yaml = YAML(pure=True)
     cfg_yaml.default_flow_style = False  # Emit separate line for each key
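Note: the representer added above makes ruamel.yaml emit an explicit `null` for Python `None` values — presumably needed because the new optimistic-sync steps serialize `latest_valid_hash` / `validation_error` fields that may be `None`. A standalone sketch of the effect (sample data invented):

```python
import io

from ruamel.yaml import YAML

yaml = YAML(pure=True)


def _represent_none(self, _):
    return self.represent_scalar('tag:yaml.org,2002:null', 'null')


yaml.representer.add_representer(type(None), _represent_none)

buf = io.StringIO()
yaml.dump({'status': 'INVALID', 'latest_valid_hash': None}, buf)
# The None value is written as an explicit `null` instead of being left empty.
print(buf.getvalue())
```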
@@ -0,0 +1,99 @@
+from eth2spec.test.context import (
+    spec_state_test,
+    with_bellatrix_and_later,
+)
+from eth2spec.test.helpers.attestations import (
+    state_transition_with_full_block,
+)
+from eth2spec.test.helpers.block import (
+    build_empty_block_for_next_slot,
+)
+from eth2spec.test.helpers.fork_choice import (
+    get_genesis_forkchoice_store_and_block,
+    on_tick_and_append_step,
+)
+from eth2spec.test.helpers.optimistic_sync import (
+    PayloadStatusV1,
+    PayloadStatusV1Status,
+    MegaStore,
+    add_optimistic_block,
+    get_optimistic_store,
+)
+from eth2spec.test.helpers.state import (
+    next_epoch,
+    state_transition_and_sign_block,
+)
+
+
+@with_bellatrix_and_later
+@spec_state_test
+def test_from_syncing_to_invalid(spec, state):
+    test_steps = []
+    # Initialization
+    fc_store, anchor_block = get_genesis_forkchoice_store_and_block(spec, state)
+    op_store = get_optimistic_store(spec, state, anchor_block)
+    mega_store = MegaStore(spec, fc_store, op_store)
+    yield 'anchor_state', state
+    yield 'anchor_block', anchor_block
+
+    next_epoch(spec, state)
+
+    current_time = (
+        (spec.SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY * 10 + state.slot) * spec.config.SECONDS_PER_SLOT
+        + fc_store.genesis_time
+    )
+    on_tick_and_append_step(spec, fc_store, current_time, test_steps)
+
+    # Block 0
+    block_0 = build_empty_block_for_next_slot(spec, state)
+    block_0.body.execution_payload.block_hash = spec.hash(bytes(f'block_0', 'UTF-8'))
+    signed_block = state_transition_and_sign_block(spec, state, block_0)
+    yield from add_optimistic_block(spec, mega_store, signed_block, test_steps, status=PayloadStatusV1Status.VALID)
+    assert spec.get_head(mega_store.fc_store) == mega_store.opt_store.head_block_root
+
+    state_0 = state.copy()
+
+    # Create VALID chain `a`
+    signed_blocks_a = []
+    for i in range(3):
+        block = build_empty_block_for_next_slot(spec, state)
+        block.body.execution_payload.block_hash = spec.hash(bytes(f'chain_a_{i}', 'UTF-8'))
+        block.body.execution_payload.parent_hash = (
+            spec.hash(bytes(f'chain_a_{i - 1}', 'UTF-8')) if i != 0 else block_0.body.execution_payload.block_hash
+        )
+
+        signed_block = state_transition_and_sign_block(spec, state, block)
+        yield from add_optimistic_block(spec, mega_store, signed_block, test_steps, status=PayloadStatusV1Status.VALID)
+        assert spec.get_head(mega_store.fc_store) == mega_store.opt_store.head_block_root
+        signed_blocks_a.append(signed_block.copy())
+
+    # Create SYNCING chain `b`
+    signed_blocks_b = []
+    state = state_0.copy()
+    for i in range(3):
+        block = build_empty_block_for_next_slot(spec, state)
+        block.body.execution_payload.block_hash = spec.hash(bytes(f'chain_b_{i}', 'UTF-8'))
+        block.body.execution_payload.parent_hash = (
+            spec.hash(bytes(f'chain_b_{i - 1}', 'UTF-8')) if i != 0 else block_0.body.execution_payload.block_hash
+        )
+        signed_block = state_transition_with_full_block(spec, state, True, True, block=block)
+        signed_blocks_b.append(signed_block.copy())
+        yield from add_optimistic_block(spec, mega_store, signed_block, test_steps,
+                                        status=PayloadStatusV1Status.SYNCING)
+        assert spec.get_head(mega_store.fc_store) == mega_store.opt_store.head_block_root
+
+    # Now add block 4 to chain `b` with INVALID
+    block = build_empty_block_for_next_slot(spec, state)
+    block.body.execution_payload.block_hash = spec.hash(bytes(f'chain_b_3', 'UTF-8'))
+    block.body.execution_payload.parent_hash = signed_blocks_b[-1].message.body.execution_payload.block_hash
+    signed_block = state_transition_and_sign_block(spec, state, block)
+    payload_status = PayloadStatusV1(
+        status=PayloadStatusV1Status.INVALID,
+        latest_valid_hash=block_0.body.execution_payload.block_hash,
+        validation_error="invalid",
+    )
+    yield from add_optimistic_block(spec, mega_store, signed_block, test_steps,
+                                    payload_status=payload_status)
+    assert mega_store.opt_store.head_block_root == signed_blocks_a[-1].message.hash_tree_root()
+
+    yield 'steps', test_steps
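For orientation, the fork this test builds: chain `a` (three VALID blocks) and chain `b` (three SYNCING blocks) both descend from `block_0`; the fourth block on `b` comes back INVALID with `latest_valid_hash` pointing at `block_0`, which invalidates all of chain `b` and should move the optimistic head to the tip of chain `a`. A rough sketch of that tree (plain data, illustration only):

```python
# Parent -> children map of the execution block hashes used in the test above.
tree = {
    'block_0':   ['chain_a_0', 'chain_b_0'],  # VALID
    'chain_a_0': ['chain_a_1'],               # VALID
    'chain_a_1': ['chain_a_2'],               # VALID
    'chain_b_0': ['chain_b_1'],               # SYNCING -> invalidated
    'chain_b_1': ['chain_b_2'],               # SYNCING -> invalidated
    'chain_b_2': ['chain_b_3'],               # SYNCING -> invalidated
    'chain_b_3': [],                          # INVALID, latest_valid_hash = block_0
}
expected_head = 'chain_a_2'  # tip of the surviving VALID chain
```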
@@ -251,11 +251,13 @@ def state_transition_with_full_block(spec,
                                      fill_cur_epoch,
                                      fill_prev_epoch,
                                      participation_fn=None,
-                                     sync_aggregate=None):
+                                     sync_aggregate=None,
+                                     block=None):
     """
     Build and apply a block with attestions at the calculated `slot_to_attest` of current epoch and/or previous epoch.
     """
-    block = build_empty_block_for_next_slot(spec, state)
+    if block is None:
+        block = build_empty_block_for_next_slot(spec, state)
     if fill_cur_epoch and state.slot >= spec.MIN_ATTESTATION_INCLUSION_DELAY:
         slot_to_attest = state.slot - spec.MIN_ATTESTATION_INCLUSION_DELAY + 1
         if slot_to_attest >= spec.compute_start_slot_at_epoch(spec.get_current_epoch(state)):
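Note: the new optional `block` argument lets a caller pre-build the block (e.g. to set execution-payload fields, as the SYNCING chain in the test above does) while this helper still fills in the attestations. A usage sketch, assuming the usual `spec` / `state` test fixtures:

```python
block = build_empty_block_for_next_slot(spec, state)
block.body.execution_payload.block_hash = spec.hash(b'custom_payload')  # illustrative hash input
signed_block = state_transition_with_full_block(
    spec, state,
    True,   # fill_cur_epoch
    True,   # fill_prev_epoch
    block=block,  # pass None (the default) to keep the old behaviour
)
```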
@@ -13,18 +13,8 @@ def get_anchor_root(spec, state):
     return spec.hash_tree_root(anchor_block_header)
 
 
-def add_block_to_store(spec, store, signed_block):
-    pre_state = store.block_states[signed_block.message.parent_root]
-    block_time = pre_state.genesis_time + signed_block.message.slot * spec.config.SECONDS_PER_SLOT
-
-    if store.time < block_time:
-        spec.on_tick(store, block_time)
-
-    spec.on_block(store, signed_block)
-
-
 def tick_and_add_block(spec, store, signed_block, test_steps, valid=True,
-                       merge_block=False, block_not_found=False):
+                       merge_block=False, block_not_found=False, is_optimistic=False):
     pre_state = store.block_states[signed_block.message.parent_root]
     block_time = pre_state.genesis_time + signed_block.message.slot * spec.config.SECONDS_PER_SLOT
     if merge_block:
@@ -37,6 +27,7 @@ def tick_and_add_block(spec, store, signed_block, test_steps, valid=True,
         spec, store, signed_block, test_steps,
         valid=valid,
         block_not_found=block_not_found,
+        is_optimistic=is_optimistic,
     )
 
     return post_state
@@ -119,28 +110,36 @@ def add_block(spec,
               signed_block,
               test_steps,
               valid=True,
-              block_not_found=False):
+              block_not_found=False,
+              is_optimistic=False):
     """
     Run on_block and on_attestation
     """
     yield get_block_file_name(signed_block), signed_block
 
     if not valid:
-        try:
-            run_on_block(spec, store, signed_block, valid=True)
-        except (AssertionError, BlockNotFoundException) as e:
-            if isinstance(e, BlockNotFoundException) and not block_not_found:
-                assert False
-            test_steps.append({
-                'block': get_block_file_name(signed_block),
-                'valid': False,
-            })
-            return
-        else:
-            assert False
-
-    run_on_block(spec, store, signed_block, valid=True)
-    test_steps.append({'block': get_block_file_name(signed_block)})
+        if is_optimistic:
+            run_on_block(spec, store, signed_block, valid=True)
+            test_steps.append({
+                'block': get_block_file_name(signed_block),
+                'valid': False,
+            })
+        else:
+            try:
+                run_on_block(spec, store, signed_block, valid=True)
+            except (AssertionError, BlockNotFoundException) as e:
+                if isinstance(e, BlockNotFoundException) and not block_not_found:
+                    assert False
+                test_steps.append({
+                    'block': get_block_file_name(signed_block),
+                    'valid': False,
+                })
+                return
+            else:
+                assert False
+    else:
+        run_on_block(spec, store, signed_block, valid=True)
+        test_steps.append({'block': get_block_file_name(signed_block)})
 
     # An on_block step implies receiving block's attestations
     for attestation in signed_block.message.body.attestations:
@@ -153,25 +152,26 @@
     block_root = signed_block.message.hash_tree_root()
     assert store.blocks[block_root] == signed_block.message
     assert store.block_states[block_root].hash_tree_root() == signed_block.message.state_root
-    test_steps.append({
-        'checks': {
-            'time': int(store.time),
-            'head': get_formatted_head_output(spec, store),
-            'justified_checkpoint': {
-                'epoch': int(store.justified_checkpoint.epoch),
-                'root': encode_hex(store.justified_checkpoint.root),
-            },
-            'finalized_checkpoint': {
-                'epoch': int(store.finalized_checkpoint.epoch),
-                'root': encode_hex(store.finalized_checkpoint.root),
-            },
-            'best_justified_checkpoint': {
-                'epoch': int(store.best_justified_checkpoint.epoch),
-                'root': encode_hex(store.best_justified_checkpoint.root),
-            },
-            'proposer_boost_root': encode_hex(store.proposer_boost_root),
-        }
-    })
+    if not is_optimistic:
+        test_steps.append({
+            'checks': {
+                'time': int(store.time),
+                'head': get_formatted_head_output(spec, store),
+                'justified_checkpoint': {
+                    'epoch': int(store.justified_checkpoint.epoch),
+                    'root': encode_hex(store.justified_checkpoint.root),
+                },
+                'finalized_checkpoint': {
+                    'epoch': int(store.finalized_checkpoint.epoch),
+                    'root': encode_hex(store.finalized_checkpoint.root),
+                },
+                'best_justified_checkpoint': {
+                    'epoch': int(store.best_justified_checkpoint.epoch),
+                    'root': encode_hex(store.best_justified_checkpoint.root),
+                },
+                'proposer_boost_root': encode_hex(store.proposer_boost_root),
+            }
+        })
 
     return store.block_states[signed_block.message.hash_tree_root()]
@@ -0,0 +1,204 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import (
+    Dict,
+    Optional,
+)
+
+from eth_utils import encode_hex
+
+from eth2spec.utils.ssz.ssz_typing import Bytes32
+from eth2spec.test.helpers.fork_choice import (
+    add_block,
+)
+
+
+class PayloadStatusV1StatusAlias(Enum):
+    NOT_VALIDATED = "NOT_VALIDATED"
+    INVALIDATED = "INVALIDATED"
+
+
+class PayloadStatusV1Status(Enum):
+    VALID = "VALID"
+    INVALID = "INVALID"
+    SYNCING = "SYNCING"
+    ACCEPTED = "ACCEPTED"
+    INVALID_BLOCK_HASH = "INVALID_BLOCK_HASH"
+
+    @property
+    def alias(self) -> PayloadStatusV1StatusAlias:
+        if self.value in (self.SYNCING.value, self.ACCEPTED.value):
+            return PayloadStatusV1StatusAlias.NOT_VALIDATED
+        elif self.value in (self.INVALID.value, self.INVALID_BLOCK_HASH.value):
+            return PayloadStatusV1StatusAlias.INVALIDATED
+
+
+@dataclass
+class PayloadStatusV1:
+    status: PayloadStatusV1Status = PayloadStatusV1Status.VALID
+    latest_valid_hash: Optional[Bytes32] = None
+    validation_error: Optional[str] = None
+
+    @property
+    def formatted_output(self):
+        return {
+            'status': str(self.status.value),
+            'latest_valid_hash': encode_hex(self.latest_valid_hash) if self.latest_valid_hash is not None else None,
+            'validation_error': str(self.validation_error) if self.validation_error is not None else None
+        }
+
+
+class MegaStore(object):
+    spec = None
+    fc_store = None
+    opt_store = None
+    block_payload_statuses: Dict[Bytes32, PayloadStatusV1] = dict()
+
+    def __init__(self, spec, fc_store, opt_store):
+        self.spec = spec
+        self.fc_store = fc_store
+        self.opt_store = opt_store
+
+
+def get_optimistic_store(spec, anchor_state, anchor_block):
+    assert anchor_block.state_root == anchor_state.hash_tree_root()
+
+    opt_store = spec.OptimisticStore(
+        optimistic_roots=set(),
+        head_block_root=anchor_block.hash_tree_root(),
+
+    )
+    anchor_block_root = anchor_block.hash_tree_root()
+    opt_store.blocks[anchor_block_root] = anchor_block.copy()
+    opt_store.block_states[anchor_block_root] = anchor_state.copy()
+
+    return opt_store
+
+
+def get_valid_flag_value(status: PayloadStatusV1Status) -> bool:
+    if status == PayloadStatusV1Status.VALID:
+        return True
+    elif status.alias == PayloadStatusV1StatusAlias.NOT_VALIDATED:
+        return True
+    else:
+        # status.alias == PayloadStatusV1StatusAlias.INVALIDATED or other cases
+        return False
+
+
+def add_optimistic_block(spec, mega_store, signed_block, test_steps,
+                         payload_status=None, status=PayloadStatusV1Status.SYNCING):
+    """
+    Add a block with optimistic sync logic
+
+    ``valid`` indicates if the given ``signed_block.message.body.execution_payload`` is valid/invalid
+    from ``notify_new_payload`` method response.
+    """
+    block = signed_block.message
+    block_root = block.hash_tree_root()
+    el_block_hash = block.body.execution_payload.block_hash
+
+    if payload_status is None:
+        payload_status = PayloadStatusV1(status=status)
+        if payload_status.status == PayloadStatusV1Status.VALID:
+            payload_status.latest_valid_hash = el_block_hash
+
+    mega_store.block_payload_statuses[block_root] = payload_status
+    test_steps.append({
+        'block_hash': encode_hex(el_block_hash),
+        'payload_status': payload_status.formatted_output,
+    })
+
+    # Set `valid` flag
+    valid = get_valid_flag_value(payload_status.status)
+
+    # Optimistic sync
+
+    # Case: INVALID
+    if payload_status.status == PayloadStatusV1Status.INVALID:
+        # Update parent status to INVALID
+        assert payload_status.latest_valid_hash is not None
+        current_block = block
+        while el_block_hash != payload_status.latest_valid_hash and el_block_hash != spec.Bytes32():
+            current_block_root = current_block.hash_tree_root()
+            assert current_block_root in mega_store.block_payload_statuses
+            mega_store.block_payload_statuses[current_block_root].status = PayloadStatusV1Status.INVALID
+            # Get parent
+            current_block = mega_store.fc_store.blocks[current_block.parent_root]
+            el_block_hash = current_block.body.execution_payload.block_hash
+
+    yield from add_block(spec, mega_store.fc_store, signed_block,
+                         valid=valid,
+                         test_steps=test_steps,
+                         is_optimistic=True)
+
+    # Update stores
+    is_optimistic_candidate = spec.is_optimistic_candidate_block(
+        mega_store.opt_store,
+        current_slot=spec.get_current_slot(mega_store.fc_store),
+        block=signed_block.message,
+    )
+    if is_optimistic_candidate:
+        mega_store.opt_store.optimistic_roots.add(block_root)
+        mega_store.opt_store.blocks[block_root] = signed_block.message.copy()
+        if not is_invalidated(mega_store, block_root):
+            mega_store.opt_store.block_states[block_root] = mega_store.fc_store.block_states[block_root].copy()
+
+    # Clean up the invalidated blocks
+    clean_up_store(mega_store)
+
+    # Update head
+    mega_store.opt_store.head_block_root = get_opt_head_block_root(spec, mega_store)
+    test_steps.append({
+        'checks': {
+            'head': get_formatted_optimistic_head_output(mega_store),
+        }
+    })
+
+
+def get_opt_head_block_root(spec, mega_store):
+    """
+    Copied and modified from fork-choice spec `get_head` function.
+    """
+    store = mega_store.fc_store
+
+    # Get filtered block tree that only includes viable branches
+    blocks = spec.get_filtered_block_tree(store)
+    # Execute the LMD-GHOST fork choice
+    head = store.justified_checkpoint.root
+    while True:
+        children = [
+            root for root in blocks.keys()
+            if (
+                blocks[root].parent_root == head
+                and not is_invalidated(mega_store, root)  # For optimistic sync
+            )
+        ]
+        if len(children) == 0:
+            return head
+        # Sort by latest attesting balance with ties broken lexicographically
+        # Ties broken by favoring block with lexicographically higher root
+        head = max(children, key=lambda root: (spec.get_latest_attesting_balance(store, root), root))
+
+
+def is_invalidated(mega_store, block_root):
+    if block_root in mega_store.block_payload_statuses:
+        return mega_store.block_payload_statuses[block_root].status.alias == PayloadStatusV1StatusAlias.INVALIDATED
+    else:
+        return False
+
+
+def get_formatted_optimistic_head_output(mega_store):
+    head = mega_store.opt_store.head_block_root
+    slot = mega_store.fc_store.blocks[head].slot
+    return {
+        'slot': int(slot),
+        'root': encode_hex(head),
+    }
+
+
+def clean_up_store(mega_store):
+    """
+    Remove invalidated blocks
+    """
+    # TODO
+    ...
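As a quick illustration of the status-to-`valid` mapping that `add_optimistic_block` derives via `get_valid_flag_value` before handing the block to `add_block`:

```python
from eth2spec.test.helpers.optimistic_sync import (
    PayloadStatusV1Status,
    get_valid_flag_value,
)

# VALID and the NOT_VALIDATED aliases (SYNCING, ACCEPTED) are imported optimistically;
# the INVALIDATED aliases (INVALID, INVALID_BLOCK_HASH) are not.
assert get_valid_flag_value(PayloadStatusV1Status.VALID) is True
assert get_valid_flag_value(PayloadStatusV1Status.SYNCING) is True
assert get_valid_flag_value(PayloadStatusV1Status.ACCEPTED) is True
assert get_valid_flag_value(PayloadStatusV1Status.INVALID) is False
assert get_valid_flag_value(PayloadStatusV1Status.INVALID_BLOCK_HASH) is False
```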
@@ -0,0 +1,14 @@
+from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
+from eth2spec.test.helpers.constants import BELLATRIX
+
+
+if __name__ == "__main__":
+    bellatrix_mods = {key: 'eth2spec.test.bellatrix.sync.test_' + key for key in [
+        'optimistic',
+    ]}
+
+    all_mods = {
+        BELLATRIX: bellatrix_mods,
+    }
+
+    run_state_test_generators(runner_name="sync", all_mods=all_mods)
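For reference, the module map above resolves to the single new test module:

```python
bellatrix_mods = {key: 'eth2spec.test.bellatrix.sync.test_' + key for key in ['optimistic']}
assert bellatrix_mods == {'optimistic': 'eth2spec.test.bellatrix.sync.test_optimistic'}
```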
@@ -0,0 +1,2 @@
+pytest>=4.4
+../../../[generator]