Mirror of https://github.com/status-im/eth2.0-specs.git (synced 2025-01-10 02:35:41 +00:00)

Commit 46008929ac: Merge branch 'dev'
@@ -11,7 +11,7 @@ This repository hosts the current Eth2 specifications. Discussions about design

[![GitHub release](https://img.shields.io/github/v/release/ethereum/eth2.0-specs)](https://github.com/ethereum/eth2.0-specs/releases/) [![PyPI version](https://badge.fury.io/py/eth2spec.svg)](https://badge.fury.io/py/eth2spec)

-Core specifications for Eth2 clients be found in [specs](specs/). These are divided into features.
+Core specifications for Eth2 clients can be found in [specs](specs/). These are divided into features.
Features are researched and developed in parallel, and then consolidated into sequential upgrades when ready.

The current features are:
setup.py
@@ -447,7 +447,7 @@ class AltairSpecBuilder(Phase0SpecBuilder):
    @classmethod
    def imports(cls, preset_name: str) -> str:
        return super().imports(preset_name) + '\n' + f'''
-from typing import NewType, Union
+from typing import NewType, Union as PyUnion

from eth2spec.phase0 import {preset_name} as phase0
from eth2spec.utils.ssz.ssz_typing import Path

@@ -463,7 +463,7 @@ GeneralizedIndex = NewType('GeneralizedIndex', int)
    @classmethod
    def sundry_functions(cls) -> str:
        return super().sundry_functions() + '\n\n' + '''
-def get_generalized_index(ssz_class: Any, *path: Sequence[Union[int, SSZVariableName]]) -> GeneralizedIndex:
+def get_generalized_index(ssz_class: Any, *path: Sequence[PyUnion[int, SSZVariableName]]) -> GeneralizedIndex:
    ssz_path = Path(ssz_class)
    for item in path:
        ssz_path = ssz_path / item

@@ -487,14 +487,14 @@ def get_generalized_index(ssz_class: Any, *path: Sequence[Union[int, SSZVariable
#
# MergeSpecBuilder
#
-class MergeSpecBuilder(Phase0SpecBuilder):
+class MergeSpecBuilder(AltairSpecBuilder):
    fork: str = MERGE

    @classmethod
    def imports(cls, preset_name: str):
        return super().imports(preset_name) + f'''
from typing import Protocol
-from eth2spec.phase0 import {preset_name} as phase0
+from eth2spec.altair import {preset_name} as altair
from eth2spec.utils.ssz.ssz_typing import Bytes20, ByteList, ByteVector, uint256, Union
'''

@@ -509,7 +509,7 @@ ExecutionState = Any


def get_pow_block(hash: Bytes32) -> PowBlock:
-    return PowBlock(block_hash=hash, is_valid=True, is_processed=True,
+    return PowBlock(block_hash=hash, parent_hash=Bytes32(), is_valid=True, is_processed=True,
                    total_difficulty=uint256(0), difficulty=uint256(0))
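The `Union as PyUnion` alias above exists because the Merge builder now pulls the SSZ `Union` type into the same namespace via `ssz_typing`; renaming the `typing` helper keeps the two from shadowing each other. A toy illustration of the aliasing pattern (the names below are illustrative, not part of the pyspec):

```python
from typing import NewType, Union as PyUnion

# Stand-in for the SSZ-path element type used by get_generalized_index.
SSZVariableName = NewType('SSZVariableName', str)


def describe_path_item(item: PyUnion[int, SSZVariableName]) -> str:
    # With the alias, a separate SSZ `Union` class can live in this namespace
    # without clobbering the typing construct used in annotations.
    return f"path item: {item!r}"


print(describe_path_item(3))
print(describe_path_item(SSZVariableName('state_roots')))
```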
@@ -844,19 +844,15 @@ class PySpecCommand(Command):
        if len(self.md_doc_paths) == 0:
            print("no paths were specified, using default markdown file paths for pyspec"
                  " build (spec fork: %s)" % self.spec_fork)
-            if self.spec_fork == PHASE0:
+            if self.spec_fork in (PHASE0, ALTAIR, MERGE):
                self.md_doc_paths = """
                    specs/phase0/beacon-chain.md
                    specs/phase0/fork-choice.md
                    specs/phase0/validator.md
                    specs/phase0/weak-subjectivity.md
                """
-            elif self.spec_fork == ALTAIR:
-                self.md_doc_paths = """
-                    specs/phase0/beacon-chain.md
-                    specs/phase0/fork-choice.md
-                    specs/phase0/validator.md
-                    specs/phase0/weak-subjectivity.md
+            if self.spec_fork in (ALTAIR, MERGE):
+                self.md_doc_paths += """
                    specs/altair/beacon-chain.md
                    specs/altair/bls.md
                    specs/altair/fork.md

@@ -864,18 +860,14 @@ class PySpecCommand(Command):
                    specs/altair/p2p-interface.md
                    specs/altair/sync-protocol.md
                """
-            elif self.spec_fork == MERGE:
-                self.md_doc_paths = """
-                    specs/phase0/beacon-chain.md
-                    specs/phase0/fork-choice.md
-                    specs/phase0/validator.md
-                    specs/phase0/weak-subjectivity.md
+            if self.spec_fork == MERGE:
+                self.md_doc_paths += """
                    specs/merge/beacon-chain.md
                    specs/merge/fork.md
                    specs/merge/fork-choice.md
                    specs/merge/validator.md
                """
        else:
            if len(self.md_doc_paths) == 0:
                raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork)

        self.parsed_md_doc_paths = self.md_doc_paths.split()

@@ -1024,7 +1016,7 @@ setup(
        "py_ecc==5.2.0",
        "milagro_bls_binding==1.6.3",
        "dataclasses==0.6",
-        "remerkleable==0.1.21",
+        "remerkleable==0.1.22",
        RUAMEL_YAML_VERSION,
        "lru-dict==1.1.6",
        MARKO_VERSION,
@@ -139,6 +139,8 @@ def get_sync_subcommittee_pubkeys(state: BeaconState, subcommittee_index: uint64

- _[IGNORE]_ The contribution's slot is for the current slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance), i.e. `contribution.slot == current_slot`.
- _[REJECT]_ The subcommittee index is in the allowed range, i.e. `contribution.subcommittee_index < SYNC_COMMITTEE_SUBNET_COUNT`.
+- _[REJECT]_ The contribution has participants --
+  that is, `any(contribution.aggregation_bits)`.
- _[REJECT]_ `contribution_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_sync_committee_aggregator(contribution_and_proof.selection_proof)` returns `True`.
- _[REJECT]_ The aggregator's validator index is in the declared subcommittee of the current sync committee --
  i.e. `state.validators[contribution_and_proof.aggregator_index].pubkey in get_sync_subcommittee_pubkeys(state, contribution.subcommittee_index)`.

@@ -354,7 +354,7 @@ def is_sync_committee_aggregator(signature: BLSSignature) -> bool:

If a validator is selected to aggregate the `SyncCommitteeMessage`s produced on a subnet during a given `slot`, they construct an aggregated `SyncCommitteeContribution`.

-Given all of the (valid) collected `sync_committee_messages: Set[SyncCommitteeMessage]` from the `sync_committee_{subnet_id}` gossip during the selected `slot` with an equivalent `beacon_block_root` to that of the aggregator, the aggregator creates a `contribution: SyncCommitteeContribution` with the following fields:
+Collect all of the (valid) `sync_committee_messages: Set[SyncCommitteeMessage]` from the `sync_committee_{subnet_id}` gossip during the selected `slot` with an equivalent `beacon_block_root` to that of the aggregator. If `len(sync_committee_messages) > 0`, the aggregator creates a `contribution: SyncCommitteeContribution` with the following fields:

###### Slot
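The new participation condition is a cheap structural check that runs before any signature verification. A minimal, self-contained sketch of the check, using a simple stand-in object rather than the spec's `SyncCommitteeContribution` container:

```python
from types import SimpleNamespace


def has_participants(contribution) -> bool:
    # The added [REJECT] condition: at least one aggregation bit must be set,
    # otherwise the contribution carries no signatures worth aggregating.
    return any(contribution.aggregation_bits)


empty = SimpleNamespace(aggregation_bits=[False] * 128)
non_empty = SimpleNamespace(aggregation_bits=[False, True, False])
assert not has_participants(empty)
assert has_participants(non_empty)
```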
@@ -1,7 +1,5 @@
# Ethereum 2.0 The Merge

-**Warning**: This document is currently based on [Phase 0](../phase0/beacon-chain.md) and will be rebased on [Altair](../altair/beacon-chain.md).
-
**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

@@ -69,7 +67,17 @@ This patch adds transaction execution to the beacon chain as part of the Merge f
#### `BeaconBlockBody`

```python
-class BeaconBlockBody(phase0.BeaconBlockBody):
+class BeaconBlockBody(Container):
+    randao_reveal: BLSSignature
+    eth1_data: Eth1Data  # Eth1 data vote
+    graffiti: Bytes32  # Arbitrary data
+    # Operations
+    proposer_slashings: List[ProposerSlashing, MAX_PROPOSER_SLASHINGS]
+    attester_slashings: List[AttesterSlashing, MAX_ATTESTER_SLASHINGS]
+    attestations: List[Attestation, MAX_ATTESTATIONS]
+    deposits: List[Deposit, MAX_DEPOSITS]
+    voluntary_exits: List[SignedVoluntaryExit, MAX_VOLUNTARY_EXITS]
+    sync_aggregate: SyncAggregate
    # Execution
    execution_payload: ExecutionPayload  # [New in Merge]
```
@@ -77,7 +85,41 @@ class BeaconBlockBody(phase0.BeaconBlockBody):
#### `BeaconState`

```python
-class BeaconState(phase0.BeaconState):
+class BeaconState(Container):
+    # Versioning
+    genesis_time: uint64
+    genesis_validators_root: Root
+    slot: Slot
+    fork: Fork
+    # History
+    latest_block_header: BeaconBlockHeader
+    block_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT]
+    state_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT]
+    historical_roots: List[Root, HISTORICAL_ROOTS_LIMIT]
+    # Eth1
+    eth1_data: Eth1Data
+    eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH]
+    eth1_deposit_index: uint64
+    # Registry
+    validators: List[Validator, VALIDATOR_REGISTRY_LIMIT]
+    balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT]
+    # Randomness
+    randao_mixes: Vector[Bytes32, EPOCHS_PER_HISTORICAL_VECTOR]
+    # Slashings
+    slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR]  # Per-epoch sums of slashed effective balances
+    # Participation
+    previous_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT]
+    current_epoch_participation: List[ParticipationFlags, VALIDATOR_REGISTRY_LIMIT]
+    # Finality
+    justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH]  # Bit set for every recent justified epoch
+    previous_justified_checkpoint: Checkpoint
+    current_justified_checkpoint: Checkpoint
+    finalized_checkpoint: Checkpoint
+    # Inactivity
+    inactivity_scores: List[uint64, VALIDATOR_REGISTRY_LIMIT]
+    # Sync
+    current_sync_committee: SyncCommittee
+    next_sync_committee: SyncCommittee
    # Execution
    latest_execution_payload_header: ExecutionPayloadHeader  # [New in Merge]
```
@@ -190,6 +232,7 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None:
    process_randao(state, block.body)
    process_eth1_data(state, block.body)
    process_operations(state, block.body)
+    process_sync_aggregate(state, block.body.sync_aggregate)
    if is_execution_enabled(state, block.body):
        process_execution_payload(state, block.body.execution_payload, EXECUTION_ENGINE)  # [New in Merge]
```
@@ -232,7 +275,7 @@ def process_execution_payload(state: BeaconState, payload: ExecutionPayload, exe

*Note*: The function `initialize_beacon_state_from_eth1` is modified for pure Merge testing only.

-*Note*: The function `initialize_beacon_state_from_eth1` is modified to use `MERGE_FORK_VERSION` and initialize `latest_execution_payload_header`.
+*Note*: The function `initialize_beacon_state_from_eth1` is modified: (1) using `MERGE_FORK_VERSION` as the current fork version, (2) utilizing the Merge `BeaconBlockBody` when constructing the initial `latest_block_header`, and (3) initializing `latest_execution_payload_header`.

```python
def initialize_beacon_state_from_eth1(eth1_block_hash: Bytes32,

@@ -269,6 +312,11 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Bytes32,
    # Set genesis validators root for domain separation and chain versioning
    state.genesis_validators_root = hash_tree_root(state.validators)

+    # Fill in sync committees
+    # Note: A duplicate committee is assigned for the current and next committee at genesis
+    state.current_sync_committee = get_next_sync_committee(state)
+    state.next_sync_committee = get_next_sync_committee(state)
+
    # [New in Merge] Initialize the execution payload header (with block number set to 0)
    state.latest_execution_payload_header.block_hash = eth1_block_hash
    state.latest_execution_payload_header.timestamp = eth1_timestamp
@@ -82,6 +82,7 @@ class TransitionStore(object):
@dataclass
class PowBlock(object):
    block_hash: Hash32
+    parent_hash: Hash32
    is_processed: boolean
    is_valid: boolean
    total_difficulty: uint256

@@ -99,9 +100,10 @@ Let `get_pow_block(block_hash: Hash32) -> PowBlock` be the function that given t
Used by fork-choice handler, `on_block`.

```python
-def is_valid_terminal_pow_block(transition_store: TransitionStore, block: PowBlock) -> bool:
+def is_valid_terminal_pow_block(transition_store: TransitionStore, block: PowBlock, parent: PowBlock) -> bool:
    is_total_difficulty_reached = block.total_difficulty >= transition_store.transition_total_difficulty
-    return block.is_valid and is_total_difficulty_reached
+    is_parent_total_difficulty_valid = parent.total_difficulty < transition_store.transition_total_difficulty
+    return block.is_valid and is_total_difficulty_reached and is_parent_total_difficulty_valid
```

## Updated fork-choice handlers

@@ -130,8 +132,9 @@ def on_block(store: Store, signed_block: SignedBeaconBlock, transition_store: Tr
    if (transition_store is not None) and is_merge_block(pre_state, block):
        # Delay consideration of block until PoW block is processed by the PoW node
        pow_block = get_pow_block(block.body.execution_payload.parent_hash)
+        pow_parent = get_pow_block(pow_block.parent_hash)
        assert pow_block.is_processed
-        assert is_valid_terminal_pow_block(transition_store, pow_block)
+        assert is_valid_terminal_pow_block(transition_store, pow_block, pow_parent)

    # Check the block is valid and compute the post-state
    state = pre_state.copy()
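The extra `parent` argument makes the predicate select exactly the first PoW block to cross the transition total difficulty: the block itself must reach the threshold while its parent stays below it. A self-contained sketch with simplified stand-ins for `PowBlock` and the transition store:

```python
from dataclasses import dataclass


@dataclass
class PowBlock:
    block_hash: bytes
    parent_hash: bytes
    is_valid: bool
    is_processed: bool
    total_difficulty: int


TRANSITION_TOTAL_DIFFICULTY = 1000  # stand-in for transition_store.transition_total_difficulty


def is_valid_terminal_pow_block(block: PowBlock, parent: PowBlock) -> bool:
    is_total_difficulty_reached = block.total_difficulty >= TRANSITION_TOTAL_DIFFICULTY
    is_parent_total_difficulty_valid = parent.total_difficulty < TRANSITION_TOTAL_DIFFICULTY
    return block.is_valid and is_total_difficulty_reached and is_parent_total_difficulty_valid


parent = PowBlock(b'\x01' * 32, b'\x00' * 32, True, True, total_difficulty=990)
child = PowBlock(b'\x02' * 32, b'\x01' * 32, True, True, total_difficulty=1005)
assert is_valid_terminal_pow_block(child, parent)       # first block over the threshold
assert not is_valid_terminal_pow_block(parent, parent)  # a block below the threshold never qualifies
```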
@@ -43,15 +43,18 @@ Note that for the pure Merge networks, we don't apply `upgrade_to_merge` since i

### Upgrading the state

-If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == MERGE_FORK_EPOCH`, an irregular state change is made to upgrade to Merge.
+As with the Phase0-to-Altair upgrade, the `state_transition` is modified to upgrade the `BeaconState`.
+The `BeaconState` upgrade runs as part of `process_slots`; slots with missing block proposals do not affect the upgrade time.
+
+If `state.slot % SLOTS_PER_EPOCH == 0` and `compute_epoch_at_slot(state.slot) == MERGE_FORK_EPOCH`, an irregular state change is made to upgrade to Merge.
The upgrade occurs after the completion of the inner loop of `process_slots` that sets `state.slot` equal to `MERGE_FORK_EPOCH * SLOTS_PER_EPOCH`.
Care must be taken when transitioning through the fork boundary as implementations will need a modified [state transition function](../phase0/beacon-chain.md#beacon-chain-state-transition-function) that deviates from the Phase 0 document.
In particular, the outer `state_transition` function defined in the Phase 0 document will not expose the precise fork slot to execute the upgrade in the presence of skipped slots at the fork boundary. Instead the logic must be within `process_slots`.
+
+When multiple upgrades are scheduled for the same epoch (common for test-networks),
+all the upgrades run in sequence before resuming the regular state transition.
```python
-def upgrade_to_merge(pre: phase0.BeaconState) -> BeaconState:
-    epoch = phase0.get_current_epoch(pre)
+def upgrade_to_merge(pre: altair.BeaconState) -> BeaconState:
+    epoch = altair.get_current_epoch(pre)
    post = BeaconState(
        # Versioning
        genesis_time=pre.genesis_time,

@@ -78,14 +81,19 @@ def upgrade_to_merge(pre: phase0.BeaconState) -> BeaconState:
        randao_mixes=pre.randao_mixes,
        # Slashings
        slashings=pre.slashings,
-        # Attestations
-        previous_epoch_attestations=pre.previous_epoch_attestations,
-        current_epoch_attestations=pre.current_epoch_attestations,
+        # Participation
+        previous_epoch_participation=pre.previous_epoch_participation,
+        current_epoch_participation=pre.current_epoch_participation,
        # Finality
        justification_bits=pre.justification_bits,
        previous_justified_checkpoint=pre.previous_justified_checkpoint,
        current_justified_checkpoint=pre.current_justified_checkpoint,
        finalized_checkpoint=pre.finalized_checkpoint,
+        # Inactivity
+        inactivity_scores=pre.inactivity_scores,
+        # Sync
+        current_sync_committee=pre.current_sync_committee,
+        next_sync_committee=pre.next_sync_committee,
        # Execution-layer
        latest_execution_payload_header=ExecutionPayloadHeader(),
    )
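The requirement that the upgrade live inside `process_slots` (so skipped slots at the fork boundary cannot bypass it) can be modeled with a toy state machine; everything below is a simplified stand-in, not the spec's `BeaconState` or `process_slots`:

```python
SLOTS_PER_EPOCH = 8
MERGE_FORK_EPOCH = 3


def upgrade_to_merge(state: dict) -> dict:
    # Stand-in for the spec's upgrade function: only the fork label changes here.
    return dict(state, fork='merge')


def process_slots(state: dict, slot: int) -> dict:
    while state['slot'] < slot:
        state = dict(state, slot=state['slot'] + 1)
        # The fork check lives in the inner loop, not in the outer state_transition,
        # so a missed proposal at the boundary slot still triggers the upgrade.
        if state['slot'] % SLOTS_PER_EPOCH == 0 and state['slot'] // SLOTS_PER_EPOCH == MERGE_FORK_EPOCH:
            state = upgrade_to_merge(state)
    return state


state = {'slot': 20, 'fork': 'altair'}
state = process_slots(state, 30)  # advances past slot 24, the first Merge slot, without a block there
assert state['fork'] == 'merge'
```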
@@ -1,7 +1,5 @@
# Ethereum 2.0 The Merge

-**Warning:** This document is currently based on [Phase 0](../phase0/validator.md) but will be rebased to [Altair](../altair/validator.md) once the latter is shipped.
-
**Notice**: This document is a work-in-progress for researchers and implementers.

## Table of contents

@@ -19,7 +17,6 @@
- [Block proposal](#block-proposal)
  - [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody)
    - [Execution Payload](#execution-payload)
-      - [`get_pow_chain_head`](#get_pow_chain_head)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->

@@ -30,9 +27,11 @@ This document represents the changes to be made in the code of an "honest valida

## Prerequisites

-This document is an extension of the [Phase 0 -- Validator](../phase0/validator.md). All behaviors and definitions defined in the Phase 0 doc carry over unless explicitly noted or overridden.
+This document is an extension of the [Altair -- Honest Validator](../altair/validator.md) guide.
+All behaviors and definitions defined in this document, and documents it extends, carry over unless explicitly noted or overridden.

-All terminology, constants, functions, and protocol mechanics defined in the updated Beacon Chain doc of [The Merge](./beacon-chain.md) are requisite for this document and used throughout. Please see related Beacon Chain doc before continuing and use them as a reference throughout.
+All terminology, constants, functions, and protocol mechanics defined in the updated Beacon Chain doc of [The Merge](./beacon-chain.md) are requisite for this document and used throughout.
+Please see related Beacon Chain doc before continuing and use them as a reference throughout.

## Protocols
@@ -63,13 +62,19 @@ All validator responsibilities remain unchanged other than those noted below. Na

##### Execution Payload

-###### `get_pow_chain_head`
-
-Let `get_pow_chain_head() -> PowBlock` be the function that returns the head of the PoW chain. The body of the function is implementation specific.
-
-* Set `block.body.execution_payload = get_execution_payload(state, transition_store, execution_engine)` where:
+* Set `block.body.execution_payload = get_execution_payload(state, transition_store, execution_engine, pow_chain)` where:

```python
+def get_pow_block_at_total_difficulty(total_difficulty: uint256, pow_chain: Sequence[PowBlock]) -> Optional[PowBlock]:
+    # `pow_chain` abstractly represents all blocks in the PoW chain
+    for block in pow_chain:
+        parent = get_pow_block(block.parent_hash)
+        if block.total_difficulty >= total_difficulty and parent.total_difficulty < total_difficulty:
+            return block
+
+    return None
+
+
def compute_randao_mix(state: BeaconState, randao_reveal: BLSSignature) -> Bytes32:
    epoch = get_current_epoch(state)
    return xor(get_randao_mix(state, epoch), hash(randao_reveal))

@@ -87,15 +92,16 @@ def produce_execution_payload(state: BeaconState,
def get_execution_payload(state: BeaconState,
                          transition_store: TransitionStore,
                          randao_reveal: BLSSignature,
-                          execution_engine: ExecutionEngine) -> ExecutionPayload:
+                          execution_engine: ExecutionEngine,
+                          pow_chain: Sequence[PowBlock]) -> ExecutionPayload:
    if not is_merge_complete(state):
-        pow_block = get_pow_chain_head()
-        if not is_valid_terminal_pow_block(transition_store, pow_block):
+        terminal_pow_block = get_pow_block_at_total_difficulty(transition_store.transition_total_difficulty, pow_chain)
+        if terminal_pow_block is None:
            # Pre-merge, empty payload
            return ExecutionPayload()
        else:
            # Signify merge via producing on top of the last PoW block
-            return produce_execution_payload(state, pow_block.block_hash, randao_reveal, execution_engine)
+            return produce_execution_payload(state, terminal_pow_block.block_hash, randao_reveal, execution_engine)

    # Post-merge, normal payload
    parent_hash = state.latest_execution_payload_header.block_hash
|
||||
- [`process_pending_shard_confirmations`](#process_pending_shard_confirmations)
|
||||
- [`charge_confirmed_shard_fees`](#charge_confirmed_shard_fees)
|
||||
- [`reset_pending_shard_work`](#reset_pending_shard_work)
|
||||
- [`process_shard_epoch_increment`](#process_shard_epoch_increment)
|
||||
|
||||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
<!-- /TOC -->
|
||||
@ -179,7 +178,7 @@ class BeaconBlockBody(merge.BeaconBlockBody): # [extends The Merge block body]
|
||||
### `BeaconState`
|
||||
|
||||
```python
|
||||
class BeaconState(merge.BeaconState): # [extends The Merge state]
|
||||
class BeaconState(merge.BeaconState):
|
||||
# [Updated fields] (Warning: this changes with Altair, Sharding will rebase to use participation-flags)
|
||||
previous_epoch_attestations: List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH]
|
||||
current_epoch_attestations: List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH]
|
||||
@ -187,7 +186,6 @@ class BeaconState(merge.BeaconState): # [extends The Merge state]
|
||||
# A ring buffer of the latest slots, with information per active shard.
|
||||
shard_buffer: Vector[List[ShardWork, MAX_SHARDS], SHARD_STATE_MEMORY_SLOTS]
|
||||
shard_gasprice: uint64
|
||||
current_epoch_start_shard: Shard
|
||||
```
|
||||
|
||||
## New containers
|
||||
@ -447,22 +445,10 @@ def get_start_shard(state: BeaconState, slot: Slot) -> Shard:
|
||||
"""
|
||||
Return the start shard at ``slot``.
|
||||
"""
|
||||
current_epoch_start_slot = compute_start_slot_at_epoch(get_current_epoch(state))
|
||||
shard = state.current_epoch_start_shard
|
||||
if slot > current_epoch_start_slot:
|
||||
# Current epoch or the next epoch lookahead
|
||||
for _slot in range(current_epoch_start_slot, slot):
|
||||
committee_count = get_committee_count_per_slot(state, compute_epoch_at_slot(Slot(_slot)))
|
||||
active_shard_count = get_active_shard_count(state, compute_epoch_at_slot(Slot(_slot)))
|
||||
shard = (shard + committee_count) % active_shard_count
|
||||
elif slot < current_epoch_start_slot:
|
||||
# Previous epoch
|
||||
for _slot in list(range(slot, current_epoch_start_slot))[::-1]:
|
||||
committee_count = get_committee_count_per_slot(state, compute_epoch_at_slot(Slot(_slot)))
|
||||
active_shard_count = get_active_shard_count(state, compute_epoch_at_slot(Slot(_slot)))
|
||||
# Ensure positive
|
||||
shard = (shard + active_shard_count - committee_count) % active_shard_count
|
||||
return Shard(shard)
|
||||
epoch = compute_epoch_at_slot(Slot(_slot))
|
||||
committee_count = get_committee_count_per_slot(state, epoch)
|
||||
active_shard_count = get_active_shard_count(state, epoch)
|
||||
return committee_count * slot % active_shard_count
|
||||
```
|
||||
|
||||
#### `compute_shard_from_committee_index`
|
||||
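The replacement body reduces start-shard selection to one expression, `committee_count * slot % active_shard_count` (the new code reads `compute_epoch_at_slot(Slot(_slot))`, which looks like a leftover of the old loop variable and presumably means `slot`). A worked example with toy numbers, assuming a fixed committee count per slot:

```python
def get_start_shard(slot: int, committee_count: int, active_shard_count: int) -> int:
    # Toy version of the simplified formula; the spec derives both counts from the state.
    return committee_count * slot % active_shard_count


ACTIVE_SHARD_COUNT = 64
COMMITTEE_COUNT = 4
# Consecutive slots advance the start shard by committee_count, wrapping modulo the shard count.
assert [get_start_shard(s, COMMITTEE_COUNT, ACTIVE_SHARD_COUNT) for s in (0, 1, 2, 17)] == [0, 4, 8, 4]
```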
@@ -494,9 +480,9 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None:
    process_randao(state, block.body)
    process_eth1_data(state, block.body)
    process_operations(state, block.body)  # [Modified in Sharding]
-    # Pre-merge, skip execution payload processing
-    if is_execution_enabled(state, block):
-        process_execution_payload(state, block.body.execution_payload, EXECUTION_ENGINE)  # [New in Merge]
+    process_sync_aggregate(state, block.body.sync_aggregate)
+    # is_execution_enabled is omitted, execution is enabled by default.
+    process_execution_payload(state, block.body.execution_payload, EXECUTION_ENGINE)
```

#### Operations

@@ -527,7 +513,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None:

```python
def process_attestation(state: BeaconState, attestation: Attestation) -> None:
-    phase0.process_attestation(state, attestation)
+    altair.process_attestation(state, attestation)
    update_pending_shard_work(state, attestation)
```
@@ -681,26 +667,24 @@ This epoch transition overrides the Merge epoch transition:

```python
def process_epoch(state: BeaconState) -> None:
-    # Sharding
+    # Sharding pre-processing
    process_pending_shard_confirmations(state)
    charge_confirmed_shard_fees(state)
    reset_pending_shard_work(state)

-    # Phase0
+    # Base functionality
    process_justification_and_finalization(state)
+    process_inactivity_updates(state)
    process_rewards_and_penalties(state)
    process_registry_updates(state)
    process_slashings(state)

    # Final updates
    process_eth1_data_reset(state)
    process_effective_balance_updates(state)
    process_slashings_reset(state)
    process_randao_mixes_reset(state)
    process_historical_roots_update(state)
-    process_participation_record_updates(state)
-
-    process_shard_epoch_increment(state)
+    process_participation_flag_updates(state)
+    process_sync_committee_updates(state)
```

#### `process_pending_shard_confirmations`

@@ -799,11 +783,3 @@ def reset_pending_shard_work(state: BeaconState) -> None:
        )
    # a shard without committee available defaults to SHARD_WORK_UNCONFIRMED.
```
-
-#### `process_shard_epoch_increment`
-
-```python
-def process_shard_epoch_increment(state: BeaconState) -> None:
-    # Update current_epoch_start_shard
-    state.current_epoch_start_shard = get_start_shard(state, Slot(state.slot + 1))
-```
@@ -29,8 +29,7 @@

## Introduction

-The specification of these changes continues in the same format as the [Phase0](../phase0/p2p-interface.md) and
-[Altair](../altair/p2p-interface.md) network specifications, and assumes them as pre-requisite.
+The specification of these changes continues in the same format as the network specifications of previous upgrades, and assumes them as pre-requisite.
The adjustments and additions for Shards are outlined in this document.

## Constants
@@ -1 +1 @@
-1.1.0-beta.1
+1.1.0-beta.2
@@ -26,6 +26,7 @@ from eth2spec.test.context import (
    with_presets,
    spec_state_test,
    always_bls,
+    spec_test,
)
from eth2spec.utils.hash_function import hash

@@ -112,6 +113,47 @@ def test_invalid_signature_missing_participant(spec, state):
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)


+@with_altair_and_later
+@spec_state_test
+@always_bls
+def test_invalid_signature_no_participants(spec, state):
+    block = build_empty_block_for_next_slot(spec, state)
+    # No participants is an allowed case, but needs a specific signature, not the full-zeroed signature.
+    block.body.sync_aggregate = spec.SyncAggregate(
+        sync_committee_bits=[False] * len(block.body.sync_aggregate.sync_committee_bits),
+        sync_committee_signature=b'\x00' * 96
+    )
+    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
+
+# No-participants, with valid signature, is tested in test_sync_committee_rewards_empty_participants already.
+
+
+@with_altair_and_later
+@spec_state_test
+@always_bls
+def test_invalid_signature_infinite_signature_with_all_participants(spec, state):
+    block = build_empty_block_for_next_slot(spec, state)
+    # Include all participants, try the special-case signature for no-participants
+    block.body.sync_aggregate = spec.SyncAggregate(
+        sync_committee_bits=[True] * len(block.body.sync_aggregate.sync_committee_bits),
+        sync_committee_signature=spec.G2_POINT_AT_INFINITY
+    )
+    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
+
+
+@with_altair_and_later
+@spec_state_test
+@always_bls
+def test_invalid_signature_infinite_signature_with_single_participant(spec, state):
+    block = build_empty_block_for_next_slot(spec, state)
+    # Try to include a single participant with the special-case signature for no-participants.
+    block.body.sync_aggregate = spec.SyncAggregate(
+        sync_committee_bits=[True] + ([False] * (len(block.body.sync_aggregate.sync_committee_bits) - 1)),
+        sync_committee_signature=spec.G2_POINT_AT_INFINITY
+    )
+    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
+
+
@with_altair_and_later
@spec_state_test
@always_bls

@@ -534,6 +576,7 @@ def test_random_all_but_one_participating_with_duplicates(spec, state):

@with_altair_and_later
@with_presets([MAINNET], reason="to create duplicate committee")
+@spec_test
@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold)
@single_phase
def test_random_misc_balances_and_half_participation_with_duplicates(spec, state):

@@ -596,6 +639,7 @@ def test_random_all_but_one_participating_without_duplicates(spec, state):

@with_altair_and_later
@with_presets([MINIMAL], reason="to create nonduplicate committee")
+@spec_test
@with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold)
@single_phase
def test_random_misc_balances_and_half_participation_without_duplicates(spec, state):
@@ -195,17 +195,24 @@ def test_random_inactivity_scores_full_participation_leaking(spec, state):
    assert spec.is_in_inactivity_leak(state)


-def slash_some_validators(spec, state, rng=Random(40404040)):
+def slash_some_validators_for_inactivity_scores_test(spec, state, rng=Random(40404040)):
+    # ``run_inactivity_scores_test`` runs at the next epoch from `state`.
+    # We retrieve the proposer of this future state to avoid
+    # accidentally slashing that validator
+    future_state = state.copy()
+    next_epoch_via_block(spec, future_state)
+
+    proposer_index = spec.get_beacon_proposer_index(future_state)
    # Slash ~1/4 of validators
    for validator_index in range(len(state.validators)):
-        if rng.choice(range(4)) == 0:
+        if rng.choice(range(4)) == 0 and validator_index != proposer_index:
            spec.slash_validator(state, validator_index)


@with_altair_and_later
@spec_state_test
def test_some_slashed_zero_scores_full_participation(spec, state):
-    slash_some_validators(spec, state, rng=Random(33429))
+    slash_some_validators_for_inactivity_scores_test(spec, state, rng=Random(33429))
    yield from run_inactivity_scores_test(
        spec, state,
        set_full_participation, zero_inactivity_scores,

@@ -218,7 +225,7 @@ def test_some_slashed_zero_scores_full_participation(spec, state):
@spec_state_test
@leaking()
def test_some_slashed_zero_scores_full_participation_leaking(spec, state):
-    slash_some_validators(spec, state, rng=Random(33221))
+    slash_some_validators_for_inactivity_scores_test(spec, state, rng=Random(332243))
    yield from run_inactivity_scores_test(
        spec, state,
        set_full_participation, zero_inactivity_scores,

@@ -239,7 +246,7 @@ def test_some_slashed_zero_scores_full_participation_leaking(spec, state):
@spec_state_test
def test_some_slashed_full_random(spec, state):
    rng = Random(1010222)
-    slash_some_validators(spec, state, rng=rng)
+    slash_some_validators_for_inactivity_scores_test(spec, state, rng=rng)
    yield from run_inactivity_scores_test(
        spec, state,
        randomize_attestation_participation, randomize_inactivity_scores, rng=rng,

@@ -251,7 +258,7 @@ def test_some_slashed_full_random(spec, state):
@leaking()
def test_some_slashed_full_random_leaking(spec, state):
    rng = Random(1102233)
-    slash_some_validators(spec, state, rng=rng)
+    slash_some_validators_for_inactivity_scores_test(spec, state, rng=rng)
    yield from run_inactivity_scores_test(
        spec, state,
        randomize_previous_epoch_participation, randomize_inactivity_scores, rng=rng,
@@ -347,10 +347,6 @@ def with_phases(phases, other_phases=None):
            preset_name = kw.pop('preset')
            targets = spec_targets[preset_name]

-            # TODO: test state is dependent on phase0 but is immediately transitioned to later phases.
-            # A new state-creation helper for later phases may be in place, and then tests can run without phase0
-            available_phases.add(PHASE0)
-
            # Populate all phases for multi-phase tests
            phase_dir = {}
            if PHASE0 in available_phases:

@@ -433,23 +429,15 @@ def with_config_overrides(config_overrides):


def is_post_altair(spec):
-    if spec.fork == MERGE:  # TODO: remove parallel Altair-Merge condition after rebase.
-        return False
-    if spec.fork in FORKS_BEFORE_ALTAIR:
-        return False
-    return True
+    return spec.fork not in FORKS_BEFORE_ALTAIR


def is_post_merge(spec):
-    if spec.fork == ALTAIR:  # TODO: remove parallel Altair-Merge condition after rebase.
-        return False
-    if spec.fork in FORKS_BEFORE_MERGE:
-        return False
-    return True
+    return spec.fork not in FORKS_BEFORE_MERGE


-with_altair_and_later = with_phases([ALTAIR])  # TODO: include Merge, but not until Merge work is rebased.
-with_merge_and_later = with_phases([MERGE])
+with_altair_and_later = with_phases([ALTAIR, MERGE])
+with_merge_and_later = with_phases([MERGE])  # TODO: include sharding when spec stabilizes.


def fork_transition_test(pre_fork_name, post_fork_name, fork_epoch=None):
@@ -30,6 +30,7 @@ def get_process_calls(spec):
        # Merge
        'process_application_payload':
            lambda state, block: spec.process_application_payload(state, block.body),
+        # TODO: add sharding processing functions when spec stabilizes.
        # Custody Game
        'process_custody_game_operations':
            lambda state, block: spec.process_custody_game_operations(state, block.body),
@@ -18,12 +18,9 @@ DAS = SpecForkName('das')
ALL_PHASES = (PHASE0, ALTAIR, MERGE)
# The forks that output to the test vectors.
TESTGEN_FORKS = (PHASE0, ALTAIR, MERGE)
-# TODO: everything runs in parallel to Altair.
-# After features are rebased on the Altair fork, this can be reduced to just PHASE0.
-FORKS_BEFORE_ALTAIR = (PHASE0, MERGE, SHARDING, CUSTODY_GAME, DAS)
-
-# TODO: when rebasing Merge onto Altair, add ALTAIR to this tuple.
-FORKS_BEFORE_MERGE = (PHASE0,)
+FORKS_BEFORE_ALTAIR = (PHASE0,)
+FORKS_BEFORE_MERGE = (PHASE0, ALTAIR)

#
# Config
@@ -28,7 +28,7 @@ def get_process_calls(spec):
            'process_participation_record_updates'
        ),
        'process_sync_committee_updates',  # altair
-        'process_shard_epoch_increment'  # sharding
+        # TODO: add sharding processing functions when spec stabilizes.
    ]
@@ -1,7 +1,6 @@
from eth2spec.test.helpers.constants import (
-    ALTAIR,
-    FORKS_BEFORE_ALTAIR,
-    MERGE,
+    ALTAIR, MERGE,
+    FORKS_BEFORE_ALTAIR, FORKS_BEFORE_MERGE,
)
from eth2spec.test.helpers.keys import pubkeys

@@ -25,11 +24,13 @@ def create_genesis_state(spec, validator_balances, activation_threshold):
    deposit_root = b'\x42' * 32

    eth1_block_hash = b'\xda' * 32
+    previous_version = spec.config.GENESIS_FORK_VERSION
    current_version = spec.config.GENESIS_FORK_VERSION

    if spec.fork == ALTAIR:
        current_version = spec.config.ALTAIR_FORK_VERSION
+    elif spec.fork == MERGE:
+        previous_version = spec.config.ALTAIR_FORK_VERSION
+        current_version = spec.config.MERGE_FORK_VERSION

    state = spec.BeaconState(

@@ -41,7 +42,7 @@ def create_genesis_state(spec, validator_balances, activation_threshold):
            block_hash=eth1_block_hash,
        ),
        fork=spec.Fork(
-            previous_version=spec.config.GENESIS_FORK_VERSION,
+            previous_version=previous_version,
            current_version=current_version,
            epoch=spec.GENESIS_EPOCH,
        ),

@@ -73,4 +74,9 @@ def create_genesis_state(spec, validator_balances, activation_threshold):
        state.current_sync_committee = spec.get_next_sync_committee(state)
        state.next_sync_committee = spec.get_next_sync_committee(state)

+    if spec.fork not in FORKS_BEFORE_MERGE:
+        # Initialize the execution payload header (with block number and genesis time set to 0)
+        state.latest_execution_payload_header.block_hash = eth1_block_hash
+        state.latest_execution_payload_header.random = eth1_block_hash
+
    return state
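The added branches in `create_genesis_state` pick the `Fork` versions a state created directly at a given fork should carry. A toy restatement of that selection, with placeholder version strings instead of the configured constants:

```python
GENESIS_FORK_VERSION = '0x00'
ALTAIR_FORK_VERSION = '0x01'
MERGE_FORK_VERSION = '0x02'


def genesis_fork_versions(fork: str) -> tuple:
    previous_version = GENESIS_FORK_VERSION
    current_version = GENESIS_FORK_VERSION
    if fork == 'altair':
        current_version = ALTAIR_FORK_VERSION
    elif fork == 'merge':
        # A Merge-genesis state behaves as if it just upgraded from Altair.
        previous_version = ALTAIR_FORK_VERSION
        current_version = MERGE_FORK_VERSION
    return previous_version, current_version


assert genesis_fork_versions('phase0') == ('0x00', '0x00')
assert genesis_fork_versions('altair') == ('0x00', '0x01')
assert genesis_fork_versions('merge') == ('0x01', '0x02')
```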
@@ -4,9 +4,6 @@ MERGE_FORK_TEST_META_TAGS = {


def run_fork_test(post_spec, pre_state):
-    # Clean up state to be more realistic
-    pre_state.current_epoch_attestations = []
-
    yield 'pre', pre_state

    post_state = post_spec.upgrade_to_merge(pre_state)

@@ -24,10 +21,14 @@ def run_fork_test(post_spec, pre_state):
        'randao_mixes',
        # Slashings
        'slashings',
-        # Attestations
-        'previous_epoch_attestations', 'current_epoch_attestations',
+        # Participation
+        'previous_epoch_participation', 'current_epoch_participation',
        # Finality
        'justification_bits', 'previous_justified_checkpoint', 'current_justified_checkpoint', 'finalized_checkpoint',
+        # Inactivity
+        'inactivity_scores',
+        # Sync
+        'current_sync_committee', 'next_sync_committee'
    ]
    for field in stable_fields:
        assert getattr(pre_state, field) == getattr(post_state, field)
@@ -7,7 +7,7 @@ from eth2spec.test.context import (
)
from eth2spec.test.utils import with_meta_tags
from eth2spec.test.helpers.constants import (
-    PHASE0, MERGE,
+    ALTAIR, MERGE,
    MINIMAL,
)
from eth2spec.test.helpers.state import (

@@ -20,7 +20,7 @@ from eth2spec.test.helpers.merge.fork import (
)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -28,7 +28,7 @@ def test_fork_base_state(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -37,7 +37,7 @@ def test_fork_next_epoch(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -46,7 +46,7 @@ def test_fork_next_epoch_with_block(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -56,7 +56,7 @@ def test_fork_many_next_epoch(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@spec_test
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -64,7 +64,7 @@ def test_fork_random_low_balances(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@spec_test
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -72,7 +72,7 @@ def test_fork_random_misc_balances(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@with_presets([MINIMAL],
              reason="mainnet config leads to larger validator set than limit of public/private keys pre-generated")
@with_custom_state(balances_fn=large_validator_set, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@@ -9,20 +9,17 @@ from eth2spec.test.context import (
)
from eth2spec.test.utils import with_meta_tags
from eth2spec.test.helpers.constants import (
-    PHASE0, MERGE,
+    ALTAIR, MERGE,
    MINIMAL,
)
from eth2spec.test.helpers.merge.fork import (
    MERGE_FORK_TEST_META_TAGS,
    run_fork_test,
)
-from eth2spec.test.helpers.random import (
-    randomize_state,
-    randomize_attestation_participation,
-)
+from eth2spec.test.helpers.random import randomize_state


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -31,7 +28,7 @@ def test_merge_fork_random_0(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -40,7 +37,7 @@ def test_merge_fork_random_1(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -49,7 +46,7 @@ def test_merge_fork_random_2(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_state
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -58,40 +55,7 @@ def test_merge_fork_random_3(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
-@spec_test
-@with_state
-@with_meta_tags(MERGE_FORK_TEST_META_TAGS)
-def test_merge_fork_random_duplicate_attestations(spec, phases, state):
-    randomize_state(spec, state, rng=Random(1111))
-    # Note: `run_fork_test` empties `current_epoch_attestations`
-    state.previous_epoch_attestations = state.previous_epoch_attestations + state.previous_epoch_attestations
-    yield from run_fork_test(phases[MERGE], state)
-
-
-@with_phases(phases=[PHASE0], other_phases=[MERGE])
-@spec_test
-@with_state
-@with_meta_tags(MERGE_FORK_TEST_META_TAGS)
-def test_merge_fork_random_mismatched_attestations(spec, phases, state):
-    # Create a random state
-    randomize_state(spec, state, rng=Random(2222))
-
-    # Now make two copies
-    state_0 = state.copy()
-    state_1 = state.copy()
-
-    # Randomize attestation participation of both
-    randomize_attestation_participation(spec, state_0, rng=Random(3333))
-    randomize_attestation_participation(spec, state_1, rng=Random(4444))
-
-    # Note: `run_fork_test` empties `current_epoch_attestations`
-    # Use pending attestations from both random states in a single state for testing
-    state_0.previous_epoch_attestations = state_0.previous_epoch_attestations + state_1.previous_epoch_attestations
-    yield from run_fork_test(phases[MERGE], state_0)
-
-
-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -100,7 +64,7 @@ def test_merge_fork_random_low_balances(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@spec_test
@with_custom_state(balances_fn=misc_balances, threshold_fn=lambda spec: spec.config.EJECTION_BALANCE)
@with_meta_tags(MERGE_FORK_TEST_META_TAGS)

@@ -109,7 +73,7 @@ def test_merge_fork_random_misc_balances(spec, phases, state):
    yield from run_fork_test(phases[MERGE], state)


-@with_phases(phases=[PHASE0], other_phases=[MERGE])
+@with_phases(phases=[ALTAIR], other_phases=[MERGE])
@with_presets([MINIMAL],
              reason="mainnet config leads to larger validator set than limit of public/private keys pre-generated")
@spec_test