From 88aeba09f6edd9ada8a2ec7fc94e8aa90fb9ecdf Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sat, 12 Oct 2019 12:05:08 +0900 Subject: [PATCH 001/194] Added new shards --- specs/core/1_new_shards.md | 147 +++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 specs/core/1_new_shards.md diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md new file mode 100644 index 000000000..dec84dfe1 --- /dev/null +++ b/specs/core/1_new_shards.md @@ -0,0 +1,147 @@ +# Ethereum 2.0 Phase 1 -- Crosslinks and Shard Data + +**Notice**: This document is a work-in-progress for researchers and implementers. + +## Table of contents + + + +- [Ethereum 2.0 Phase 1 -- Shard Data Chains](#ethereum-20-phase-1----shard-data-chains) + - [Table of contents](#table-of-contents) + - [Introduction](#introduction) + - [Configuration](#configuration) + - [Misc](#misc) + - [Containers](#containers) + - [Beacon Chain Changes](#beacon-chain-changes) + - [New state variables](#new-state-variables) + + + +## Introduction + +This document describes the shard transition function (data layer only) and the shard fork choice rule as part of Phase 1 of Ethereum 2.0. 
+ +## Configuration + +### Misc + +| Name | Value | +| - | - | +| `MAX_SHARDS` | `2**10` (= 1024) | +| `ACTIVE_SHARDS` | `2**6` (= 64) | +| `SHARD_ROOT_HISTORY_LENGTH` | `2**15` (= 32,768) | +| `MAX_CATCHUP` | `2**3` (= 8) | + +## Containers + +### `AttestationData` + +```python +class AttestationData(Container): + # Slot + slot: Slot + # Shard + shard: shard + # LMD GHOST vote + beacon_block_root: Hash + # FFG vote + source: Checkpoint + target: Checkpoint + # Shard data roots + shard_data_roots: List[Hash, MAX_CATCHUP] + # Intermediate state roots + shard_state_roots: List[Hash, MAX_CATCHUP] +``` + +### `Attestation` + +```python +class Attestation(Container): + aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] + data: AttestationData + custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP] + signature: BLSSignature +``` + +## Beacon Chain Changes + +### New state variables + +``` + shard_state_roots: Vector[Hash, MAX_SHARDS] + shard_next_slot: Vector[Slot, MAX_SHARDS] +``` + +### Attestation processing + +```python +def process_attestation(state: BeaconState, attestation: Attestation) -> None: + data = attestation.data + assert shard < ACTIVE_SHARDS + + # Signature check + committee = get_crosslink_committee(state, get_current_epoch(state), data.shard) + for bits in attestation.custody_bits + [attestation.aggregation_bits]: + assert bits == len(committee) + # Check signature + assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) + + # Type 1: on-time attestations + if data.custody_bits != []: + # Correct start slot + assert data.slot == state.shard_next_slot[data.shard] + # Correct data root count + assert len(data.shard_data_roots) == len(attestation.custody_bits) == len(data.shard_state_roots) == min(state.slot - data.slot, MAX_CATCHUP) + # Correct parent block root + assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) + # Apply + online_indices = get_online_indices(state) + 
attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits).intersection(get_online_indices) + if get_total_balance(state, attesting_indices) * 3 >= get_total_balance(state, online_indices) * 2: + state.shard_state_roots[data.shard] = data.shard_state_roots[-1] + state.shard_next_slot[data.shard] += len(data.shard_data_roots) + + # Type 2: delayed attestations + else: + assert slot_to_epoch(data.slot) in (get_current_epoch(state), get_previous_epoch(state)) + assert len(data.shard_data_roots) == len(data.intermediate_state_roots) == 0 + + pending_attestation = PendingAttestation( + slot=data.slot, + shard=data.shard, + aggregation_bits=attestation.aggregation_bits, + inclusion_delay=state.slot - attestation_slot, + proposer_index=get_beacon_proposer_index(state), + ) + + if data.target.epoch == get_current_epoch(state): + assert data.source == state.current_justified_checkpoint + state.current_epoch_attestations.append(pending_attestation) + else: + assert data.source == state.previous_justified_checkpoint + state.previous_epoch_attestations.append(pending_attestation) +``` + +### Fraud proofs + +TODO. 
The intent is to have a single universal fraud proof type, which contains (i) an on-time attestation on shard `s` signing a set of `data_roots`, (ii) an index `i` of a particular data root to focus on, (iii) the full contents of the i'th data, (iii) a Merkle proof to the `shard_state_roots` in the parent block the attestation is referencing, and which then verifies that one of the two conditions is false: + +* `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` +* `execute_state_transition(slot, shard, attestation.shard_state_roots[i-1], parent.shard_state_roots, block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s]`) + +For phase 1, we will use a simple state transition function: + +* Check that `data[:32] == prev_state_root` +* Check that `bls_verify(get_shard_proposer(state, slot, shard), hash_tree_root(data[-96:]), BLSSignature(data[-96:]), BLOCK_SIGNATURE_DOMAIN)` +* Output the new state root: `hash_tree_root(prev_state_root, other_prev_state_roots, data)` + +### Honest persistent committee member behavior + +Suppose you are a persistent committee member on shard `i` at slot `s`. Suppose `state.shard_next_slots[i] = s-1` ("the happy case"). In this case, you look for a valid proposal that satisfies the checks in the state transition function above, and if you see such a proposal `data` with post-state `post_state`, make an attestation with `shard_data_roots = [hash_tree_root(data)]` and `shard_state_roots = [post_state]`. If you do not find such a proposal, make an attestation using the "default empty proposal", `data = prev_state_root + b'\x00' * 96`. + +Now suppose `state.shard_next_slots[i] = s-k` for `k>1`. Then, initialize `data = []`, `states = []`, `state = state.shard_state_roots[i]`. For `slot in (state.shard_next_slot, min(state.shard_next_slot + MAX_CATCHUP, s))`, do: + +* Look for all valid proposals for `slot` whose first 32 bytes equal to `state`. 
If there are none, add a default empty proposal to `data`. If there is one such proposal `p`, add `p` to `data`. If there is more than one, select the one with the largest number of total attestations supporting it or its descendants, and add it to `data`. +* Set `state` to the state after processing the proposal just added to `data`; append it to `states` + +Make an attestation using `shard_data_roots = data` and `shard_state_roots = states`. From f6be6b2b8aa47ef4f9c696e5bed62f7b6d2512f6 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sat, 12 Oct 2019 23:59:51 +0900 Subject: [PATCH 002/194] Added a few things --- specs/core/1_new_shards.md | 65 ++++++++++++++++++++++++++++++-------- 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index dec84dfe1..2066aac8d 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -12,8 +12,13 @@ - [Configuration](#configuration) - [Misc](#misc) - [Containers](#containers) + - [Helpers](#helpers) - [Beacon Chain Changes](#beacon-chain-changes) - [New state variables](#new-state-variables) + - [Attestation processing](#attestation-processing) + - [Epoch transition](#epoch-transition) + - [Fraud proofs](#fraud-proofs) + - [Honest persistent committee member behavior](#honest-persistent-committee-member-behavior) @@ -30,7 +35,8 @@ This document describes the shard transition function (data layer only) and the | `MAX_SHARDS` | `2**10` (= 1024) | | `ACTIVE_SHARDS` | `2**6` (= 64) | | `SHARD_ROOT_HISTORY_LENGTH` | `2**15` (= 32,768) | -| `MAX_CATCHUP` | `2**3` (= 8) | +| `MAX_CATCHUP` | `2**5` (= 32) | +| `ONLINE_PERIOD` | `2**3` (= 8) | ## Containers @@ -40,8 +46,6 @@ This document describes the shard transition function (data layer only) and the class AttestationData(Container): # Slot slot: Slot - # Shard - shard: shard # LMD GHOST vote beacon_block_root: Hash # FFG vote @@ -51,6 +55,8 @@ class AttestationData(Container): shard_data_roots: 
List[Hash, MAX_CATCHUP] # Intermediate state roots shard_state_roots: List[Hash, MAX_CATCHUP] + # Index + index: uint64 ``` ### `Attestation` @@ -63,13 +69,31 @@ class Attestation(Container): signature: BLSSignature ``` +## Helpers + +### `get_online_validators` + +```python +def get_online_indices(state: BeaconState) -> Set[ValidatorIndex]: + active_validators = get_active_validator_indices(state, get_current_epoch(state)) + return set([i for i in active_validators if state.online_countdown[i] != 0]) +``` + +### `get_shard_state_root` + +```python +def get_shard_state_root(state: BeaconState, shard: Shard) -> Hash: + return state.shard_state_roots[shard][-1] +``` + ## Beacon Chain Changes ### New state variables ``` - shard_state_roots: Vector[Hash, MAX_SHARDS] + shard_state_roots: Vector[List[Hash, MAX_CATCHUP], MAX_SHARDS] shard_next_slot: Vector[Slot, MAX_SHARDS] + online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] ``` ### Attestation processing @@ -77,38 +101,43 @@ class Attestation(Container): ```python def process_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data - assert shard < ACTIVE_SHARDS + assert data.index < ACTIVE_SHARDS + shard = (data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS # Signature check - committee = get_crosslink_committee(state, get_current_epoch(state), data.shard) + committee = get_crosslink_committee(state, get_current_epoch(state), shard) for bits in attestation.custody_bits + [attestation.aggregation_bits]: assert bits == len(committee) # Check signature assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) + # Get attesting indices + attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) # Type 1: on-time attestations if data.custody_bits != []: # Correct start slot - assert data.slot == state.shard_next_slot[data.shard] + assert data.slot == state.shard_next_slot[shard] # Correct data root count assert 
len(data.shard_data_roots) == len(attestation.custody_bits) == len(data.shard_state_roots) == min(state.slot - data.slot, MAX_CATCHUP) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Apply online_indices = get_online_indices(state) - attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits).intersection(get_online_indices) - if get_total_balance(state, attesting_indices) * 3 >= get_total_balance(state, online_indices) * 2: - state.shard_state_roots[data.shard] = data.shard_state_roots[-1] - state.shard_next_slot[data.shard] += len(data.shard_data_roots) + if get_total_balance(state, online_indices.intersection(attesting_indices)) * 3 >= get_total_balance(state, online_indices) * 2: + state.shard_state_roots[shard] = data.shard_state_roots + state.shard_next_slot[shard] += len(data.shard_data_roots) # Type 2: delayed attestations else: assert slot_to_epoch(data.slot) in (get_current_epoch(state), get_previous_epoch(state)) assert len(data.shard_data_roots) == len(data.intermediate_state_roots) == 0 + for index in attesting_indices: + online_countdown[index] = ONLINE_PERIOD + pending_attestation = PendingAttestation( slot=data.slot, - shard=data.shard, + shard=shard, aggregation_bits=attestation.aggregation_bits, inclusion_delay=state.slot - attestation_slot, proposer_index=get_beacon_proposer_index(state), @@ -122,12 +151,20 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: state.previous_epoch_attestations.append(pending_attestation) ``` +### Epoch transition + +```python +for index in range(len(state.validators)): + if state.online_countdown[index] != 0: + state.online_countdown[index] = state.online_countdown[index] - 1 +``` + ### Fraud proofs TODO. 
The intent is to have a single universal fraud proof type, which contains (i) an on-time attestation on shard `s` signing a set of `data_roots`, (ii) an index `i` of a particular data root to focus on, (iii) the full contents of the i'th data, (iii) a Merkle proof to the `shard_state_roots` in the parent block the attestation is referencing, and which then verifies that one of the two conditions is false: * `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` -* `execute_state_transition(slot, shard, attestation.shard_state_roots[i-1], parent.shard_state_roots, block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s]`) +* `execute_state_transition(slot, shard, attestation.shard_state_roots[i-1], parent.shard_state_roots, block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s][-1]`) For phase 1, we will use a simple state transition function: @@ -135,7 +172,7 @@ For phase 1, we will use a simple state transition function: * Check that `bls_verify(get_shard_proposer(state, slot, shard), hash_tree_root(data[-96:]), BLSSignature(data[-96:]), BLOCK_SIGNATURE_DOMAIN)` * Output the new state root: `hash_tree_root(prev_state_root, other_prev_state_roots, data)` -### Honest persistent committee member behavior +## Honest persistent committee member behavior Suppose you are a persistent committee member on shard `i` at slot `s`. Suppose `state.shard_next_slots[i] = s-1` ("the happy case"). In this case, you look for a valid proposal that satisfies the checks in the state transition function above, and if you see such a proposal `data` with post-state `post_state`, make an attestation with `shard_data_roots = [hash_tree_root(data)]` and `shard_state_roots = [post_state]`. If you do not find such a proposal, make an attestation using the "default empty proposal", `data = prev_state_root + b'\x00' * 96`. 
From 25db268bfb815709667e15ff6d401e44b143eee5 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 13 Oct 2019 15:52:51 +0900 Subject: [PATCH 003/194] Added a few more things --- specs/core/1_new_shards.md | 95 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 91 insertions(+), 4 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 2066aac8d..cf644b4c0 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -15,7 +15,9 @@ - [Helpers](#helpers) - [Beacon Chain Changes](#beacon-chain-changes) - [New state variables](#new-state-variables) + - [New block data structures](#new-block-data-structures) - [Attestation processing](#attestation-processing) + - [Light client signature processing)(#light-client-signature-processing) - [Epoch transition](#epoch-transition) - [Fraud proofs](#fraud-proofs) - [Honest persistent committee member behavior](#honest-persistent-committee-member-behavior) @@ -30,13 +32,15 @@ This document describes the shard transition function (data layer only) and the ### Misc -| Name | Value | -| - | - | +| Name | Value | Unit | Duration | +| - | - | - | - | | `MAX_SHARDS` | `2**10` (= 1024) | | `ACTIVE_SHARDS` | `2**6` (= 64) | | `SHARD_ROOT_HISTORY_LENGTH` | `2**15` (= 32,768) | -| `MAX_CATCHUP` | `2**5` (= 32) | -| `ONLINE_PERIOD` | `2**3` (= 8) | +| `MAX_CATCHUP` | `2**5` (= 32) | slots | 3.2 min | +| `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | +| `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | +| `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~29 hours | ## Containers @@ -69,6 +73,14 @@ class Attestation(Container): signature: BLSSignature ``` +### `CompactCommittee` + +```python +class CompactCommittee(Container): + pubkeys: List[BLSPubkey, MAX_VALIDATORS_PER_COMMITTEE] + compact_validators: List[uint64, MAX_VALIDATORS_PER_COMMITTEE] +``` + ## Helpers ### `get_online_validators` @@ -86,6 +98,44 @@ def get_shard_state_root(state: BeaconState, shard: Shard) -> 
Hash: return state.shard_state_roots[shard][-1] ``` +### `pack_compact_validator` + +```python +def pack_compact_validator(index: int, slashed: bool, balance_in_increments: int) -> int: + """ + Creates a compact validator object representing index, slashed status, and compressed balance. + Takes as input balance-in-increments (// EFFECTIVE_BALANCE_INCREMENT) to preserve symmetry with + the unpacking function. + """ + return (index << 16) + (slashed << 15) + balance_in_increments +``` + +### `unpack_compact_validator` + +```python +def unpack_compact_validator(compact_validator: int) -> Tuple[int, bool, int]: + """ + Returns validator index, slashed, balance // EFFECTIVE_BALANCE_INCREMENT + """ + return compact_validator >> 16, bool((compact_validator >> 15) % 2), compact_validator & (2**15 - 1) +``` + +### `committee_to_compact_committee` + +```python +def committee_to_compact_committee(state: BeaconState, committee: Sequence[ValidatorIndex]) -> CompactCommittee: + """ + Given a state and a list of validator indices, outputs the CompactCommittee representing them. 
+ """ + validators = [state.validators[i] for i in committee] + compact_validators = [ + pack_compact_validator(i, v.slashed, v.effective_balance // EFFECTIVE_BALANCE_INCREMENT) + for i, v in zip(committee, validators) + ] + pubkeys = [v.pubkey for v in validators] + return CompactCommittee(pubkeys=pubkeys, compact_validators=compact_validators) +``` + ## Beacon Chain Changes ### New state variables @@ -94,6 +144,15 @@ def get_shard_state_root(state: BeaconState, shard: Shard) -> Hash: shard_state_roots: Vector[List[Hash, MAX_CATCHUP], MAX_SHARDS] shard_next_slot: Vector[Slot, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] + current_light_committee: CompactCommittee + next_light_committee: CompactCommittee +``` + +### New block data structures + +``` + light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] + light_client_signature: BLSSignature ``` ### Attestation processing @@ -151,12 +210,40 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: state.previous_epoch_attestations.append(pending_attestation) ``` +### Light client processing + +```python +signer_validators = [] +signer_keys = [] +for i, bit in enumerate(block.light_client_signature_bitfield): + if bit: + signer_keys.append(state.current_light_committee.pubkeys[i]) + index, _, _ = unpack_compact_validator(state.current_light_committee.compact_validators[i]) + signer_validators.append(index) + +assert bls_verify( + pubkey=bls_aggregate_pubkeys(signer_keys), + message_hash=get_block_root_at_slot(state, state.slot - 1), + signature=block.light_client_signature, + domain=DOMAIN_LIGHT_CLIENT +) +``` + ### Epoch transition ```python +# Slowly remove validators from the "online" set if they do not show up for index in range(len(state.validators)): if state.online_countdown[index] != 0: state.online_countdown[index] = state.online_countdown[index] - 1 + +# Update light client committees +if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: 
+ state.current_light_committee = state.next_light_committee + seed = get_seed(state, get_current_epoch(state), DOMAIN_LIGHT_CLIENT) + active_indices = get_active_validator_indices(state, get_current_epoch(state)) + committee = [active_indices[compute_shuffled_index(ValidatorIndex(i), len(active_indices), seed)] for i in range(LIGHT_CLIENT_COMMITTEE_SIZE)] + state.next_light_committee = committee_to_compact_committee(state, committee) ``` ### Fraud proofs From 26df98bf61b0d07d60263dbe339e80d93c3e6098 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 13 Oct 2019 17:11:29 +0900 Subject: [PATCH 004/194] MAX_CATCHUP -> MAX_CATCHUP_RATIO --- specs/core/1_new_shards.md | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index cf644b4c0..2d199a640 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -37,7 +37,7 @@ This document describes the shard transition function (data layer only) and the | `MAX_SHARDS` | `2**10` (= 1024) | | `ACTIVE_SHARDS` | `2**6` (= 64) | | `SHARD_ROOT_HISTORY_LENGTH` | `2**15` (= 32,768) | -| `MAX_CATCHUP` | `2**5` (= 32) | slots | 3.2 min | +| `MAX_CATCHUP_RATIO` | `2**2` (= 4) | | `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~29 hours | @@ -56,9 +56,9 @@ class AttestationData(Container): source: Checkpoint target: Checkpoint # Shard data roots - shard_data_roots: List[Hash, MAX_CATCHUP] + shard_data_roots: List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS] # Intermediate state roots - shard_state_roots: List[Hash, MAX_CATCHUP] + shard_state_roots: List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS] # Index index: uint64 ``` @@ -69,7 +69,7 @@ class AttestationData(Container): class Attestation(Container): aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] data: AttestationData - custody_bits: 
List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP] + custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP_RATIO * MAX_SHARDS] signature: BLSSignature ``` @@ -141,7 +141,7 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ### New state variables ``` - shard_state_roots: Vector[List[Hash, MAX_CATCHUP], MAX_SHARDS] + shard_state_roots: Vector[List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS], MAX_SHARDS] shard_next_slot: Vector[Slot, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee @@ -177,7 +177,8 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Correct start slot assert data.slot == state.shard_next_slot[shard] # Correct data root count - assert len(data.shard_data_roots) == len(attestation.custody_bits) == len(data.shard_state_roots) == min(state.slot - data.slot, MAX_CATCHUP) + max_catchup = ACTIVE_SHARDS * MAX_CATCHUP_RATIO // get_committee_count(state, state.slot) + assert len(data.shard_data_roots) == len(attestation.custody_bits) == len(data.shard_state_roots) == min(state.slot - data.slot, max_catchup) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Apply @@ -263,7 +264,7 @@ For phase 1, we will use a simple state transition function: Suppose you are a persistent committee member on shard `i` at slot `s`. Suppose `state.shard_next_slots[i] = s-1` ("the happy case"). In this case, you look for a valid proposal that satisfies the checks in the state transition function above, and if you see such a proposal `data` with post-state `post_state`, make an attestation with `shard_data_roots = [hash_tree_root(data)]` and `shard_state_roots = [post_state]`. If you do not find such a proposal, make an attestation using the "default empty proposal", `data = prev_state_root + b'\x00' * 96`. -Now suppose `state.shard_next_slots[i] = s-k` for `k>1`. 
Then, initialize `data = []`, `states = []`, `state = state.shard_state_roots[i]`. For `slot in (state.shard_next_slot, min(state.shard_next_slot + MAX_CATCHUP, s))`, do: +Now suppose `state.shard_next_slots[i] = s-k` for `k>1`. Then, initialize `data = []`, `states = []`, `state = state.shard_state_roots[i]`. For `slot in (state.shard_next_slot, min(state.shard_next_slot + max_catchup, s))`, do: * Look for all valid proposals for `slot` whose first 32 bytes equal to `state`. If there are none, add a default empty proposal to `data`. If there is one such proposal `p`, add `p` to `data`. If there is more than one, select the one with the largest number of total attestations supporting it or its descendants, and add it to `data`. * Set `state` to the state after processing the proposal just added to `data`; append it to `states` From fe60b4debaf853bea3c7b77e26de5ea4c8fb4223 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 13 Oct 2019 17:13:52 +0900 Subject: [PATCH 005/194] Variable max attestation length --- specs/core/1_new_shards.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 2d199a640..adc8fb412 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -211,6 +211,8 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: state.previous_epoch_attestations.append(pending_attestation) ``` +Check the length of attestations using `len(block.attestations) <= 4 * get_committee_count(state, state.slot)`. 
+ ### Light client processing ```python From b792fe696a2ddaa7109f1b1210185c27a2338053 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 13 Oct 2019 17:42:55 +0900 Subject: [PATCH 006/194] formatting --- specs/core/1_new_shards.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index adc8fb412..19de5773e 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -140,7 +140,7 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ### New state variables -``` +```python shard_state_roots: Vector[List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS], MAX_SHARDS] shard_next_slot: Vector[Slot, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] @@ -150,7 +150,7 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ### New block data structures -``` +```python light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] light_client_signature: BLSSignature ``` From 9abfc6276a59293529c954eb56d8a3f39b0e14c6 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Mon, 14 Oct 2019 18:20:35 +0900 Subject: [PATCH 007/194] Cleaned up state transition and honest committee member --- specs/core/1_new_shards.md | 57 ++++++++++++++++++++++++++++++-------- 1 file changed, 45 insertions(+), 12 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 19de5773e..32620ef8d 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -20,7 +20,8 @@ - [Light client signature processing)(#light-client-signature-processing) - [Epoch transition](#epoch-transition) - [Fraud proofs](#fraud-proofs) - - [Honest persistent committee member behavior](#honest-persistent-committee-member-behavior) + - [Shard state transition function](#shard-state-transition-function) + - [Honest committee member behavior](#honest-committee-member-behavior) @@ -36,7 +37,6 @@ This document describes the 
shard transition function (data layer only) and the | - | - | - | - | | `MAX_SHARDS` | `2**10` (= 1024) | | `ACTIVE_SHARDS` | `2**6` (= 64) | -| `SHARD_ROOT_HISTORY_LENGTH` | `2**15` (= 32,768) | | `MAX_CATCHUP_RATIO` | `2**2` (= 4) | | `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | @@ -142,7 +142,7 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ```python shard_state_roots: Vector[List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS], MAX_SHARDS] - shard_next_slot: Vector[Slot, MAX_SHARDS] + shard_next_slots: Vector[Slot, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee next_light_committee: CompactCommittee @@ -175,7 +175,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Type 1: on-time attestations if data.custody_bits != []: # Correct start slot - assert data.slot == state.shard_next_slot[shard] + assert data.slot == state.shard_next_slots[shard] # Correct data root count max_catchup = ACTIVE_SHARDS * MAX_CATCHUP_RATIO // get_committee_count(state, state.slot) assert len(data.shard_data_roots) == len(attestation.custody_bits) == len(data.shard_state_roots) == min(state.slot - data.slot, max_catchup) @@ -185,7 +185,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: online_indices = get_online_indices(state) if get_total_balance(state, online_indices.intersection(attesting_indices)) * 3 >= get_total_balance(state, online_indices) * 2: state.shard_state_roots[shard] = data.shard_state_roots - state.shard_next_slot[shard] += len(data.shard_data_roots) + state.shard_next_slots[shard] += len(data.shard_data_roots) # Type 2: delayed attestations else: @@ -254,7 +254,7 @@ if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: TODO. 
The intent is to have a single universal fraud proof type, which contains (i) an on-time attestation on shard `s` signing a set of `data_roots`, (ii) an index `i` of a particular data root to focus on, (iii) the full contents of the i'th data, (iii) a Merkle proof to the `shard_state_roots` in the parent block the attestation is referencing, and which then verifies that one of the two conditions is false: * `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` -* `execute_state_transition(slot, shard, attestation.shard_state_roots[i-1], parent.shard_state_roots, block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s][-1]`) +* `execute_state_transition(shard, slot, attestation.shard_state_roots[i-1], hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s][-1]`) For phase 1, we will use a simple state transition function: @@ -262,13 +262,46 @@ For phase 1, we will use a simple state transition function: * Check that `bls_verify(get_shard_proposer(state, slot, shard), hash_tree_root(data[-96:]), BLSSignature(data[-96:]), BLOCK_SIGNATURE_DOMAIN)` * Output the new state root: `hash_tree_root(prev_state_root, other_prev_state_roots, data)` -## Honest persistent committee member behavior +## Shard state transition function -Suppose you are a persistent committee member on shard `i` at slot `s`. Suppose `state.shard_next_slots[i] = s-1` ("the happy case"). In this case, you look for a valid proposal that satisfies the checks in the state transition function above, and if you see such a proposal `data` with post-state `post_state`, make an attestation with `shard_data_roots = [hash_tree_root(data)]` and `shard_state_roots = [post_state]`. If you do not find such a proposal, make an attestation using the "default empty proposal", `data = prev_state_root + b'\x00' * 96`. 
+```python +def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: Bytes) -> Hash: + # Beginning of block data is the previous state root + assert block_data[:32] == pre_state + assert block_data[32:64] == int_to_bytes8(slot) + b'\x00' * 24 + # Signature check (nonempty blocks only) + if len(block_data) == 64: + pass + else: + assert len(block_data) >= 160 + assert bls_verify( + pubkey=proposer_pubkey, + message_hash=hash_tree_root(block_data[:-96]), + signature=block_data[-96:], + domain=DOMAIN_SHARD_PROPOSER + ) + # We will add something more substantive in phase 2 + return hash(pre_state + hash_tree_root(block_data)) +``` -Now suppose `state.shard_next_slots[i] = s-k` for `k>1`. Then, initialize `data = []`, `states = []`, `state = state.shard_state_roots[i]`. For `slot in (state.shard_next_slot, min(state.shard_next_slot + max_catchup, s))`, do: +We also provide a method to generate an empty proposal: -* Look for all valid proposals for `slot` whose first 32 bytes equal to `state`. If there are none, add a default empty proposal to `data`. If there is one such proposal `p`, add `p` to `data`. If there is more than one, select the one with the largest number of total attestations supporting it or its descendants, and add it to `data`. -* Set `state` to the state after processing the proposal just added to `data`; append it to `states` +```python +def make_empty_proposal(pre_state: Hash, slot: Slot) -> Bytes[64]: + return pre_state + int_to_bytes8(slot) + b'\x00' * 24 +``` -Make an attestation using `shard_data_roots = data` and `shard_state_roots = states`. +## Honest committee member behavior + +Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on. 
Three seconds into slot `slot`, run the following procedure: + +* Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_state_roots[shard][-1]`. +* Let `max_catchup = ACTIVE_SHARDS * MAX_CATCHUP_RATIO // get_committee_count(state, current_slot))` +* For `slot in (state.shard_next_slots[shard], min(state.shard_next_slot + max_catchup, current_slot))`, do the following: + * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. + * If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` + * If `len(choices) == 1`, do `proposals.append(choices[0])` + * If `len(choices) > 1`, let `winning_proposal` be the proposal with the largest number of total attestations from slots in `state.shard_next_slots[shard]....slot-1` supporting it or any of its descendants, breaking ties by choosing the first proposal locally seen. Do `proposals.append(winning_proposal)`. + * Set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. + +Make an attestation using `shard_data_roots = [hash_tree_root(proposal) for proposal in proposals]` and `shard_state_roots = shard_states`. 
From e5544d055b1a12fb5e9b0d6952c97a3155e9d535 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Mon, 14 Oct 2019 23:55:41 +0800 Subject: [PATCH 008/194] Made trace a commitment separate from state roots --- specs/core/1_new_shards.md | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 32620ef8d..087a8af17 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -91,13 +91,6 @@ def get_online_indices(state: BeaconState) -> Set[ValidatorIndex]: return set([i for i in active_validators if state.online_countdown[i] != 0]) ``` -### `get_shard_state_root` - -```python -def get_shard_state_root(state: BeaconState, shard: Shard) -> Hash: - return state.shard_state_roots[shard][-1] -``` - ### `pack_compact_validator` ```python @@ -141,7 +134,8 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ### New state variables ```python - shard_state_roots: Vector[List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS], MAX_SHARDS] + shard_state_roots: Vector[Hash, MAX_SHARDS] + shard_trace_commitments: Vector[Hash, MAX_SHARDS] shard_next_slots: Vector[Slot, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee @@ -184,7 +178,13 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Apply online_indices = get_online_indices(state) if get_total_balance(state, online_indices.intersection(attesting_indices)) * 3 >= get_total_balance(state, online_indices) * 2: - state.shard_state_roots[shard] = data.shard_state_roots + # Save trace commitment (used for fraud proofs) + trace = List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS * 2 + 1]([state.shard_state_roots[shard]]) + for data, state in zip(data.shard_data_roots, data.shard_state_roots): + trace.extend([data, state]) + state.shard_trace_commitments[shard] = hash_tree_root(trace) + # Save state root and next slot + 
state.shard_state_roots[shard] = data.shard_state_roots[-1] state.shard_next_slots[shard] += len(data.shard_data_roots) # Type 2: delayed attestations @@ -256,12 +256,6 @@ TODO. The intent is to have a single universal fraud proof type, which contains * `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` * `execute_state_transition(shard, slot, attestation.shard_state_roots[i-1], hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s][-1]`) -For phase 1, we will use a simple state transition function: - -* Check that `data[:32] == prev_state_root` -* Check that `bls_verify(get_shard_proposer(state, slot, shard), hash_tree_root(data[-96:]), BLSSignature(data[-96:]), BLOCK_SIGNATURE_DOMAIN)` -* Output the new state root: `hash_tree_root(prev_state_root, other_prev_state_roots, data)` - ## Shard state transition function ```python From 7fc2830730284f964a5371daf86c5ab23b52f37f Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 27 Oct 2019 09:01:10 +0800 Subject: [PATCH 009/194] Some updates --- specs/core/1_new_shards.md | 178 ++++++++++++++++++++++++++++--------- 1 file changed, 138 insertions(+), 40 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 087a8af17..54990a43f 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -41,28 +41,61 @@ This document describes the shard transition function (data layer only) and the | `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~29 hours | +| `SHARD_STATE_ROOT_LENGTH` | `2**7` (= 128) | bytes | +| `MAX_SHARD_BLOCK ## Containers +### Aliases + +| Name | Value | +| - | - | +| `SHARD_STATE_ROOT` | `BytesN[SHARD_STATE_ROOT_LENGTH]` | + + ### `AttestationData` ```python class AttestationData(Container): - # Slot slot: 
Slot + index: CommitteeIndex # LMD GHOST vote beacon_block_root: Hash # FFG vote source: Checkpoint target: Checkpoint + # Shard data + shard_data: AttestationShardData +``` + +### `AttestationShardData` + +```python +class AttestationShardData(Container): + # Shard block lengths + shard_block_lengths: List[uint8, MAX_CATCHUP_RATIO * MAX_SHARDS] # Shard data roots shard_data_roots: List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS] # Intermediate state roots - shard_state_roots: List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS] - # Index - index: uint64 + shard_state_roots: List[SHARD_STATE_ROOT, MAX_CATCHUP_RATIO * MAX_SHARDS] ``` +### `ReducedAttestationData` + +```python +class ReducedAttestationData(Container): + slot: Slot + index: CommitteeIndex + # LMD GHOST vote + beacon_block_root: Hash + # FFG vote + source: Checkpoint + target: Checkpoint + # Shard data root + shard_data_root: Hash +``` + + ### `Attestation` ```python @@ -73,6 +106,26 @@ class Attestation(Container): signature: BLSSignature ``` +### `ReducedAttestation` + +```python +class ReducedAttestation(Container): + aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] + data: ReducedAttestationData + custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP_RATIO * MAX_SHARDS] + signature: BLSSignature +``` + +### `IndexedAttestation` + +```python +class IndexedAttestation(Container): + participants: List[ValidatorIndex, MAX_COMMITTEE_SIZE] + data: ReducedAttestationData + custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP_RATIO * MAX_SHARDS] + signature: BLSSignature +``` + ### `CompactCommittee` ```python @@ -81,6 +134,15 @@ class CompactCommittee(Container): compact_validators: List[uint64, MAX_VALIDATORS_PER_COMMITTEE] ``` +### `AttestationCustodyBitWrapper` + +``` +class AttestationCustodyBitWrapper(Container): + attestation_root: Hash + index: uint64 + bit: bool +``` + ## Helpers ### `get_online_validators` @@ -103,16 +165,6 @@ def pack_compact_validator(index: int, 
slashed: bool, balance_in_increments: int return (index << 16) + (slashed << 15) + balance_in_increments ``` -### `unpack_compact_validator` - -```python -def unpack_compact_validator(compact_validator: int) -> Tuple[int, bool, int]: - """ - Returns validator index, slashed, balance // EFFECTIVE_BALANCE_INCREMENT - """ - return compact_validator >> 16, bool((compact_validator >> 15) % 2), compact_validator & (2**15 - 1) -``` - ### `committee_to_compact_committee` ```python @@ -129,6 +181,52 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid return CompactCommittee(pubkeys=pubkeys, compact_validators=compact_validators) ``` +### `get_light_client_committee` + +```python +def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Sequence[ValidatorIndex]: + assert epoch % LIGHT_CLIENT_COMMITTEE_PERIOD == 0 + active_validator_indices = get_active_validator_indices(beacon_state, epoch) + seed = get_seed(beacon_state, epoch, DOMAIN_SHARD_LIGHT_CLIENT) + return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS)[:TARGET_COMMITTEE_SIZE] +``` + +### `get_indexed_attestation` + +```python +def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> IndexedAttestation: + attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) + return IndexedAttestation(attesting_indices, data, custody_bits, signature) +``` + +### `is_valid_indexed_attestation` + +``python +def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: IndexedAttestation) -> bool: + """ + Check if ``indexed_attestation`` has valid indices and signature. 
+ """ + + # Verify indices are sorted + if indexed_attestation.participants != sorted(indexed_attestation.participants): + return False + + # Verify aggregate signature + all_pubkeys = [] + all_message_hashes = [] + for participant, custody_bits in zip(participants, indexed_attestation.custody_bits): + for i, bit in enumerate(custody_bits): + all_pubkeys.append(state.validators[participant].pubkey) + all_message_hashes.append(AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, bit)) + + return bls_verify_multiple( + pubkeys=all_pubkeys, + message_hashes=all_message_hashes, + signature=indexed_attestation.signature, + domain=get_domain(state, DOMAIN_BEACON_ATTESTER, indexed_attestation.data.target.epoch), + ) +``` + ## Beacon Chain Changes ### New state variables @@ -216,37 +314,37 @@ Check the length of attestations using `len(block.attestations) <= 4 * get_commi ### Light client processing ```python -signer_validators = [] -signer_keys = [] -for i, bit in enumerate(block.light_client_signature_bitfield): - if bit: - signer_keys.append(state.current_light_committee.pubkeys[i]) - index, _, _ = unpack_compact_validator(state.current_light_committee.compact_validators[i]) - signer_validators.append(index) - -assert bls_verify( - pubkey=bls_aggregate_pubkeys(signer_keys), - message_hash=get_block_root_at_slot(state, state.slot - 1), - signature=block.light_client_signature, - domain=DOMAIN_LIGHT_CLIENT -) +def verify_light_client_signatures(state: BeaconState, block: BeaconBlock): + period_start = get_current_epoch(state) - get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD + committee = get_light_client_committee(state, period_start - min(period_start, LIGHT_CLIENT_COMMITTEE_PERIOD)) + signer_validators = [] + signer_keys = [] + for i, bit in enumerate(block.light_client_signature_bitfield): + if bit: + signer_keys.append(state.validators[committee[i]].pubkey) + signer_validators.append(committee[i]) + + assert bls_verify( + 
pubkey=bls_aggregate_pubkeys(signer_keys), + message_hash=get_block_root_at_slot(state, state.slot - 1), + signature=block.light_client_signature, + domain=DOMAIN_LIGHT_CLIENT + ) ``` ### Epoch transition ```python -# Slowly remove validators from the "online" set if they do not show up -for index in range(len(state.validators)): - if state.online_countdown[index] != 0: - state.online_countdown[index] = state.online_countdown[index] - 1 - -# Update light client committees -if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: - state.current_light_committee = state.next_light_committee - seed = get_seed(state, get_current_epoch(state), DOMAIN_LIGHT_CLIENT) - active_indices = get_active_validator_indices(state, get_current_epoch(state)) - committee = [active_indices[compute_shuffled_index(ValidatorIndex(i), len(active_indices), seed)] for i in range(LIGHT_CLIENT_COMMITTEE_SIZE)] - state.next_light_committee = committee_to_compact_committee(state, committee) +def phase_1_epoch_transition(state): + # Slowly remove validators from the "online" set if they do not show up + for index in range(len(state.validators)): + if state.online_countdown[index] != 0: + state.online_countdown[index] = state.online_countdown[index] - 1 + + # Update light client committees + if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: + state.current_light_committee = state.next_light_committee + state.next_light_committee = committee_to_compact_committee(state, get_light_client_committee(state, get_current_epoch(state))) ``` ### Fraud proofs From bb2835ee1bfc48808f3f15150f5187dbbcffc9b9 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Mon, 28 Oct 2019 02:01:22 +0800 Subject: [PATCH 010/194] Shard slots to 128 bytes --- specs/core/1_new_shards.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 54990a43f..0701bfa9f 100644 --- a/specs/core/1_new_shards.md +++ 
b/specs/core/1_new_shards.md @@ -42,7 +42,7 @@ This document describes the shard transition function (data layer only) and the | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~29 hours | | `SHARD_STATE_ROOT_LENGTH` | `2**7` (= 128) | bytes | -| `MAX_SHARD_BLOCK +| `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | ## Containers @@ -75,7 +75,7 @@ class AttestationShardData(Container): # Shard block lengths shard_block_lengths: List[uint8, MAX_CATCHUP_RATIO * MAX_SHARDS] # Shard data roots - shard_data_roots: List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS] + shard_data_roots: List[Hash, List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_CATCHUP_RATIO * MAX_SHARDS] # Intermediate state roots shard_state_roots: List[SHARD_STATE_ROOT, MAX_CATCHUP_RATIO * MAX_SHARDS] ``` @@ -232,7 +232,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ### New state variables ```python - shard_state_roots: Vector[Hash, MAX_SHARDS] + shard_state_roots: Vector[SHARD_STATE_ROOT, MAX_SHARDS] shard_trace_commitments: Vector[Hash, MAX_SHARDS] shard_next_slots: Vector[Slot, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] From 539c6819213479661d0d38a27b90bdda4c77b36e Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Tue, 29 Oct 2019 10:43:13 -0700 Subject: [PATCH 011/194] Updates to add new proposals --- specs/core/1_new_shards.md | 165 ++++++++++++++++++++++--------------- 1 file changed, 97 insertions(+), 68 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 0701bfa9f..cb2348d65 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -37,21 +37,27 @@ This document describes the shard transition function (data layer only) and the | - | - | - | - | | `MAX_SHARDS` | `2**10` (= 1024) | | `ACTIVE_SHARDS` | `2**6` (= 64) | -| `MAX_CATCHUP_RATIO` | `2**2` (= 4) | | `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | 
`2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~29 hours | -| `SHARD_STATE_ROOT_LENGTH` | `2**7` (= 128) | bytes | +| `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | | `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | +| `BLOCK_SIZE_TARGET` | `3 * 2**16` (= 196,608) | | +| `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | +| `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | +| `MAX_SHARD_GASPRICE` | `2**14` (= 16,384) | Gwei | | +| `SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | ## Containers -### Aliases - -| Name | Value | -| - | - | -| `SHARD_STATE_ROOT` | `BytesN[SHARD_STATE_ROOT_LENGTH]` | +### `ShardState` +```python +class ShardState(Container): + slot: Slot + gasprice: Gwei + root: Hash +``` ### `AttestationData` @@ -64,55 +70,31 @@ class AttestationData(Container): # FFG vote source: Checkpoint target: Checkpoint - # Shard data - shard_data: AttestationShardData + # Shard transition hash + shard_transition_hash: Hash ``` -### `AttestationShardData` +### `ShardTransition` ```python class AttestationShardData(Container): + # Starting from slot + start_slot: Slot # Shard block lengths - shard_block_lengths: List[uint8, MAX_CATCHUP_RATIO * MAX_SHARDS] + shard_block_lengths: List[uint8, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Shard data roots - shard_data_roots: List[Hash, List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_CATCHUP_RATIO * MAX_SHARDS] + shard_data_roots: List[Hash, List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] # Intermediate state roots - shard_state_roots: List[SHARD_STATE_ROOT, MAX_CATCHUP_RATIO * MAX_SHARDS] + shard_state_roots: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] ``` -### `ReducedAttestationData` - -```python -class ReducedAttestationData(Container): - slot: Slot - index: CommitteeIndex - # LMD GHOST vote - beacon_block_root: Hash - # FFG vote - source: Checkpoint - target: Checkpoint - # Shard data root - shard_data_root: Hash -``` - 
- ### `Attestation` ```python class Attestation(Container): aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] data: AttestationData - custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP_RATIO * MAX_SHARDS] - signature: BLSSignature -``` - -### `ReducedAttestation` - -```python -class ReducedAttestation(Container): - aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] - data: ReducedAttestationData - custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP_RATIO * MAX_SHARDS] + custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_SHARD_BLOCKS_PER_ATTESTATION] signature: BLSSignature ``` @@ -121,8 +103,8 @@ class ReducedAttestation(Container): ```python class IndexedAttestation(Container): participants: List[ValidatorIndex, MAX_COMMITTEE_SIZE] - data: ReducedAttestationData - custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_CATCHUP_RATIO * MAX_SHARDS] + data: AttestationData + custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_SHARD_BLOCKS_PER_ATTESTATION] signature: BLSSignature ``` @@ -199,6 +181,21 @@ def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) return IndexedAttestation(attesting_indices, data, custody_bits, signature) ``` +### `update_gasprice` + +```python +def update_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: + if length > BLOCK_SIZE_TARGET: + delta = prev_gasprice * (length - BLOCK_SIZE_TARGET) // BLOCK_SIZE_TARGET // SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT + return min(prev_gasprice + delta, MAX_SHARD_GASPRICE) + else: + delta = prev_gasprice * (BLOCK_SIZE_TARGET - length) // BLOCK_SIZE_TARGET // SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT + if delta > prev_gasprice - SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT: + return SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT + else: + return prev_gasprice - delta +``` + ### `is_valid_indexed_attestation` ``python @@ -217,6 +214,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe for 
participant, custody_bits in zip(participants, indexed_attestation.custody_bits): for i, bit in enumerate(custody_bits): all_pubkeys.append(state.validators[participant].pubkey) + # Note: only 2N distinct message hashes all_message_hashes.append(AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, bit)) return bls_verify_multiple( @@ -232,9 +230,8 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ### New state variables ```python - shard_state_roots: Vector[SHARD_STATE_ROOT, MAX_SHARDS] - shard_trace_commitments: Vector[Hash, MAX_SHARDS] - shard_next_slots: Vector[Slot, MAX_SHARDS] + shard_transitions: Vector[ShardTransition, MAX_SHARDS] + shard_states: Vector[ShardState, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee next_light_committee: CompactCommittee @@ -254,52 +251,76 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data assert data.index < ACTIVE_SHARDS shard = (data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS + proposer_index=get_beacon_proposer_index(state) # Signature check committee = get_crosslink_committee(state, get_current_epoch(state), shard) for bits in attestation.custody_bits + [attestation.aggregation_bits]: - assert bits == len(committee) + assert len(bits) == len(committee) # Check signature assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) # Get attesting indices attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) + + # Prepare pending attestation object + pending_attestation = PendingAttestation( + slot=data.slot, + shard=shard, + aggregation_bits=attestation.aggregation_bits, + inclusion_delay=state.slot - data.slot, + crosslink_success=False, + proposer_index=proposer_index + ) # Type 1: on-time attestations if data.custody_bits != []: - # Correct start slot - assert data.slot == 
state.shard_next_slots[shard] + # Correct slot + assert data.slot == state.slot + # Slot the attestation starts counting from + start_slot = state.shard_next_slots[shard] # Correct data root count - max_catchup = ACTIVE_SHARDS * MAX_CATCHUP_RATIO // get_committee_count(state, state.slot) - assert len(data.shard_data_roots) == len(attestation.custody_bits) == len(data.shard_state_roots) == min(state.slot - data.slot, max_catchup) + offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] + assert len(attestation.custody_bits) == len(offset_slots) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Apply online_indices = get_online_indices(state) if get_total_balance(state, online_indices.intersection(attesting_indices)) * 3 >= get_total_balance(state, online_indices) * 2: - # Save trace commitment (used for fraud proofs) - trace = List[Hash, MAX_CATCHUP_RATIO * MAX_SHARDS * 2 + 1]([state.shard_state_roots[shard]]) - for data, state in zip(data.shard_data_roots, data.shard_state_roots): - trace.extend([data, state]) - state.shard_trace_commitments[shard] = hash_tree_root(trace) - # Save state root and next slot - state.shard_state_roots[shard] = data.shard_state_roots[-1] - state.shard_next_slots[shard] += len(data.shard_data_roots) - + # Check correct formatting of shard transition data + transition = block.shard_transitions[shard] + assert data.shard_transition_hash == hash_tree_root(transition) + assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) + assert transition.start_slot == start_slot + + # Verify correct calculation of gas prices and slots and chunk roots + prev_gasprice = state.shard_states[shard].gasprice + for i in range(len(offset_slots)): + assert transition.shard_states[i].gasprice == update_gasprice(prev_gasprice, transition.shard_block_lengths[i]) + assert 
transition.shard_states[i].slot == offset_slots[i] + assetrt len(transition.shard_data_roots[i]) == transition.shard_block_lengths[i] // SHARD_BLOCK_CHUNK_SIZE + prev_gasprice = transition.shard_states[i].gasprice + + # Save updated state + state.shard_states[shard] = data.shard_states[-1] + state.shard_states[shard].slot = state.slot - 1 + + # Save success (for end-of-epoch rewarding) + pending_attestation.crosslink_success = True + + # Reward and cost proposer + estimated_attester_reward = sum([get_base_reward(state, attester) for attester in attesting_indices]) + increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) + for state, length in zip(transition.shard_states, transition.shard_block_lengths): + decrease_balance(state, proposer, state.gasprice * length) + # Type 2: delayed attestations else: assert slot_to_epoch(data.slot) in (get_current_epoch(state), get_previous_epoch(state)) - assert len(data.shard_data_roots) == len(data.intermediate_state_roots) == 0 + assert len(attestation.custody_bits) == 0 for index in attesting_indices: online_countdown[index] = ONLINE_PERIOD - pending_attestation = PendingAttestation( - slot=data.slot, - shard=shard, - aggregation_bits=attestation.aggregation_bits, - inclusion_delay=state.slot - attestation_slot, - proposer_index=get_beacon_proposer_index(state), - ) if data.target.epoch == get_current_epoch(state): assert data.source == state.current_justified_checkpoint @@ -309,7 +330,15 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: state.previous_epoch_attestations.append(pending_attestation) ``` -Check the length of attestations using `len(block.attestations) <= 4 * get_committee_count(state, state.slot)`. 
+### Misc block post-processing + +```python +def misc_block_post_process(state: BeaconState, block: BeaconBlock): + # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it + for shard in range(MAX_SHARDS): + if state.shard_states[shard].slot != state.slot - 1: + assert block.shard_transition[shard] == ShardTransition() +``` ### Light client processing From 0ff5985c010a442cd3cce7414bd4ad0fbd1f7686 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Mon, 28 Oct 2019 17:18:27 +0800 Subject: [PATCH 012/194] Fix markdown and ToC --- specs/core/1_new_shards.md | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index cb2348d65..7721bcf5a 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -6,18 +6,33 @@ -- [Ethereum 2.0 Phase 1 -- Shard Data Chains](#ethereum-20-phase-1----shard-data-chains) +- [Ethereum 2.0 Phase 1 -- Crosslinks and Shard Data](#ethereum-20-phase-1----crosslinks-and-shard-data) - [Table of contents](#table-of-contents) - [Introduction](#introduction) - [Configuration](#configuration) - [Misc](#misc) - [Containers](#containers) + - [Aliases](#aliases) + - [`AttestationData`](#attestationdata) + - [`AttestationShardData`](#attestationsharddata) + - [`ReducedAttestationData`](#reducedattestationdata) + - [`Attestation`](#attestation) + - [`ReducedAttestation`](#reducedattestation) + - [`IndexedAttestation`](#indexedattestation) + - [`CompactCommittee`](#compactcommittee) + - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) - [Helpers](#helpers) + - [`get_online_validators`](#get_online_validators) + - [`pack_compact_validator`](#pack_compact_validator) + - [`committee_to_compact_committee`](#committee_to_compact_committee) + - [`get_light_client_committee`](#get_light_client_committee) + - [`get_indexed_attestation`](#get_indexed_attestation) + - 
[`is_valid_indexed_attestation`](#is_valid_indexed_attestation) - [Beacon Chain Changes](#beacon-chain-changes) - [New state variables](#new-state-variables) - [New block data structures](#new-block-data-structures) - [Attestation processing](#attestation-processing) - - [Light client signature processing)(#light-client-signature-processing) + - [Light client processing](#light-client-processing) - [Epoch transition](#epoch-transition) - [Fraud proofs](#fraud-proofs) - [Shard state transition function](#shard-state-transition-function) @@ -118,7 +133,7 @@ class CompactCommittee(Container): ### `AttestationCustodyBitWrapper` -``` +```python class AttestationCustodyBitWrapper(Container): attestation_root: Hash index: uint64 @@ -198,7 +213,7 @@ def update_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: ### `is_valid_indexed_attestation` -``python +```python def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: IndexedAttestation) -> bool: """ Check if ``indexed_attestation`` has valid indices and signature. 
From 08fc024fbd6e553116dc4156d9377e38dfa7d96c Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Tue, 29 Oct 2019 10:56:58 -0700 Subject: [PATCH 013/194] Changed fraud proof details --- specs/core/1_new_shards.md | 47 +++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 7721bcf5a..13cd4fe8a 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -60,6 +60,7 @@ This document describes the shard transition function (data layer only) and the | `BLOCK_SIZE_TARGET` | `3 * 2**16` (= 196,608) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | +| `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_SHARD_GASPRICE` | `2**14` (= 16,384) | Gwei | | | `SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | @@ -72,6 +73,7 @@ class ShardState(Container): slot: Slot gasprice: Gwei root: Hash + latest_block_hash: Hash ``` ### `AttestationData` @@ -312,7 +314,9 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: for i in range(len(offset_slots)): assert transition.shard_states[i].gasprice == update_gasprice(prev_gasprice, transition.shard_block_lengths[i]) assert transition.shard_states[i].slot == offset_slots[i] - assetrt len(transition.shard_data_roots[i]) == transition.shard_block_lengths[i] // SHARD_BLOCK_CHUNK_SIZE + assert len(transition.shard_data_roots[i]) == transition.shard_block_lengths[i] // SHARD_BLOCK_CHUNK_SIZE + filled_roots = transition.shard_data_roots + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(transition.shard_data_roots)) + assert transition.shard_states[i].latest_block_hash == hash_tree_root(filled_roots) prev_gasprice = transition.shard_states[i].gasprice # Save updated state @@ -401,39 +405,30 @@ TODO. 
The intent is to have a single universal fraud proof type, which contains ## Shard state transition function ```python -def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: Bytes) -> Hash: - # Beginning of block data is the previous state root - assert block_data[:32] == pre_state +def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: + # Beginning of block data is the previous block hash + assert block_data[:32] == pre_state.latest_block_hash assert block_data[32:64] == int_to_bytes8(slot) + b'\x00' * 24 - # Signature check (nonempty blocks only) - if len(block_data) == 64: - pass - else: - assert len(block_data) >= 160 - assert bls_verify( - pubkey=proposer_pubkey, - message_hash=hash_tree_root(block_data[:-96]), - signature=block_data[-96:], - domain=DOMAIN_SHARD_PROPOSER - ) + # Signature check + assert len(block_data) >= 160 + assert bls_verify( + pubkey=proposer_pubkey, + message_hash=hash_tree_root(block_data[:-96]), + signature=block_data[-96:], + domain=DOMAIN_SHARD_PROPOSER + ) # We will add something more substantive in phase 2 - return hash(pre_state + hash_tree_root(block_data)) -``` - -We also provide a method to generate an empty proposal: - -```python -def make_empty_proposal(pre_state: Hash, slot: Slot) -> Bytes[64]: - return pre_state + int_to_bytes8(slot) + b'\x00' * 24 + length = len(block.data.rstrip(b'\x00')) + return ShardState(slot=slot, root=hash(pre_state + hash_tree_root(block_data)), gasprice=update_gasprice(pre_state, length), latest_block_hash = hash(block_data)) ``` ## Honest committee member behavior Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on. 
Three seconds into slot `slot`, run the following procedure: -* Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_state_roots[shard][-1]`. -* Let `max_catchup = ACTIVE_SHARDS * MAX_CATCHUP_RATIO // get_committee_count(state, current_slot))` -* For `slot in (state.shard_next_slots[shard], min(state.shard_next_slot + max_catchup, current_slot))`, do the following: +* Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_states[shard][-1]`, `start_slot = shard_state.slot`. +* Let `offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot]` +* For `slot in offset_slots`, do the following: * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. 
* If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` * If `len(choices) == 1`, do `proposals.append(choices[0])` From bf0a2b7df983fdd0da92eed5f8f699411931b61e Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Tue, 29 Oct 2019 11:12:55 -0700 Subject: [PATCH 014/194] Renaming --- specs/core/1_new_shards.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 13cd4fe8a..7f7a4dcdd 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -61,8 +61,8 @@ This document describes the shard transition function (data layer only) and the | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | | `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | -| `MAX_SHARD_GASPRICE` | `2**14` (= 16,384) | Gwei | | -| `SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | +| `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | +| `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | ## Containers @@ -203,12 +203,12 @@ def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) ```python def update_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: if length > BLOCK_SIZE_TARGET: - delta = prev_gasprice * (length - BLOCK_SIZE_TARGET) // BLOCK_SIZE_TARGET // SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT - return min(prev_gasprice + delta, MAX_SHARD_GASPRICE) + delta = prev_gasprice * (length - BLOCK_SIZE_TARGET) // BLOCK_SIZE_TARGET // GASPRICE_ADJUSTMENT_COEFFICIENT + return min(prev_gasprice + delta, MAX_GASPRICE) else: - delta = prev_gasprice * (BLOCK_SIZE_TARGET - length) // BLOCK_SIZE_TARGET // SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT - if delta > prev_gasprice - SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT: - return SHARD_GASPRICE_ADJUSTMENT_COEFFICIENT + delta = prev_gasprice * (BLOCK_SIZE_TARGET - length) // BLOCK_SIZE_TARGET 
// GASPRICE_ADJUSTMENT_COEFFICIENT + if delta > prev_gasprice - GASPRICE_ADJUSTMENT_COEFFICIENT: + return GASPRICE_ADJUSTMENT_COEFFICIENT else: return prev_gasprice - delta ``` From bf886d698bc37935c410054ba4216397e72be79d Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Tue, 29 Oct 2019 11:33:29 -0700 Subject: [PATCH 015/194] Small edits --- specs/core/1_new_shards.md | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 7f7a4dcdd..0d0abfbd7 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -94,7 +94,7 @@ class AttestationData(Container): ### `ShardTransition` ```python -class AttestationShardData(Container): +class ShardTransition(Container): # Starting from slot start_slot: Slot # Shard block lengths @@ -247,7 +247,6 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ### New state variables ```python - shard_transitions: Vector[ShardTransition, MAX_SHARDS] shard_states: Vector[ShardState, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee @@ -257,6 +256,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ### New block data structures ```python + shard_transitions: Vector[ShardTransition, MAX_SHARDS] light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] light_client_signature: BLSSignature ``` @@ -312,21 +312,23 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Verify correct calculation of gas prices and slots and chunk roots prev_gasprice = state.shard_states[shard].gasprice for i in range(len(offset_slots)): - assert transition.shard_states[i].gasprice == update_gasprice(prev_gasprice, transition.shard_block_lengths[i]) - assert transition.shard_states[i].slot == offset_slots[i] - assert len(transition.shard_data_roots[i]) == transition.shard_block_lengths[i] // 
SHARD_BLOCK_CHUNK_SIZE - filled_roots = transition.shard_data_roots + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(transition.shard_data_roots)) - assert transition.shard_states[i].latest_block_hash == hash_tree_root(filled_roots) - prev_gasprice = transition.shard_states[i].gasprice + shard_state, block_length, chunks = transition.shard_states[i], transition.shard_block_lengths[i], transition.shard_data_roots[i] + block_length = transition.shard + assert shard_state.gasprice == update_gasprice(prev_gasprice, block_length) + assert shard_state.slot == offset_slots[i] + assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE + filled_roots = chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) + assert shard_state.latest_block_hash == hash_tree_root(filled_roots) + prev_gasprice = shard_state.gasprice # Save updated state - state.shard_states[shard] = data.shard_states[-1] + state.shard_states[shard] = transition.shard_states[-1] state.shard_states[shard].slot = state.slot - 1 # Save success (for end-of-epoch rewarding) pending_attestation.crosslink_success = True - # Reward and cost proposer + # Apply proposer reward and cost estimated_attester_reward = sum([get_base_reward(state, attester) for attester in attesting_indices]) increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) for state, length in zip(transition.shard_states, transition.shard_block_lengths): @@ -335,6 +337,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Type 2: delayed attestations else: assert slot_to_epoch(data.slot) in (get_current_epoch(state), get_previous_epoch(state)) + assert data.shard_transition_hash == Hash() assert len(attestation.custody_bits) == 0 for index in attesting_indices: @@ -365,12 +368,11 @@ def misc_block_post_process(state: BeaconState, block: BeaconBlock): def verify_light_client_signatures(state: BeaconState, block: BeaconBlock): period_start = get_current_epoch(state) 
- get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD committee = get_light_client_committee(state, period_start - min(period_start, LIGHT_CLIENT_COMMITTEE_PERIOD)) - signer_validators = [] signer_keys = [] for i, bit in enumerate(block.light_client_signature_bitfield): if bit: signer_keys.append(state.validators[committee[i]].pubkey) - signer_validators.append(committee[i]) + increase_balance(state, committee[i], get_base_reward(state, committee[i])) assert bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), From b36820a26c145708bf43cd014e079ba8c6dc58a5 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Fri, 1 Nov 2019 07:58:13 -0700 Subject: [PATCH 016/194] Changes to make Danny happy --- specs/core/1_new_shards.md | 77 +++++++++++++++++++++++--------------- 1 file changed, 47 insertions(+), 30 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 0d0abfbd7..4089c5478 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -29,8 +29,8 @@ - [`get_indexed_attestation`](#get_indexed_attestation) - [`is_valid_indexed_attestation`](#is_valid_indexed_attestation) - [Beacon Chain Changes](#beacon-chain-changes) - - [New state variables](#new-state-variables) - - [New block data structures](#new-block-data-structures) + - [New beacon state fields](#new-beacon-state-fields) + - [New beacon block data fields](#new-beacon-block-data-fields) - [Attestation processing](#attestation-processing) - [Light client processing](#light-client-processing) - [Epoch transition](#epoch-transition) @@ -63,6 +63,7 @@ This document describes the shard transition function (data layer only) and the | `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | +| `DOMAIN_SHARD_LIGHT_CLIENT` | `192` | | ## Containers @@ -87,8 +88,8 @@ class AttestationData(Container): # FFG vote source: Checkpoint target: 
Checkpoint - # Shard transition hash - shard_transition_hash: Hash + # Shard transition root + shard_transition_root: Hash ``` ### `ShardTransition` @@ -98,9 +99,9 @@ class ShardTransition(Container): # Starting from slot start_slot: Slot # Shard block lengths - shard_block_lengths: List[uint8, MAX_SHARD_BLOCKS_PER_ATTESTATION] + shard_block_lengths: List[uint64, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Shard data roots - shard_data_roots: List[Hash, List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] + shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] # Intermediate state roots shard_state_roots: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] ``` @@ -119,7 +120,7 @@ class Attestation(Container): ```python class IndexedAttestation(Container): - participants: List[ValidatorIndex, MAX_COMMITTEE_SIZE] + participants: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] data: AttestationData custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_SHARD_BLOCKS_PER_ATTESTATION] signature: BLSSignature @@ -138,10 +139,21 @@ class CompactCommittee(Container): ```python class AttestationCustodyBitWrapper(Container): attestation_root: Hash - index: uint64 + block_index: uint64 bit: bool ``` +### `PendingAttestation` + +```python +class PendingAttestation(Container): + aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] + data: AttestationData + inclusion_delay: Slot + proposer_index: ValidatorIndex + crosslink_success: bool +``` + ## Helpers ### `get_online_validators` @@ -184,9 +196,11 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ```python def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Sequence[ValidatorIndex]: - assert epoch % LIGHT_CLIENT_COMMITTEE_PERIOD == 0 - active_validator_indices = get_active_validator_indices(beacon_state, epoch) - seed = get_seed(beacon_state, epoch, DOMAIN_SHARD_LIGHT_CLIENT) + source_epoch = epoch - epoch % 
LIGHT_CLIENT_COMMITTEE_PERIOD + if source_epoch > 0: + source_epoch -= LIGHT_CLIENT_COMMITTEE_PERIOD + active_validator_indices = get_active_validator_indices(beacon_state, source_epoch) + seed = get_seed(beacon_state, source_epoch, DOMAIN_SHARD_LIGHT_CLIENT) return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS)[:TARGET_COMMITTEE_SIZE] ``` @@ -195,7 +209,7 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque ```python def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> IndexedAttestation: attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) - return IndexedAttestation(attesting_indices, data, custody_bits, signature) + return IndexedAttestation(attesting_indices, attestation.data, attestation.custody_bits, attestation.signature) ``` ### `update_gasprice` @@ -228,8 +242,8 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe # Verify aggregate signature all_pubkeys = [] all_message_hashes = [] - for participant, custody_bits in zip(participants, indexed_attestation.custody_bits): - for i, bit in enumerate(custody_bits): + for i, custody_bits in enumerate(indexed_attestation.custody_bits): + for participant, bit in zip(participants, custody_bits): all_pubkeys.append(state.validators[participant].pubkey) # Note: only 2N distinct message hashes all_message_hashes.append(AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, bit)) @@ -244,7 +258,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ## Beacon Chain Changes -### New state variables +### New beacon state fields ```python shard_states: Vector[ShardState, MAX_SHARDS] @@ -253,7 +267,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe next_light_committee: CompactCommittee ``` -### New block data structures +### New beacon block data fields ```python shard_transitions: 
Vector[ShardTransition, MAX_SHARDS] @@ -268,10 +282,10 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data assert data.index < ACTIVE_SHARDS shard = (data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS - proposer_index=get_beacon_proposer_index(state) + proposer_index = get_beacon_proposer_index(state) # Signature check - committee = get_crosslink_committee(state, get_current_epoch(state), shard) + committee = get_beacon_committee(state, get_current_epoch(state), shard) for bits in attestation.custody_bits + [attestation.aggregation_bits]: assert len(bits) == len(committee) # Check signature @@ -281,8 +295,6 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Prepare pending attestation object pending_attestation = PendingAttestation( - slot=data.slot, - shard=shard, aggregation_bits=attestation.aggregation_bits, inclusion_delay=state.slot - data.slot, crosslink_success=False, @@ -305,7 +317,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: if get_total_balance(state, online_indices.intersection(attesting_indices)) * 3 >= get_total_balance(state, online_indices) * 2: # Check correct formatting of shard transition data transition = block.shard_transitions[shard] - assert data.shard_transition_hash == hash_tree_root(transition) + assert data.shard_transition_root == hash_tree_root(transition) assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) assert transition.start_slot == start_slot @@ -331,13 +343,13 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: # Apply proposer reward and cost estimated_attester_reward = sum([get_base_reward(state, attester) for attester in attesting_indices]) increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) - for state, length in zip(transition.shard_states, 
transition.shard_block_lengths): - decrease_balance(state, proposer, state.gasprice * length) + for shard_state, slot, length in zip(transition.shard_states, offset_slots, transition.shard_block_lengths): + decrease_balance(state, get_shard_proposer(state, shard, slot), shard_state.gasprice * length) # Type 2: delayed attestations else: - assert slot_to_epoch(data.slot) in (get_current_epoch(state), get_previous_epoch(state)) - assert data.shard_transition_hash == Hash() + assert state.slot - slot_to_epoch(data.slot) < EPOCH_LENGTH + assert data.shard_transition_root == Hash() assert len(attestation.custody_bits) == 0 for index in attesting_indices: @@ -365,14 +377,18 @@ def misc_block_post_process(state: BeaconState, block: BeaconBlock): ### Light client processing ```python -def verify_light_client_signatures(state: BeaconState, block: BeaconBlock): - period_start = get_current_epoch(state) - get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD - committee = get_light_client_committee(state, period_start - min(period_start, LIGHT_CLIENT_COMMITTEE_PERIOD)) +def process_light_client_signatures(state: BeaconState, block: BeaconBlock): + committee = get_light_client_committee(state, get_current_epoch(state)) + assert len(block.light_client_signature_bitfield) == len(committee) + tot_reward = 0 signer_keys = [] for i, bit in enumerate(block.light_client_signature_bitfield): if bit: signer_keys.append(state.validators[committee[i]].pubkey) increase_balance(state, committee[i], get_base_reward(state, committee[i])) + tot_reward += get_base_reward(state, committee[i]) + + increase_balance(state, get_beacon_proposer_index(state), tot_reward // PROPOSER_REWARD_COEFFICIENT) assert bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), @@ -394,7 +410,8 @@ def phase_1_epoch_transition(state): # Update light client committees if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: state.current_light_committee = state.next_light_committee - 
state.next_light_committee = committee_to_compact_committee(state, get_light_client_committee(state, get_current_epoch(state))) + new_committee = get_light_client_committee(state, get_current_epoch(state) + LIGHT_CLIENT_COMMITTEE_PERIOD) + state.next_light_committee = committee_to_compact_committee(state, new_committee) ``` ### Fraud proofs @@ -420,7 +437,7 @@ def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_b domain=DOMAIN_SHARD_PROPOSER ) # We will add something more substantive in phase 2 - length = len(block.data.rstrip(b'\x00')) + length = len(block_data.rstrip(b'\x00')) return ShardState(slot=slot, root=hash(pre_state + hash_tree_root(block_data)), gasprice=update_gasprice(pre_state, length), latest_block_hash = hash(block_data)) ``` From 6b1bc1e02f0a1be575c911e3725f384349d96891 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 3 Nov 2019 08:17:46 -0800 Subject: [PATCH 017/194] Reformed attestations --- specs/core/1_new_shards.md | 148 +++++++++++++++++++++++-------------- 1 file changed, 91 insertions(+), 57 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 4089c5478..cce39ab59 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -256,6 +256,13 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ) ``` +### `get_attestation_shard` + +```python +def get_shard(state: BeaconState, attestation: Attestation): + return (attestation.data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS +``` + ## Beacon Chain Changes ### New beacon state fields @@ -275,32 +282,22 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe light_client_signature: BLSSignature ``` -### Attestation processing +## Attestation processing + +### `validate_attestation` ```python -def process_attestation(state: BeaconState, attestation: Attestation) -> None: +def validate_attestation(state: BeaconState, attestation: Attestation) 
-> bool: data = attestation.data assert data.index < ACTIVE_SHARDS - shard = (data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS + shard = get_shard(state, attestation) proposer_index = get_beacon_proposer_index(state) # Signature check committee = get_beacon_committee(state, get_current_epoch(state), shard) for bits in attestation.custody_bits + [attestation.aggregation_bits]: assert len(bits) == len(committee) - # Check signature assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) - # Get attesting indices - attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) - - # Prepare pending attestation object - pending_attestation = PendingAttestation( - aggregation_bits=attestation.aggregation_bits, - inclusion_delay=state.slot - data.slot, - crosslink_success=False, - proposer_index=proposer_index - ) - # Type 1: on-time attestations if data.custody_bits != []: # Correct slot @@ -312,56 +309,93 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None: assert len(attestation.custody_bits) == len(offset_slots) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) - # Apply - online_indices = get_online_indices(state) - if get_total_balance(state, online_indices.intersection(attesting_indices)) * 3 >= get_total_balance(state, online_indices) * 2: - # Check correct formatting of shard transition data - transition = block.shard_transitions[shard] - assert data.shard_transition_root == hash_tree_root(transition) - assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) - assert transition.start_slot == start_slot - - # Verify correct calculation of gas prices and slots and chunk roots - prev_gasprice = state.shard_states[shard].gasprice - for i in range(len(offset_slots)): - shard_state, block_length, chunks = 
transition.shard_states[i], transition.shard_block_lengths[i], transition.shard_data_roots[i] - block_length = transition.shard - assert shard_state.gasprice == update_gasprice(prev_gasprice, block_length) - assert shard_state.slot == offset_slots[i] - assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE - filled_roots = chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) - assert shard_state.latest_block_hash == hash_tree_root(filled_roots) - prev_gasprice = shard_state.gasprice - - # Save updated state - state.shard_states[shard] = transition.shard_states[-1] - state.shard_states[shard].slot = state.slot - 1 - - # Save success (for end-of-epoch rewarding) - pending_attestation.crosslink_success = True - - # Apply proposer reward and cost - estimated_attester_reward = sum([get_base_reward(state, attester) for attester in attesting_indices]) - increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) - for shard_state, slot, length in zip(transition.shard_states, offset_slots, transition.shard_block_lengths): - decrease_balance(state, get_shard_proposer(state, shard, slot), shard_state.gasprice * length) - # Type 2: delayed attestations else: assert state.slot - slot_to_epoch(data.slot) < EPOCH_LENGTH assert data.shard_transition_root == Hash() assert len(attestation.custody_bits) == 0 +``` - for index in attesting_indices: - online_countdown[index] = ONLINE_PERIOD +### `apply_shard_transition` +```python +def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTransition) -> None: + # Slot the attestation starts counting from + start_slot = state.shard_next_slots[shard] + # Correct data root count + offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] + assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) + assert transition.start_slot == start_slot - if 
data.target.epoch == get_current_epoch(state): - assert data.source == state.current_justified_checkpoint - state.current_epoch_attestations.append(pending_attestation) - else: - assert data.source == state.previous_justified_checkpoint - state.previous_epoch_attestations.append(pending_attestation) + # Verify correct calculation of gas prices and slots and chunk roots + prev_gasprice = state.shard_states[shard].gasprice + for i in range(len(offset_slots)): + shard_state, block_length, chunks = transition.shard_states[i], transition.shard_block_lengths[i], transition.shard_data_roots[i] + block_length = transition.shard + assert shard_state.gasprice == update_gasprice(prev_gasprice, block_length) + assert shard_state.slot == offset_slots[i] + assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE + filled_roots = chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) + assert shard_state.latest_block_hash == hash_tree_root(filled_roots) + prev_gasprice = shard_state.gasprice + + # Save updated state + state.shard_states[shard] = transition.shard_states[-1] + state.shard_states[shard].slot = state.slot - 1 +``` + +### `process_attestations` + +```python +def process_attestations(state: BeaconState, block: BeaconBlock, attestations: Sequence[Attestation]) -> None: + pending_attestations = [] + # Basic validation + for attestation in attestations: + assert validate_attestation(state, attestation) + # Process crosslinks + online_indices = get_online_indices(state) + winners = set() + for shard in range(ACTIVE_SHARDS): + # All attestations in the block for this shard + this_shard_attestations = [attestation for attestation in attestations if get_shard(state, attestation) == shard and attestation.data.slot == state.slot] + # The committee for this shard + this_shard_committee = get_beacon_committee(state, get_current_epoch(state), shard) + # Loop over all shard transition roots + for shard_transition_root in 
sorted(set([attestation.data.shard_transition_root for attestation in this_shard_attestations])): + all_participants = set() + participating_attestations = [] + for attestation in this_shard_attestations: + participating_attestations.append(attestation) + if attestation.data.shard_transition_root == shard_transition_root: + all_participants = all_participants.union(get_attesting_indices(state, attestation.data, attestation.aggregation_bits)) + if ( + get_total_balance(state, online_indices.intersection(all_participants)) * 3 >= + get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 + ): + assert shard_transition_root == hash_tree_root(block.shard_transition) + process_crosslink(state, shard, block.shard_transition) + # Apply proposer reward and cost + estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) + increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) + for shard_state, slot, length in zip(transition.shard_states, offset_slots, block.shard_transition.shard_block_lengths): + decrease_balance(state, get_shard_proposer(state, shard, slot), shard_state.gasprice * length) + winners.add((shard, shard_transition_root)) + for index in all_participants: + online_countdown[index] = ONLINE_PERIOD + for attestation in attestations: + pending_attestation = PendingAttestation( + aggregation_bits=attestation.aggregation_bits, + data=attestation.data, + inclusion_delay=state.slot - data.slot, + crosslink_success=(attestation.shard, attestation.shard_transition_root) in winners and attestation.data.slot == state.slot, + proposer_index=proposer_index + ) + if attestation.data.target.epoch == get_current_epoch(state): + assert attestation.data.source == state.current_justified_checkpoint + state.current_epoch_attestations.append(pending_attestation) + else: + assert attestation.data.source == state.previous_justified_checkpoint + 
state.previous_epoch_attestations.append(pending_attestation) ``` ### Misc block post-processing From 393436992dd79fa5f31503e6c738c984575162aa Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 3 Nov 2019 12:06:19 -0800 Subject: [PATCH 018/194] Restructured shard blocks --- specs/core/1_new_shards.md | 82 +++++++++++++++++++++++++++++--------- 1 file changed, 63 insertions(+), 19 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index cce39ab59..dd479394b 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -64,9 +64,31 @@ This document describes the shard transition function (data layer only) and the | `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | | `DOMAIN_SHARD_LIGHT_CLIENT` | `192` | | +| `DOMAIN_SHARD_PROPOSAL` | `193` | | ## Containers +### `ShardBlockWrapper` + +```python +class ShardBlockWrapper(Container): + shard_parent_root: Hash + beacon_parent_root: Hash + slot: Slot + body: BytesN[SHARD_BLOCK_CHUNK_SIZE] + signature: BLSSignature +``` + +### `ShardSignedHeader` + +```python +class ShardSignedHeader(Container): + shard_parent_root: Hash + beacon_parent_root: Hash + slot: Slot + body_root: Hash +``` + ### `ShardState` ```python @@ -104,6 +126,8 @@ class ShardTransition(Container): shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] # Intermediate state roots shard_state_roots: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] + # Proposer signature aggregate + proposer_signature_aggregate: BLSSignature ``` ### `Attestation` @@ -322,23 +346,47 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> bool: def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTransition) -> None: # Slot the attestation starts counting from start_slot = state.shard_next_slots[shard] + # Correct data root count offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if 
start_slot + x < state.slot] assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) assert transition.start_slot == start_slot + def chunks_to_body_root(chunks): + return hash_tree_root(chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks))) + + # Reonstruct shard headers + headers = [] + proposers = [] + shard_parent_root = state.shard_states[shard].latest_block_hash + for i in range(len(offset_slots)): + if any(transition.shard_data_roots): + headers.append(ShardSignedHeader( + shard_parent_root=shard_parent_root + parent_hash=get_block_root_at_slot(state, state.slot-1), + slot=offset_slots[i], + body_root=chunks_to_body_root(transition.shard_data_roots[i]) + )) + proposers.append(get_shard_proposer(state, shard, offset_slots[i])) + shard_parent_root = hash_tree_root(headers[-1]) + # Verify correct calculation of gas prices and slots and chunk roots prev_gasprice = state.shard_states[shard].gasprice for i in range(len(offset_slots)): shard_state, block_length, chunks = transition.shard_states[i], transition.shard_block_lengths[i], transition.shard_data_roots[i] - block_length = transition.shard assert shard_state.gasprice == update_gasprice(prev_gasprice, block_length) assert shard_state.slot == offset_slots[i] assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE - filled_roots = chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) - assert shard_state.latest_block_hash == hash_tree_root(filled_roots) prev_gasprice = shard_state.gasprice + # Verify combined signature + assert bls_verify_multiple( + pubkeys=[state.validators[proposer].pubkey for proposer in proposers], + message_hashes=[hash_tree_root(header) for header in headers], + signature=proposer.proposer_signature_aggregate, + domain=DOMAIN_SHARD_PROPOSAL + ) + # Save updated state state.shard_states[shard] = transition.shard_states[-1] state.shard_states[shard].slot = state.slot - 1 @@ 
-450,29 +498,25 @@ def phase_1_epoch_transition(state): ### Fraud proofs -TODO. The intent is to have a single universal fraud proof type, which contains (i) an on-time attestation on shard `s` signing a set of `data_roots`, (ii) an index `i` of a particular data root to focus on, (iii) the full contents of the i'th data, (iii) a Merkle proof to the `shard_state_roots` in the parent block the attestation is referencing, and which then verifies that one of the two conditions is false: +TODO. The intent is to have a single universal fraud proof type, which contains the following parts: -* `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` -* `execute_state_transition(shard, slot, attestation.shard_state_roots[i-1], hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != shard_state_roots[i]` (if `i=0` then instead use `parent.shard_state_roots[s][-1]`) +1. An on-time attestation on some `shard` signing a `ShardTransition` +2. An index `i` of a particular position to focus on +3. The `ShardTransition` itself +4. The full body of the block +5. A Merkle proof to the `shard_states` in the parent block the attestation is referencing + +The proof verifies that one of the two conditions is false: + +1. `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` +2. 
`execute_state_transition(shard, slot, transition.shard_states[i-1].root, hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != transition.shard_states[i].root` (if `i=0` then instead use `parent.shard_states[shard][-1].root`) ## Shard state transition function ```python def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: - # Beginning of block data is the previous block hash - assert block_data[:32] == pre_state.latest_block_hash - assert block_data[32:64] == int_to_bytes8(slot) + b'\x00' * 24 - # Signature check - assert len(block_data) >= 160 - assert bls_verify( - pubkey=proposer_pubkey, - message_hash=hash_tree_root(block_data[:-96]), - signature=block_data[-96:], - domain=DOMAIN_SHARD_PROPOSER - ) # We will add something more substantive in phase 2 - length = len(block_data.rstrip(b'\x00')) - return ShardState(slot=slot, root=hash(pre_state + hash_tree_root(block_data)), gasprice=update_gasprice(pre_state, length), latest_block_hash = hash(block_data)) + return hash(pre_state + hash_tree_root(previous_beacon_root) + hash_tree_root(block_data)) ``` ## Honest committee member behavior From d7234316164cb3cb4fbfeca4f88e346933df03ab Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Sun, 3 Nov 2019 15:49:50 -0800 Subject: [PATCH 019/194] Fixed pending attestation handling and added empty transition check --- specs/core/1_new_shards.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index dd479394b..26ec88e3d 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -404,6 +404,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S online_indices = get_online_indices(state) winners = set() for shard in range(ACTIVE_SHARDS): + success = False # All 
attestations in the block for this shard this_shard_attestations = [attestation for attestation in attestations if get_shard(state, attestation) == shard and attestation.data.slot == state.slot] # The committee for this shard @@ -428,8 +429,9 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S for shard_state, slot, length in zip(transition.shard_states, offset_slots, block.shard_transition.shard_block_lengths): decrease_balance(state, get_shard_proposer(state, shard, slot), shard_state.gasprice * length) winners.add((shard, shard_transition_root)) - for index in all_participants: - online_countdown[index] = ONLINE_PERIOD + success = True + if not success: + assert block.shard_transition == ShardTransition() for attestation in attestations: pending_attestation = PendingAttestation( aggregation_bits=attestation.aggregation_bits, @@ -454,6 +456,9 @@ def misc_block_post_process(state: BeaconState, block: BeaconBlock): for shard in range(MAX_SHARDS): if state.shard_states[shard].slot != state.slot - 1: assert block.shard_transition[shard] == ShardTransition() + for pending_attestation in state.current_epoch_attestations + state.previous_epoch_attestations: + for index in get_attesting_indices(state, pending_attestation.data, pending_attestation.aggregation_bits): + online_countdown[index] = ONLINE_PERIOD ``` ### Light client processing From 03ab1d57858921524aca3cd77b2c78456346ee93 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Mon, 4 Nov 2019 08:50:09 -0800 Subject: [PATCH 020/194] A few cleanups --- specs/core/1_new_shards.md | 39 ++++++++++++++++++-------------------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 26ec88e3d..fc8a19921 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -144,10 +144,8 @@ class Attestation(Container): ```python class IndexedAttestation(Container): - participants: List[ValidatorIndex, 
MAX_VALIDATORS_PER_COMMITTEE] - data: AttestationData - custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_SHARD_BLOCKS_PER_ATTESTATION] - signature: BLSSignature + committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] + attestation: Attestation ``` ### `CompactCommittee` @@ -232,8 +230,8 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque ```python def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> IndexedAttestation: - attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) - return IndexedAttestation(attesting_indices, attestation.data, attestation.custody_bits, attestation.signature) + committee = get_beacon_committee(beacon_state, attestation.data.slot, attestation.data.index) + return IndexedAttestation(committee, attestation) ``` ### `update_gasprice` @@ -259,18 +257,20 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe Check if ``indexed_attestation`` has valid indices and signature. 
""" - # Verify indices are sorted - if indexed_attestation.participants != sorted(indexed_attestation.participants): - return False - # Verify aggregate signature all_pubkeys = [] all_message_hashes = [] - for i, custody_bits in enumerate(indexed_attestation.custody_bits): - for participant, bit in zip(participants, custody_bits): - all_pubkeys.append(state.validators[participant].pubkey) - # Note: only 2N distinct message hashes - all_message_hashes.append(AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, bit)) + aggregation_bits = indexed_attestation.attestation.aggregation_bits + assert len(aggregation_bits) == len(indexed_attestation.committee) + for i, custody_bits in enumerate(indexed_attestation.attestation.custody_bits): + assert len(custody_bits) == len(indexed_attestation.committee) + for participant, abit, cbit in zip(indexed_attestation.committee, aggregation_bits, custody_bits): + if abit: + all_pubkeys.append(state.validators[participant].pubkey) + # Note: only 2N distinct message hashes + all_message_hashes.append(AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, cbit)) + else: + assert cbit == False return bls_verify_multiple( pubkeys=all_pubkeys, @@ -318,17 +318,13 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> bool: proposer_index = get_beacon_proposer_index(state) # Signature check - committee = get_beacon_committee(state, get_current_epoch(state), shard) - for bits in attestation.custody_bits + [attestation.aggregation_bits]: - assert len(bits) == len(committee) assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) # Type 1: on-time attestations if data.custody_bits != []: # Correct slot assert data.slot == state.slot - # Slot the attestation starts counting from - start_slot = state.shard_next_slots[shard] # Correct data root count + start_slot = state.shard_next_slots[shard] offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS 
if start_slot + x < state.slot] assert len(attestation.custody_bits) == len(offset_slots) # Correct parent block root @@ -379,7 +375,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE prev_gasprice = shard_state.gasprice - # Verify combined signature + # Verify combined proposer signature assert bls_verify_multiple( pubkeys=[state.validators[proposer].pubkey for proposer in proposers], message_hashes=[hash_tree_root(header) for header in headers], @@ -420,6 +416,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S if ( get_total_balance(state, online_indices.intersection(all_participants)) * 3 >= get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 + and success is False ): assert shard_transition_root == hash_tree_root(block.shard_transition) process_crosslink(state, shard, block.shard_transition) From 1fdd0332ccf2cb3a54828f3925c5bd209cf0df03 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Tue, 5 Nov 2019 23:44:15 +0800 Subject: [PATCH 021/194] Update ToC --- specs/core/1_new_shards.md | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index fc8a19921..c49eda516 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -12,29 +12,36 @@ - [Configuration](#configuration) - [Misc](#misc) - [Containers](#containers) - - [Aliases](#aliases) + - [`ShardBlockWrapper`](#shardblockwrapper) + - [`ShardSignedHeader`](#shardsignedheader) + - [`ShardState`](#shardstate) - [`AttestationData`](#attestationdata) - - [`AttestationShardData`](#attestationsharddata) - - [`ReducedAttestationData`](#reducedattestationdata) + - [`ShardTransition`](#shardtransition) - [`Attestation`](#attestation) - - [`ReducedAttestation`](#reducedattestation) - [`IndexedAttestation`](#indexedattestation) - 
[`CompactCommittee`](#compactcommittee) - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) + - [`PendingAttestation`](#pendingattestation) - [Helpers](#helpers) - [`get_online_validators`](#get_online_validators) - [`pack_compact_validator`](#pack_compact_validator) - [`committee_to_compact_committee`](#committee_to_compact_committee) - [`get_light_client_committee`](#get_light_client_committee) - [`get_indexed_attestation`](#get_indexed_attestation) + - [`update_gasprice`](#update_gasprice) - [`is_valid_indexed_attestation`](#is_valid_indexed_attestation) + - [`get_attestation_shard`](#get_attestation_shard) - [Beacon Chain Changes](#beacon-chain-changes) - [New beacon state fields](#new-beacon-state-fields) - [New beacon block data fields](#new-beacon-block-data-fields) - [Attestation processing](#attestation-processing) + - [`validate_attestation`](#validate_attestation) + - [`apply_shard_transition`](#apply_shard_transition) + - [`process_attestations`](#process_attestations) + - [Misc block post-processing](#misc-block-post-processing) - [Light client processing](#light-client-processing) - [Epoch transition](#epoch-transition) - - [Fraud proofs](#fraud-proofs) + - [Fraud proofs](#fraud-proofs) - [Shard state transition function](#shard-state-transition-function) - [Honest committee member behavior](#honest-committee-member-behavior) @@ -306,9 +313,9 @@ def get_shard(state: BeaconState, attestation: Attestation): light_client_signature: BLSSignature ``` -## Attestation processing +### Attestation processing -### `validate_attestation` +#### `validate_attestation` ```python def validate_attestation(state: BeaconState, attestation: Attestation) -> bool: @@ -336,7 +343,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> bool: assert len(attestation.custody_bits) == 0 ``` -### `apply_shard_transition` +#### `apply_shard_transition` ```python def apply_shard_transition(state: BeaconState, shard: Shard, transition: 
ShardTransition) -> None: @@ -388,7 +395,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr state.shard_states[shard].slot = state.slot - 1 ``` -### `process_attestations` +#### `process_attestations` ```python def process_attestations(state: BeaconState, block: BeaconBlock, attestations: Sequence[Attestation]) -> None: @@ -498,7 +505,7 @@ def phase_1_epoch_transition(state): state.next_light_committee = committee_to_compact_committee(state, new_committee) ``` -### Fraud proofs +## Fraud proofs TODO. The intent is to have a single universal fraud proof type, which contains the following parts: From 462d05d6d8f5be39e2f426e702c9fc557ad757cd Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:31:59 -0800 Subject: [PATCH 022/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index c49eda516..c27b4d0c9 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -61,7 +61,7 @@ This document describes the shard transition function (data layer only) and the | `ACTIVE_SHARDS` | `2**6` (= 64) | | `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | -| `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~29 hours | +| `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | | `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | | `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | | `BLOCK_SIZE_TARGET` | `3 * 2**16` (= 196,608) | | From 213032e51f1a929dc96e3144cd81de65907feaf8 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:32:18 -0800 Subject: [PATCH 023/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md 
b/specs/core/1_new_shards.md index c27b4d0c9..c3e6ebc53 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -290,7 +290,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ### `get_attestation_shard` ```python -def get_shard(state: BeaconState, attestation: Attestation): +def get_shard(state: BeaconState, attestation: Attestation) -> Shard: return (attestation.data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS ``` From d3f9bfe68d440b0c49161bbf81e121549bd9ee24 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:32:49 -0800 Subject: [PATCH 024/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index c3e6ebc53..de436004e 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -275,7 +275,9 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe if abit: all_pubkeys.append(state.validators[participant].pubkey) # Note: only 2N distinct message hashes - all_message_hashes.append(AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, cbit)) + all_message_hashes.append(hash_tree_root( + AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, cbit) + )) else: assert cbit == False From d59e87f8764dd9065913da960abff8e1d3fecb7a Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:33:26 -0800 Subject: [PATCH 025/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index de436004e..477b95a10 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -320,7 +320,7 @@ def get_shard(state: BeaconState, attestation: Attestation) -> Shard: #### 
`validate_attestation` ```python -def validate_attestation(state: BeaconState, attestation: Attestation) -> bool: +def validate_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data assert data.index < ACTIVE_SHARDS shard = get_shard(state, attestation) From bcd06d88ef85bd11d93237fa7773cc5389a7f2b5 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:33:41 -0800 Subject: [PATCH 026/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 477b95a10..03b574cb2 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -367,7 +367,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr for i in range(len(offset_slots)): if any(transition.shard_data_roots): headers.append(ShardSignedHeader( - shard_parent_root=shard_parent_root + shard_parent_root=shard_parent_root, parent_hash=get_block_root_at_slot(state, state.slot-1), slot=offset_slots[i], body_root=chunks_to_body_root(transition.shard_data_roots[i]) From 9d3a230974c1df44cb29cadfa207d57cf20f34cf Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:34:04 -0800 Subject: [PATCH 027/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 03b574cb2..d10578265 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -404,7 +404,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S pending_attestations = [] # Basic validation for attestation in attestations: - assert validate_attestation(state, attestation) + validate_attestation(state, attestation) # Process crosslinks online_indices = get_online_indices(state) winners = set() 
From 9810a3d2606c41ad56f5ae26f5260bdb0c564bec Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:34:34 -0800 Subject: [PATCH 028/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index d10578265..8e1ede108 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -432,7 +432,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S # Apply proposer reward and cost estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) - for shard_state, slot, length in zip(transition.shard_states, offset_slots, block.shard_transition.shard_block_lengths): + for shard_state, slot, length in zip(block.shard_transition.shard_states, offset_slots, block.shard_transition.shard_block_lengths): decrease_balance(state, get_shard_proposer(state, shard, slot), shard_state.gasprice * length) winners.add((shard, shard_transition_root)) success = True From 4d1e9e15c67f6a8d250999d67695be55137fdc3b Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 10:34:48 -0800 Subject: [PATCH 029/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 8e1ede108..3659cc599 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -293,7 +293,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ```python def get_shard(state: BeaconState, attestation: Attestation) -> Shard: - return (attestation.data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS + return Shard((attestation.data.index + 
get_start_shard(state, data.slot)) % ACTIVE_SHARDS) ``` ## Beacon Chain Changes From 8a6bd201f9f341700d0aba355b623eb7261aedfe Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:15:04 -0800 Subject: [PATCH 030/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 3659cc599..fa8397bad 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -443,7 +443,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S aggregation_bits=attestation.aggregation_bits, data=attestation.data, inclusion_delay=state.slot - data.slot, - crosslink_success=(attestation.shard, attestation.shard_transition_root) in winners and attestation.data.slot == state.slot, + crosslink_success=(get_shard(state, attestation), attestation.shard_transition_root) in winners and attestation.data.slot == state.slot, proposer_index=proposer_index ) if attestation.data.target.epoch == get_current_epoch(state): From 41be60ee6e4b782d54f0de58c69cc47b913669fc Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:15:47 -0800 Subject: [PATCH 031/194] Update specs/core/1_new_shards.md --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index fa8397bad..074e06db7 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -428,7 +428,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S and success is False ): assert shard_transition_root == hash_tree_root(block.shard_transition) - process_crosslink(state, shard, block.shard_transition) + apply_shard_transition(state, shard, block.shard_transition) # Apply proposer reward and cost estimated_attester_reward = sum([get_base_reward(state, attester) for attester in 
all_participants]) increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) From 793c8616cd00481dd2b44978dd577ab9cb17b148 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:16:07 -0800 Subject: [PATCH 032/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 074e06db7..99cf82ce4 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -457,7 +457,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S ### Misc block post-processing ```python -def misc_block_post_process(state: BeaconState, block: BeaconBlock): +def misc_block_post_process(state: BeaconState, block: BeaconBlock) -> None: # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it for shard in range(MAX_SHARDS): if state.shard_states[shard].slot != state.slot - 1: From bf13757722654017d3874b57bc3ed7a42fe01dcf Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:16:25 -0800 Subject: [PATCH 033/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 99cf82ce4..4ee502585 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -464,7 +464,7 @@ def misc_block_post_process(state: BeaconState, block: BeaconBlock) -> None: assert block.shard_transition[shard] == ShardTransition() for pending_attestation in state.current_epoch_attestations + state.previous_epoch_attestations: for index in get_attesting_indices(state, pending_attestation.data, pending_attestation.aggregation_bits): - online_countdown[index] = ONLINE_PERIOD + state.online_countdown[index] = ONLINE_PERIOD ``` ### Light client processing 
From 0ddc0ba527efc199847f3d3a8285751c06bf39d3 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:16:36 -0800 Subject: [PATCH 034/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 4ee502585..bd25545f2 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -470,7 +470,7 @@ def misc_block_post_process(state: BeaconState, block: BeaconBlock) -> None: ### Light client processing ```python -def process_light_client_signatures(state: BeaconState, block: BeaconBlock): +def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> None: committee = get_light_client_committee(state, get_current_epoch(state)) assert len(block.light_client_signature_bitfield) == len(committee) tot_reward = 0 From 822ceade5835261a9b284da79311466545b4f071 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:16:56 -0800 Subject: [PATCH 035/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index bd25545f2..f6a1ece72 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -473,7 +473,7 @@ def misc_block_post_process(state: BeaconState, block: BeaconBlock) -> None: def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> None: committee = get_light_client_committee(state, get_current_epoch(state)) assert len(block.light_client_signature_bitfield) == len(committee) - tot_reward = 0 + total_reward = Gwei(0) signer_keys = [] for i, bit in enumerate(block.light_client_signature_bitfield): if bit: From a749dd9a7789f3493992a7bb84251af90f63d71b Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:17:07 -0800 Subject: [PATCH 036/194] Update 
specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index f6a1ece72..aa3e4f188 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -479,7 +479,7 @@ def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> N if bit: signer_keys.append(state.validators[committee[i]].pubkey) increase_balance(state, committee[i], get_base_reward(state, committee[i])) - tot_reward += get_base_reward(state, committee[i]) + total_reward += get_base_reward(state, committee[i]) increase_balance(state, get_beacon_proposer_index(state), tot_reward // PROPOSER_REWARD_COEFFICIENT) From c9cc110ab1d6a334db23d3a8aa1afc53baf553ad Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:17:27 -0800 Subject: [PATCH 037/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index aa3e4f188..0730882dd 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -481,7 +481,7 @@ def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> N increase_balance(state, committee[i], get_base_reward(state, committee[i])) total_reward += get_base_reward(state, committee[i]) - increase_balance(state, get_beacon_proposer_index(state), tot_reward // PROPOSER_REWARD_COEFFICIENT) + increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_COEFFICIENT) assert bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), From 2ea8eb9cc3a93885a7058619efa88899c1ea6993 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Tue, 5 Nov 2019 12:17:50 -0800 Subject: [PATCH 038/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 0730882dd..70a12d218 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -494,7 +494,7 @@ def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> N ### Epoch transition ```python -def phase_1_epoch_transition(state): +def phase_1_epoch_transition(state: BeaconState) -> None: # Slowly remove validators from the "online" set if they do not show up for index in range(len(state.validators)): if state.online_countdown[index] != 0: From ff3bd725a36e9ac6400c74c640060adb74b92371 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Wed, 6 Nov 2019 15:53:01 -0500 Subject: [PATCH 039/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 70a12d218..5ec4a7e19 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -132,7 +132,7 @@ class ShardTransition(Container): # Shard data roots shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] # Intermediate state roots - shard_state_roots: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] + shard_states: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Proposer signature aggregate proposer_signature_aggregate: BLSSignature ``` From 8cb404d92663fa036b129f218ab6c850f153cac4 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Wed, 6 Nov 2019 15:53:17 -0500 Subject: [PATCH 040/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 5ec4a7e19..149aee358 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -388,7 +388,7 @@ def apply_shard_transition(state: BeaconState, shard: 
Shard, transition: ShardTr assert bls_verify_multiple( pubkeys=[state.validators[proposer].pubkey for proposer in proposers], message_hashes=[hash_tree_root(header) for header in headers], - signature=proposer.proposer_signature_aggregate, + signature=transition.proposer_signature_aggregate, domain=DOMAIN_SHARD_PROPOSAL ) From e66b4e06eff60d4f4f902b88f8fb78bb468a3213 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Wed, 6 Nov 2019 15:53:41 -0500 Subject: [PATCH 041/194] Update specs/core/1_new_shards.md Co-Authored-By: Hsiao-Wei Wang --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 149aee358..f9fbfde76 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -340,7 +340,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Type 2: delayed attestations else: - assert state.slot - slot_to_epoch(data.slot) < EPOCH_LENGTH + assert state.slot < data.slot + SLOTS_PER_EPOCH assert data.shard_transition_root == Hash() assert len(attestation.custody_bits) == 0 ``` From b6680d28bb467abb1f83c04d52ec013e64e3ca67 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Wed, 6 Nov 2019 15:54:30 -0500 Subject: [PATCH 042/194] Update specs/core/1_new_shards.md Co-Authored-By: Danny Ryan --- specs/core/1_new_shards.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index f9fbfde76..01701f514 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -437,7 +437,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S winners.add((shard, shard_transition_root)) success = True if not success: - assert block.shard_transition == ShardTransition() + assert block.shard_transitions[shard] == ShardTransition() for attestation in attestations: 
pending_attestation = PendingAttestation( aggregation_bits=attestation.aggregation_bits, From 8f34a0788dd786f615204e7ba0b5d9565d2b1906 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Wed, 6 Nov 2019 14:19:00 -0800 Subject: [PATCH 043/194] Some initial changes --- specs/core/1_new_shards.md | 51 ++++++++++++++++++++++---------------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 01701f514..48a575121 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -13,7 +13,7 @@ - [Misc](#misc) - [Containers](#containers) - [`ShardBlockWrapper`](#shardblockwrapper) - - [`ShardSignedHeader`](#shardsignedheader) + - [`ShardSignableHeader`](#shardsignedheader) - [`ShardState`](#shardstate) - [`AttestationData`](#attestationdata) - [`ShardTransition`](#shardtransition) @@ -28,7 +28,7 @@ - [`committee_to_compact_committee`](#committee_to_compact_committee) - [`get_light_client_committee`](#get_light_client_committee) - [`get_indexed_attestation`](#get_indexed_attestation) - - [`update_gasprice`](#update_gasprice) + - [`get_updated_gasprice`](#get_updated_gasprice) - [`is_valid_indexed_attestation`](#is_valid_indexed_attestation) - [`get_attestation_shard`](#get_attestation_shard) - [Beacon Chain Changes](#beacon-chain-changes) @@ -77,6 +77,8 @@ This document describes the shard transition function (data layer only) and the ### `ShardBlockWrapper` +_Wrapper for being broadcasted over the network._ + ```python class ShardBlockWrapper(Container): shard_parent_root: Hash @@ -86,10 +88,10 @@ class ShardBlockWrapper(Container): signature: BLSSignature ``` -### `ShardSignedHeader` +### `ShardSignableHeader` ```python -class ShardSignedHeader(Container): +class ShardSignableHeader(Container): shard_parent_root: Hash beacon_parent_root: Hash slot: Slot @@ -102,7 +104,7 @@ class ShardSignedHeader(Container): class ShardState(Container): slot: Slot gasprice: Gwei - root: Hash + data: Hash 
latest_block_hash: Hash ``` @@ -131,7 +133,7 @@ class ShardTransition(Container): shard_block_lengths: List[uint64, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Shard data roots shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] - # Intermediate state roots + # Intermediate shard states shard_states: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Proposer signature aggregate proposer_signature_aggregate: BLSSignature @@ -241,10 +243,10 @@ def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) return IndexedAttestation(committee, attestation) ``` -### `update_gasprice` +### `get_updated_gasprice` ```python -def update_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: +def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: if length > BLOCK_SIZE_TARGET: delta = prev_gasprice * (length - BLOCK_SIZE_TARGET) // BLOCK_SIZE_TARGET // GASPRICE_ADJUSTMENT_COEFFICIENT return min(prev_gasprice + delta, MAX_GASPRICE) @@ -289,13 +291,20 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe ) ``` -### `get_attestation_shard` +### `get_shard` ```python def get_shard(state: BeaconState, attestation: Attestation) -> Shard: return Shard((attestation.data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS) ``` +### `get_offset_slots` + +```python +def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: + return [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] +``` + ## Beacon Chain Changes ### New beacon state fields @@ -333,16 +342,13 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: # Correct slot assert data.slot == state.slot # Correct data root count - start_slot = state.shard_next_slots[shard] - offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] - assert len(attestation.custody_bits) == len(offset_slots) + assert 
len(attestation.custody_bits) == len(get_offset_slots(state, state.shard_next_slots[shard])) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Type 2: delayed attestations else: assert state.slot < data.slot + SLOTS_PER_EPOCH assert data.shard_transition_root == Hash() - assert len(attestation.custody_bits) == 0 ``` #### `apply_shard_transition` @@ -353,7 +359,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr start_slot = state.shard_next_slots[shard] # Correct data root count - offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] + offset_slots = get_offset_slots(state, start_slot) assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) assert transition.start_slot == start_slot @@ -366,7 +372,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr shard_parent_root = state.shard_states[shard].latest_block_hash for i in range(len(offset_slots)): if any(transition.shard_data_roots): - headers.append(ShardSignedHeader( + headers.append(ShardSignableHeader( shard_parent_root=shard_parent_root, parent_hash=get_block_root_at_slot(state, state.slot-1), slot=offset_slots[i], @@ -379,7 +385,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr prev_gasprice = state.shard_states[shard].gasprice for i in range(len(offset_slots)): shard_state, block_length, chunks = transition.shard_states[i], transition.shard_block_lengths[i], transition.shard_data_roots[i] - assert shard_state.gasprice == update_gasprice(prev_gasprice, block_length) + assert shard_state.gasprice == get_updated_gasprice(prev_gasprice, block_length) assert shard_state.slot == offset_slots[i] assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE prev_gasprice = shard_state.gasprice @@ -462,9 +468,6 @@ def misc_block_post_process(state: 
BeaconState, block: BeaconBlock) -> None: for shard in range(MAX_SHARDS): if state.shard_states[shard].slot != state.slot - 1: assert block.shard_transition[shard] == ShardTransition() - for pending_attestation in state.current_epoch_attestations + state.previous_epoch_attestations: - for index in get_attesting_indices(state, pending_attestation.data, pending_attestation.aggregation_bits): - state.online_countdown[index] = ONLINE_PERIOD ``` ### Light client processing @@ -505,6 +508,11 @@ def phase_1_epoch_transition(state: BeaconState) -> None: state.current_light_committee = state.next_light_committee new_committee = get_light_client_committee(state, get_current_epoch(state) + LIGHT_CLIENT_COMMITTEE_PERIOD) state.next_light_committee = committee_to_compact_committee(state, new_committee) + + # Process pending attestations + for pending_attestation in state.current_epoch_attestations + state.previous_epoch_attestations: + for index in get_attesting_indices(state, pending_attestation.data, pending_attestation.aggregation_bits): + state.online_countdown[index] = ONLINE_PERIOD ``` ## Fraud proofs @@ -520,7 +528,7 @@ TODO. The intent is to have a single universal fraud proof type, which contains The proof verifies that one of the two conditions is false: 1. `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` -2. `execute_state_transition(shard, slot, transition.shard_states[i-1].root, hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != transition.shard_states[i].root` (if `i=0` then instead use `parent.shard_states[shard][-1].root`) +2. 
`execute_state_transition(shard, slot, transition.shard_states[i-1].data, hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != transition.shard_states[i].data` (if `i=0` then instead use `parent.shard_states[shard][-1].data`) ## Shard state transition function @@ -535,8 +543,7 @@ def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_b Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on. Three seconds into slot `slot`, run the following procedure: * Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_states[shard][-1]`, `start_slot = shard_state.slot`. -* Let `offset_slots = [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot]` -* For `slot in offset_slots`, do the following: +* For `slot in get_offset_slots(state, start_slot)`, do the following: * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. 
* If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` * If `len(choices) == 1`, do `proposals.append(choices[0])` From 7b4a6e8307037375fe4023fd2acf986686c023cb Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Thu, 7 Nov 2019 05:21:36 -0800 Subject: [PATCH 044/194] Simplified gasprice update --- specs/core/1_new_shards.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index 48a575121..c9cbad161 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -69,6 +69,7 @@ This document describes the shard transition function (data layer only) and the | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | | `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | +| `MIN_GASPRICE` | `2**5` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | | `DOMAIN_SHARD_LIGHT_CLIENT` | `192` | | | `DOMAIN_SHARD_PROPOSAL` | `193` | | @@ -252,10 +253,7 @@ def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: return min(prev_gasprice + delta, MAX_GASPRICE) else: delta = prev_gasprice * (BLOCK_SIZE_TARGET - length) // BLOCK_SIZE_TARGET // GASPRICE_ADJUSTMENT_COEFFICIENT - if delta > prev_gasprice - GASPRICE_ADJUSTMENT_COEFFICIENT: - return GASPRICE_ADJUSTMENT_COEFFICIENT - else: - return prev_gasprice - delta + return max(prev_gasprice, MIN_GASPRICE + delta) - delta ``` ### `is_valid_indexed_attestation` From 4d6dcd15ba29f271e6d00a16080c19ed0e4a1fb7 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Tue, 12 Nov 2019 05:27:34 -0800 Subject: [PATCH 045/194] Some fixes --- specs/core/1_new_shards.md | 41 +++++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index c9cbad161..b3427afe0 100644 --- a/specs/core/1_new_shards.md +++ 
b/specs/core/1_new_shards.md @@ -18,7 +18,7 @@ - [`AttestationData`](#attestationdata) - [`ShardTransition`](#shardtransition) - [`Attestation`](#attestation) - - [`IndexedAttestation`](#indexedattestation) + - [`AttestationAndCommittee`](#attestationandcommittee) - [`CompactCommittee`](#compactcommittee) - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) - [`PendingAttestation`](#pendingattestation) @@ -106,7 +106,7 @@ class ShardState(Container): slot: Slot gasprice: Gwei data: Hash - latest_block_hash: Hash + latest_block_root: Hash ``` ### `AttestationData` @@ -120,6 +120,8 @@ class AttestationData(Container): # FFG vote source: Checkpoint target: Checkpoint + # Current-slot shard block root + head_shard_root: Hash # Shard transition root shard_transition_root: Hash ``` @@ -150,10 +152,10 @@ class Attestation(Container): signature: BLSSignature ``` -### `IndexedAttestation` +### `AttestationAndCommittee` ```python -class IndexedAttestation(Container): +class AttestationAndCommittee(Container): committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] attestation: Attestation ``` @@ -239,9 +241,9 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque ### `get_indexed_attestation` ```python -def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> IndexedAttestation: +def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> AttestationAndCommittee: committee = get_beacon_committee(beacon_state, attestation.data.slot, attestation.data.index) - return IndexedAttestation(committee, attestation) + return AttestationAndCommittee(committee, attestation) ``` ### `get_updated_gasprice` @@ -259,7 +261,7 @@ def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: ### `is_valid_indexed_attestation` ```python -def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: IndexedAttestation) -> bool: +def is_valid_indexed_attestation(state: 
BeaconState, indexed_attestation: AttestationAndCommittee) -> bool: """ Check if ``indexed_attestation`` has valid indices and signature. """ @@ -303,6 +305,13 @@ def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: return [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] ``` +### `chunks_to_body_root` + +```python +def chunks_to_body_root(chunks): + return hash_tree_root(chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks))) +``` + ## Beacon Chain Changes ### New beacon state fields @@ -336,7 +345,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: # Signature check assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) # Type 1: on-time attestations - if data.custody_bits != []: + if attestation.custody_bits != []: # Correct slot assert data.slot == state.slot # Correct data root count @@ -345,7 +354,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Type 2: delayed attestations else: - assert state.slot < data.slot + SLOTS_PER_EPOCH + assert state.slot - compute_start_slot_at_epoch(slot_to_epoch(data.slot)) < EPOCH_LENGTH assert data.shard_transition_root == Hash() ``` @@ -361,13 +370,10 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) assert transition.start_slot == start_slot - def chunks_to_body_root(chunks): - return hash_tree_root(chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks))) - # Reonstruct shard headers headers = [] proposers = [] - shard_parent_root = state.shard_states[shard].latest_block_hash + shard_parent_root = state.shard_states[shard].latest_block_root for i in range(len(offset_slots)): if 
any(transition.shard_data_roots): headers.append(ShardSignableHeader( @@ -431,7 +437,10 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 and success is False ): + # Attestation <-> shard transition consistency assert shard_transition_root == hash_tree_root(block.shard_transition) + assert attestation.data.head_shard_root == chunks_to_body_root(block.shard_transition.shard_data_roots[-1]) + # Apply transition apply_shard_transition(state, shard, block.shard_transition) # Apply proposer reward and cost estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) @@ -461,7 +470,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S ### Misc block post-processing ```python -def misc_block_post_process(state: BeaconState, block: BeaconBlock) -> None: +def verify_shard_transition_false_positives(state: BeaconState, block: BeaconBlock) -> None: # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it for shard in range(MAX_SHARDS): if state.shard_states[shard].slot != state.slot - 1: @@ -538,7 +547,7 @@ def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_b ## Honest committee member behavior -Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on. Three seconds into slot `slot`, run the following procedure: +Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on, and let `QUARTER_PERIOD = SECONDS_PER_SLOT // 4`. `2 * QUARTER_PERIOD` seconds into slot `slot`, run the following procedure: * Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_states[shard][-1]`, `start_slot = shard_state.slot`. 
* For `slot in get_offset_slots(state, start_slot)`, do the following: @@ -546,6 +555,6 @@ Suppose you are a committee member on shard `shard` at slot `current_slot`. Let * If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` * If `len(choices) == 1`, do `proposals.append(choices[0])` * If `len(choices) > 1`, let `winning_proposal` be the proposal with the largest number of total attestations from slots in `state.shard_next_slots[shard]....slot-1` supporting it or any of its descendants, breaking ties by choosing the first proposal locally seen. Do `proposals.append(winning_proposal)`. - * Set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. + * If `proposals[-1]` is NOT an empty proposal, set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. If it is an empty proposal, leave `shard_state` unchanged. Make an attestation using `shard_data_roots = [hash_tree_root(proposal) for proposal in proposals]` and `shard_state_roots = shard_states`. 
From ef936b94fef5f471a7092e741ed0b7c00a225f93 Mon Sep 17 00:00:00 2001 From: Vitalik Buterin Date: Tue, 12 Nov 2019 06:13:47 -0800 Subject: [PATCH 046/194] Added get_shard_proposer_index --- specs/core/1_new_shards.md | 33 ++++++++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/specs/core/1_new_shards.md b/specs/core/1_new_shards.md index b3427afe0..df543f248 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_new_shards.md @@ -62,6 +62,7 @@ This document describes the shard transition function (data layer only) and the | `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | +| `SHARD_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | | `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | | `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | | `BLOCK_SIZE_TARGET` | `3 * 2**16` (= 196,608) | | @@ -72,6 +73,7 @@ This document describes the shard transition function (data layer only) and the | `MIN_GASPRICE` | `2**5` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | | `DOMAIN_SHARD_LIGHT_CLIENT` | `192` | | +| `DOMAIN_SHARD_COMMITTEE` | `192` | | | `DOMAIN_SHARD_PROPOSAL` | `193` | | ## Containers @@ -226,6 +228,27 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid return CompactCommittee(pubkeys=pubkeys, compact_validators=compact_validators) ``` +### `get_shard_committee` + +```python +def get_shard_committee(beacon_state: BeaconState, epoch: Epoch, shard: Shard) -> Sequence[ValidatorIndex]: + source_epoch = epoch - epoch % SHARD_COMMITTEE_PERIOD + if source_epoch > 0: + source_epoch -= SHARD_COMMITTEE_PERIOD + active_validator_indices = get_active_validator_indices(beacon_state, source_epoch) + seed = get_seed(beacon_state, source_epoch, DOMAIN_SHARD_COMMITTEE) + return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS) +``` + 
+### `get_shard_proposer_index` + +```python +def get_shard_proposer_index(beacon_state: BeaconState, slot: Slot, shard: Shard) -> ValidatorIndex: + committee = get_shard_committee(beacon_state, slot_to_epoch(slot), shard) + r = bytes_to_int(get_seed(beacon_state, get_current_epoch(state), DOMAIN_SHARD_COMMITTEE)[:8]) + return committee[r % len(committee)] +``` + ### `get_light_client_committee` ```python @@ -382,7 +405,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr slot=offset_slots[i], body_root=chunks_to_body_root(transition.shard_data_roots[i]) )) - proposers.append(get_shard_proposer(state, shard, offset_slots[i])) + proposers.append(get_shard_proposer_index(state, shard, offset_slots[i])) shard_parent_root = hash_tree_root(headers[-1]) # Verify correct calculation of gas prices and slots and chunk roots @@ -446,7 +469,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) for shard_state, slot, length in zip(block.shard_transition.shard_states, offset_slots, block.shard_transition.shard_block_lengths): - decrease_balance(state, get_shard_proposer(state, shard, slot), shard_state.gasprice * length) + decrease_balance(state, get_shard_proposer_index(state, shard, slot), shard_state.gasprice * length) winners.add((shard, shard_transition_root)) success = True if not success: @@ -535,7 +558,7 @@ TODO. The intent is to have a single universal fraud proof type, which contains The proof verifies that one of the two conditions is false: 1. `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` -2. 
`execute_state_transition(shard, slot, transition.shard_states[i-1].data, hash_tree_root(parent), get_shard_proposer(state, shard, slot), block_contents) != transition.shard_states[i].data` (if `i=0` then instead use `parent.shard_states[shard][-1].data`) +2. `execute_state_transition(shard, slot, transition.shard_states[i-1].data, hash_tree_root(parent), get_shard_proposer_index(state, shard, slot), block_contents) != transition.shard_states[i].data` (if `i=0` then instead use `parent.shard_states[shard][-1].data`) ## Shard state transition function @@ -551,10 +574,10 @@ Suppose you are a committee member on shard `shard` at slot `current_slot`. Let * Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_states[shard][-1]`, `start_slot = shard_state.slot`. * For `slot in get_offset_slots(state, start_slot)`, do the following: - * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. + * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer_index(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. * If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` * If `len(choices) == 1`, do `proposals.append(choices[0])` * If `len(choices) > 1`, let `winning_proposal` be the proposal with the largest number of total attestations from slots in `state.shard_next_slots[shard]....slot-1` supporting it or any of its descendants, breaking ties by choosing the first proposal locally seen. 
Do `proposals.append(winning_proposal)`. - * If `proposals[-1]` is NOT an empty proposal, set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. If it is an empty proposal, leave `shard_state` unchanged. + * If `proposals[-1]` is NOT an empty proposal, set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer_index(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. If it is an empty proposal, leave `shard_state` unchanged. Make an attestation using `shard_data_roots = [hash_tree_root(proposal) for proposal in proposals]` and `shard_state_roots = shard_states`. From a33e01dab2d5cab9a6aab7188c3b5456fa23aaa0 Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 15 Nov 2019 21:11:42 +0100 Subject: [PATCH 047/194] Start making phase1 work in pyspec again --- specs/core/0_beacon-chain.md | 21 +- .../{1_new_shards.md => 1_beacon-chain.md} | 335 +++++++++++------- specs/core/1_custody-game.md | 54 +-- specs/core/1_fraudproofs.md | 52 +++ specs/{core => old}/1_beacon-chain-misc.md | 0 specs/{core => old}/1_shard-data-chains.md | 0 6 files changed, 262 insertions(+), 200 deletions(-) rename specs/core/{1_new_shards.md => 1_beacon-chain.md} (74%) create mode 100644 specs/core/1_fraudproofs.md rename specs/{core => old}/1_beacon-chain-misc.md (100%) rename specs/{core => old}/1_shard-data-chains.md (100%) diff --git a/specs/core/0_beacon-chain.md b/specs/core/0_beacon-chain.md index fca21994f..e8aa53f13 100644 --- a/specs/core/0_beacon-chain.md +++ b/specs/core/0_beacon-chain.md @@ -1129,19 +1129,13 @@ def process_slot(state: BeaconState) -> None: ### Epoch processing -*Note*: The `# @LabelHere` lines below are placeholders to show that code will be inserted here in a future phase. 
- ```python def process_epoch(state: BeaconState) -> None: process_justification_and_finalization(state) process_rewards_and_penalties(state) process_registry_updates(state) - # @process_reveal_deadlines - # @process_challenge_deadlines process_slashings(state) - # @update_period_committee process_final_updates(state) - # @after_process_final_updates ``` #### Helper functions @@ -1425,16 +1419,11 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # Verify that outstanding deposits are processed up to the maximum number of deposits assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - for operations, function in ( - (body.proposer_slashings, process_proposer_slashing), - (body.attester_slashings, process_attester_slashing), - (body.attestations, process_attestation), - (body.deposits, process_deposit), - (body.voluntary_exits, process_voluntary_exit), - # @process_shard_receipt_proofs - ): - for operation in operations: - function(state, operation) + process_operations(body.proposer_slashings, process_proposer_slashing) + process_operations(body.attester_slashings, process_attester_slashing) + process_operations(body.attestations, process_attestations) + process_operations(body.deposits, process_deposit) + process_operations(body.voluntary_exits, process_voluntary_exit) ``` ##### Proposer slashings diff --git a/specs/core/1_new_shards.md b/specs/core/1_beacon-chain.md similarity index 74% rename from specs/core/1_new_shards.md rename to specs/core/1_beacon-chain.md index df543f248..04df84e86 100644 --- a/specs/core/1_new_shards.md +++ b/specs/core/1_beacon-chain.md @@ -1,4 +1,4 @@ -# Ethereum 2.0 Phase 1 -- Crosslinks and Shard Data +# Ethereum 2.0 Phase 1 -- The Beacon Chain for Shards **Notice**: This document is a work-in-progress for researchers and implementers. 
@@ -6,53 +6,20 @@ -- [Ethereum 2.0 Phase 1 -- Crosslinks and Shard Data](#ethereum-20-phase-1----crosslinks-and-shard-data) - - [Table of contents](#table-of-contents) - - [Introduction](#introduction) - - [Configuration](#configuration) - - [Misc](#misc) - - [Containers](#containers) - - [`ShardBlockWrapper`](#shardblockwrapper) - - [`ShardSignableHeader`](#shardsignedheader) - - [`ShardState`](#shardstate) - - [`AttestationData`](#attestationdata) - - [`ShardTransition`](#shardtransition) - - [`Attestation`](#attestation) - - [`AttestationAndCommittee`](#attestationandcommittee) - - [`CompactCommittee`](#compactcommittee) - - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) - - [`PendingAttestation`](#pendingattestation) - - [Helpers](#helpers) - - [`get_online_validators`](#get_online_validators) - - [`pack_compact_validator`](#pack_compact_validator) - - [`committee_to_compact_committee`](#committee_to_compact_committee) - - [`get_light_client_committee`](#get_light_client_committee) - - [`get_indexed_attestation`](#get_indexed_attestation) - - [`get_updated_gasprice`](#get_updated_gasprice) - - [`is_valid_indexed_attestation`](#is_valid_indexed_attestation) - - [`get_attestation_shard`](#get_attestation_shard) - - [Beacon Chain Changes](#beacon-chain-changes) - - [New beacon state fields](#new-beacon-state-fields) - - [New beacon block data fields](#new-beacon-block-data-fields) - - [Attestation processing](#attestation-processing) - - [`validate_attestation`](#validate_attestation) - - [`apply_shard_transition`](#apply_shard_transition) - - [`process_attestations`](#process_attestations) - - [Misc block post-processing](#misc-block-post-processing) - - [Light client processing](#light-client-processing) - - [Epoch transition](#epoch-transition) - - [Fraud proofs](#fraud-proofs) - - [Shard state transition function](#shard-state-transition-function) - - [Honest committee member behavior](#honest-committee-member-behavior) +TODO ## Introduction 
-This document describes the shard transition function (data layer only) and the shard fork choice rule as part of Phase 1 of Ethereum 2.0. +This document describes the extensions made to the Phase 0 design of The Beacon Chain + to facilitate the new shards as part of Phase 1 of Eth2. ## Configuration +Configuration is not namespaced. Instead it is strictly an extension; + no constants of phase 0 change, but new constants are adopted for changing behaviors. + ### Misc | Name | Value | Unit | Duration | @@ -111,7 +78,7 @@ class ShardState(Container): latest_block_root: Hash ``` -### `AttestationData` +### New `AttestationData` ```python class AttestationData(Container): @@ -144,7 +111,7 @@ class ShardTransition(Container): proposer_signature_aggregate: BLSSignature ``` -### `Attestation` +### New `Attestation` ```python class Attestation(Container): @@ -179,7 +146,7 @@ class AttestationCustodyBitWrapper(Container): bit: bool ``` -### `PendingAttestation` +### New `PendingAttestation` ```python class PendingAttestation(Container): @@ -190,17 +157,112 @@ class PendingAttestation(Container): crosslink_success: bool ``` -## Helpers - -### `get_online_validators` +### New extended `Validator` ```python -def get_online_indices(state: BeaconState) -> Set[ValidatorIndex]: - active_validators = get_active_validator_indices(state, get_current_epoch(state)) - return set([i for i in active_validators if state.online_countdown[i] != 0]) +class Validator(Container): + pubkey: BLSPubkey + withdrawal_credentials: Hash # Commitment to pubkey for withdrawals + effective_balance: Gwei # Balance at stake + slashed: boolean + # Status epochs + activation_eligibility_epoch: Epoch # When criteria for activation were met + activation_epoch: Epoch + exit_epoch: Epoch + withdrawable_epoch: Epoch # When validator can withdraw funds + + # TODO: older pre-proposal custody field additions, keep this? 
+ # + # next_custody_secret_to_reveal is initialised to the custody period + # (of the particular validator) in which the validator is activated + # = get_custody_period_for_validator(...) + next_custody_secret_to_reveal: uint64 + max_reveal_lateness: Epoch ``` -### `pack_compact_validator` + +### New extended `BeaconBlock` + +```python +class BeaconBlock(phase0.BeaconBlock): + slot: Slot + parent_root: Hash + state_root: Hash + body: BeaconBlockBody + shard_transitions: Vector[ShardTransition, MAX_SHARDS] + light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] + light_client_signature: BLSSignature + + # TODO: older pre-proposal custody field additions, keep this? + custody_chunk_challenges: List[CustodyChunkChallenge, PLACEHOLDER] + custody_bit_challenges: List[CustodyBitChallenge, PLACEHOLDER] + custody_responses: List[CustodyResponse, PLACEHOLDER] + custody_key_reveals: List[CustodyKeyReveal, PLACEHOLDER] + early_derived_secret_reveals: List[EarlyDerivedSecretReveal, PLACEHOLDER] + + signature: BLSSignature +``` + +### New extended `BeaconState` + +```python +class BeaconState(phase0.BeaconState): + # Versioning + genesis_time: uint64 + slot: Slot + fork: Fork + # History + latest_block_header: BeaconBlockHeader + block_roots: Vector[Hash, SLOTS_PER_HISTORICAL_ROOT] + state_roots: Vector[Hash, SLOTS_PER_HISTORICAL_ROOT] + historical_roots: List[Hash, HISTORICAL_ROOTS_LIMIT] + # Eth1 + eth1_data: Eth1Data + eth1_data_votes: List[Eth1Data, SLOTS_PER_ETH1_VOTING_PERIOD] + eth1_deposit_index: uint64 + # Registry + validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] + balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT] + # Randomness + randao_mixes: Vector[Hash, EPOCHS_PER_HISTORICAL_VECTOR] + # Slashings + slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR] # Per-epoch sums of slashed effective balances + # Attestations + previous_epoch_attestations: List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH] + current_epoch_attestations: 
List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH] + # Finality + justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH] # Bit set for every recent justified epoch + previous_justified_checkpoint: Checkpoint # Previous epoch snapshot + current_justified_checkpoint: Checkpoint + finalized_checkpoint: Checkpoint + # Phase 1 + shard_states: Vector[ShardState, MAX_SHARDS] + online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] + current_light_committee: CompactCommittee + next_light_committee: CompactCommittee + + # TODO older pre-proposal custody field additions, keep this? + custody_chunk_challenge_records: List[CustodyChunkChallengeRecord, PLACEHOLDER] + custody_bit_challenge_records: List[CustodyBitChallengeRecord, PLACEHOLDER] + custody_challenge_index: uint64 + # Future derived secrets already exposed; contains the indices of the exposed validator + # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS + exposed_derived_secrets: Vector[List[ValidatorIndex, PLACEHOLDER], + EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] +``` + +## Helper functions + +### Crypto + +#### `bls_verify_multiple` + +`bls_verify_multiple` is a function for verifying a BLS signature constructed from multiple messages, as defined in the [BLS Signature spec](../bls_signature.md#bls_verify_multiple). 
+ + +### Misc + +#### `pack_compact_validator` ```python def pack_compact_validator(index: int, slashed: bool, balance_in_increments: int) -> int: @@ -212,7 +274,7 @@ def pack_compact_validator(index: int, slashed: bool, balance_in_increments: int return (index << 16) + (slashed << 15) + balance_in_increments ``` -### `committee_to_compact_committee` +#### `committee_to_compact_committee` ```python def committee_to_compact_committee(state: BeaconState, committee: Sequence[ValidatorIndex]) -> CompactCommittee: @@ -228,7 +290,24 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid return CompactCommittee(pubkeys=pubkeys, compact_validators=compact_validators) ``` -### `get_shard_committee` +#### `chunks_to_body_root` + +```python +def chunks_to_body_root(chunks): + return hash_tree_root(chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks))) +``` + +### Beacon state accessors + +#### `get_online_validators` + +```python +def get_online_indices(state: BeaconState) -> Set[ValidatorIndex]: + active_validators = get_active_validator_indices(state, get_current_epoch(state)) + return set([i for i in active_validators if state.online_countdown[i] != 0]) +``` + +#### `get_shard_committee` ```python def get_shard_committee(beacon_state: BeaconState, epoch: Epoch, shard: Shard) -> Sequence[ValidatorIndex]: @@ -240,7 +319,7 @@ def get_shard_committee(beacon_state: BeaconState, epoch: Epoch, shard: Shard) - return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS) ``` -### `get_shard_proposer_index` +#### `get_shard_proposer_index` ```python def get_shard_proposer_index(beacon_state: BeaconState, slot: Slot, shard: Shard) -> ValidatorIndex: @@ -249,7 +328,7 @@ def get_shard_proposer_index(beacon_state: BeaconState, slot: Slot, shard: Shard return committee[r % len(committee)] ``` -### `get_light_client_committee` +#### `get_light_client_committee` ```python def get_light_client_committee(beacon_state: BeaconState, 
epoch: Epoch) -> Sequence[ValidatorIndex]: @@ -261,7 +340,7 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS)[:TARGET_COMMITTEE_SIZE] ``` -### `get_indexed_attestation` +#### `get_indexed_attestation` ```python def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> AttestationAndCommittee: @@ -269,7 +348,7 @@ def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) return AttestationAndCommittee(committee, attestation) ``` -### `get_updated_gasprice` +#### `get_updated_gasprice` ```python def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: @@ -281,7 +360,24 @@ def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: return max(prev_gasprice, MIN_GASPRICE + delta) - delta ``` -### `is_valid_indexed_attestation` +#### `get_shard` + +```python +def get_shard(state: BeaconState, attestation: Attestation) -> Shard: + return Shard((attestation.data.index + get_start_shard(state, attestation.data.slot)) % ACTIVE_SHARDS) +``` + +#### `get_offset_slots` + +```python +def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: + return [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] +``` + + +### Predicates + +#### `is_valid_indexed_attestation` ```python def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: AttestationAndCommittee) -> bool: @@ -314,49 +410,46 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Attest ) ``` -### `get_shard` + +### Block processing ```python -def get_shard(state: BeaconState, attestation: Attestation) -> Shard: - return Shard((attestation.data.index + get_start_shard(state, data.slot)) % ACTIVE_SHARDS) +def process_block(state: BeaconState, block: BeaconBlock) -> None: + process_block_header(state, block) + process_randao(state, block.body) + 
process_eth1_data(state, block.body) + verify_shard_transition_false_positives(state, block) + process_light_client_signatures(state, block) + process_operations(state, block.body) ``` -### `get_offset_slots` + +#### Operations ```python -def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: - return [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] +def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: + # Verify that outstanding deposits are processed up to the maximum number of deposits + assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) + + def process_operations(operations, fn): + for operation in operations: + fn(state, operation) + + process_operations(body.proposer_slashings, process_proposer_slashing) + process_operations(body.attester_slashings, process_attester_slashing) + + # New attestation processing + process_attestations(state, block, body.attestations) + + process_operations(body.deposits, process_deposit) + process_operations(body.voluntary_exits, process_voluntary_exit) + + # TODO process_operations(body.shard_receipt_proofs, process_shard_receipt_proofs) ``` -### `chunks_to_body_root` +##### New Attestation processing -```python -def chunks_to_body_root(chunks): - return hash_tree_root(chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks))) -``` - -## Beacon Chain Changes - -### New beacon state fields - -```python - shard_states: Vector[ShardState, MAX_SHARDS] - online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] - current_light_committee: CompactCommittee - next_light_committee: CompactCommittee -``` - -### New beacon block data fields - -```python - shard_transitions: Vector[ShardTransition, MAX_SHARDS] - light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] - light_client_signature: BLSSignature -``` - -### Attestation processing - -#### `validate_attestation` +###### 
`validate_attestation` ```python def validate_attestation(state: BeaconState, attestation: Attestation) -> None: @@ -381,7 +474,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: assert data.shard_transition_root == Hash() ``` -#### `apply_shard_transition` +###### `apply_shard_transition` ```python def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTransition) -> None: @@ -430,7 +523,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr state.shard_states[shard].slot = state.slot - 1 ``` -#### `process_attestations` +###### `process_attestations` ```python def process_attestations(state: BeaconState, block: BeaconBlock, attestations: Sequence[Attestation]) -> None: @@ -490,7 +583,7 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S state.previous_epoch_attestations.append(pending_attestation) ``` -### Misc block post-processing +#### Shard transition false positives ```python def verify_shard_transition_false_positives(state: BeaconState, block: BeaconBlock) -> None: @@ -500,7 +593,7 @@ def verify_shard_transition_false_positives(state: BeaconState, block: BeaconBlo assert block.shard_transition[shard] == ShardTransition() ``` -### Light client processing +#### Light client processing ```python def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> None: @@ -524,10 +617,23 @@ def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> N ) ``` + ### Epoch transition +This epoch transition overrides the phase0 epoch transition: + ```python -def phase_1_epoch_transition(state: BeaconState) -> None: +def process_epoch(state: BeaconState) -> None: + process_justification_and_finalization(state) + process_rewards_and_penalties(state) + process_registry_updates(state) + # TODO process_reveal_deadlines + # TODO process_challenge_deadlines + process_slashings(state) + # TODO update_period_committee + 
process_final_updates(state) + # TODO process_custody_final_updates + # Slowly remove validators from the "online" set if they do not show up for index in range(len(state.validators)): if state.online_countdown[index] != 0: @@ -544,40 +650,3 @@ def phase_1_epoch_transition(state: BeaconState) -> None: for index in get_attesting_indices(state, pending_attestation.data, pending_attestation.aggregation_bits): state.online_countdown[index] = ONLINE_PERIOD ``` - -## Fraud proofs - -TODO. The intent is to have a single universal fraud proof type, which contains the following parts: - -1. An on-time attestation on some `shard` signing a `ShardTransition` -2. An index `i` of a particular position to focus on -3. The `ShardTransition` itself -4. The full body of the block -5. A Merkle proof to the `shard_states` in the parent block the attestation is referencing - -The proof verifies that one of the two conditions is false: - -1. `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` -2. `execute_state_transition(shard, slot, transition.shard_states[i-1].data, hash_tree_root(parent), get_shard_proposer_index(state, shard, slot), block_contents) != transition.shard_states[i].data` (if `i=0` then instead use `parent.shard_states[shard][-1].data`) - -## Shard state transition function - -```python -def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: - # We will add something more substantive in phase 2 - return hash(pre_state + hash_tree_root(previous_beacon_root) + hash_tree_root(block_data)) -``` - -## Honest committee member behavior - -Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on, and let `QUARTER_PERIOD = SECONDS_PER_SLOT // 4`. 
`2 * QUARTER_PERIOD` seconds into slot `slot`, run the following procedure: - -* Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_states[shard][-1]`, `start_slot = shard_state.slot`. -* For `slot in get_offset_slots(state, start_slot)`, do the following: - * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer_index(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. - * If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` - * If `len(choices) == 1`, do `proposals.append(choices[0])` - * If `len(choices) > 1`, let `winning_proposal` be the proposal with the largest number of total attestations from slots in `state.shard_next_slots[shard]....slot-1` supporting it or any of its descendants, breaking ties by choosing the first proposal locally seen. Do `proposals.append(winning_proposal)`. - * If `proposals[-1]` is NOT an empty proposal, set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer_index(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. If it is an empty proposal, leave `shard_state` unchanged. - -Make an attestation using `shard_data_roots = [hash_tree_root(proposal) for proposal in proposals]` and `shard_state_roots = shard_states`. diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index bb3a9b32b..087dcdbf9 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -231,45 +231,6 @@ class EarlyDerivedSecretReveal(Container): mask: Bytes32 ``` -### Phase 0 container updates - -Add the following fields to the end of the specified container objects. 
Fields with underlying type `uint64` are initialized to `0` and list fields are initialized to `[]`. - -#### `Validator` - -```python -class Validator(Container): - # next_custody_secret_to_reveal is initialised to the custody period - # (of the particular validator) in which the validator is activated - # = get_custody_period_for_validator(...) - next_custody_secret_to_reveal: uint64 - max_reveal_lateness: Epoch -``` - -#### `BeaconState` - -```python -class BeaconState(Container): - custody_chunk_challenge_records: List[CustodyChunkChallengeRecord, PLACEHOLDER] - custody_bit_challenge_records: List[CustodyBitChallengeRecord, PLACEHOLDER] - custody_challenge_index: uint64 - - # Future derived secrets already exposed; contains the indices of the exposed validator - # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS - exposed_derived_secrets: Vector[List[ValidatorIndex, PLACEHOLDER], - EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] -``` - -#### `BeaconBlockBody` - -```python -class BeaconBlockBody(Container): - custody_chunk_challenges: List[CustodyChunkChallenge, PLACEHOLDER] - custody_bit_challenges: List[CustodyBitChallenge, PLACEHOLDER] - custody_responses: List[CustodyResponse, PLACEHOLDER] - custody_key_reveals: List[CustodyKeyReveal, PLACEHOLDER] - early_derived_secret_reveals: List[EarlyDerivedSecretReveal, PLACEHOLDER] -``` ## Helpers @@ -732,12 +693,9 @@ def process_bit_challenge_response(state: BeaconState, ### Handling of custody-related deadlines -Run `process_reveal_deadlines(state)` immediately after `process_registry_updates(state)`: +Run `process_reveal_deadlines(state)` after `process_registry_updates(state)`: ```python -# begin insert @process_reveal_deadlines - process_reveal_deadlines(state) -# end insert @process_reveal_deadlines def process_reveal_deadlines(state: BeaconState) -> None: for index, validator in enumerate(state.validators): deadline = validator.next_custody_secret_to_reveal + (CUSTODY_RESPONSE_DEADLINE // 
EPOCHS_PER_CUSTODY_PERIOD) @@ -748,9 +706,6 @@ def process_reveal_deadlines(state: BeaconState) -> None: Run `process_challenge_deadlines(state)` immediately after `process_reveal_deadlines(state)`: ```python -# begin insert @process_challenge_deadlines - process_challenge_deadlines(state) -# end insert @process_challenge_deadlines def process_challenge_deadlines(state: BeaconState) -> None: for custody_chunk_challenge in state.custody_chunk_challenge_records: if get_current_epoch(state) > custody_chunk_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE: @@ -765,13 +720,10 @@ def process_challenge_deadlines(state: BeaconState) -> None: records[records.index(custody_bit_challenge)] = CustodyBitChallengeRecord() ``` -Append this to `process_final_updates(state)`: +After `process_final_updates(state)`, additional updates are made for the custody game: ```python -# begin insert @after_process_final_updates - after_process_final_updates(state) -# end insert @after_process_final_updates -def after_process_final_updates(state: BeaconState) -> None: +def process_custody_final_updates(state: BeaconState) -> None: current_epoch = get_current_epoch(state) # Clean up exposed RANDAO key reveals state.exposed_derived_secrets[current_epoch % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] = [] diff --git a/specs/core/1_fraudproofs.md b/specs/core/1_fraudproofs.md new file mode 100644 index 000000000..46ba390cd --- /dev/null +++ b/specs/core/1_fraudproofs.md @@ -0,0 +1,52 @@ +# Ethereum 2.0 Phase 1 -- Crosslinks and Shard Data + +**Notice**: This document is a work-in-progress for researchers and implementers. + +## Table of contents + + + + TODO + + + +## Introduction + +This document describes the shard transition function (data layer only) and the shard fork choice rule as part of Phase 1 of Ethereum 2.0. + +## Fraud proofs + +TODO. The intent is to have a single universal fraud proof type, which contains the following parts: + +1. 
An on-time attestation on some `shard` signing a `ShardTransition` +2. An index `i` of a particular position to focus on +3. The `ShardTransition` itself +4. The full body of the block +5. A Merkle proof to the `shard_states` in the parent block the attestation is referencing + +The proof verifies that one of the two conditions is false: + +1. `custody_bits[i][j] != generate_custody_bit(subkey, block_contents)` for any `j` +2. `execute_state_transition(shard, slot, transition.shard_states[i-1].data, hash_tree_root(parent), get_shard_proposer_index(state, shard, slot), block_contents) != transition.shard_states[i].data` (if `i=0` then instead use `parent.shard_states[shard][-1].data`) + +## Shard state transition function + +```python +def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: + # We will add something more substantive in phase 2 + return hash(pre_state + hash_tree_root(previous_beacon_root) + hash_tree_root(block_data)) +``` + +## Honest committee member behavior + +Suppose you are a committee member on shard `shard` at slot `current_slot`. Let `state` be the head beacon state you are building on, and let `QUARTER_PERIOD = SECONDS_PER_SLOT // 4`. `2 * QUARTER_PERIOD` seconds into slot `slot`, run the following procedure: + +* Initialize `proposals = []`, `shard_states = []`, `shard_state = state.shard_states[shard][-1]`, `start_slot = shard_state.slot`. +* For `slot in get_offset_slots(state, start_slot)`, do the following: + * Look for all valid proposals for `slot`; that is, a Bytes `proposal` where `shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer_index(state, shard, slot), proposal)` returns a result and does not throw an exception. Let `choices` be the set of non-empty valid proposals you discover. 
+ * If `len(choices) == 0`, do `proposals.append(make_empty_proposal(shard_state, slot))` + * If `len(choices) == 1`, do `proposals.append(choices[0])` + * If `len(choices) > 1`, let `winning_proposal` be the proposal with the largest number of total attestations from slots in `state.shard_next_slots[shard]....slot-1` supporting it or any of its descendants, breaking ties by choosing the first proposal locally seen. Do `proposals.append(winning_proposal)`. + * If `proposals[-1]` is NOT an empty proposal, set `shard_state = shard_state_transition(shard, slot, shard_state, get_block_root_at_slot(state, state.slot - 1), get_shard_proposer_index(state, shard, slot), proposals[-1])` and do `shard_states.append(shard_state)`. If it is an empty proposal, leave `shard_state` unchanged. + +Make an attestation using `shard_data_roots = [hash_tree_root(proposal) for proposal in proposals]` and `shard_state_roots = shard_states`. diff --git a/specs/core/1_beacon-chain-misc.md b/specs/old/1_beacon-chain-misc.md similarity index 100% rename from specs/core/1_beacon-chain-misc.md rename to specs/old/1_beacon-chain-misc.md diff --git a/specs/core/1_shard-data-chains.md b/specs/old/1_shard-data-chains.md similarity index 100% rename from specs/core/1_shard-data-chains.md rename to specs/old/1_shard-data-chains.md From 7d2341b40d4e4353395535250b578819c8a8668d Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 15 Nov 2019 22:35:07 +0100 Subject: [PATCH 048/194] Note: is_valid_indexed_attestation is overriden --- specs/core/1_beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 04df84e86..57bd1fcd7 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -377,7 +377,7 @@ def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: ### Predicates -#### `is_valid_indexed_attestation` +#### New `is_valid_indexed_attestation` ```python def 
is_valid_indexed_attestation(state: BeaconState, indexed_attestation: AttestationAndCommittee) -> bool: From edef2fd8ae4aa8a0a352a1ce00369d2f47a89c44 Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 15 Nov 2019 23:42:28 +0100 Subject: [PATCH 049/194] more refactoring: more immediate custody game, general phase1 beacon chain changes --- specs/core/1_beacon-chain.md | 130 ++++++++++++++++++------------ specs/core/1_custody-game.md | 150 +++++++++++------------------------ 2 files changed, 126 insertions(+), 154 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 57bd1fcd7..94dfd624f 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -180,6 +180,32 @@ class Validator(Container): max_reveal_lateness: Epoch ``` +### New extended `BeaconBlockBody` + +```python +class BeaconBlockBody(phase0.BeaconBlockBody): + randao_reveal: BLSSignature + eth1_data: Eth1Data # Eth1 data vote + graffiti: Bytes32 # Arbitrary data + # Slashings + proposer_slashings: List[ProposerSlashing, MAX_PROPOSER_SLASHINGS] + attester_slashings: List[AttesterSlashing, MAX_ATTESTER_SLASHINGS] + # Attesting + attestations: List[Attestation, MAX_ATTESTATIONS] + # Enty & exit + deposits: List[Deposit, MAX_DEPOSITS] + voluntary_exits: List[VoluntaryExit, MAX_VOLUNTARY_EXITS] + # Custody game + custody_chunk_challenges: List[CustodyChunkChallenge, PLACEHOLDER] + custody_bit_challenges: List[CustodyBitChallenge, PLACEHOLDER] + custody_key_reveals: List[CustodyKeyReveal, PLACEHOLDER] + early_derived_secret_reveals: List[EarlyDerivedSecretReveal, PLACEHOLDER] + # Shards + shard_transitions: Vector[ShardTransition, MAX_SHARDS] + # Light clients + light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] + light_client_signature: BLSSignature +``` ### New extended `BeaconBlock` @@ -189,17 +215,6 @@ class BeaconBlock(phase0.BeaconBlock): parent_root: Hash state_root: Hash body: BeaconBlockBody - shard_transitions: 
Vector[ShardTransition, MAX_SHARDS] - light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] - light_client_signature: BLSSignature - - # TODO: older pre-proposal custody field additions, keep this? - custody_chunk_challenges: List[CustodyChunkChallenge, PLACEHOLDER] - custody_bit_challenges: List[CustodyBitChallenge, PLACEHOLDER] - custody_responses: List[CustodyResponse, PLACEHOLDER] - custody_key_reveals: List[CustodyKeyReveal, PLACEHOLDER] - early_derived_secret_reveals: List[EarlyDerivedSecretReveal, PLACEHOLDER] - signature: BLSSignature ``` @@ -240,10 +255,8 @@ class BeaconState(phase0.BeaconState): online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee next_light_committee: CompactCommittee - - # TODO older pre-proposal custody field additions, keep this? - custody_chunk_challenge_records: List[CustodyChunkChallengeRecord, PLACEHOLDER] - custody_bit_challenge_records: List[CustodyBitChallengeRecord, PLACEHOLDER] + + # TODO: custody game refactor, no challenge-records, immediate processing. custody_challenge_index: uint64 # Future derived secrets already exposed; contains the indices of the exposed validator # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS @@ -439,11 +452,14 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: process_operations(body.attester_slashings, process_attester_slashing) # New attestation processing - process_attestations(state, block, body.attestations) + process_attestations(state, body, body.attestations) process_operations(body.deposits, process_deposit) process_operations(body.voluntary_exits, process_voluntary_exit) + # See custody game spec. 
+ process_custody_game_operations(state, body) + # TODO process_operations(body.shard_receipt_proofs, process_shard_receipt_proofs) ``` @@ -526,7 +542,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr ###### `process_attestations` ```python -def process_attestations(state: BeaconState, block: BeaconBlock, attestations: Sequence[Attestation]) -> None: +def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attestations: Sequence[Attestation]) -> None: pending_attestations = [] # Basic validation for attestation in attestations: @@ -548,25 +564,25 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S participating_attestations.append(attestation) if attestation.data.shard_transition_root == shard_transition_root: all_participants = all_participants.union(get_attesting_indices(state, attestation.data, attestation.aggregation_bits)) - if ( - get_total_balance(state, online_indices.intersection(all_participants)) * 3 >= - get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 - and success is False - ): - # Attestation <-> shard transition consistency - assert shard_transition_root == hash_tree_root(block.shard_transition) - assert attestation.data.head_shard_root == chunks_to_body_root(block.shard_transition.shard_data_roots[-1]) - # Apply transition - apply_shard_transition(state, shard, block.shard_transition) - # Apply proposer reward and cost - estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) - increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) - for shard_state, slot, length in zip(block.shard_transition.shard_states, offset_slots, block.shard_transition.shard_block_lengths): - decrease_balance(state, get_shard_proposer_index(state, shard, slot), shard_state.gasprice * length) - winners.add((shard, shard_transition_root)) - success = True + if ( + 
get_total_balance(state, online_indices.intersection(all_participants)) * 3 >= + get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 + and success is False + ): + # Attestation <-> shard transition consistency + assert shard_transition_root == hash_tree_root(block_body.shard_transition) + assert attestation.data.head_shard_root == chunks_to_body_root(block_body.shard_transition.shard_data_roots[-1]) + # Apply transition + apply_shard_transition(state, shard, block_body.shard_transition) + # Apply proposer reward and cost + estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) + increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) + for shard_state, slot, length in zip(block_body.shard_transition.shard_states, offset_slots, block_body.shard_transition.shard_block_lengths): + decrease_balance(state, get_shard_proposer_index(state, shard, slot), shard_state.gasprice * length) + winners.add((shard, shard_transition_root)) + success = True if not success: - assert block.shard_transitions[shard] == ShardTransition() + assert block_body.shard_transitions[shard] == ShardTransition() for attestation in attestations: pending_attestation = PendingAttestation( aggregation_bits=attestation.aggregation_bits, @@ -586,22 +602,22 @@ def process_attestations(state: BeaconState, block: BeaconBlock, attestations: S #### Shard transition false positives ```python -def verify_shard_transition_false_positives(state: BeaconState, block: BeaconBlock) -> None: +def verify_shard_transition_false_positives(state: BeaconState, block_body: BeaconBlockBody) -> None: # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it for shard in range(MAX_SHARDS): if state.shard_states[shard].slot != state.slot - 1: - assert block.shard_transition[shard] == ShardTransition() + assert block_body.shard_transition[shard] == ShardTransition() ``` #### Light 
client processing ```python -def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> None: +def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockBody) -> None: committee = get_light_client_committee(state, get_current_epoch(state)) - assert len(block.light_client_signature_bitfield) == len(committee) + assert len(block_body.light_client_signature_bitfield) == len(committee) total_reward = Gwei(0) signer_keys = [] - for i, bit in enumerate(block.light_client_signature_bitfield): + for i, bit in enumerate(block_body.light_client_signature_bitfield): if bit: signer_keys.append(state.validators[committee[i]].pubkey) increase_balance(state, committee[i], get_base_reward(state, committee[i])) @@ -612,7 +628,7 @@ def process_light_client_signatures(state: BeaconState, block: BeaconBlock) -> N assert bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), message_hash=get_block_root_at_slot(state, state.slot - 1), - signature=block.light_client_signature, + signature=block_body.light_client_signature, domain=DOMAIN_LIGHT_CLIENT ) ``` @@ -627,26 +643,38 @@ def process_epoch(state: BeaconState) -> None: process_justification_and_finalization(state) process_rewards_and_penalties(state) process_registry_updates(state) - # TODO process_reveal_deadlines - # TODO process_challenge_deadlines + process_reveal_deadlines(state) + process_challenge_deadlines(state) process_slashings(state) - # TODO update_period_committee process_final_updates(state) - # TODO process_custody_final_updates + process_custody_final_updates(state) + process_online_tracking(state) + process_light_client_committee_updates(state) +``` +#### Online-tracking + +```python +def process_online_tracking(state: BeaconState) -> None: # Slowly remove validators from the "online" set if they do not show up for index in range(len(state.validators)): if state.online_countdown[index] != 0: state.online_countdown[index] = state.online_countdown[index] - 1 - - # Update light 
client committees - if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: - state.current_light_committee = state.next_light_committee - new_committee = get_light_client_committee(state, get_current_epoch(state) + LIGHT_CLIENT_COMMITTEE_PERIOD) - state.next_light_committee = committee_to_compact_committee(state, new_committee) # Process pending attestations for pending_attestation in state.current_epoch_attestations + state.previous_epoch_attestations: for index in get_attesting_indices(state, pending_attestation.data, pending_attestation.aggregation_bits): state.online_countdown[index] = ONLINE_PERIOD ``` + +#### Light client committee updates + +```python +def process_light_client_committee_updates(state: BeaconState) -> None: + # Update light client committees + if get_current_epoch(state) % LIGHT_CLIENT_COMMITTEE_PERIOD == 0: + state.current_light_committee = state.next_light_committee + new_committee = get_light_client_committee(state, get_current_epoch(state) + LIGHT_CLIENT_COMMITTEE_PERIOD) + state.next_light_committee = committee_to_compact_committee(state, new_committee) +``` + diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 087dcdbf9..9e880d02b 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -17,7 +17,6 @@ - [Max operations per block](#max-operations-per-block) - [Reward and penalty quotients](#reward-and-penalty-quotients) - [Signature domain types](#signature-domain-types) - - [TODO PLACEHOLDER](#todo-placeholder) - [Data structures](#data-structures) - [Custody objects](#custody-objects) - [`CustodyChunkChallenge`](#custodychunkchallenge) @@ -71,8 +70,6 @@ This document details the beacon chain additions and changes in Phase 1 of Ether - **Custody key**— - **Custody key reveal**— - **Custody key mask**— -- **Custody response**— -- **Custody response deadline**— ## Constants @@ -115,7 +112,6 @@ This document details the beacon chain additions and changes in Phase 1 of Ether | 
`MAX_EARLY_DERIVED_SECRET_REVEALS` | `1` | | `MAX_CUSTODY_CHUNK_CHALLENGES` | `2**2` (= 4) | | `MAX_CUSTODY_BIT_CHALLENGES` | `2**2` (= 4) | -| `MAX_CUSTODY_RESPONSES` | `2**5` (= 32) | ### Reward and penalty quotients @@ -131,11 +127,6 @@ The following types are defined, mapping into `DomainType` (little endian): | - | - | | `DOMAIN_CUSTODY_BIT_CHALLENGE` | `6` | -### TODO PLACEHOLDER - -| Name | Value | -| - | - | -| `PLACEHOLDER` | `2**32` | ## Data structures @@ -189,19 +180,7 @@ class CustodyBitChallengeRecord(Container): responder_key: BLSSignature ``` -#### `CustodyResponse` - -```python -class CustodyResponse(Container): - challenge_index: uint64 - chunk_index: uint64 - chunk: ByteVector[BYTES_PER_CUSTODY_CHUNK] - data_branch: List[Bytes32, CUSTODY_DATA_DEPTH] - chunk_bits_branch: List[Bytes32, CUSTODY_CHUNK_BIT_DEPTH] - chunk_bits_leaf: Bitvector[256] -``` - -### New beacon operations +### New Beacon Chain operations #### `CustodyKeyReveal` @@ -363,16 +342,27 @@ def replace_empty_or_append(list: MutableSequence[Any], new_element: Any) -> int ## Per-block processing -### Operations +### Custody Game Operations -Add the following operations to the per-block processing, in the order given below and after all other operations in Phase 0. 
+```python +def process_custody_game_operations(state: BeaconState, body: BeaconBlockBody) -> None: + assert len(block.body.custody_key_reveals) <= MAX_CUSTODY_KEY_REVEALS + assert len(block.body.early_derived_secret_reveals) <= MAX_EARLY_DERIVED_SECRET_REVEALS + assert len(block.body.custody_bit_challenges) <= MAX_CUSTODY_BIT_CHALLENGES + assert len(block.body.custody_chunk_challenges) <= MAX_CUSTODY_CHUNK_CHALLENGES + + def process_operations(operations, fn): + for operation in operations: + fn(state, operation) + + process_operations(body.custody_key_reveals, process_custody_key_reveal) + process_operations(body.early_derived_secret_reveals, process_early_derived_secret_reveal) + process_operations(body.custody_chunk_challenges, process_chunk_challenge) + process_operations(body.custody_bit_challenges, process_bit_challenge) +``` #### Custody key reveals -Verify that `len(block.body.custody_key_reveals) <= MAX_CUSTODY_KEY_REVEALS`. - -For each `reveal` in `block.body.custody_key_reveals`, run the following function: - ```python def process_custody_key_reveal(state: BeaconState, reveal: CustodyKeyReveal) -> None: """ @@ -425,10 +415,6 @@ def process_custody_key_reveal(state: BeaconState, reveal: CustodyKeyReveal) -> #### Early derived secret reveals -Verify that `len(block.body.early_derived_secret_reveals) <= MAX_EARLY_DERIVED_SECRET_REVEALS`. - -For each `reveal` in `block.body.early_derived_secret_reveals`, run the following function: - ```python def process_early_derived_secret_reveal(state: BeaconState, reveal: EarlyDerivedSecretReveal) -> None: """ @@ -499,10 +485,6 @@ def process_early_derived_secret_reveal(state: BeaconState, reveal: EarlyDerived #### Chunk challenges -Verify that `len(block.body.custody_chunk_challenges) <= MAX_CUSTODY_CHUNK_CHALLENGES`. 
- -For each `challenge` in `block.body.custody_chunk_challenges`, run the following function: - ```python def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge) -> None: # Verify the attestation @@ -541,12 +523,36 @@ def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge responder.withdrawable_epoch = FAR_FUTURE_EPOCH ``` +TODO: immediate challenge processing, no state records. + +```python +def process_chunk_challenge_response(state: BeaconState, + response: CustodyResponse, + challenge: CustodyChunkChallengeRecord) -> None: + # Verify chunk index + assert response.chunk_index == challenge.chunk_index + # Verify bit challenge data is null + assert response.chunk_bits_branch == [] and response.chunk_bits_leaf == Hash() + # Verify minimum delay + assert get_current_epoch(state) >= challenge.inclusion_epoch + MAX_SEED_LOOKAHEAD + # Verify the chunk matches the crosslink data root + assert is_valid_merkle_branch( + leaf=hash_tree_root(response.chunk), + branch=response.data_branch, + depth=challenge.depth, + index=response.chunk_index, + root=challenge.data_root, + ) + # Clear the challenge + records = state.custody_chunk_challenge_records + records[records.index(challenge)] = CustodyChunkChallengeRecord() + # Reward the proposer + proposer_index = get_beacon_proposer_index(state) + increase_balance(state, proposer_index, Gwei(get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT)) +``` + #### Bit challenges -Verify that `len(block.body.custody_bit_challenges) <= MAX_CUSTODY_BIT_CHALLENGES`. 
- -For each `challenge` in `block.body.custody_bit_challenges`, run the following function: - ```python def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> None: attestation = challenge.attestation @@ -606,52 +612,7 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> responder.withdrawable_epoch = FAR_FUTURE_EPOCH ``` -#### Custody responses - -Verify that `len(block.body.custody_responses) <= MAX_CUSTODY_RESPONSES`. - -For each `response` in `block.body.custody_responses`, run the following function: - -```python -def process_custody_response(state: BeaconState, response: CustodyResponse) -> None: - chunk_challenge = next((record for record in state.custody_chunk_challenge_records - if record.challenge_index == response.challenge_index), None) - if chunk_challenge is not None: - return process_chunk_challenge_response(state, response, chunk_challenge) - - bit_challenge = next((record for record in state.custody_bit_challenge_records - if record.challenge_index == response.challenge_index), None) - if bit_challenge is not None: - return process_bit_challenge_response(state, response, bit_challenge) - - assert False -``` - -```python -def process_chunk_challenge_response(state: BeaconState, - response: CustodyResponse, - challenge: CustodyChunkChallengeRecord) -> None: - # Verify chunk index - assert response.chunk_index == challenge.chunk_index - # Verify bit challenge data is null - assert response.chunk_bits_branch == [] and response.chunk_bits_leaf == Bytes32() - # Verify minimum delay - assert get_current_epoch(state) >= challenge.inclusion_epoch + MAX_SEED_LOOKAHEAD - # Verify the chunk matches the crosslink data root - assert is_valid_merkle_branch( - leaf=hash_tree_root(response.chunk), - branch=response.data_branch, - depth=challenge.depth, - index=response.chunk_index, - root=challenge.data_root, - ) - # Clear the challenge - records = state.custody_chunk_challenge_records - 
records[records.index(challenge)] = CustodyChunkChallengeRecord() - # Reward the proposer - proposer_index = get_beacon_proposer_index(state) - increase_balance(state, proposer_index, Gwei(get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT)) -``` +TODO: immediate challenge processing, no state records. ```python def process_bit_challenge_response(state: BeaconState, @@ -703,23 +664,6 @@ def process_reveal_deadlines(state: BeaconState) -> None: slash_validator(state, ValidatorIndex(index)) ``` -Run `process_challenge_deadlines(state)` immediately after `process_reveal_deadlines(state)`: - -```python -def process_challenge_deadlines(state: BeaconState) -> None: - for custody_chunk_challenge in state.custody_chunk_challenge_records: - if get_current_epoch(state) > custody_chunk_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE: - slash_validator(state, custody_chunk_challenge.responder_index, custody_chunk_challenge.challenger_index) - records = state.custody_chunk_challenge - records[records.index(custody_chunk_challenge)] = CustodyChunkChallengeRecord() - - for custody_bit_challenge in state.custody_bit_challenge_records: - if get_current_epoch(state) > custody_bit_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE: - slash_validator(state, custody_bit_challenge.responder_index, custody_bit_challenge.challenger_index) - records = state.custody_bit_challenge_records - records[records.index(custody_bit_challenge)] = CustodyBitChallengeRecord() -``` - After `process_final_updates(state)`, additional updates are made for the custody game: ```python From 465f6cb7f020c969ca83f2f11c604841225ada6e Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 15 Nov 2019 23:46:31 +0100 Subject: [PATCH 050/194] update readme spec links --- README.md | 4 ++-- specs/core/{1_fraudproofs.md => 1_fraud_proofs.md} | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) rename specs/core/{1_fraudproofs.md => 1_fraud_proofs.md} (94%) diff --git a/README.md b/README.md 
index fa103394d..1bb2ae49f 100644 --- a/README.md +++ b/README.md @@ -18,9 +18,9 @@ Core specifications for Eth2 client validation can be found in [specs/core](spec * [Honest Validator](specs/validator/0_beacon-chain-validator.md) ### Phase 1 +* [The Beacon Chain for Shards](specs/core/1_beacon-chain.md) * [Custody Game](specs/core/1_custody-game.md) -* [Shard Data Chains](specs/core/1_shard-data-chains.md) -* [Misc beacon chain updates](specs/core/1_beacon-chain-misc.md) +* [Shard Transition and Fraud Proofs](specs/core/1_fraud_proofs.md) ### Phase 2 diff --git a/specs/core/1_fraudproofs.md b/specs/core/1_fraud_proofs.md similarity index 94% rename from specs/core/1_fraudproofs.md rename to specs/core/1_fraud_proofs.md index 46ba390cd..e44f790ed 100644 --- a/specs/core/1_fraudproofs.md +++ b/specs/core/1_fraud_proofs.md @@ -1,4 +1,4 @@ -# Ethereum 2.0 Phase 1 -- Crosslinks and Shard Data +# Ethereum 2.0 Phase 1 -- Shard Transition and Fraud Proofs **Notice**: This document is a work-in-progress for researchers and implementers. @@ -12,7 +12,7 @@ ## Introduction -This document describes the shard transition function (data layer only) and the shard fork choice rule as part of Phase 1 of Ethereum 2.0. +This document describes the shard transition function and fraud proofs as part of Phase 1 of Ethereum 2.0. 
## Fraud proofs From 589d5a4f9ae06fe6b49acfdfab46e13024b9f7db Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 15 Nov 2019 23:52:57 +0100 Subject: [PATCH 051/194] Add notes on custody game spec funcs --- specs/core/1_beacon-chain.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 94dfd624f..169c71123 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -644,7 +644,6 @@ def process_epoch(state: BeaconState) -> None: process_rewards_and_penalties(state) process_registry_updates(state) process_reveal_deadlines(state) - process_challenge_deadlines(state) process_slashings(state) process_final_updates(state) process_custody_final_updates(state) @@ -652,6 +651,10 @@ def process_epoch(state: BeaconState) -> None: process_light_client_committee_updates(state) ``` +#### Custody game updates + +`process_reveal_deadlines` and `process_custody_final_updates` are defined in [the Custody Game spec](./1_custody-game.md), + #### Online-tracking ```python From 74d6021507dc5b54cbfde25a9bc5b280a94b44cd Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 16 Nov 2019 11:13:47 +0100 Subject: [PATCH 052/194] PR comment fixes --- specs/core/0_beacon-chain.md | 14 +++++++----- specs/core/1_beacon-chain.md | 43 +++++++++++++++++++----------------- specs/core/1_custody-game.md | 15 +++++-------- 3 files changed, 37 insertions(+), 35 deletions(-) diff --git a/specs/core/0_beacon-chain.md b/specs/core/0_beacon-chain.md index e8aa53f13..3b204e2c7 100644 --- a/specs/core/0_beacon-chain.md +++ b/specs/core/0_beacon-chain.md @@ -1419,11 +1419,15 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # Verify that outstanding deposits are processed up to the maximum number of deposits assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - process_operations(body.proposer_slashings, 
process_proposer_slashing) - process_operations(body.attester_slashings, process_attester_slashing) - process_operations(body.attestations, process_attestations) - process_operations(body.deposits, process_deposit) - process_operations(body.voluntary_exits, process_voluntary_exit) + def for_ops(operations, fn): + for operation in operations: + fn(state, operation) + + for_ops(body.proposer_slashings, process_proposer_slashing) + for_ops(body.attester_slashings, process_attester_slashing) + for_ops(body.attestations, process_attestations) + for_ops(body.deposits, process_deposit) + for_ops(body.voluntary_exits, process_voluntary_exit) ``` ##### Proposer slashings diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 169c71123..4e98445f2 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -146,10 +146,10 @@ class AttestationCustodyBitWrapper(Container): bit: bool ``` -### New `PendingAttestation` +### New extended `PendingAttestation` ```python -class PendingAttestation(Container): +class PendingAttestation(phase0.PendingAttestation): aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] data: AttestationData inclusion_delay: Slot @@ -160,7 +160,7 @@ class PendingAttestation(Container): ### New extended `Validator` ```python -class Validator(Container): +class Validator(phase0.Validator): pubkey: BLSPubkey withdrawal_credentials: Hash # Commitment to pubkey for withdrawals effective_balance: Gwei # Balance at stake @@ -170,9 +170,7 @@ class Validator(Container): activation_epoch: Epoch exit_epoch: Epoch withdrawable_epoch: Epoch # When validator can withdraw funds - - # TODO: older pre-proposal custody field additions, keep this? - # + # Custody game # next_custody_secret_to_reveal is initialised to the custody period # (of the particular validator) in which the validator is activated # = get_custody_period_for_validator(...) 
@@ -196,10 +194,10 @@ class BeaconBlockBody(phase0.BeaconBlockBody): deposits: List[Deposit, MAX_DEPOSITS] voluntary_exits: List[VoluntaryExit, MAX_VOLUNTARY_EXITS] # Custody game - custody_chunk_challenges: List[CustodyChunkChallenge, PLACEHOLDER] - custody_bit_challenges: List[CustodyBitChallenge, PLACEHOLDER] - custody_key_reveals: List[CustodyKeyReveal, PLACEHOLDER] - early_derived_secret_reveals: List[EarlyDerivedSecretReveal, PLACEHOLDER] + custody_chunk_challenges: List[CustodyChunkChallenge, MAX_CUSTODY_CHUNK_CHALLENGES] + custody_bit_challenges: List[CustodyBitChallenge, MAX_CUSTODY_BIT_CHALLENGES] + custody_key_reveals: List[CustodyKeyReveal, MAX_CUSTODY_KEY_REVEALS] + early_derived_secret_reveals: List[EarlyDerivedSecretReveal, MAX_EARLY_DERIVED_SECRET_REVEALS] # Shards shard_transitions: Vector[ShardTransition, MAX_SHARDS] # Light clients @@ -209,6 +207,8 @@ class BeaconBlockBody(phase0.BeaconBlockBody): ### New extended `BeaconBlock` +Note that the `body` has a new `BeaconBlockBody` definition. + ```python class BeaconBlock(phase0.BeaconBlock): slot: Slot @@ -220,6 +220,8 @@ class BeaconBlock(phase0.BeaconBlock): ### New extended `BeaconState` +Note that aside from the new additions, `Validator` and `PendingAttestation` have new definitions. 
+ ```python class BeaconState(phase0.BeaconState): # Versioning @@ -312,10 +314,10 @@ def chunks_to_body_root(chunks): ### Beacon state accessors -#### `get_online_validators` +#### `get_online_validator_indices` ```python -def get_online_indices(state: BeaconState) -> Set[ValidatorIndex]: +def get_online_validator_indices(state: BeaconState) -> Set[ValidatorIndex]: active_validators = get_active_validator_indices(state, get_current_epoch(state)) return set([i for i in active_validators if state.online_countdown[i] != 0]) ``` @@ -390,7 +392,9 @@ def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: ### Predicates -#### New `is_valid_indexed_attestation` +#### Updated `is_valid_indexed_attestation` + +Note that this replaces the Phase 0 `is_valid_indexed_attestation`. ```python def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: AttestationAndCommittee) -> bool: @@ -444,18 +448,18 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # Verify that outstanding deposits are processed up to the maximum number of deposits assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - def process_operations(operations, fn): + def for_ops(operations, fn): for operation in operations: fn(state, operation) - process_operations(body.proposer_slashings, process_proposer_slashing) - process_operations(body.attester_slashings, process_attester_slashing) + for_ops(body.proposer_slashings, process_proposer_slashing) + for_ops(body.attester_slashings, process_attester_slashing) # New attestation processing process_attestations(state, body, body.attestations) - process_operations(body.deposits, process_deposit) - process_operations(body.voluntary_exits, process_voluntary_exit) + for_ops(body.deposits, process_deposit) + for_ops(body.voluntary_exits, process_voluntary_exit) # See custody game spec. 
process_custody_game_operations(state, body) @@ -543,12 +547,11 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr ```python def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attestations: Sequence[Attestation]) -> None: - pending_attestations = [] # Basic validation for attestation in attestations: validate_attestation(state, attestation) # Process crosslinks - online_indices = get_online_indices(state) + online_indices = get_online_validator_indices(state) winners = set() for shard in range(ACTIVE_SHARDS): success = False diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 9e880d02b..0e804f628 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -346,19 +346,14 @@ def replace_empty_or_append(list: MutableSequence[Any], new_element: Any) -> int ```python def process_custody_game_operations(state: BeaconState, body: BeaconBlockBody) -> None: - assert len(block.body.custody_key_reveals) <= MAX_CUSTODY_KEY_REVEALS - assert len(block.body.early_derived_secret_reveals) <= MAX_EARLY_DERIVED_SECRET_REVEALS - assert len(block.body.custody_bit_challenges) <= MAX_CUSTODY_BIT_CHALLENGES - assert len(block.body.custody_chunk_challenges) <= MAX_CUSTODY_CHUNK_CHALLENGES - - def process_operations(operations, fn): + def for_ops(operations, fn): for operation in operations: fn(state, operation) - process_operations(body.custody_key_reveals, process_custody_key_reveal) - process_operations(body.early_derived_secret_reveals, process_early_derived_secret_reveal) - process_operations(body.custody_chunk_challenges, process_chunk_challenge) - process_operations(body.custody_bit_challenges, process_bit_challenge) + for_ops(body.custody_key_reveals, process_custody_key_reveal) + for_ops(body.early_derived_secret_reveals, process_early_derived_secret_reveal) + for_ops(body.custody_chunk_challenges, process_chunk_challenge) + for_ops(body.custody_bit_challenges, 
process_bit_challenge) ``` #### Custody key reveals From cc0aac3aaed2b85ab45e0303efc04f33ba7b1159 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 16 Nov 2019 11:17:39 +0100 Subject: [PATCH 053/194] typo --- specs/core/1_beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 4e98445f2..e7f53865d 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -190,7 +190,7 @@ class BeaconBlockBody(phase0.BeaconBlockBody): attester_slashings: List[AttesterSlashing, MAX_ATTESTER_SLASHINGS] # Attesting attestations: List[Attestation, MAX_ATTESTATIONS] - # Enty & exit + # Entry & exit deposits: List[Deposit, MAX_DEPOSITS] voluntary_exits: List[VoluntaryExit, MAX_VOLUNTARY_EXITS] # Custody game From 36503f98e43b24cdc82715793849fcf9e71c0c85 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 16 Nov 2019 12:23:45 +0100 Subject: [PATCH 054/194] placeholder: rudimentary phase 0 to phase 1 fork process --- specs/core/1_beacon-chain.md | 6 +- specs/core/1_phase1_fork.md | 107 +++++++++++++++++++++++++++++++++++ 2 files changed, 110 insertions(+), 3 deletions(-) create mode 100644 specs/core/1_phase1_fork.md diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index e7f53865d..74ad4f5f9 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -253,11 +253,11 @@ class BeaconState(phase0.BeaconState): current_justified_checkpoint: Checkpoint finalized_checkpoint: Checkpoint # Phase 1 - shard_states: Vector[ShardState, MAX_SHARDS] + shard_states: List[ShardState, MAX_SHARDS] online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee next_light_committee: CompactCommittee - + # Custody game # TODO: custody game refactor, no challenge-records, immediate processing. 
custody_challenge_index: uint64 # Future derived secrets already exposed; contains the indices of the exposed validator @@ -607,7 +607,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest ```python def verify_shard_transition_false_positives(state: BeaconState, block_body: BeaconBlockBody) -> None: # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it - for shard in range(MAX_SHARDS): + for shard in range(ACTIVE_SHARDS): if state.shard_states[shard].slot != state.slot - 1: assert block_body.shard_transition[shard] == ShardTransition() ``` diff --git a/specs/core/1_phase1_fork.md b/specs/core/1_phase1_fork.md new file mode 100644 index 000000000..f5ab658f5 --- /dev/null +++ b/specs/core/1_phase1_fork.md @@ -0,0 +1,107 @@ +# Ethereum 2.0 Phase 1 -- From Phase 0 to Phase 1 + +**Notice**: This document is a work-in-progress for researchers and implementers. + +## Table of contents + + + + TODO + + + +## Introduction + +This document describes the process of moving from Phase 0 to Phase 1 of Ethereum 2.0. + +## Configuration + +TODO: very unstable/experimental. PLACEHOLDER. + +| Name | Value | Unit | +| - | - | - | +| `PHASE_1_FORK_VERSION` | `0x00000001` | `Version` | +| `INITIAL_GASPRICE` | `10` | `Gwei` | + +## Fork to Phase 1 + +### Fork trigger. + +TBD. Social consensus, along with state conditions such as epoch boundary, finality, deposits, active validator count, etc. may be part of the decision process to trigger the fork. + +### Upgrading the state + +After `process_slots` of Phase 0 finishes, but before the first Phase 1 block is processed, an irregular state change is made to upgrade to Phase 1. 
+ +```python +def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: + post = BeaconState( + genesis_time=pre.genesis_time, + slot=pre.slot, + fork=Fork( + previous_version=pre.current_version, + current_version=PHASE_1_FORK_VERSION, + epoch=get_current_epoch(pre), + ), + # History + latest_block_header=pre.latest_block_header, + block_roots=pre.block_roots, + state_roots=pre.state_roots, + historical_roots=pre.historical_roots, + # Eth1 + eth1_data=pre.eth1_data, + eth1_data_votes=pre.eth1_data_votes, + eth1_deposit_index=pre.eth1_deposit_index, + # Registry + validators=List[Validator, VALIDATOR_REGISTRY_LIMIT]( + Validator( + pubkey=phase0_validator.pubkey, + withdrawal_credentials=phase0_validator.withdrawal_credentials, + effective_balance=phase0_validator.effective_balance, + slashed=phase0.slashed, + activation_eligibility_epoch=phase0_validator.activation_eligibility_epoch, + activation_epoch=phase0_validator.activation_eligibility_epoch, + exit_epoch=phase0_validator.exit_epoch, + withdrawable_epoch=phase0_validator.withdrawable_epoch, + next_custody_secret_to_reveal=, + max_reveal_lateness=, + ) for phase0_validator in pre.validators + ), + balances=pre.balances, + # Randomness + randao_mixes=pre.randao_mixes, + # Slashings + slashings=pre.slashings, + # Attestations + # previous_epoch_attestations is cleared on upgrade. + previous_epoch_attestations=List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH](), + # empty in pre state, since the upgrade is performed just after an epoch boundary. 
+ current_epoch_attestations=List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH](), + # Finality + justification_bits=pre.justification_bits, + previous_justified_checkpoint=pre.previous_justified_checkpoint, + current_justified_checkpoint=pre.current_justified_checkpoint, + finalized_checkpoint=pre.finalized_checkpoint, + # Phase 1 + shard_states=List[ShardState, MAX_SHARDS]( + ShardState( + slot=0, + gasprice=INITIAL_GASPRICE, + data=Root(), + latest_block_root=Hash(), + ) for i in range(ACTIVE_SHARDS) + ), + online_countdown=Bytes[VALIDATOR_REGISTRY_LIMIT]( + ONLINE_PERIOD for i in range(len(pre.validators)) + ), + current_light_committee=CompactCommittee(), # computed after state creation + next_light_committee=CompactCommittee(), + # Custody game + custody_challenge_index=0, + exposed_derived_secrets=Vector[List[ValidatorIndex, PLACEHOLDER], + EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]() + ) + post.current_light_committee = get_light_client_committee(post, post.epoch) + post.next_light_committee = get_light_client_committee(post, post.epoch + 1) + return post +``` From 4c9e5b89501ee84e66404100275fb6dadbaf77c7 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 16 Nov 2019 12:25:13 +0100 Subject: [PATCH 055/194] add link to phase1 upgrade doc --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1bb2ae49f..b801cbd3d 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ Core specifications for Eth2 client validation can be found in [specs/core](spec * [Honest Validator](specs/validator/0_beacon-chain-validator.md) ### Phase 1 +* [From Phase 0 to Phase 1](specs/core/1_phase1_fork.md) * [The Beacon Chain for Shards](specs/core/1_beacon-chain.md) * [Custody Game](specs/core/1_custody-game.md) * [Shard Transition and Fraud Proofs](specs/core/1_fraud_proofs.md) From 03e956f9c95fc59094a9d199e90e5796dabab650 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 16 Nov 2019 12:33:24 +0100 Subject: [PATCH 056/194] 
validator init adjustments --- specs/core/1_custody-game.md | 10 +++++----- specs/core/1_phase1_fork.md | 11 ++++++----- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 0e804f628..e4e752763 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -320,11 +320,11 @@ def get_randao_epoch_for_custody_period(period: uint64, validator_index: Validat ### `get_custody_period_for_validator` ```python -def get_custody_period_for_validator(state: BeaconState, validator_index: ValidatorIndex, epoch: Epoch=None) -> int: +def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epoch=None) -> int: ''' Return the reveal period for a given validator. ''' - epoch = get_current_epoch(state) if epoch is None else epoch + epoch = if epoch is None else epoch return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD ``` @@ -367,7 +367,7 @@ def process_custody_key_reveal(state: BeaconState, reveal: CustodyKeyReveal) -> revealer = state.validators[reveal.revealer_index] epoch_to_sign = get_randao_epoch_for_custody_period(revealer.next_custody_secret_to_reveal, reveal.revealer_index) - assert revealer.next_custody_secret_to_reveal < get_custody_period_for_validator(state, reveal.revealer_index) + assert revealer.next_custody_secret_to_reveal < get_custody_period_for_validator(reveal.revealer_index, get_current_epoch(state)) # Revealed validator is active or exited, but not withdrawn assert is_slashable_validator(revealer, get_current_epoch(state)) @@ -566,7 +566,7 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> # Verify attestation is eligible for challenging responder = state.validators[challenge.responder_index] assert get_current_epoch(state) <= get_randao_epoch_for_custody_period( - get_custody_period_for_validator(state, challenge.responder_index, epoch), + 
get_custody_period_for_validator(challenge.responder_index, epoch), challenge.responder_index ) + 2 * EPOCHS_PER_CUSTODY_PERIOD + responder.max_reveal_lateness @@ -578,7 +578,7 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> assert record.challenger_index != challenge.challenger_index # Verify the responder custody key epoch_to_sign = get_randao_epoch_for_custody_period( - get_custody_period_for_validator(state, challenge.responder_index, epoch), + get_custody_period_for_validator(challenge.responder_index, epoch), challenge.responder_index, ) domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign) diff --git a/specs/core/1_phase1_fork.md b/specs/core/1_phase1_fork.md index f5ab658f5..e257d6eb1 100644 --- a/specs/core/1_phase1_fork.md +++ b/specs/core/1_phase1_fork.md @@ -35,13 +35,14 @@ After `process_slots` of Phase 0 finishes, but before the first Phase 1 block is ```python def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: + epoch = get_current_epoch(pre) post = BeaconState( genesis_time=pre.genesis_time, slot=pre.slot, fork=Fork( previous_version=pre.current_version, current_version=PHASE_1_FORK_VERSION, - epoch=get_current_epoch(pre), + epoch=epoch, ), # History latest_block_header=pre.latest_block_header, @@ -58,14 +59,14 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: pubkey=phase0_validator.pubkey, withdrawal_credentials=phase0_validator.withdrawal_credentials, effective_balance=phase0_validator.effective_balance, - slashed=phase0.slashed, + slashed=phase0_validator.slashed, activation_eligibility_epoch=phase0_validator.activation_eligibility_epoch, activation_epoch=phase0_validator.activation_eligibility_epoch, exit_epoch=phase0_validator.exit_epoch, withdrawable_epoch=phase0_validator.withdrawable_epoch, - next_custody_secret_to_reveal=, - max_reveal_lateness=, - ) for phase0_validator in pre.validators + next_custody_secret_to_reveal=get_custody_period_for_validator(validator_index, 
epoch), + max_reveal_lateness=0, # TODO custody refactor. Outdated? + ) for validator_index, phase0_validator in enumerate(pre.validators) ), balances=pre.balances, # Randomness From 79be0345d61a7d6ec96f7f45d1a9369f6bce2aee Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 18 Nov 2019 13:47:53 -0700 Subject: [PATCH 057/194] fix call to process_attestation --- specs/core/0_beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/0_beacon-chain.md b/specs/core/0_beacon-chain.md index 3b204e2c7..32c10c4ec 100644 --- a/specs/core/0_beacon-chain.md +++ b/specs/core/0_beacon-chain.md @@ -1425,7 +1425,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: for_ops(body.proposer_slashings, process_proposer_slashing) for_ops(body.attester_slashings, process_attester_slashing) - for_ops(body.attestations, process_attestations) + for_ops(body.attestations, process_attestation) for_ops(body.deposits, process_deposit) for_ops(body.voluntary_exits, process_voluntary_exit) ``` From 82c9adce8baea5ef1961720629979b2fc9381c4e Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 18 Nov 2019 14:04:29 -0700 Subject: [PATCH 058/194] fix phase 1 make build (not lint or tests) --- Makefile | 2 +- scripts/build_spec.py | 21 +++++++++++-------- specs/core/1_beacon-chain.md | 4 ++-- .../{1_fraud_proofs.md => 1_fraud-proofs.md} | 0 .../{1_phase1_fork.md => 1_phase1-fork.md} | 0 5 files changed, 15 insertions(+), 12 deletions(-) rename specs/core/{1_fraud_proofs.md => 1_fraud-proofs.md} (100%) rename specs/core/{1_phase1_fork.md => 1_phase1-fork.md} (100%) diff --git a/Makefile b/Makefile index bfbc28070..d88c91651 100644 --- a/Makefile +++ b/Makefile @@ -89,7 +89,7 @@ $(PY_SPEC_PHASE_0_TARGETS): $(PY_SPEC_PHASE_0_DEPS) python3 $(SCRIPT_DIR)/build_spec.py -p0 $(SPEC_DIR)/core/0_beacon-chain.md $(SPEC_DIR)/core/0_fork-choice.md $(SPEC_DIR)/validator/0_beacon-chain-validator.md $@ $(PY_SPEC_DIR)/eth2spec/phase1/spec.py: 
$(PY_SPEC_PHASE_1_DEPS) - python3 $(SCRIPT_DIR)/build_spec.py -p1 $(SPEC_DIR)/core/0_beacon-chain.md $(SPEC_DIR)/core/0_fork-choice.md $(SPEC_DIR)/light_client/merkle_proofs.md $(SPEC_DIR)/core/1_custody-game.md $(SPEC_DIR)/core/1_shard-data-chains.md $(SPEC_DIR)/core/1_beacon-chain-misc.md $@ + python3 $(SCRIPT_DIR)/build_spec.py -p1 $(SPEC_DIR)/core/0_beacon-chain.md $(SPEC_DIR)/core/0_fork-choice.md $(SPEC_DIR)/light_client/merkle_proofs.md $(SPEC_DIR)/core/1_custody-game.md $(SPEC_DIR)/core/1_beacon-chain.md $(SPEC_DIR)/core/1_fraud-proofs.md $(SPEC_DIR)/core/1_phase1-fork.md $@ CURRENT_DIR = ${CURDIR} diff --git a/scripts/build_spec.py b/scripts/build_spec.py index 9fb52eb1c..8bc4398cc 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -289,16 +289,18 @@ def build_phase1_spec(phase0_beacon_sourcefile: str, phase0_fork_choice_sourcefile: str, merkle_proofs_sourcefile: str, phase1_custody_sourcefile: str, - phase1_shard_sourcefile: str, - phase1_beacon_misc_sourcefile: str, + phase1_beacon_sourcefile: str, + phase1_fraud_sourcefile: str, + phase1_fork_sourcefile: str, outfile: str=None) -> Optional[str]: all_sourcefiles = ( phase0_beacon_sourcefile, phase0_fork_choice_sourcefile, merkle_proofs_sourcefile, phase1_custody_sourcefile, - phase1_shard_sourcefile, - phase1_beacon_misc_sourcefile, + phase1_beacon_sourcefile, + phase1_fraud_sourcefile, + phase1_fork_sourcefile, ) all_spescs = [get_spec(spec) for spec in all_sourcefiles] for spec in all_spescs: @@ -327,9 +329,10 @@ If building phase 1: 2nd argument is input /core/0_fork-choice.md 3rd argument is input /light_client/merkle_proofs.md 4th argument is input /core/1_custody-game.md - 5th argument is input /core/1_shard-data-chains.md - 6th argument is input /core/1_beacon-chain-misc.md - 7th argument is output spec.py + 5th argument is input /core/1_beacon-chain.md + 6th argument is input /core/1_fraud-proofs.md + 7th argument is input /core/1_phase1-fork.md + 8th argument is output spec.py ''' 
parser = ArgumentParser(description=description) parser.add_argument("-p", "--phase", dest="phase", type=int, default=0, help="Build for phase #") @@ -342,14 +345,14 @@ If building phase 1: else: print(" Phase 0 requires spec, forkchoice, and v-guide inputs as well as an output file.") elif args.phase == 1: - if len(args.files) == 7: + if len(args.files) == 8: build_phase1_spec(*args.files) else: print( " Phase 1 requires input files as well as an output file:\n" "\t core/phase_0: (0_beacon-chain.md, 0_fork-choice.md)\n" "\t light_client: (merkle_proofs.md)\n" - "\t core/phase_1: (1_custody-game.md, 1_shard-data-chains.md, 1_beacon-chain-misc.md)\n" + "\t core/phase_1: (1_custody-game.md, 1_beacon-chain.md, 1_fraud-proofs.md, 1_phase1-fork.md)\n" "\t and output.py" ) else: diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 74ad4f5f9..1cb1c3525 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -143,7 +143,7 @@ class CompactCommittee(Container): class AttestationCustodyBitWrapper(Container): attestation_root: Hash block_index: uint64 - bit: bool + bit: boolean ``` ### New extended `PendingAttestation` @@ -154,7 +154,7 @@ class PendingAttestation(phase0.PendingAttestation): data: AttestationData inclusion_delay: Slot proposer_index: ValidatorIndex - crosslink_success: bool + crosslink_success: boolean ``` ### New extended `Validator` diff --git a/specs/core/1_fraud_proofs.md b/specs/core/1_fraud-proofs.md similarity index 100% rename from specs/core/1_fraud_proofs.md rename to specs/core/1_fraud-proofs.md diff --git a/specs/core/1_phase1_fork.md b/specs/core/1_phase1-fork.md similarity index 100% rename from specs/core/1_phase1_fork.md rename to specs/core/1_phase1-fork.md From 9f80a7f703dce4bad748875a212949f3653d1689 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 18 Nov 2019 15:07:50 -0700 Subject: [PATCH 059/194] phase1 lint (1) --- scripts/build_spec.py | 8 +++--- specs/core/1_beacon-chain.md | 50 
++++++++++++++++++++++++++---------- specs/core/1_custody-game.md | 6 ++--- specs/core/1_fraud-proofs.md | 7 ++++- specs/core/1_phase1-fork.md | 4 +-- 5 files changed, 50 insertions(+), 25 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index 8bc4398cc..d6a06cbbc 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -171,7 +171,7 @@ def objects_to_spec(functions: Dict[str, str], ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values()) ssz_objects_reinitialization_spec = ( 'def init_SSZ_types() -> None:\n global_vars = globals()\n\n ' - + '\n\n '.join([strip_comments(re.sub(r'(?!\n\n)\n', r'\n ', value[:-1])) + + '\n\n '.join([strip_comments(re.sub(r'\n\n', r'\n', re.sub(r'(?!\n\n)\n', r'\n ', value[:-1]))) for value in ssz_objects.values()]) + '\n\n' + '\n'.join(map(lambda x: ' global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys())) @@ -243,10 +243,8 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str] """ for key, value in new_objects.items(): if key in old_objects: - # remove trailing newline - old_objects[key] = old_objects[key] - # remove leading variable name - value = re.sub(r'^class [\w]*\(Container\):\n', '', value) + # add proper spacing + old_objects[key] = old_objects[key] + "\n\n" old_objects[key] = old_objects.get(key, '') + value dependency_order_ssz_objects(old_objects, custom_types) return old_objects diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 1cb1c3525..708937059 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -81,7 +81,7 @@ class ShardState(Container): ### New `AttestationData` ```python -class AttestationData(Container): +class AttestationData(phase0.AttestationData): slot: Slot index: CommitteeIndex # LMD GHOST vote @@ -417,7 +417,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Attest AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, cbit) )) else: 
- assert cbit == False + assert not cbit return bls_verify_multiple( pubkeys=all_pubkeys, @@ -503,7 +503,12 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr # Correct data root count offset_slots = get_offset_slots(state, start_slot) - assert len(transition.shard_data_roots) == len(transition.shard_states) == len(transition.shard_block_lengths) == len(offset_slots) + assert ( + len(transition.shard_data_roots) + == len(transition.shard_states) + == len(transition.shard_block_lengths) + == len(offset_slots) + ) assert transition.start_slot == start_slot # Reonstruct shard headers @@ -514,7 +519,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr if any(transition.shard_data_roots): headers.append(ShardSignableHeader( shard_parent_root=shard_parent_root, - parent_hash=get_block_root_at_slot(state, state.slot-1), + parent_hash=get_block_root_at_slot(state, state.slot - 1), slot=offset_slots[i], body_root=chunks_to_body_root(transition.shard_data_roots[i]) )) @@ -524,8 +529,10 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr # Verify correct calculation of gas prices and slots and chunk roots prev_gasprice = state.shard_states[shard].gasprice for i in range(len(offset_slots)): - shard_state, block_length, chunks = transition.shard_states[i], transition.shard_block_lengths[i], transition.shard_data_roots[i] - assert shard_state.gasprice == get_updated_gasprice(prev_gasprice, block_length) + shard_state = transition.shard_states[i] + block_length = transition.shard_block_lengths[i] + chunks = transition.shard_data_roots[i] + assert dfhard_state.gasprice == get_updated_gasprice(prev_gasprice, block_length) assert shard_state.slot == offset_slots[i] assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE prev_gasprice = shard_state.gasprice @@ -549,24 +556,29 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr def 
process_attestations(state: BeaconState, block_body: BeaconBlockBody, attestations: Sequence[Attestation]) -> None: # Basic validation for attestation in attestations: - validate_attestation(state, attestation) + validate_attestation(state, attestation) # Process crosslinks online_indices = get_online_validator_indices(state) winners = set() for shard in range(ACTIVE_SHARDS): success = False # All attestations in the block for this shard - this_shard_attestations = [attestation for attestation in attestations if get_shard(state, attestation) == shard and attestation.data.slot == state.slot] + this_shard_attestations = [ + attestation for attestation in attestations + if get_shard(state, attestation) == shard and attestation.data.slot == state.slot + ] # The committee for this shard this_shard_committee = get_beacon_committee(state, get_current_epoch(state), shard) # Loop over all shard transition roots - for shard_transition_root in sorted(set([attestation.data.shard_transition_root for attestation in this_shard_attestations])): + shard_transition_roots = set([a.data.shard_transition_root for a in this_shard_attestations]) + for shard_transition_root in sorted(shard_transition_roots): all_participants = set() participating_attestations = [] for attestation in this_shard_attestations: participating_attestations.append(attestation) if attestation.data.shard_transition_root == shard_transition_root: - all_participants = all_participants.union(get_attesting_indices(state, attestation.data, attestation.aggregation_bits)) + participants = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) + all_participants = all_participants.union(participants) if ( get_total_balance(state, online_indices.intersection(all_participants)) * 3 >= get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 @@ -574,24 +586,34 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest ): # Attestation <-> shard transition 
consistency assert shard_transition_root == hash_tree_root(block_body.shard_transition) - assert attestation.data.head_shard_root == chunks_to_body_root(block_body.shard_transition.shard_data_roots[-1]) + assert ( + attestation.data.head_shard_root + == chunks_to_body_root(block_body.shard_transition.shard_data_roots[-1]) + ) # Apply transition apply_shard_transition(state, shard, block_body.shard_transition) # Apply proposer reward and cost estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) - for shard_state, slot, length in zip(block_body.shard_transition.shard_states, offset_slots, block_body.shard_transition.shard_block_lengths): - decrease_balance(state, get_shard_proposer_index(state, shard, slot), shard_state.gasprice * length) + states_slots_lengths = zip( + block_body.shard_transition.shard_states, + offset_slots, + block_body.shard_transition.shard_block_lengths + ) + for shard_state, slot, length in states_slots_lengths: + proposer_index = get_shard_proposer_index(state, shard, slot) + decrease_balance(state, proposer_index, shard_state.gasprice * length) winners.add((shard, shard_transition_root)) success = True if not success: assert block_body.shard_transitions[shard] == ShardTransition() for attestation in attestations: + is_winning_transition = (get_shard(state, attestation), attestation.shard_transition_root) in winners pending_attestation = PendingAttestation( aggregation_bits=attestation.aggregation_bits, data=attestation.data, inclusion_delay=state.slot - data.slot, - crosslink_success=(get_shard(state, attestation), attestation.shard_transition_root) in winners and attestation.data.slot == state.slot, + crosslink_success=is_winning_transition and attestation.data.slot == state.slot, proposer_index=proposer_index ) if attestation.data.target.epoch == get_current_epoch(state): diff --git 
a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index e4e752763..689882551 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -320,11 +320,10 @@ def get_randao_epoch_for_custody_period(period: uint64, validator_index: Validat ### `get_custody_period_for_validator` ```python -def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epoch=None) -> int: +def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epoch) -> int: ''' Return the reveal period for a given validator. ''' - epoch = if epoch is None else epoch return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD ``` @@ -367,7 +366,8 @@ def process_custody_key_reveal(state: BeaconState, reveal: CustodyKeyReveal) -> revealer = state.validators[reveal.revealer_index] epoch_to_sign = get_randao_epoch_for_custody_period(revealer.next_custody_secret_to_reveal, reveal.revealer_index) - assert revealer.next_custody_secret_to_reveal < get_custody_period_for_validator(reveal.revealer_index, get_current_epoch(state)) + custody_reveal_period = get_custody_period_for_validator(reveal.revealer_index, get_current_epoch(state)) + assert revealer.next_custody_secret_to_reveal < custody_reveal_period # Revealed validator is active or exited, but not withdrawn assert is_slashable_validator(revealer, get_current_epoch(state)) diff --git a/specs/core/1_fraud-proofs.md b/specs/core/1_fraud-proofs.md index e44f790ed..bd5cce117 100644 --- a/specs/core/1_fraud-proofs.md +++ b/specs/core/1_fraud-proofs.md @@ -32,7 +32,12 @@ The proof verifies that one of the two conditions is false: ## Shard state transition function ```python -def shard_state_transition(shard: Shard, slot: Slot, pre_state: Hash, previous_beacon_root: Hash, proposer_pubkey: BLSPubkey, block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: +def shard_state_transition(shard: Shard, + slot: Slot, + pre_state: Hash, + 
previous_beacon_root: Hash, + proposer_pubkey: BLSPubkey, + block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: # We will add something more substantive in phase 2 return hash(pre_state + hash_tree_root(previous_beacon_root) + hash_tree_root(block_data)) ``` diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index e257d6eb1..62c896669 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -93,14 +93,14 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: ) for i in range(ACTIVE_SHARDS) ), online_countdown=Bytes[VALIDATOR_REGISTRY_LIMIT]( - ONLINE_PERIOD for i in range(len(pre.validators)) + ONLINE_PERIOD for i in range(len(pre.validators)) ), current_light_committee=CompactCommittee(), # computed after state creation next_light_committee=CompactCommittee(), # Custody game custody_challenge_index=0, exposed_derived_secrets=Vector[List[ValidatorIndex, PLACEHOLDER], - EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]() + EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]() ) post.current_light_committee = get_light_client_committee(post, post.epoch) post.next_light_committee = get_light_client_committee(post, post.epoch + 1) From d6bfe5d35c498c526209c71d554b0614e06b2d92 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 18 Nov 2019 16:40:02 -0700 Subject: [PATCH 060/194] lint phase 1 (2) --- scripts/build_spec.py | 4 +++- specs/core/1_beacon-chain.md | 39 ++++++++++++++++++++++++++---------- specs/core/1_custody-game.md | 27 +++++++++++++++++++++++++ 3 files changed, 58 insertions(+), 12 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index d6a06cbbc..bcc6595c6 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -53,7 +53,7 @@ from eth2spec.utils.ssz.ssz_typing import ( BasicValue, Elements, BaseBytes, BaseList, SSZType, Container, List, Vector, ByteList, ByteVector, Bitlist, Bitvector, Bits, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, - uint64, 
bit, boolean, byte, + uint64, uint8, bit, boolean, ) from eth2spec.utils.bls import ( bls_aggregate_pubkeys, @@ -245,6 +245,8 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str] if key in old_objects: # add proper spacing old_objects[key] = old_objects[key] + "\n\n" + lines = value.split("\n") + value = "\n".join([lines[0] + " # noqa: F811"] + lines[1:]) old_objects[key] = old_objects.get(key, '') + value dependency_order_ssz_objects(old_objects, custom_types) return old_objects diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 708937059..63c2071a7 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -15,6 +15,14 @@ TODO This document describes the extensions made to the Phase 0 design of The Beacon Chain to facilitate the new shards as part of Phase 1 of Eth2. +## Custom types + +We define the following Python custom types for type hinting and readability: + +| Name | SSZ equivalent | Description | +| - | - | - | +| `Shard` | `uint64` | a shard number | + ## Configuration Configuration is not namespaced. Instead it is strictly an extension; @@ -39,7 +47,7 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | | `MIN_GASPRICE` | `2**5` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | -| `DOMAIN_SHARD_LIGHT_CLIENT` | `192` | | +| `DOMAIN_LIGHT_CLIENT` | `192` | | | `DOMAIN_SHARD_COMMITTEE` | `192` | | | `DOMAIN_SHARD_PROPOSAL` | `193` | | @@ -338,8 +346,8 @@ def get_shard_committee(beacon_state: BeaconState, epoch: Epoch, shard: Shard) - ```python def get_shard_proposer_index(beacon_state: BeaconState, slot: Slot, shard: Shard) -> ValidatorIndex: - committee = get_shard_committee(beacon_state, slot_to_epoch(slot), shard) - r = bytes_to_int(get_seed(beacon_state, get_current_epoch(state), DOMAIN_SHARD_COMMITTEE)[:8]) + committee = get_shard_committee(beacon_state, compute_epoch_at_slot(slot), shard) + r = bytes_to_int(get_seed(beacon_state, get_current_epoch(beacon_state), DOMAIN_SHARD_COMMITTEE)[:8]) return committee[r % len(committee)] ``` @@ -351,7 +359,7 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque if source_epoch > 0: source_epoch -= LIGHT_CLIENT_COMMITTEE_PERIOD active_validator_indices = get_active_validator_indices(beacon_state, source_epoch) - seed = get_seed(beacon_state, source_epoch, DOMAIN_SHARD_LIGHT_CLIENT) + seed = get_seed(beacon_state, source_epoch, DOMAIN_LIGHT_CLIENT) return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS)[:TARGET_COMMITTEE_SIZE] ``` @@ -375,6 +383,14 @@ def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: return max(prev_gasprice, MIN_GASPRICE + delta) - delta ``` +#### `get_start_shard` + +```python +def get_start_shard(state: BeaconState, slot: Slot) -> Shard: + # TODO: implement start shard logic + return Shard(0) +``` + #### `get_shard` ```python @@ -476,7 +492,6 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data assert data.index < ACTIVE_SHARDS shard = get_shard(state, attestation) - 
proposer_index = get_beacon_proposer_index(state) # Signature check assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) @@ -490,7 +505,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) # Type 2: delayed attestations else: - assert state.slot - compute_start_slot_at_epoch(slot_to_epoch(data.slot)) < EPOCH_LENGTH + assert state.slot - compute_start_slot_at_epoch(compute_epoch_at_slot(data.slot)) < SLOTS_PER_EPOCH assert data.shard_transition_root == Hash() ``` @@ -532,7 +547,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr shard_state = transition.shard_states[i] block_length = transition.shard_block_lengths[i] chunks = transition.shard_data_roots[i] - assert dfhard_state.gasprice == get_updated_gasprice(prev_gasprice, block_length) + assert shard_state.gasprice == get_updated_gasprice(prev_gasprice, block_length) assert shard_state.slot == offset_slots[i] assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE prev_gasprice = shard_state.gasprice @@ -593,11 +608,13 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest # Apply transition apply_shard_transition(state, shard, block_body.shard_transition) # Apply proposer reward and cost + beacon_proposer_index = get_beacon_proposer_index(state) estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) - increase_balance(state, proposer, estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT) + proposer_reward = estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT + increase_balance(state, beacon_proposer_index, proposer_reward) states_slots_lengths = zip( block_body.shard_transition.shard_states, - offset_slots, + get_offset_slots(state, state.shard_next_slots[get_shard(attestation)]), block_body.shard_transition.shard_block_lengths ) for shard_state, 
slot, length in states_slots_lengths: @@ -612,7 +629,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest pending_attestation = PendingAttestation( aggregation_bits=attestation.aggregation_bits, data=attestation.data, - inclusion_delay=state.slot - data.slot, + inclusion_delay=state.slot - attestation.data.slot, crosslink_success=is_winning_transition and attestation.data.slot == state.slot, proposer_index=proposer_index ) @@ -654,7 +671,7 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB pubkey=bls_aggregate_pubkeys(signer_keys), message_hash=get_block_root_at_slot(state, state.slot - 1), signature=block_body.light_client_signature, - domain=DOMAIN_LIGHT_CLIENT + domain=DOMAIN_LIGHT_CLIENT, ) ``` diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 689882551..6c49ee470 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -132,6 +132,21 @@ The following types are defined, mapping into `DomainType` (little endian): ### Custody objects +#### `Crosslink` + +*Note*: Crosslinks have been removed in the phase 1 redesign. This is a placeholder until the custody game is revamped. 
+ +```python +class Crosslink(Container): + shard: uint64 + parent_root: Hash + # Crosslinking data + start_epoch: Epoch + end_epoch: Epoch + data_root: Hash +``` + + #### `CustodyChunkChallenge` ```python @@ -180,6 +195,18 @@ class CustodyBitChallengeRecord(Container): responder_key: BLSSignature ``` +#### `CustodyResponse` + +```python +class CustodyResponse(Container): + challenge_index: uint64 + chunk_index: uint64 + chunk: Vector[Bytes[PLACEHOLDER], BYTES_PER_CUSTODY_CHUNK] + data_branch: List[Bytes32, PLACEHOLDER] + chunk_bits_branch: List[Bytes32, PLACEHOLDER] + chunk_bits_leaf: Bytes32 +``` + ### New Beacon Chain operations #### `CustodyKeyReveal` From e73316c13fa034ef57d67a639197a6ff6d316670 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 19 Nov 2019 20:16:40 +0100 Subject: [PATCH 061/194] change configs to be loaded in time; reload specs module to make new config presets effective. Also fix more lint and consistency problems. --- scripts/build_spec.py | 78 ++++--------------- scripts/function_puller.py | 25 ++---- specs/core/1_beacon-chain.md | 22 +++--- specs/core/1_custody-game.md | 76 +----------------- specs/core/1_phase1-fork.md | 2 +- test_libs/pyspec/eth2spec/config/__init__.py | 0 .../pyspec/eth2spec/config/apply_config.py | 19 +++++ test_libs/pyspec/eth2spec/test/conftest.py | 8 +- test_libs/pyspec/eth2spec/test/context.py | 11 ++- 9 files changed, 65 insertions(+), 176 deletions(-) create mode 100644 test_libs/pyspec/eth2spec/config/__init__.py create mode 100644 test_libs/pyspec/eth2spec/config/apply_config.py diff --git a/scripts/build_spec.py b/scripts/build_spec.py index bcc6595c6..335286437 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -9,9 +9,13 @@ from typing import ( Optional, ) +CONFIG_LOADER = ''' +apply_constants_preset(globals()) +''' -PHASE0_IMPORTS = '''from typing import ( - Any, Dict, Set, Sequence, Tuple, Optional +PHASE0_IMPORTS = '''from eth2spec.config.apply_config import apply_constants_preset +from 
typing import ( + Dict, Set, Sequence, Tuple, Optional ) from dataclasses import ( @@ -33,8 +37,10 @@ from eth2spec.utils.bls import ( from eth2spec.utils.hash_function import hash ''' -PHASE1_IMPORTS = '''from typing import ( - Any, Dict, Set, Sequence, MutableSequence, NewType, Tuple, Union, +PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0 +from eth2spec.config.apply_config import apply_constants_preset +from typing import ( + Dict, Set, Sequence, NewType, Tuple, Union, ) from math import ( log2, @@ -101,24 +107,7 @@ def compute_committee(indices: Sequence[ValidatorIndex], # type: ignore if param_hash not in committee_cache: committee_cache[param_hash] = _compute_committee(indices, seed, index, count) - return committee_cache[param_hash] - - -# Access to overwrite spec constants based on configuration -def apply_constants_preset(preset: Dict[str, Any]) -> None: - global_vars = globals() - for k, v in preset.items(): - if k.startswith('DOMAIN_'): - global_vars[k] = DomainType(v) # domain types are defined as bytes in the configs - else: - global_vars[k] = v - - # Deal with derived constants - global_vars['GENESIS_EPOCH'] = compute_epoch_at_slot(GENESIS_SLOT) - - # Initialize SSZ types again, to account for changed lengths - init_SSZ_types() -''' + return committee_cache[param_hash]''' def remove_for_phase1(functions: Dict[str, str]): @@ -128,23 +117,10 @@ def remove_for_phase1(functions: Dict[str, str]): functions[key] = "\n".join(lines) -def strip_comments(raw: str) -> str: - comment_line_regex = re.compile(r'^\s+# ') - lines = raw.split('\n') - out = [] - for line in lines: - if not comment_line_regex.match(line): - if ' #' in line: - line = line[:line.index(' #')] - out.append(line) - return '\n'.join(out) - - def objects_to_spec(functions: Dict[str, str], custom_types: Dict[str, str], constants: Dict[str, str], ssz_objects: Dict[str, str], - inserts: Dict[str, str], imports: Dict[str, str], ) -> str: """ @@ -169,27 +145,17 @@ def 
objects_to_spec(functions: Dict[str, str], constants[k] += " # noqa: E501" constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants)) ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values()) - ssz_objects_reinitialization_spec = ( - 'def init_SSZ_types() -> None:\n global_vars = globals()\n\n ' - + '\n\n '.join([strip_comments(re.sub(r'\n\n', r'\n', re.sub(r'(?!\n\n)\n', r'\n ', value[:-1]))) - for value in ssz_objects.values()]) - + '\n\n' - + '\n'.join(map(lambda x: ' global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys())) - ) spec = ( imports + '\n\n' + new_type_definitions + '\n' + SUNDRY_CONSTANTS_FUNCTIONS + '\n\n' + constants_spec - + '\n\n\n' + ssz_objects_instantiation_spec + + '\n\n' + CONFIG_LOADER + + '\n\n' + ssz_objects_instantiation_spec + '\n\n' + functions_spec + '\n' + SUNDRY_FUNCTIONS - + '\n\n' + ssz_objects_reinitialization_spec + '\n' ) - # Handle @inserts - for key, value in inserts.items(): - spec = re.sub('[ ]*# %s\\n' % key, value, spec) return spec @@ -242,32 +208,22 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str] and returns the newer versions of the objects in dependency order. """ for key, value in new_objects.items(): - if key in old_objects: - # add proper spacing - old_objects[key] = old_objects[key] + "\n\n" - lines = value.split("\n") - value = "\n".join([lines[0] + " # noqa: F811"] + lines[1:]) - old_objects[key] = old_objects.get(key, '') + value + old_objects[key] = value dependency_order_ssz_objects(old_objects, custom_types) return old_objects -# inserts are handeled the same way as functions -combine_inserts = combine_functions - - def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: """ Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
""" - functions0, custom_types0, constants0, ssz_objects0, inserts0 = spec0 - functions1, custom_types1, constants1, ssz_objects1, inserts1 = spec1 + functions0, custom_types0, constants0, ssz_objects0 = spec0 + functions1, custom_types1, constants1, ssz_objects1 = spec1 functions = combine_functions(functions0, functions1) custom_types = combine_constants(custom_types0, custom_types1) constants = combine_constants(constants0, constants1) ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types) - inserts = combine_inserts(inserts0, inserts1) - return functions, custom_types, constants, ssz_objects, inserts + return functions, custom_types, constants, ssz_objects def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str, diff --git a/scripts/function_puller.py b/scripts/function_puller.py index b30e5b75c..1a134007e 100644 --- a/scripts/function_puller.py +++ b/scripts/function_puller.py @@ -3,8 +3,6 @@ from typing import Dict, Tuple, NewType FUNCTION_REGEX = r'^def [\w_]*' -BEGIN_INSERT_REGEX = r'# begin insert ' -END_INSERT_REGEX = r'# end insert' SpecObject = NewType('SpecObjects', Tuple[Dict[str, str], Dict[str, str], Dict[str, str], Dict[str, str]]) @@ -15,22 +13,18 @@ def get_spec(file_name: str) -> SpecObject: functions = {function_name: function_code} constants= {constant_name: constant_code} ssz_objects= {object_name: object} - inserts= {insert_tag: code to be inserted} Note: This function makes heavy use of the inherent ordering of dicts, if this is not supported by your python version, it will not work. 
""" pulling_from = None # line number of start of latest object current_name = None # most recent section title - insert_name = None # stores the label of the current insert object - functions = {} - constants = {} - ssz_objects = {} - inserts = {} + functions: Dict[str, str] = {} + constants: Dict[str, str] = {} + ssz_objects: Dict[str, str] = {} function_matcher = re.compile(FUNCTION_REGEX) - inserts_matcher = re.compile(BEGIN_INSERT_REGEX) is_ssz = False - custom_types = {} + custom_types: Dict[str, str] = {} for linenum, line in enumerate(open(file_name).readlines()): line = line.rstrip() if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`': @@ -40,15 +34,6 @@ def get_spec(file_name: str) -> SpecObject: pulling_from = linenum + 1 elif line[:3] == '```': pulling_from = None - elif inserts_matcher.match(line) is not None: - # Find @insert names - insert_name = re.search(r'@[\w]*', line).group(0) - elif insert_name is not None: - # In insert mode, either the next line is more code, or the end of the insert - if re.match(END_INSERT_REGEX, line) is not None: - insert_name = None - else: - inserts[insert_name] = inserts.get(insert_name, '') + line + '\n' else: # Handle function definitions & ssz_objects if pulling_from is not None: @@ -84,4 +69,4 @@ def get_spec(file_name: str) -> SpecObject: constants[row[0]] = row[1].replace('**TBD**', '2**32') elif row[1].startswith('uint') or row[1].startswith('Bytes'): custom_types[row[0]] = row[1] - return functions, custom_types, constants, ssz_objects, inserts + return SpecObject((functions, custom_types, constants, ssz_objects)) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 63c2071a7..919c2a36a 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -157,7 +157,7 @@ class AttestationCustodyBitWrapper(Container): ### New extended `PendingAttestation` ```python -class PendingAttestation(phase0.PendingAttestation): +class 
PendingAttestation(Container): aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] data: AttestationData inclusion_delay: Slot @@ -168,7 +168,7 @@ class PendingAttestation(phase0.PendingAttestation): ### New extended `Validator` ```python -class Validator(phase0.Validator): +class Validator(Container): pubkey: BLSPubkey withdrawal_credentials: Hash # Commitment to pubkey for withdrawals effective_balance: Gwei # Balance at stake @@ -189,7 +189,7 @@ class Validator(phase0.Validator): ### New extended `BeaconBlockBody` ```python -class BeaconBlockBody(phase0.BeaconBlockBody): +class BeaconBlockBody(Container): randao_reveal: BLSSignature eth1_data: Eth1Data # Eth1 data vote graffiti: Bytes32 # Arbitrary data @@ -218,7 +218,7 @@ class BeaconBlockBody(phase0.BeaconBlockBody): Note that the `body` has a new `BeaconBlockBody` definition. ```python -class BeaconBlock(phase0.BeaconBlock): +class BeaconBlock(Container): slot: Slot parent_root: Hash state_root: Hash @@ -231,7 +231,7 @@ class BeaconBlock(phase0.BeaconBlock): Note that aside from the new additions, `Validator` and `PendingAttestation` have new definitions. 
```python -class BeaconState(phase0.BeaconState): +class BeaconState(Container): # Versioning genesis_time: uint64 slot: Slot @@ -270,7 +270,7 @@ class BeaconState(phase0.BeaconState): custody_challenge_index: uint64 # Future derived secrets already exposed; contains the indices of the exposed validator # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS - exposed_derived_secrets: Vector[List[ValidatorIndex, PLACEHOLDER], + exposed_derived_secrets: Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] ``` @@ -610,7 +610,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest # Apply proposer reward and cost beacon_proposer_index = get_beacon_proposer_index(state) estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) - proposer_reward = estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT + proposer_reward = estimated_attester_reward // PROPOSER_REWARD_QUOTIENT increase_balance(state, beacon_proposer_index, proposer_reward) states_slots_lengths = zip( block_body.shard_transition.shard_states, @@ -659,19 +659,19 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB assert len(block_body.light_client_signature_bitfield) == len(committee) total_reward = Gwei(0) signer_keys = [] - for i, bit in enumerate(block_body.light_client_signature_bitfield): - if bit: + for i, participant_bit in enumerate(block_body.light_client_signature_bitfield): + if participant_bit: signer_keys.append(state.validators[committee[i]].pubkey) increase_balance(state, committee[i], get_base_reward(state, committee[i])) total_reward += get_base_reward(state, committee[i]) - increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_COEFFICIENT) + increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_QUOTIENT) assert 
bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), message_hash=get_block_root_at_slot(state, state.slot - 1), signature=block_body.light_client_signature, - domain=DOMAIN_LIGHT_CLIENT, + domain=DOMAIN_LIGHT_CLIENT ) ``` diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 6c49ee470..7b554720d 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -34,14 +34,12 @@ - [Helpers](#helpers) - [`ceillog2`](#ceillog2) - [`is_valid_merkle_branch_with_mixin`](#is_valid_merkle_branch_with_mixin) - - [`get_crosslink_chunk_count`](#get_crosslink_chunk_count) - [`legendre_bit`](#legendre_bit) - [`custody_subchunkify`](#custody_subchunkify) - [`get_custody_chunk_bit`](#get_custody_chunk_bit) - [`get_chunk_bits_root`](#get_chunk_bits_root) - [`get_randao_epoch_for_custody_period`](#get_randao_epoch_for_custody_period) - [`get_custody_period_for_validator`](#get_custody_period_for_validator) - - [`replace_empty_or_append`](#replace_empty_or_append) - [Per-block processing](#per-block-processing) - [Operations](#operations) - [Custody key reveals](#custody-key-reveals) @@ -168,45 +166,6 @@ class CustodyBitChallenge(Container): signature: BLSSignature ``` -#### `CustodyChunkChallengeRecord` - -```python -class CustodyChunkChallengeRecord(Container): - challenge_index: uint64 - challenger_index: ValidatorIndex - responder_index: ValidatorIndex - inclusion_epoch: Epoch - data_root: Root - depth: uint64 - chunk_index: uint64 -``` - -#### `CustodyBitChallengeRecord` - -```python -class CustodyBitChallengeRecord(Container): - challenge_index: uint64 - challenger_index: ValidatorIndex - responder_index: ValidatorIndex - inclusion_epoch: Epoch - data_root: Root - chunk_count: uint64 - chunk_bits_merkle_root: Root - responder_key: BLSSignature -``` - -#### `CustodyResponse` - -```python -class CustodyResponse(Container): - challenge_index: uint64 - chunk_index: uint64 - chunk: Vector[Bytes[PLACEHOLDER], BYTES_PER_CUSTODY_CHUNK] - 
data_branch: List[Bytes32, PLACEHOLDER] - chunk_bits_branch: List[Bytes32, PLACEHOLDER] - chunk_bits_leaf: Bytes32 -``` - ### New Beacon Chain operations #### `CustodyKeyReveal` @@ -266,13 +225,6 @@ def is_valid_merkle_branch_with_mixin(leaf: Bytes32, return value == root ``` -### `get_crosslink_chunk_count` - -```python -def get_custody_chunk_count(crosslink: Crosslink) -> int: - crosslink_length = min(MAX_EPOCHS_PER_CROSSLINK, crosslink.end_epoch - crosslink.start_epoch) - return crosslink_length * CHUNKS_PER_EPOCH -``` ### `legendre_bit` @@ -354,17 +306,6 @@ def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epo return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD ``` -### `replace_empty_or_append` - -```python -def replace_empty_or_append(list: MutableSequence[Any], new_element: Any) -> int: - for i in range(len(list)): - if is_zero(list[i]): - list[i] = new_element - return i - list.append(new_element) - return len(list) - 1 -``` ## Per-block processing @@ -526,7 +467,7 @@ def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge record.chunk_index != challenge.chunk_index ) # Verify depth - depth = ceillog2(get_custody_chunk_count(challenge.attestation.data.crosslink)) + depth = 123 # TODO assert challenge.chunk_index < 2**depth # Add new chunk challenge record new_record = CustodyChunkChallengeRecord( @@ -611,24 +552,13 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign) assert bls_verify(responder.pubkey, hash_tree_root(epoch_to_sign), challenge.responder_key, domain) # Verify the chunk count - chunk_count = get_custody_chunk_count(attestation.data.crosslink) + chunk_count = 123 # TODO assert chunk_count == len(challenge.chunk_bits) # Verify custody bit is incorrect committee = get_beacon_committee(state, epoch, shard) custody_bit = 
attestation.custody_bits[committee.index(challenge.responder_index)] assert custody_bit != get_chunk_bits_root(challenge.chunk_bits) - # Add new bit challenge record - new_record = CustodyBitChallengeRecord( - challenge_index=state.custody_challenge_index, - challenger_index=challenge.challenger_index, - responder_index=challenge.responder_index, - inclusion_epoch=get_current_epoch(state), - data_root=attestation.data.crosslink.data_root, - chunk_count=chunk_count, - chunk_bits_merkle_root=hash_tree_root(challenge.chunk_bits), - responder_key=challenge.responder_key, - ) - replace_empty_or_append(state.custody_bit_challenge_records, new_record) + # TODO: immediate processing of challenge? state.custody_challenge_index += 1 # Postpone responder withdrawability responder.withdrawable_epoch = FAR_FUTURE_EPOCH diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index 62c896669..ff258366a 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -99,7 +99,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: next_light_committee=CompactCommittee(), # Custody game custody_challenge_index=0, - exposed_derived_secrets=Vector[List[ValidatorIndex, PLACEHOLDER], + exposed_derived_secrets=Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]() ) post.current_light_committee = get_light_client_committee(post, post.epoch) diff --git a/test_libs/pyspec/eth2spec/config/__init__.py b/test_libs/pyspec/eth2spec/config/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/test_libs/pyspec/eth2spec/config/apply_config.py b/test_libs/pyspec/eth2spec/config/apply_config.py new file mode 100644 index 000000000..768abba64 --- /dev/null +++ b/test_libs/pyspec/eth2spec/config/apply_config.py @@ -0,0 +1,19 @@ +from preset_loader import loader +from typing import Dict, Any + +presets: Dict[str, Any] = {} + + +# Access to overwrite spec constants 
based on configuration +def apply_constants_preset(spec_globals: Dict[str, Any]) -> None: + global presets + for k, v in presets.items(): + if k.startswith('DOMAIN_'): + spec_globals[k] = spec_globals['DomainType'](v) # domain types are defined as bytes in the configs + else: + spec_globals[k] = v + + +def load_presets(configs_path, config_name): + global presets + presets = loader.load_presets(configs_path, config_name) diff --git a/test_libs/pyspec/eth2spec/test/conftest.py b/test_libs/pyspec/eth2spec/test/conftest.py index 5713c3470..a0a200f0b 100644 --- a/test_libs/pyspec/eth2spec/test/conftest.py +++ b/test_libs/pyspec/eth2spec/test/conftest.py @@ -1,5 +1,4 @@ -from eth2spec.phase0 import spec as spec_phase0 -from eth2spec.phase1 import spec as spec_phase1 +from eth2spec.config import apply_config # We import pytest only when it's present, i.e. when we are running tests. # The test-cases themselves can be generated without installing pytest. @@ -33,7 +32,4 @@ def pytest_addoption(parser): @fixture(autouse=True) def config(request): config_name = request.config.getoption("--config") - from preset_loader import loader - presets = loader.load_presets('../../configs/', config_name) - spec_phase0.apply_constants_preset(presets) - spec_phase1.apply_constants_preset(presets) + apply_config.load_presets('../../configs/', config_name) diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py index 3177cd0b8..0fb46aa50 100644 --- a/test_libs/pyspec/eth2spec/test/context.py +++ b/test_libs/pyspec/eth2spec/test/context.py @@ -1,5 +1,5 @@ from eth2spec.phase0 import spec as spec_phase0 -# from eth2spec.phase1 import spec as spec_phase1 +from eth2spec.phase1 import spec as spec_phase1 from eth2spec.utils import bls from .helpers.genesis import create_genesis_state @@ -8,6 +8,11 @@ from .utils import vector_test, with_meta_tags from typing import Any, Callable, Sequence +from importlib import reload + +reload(spec_phase0) 
+reload(spec_phase1) + def with_custom_state(balances_fn: Callable[[Any], Sequence[int]], threshold_fn: Callable[[Any], int]): @@ -191,9 +196,7 @@ def with_phases(phases): if 'phase0' in run_phases: ret = run_with_spec_version(spec_phase0, *args, **kw) if 'phase1' in run_phases: - # temporarily disable phase 1 tests - return - # ret = run_with_spec_version(spec_phase1, *args, **kw) + ret = run_with_spec_version(spec_phase1, *args, **kw) return ret return wrapper return decorator From 1a1c3773f935d0c8634d49dd55fc7c74edcd16cf Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 20 Nov 2019 04:15:15 +0100 Subject: [PATCH 062/194] implement custody game revamp for new shard proposal: simplifications and immediate processing, since custody data is bounded --- specs/core/1_beacon-chain.md | 25 +-- specs/core/1_custody-game.md | 385 ++++++++--------------------------- 2 files changed, 100 insertions(+), 310 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 919c2a36a..021636967 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -39,11 +39,11 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | | `SHARD_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | | `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | -| `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | -| `BLOCK_SIZE_TARGET` | `3 * 2**16` (= 196,608) | | +| `SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | +| `TARGET_SHARD_BLOCK_SIZE` | `3 * 2**16` (= 196,608) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | -| `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | +| `EMPTY_CHUNK_ROOT` | `hash_tree_root(ByteVector[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | | `MIN_GASPRICE` | `2**5` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | @@ -62,7 +62,7 @@ class ShardBlockWrapper(Container): shard_parent_root: Hash beacon_parent_root: Hash slot: Slot - body: BytesN[SHARD_BLOCK_CHUNK_SIZE] + body: ByteVector[MAX_SHARD_BLOCK_SIZE] signature: BLSSignature ``` @@ -202,8 +202,7 @@ class BeaconBlockBody(Container): deposits: List[Deposit, MAX_DEPOSITS] voluntary_exits: List[VoluntaryExit, MAX_VOLUNTARY_EXITS] # Custody game - custody_chunk_challenges: List[CustodyChunkChallenge, MAX_CUSTODY_CHUNK_CHALLENGES] - custody_bit_challenges: List[CustodyBitChallenge, MAX_CUSTODY_BIT_CHALLENGES] + custody_slashings: List[CustodySlashing, MAX_CUSTODY_SLASHINGS] custody_key_reveals: List[CustodyKeyReveal, MAX_CUSTODY_KEY_REVEALS] early_derived_secret_reveals: List[EarlyDerivedSecretReveal, MAX_EARLY_DERIVED_SECRET_REVEALS] # Shards @@ -266,8 +265,6 @@ class BeaconState(Container): current_light_committee: CompactCommittee next_light_committee: CompactCommittee # Custody game - # TODO: custody game refactor, no challenge-records, immediate processing. 
- custody_challenge_index: uint64 # Future derived secrets already exposed; contains the indices of the exposed validator # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS exposed_derived_secrets: Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH], @@ -317,7 +314,9 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ```python def chunks_to_body_root(chunks): - return hash_tree_root(chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks))) + return hash_tree_root(Vector[Hash, MAX_SHARD_BLOCK_CHUNKS]( + chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) + )) ``` ### Beacon state accessors @@ -375,11 +374,13 @@ def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) ```python def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: - if length > BLOCK_SIZE_TARGET: - delta = prev_gasprice * (length - BLOCK_SIZE_TARGET) // BLOCK_SIZE_TARGET // GASPRICE_ADJUSTMENT_COEFFICIENT + if length > TARGET_SHARD_BLOCK_SIZE: + delta = (prev_gasprice * (length - TARGET_SHARD_BLOCK_SIZE) + // TARGET_SHARD_BLOCK_SIZE // GASPRICE_ADJUSTMENT_COEFFICIENT) return min(prev_gasprice + delta, MAX_GASPRICE) else: - delta = prev_gasprice * (BLOCK_SIZE_TARGET - length) // BLOCK_SIZE_TARGET // GASPRICE_ADJUSTMENT_COEFFICIENT + delta = (prev_gasprice * (TARGET_SHARD_BLOCK_SIZE - length) + // TARGET_SHARD_BLOCK_SIZE // GASPRICE_ADJUSTMENT_COEFFICIENT) return max(prev_gasprice, MIN_GASPRICE + delta) - delta ``` diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 7b554720d..7ae1c5170 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -12,43 +12,29 @@ - [Terminology](#terminology) - [Constants](#constants) - [Misc](#misc) - - [Custody game parameters](#custody-game-parameters) - [Time parameters](#time-parameters) - [Max operations per block](#max-operations-per-block) - [Reward and penalty 
quotients](#reward-and-penalty-quotients) - [Signature domain types](#signature-domain-types) - [Data structures](#data-structures) - - [Custody objects](#custody-objects) - - [`CustodyChunkChallenge`](#custodychunkchallenge) - - [`CustodyBitChallenge`](#custodybitchallenge) - - [`CustodyChunkChallengeRecord`](#custodychunkchallengerecord) - - [`CustodyBitChallengeRecord`](#custodybitchallengerecord) - - [`CustodyResponse`](#custodyresponse) - [New beacon operations](#new-beacon-operations) + - [`CustodySlashing`](#custody-slashing) - [`CustodyKeyReveal`](#custodykeyreveal) - [`EarlyDerivedSecretReveal`](#earlyderivedsecretreveal) - - [Phase 0 container updates](#phase-0-container-updates) - - [`Validator`](#validator) - - [`BeaconState`](#beaconstate) - - [`BeaconBlockBody`](#beaconblockbody) - [Helpers](#helpers) - - [`ceillog2`](#ceillog2) - - [`is_valid_merkle_branch_with_mixin`](#is_valid_merkle_branch_with_mixin) - [`legendre_bit`](#legendre_bit) - - [`custody_subchunkify`](#custody_subchunkify) - - [`get_custody_chunk_bit`](#get_custody_chunk_bit) - - [`get_chunk_bits_root`](#get_chunk_bits_root) + - [`get_custody_atoms`](#get_custody_atoms) + - [`compute_custody_bit`](#compute_custody_bit) - [`get_randao_epoch_for_custody_period`](#get_randao_epoch_for_custody_period) - [`get_custody_period_for_validator`](#get_custody_period_for_validator) - [Per-block processing](#per-block-processing) - - [Operations](#operations) + - [Custody Game Operations](#custody-game-operations) - [Custody key reveals](#custody-key-reveals) - [Early derived secret reveals](#early-derived-secret-reveals) - - [Chunk challenges](#chunk-challenges) - - [Bit challenges](#bit-challenges) - - [Custody responses](#custody-responses) + - [Custody Slashings](#custody-slashings) - [Per-epoch processing](#per-epoch-processing) - - [Handling of custody-related deadlines](#handling-of-custody-related-deadlines) + - [Handling of reveal deadlines](#handling-of-reveal-deadlines) + - [Final 
updates](#final-updates) @@ -56,46 +42,19 @@ This document details the beacon chain additions and changes in Phase 1 of Ethereum 2.0 to support the shard data custody game, building upon the [Phase 0](0_beacon-chain.md) specification. -## Terminology - -- **Custody game**— -- **Custody period**— -- **Custody chunk**— -- **Custody chunk bit**— -- **Custody chunk challenge**— -- **Custody bit**— -- **Custody bit challenge**— -- **Custody key**— -- **Custody key reveal**— -- **Custody key mask**— - ## Constants ### Misc -| Name | Value | +| Name | Value | Unit | | - | - | | `BLS12_381_Q` | `4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787` | | `MINOR_REWARD_QUOTIENT` | `2**8` (= 256) | -| `MAX_EPOCHS_PER_CROSSLINK` | `2**6` (= 64) | epochs | ~7 hours | - -### Custody game parameters - -| Name | Value | -| - | - | -| `BYTES_PER_SHARD_BLOCK` | `2**14` (= 16,384) | -| `BYTES_PER_CUSTODY_CHUNK` | `2**9` (= 512) | -| `BYTES_PER_CUSTODY_SUBCHUNK` | `48` | -| `CHUNKS_PER_EPOCH` | `2 * BYTES_PER_SHARD_BLOCK * SLOTS_PER_EPOCH // BYTES_PER_CUSTODY_CHUNK` | -| `MAX_CUSTODY_CHUNKS` | `MAX_EPOCHS_PER_CROSSLINK * CHUNKS_PER_EPOCH` | -| `CUSTODY_DATA_DEPTH` | `ceillog2(MAX_CUSTODY_CHUNKS) + 1` | -| `CUSTODY_CHUNK_BIT_DEPTH` | `ceillog2(MAX_EPOCHS_PER_CROSSLINK * CHUNKS_PER_EPOCH // 256) + 2` | +| `BYTES_PER_CUSTODY_ATOM` | `48` | bytes | ### Time parameters | Name | Value | Unit | Duration | | - | - | :-: | :-: | -| `MAX_CHUNK_CHALLENGE_DELAY` | `2**11` (= 2,048) | epochs | ~9 days | -| `CUSTODY_RESPONSE_DEADLINE` | `2**14` (= 16,384) | epochs | ~73 days | | `RANDAO_PENALTY_EPOCHS` | `2**1` (= 2) | epochs | 12.8 minutes | | `EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS` | `2**14` | epochs | ~73 days | | `EPOCHS_PER_CUSTODY_PERIOD` | `2**11` (= 2,048) | epochs | ~9 days | @@ -108,8 +67,7 @@ This document details the beacon chain additions and changes in Phase 1 of Ether | - | - | | `MAX_CUSTODY_KEY_REVEALS` | `2**4` 
(= 16) | | `MAX_EARLY_DERIVED_SECRET_REVEALS` | `1` | -| `MAX_CUSTODY_CHUNK_CHALLENGES` | `2**2` (= 4) | -| `MAX_CUSTODY_BIT_CHALLENGES` | `2**2` (= 4) | +| `MAX_CUSTODY_SLASHINGS` | `1` | ### Reward and penalty quotients @@ -123,51 +81,29 @@ The following types are defined, mapping into `DomainType` (little endian): | Name | Value | | - | - | -| `DOMAIN_CUSTODY_BIT_CHALLENGE` | `6` | +| `DOMAIN_CUSTODY_BIT_SLASHING` | `6` | ## Data structures -### Custody objects +### New Beacon Chain operations -#### `Crosslink` - -*Note*: Crosslinks have been removed in the phase 1 redesign. This is a placeholder until the custody game is revamped. +#### `CustodySlashing` ```python -class Crosslink(Container): - shard: uint64 - parent_root: Hash - # Crosslinking data - start_epoch: Epoch - end_epoch: Epoch - data_root: Hash -``` - - -#### `CustodyChunkChallenge` - -```python -class CustodyChunkChallenge(Container): - responder_index: ValidatorIndex +class CustodySlashing(Container): + # Attestation.custody_bits[data_index][committee.index(malefactor_index)] is the target custody bit to check. + # (Attestation.data.shard_transition_root as ShardTransition).shard_data_roots[data_index] is the root of the data. 
+ data_index: uint64 + malefactor_index: ValidatorIndex + malefactor_key: BLSSignature + whistleblower_index: ValidatorIndex + shard_transition: ShardTransition attestation: Attestation - chunk_index: uint64 -``` - -#### `CustodyBitChallenge` - -```python -class CustodyBitChallenge(Container): - responder_index: ValidatorIndex - attestation: Attestation - challenger_index: ValidatorIndex - responder_key: BLSSignature - chunk_bits: Bitlist[MAX_CUSTODY_CHUNKS] + data: ByteList[MAX_SHARD_BLOCK_SIZE] signature: BLSSignature ``` -### New Beacon Chain operations - #### `CustodyKeyReveal` ```python @@ -199,33 +135,6 @@ class EarlyDerivedSecretReveal(Container): ## Helpers -### `ceillog2` - -```python -def ceillog2(x: uint64) -> int: - return (x - 1).bit_length() -``` - -### `is_valid_merkle_branch_with_mixin` - -```python -def is_valid_merkle_branch_with_mixin(leaf: Bytes32, - branch: Sequence[Bytes32], - depth: uint64, - index: uint64, - root: Root, - mixin: uint64) -> bool: - value = leaf - for i in range(depth): - if index // (2**i) % 2: - value = hash(branch[i] + value) - else: - value = hash(value + branch[i]) - value = hash(value + mixin.to_bytes(32, "little")) - return value == root -``` - - ### `legendre_bit` Returns the Legendre symbol `(a/q)` normalizes as a bit (i.e. `((a/q) + 1) // 2`). In a production implementation, a well-optimized library (e.g. GMP) should be used for this. @@ -255,39 +164,29 @@ def legendre_bit(a: int, q: int) -> int: return 0 ``` -### `custody_subchunkify` +### `custody_atoms` -Given one proof of custody chunk, returns the proof of custody subchunks of the correct sizes. +Given one set of data, return the custody atoms: each atom will be combined with one legendre bit. 
```python -def custody_subchunkify(bytez: bytes) -> Sequence[bytes]: - bytez += b'\x00' * (-len(bytez) % BYTES_PER_CUSTODY_SUBCHUNK) - return [bytez[i:i + BYTES_PER_CUSTODY_SUBCHUNK] - for i in range(0, len(bytez), BYTES_PER_CUSTODY_SUBCHUNK)] +def get_custody_atoms(bytez: bytes) -> Sequence[bytes]: + bytez += b'\x00' * (-len(bytez) % BYTES_PER_CUSTODY_ATOM) # right-padding + return [bytez[i:i + BYTES_PER_CUSTODY_ATOM] + for i in range(0, len(bytez), BYTES_PER_CUSTODY_ATOM)] ``` -### `get_custody_chunk_bit` +### `compute_custody_bit` ```python -def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool: +def compute_custody_bit(key: BLSSignature, data: bytes) -> bool: full_G2_element = bls_signature_to_G2(key) s = full_G2_element[0].coeffs - bits = [legendre_bit((i + 1) * s[i % 2] + int.from_bytes(subchunk, "little"), BLS12_381_Q) - for i, subchunk in enumerate(custody_subchunkify(chunk))] - + bits = [legendre_bit((i + 1) * s[i % 2] + int.from_bytes(atom, "little"), BLS12_381_Q) + for i, atom in enumerate(get_custody_atoms(data))] + # XOR all atom bits return bool(sum(bits) % 2) ``` -### `get_chunk_bits_root` - -```python -def get_chunk_bits_root(chunk_bits: Bitlist[MAX_CUSTODY_CHUNKS]) -> bit: - aggregated_bits = 0 - for i, b in enumerate(chunk_bits): - aggregated_bits += 2**i * b - return legendre_bit(aggregated_bits, BLS12_381_Q) -``` - ### `get_randao_epoch_for_custody_period` ```python @@ -319,8 +218,7 @@ def process_custody_game_operations(state: BeaconState, body: BeaconBlockBody) - for_ops(body.custody_key_reveals, process_custody_key_reveal) for_ops(body.early_derived_secret_reveals, process_early_derived_secret_reveal) - for_ops(body.custody_chunk_challenges, process_chunk_challenge) - for_ops(body.custody_bit_challenges, process_bit_challenge) + for_ops(body.custody_slashings, process_custody_slashing) ``` #### Custody key reveals @@ -367,7 +265,7 @@ def process_custody_key_reveal(state: BeaconState, reveal: CustodyKeyReveal) -> # Process reveal 
revealer.next_custody_secret_to_reveal += 1 - # Reward Block Preposer + # Reward Block Proposer proposer_index = get_beacon_proposer_index(state) increase_balance( state, @@ -446,190 +344,81 @@ def process_early_derived_secret_reveal(state: BeaconState, reveal: EarlyDerived state.exposed_derived_secrets[derived_secret_location].append(reveal.revealed_index) ``` -#### Chunk challenges +#### Custody Slashings ```python -def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge) -> None: +def process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashing) -> None: + attestation = custody_slashing.attestation + + # Any signed custody-slashing should result in at least one slashing. + # If the custody bits are valid, then the claim itself is slashed. + malefactor = state.validators[custody_slashing.malefactor_index] + whistleblower = state.validators[custody_slashing.whistleblower_index] + domain = get_domain(state, DOMAIN_CUSTODY_BIT_SLASHING, get_current_epoch(state)) + assert bls_verify(whistleblower.pubkey, signing_root(custody_slashing), custody_slashing.signature, domain) + # Verify that the whistleblower is slashable + assert is_slashable_validator(whistleblower, get_current_epoch(state)) + # Verify that the claimed malefactor is slashable + assert is_slashable_validator(malefactor, get_current_epoch(state)) + # Verify the attestation - assert is_valid_indexed_attestation(state, get_indexed_attestation(state, challenge.attestation)) - # Verify it is not too late to challenge - assert (compute_epoch_at_slot(challenge.attestation.data.slot) - >= get_current_epoch(state) - MAX_CHUNK_CHALLENGE_DELAY) - responder = state.validators[challenge.responder_index] - assert responder.exit_epoch >= get_current_epoch(state) - MAX_CHUNK_CHALLENGE_DELAY - # Verify the responder participated in the attestation - attesters = get_attesting_indices(state, challenge.attestation.data, challenge.attestation.aggregation_bits) - assert 
challenge.responder_index in attesters - # Verify the challenge is not a duplicate - for record in state.custody_chunk_challenge_records: - assert ( - record.data_root != challenge.attestation.data.crosslink.data_root or - record.chunk_index != challenge.chunk_index - ) - # Verify depth - depth = 123 # TODO - assert challenge.chunk_index < 2**depth - # Add new chunk challenge record - new_record = CustodyChunkChallengeRecord( - challenge_index=state.custody_challenge_index, - challenger_index=get_beacon_proposer_index(state), - responder_index=challenge.responder_index, - inclusion_epoch=get_current_epoch(state), - data_root=challenge.attestation.data.crosslink.data_root, - depth=depth, - chunk_index=challenge.chunk_index, - ) - replace_empty_or_append(state.custody_chunk_challenge_records, new_record) - - state.custody_challenge_index += 1 - # Postpone responder withdrawability - responder.withdrawable_epoch = FAR_FUTURE_EPOCH -``` - -TODO: immediate challenge processing, no state records. 
- -```python -def process_chunk_challenge_response(state: BeaconState, - response: CustodyResponse, - challenge: CustodyChunkChallengeRecord) -> None: - # Verify chunk index - assert response.chunk_index == challenge.chunk_index - # Verify bit challenge data is null - assert response.chunk_bits_branch == [] and response.chunk_bits_leaf == Hash() - # Verify minimum delay - assert get_current_epoch(state) >= challenge.inclusion_epoch + MAX_SEED_LOOKAHEAD - # Verify the chunk matches the crosslink data root - assert is_valid_merkle_branch( - leaf=hash_tree_root(response.chunk), - branch=response.data_branch, - depth=challenge.depth, - index=response.chunk_index, - root=challenge.data_root, - ) - # Clear the challenge - records = state.custody_chunk_challenge_records - records[records.index(challenge)] = CustodyChunkChallengeRecord() - # Reward the proposer - proposer_index = get_beacon_proposer_index(state) - increase_balance(state, proposer_index, Gwei(get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT)) -``` - -#### Bit challenges - -```python -def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> None: - attestation = challenge.attestation - epoch = attestation.data.target.epoch - shard = attestation.data.crosslink.shard - - # Verify challenge signature - challenger = state.validators[challenge.challenger_index] - domain = get_domain(state, DOMAIN_CUSTODY_BIT_CHALLENGE, get_current_epoch(state)) - # TODO incorrect hash-tree-root, but this changes with phase 1 PR #1483 - assert bls_verify(challenger.pubkey, hash_tree_root(challenge), challenge.signature, domain) - # Verify challenger is slashable - assert is_slashable_validator(challenger, get_current_epoch(state)) - # Verify attestation assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) - # Verify attestation is eligible for challenging - responder = state.validators[challenge.responder_index] - assert get_current_epoch(state) <= 
get_randao_epoch_for_custody_period( - get_custody_period_for_validator(challenge.responder_index, epoch), - challenge.responder_index - ) + 2 * EPOCHS_PER_CUSTODY_PERIOD + responder.max_reveal_lateness - # Verify the responder participated in the attestation + # TODO: custody_slashing.data is not chunked like shard blocks yet, result is lots of padding. + + # TODO: can do a single combined merkle proof of data being attested. + # Verify the shard transition is indeed attested by the attestation + shard_transition = custody_slashing.shard_transition + assert hash_tree_root(shard_transition) == attestation.shard_transition_root + # Verify that the provided data matches the shard-transition + shard_chunk_roots = shard_transition.shard_data_roots[custody_slashing.data_index] + assert hash_tree_root(custody_slashing.data) == chunks_to_body_root(shard_chunk_roots) + + # Verify existence of claimed malefactor attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) - assert challenge.responder_index in attesters - # Verifier challenger is not already challenging - for record in state.custody_bit_challenge_records: - assert record.challenger_index != challenge.challenger_index - # Verify the responder custody key - epoch_to_sign = get_randao_epoch_for_custody_period( - get_custody_period_for_validator(challenge.responder_index, epoch), - challenge.responder_index, - ) - domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign) - assert bls_verify(responder.pubkey, hash_tree_root(epoch_to_sign), challenge.responder_key, domain) - # Verify the chunk count - chunk_count = 123 # TODO - assert chunk_count == len(challenge.chunk_bits) - # Verify custody bit is incorrect - committee = get_beacon_committee(state, epoch, shard) - custody_bit = attestation.custody_bits[committee.index(challenge.responder_index)] - assert custody_bit != get_chunk_bits_root(challenge.chunk_bits) - # TODO: immediate processing of challenge? 
- state.custody_challenge_index += 1
-    # Postpone responder withdrawability
-    responder.withdrawable_epoch = FAR_FUTURE_EPOCH
+    assert custody_slashing.malefactor_index in attesters
+
+    # Get the custody bit
+    custody_bits = attestation.custody_bits[custody_slashing.data_index]
+    claimed_custody_bit = custody_bits[attesters.index(custody_slashing.malefactor_index)]
+
+    # Compute the custody bit
+    computed_custody_bit = compute_custody_bit(custody_slashing.malefactor_key, custody_slashing.data)
+
+    # Verify the claim
+    if claimed_custody_bit != computed_custody_bit:
+        # Slash the malefactor, reward the other committee members
+        slash_validator(state, custody_slashing.malefactor_index)
+        whistleblower_reward = Gwei(malefactor.effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT) // (len(attesters) - 1)
+        for attester_index in attesters:
+            if attester_index != custody_slashing.malefactor_index:
+                increase_balance(state, attester_index, whistleblower_reward)
+        # No special whistleblower reward: it is expected to be an attester. Others are free to slash too however.
+    else:
+        # The claim was false, the custody bit was correct. Slash the whistleblower that induced this work.
+        slash_validator(state, custody_slashing.whistleblower_index)
 ```
 
-TODO: immediate challenge processing, no state records.
- -```python -def process_bit_challenge_response(state: BeaconState, - response: CustodyResponse, - challenge: CustodyBitChallengeRecord) -> None: - # Verify chunk index - assert response.chunk_index < challenge.chunk_count - # Verify responder has not been slashed - responder = state.validators[challenge.responder_index] - assert not responder.slashed - # Verify the chunk matches the crosslink data root - assert is_valid_merkle_branch( - leaf=hash_tree_root(response.chunk), - branch=response.data_branch, - depth=ceillog2(challenge.chunk_count), - index=response.chunk_index, - root=challenge.data_root, - ) - # Verify the chunk bit leaf matches the challenge data - assert is_valid_merkle_branch_with_mixin( - leaf=hash_tree_root(response.chunk_bits_leaf), - branch=response.chunk_bits_branch, - depth=ceillog2(MAX_CUSTODY_CHUNKS // 256), - index=response.chunk_index // 256, - root=challenge.chunk_bits_merkle_root, - mixin=challenge.chunk_count, - ) - # Verify the chunk bit does not match the challenge chunk bit - assert (get_custody_chunk_bit(challenge.responder_key, response.chunk) - != response.chunk_bits_leaf[response.chunk_index % 256]) - # Clear the challenge - records = state.custody_bit_challenge_records - records[records.index(challenge)] = CustodyBitChallengeRecord() - # Slash challenger - slash_validator(state, challenge.challenger_index, challenge.responder_index) -``` ## Per-epoch processing -### Handling of custody-related deadlines +### Handling of reveal deadlines Run `process_reveal_deadlines(state)` after `process_registry_updates(state)`: ```python def process_reveal_deadlines(state: BeaconState) -> None: for index, validator in enumerate(state.validators): - deadline = validator.next_custody_secret_to_reveal + (CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD) - if get_custody_period_for_validator(state, ValidatorIndex(index)) > deadline: + if get_custody_period_for_validator(state, ValidatorIndex(index)) > 
validator.next_custody_secret_to_reveal: slash_validator(state, ValidatorIndex(index)) ``` +### Final updates + After `process_final_updates(state)`, additional updates are made for the custody game: ```python def process_custody_final_updates(state: BeaconState) -> None: - current_epoch = get_current_epoch(state) # Clean up exposed RANDAO key reveals - state.exposed_derived_secrets[current_epoch % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] = [] - # Reset withdrawable epochs if challenge records are empty - records = state.custody_chunk_challenge_records + state.custody_bit_challenge_records - validator_indices_in_records = set( - [record.challenger_index for record in records] + [record.responder_index for record in records] - ) - for index, validator in enumerate(state.validators): - if index not in validator_indices_in_records: - if validator.exit_epoch != FAR_FUTURE_EPOCH and validator.withdrawable_epoch == FAR_FUTURE_EPOCH: - validator.withdrawable_epoch = Epoch(validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY) + state.exposed_derived_secrets[get_current_epoch(state) % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] = [] ``` From 41be2ed3ce84fff54a9a0a05b8736b8583687810 Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 20 Nov 2019 04:43:32 +0100 Subject: [PATCH 063/194] bugfix custody bit index lookup + lint fixes --- scripts/build_spec.py | 25 +++++++++++-------------- specs/core/0_beacon-chain.md | 2 +- specs/core/1_beacon-chain.md | 14 +++++++------- specs/core/1_custody-game.md | 13 +++++++------ 4 files changed, 26 insertions(+), 28 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index 335286437..6dfc4e6b2 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -15,7 +15,7 @@ apply_constants_preset(globals()) PHASE0_IMPORTS = '''from eth2spec.config.apply_config import apply_constants_preset from typing import ( - Dict, Set, Sequence, Tuple, Optional + Any, Callable, Dict, Set, Sequence, Tuple, Optional 
) from dataclasses import ( @@ -40,7 +40,7 @@ from eth2spec.utils.hash_function import hash PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0 from eth2spec.config.apply_config import apply_constants_preset from typing import ( - Dict, Set, Sequence, NewType, Tuple, Union, + Any, Callable, Dict, Set, Sequence, NewType, Tuple, Union, ) from math import ( log2, @@ -110,13 +110,6 @@ def compute_committee(indices: Sequence[ValidatorIndex], # type: ignore return committee_cache[param_hash]''' -def remove_for_phase1(functions: Dict[str, str]): - for key, value in functions.items(): - lines = value.split("\n") - lines = filter(lambda s: "[to be removed in phase 1]" not in s, lines) - functions[key] = "\n".join(lines) - - def objects_to_spec(functions: Dict[str, str], custom_types: Dict[str, str], constants: Dict[str, str], @@ -172,10 +165,10 @@ def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, st ignored_dependencies = [ - 'bit', 'boolean', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', 'ByteList', 'ByteVector' + 'bit', 'boolean', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', 'Bytes1', 'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', - 'bytes', 'byte', 'ByteVector' # to be removed after updating spec doc + 'bytes', 'byte', 'Bytes', 'BytesN' # to be removed after updating spec doc ] @@ -209,7 +202,6 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str] """ for key, value in new_objects.items(): old_objects[key] = value - dependency_order_ssz_objects(old_objects, custom_types) return old_objects @@ -226,6 +218,11 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: return functions, custom_types, constants, ssz_objects +def dependency_order_spec(objs: SpecObject): + functions, custom_types, constants, ssz_objects = objs + 
dependency_order_ssz_objects(ssz_objects, custom_types) + + def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str, v_guide_sourcefile: str, outfile: str=None) -> Optional[str]: phase0_spec = get_spec(phase0_sourcefile) @@ -234,6 +231,7 @@ def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str, spec_objects = phase0_spec for value in [fork_choice_spec, v_guide]: spec_objects = combine_spec_objects(spec_objects, value) + dependency_order_spec(spec_objects) spec = objects_to_spec(*spec_objects, PHASE0_IMPORTS) if outfile is not None: with open(outfile, 'w') as out: @@ -259,11 +257,10 @@ def build_phase1_spec(phase0_beacon_sourcefile: str, phase1_fork_sourcefile, ) all_spescs = [get_spec(spec) for spec in all_sourcefiles] - for spec in all_spescs: - remove_for_phase1(spec[0]) spec_objects = all_spescs[0] for value in all_spescs[1:]: spec_objects = combine_spec_objects(spec_objects, value) + dependency_order_spec(spec_objects) spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS) if outfile is not None: with open(outfile, 'w') as out: diff --git a/specs/core/0_beacon-chain.md b/specs/core/0_beacon-chain.md index 32c10c4ec..7b182e087 100644 --- a/specs/core/0_beacon-chain.md +++ b/specs/core/0_beacon-chain.md @@ -1419,7 +1419,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # Verify that outstanding deposits are processed up to the maximum number of deposits assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - def for_ops(operations, fn): + def for_ops(operations: Sequence[Any], fn: Callable[[BeaconState, Any], None]) -> None: for operation in operations: fn(state, operation) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 021636967..3f964641c 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -39,11 +39,11 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | | `SHARD_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | | `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | -| `SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | +| `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | | `TARGET_SHARD_BLOCK_SIZE` | `3 * 2**16` (= 196,608) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | -| `EMPTY_CHUNK_ROOT` | `hash_tree_root(ByteVector[SHARD_BLOCK_CHUNK_SIZE]())` | | +| `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | | `MIN_GASPRICE` | `2**5` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | @@ -62,7 +62,7 @@ class ShardBlockWrapper(Container): shard_parent_root: Hash beacon_parent_root: Hash slot: Slot - body: ByteVector[MAX_SHARD_BLOCK_SIZE] + body: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] signature: BLSSignature ``` @@ -315,7 +315,7 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ```python def chunks_to_body_root(chunks): return hash_tree_root(Vector[Hash, MAX_SHARD_BLOCK_CHUNKS]( - chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) + chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) )) ``` @@ -376,11 +376,11 @@ def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) def get_updated_gasprice(prev_gasprice: Gwei, length: uint8) -> Gwei: if length > TARGET_SHARD_BLOCK_SIZE: delta = (prev_gasprice * (length - TARGET_SHARD_BLOCK_SIZE) - // TARGET_SHARD_BLOCK_SIZE // GASPRICE_ADJUSTMENT_COEFFICIENT) + // TARGET_SHARD_BLOCK_SIZE // GASPRICE_ADJUSTMENT_COEFFICIENT) return min(prev_gasprice + delta, MAX_GASPRICE) else: delta = (prev_gasprice * (TARGET_SHARD_BLOCK_SIZE - length) - // TARGET_SHARD_BLOCK_SIZE // 
GASPRICE_ADJUSTMENT_COEFFICIENT) + // TARGET_SHARD_BLOCK_SIZE // GASPRICE_ADJUSTMENT_COEFFICIENT) return max(prev_gasprice, MIN_GASPRICE + delta) - delta ``` @@ -465,7 +465,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: # Verify that outstanding deposits are processed up to the maximum number of deposits assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - def for_ops(operations, fn): + def for_ops(operations: Sequence[Any], fn: Callable[[BeaconState, Any], None]) -> None: for operation in operations: fn(state, operation) diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 7ae1c5170..ebba75a7f 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -100,7 +100,7 @@ class CustodySlashing(Container): whistleblower_index: ValidatorIndex shard_transition: ShardTransition attestation: Attestation - data: ByteList[MAX_SHARD_BLOCK_SIZE] + data: Bytes[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] signature: BLSSignature ``` @@ -178,13 +178,13 @@ def get_custody_atoms(bytez: bytes) -> Sequence[bytes]: ### `compute_custody_bit` ```python -def compute_custody_bit(key: BLSSignature, data: bytes) -> bool: +def compute_custody_bit(key: BLSSignature, data: bytes) -> bit: full_G2_element = bls_signature_to_G2(key) s = full_G2_element[0].coeffs bits = [legendre_bit((i + 1) * s[i % 2] + int.from_bytes(atom, "little"), BLS12_381_Q) for i, atom in enumerate(get_custody_atoms(data))] # XOR all atom bits - return bool(sum(bits) % 2) + return bit(sum(bits) % 2) ``` ### `get_randao_epoch_for_custody_period` @@ -212,7 +212,7 @@ def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epo ```python def process_custody_game_operations(state: BeaconState, body: BeaconBlockBody) -> None: - def for_ops(operations, fn): + def for_ops(operations: Sequence[Any], fn: Callable[[BeaconState, Any], None]) -> None: for operation in operations: 
fn(state, operation) @@ -374,13 +374,14 @@ def process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashi shard_chunk_roots = shard_transition.shard_data_roots[custody_slashing.data_index] assert hash_tree_root(custody_slashing.data) == chunks_to_body_root(shard_chunk_roots) - # Verify existence of claimed malefactor + # Verify existence and participation of claimed malefactor attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) assert custody_slashing.malefactor_index in attesters # Get the custody bit custody_bits = attestation.custody_bits[custody_slashing.data_index] - claimed_custody_bit = custody_bits[attesters.index(custody_slashing.malefactor_index)] + committee = get_beacon_committee(state, attestation.data.slot, attestation.data.index) + claimed_custody_bit = custody_bits[committee.index(custody_slashing.malefactor_index)] # Compute the custody bit computed_custody_bit = compute_custody_bit(custody_slashing.data) From 1623d40b6df8d9c03c691ad79413ec4eb8050084 Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 20 Nov 2019 04:48:00 +0100 Subject: [PATCH 064/194] fix: check malefactor custody key --- specs/core/1_custody-game.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index ebba75a7f..ed91bf8a0 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -378,13 +378,21 @@ def process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashi attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) assert custody_slashing.malefactor_index in attesters + # Verify the malefactor custody key + epoch_to_sign = get_randao_epoch_for_custody_period( + get_custody_period_for_validator(state, custody_slashing.malefactor_index, attestation.data.target.epoch), + custody_slashing.malefactor_index, + ) + domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign) + 
assert bls_verify(malefactor.pubkey, hash_tree_root(epoch_to_sign), custody_slashing.malefactor_key, domain) + # Get the custody bit custody_bits = attestation.custody_bits[custody_slashing.data_index] committee = get_beacon_committee(state, attestation.data.slot, attestation.data.index) claimed_custody_bit = custody_bits[committee.index(custody_slashing.malefactor_index)] # Compute the custody bit - computed_custody_bit = compute_custody_bit(custody_slashing.data) + computed_custody_bit = compute_custody_bit(custody_slashing.malefactor_key, custody_slashing.data) # Verify the claim if claimed_custody_bit != computed_custody_bit: From 72a9dfc43092967e47775cf62fc5e07947296a3f Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 20 Nov 2019 05:11:19 +0100 Subject: [PATCH 065/194] fix lint problems --- specs/core/1_beacon-chain.md | 40 ++++++++++++++++++++++-------------- specs/core/1_custody-game.md | 8 +++++--- specs/core/1_phase1-fork.md | 8 ++++---- 3 files changed, 34 insertions(+), 22 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 3f964641c..4fe49c92d 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -34,18 +34,18 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | - | - | - | - | | `MAX_SHARDS` | `2**10` (= 1024) | | `ACTIVE_SHARDS` | `2**6` (= 64) | -| `ONLINE_PERIOD` | `2**3` (= 8) | epochs | ~51 min | +| `ONLINE_PERIOD` | `Epoch(2**3)` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | -| `LIGHT_CLIENT_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | -| `SHARD_COMMITTEE_PERIOD` | `2**8` (= 256) | epochs | ~27 hours | +| `LIGHT_CLIENT_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | +| `SHARD_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | | `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | | `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | | `TARGET_SHARD_BLOCK_SIZE` | `3 * 2**16` (= 196,608) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | | `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | -| `MAX_GASPRICE` | `2**14` (= 16,384) | Gwei | | -| `MIN_GASPRICE` | `2**5` (= 32) | Gwei | | +| `MAX_GASPRICE` | `Gwei(2**14)` (= 16,384) | Gwei | | +| `MIN_GASPRICE` | `Gwei(2**5)` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | | `DOMAIN_LIGHT_CLIENT` | `192` | | | `DOMAIN_SHARD_COMMITTEE` | `192` | | @@ -313,7 +313,7 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid #### `chunks_to_body_root` ```python -def chunks_to_body_root(chunks): +def chunks_to_body_root(chunks: List[Hash, MAX_SHARD_BLOCK_CHUNKS]) -> Hash: return hash_tree_root(Vector[Hash, MAX_SHARD_BLOCK_CHUNKS]( chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) )) @@ -321,6 +321,16 @@ def chunks_to_body_root(chunks): ### Beacon state accessors +#### `get_previous_slot` + +```python +def get_previous_slot(state: BeaconState) -> Slot: + if state.slot > 0: + return Slot(state.slot - 1) + else: + return Slot(0) +``` + #### `get_online_validator_indices` ```python @@ 
-403,7 +413,7 @@ def get_shard(state: BeaconState, attestation: Attestation) -> Shard: ```python def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: - return [start_slot + x for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] + return [Slot(start_slot + x) for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] ``` @@ -503,7 +513,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: # Correct data root count assert len(attestation.custody_bits) == len(get_offset_slots(state, state.shard_next_slots[shard])) # Correct parent block root - assert data.beacon_block_root == get_block_root_at_slot(state, state.slot - 1) + assert data.beacon_block_root == get_block_root_at_slot(state, get_previous_slot(state)) # Type 2: delayed attestations else: assert state.slot - compute_start_slot_at_epoch(compute_epoch_at_slot(data.slot)) < SLOTS_PER_EPOCH @@ -535,7 +545,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr if any(transition.shard_data_roots): headers.append(ShardSignableHeader( shard_parent_root=shard_parent_root, - parent_hash=get_block_root_at_slot(state, state.slot - 1), + parent_hash=get_block_root_at_slot(state, get_previous_slot(state)), slot=offset_slots[i], body_root=chunks_to_body_root(transition.shard_data_roots[i]) )) @@ -576,7 +586,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest # Process crosslinks online_indices = get_online_validator_indices(state) winners = set() - for shard in range(ACTIVE_SHARDS): + for shard in map(Shard, range(ACTIVE_SHARDS)): success = False # All attestations in the block for this shard this_shard_attestations = [ @@ -588,7 +598,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest # Loop over all shard transition roots shard_transition_roots = set([a.data.shard_transition_root for a in this_shard_attestations]) for shard_transition_root in 
sorted(shard_transition_roots): - all_participants = set() + all_participants: Set[ValidatorIndex] = set() participating_attestations = [] for attestation in this_shard_attestations: participating_attestations.append(attestation) @@ -611,11 +621,11 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest # Apply proposer reward and cost beacon_proposer_index = get_beacon_proposer_index(state) estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) - proposer_reward = estimated_attester_reward // PROPOSER_REWARD_QUOTIENT + proposer_reward = Gwei(estimated_attester_reward // PROPOSER_REWARD_QUOTIENT) increase_balance(state, beacon_proposer_index, proposer_reward) states_slots_lengths = zip( block_body.shard_transition.shard_states, - get_offset_slots(state, state.shard_next_slots[get_shard(attestation)]), + get_offset_slots(state, state.shard_next_slots[get_shard(state, attestation)]), block_body.shard_transition.shard_block_lengths ) for shard_state, slot, length in states_slots_lengths: @@ -666,11 +676,11 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB increase_balance(state, committee[i], get_base_reward(state, committee[i])) total_reward += get_base_reward(state, committee[i]) - increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_QUOTIENT) + increase_balance(state, get_beacon_proposer_index(state), Gwei(total_reward // PROPOSER_REWARD_QUOTIENT)) assert bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), - message_hash=get_block_root_at_slot(state, state.slot - 1), + message_hash=get_block_root_at_slot(state, get_previous_slot(state)), signature=block_body.light_client_signature, domain=DOMAIN_LIGHT_CLIENT ) diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index ed91bf8a0..61710cd5f 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -380,7 +380,7 @@ def 
process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashi # Verify the malefactor custody key epoch_to_sign = get_randao_epoch_for_custody_period( - get_custody_period_for_validator(state, custody_slashing.malefactor_index, attestation.data.target.epoch), + get_custody_period_for_validator(custody_slashing.malefactor_index, attestation.data.target.epoch), custody_slashing.malefactor_index, ) domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign) @@ -398,7 +398,8 @@ def process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashi if claimed_custody_bit != computed_custody_bit: # Slash the malefactor, reward the other committee members slash_validator(state, custody_slashing.malefactor_index) - whistleblower_reward = Gwei(malefactor.effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT) // len(attesters - 1) + others_count = len(committee) - 1 + whistleblower_reward = Gwei(malefactor.effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT // others_count) for attester_index in attesters: if attester_index != custody_slashing.malefactor_index: increase_balance(state, attester_index, whistleblower_reward) @@ -417,8 +418,9 @@ Run `process_reveal_deadlines(state)` after `process_registry_updates(state)`: ```python def process_reveal_deadlines(state: BeaconState) -> None: + epoch = get_current_epoch(state) for index, validator in enumerate(state.validators): - if get_custody_period_for_validator(state, ValidatorIndex(index)) > validator.next_custody_secret_to_reveal: + if get_custody_period_for_validator(ValidatorIndex(index), epoch) > validator.next_custody_secret_to_reveal: slash_validator(state, ValidatorIndex(index)) ``` diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index ff258366a..a1e14ec35 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -64,9 +64,9 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: activation_epoch=phase0_validator.activation_eligibility_epoch, 
exit_epoch=phase0_validator.exit_epoch, withdrawable_epoch=phase0_validator.withdrawable_epoch, - next_custody_secret_to_reveal=get_custody_period_for_validator(validator_index, epoch), + next_custody_secret_to_reveal=get_custody_period_for_validator(ValidatorIndex(i), epoch), max_reveal_lateness=0, # TODO custody refactor. Outdated? - ) for validator_index, phase0_validator in enumerate(pre.validators) + ) for i, phase0_validator in enumerate(pre.validators) ), balances=pre.balances, # Randomness @@ -102,7 +102,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: exposed_derived_secrets=Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]() ) - post.current_light_committee = get_light_client_committee(post, post.epoch) - post.next_light_committee = get_light_client_committee(post, post.epoch + 1) + post.current_light_committee = committee_to_compact_committee(post, get_light_client_committee(post, post.epoch)) + post.next_light_committee = committee_to_compact_committee(post, get_light_client_committee(post, post.epoch + 1)) return post ``` From ddd7cd29640c4216fb899a9b88d47c95bd7a58c8 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Fri, 22 Nov 2019 15:45:55 -0700 Subject: [PATCH 066/194] divide new and old containers out in phase 1 --- specs/core/1_beacon-chain.md | 170 ++++++++++++++++++----------------- 1 file changed, 88 insertions(+), 82 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 4fe49c92d..6950d3517 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -51,45 +51,14 @@ Configuration is not namespaced. Instead it is strictly an extension; | `DOMAIN_SHARD_COMMITTEE` | `192` | | | `DOMAIN_SHARD_PROPOSAL` | `193` | | -## Containers +## Updated containers -### `ShardBlockWrapper` +The following containers have updated definitions in Phase 1. 
-_Wrapper for being broadcasted over the network._ +### Extended `AttestationData` ```python -class ShardBlockWrapper(Container): - shard_parent_root: Hash - beacon_parent_root: Hash - slot: Slot - body: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] - signature: BLSSignature -``` - -### `ShardSignableHeader` - -```python -class ShardSignableHeader(Container): - shard_parent_root: Hash - beacon_parent_root: Hash - slot: Slot - body_root: Hash -``` - -### `ShardState` - -```python -class ShardState(Container): - slot: Slot - gasprice: Gwei - data: Hash - latest_block_root: Hash -``` - -### New `AttestationData` - -```python -class AttestationData(phase0.AttestationData): +class AttestationData(Container): slot: Slot index: CommitteeIndex # LMD GHOST vote @@ -103,23 +72,7 @@ class AttestationData(phase0.AttestationData): shard_transition_root: Hash ``` -### `ShardTransition` - -```python -class ShardTransition(Container): - # Starting from slot - start_slot: Slot - # Shard block lengths - shard_block_lengths: List[uint64, MAX_SHARD_BLOCKS_PER_ATTESTATION] - # Shard data roots - shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] - # Intermediate shard states - shard_states: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] - # Proposer signature aggregate - proposer_signature_aggregate: BLSSignature -``` - -### New `Attestation` +### Extended `Attestation` ```python class Attestation(Container): @@ -129,32 +82,7 @@ class Attestation(Container): signature: BLSSignature ``` -### `AttestationAndCommittee` - -```python -class AttestationAndCommittee(Container): - committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] - attestation: Attestation -``` - -### `CompactCommittee` - -```python -class CompactCommittee(Container): - pubkeys: List[BLSPubkey, MAX_VALIDATORS_PER_COMMITTEE] - compact_validators: List[uint64, MAX_VALIDATORS_PER_COMMITTEE] -``` - -### `AttestationCustodyBitWrapper` - -```python -class 
AttestationCustodyBitWrapper(Container): - attestation_root: Hash - block_index: uint64 - bit: boolean -``` - -### New extended `PendingAttestation` +### Extended `PendingAttestation` ```python class PendingAttestation(Container): @@ -165,7 +93,7 @@ class PendingAttestation(Container): crosslink_success: boolean ``` -### New extended `Validator` +### Extended `Validator` ```python class Validator(Container): @@ -186,7 +114,7 @@ class Validator(Container): max_reveal_lateness: Epoch ``` -### New extended `BeaconBlockBody` +### Extended `BeaconBlockBody` ```python class BeaconBlockBody(Container): @@ -212,7 +140,7 @@ class BeaconBlockBody(Container): light_client_signature: BLSSignature ``` -### New extended `BeaconBlock` +### Extended `BeaconBlock` Note that the `body` has a new `BeaconBlockBody` definition. @@ -225,7 +153,7 @@ class BeaconBlock(Container): signature: BLSSignature ``` -### New extended `BeaconState` +### Extended `BeaconState` Note that aside from the new additions, `Validator` and `PendingAttestation` have new definitions. @@ -271,6 +199,84 @@ class BeaconState(Container): EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS] ``` +## New containers + +The following containers are new in Phase 1. 
+ +### `ShardBlockWrapper` + +_Wrapper for being broadcasted over the network._ + +```python +class ShardBlockWrapper(Container): + shard_parent_root: Hash + beacon_parent_root: Hash + slot: Slot + body: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] + signature: BLSSignature +``` + +### `ShardSignableHeader` + +```python +class ShardSignableHeader(Container): + shard_parent_root: Hash + beacon_parent_root: Hash + slot: Slot + body_root: Hash +``` + +### `ShardState` + +```python +class ShardState(Container): + slot: Slot + gasprice: Gwei + data: Hash + latest_block_root: Hash +``` + +### `ShardTransition` + +```python +class ShardTransition(Container): + # Starting from slot + start_slot: Slot + # Shard block lengths + shard_block_lengths: List[uint64, MAX_SHARD_BLOCKS_PER_ATTESTATION] + # Shard data roots + shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] + # Intermediate shard states + shard_states: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] + # Proposer signature aggregate + proposer_signature_aggregate: BLSSignature +``` + +### `AttestationAndCommittee` + +```python +class AttestationAndCommittee(Container): + committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] + attestation: Attestation +``` + +### `CompactCommittee` + +```python +class CompactCommittee(Container): + pubkeys: List[BLSPubkey, MAX_VALIDATORS_PER_COMMITTEE] + compact_validators: List[uint64, MAX_VALIDATORS_PER_COMMITTEE] +``` + +### `AttestationCustodyBitWrapper` + +```python +class AttestationCustodyBitWrapper(Container): + attestation_root: Hash + block_index: uint64 + bit: boolean +``` + ## Helper functions ### Crypto From a1ac0d5a80df8d88dccae15a1b9fc2ac5aca4372 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Fri, 22 Nov 2019 16:26:36 -0700 Subject: [PATCH 067/194] fix get_shard_committee --- specs/core/1_beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/core/1_beacon-chain.md 
b/specs/core/1_beacon-chain.md index 6950d3517..d4531d7d9 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -354,7 +354,7 @@ def get_shard_committee(beacon_state: BeaconState, epoch: Epoch, shard: Shard) - source_epoch -= SHARD_COMMITTEE_PERIOD active_validator_indices = get_active_validator_indices(beacon_state, source_epoch) seed = get_seed(beacon_state, source_epoch, DOMAIN_SHARD_COMMITTEE) - return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS) + return compute_committee(active_validator_indices, seed, shard, ACTIVE_SHARDS) ``` #### `get_shard_proposer_index` From f533fef1670d8827dcf92205c99239d86f6d1e00 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Fri, 22 Nov 2019 18:09:31 -0700 Subject: [PATCH 068/194] add missing attestation validations; cleanup process_attestations and fix minor bugs --- specs/core/1_beacon-chain.md | 169 ++++++++++++++++++++++------------- specs/core/1_phase1-fork.md | 2 +- 2 files changed, 110 insertions(+), 61 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index d4531d7d9..510ca8d5f 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -415,6 +415,14 @@ def get_shard(state: BeaconState, attestation: Attestation) -> Shard: return Shard((attestation.data.index + get_start_shard(state, attestation.data.slot)) % ACTIVE_SHARDS) ``` +#### `get_next_slot_for_shard` + +```python +def get_next_slot_for_shard(state: BeaconState, shard: Shard) -> Slot: + return Slot(state.shard_transitions[shard].slot + 1) +``` + + #### `get_offset_slots` ```python @@ -422,7 +430,6 @@ def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: return [Slot(start_slot + x) for x in SHARD_BLOCK_OFFSETS if start_slot + x < state.slot] ``` - ### Predicates #### Updated `is_valid_indexed_attestation` @@ -507,17 +514,30 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: ```python def 
validate_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data + assert data.index < get_committee_count_at_slot(state, data.slot) assert data.index < ACTIVE_SHARDS + assert data.target.epoch in (get_previous_epoch(state), get_current_epoch(state)) + assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot <= data.slot + SLOTS_PER_EPOCH + + committee = get_beacon_committee(state, data.slot, data.index) + assert len(attestation.aggregation_bits) == len(committee) + + if attestation.data.target.epoch == get_current_epoch(state): + assert attestation.data.source == state.current_justified_checkpoint + else: + assert attestation.data.source == state.previous_justified_checkpoint + shard = get_shard(state, attestation) + shard_start_slot = get_next_slot_for_shard(state, shard) # Signature check assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) # Type 1: on-time attestations if attestation.custody_bits != []: # Correct slot - assert data.slot == state.slot + assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY == state.slot # Correct data root count - assert len(attestation.custody_bits) == len(get_offset_slots(state, state.shard_next_slots[shard])) + assert len(attestation.custody_bits) == len(get_offset_slots(state, shard_start_slot)) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, get_previous_slot(state)) # Type 2: delayed attestations @@ -531,7 +551,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: ```python def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTransition) -> None: # Slot the attestation starts counting from - start_slot = state.shard_next_slots[shard] + start_slot = get_next_slot_for_shard(state, shard) # Correct data root count offset_slots = get_offset_slots(state, start_slot) @@ -543,7 +563,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, 
transition: ShardTr ) assert transition.start_slot == start_slot - # Reonstruct shard headers + # Reconstruct shard headers headers = [] proposers = [] shard_parent_root = state.shard_states[shard].latest_block_root @@ -582,6 +602,84 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr state.shard_states[shard].slot = state.slot - 1 ``` +###### `process_crosslink_for_shard` + +```python +def process_crosslink_for_shard(state: BeaconState, + shard: Shard, + shard_transition: ShardTransition, + attestations: Sequence[Attestation]) -> Hash: + committee = get_beacon_committee(state, get_current_epoch(state), shard) + online_indices = get_online_validator_indices(state) + + # Loop over all shard transition roots + shard_transition_roots = set([a.data.shard_transition_root for a in attestations]) + for shard_transition_root in sorted(shard_transition_roots): + transition_attestations = [a for a in attestations if a.data.shard_transition_root == shard_transition_root] + transition_participants: Set[ValidatorIndex] = set() + for attestation in transition_attestations: + participants = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) + transition_participants = transition_participants.union(participants) + + enough_online_stake = ( + get_total_balance(state, online_indices.intersection(transition_participants)) * 3 >= + get_total_balance(state, online_indices.intersection(committee)) * 2 + ) + # If not enough stake, try next transition root + if not enough_online_stake: + continue + + # Attestation <-> shard transition consistency + assert shard_transition_root == hash_tree_root(shard_transition) + assert ( + attestation.data.head_shard_root + == chunks_to_body_root(shard_transition.shard_data_roots[-1]) + ) + + # Apply transition + apply_shard_transition(state, shard, shard_transition) + # Apply proposer reward and cost + beacon_proposer_index = get_beacon_proposer_index(state) + estimated_attester_reward = 
sum([get_base_reward(state, attester) for attester in transition_participants]) + proposer_reward = Gwei(estimated_attester_reward // PROPOSER_REWARD_QUOTIENT) + increase_balance(state, beacon_proposer_index, proposer_reward) + states_slots_lengths = zip( + shard_transition.shard_states, + get_offset_slots(state, get_next_slot_for_shard(state, shard)), + shard_transition.shard_block_lengths + ) + for shard_state, slot, length in states_slots_lengths: + proposer_index = get_shard_proposer_index(state, shard, slot) + decrease_balance(state, proposer_index, shard_state.gasprice * length) + + # Return winning transition root + return shard_transition_root + + # No winning transition root, ensure empty and return empty root + assert shard_transition == ShardTransition() + return Hash() +``` + +###### `process_crosslinks` + +```python +def process_crosslinks(state: BeaconState, + block_body: BeaconBlockBody, + attestations: Sequence[Attestation]) -> Set[Tuple[Shard, Hash]]: + winners: Set[Tuple[Shard, Hash]] = set() + for shard in map(Shard, range(ACTIVE_SHARDS)): + # All attestations in the block for this shard + shard_attestations = [ + attestation for attestation in attestations + if get_shard(state, attestation) == shard and attestation.data.slot == state.slot + ] + shard_transition = block_body.shard_transitions[shard] + winning_root = process_crosslink_for_shard(state, shard, shard_transition, shard_attestations) + if winning_root != Hash(): + winners.add((shard, winning_root)) + return winners +``` + ###### `process_attestations` ```python @@ -589,72 +687,23 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest # Basic validation for attestation in attestations: validate_attestation(state, attestation) + # Process crosslinks - online_indices = get_online_validator_indices(state) - winners = set() - for shard in map(Shard, range(ACTIVE_SHARDS)): - success = False - # All attestations in the block for this shard - 
this_shard_attestations = [ - attestation for attestation in attestations - if get_shard(state, attestation) == shard and attestation.data.slot == state.slot - ] - # The committee for this shard - this_shard_committee = get_beacon_committee(state, get_current_epoch(state), shard) - # Loop over all shard transition roots - shard_transition_roots = set([a.data.shard_transition_root for a in this_shard_attestations]) - for shard_transition_root in sorted(shard_transition_roots): - all_participants: Set[ValidatorIndex] = set() - participating_attestations = [] - for attestation in this_shard_attestations: - participating_attestations.append(attestation) - if attestation.data.shard_transition_root == shard_transition_root: - participants = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) - all_participants = all_participants.union(participants) - if ( - get_total_balance(state, online_indices.intersection(all_participants)) * 3 >= - get_total_balance(state, online_indices.intersection(this_shard_committee)) * 2 - and success is False - ): - # Attestation <-> shard transition consistency - assert shard_transition_root == hash_tree_root(block_body.shard_transition) - assert ( - attestation.data.head_shard_root - == chunks_to_body_root(block_body.shard_transition.shard_data_roots[-1]) - ) - # Apply transition - apply_shard_transition(state, shard, block_body.shard_transition) - # Apply proposer reward and cost - beacon_proposer_index = get_beacon_proposer_index(state) - estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants]) - proposer_reward = Gwei(estimated_attester_reward // PROPOSER_REWARD_QUOTIENT) - increase_balance(state, beacon_proposer_index, proposer_reward) - states_slots_lengths = zip( - block_body.shard_transition.shard_states, - get_offset_slots(state, state.shard_next_slots[get_shard(state, attestation)]), - block_body.shard_transition.shard_block_lengths - ) - for shard_state, slot, 
length in states_slots_lengths: - proposer_index = get_shard_proposer_index(state, shard, slot) - decrease_balance(state, proposer_index, shard_state.gasprice * length) - winners.add((shard, shard_transition_root)) - success = True - if not success: - assert block_body.shard_transitions[shard] == ShardTransition() + winners = process_crosslinks(state, block_body, attestations) + + # Store pending attestations for epoch processing for attestation in attestations: - is_winning_transition = (get_shard(state, attestation), attestation.shard_transition_root) in winners + is_winning_transition = (get_shard(state, attestation), attestation.data.shard_transition_root) in winners pending_attestation = PendingAttestation( aggregation_bits=attestation.aggregation_bits, data=attestation.data, inclusion_delay=state.slot - attestation.data.slot, crosslink_success=is_winning_transition and attestation.data.slot == state.slot, - proposer_index=proposer_index + proposer_index=get_beacon_proposer_index(state), ) if attestation.data.target.epoch == get_current_epoch(state): - assert attestation.data.source == state.current_justified_checkpoint state.current_epoch_attestations.append(pending_attestation) else: - assert attestation.data.source == state.previous_justified_checkpoint state.previous_epoch_attestations.append(pending_attestation) ``` diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index a1e14ec35..1ec8acaa0 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -86,7 +86,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: # Phase 1 shard_states=List[ShardState, MAX_SHARDS]( ShardState( - slot=0, + slot=pre.slot, gasprice=INITIAL_GASPRICE, data=Root(), latest_block_root=Hash(), From 4bf5eb86c46b6899a45d06db502230901a338c29 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Fri, 22 Nov 2019 18:17:38 -0700 Subject: [PATCH 069/194] fix issues using indexed attestation and custody bit wrapper --- 
specs/core/1_beacon-chain.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 510ca8d5f..300d45fdf 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -272,7 +272,7 @@ class CompactCommittee(Container): ```python class AttestationCustodyBitWrapper(Container): - attestation_root: Hash + attestation_data_root: Hash block_index: uint64 bit: boolean ``` @@ -441,20 +441,20 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Attest """ Check if ``indexed_attestation`` has valid indices and signature. """ - # Verify aggregate signature all_pubkeys = [] all_message_hashes = [] - aggregation_bits = indexed_attestation.attestation.aggregation_bits + attestation = indexed_attestation.attestation + aggregation_bits = attestation.aggregation_bits assert len(aggregation_bits) == len(indexed_attestation.committee) - for i, custody_bits in enumerate(indexed_attestation.attestation.custody_bits): + for i, custody_bits in enumerate(attestation.custody_bits): assert len(custody_bits) == len(indexed_attestation.committee) for participant, abit, cbit in zip(indexed_attestation.committee, aggregation_bits, custody_bits): if abit: all_pubkeys.append(state.validators[participant].pubkey) # Note: only 2N distinct message hashes all_message_hashes.append(hash_tree_root( - AttestationCustodyBitWrapper(hash_tree_root(indexed_attestation.data), i, cbit) + AttestationCustodyBitWrapper(hash_tree_root(attestation.data), i, cbit) )) else: assert not cbit @@ -462,8 +462,8 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Attest return bls_verify_multiple( pubkeys=all_pubkeys, message_hashes=all_message_hashes, - signature=indexed_attestation.signature, - domain=get_domain(state, DOMAIN_BEACON_ATTESTER, indexed_attestation.data.target.epoch), + signature=attestation.signature, + domain=get_domain(state, 
DOMAIN_BEACON_ATTESTER, attestation.data.target.epoch), ) ``` From a32b0100ff338561b5f88425ab3eca8e1aaebf28 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 5 Dec 2019 12:36:48 -0700 Subject: [PATCH 070/194] rebase fixes --- scripts/build_spec.py | 4 +-- specs/core/1_beacon-chain.md | 60 ++++++++++++++++++------------------ specs/core/1_custody-game.md | 2 +- specs/core/1_fraud-proofs.md | 6 ++-- specs/core/1_phase1-fork.md | 4 +-- 5 files changed, 38 insertions(+), 38 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index 6dfc4e6b2..b88104140 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -165,10 +165,10 @@ def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, st ignored_dependencies = [ - 'bit', 'boolean', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', + 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', 'Bytes1', 'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', - 'bytes', 'byte', 'Bytes', 'BytesN' # to be removed after updating spec doc + 'bytes', 'byte', 'ByteList', 'ByteVector' ] diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 300d45fdf..6837987b1 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -43,7 +43,7 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | `TARGET_SHARD_BLOCK_SIZE` | `3 * 2**16` (= 196,608) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | -| `EMPTY_CHUNK_ROOT` | `hash_tree_root(BytesN[SHARD_BLOCK_CHUNK_SIZE]())` | | +| `EMPTY_CHUNK_ROOT` | `hash_tree_root(ByteList[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_GASPRICE` | `Gwei(2**14)` (= 16,384) | Gwei | | | `MIN_GASPRICE` | `Gwei(2**5)` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | @@ -62,14 +62,14 @@ class AttestationData(Container): slot: Slot index: CommitteeIndex # LMD GHOST vote - beacon_block_root: Hash + beacon_block_root: Root # FFG vote source: Checkpoint target: Checkpoint # Current-slot shard block root - head_shard_root: Hash + head_shard_root: Root # Shard transition root - shard_transition_root: Hash + shard_transition_root: Root ``` ### Extended `Attestation` @@ -98,7 +98,7 @@ class PendingAttestation(Container): ```python class Validator(Container): pubkey: BLSPubkey - withdrawal_credentials: Hash # Commitment to pubkey for withdrawals + withdrawal_credentials: Bytes32 # Commitment to pubkey for withdrawals effective_balance: Gwei # Balance at stake slashed: boolean # Status epochs @@ -147,8 +147,8 @@ Note that the `body` has a new `BeaconBlockBody` definition. 
```python class BeaconBlock(Container): slot: Slot - parent_root: Hash - state_root: Hash + parent_root: Root + state_root: Root body: BeaconBlockBody signature: BLSSignature ``` @@ -165,9 +165,9 @@ class BeaconState(Container): fork: Fork # History latest_block_header: BeaconBlockHeader - block_roots: Vector[Hash, SLOTS_PER_HISTORICAL_ROOT] - state_roots: Vector[Hash, SLOTS_PER_HISTORICAL_ROOT] - historical_roots: List[Hash, HISTORICAL_ROOTS_LIMIT] + block_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT] + state_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT] + historical_roots: List[Root, HISTORICAL_ROOTS_LIMIT] # Eth1 eth1_data: Eth1Data eth1_data_votes: List[Eth1Data, SLOTS_PER_ETH1_VOTING_PERIOD] @@ -176,7 +176,7 @@ class BeaconState(Container): validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT] # Randomness - randao_mixes: Vector[Hash, EPOCHS_PER_HISTORICAL_VECTOR] + randao_mixes: Vector[Root, EPOCHS_PER_HISTORICAL_VECTOR] # Slashings slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR] # Per-epoch sums of slashed effective balances # Attestations @@ -189,7 +189,7 @@ class BeaconState(Container): finalized_checkpoint: Checkpoint # Phase 1 shard_states: List[ShardState, MAX_SHARDS] - online_countdown: Bytes[VALIDATOR_REGISTRY_LIMIT] + online_countdown: ByteList[VALIDATOR_REGISTRY_LIMIT] current_light_committee: CompactCommittee next_light_committee: CompactCommittee # Custody game @@ -209,10 +209,10 @@ _Wrapper for being broadcasted over the network._ ```python class ShardBlockWrapper(Container): - shard_parent_root: Hash - beacon_parent_root: Hash + shard_parent_root: Root + beacon_parent_root: Root slot: Slot - body: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] + body: ByteList[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] signature: BLSSignature ``` @@ -220,10 +220,10 @@ class ShardBlockWrapper(Container): ```python class ShardSignableHeader(Container): - shard_parent_root: Hash - 
beacon_parent_root: Hash + shard_parent_root: Root + beacon_parent_root: Root slot: Slot - body_root: Hash + body_root: Root ``` ### `ShardState` @@ -232,8 +232,8 @@ class ShardSignableHeader(Container): class ShardState(Container): slot: Slot gasprice: Gwei - data: Hash - latest_block_root: Hash + data: Bytes32 + latest_block_root: Root ``` ### `ShardTransition` @@ -245,7 +245,7 @@ class ShardTransition(Container): # Shard block lengths shard_block_lengths: List[uint64, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Shard data roots - shard_data_roots: List[List[Hash, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] + shard_data_roots: List[List[Bytes32, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] # Intermediate shard states shard_states: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Proposer signature aggregate @@ -272,7 +272,7 @@ class CompactCommittee(Container): ```python class AttestationCustodyBitWrapper(Container): - attestation_data_root: Hash + attestation_data_root: Root block_index: uint64 bit: boolean ``` @@ -319,8 +319,8 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid #### `chunks_to_body_root` ```python -def chunks_to_body_root(chunks: List[Hash, MAX_SHARD_BLOCK_CHUNKS]) -> Hash: - return hash_tree_root(Vector[Hash, MAX_SHARD_BLOCK_CHUNKS]( +def chunks_to_body_root(chunks: List[Bytes32, MAX_SHARD_BLOCK_CHUNKS]) -> Root: + return hash_tree_root(Vector[Bytes32, MAX_SHARD_BLOCK_CHUNKS]( chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) )) ``` @@ -543,7 +543,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: # Type 2: delayed attestations else: assert state.slot - compute_start_slot_at_epoch(compute_epoch_at_slot(data.slot)) < SLOTS_PER_EPOCH - assert data.shard_transition_root == Hash() + assert data.shard_transition_root == Root() ``` ###### `apply_shard_transition` @@ -608,7 +608,7 @@ def apply_shard_transition(state: BeaconState, shard: 
Shard, transition: ShardTr def process_crosslink_for_shard(state: BeaconState, shard: Shard, shard_transition: ShardTransition, - attestations: Sequence[Attestation]) -> Hash: + attestations: Sequence[Attestation]) -> Root: committee = get_beacon_committee(state, get_current_epoch(state), shard) online_indices = get_online_validator_indices(state) @@ -657,7 +657,7 @@ def process_crosslink_for_shard(state: BeaconState, # No winning transition root, ensure empty and return empty root assert shard_transition == ShardTransition() - return Hash() + return Root() ``` ###### `process_crosslinks` @@ -665,8 +665,8 @@ def process_crosslink_for_shard(state: BeaconState, ```python def process_crosslinks(state: BeaconState, block_body: BeaconBlockBody, - attestations: Sequence[Attestation]) -> Set[Tuple[Shard, Hash]]: - winners: Set[Tuple[Shard, Hash]] = set() + attestations: Sequence[Attestation]) -> Set[Tuple[Shard, Root]]: + winners: Set[Tuple[Shard, Root]] = set() for shard in map(Shard, range(ACTIVE_SHARDS)): # All attestations in the block for this shard shard_attestations = [ @@ -675,7 +675,7 @@ def process_crosslinks(state: BeaconState, ] shard_transition = block_body.shard_transitions[shard] winning_root = process_crosslink_for_shard(state, shard, shard_transition, shard_attestations) - if winning_root != Hash(): + if winning_root != Root(): winners.add((shard, winning_root)) return winners ``` diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 61710cd5f..aa96b387f 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -100,7 +100,7 @@ class CustodySlashing(Container): whistleblower_index: ValidatorIndex shard_transition: ShardTransition attestation: Attestation - data: Bytes[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] + data: ByteList[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] signature: BLSSignature ``` diff --git a/specs/core/1_fraud-proofs.md b/specs/core/1_fraud-proofs.md index bd5cce117..8c424d259 
100644 --- a/specs/core/1_fraud-proofs.md +++ b/specs/core/1_fraud-proofs.md @@ -34,10 +34,10 @@ The proof verifies that one of the two conditions is false: ```python def shard_state_transition(shard: Shard, slot: Slot, - pre_state: Hash, - previous_beacon_root: Hash, + pre_state: Root, + previous_beacon_root: Root, proposer_pubkey: BLSPubkey, - block_data: BytesN[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Hash: + block_data: ByteVector[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Root: # We will add something more substantive in phase 2 return hash(pre_state + hash_tree_root(previous_beacon_root) + hash_tree_root(block_data)) ``` diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index 1ec8acaa0..0b5e2085b 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -89,10 +89,10 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: slot=pre.slot, gasprice=INITIAL_GASPRICE, data=Root(), - latest_block_root=Hash(), + latest_block_root=Root(), ) for i in range(ACTIVE_SHARDS) ), - online_countdown=Bytes[VALIDATOR_REGISTRY_LIMIT]( + online_countdown=ByteList[VALIDATOR_REGISTRY_LIMIT]( ONLINE_PERIOD for i in range(len(pre.validators)) ), current_light_committee=CompactCommittee(), # computed after state creation From e31f17f03700da5c5edf3a555c4951ee8ee1059f Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 5 Dec 2019 13:49:52 -0700 Subject: [PATCH 071/194] modify phase 1 to use new signature (no signing_root) pattern --- scripts/build_spec.py | 5 +---- specs/core/1_beacon-chain.md | 13 +++++++++++-- specs/core/1_custody-game.md | 13 +++++++++++-- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index b88104140..b1a8b3485 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -51,10 +51,7 @@ from dataclasses import ( field, ) -from eth2spec.utils.ssz.ssz_impl import ( - hash_tree_root, - is_zero, -) +from 
eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( BasicValue, Elements, BaseBytes, BaseList, SSZType, Container, List, Vector, ByteList, ByteVector, Bitlist, Bitvector, Bits, diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 6837987b1..3e88d01da 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -128,9 +128,9 @@ class BeaconBlockBody(Container): attestations: List[Attestation, MAX_ATTESTATIONS] # Entry & exit deposits: List[Deposit, MAX_DEPOSITS] - voluntary_exits: List[VoluntaryExit, MAX_VOLUNTARY_EXITS] + voluntary_exits: List[SignedVoluntaryExit, MAX_VOLUNTARY_EXITS] # Custody game - custody_slashings: List[CustodySlashing, MAX_CUSTODY_SLASHINGS] + custody_slashings: List[SignedCustodySlashing, MAX_CUSTODY_SLASHINGS] custody_key_reveals: List[CustodyKeyReveal, MAX_CUSTODY_KEY_REVEALS] early_derived_secret_reveals: List[EarlyDerivedSecretReveal, MAX_EARLY_DERIVED_SECRET_REVEALS] # Shards @@ -150,6 +150,15 @@ class BeaconBlock(Container): parent_root: Root state_root: Root body: BeaconBlockBody +``` + +#### Extended `SignedBeaconBlock` + +Note that the `message` has a new `BeaconBlock` definition. 
+ +```python +class SignedBeaconBlock(Container): + message: BeaconBlock signature: BLSSignature ``` diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index aa96b387f..60d63db03 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -101,9 +101,17 @@ class CustodySlashing(Container): shard_transition: ShardTransition attestation: Attestation data: ByteList[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] +``` + +#### `SignedCustodySlashing` + +```python +class SignedCustodySlashing(Container): + message: CustodySlashing signature: BLSSignature ``` + #### `CustodyKeyReveal` ```python @@ -347,7 +355,8 @@ def process_early_derived_secret_reveal(state: BeaconState, reveal: EarlyDerived #### Custody Slashings ```python -def process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashing) -> None: +def process_custody_slashing(state: BeaconState, signed_custody_slashing: SignedCustodySlashing) -> None: + custody_slashing = signed_custody_slashing.message attestation = custody_slashing.attestation # Any signed custody-slashing should result in at least one slashing. 
@@ -355,7 +364,7 @@ def process_custody_slashing(state: BeaconState, custody_slashing: CustodySlashi malefactor = state.validators[custody_slashing.malefactor_index] whistleblower = state.validators[custody_slashing.whistleblower_index] domain = get_domain(state, DOMAIN_CUSTODY_BIT_SLASHING, get_current_epoch(state)) - assert bls_verify(whistleblower.pubkey, signing_root(custody_slashing), custody_slashing.signature, domain) + assert bls_verify(whistleblower.pubkey, hash_tree_root(custody_slashing), signed_custody_slashing.signature, domain) # Verify that the whistleblower is slashable assert is_slashable_validator(whistleblower, get_current_epoch(state)) # Verify that the claimed malefactor is slashable From ce406660031c227e532f9bebec96fa4821824067 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 5 Dec 2019 15:06:32 -0700 Subject: [PATCH 072/194] version pyspec module and test on one attesterslashing test --- scripts/build_spec.py | 6 +- specs/core/1_beacon-chain.md | 78 ++++++++++++++++--- .../eth2spec/test/helpers/attestations.py | 14 +++- .../test_process_attester_slashing.py | 6 +- 4 files changed, 87 insertions(+), 17 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index b1a8b3485..f3a76817f 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -112,6 +112,7 @@ def objects_to_spec(functions: Dict[str, str], constants: Dict[str, str], ssz_objects: Dict[str, str], imports: Dict[str, str], + version: str, ) -> str: """ Given all the objects that constitute a spec, combine them into a single pyfile. 
@@ -137,6 +138,7 @@ def objects_to_spec(functions: Dict[str, str], ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values()) spec = ( imports + + '\n\n' + f"version = \'{version}\'\n" + '\n\n' + new_type_definitions + '\n' + SUNDRY_CONSTANTS_FUNCTIONS + '\n\n' + constants_spec @@ -229,7 +231,7 @@ def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str, for value in [fork_choice_spec, v_guide]: spec_objects = combine_spec_objects(spec_objects, value) dependency_order_spec(spec_objects) - spec = objects_to_spec(*spec_objects, PHASE0_IMPORTS) + spec = objects_to_spec(*spec_objects, PHASE0_IMPORTS, 'phase0') if outfile is not None: with open(outfile, 'w') as out: out.write(spec) @@ -258,7 +260,7 @@ def build_phase1_spec(phase0_beacon_sourcefile: str, for value in all_spescs[1:]: spec_objects = combine_spec_objects(spec_objects, value) dependency_order_spec(spec_objects) - spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS) + spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS, 'phase1') if outfile is not None: with open(outfile, 'w') as out: out.write(spec) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 3e88d01da..b0c0b5fbf 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -93,6 +93,24 @@ class PendingAttestation(Container): crosslink_success: boolean ``` +### `IndexedAttestation` + +```python +class IndexedAttestation(Container): + committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] + attestation: Attestation +``` + +#### Extended `AttesterSlashing` + +Note that the `attestation_1` and `attestation_2` have a new `IndexedAttestation` definition. 
+ +```python +class AttesterSlashing(Container): + attestation_1: IndexedAttestation + attestation_2: IndexedAttestation +``` + ### Extended `Validator` ```python @@ -261,14 +279,6 @@ class ShardTransition(Container): proposer_signature_aggregate: BLSSignature ``` -### `AttestationAndCommittee` - -```python -class AttestationAndCommittee(Container): - committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE] - attestation: Attestation -``` - ### `CompactCommittee` ```python @@ -390,9 +400,12 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque #### `get_indexed_attestation` ```python -def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> AttestationAndCommittee: +def get_indexed_attestation(beacon_state: BeaconState, attestation: Attestation) -> IndexedAttestation: committee = get_beacon_committee(beacon_state, attestation.data.slot, attestation.data.index) - return AttestationAndCommittee(committee, attestation) + return IndexedAttestation( + committee=committee, + attestation=attestation, + ) ``` #### `get_updated_gasprice` @@ -446,7 +459,7 @@ def get_offset_slots(state: BeaconState, start_slot: Slot) -> Sequence[Slot]: Note that this replaces the Phase 0 `is_valid_indexed_attestation`. ```python -def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: AttestationAndCommittee) -> bool: +def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: IndexedAttestation) -> bool: """ Check if ``indexed_attestation`` has valid indices and signature. 
""" @@ -467,7 +480,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Attest )) else: assert not cbit - + return bls_verify_multiple( pubkeys=all_pubkeys, message_hashes=all_message_hashes, @@ -716,6 +729,47 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest state.previous_epoch_attestations.append(pending_attestation) ``` +##### New Attester slashing processing + +```python +def get_indices_from_committee( + committee: List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE], + bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE]) -> List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE]: + assert len(bits) == len(committee) + return List[ValidatorIndex, MAX_VALIDATORS_PER_COMMITTEE]( + [validator_index for i, validator_index in enumerate(committee) if bits[i]] + ) +``` + +```python +def process_attester_slashing(state: BeaconState, attester_slashing: AttesterSlashing) -> None: + indexed_attestation_1 = attester_slashing.attestation_1 + indexed_attestation_2 = attester_slashing.attestation_2 + assert is_slashable_attestation_data( + indexed_attestation_1.attestation.data, + indexed_attestation_2.attestation.data, + ) + assert is_valid_indexed_attestation(state, indexed_attestation_1) + assert is_valid_indexed_attestation(state, indexed_attestation_2) + + indices_1 = get_indices_from_committee( + indexed_attestation_1.committee, + indexed_attestation_1.attestation.aggregation_bits, + ) + indices_2 = get_indices_from_committee( + indexed_attestation_2.committee, + indexed_attestation_2.attestation.aggregation_bits, + ) + + slashed_any = False + indices = set(indices_1).intersection(indices_2) + for index in sorted(indices): + if is_slashable_validator(state.validators[index], get_current_epoch(state)): + slash_validator(state, index) + slashed_any = True + assert slashed_any +``` + #### Shard transition false positives ```python diff --git a/test_libs/pyspec/eth2spec/test/helpers/attestations.py 
b/test_libs/pyspec/eth2spec/test/helpers/attestations.py index 3ed54888f..0c64a0316 100644 --- a/test_libs/pyspec/eth2spec/test/helpers/attestations.py +++ b/test_libs/pyspec/eth2spec/test/helpers/attestations.py @@ -82,8 +82,18 @@ def sign_aggregate_attestation(spec, state, attestation_data, participants: List def sign_indexed_attestation(spec, state, indexed_attestation): - participants = indexed_attestation.attesting_indices - indexed_attestation.signature = sign_aggregate_attestation(spec, state, indexed_attestation.data, participants) + if spec.version == 'phase0': + participants = indexed_attestation.attesting_indices + data = indexed_attestation.data + indexed_attestation.signature = sign_aggregate_attestation(spec, state, data, participants) + else: + participants = spec.get_indices_from_committee( + indexed_attestation.committee, + indexed_attestation.attestation.aggregation_bits, + ) + data = indexed_attestation.attestation.data + indexed_attestation.attestation.signature = sign_aggregate_attestation(spec, state, data, participants) + def sign_attestation(spec, state, attestation): diff --git a/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py b/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py index 85e807ec0..9a227625a 100644 --- a/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py +++ b/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py @@ -169,7 +169,11 @@ def test_same_data(spec, state): def test_no_double_or_surround(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) - attester_slashing.attestation_1.data.target.epoch += 1 + if spec.version == 'phase0': + attester_slashing.attestation_1.data.target.epoch += 1 + else: + attester_slashing.attestation_1.attestation.data.target.epoch += 1 + sign_indexed_attestation(spec, state, 
attester_slashing.attestation_1) yield from run_attester_slashing_processing(spec, state, attester_slashing, False) From 0a3cc8a84a6c834242d90a7cf4696ca7d5725598 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Fri, 3 Jan 2020 07:29:23 -0700 Subject: [PATCH 073/194] malefactor_key -> malefactor_secret --- specs/core/1_custody-game.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 60d63db03..c92af627b 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -96,7 +96,7 @@ class CustodySlashing(Container): # (Attestation.data.shard_transition_root as ShardTransition).shard_data_roots[data_index] is the root of the data. data_index: uint64 malefactor_index: ValidatorIndex - malefactor_key: BLSSignature + malefactor_secret: BLSSignature whistleblower_index: ValidatorIndex shard_transition: ShardTransition attestation: Attestation @@ -393,7 +393,7 @@ def process_custody_slashing(state: BeaconState, signed_custody_slashing: Signed custody_slashing.malefactor_index, ) domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign) - assert bls_verify(malefactor.pubkey, hash_tree_root(epoch_to_sign), custody_slashing.malefactor_key, domain) + assert bls_verify(malefactor.pubkey, hash_tree_root(epoch_to_sign), custody_slashing.malefactor_secret, domain) # Get the custody bit custody_bits = attestation.custody_bits[custody_slashing.data_index] @@ -401,7 +401,7 @@ def process_custody_slashing(state: BeaconState, signed_custody_slashing: Signed claimed_custody_bit = custody_bits[committee.index(custody_slashing.malefactor_index)] # Compute the custody bit - computed_custody_bit = compute_custody_bit(custody_slashing.malefactor_key, custody_slashing.data) + computed_custody_bit = compute_custody_bit(custody_slashing.malefactor_secret, custody_slashing.data) # Verify the claim if claimed_custody_bit != computed_custody_bit: From 13cdfa8edb748d2d108fcb8bafdcb7e73888b63b Mon 
Sep 17 00:00:00 2001 From: protolambda Date: Sat, 4 Jan 2020 18:33:15 +0100 Subject: [PATCH 074/194] apply configs in tests properly --- test_libs/pyspec/eth2spec/config/apply_config.py | 3 +++ test_libs/pyspec/eth2spec/test/conftest.py | 3 +++ test_libs/pyspec/eth2spec/test/context.py | 6 ++++-- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/test_libs/pyspec/eth2spec/config/apply_config.py b/test_libs/pyspec/eth2spec/config/apply_config.py index 768abba64..2f0ce5902 100644 --- a/test_libs/pyspec/eth2spec/config/apply_config.py +++ b/test_libs/pyspec/eth2spec/config/apply_config.py @@ -5,6 +5,7 @@ presets: Dict[str, Any] = {} # Access to overwrite spec constants based on configuration +# This is called by the spec module after declaring its globals, and applies the loaded presets. def apply_constants_preset(spec_globals: Dict[str, Any]) -> None: global presets for k, v in presets.items(): @@ -14,6 +15,8 @@ def apply_constants_preset(spec_globals: Dict[str, Any]) -> None: spec_globals[k] = v +# Load presets from a file. This does not apply the presets. +# To apply the presets, reload the spec module (it will re-initialize with the presets taken from here). def load_presets(configs_path, config_name): global presets presets = loader.load_presets(configs_path, config_name) diff --git a/test_libs/pyspec/eth2spec/test/conftest.py b/test_libs/pyspec/eth2spec/test/conftest.py index a0a200f0b..35ffb3cb8 100644 --- a/test_libs/pyspec/eth2spec/test/conftest.py +++ b/test_libs/pyspec/eth2spec/test/conftest.py @@ -1,4 +1,5 @@ from eth2spec.config import apply_config +from eth2spec.test.context import reload_specs # We import pytest only when it's present, i.e. when we are running tests. # The test-cases themselves can be generated without installing pytest. 
@@ -33,3 +34,5 @@ def pytest_addoption(parser): def config(request): config_name = request.config.getoption("--config") apply_config.load_presets('../../configs/', config_name) + # now that the presets are loaded, reload the specs to apply them + reload_specs() diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py index 0fb46aa50..195d1e5fa 100644 --- a/test_libs/pyspec/eth2spec/test/context.py +++ b/test_libs/pyspec/eth2spec/test/context.py @@ -10,8 +10,10 @@ from typing import Any, Callable, Sequence from importlib import reload -reload(spec_phase0) -reload(spec_phase1) + +def reload_specs(): + reload(spec_phase0) + reload(spec_phase1) def with_custom_state(balances_fn: Callable[[Any], Sequence[int]], From 018927def0b1e0cd27bfb42a3998a9031bad9249 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 20:11:55 +0100 Subject: [PATCH 075/194] generalize previous-slot function --- specs/core/1_beacon-chain.md | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index e44dcfcb7..a4b79df38 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -29,11 +29,11 @@ - [Crypto](#crypto) - [`bls_verify_multiple`](#bls_verify_multiple) - [Misc](#misc-1) + - [`get_previous_slot`](#get_previous_slot) - [`pack_compact_validator`](#pack_compact_validator) - [`committee_to_compact_committee`](#committee_to_compact_committee) - [`chunks_to_body_root`](#chunks_to_body_root) - [Beacon state accessors](#beacon-state-accessors) - - [`get_previous_slot`](#get_previous_slot) - [`get_online_validator_indices`](#get_online_validator_indices) - [`get_shard_committee`](#get_shard_committee) - [`get_shard_proposer_index`](#get_shard_proposer_index) @@ -98,7 +98,6 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | Name | Value | Unit | Duration | | - | - | - | - | | `MAX_SHARDS` | `2**10` (= 1024) | -| `ACTIVE_SHARDS` | `2**6` (= 64) | | `ONLINE_PERIOD` | `Epoch(2**3)` (= 8) | epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | @@ -372,6 +371,16 @@ class AttestationCustodyBitWrapper(Container): ### Misc +#### `get_previous_slot` + +```python +def get_previous_slot(slot: Slot) -> Slot: + if slot > 0: + return Slot(slot - 1) + else: + return Slot(0) +``` + #### `pack_compact_validator` ```python @@ -411,16 +420,6 @@ def chunks_to_body_root(chunks: List[Bytes32, MAX_SHARD_BLOCK_CHUNKS]) -> Root: ### Beacon state accessors -#### `get_previous_slot` - -```python -def get_previous_slot(state: BeaconState) -> Slot: - if state.slot > 0: - return Slot(state.slot - 1) - else: - return Slot(0) -``` - #### `get_online_validator_indices` ```python @@ -626,7 +625,7 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: # Correct data root count assert len(attestation.custody_bits) == len(get_offset_slots(state, shard_start_slot)) # Correct parent block root - assert data.beacon_block_root == get_block_root_at_slot(state, get_previous_slot(state)) + assert data.beacon_block_root == get_block_root_at_slot(state, get_previous_slot(state.slot)) # Type 2: delayed attestations else: assert state.slot - compute_start_slot_at_epoch(compute_epoch_at_slot(data.slot)) < SLOTS_PER_EPOCH @@ -658,7 +657,7 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr if any(transition.shard_data_roots): headers.append(ShardSignableHeader( shard_parent_root=shard_parent_root, - parent_hash=get_block_root_at_slot(state, get_previous_slot(state)), + parent_hash=get_block_root_at_slot(state, get_previous_slot(state.slot)), slot=offset_slots[i], body_root=chunks_to_body_root(transition.shard_data_roots[i]) )) @@ -863,7 
+862,7 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB assert bls_verify( pubkey=bls_aggregate_pubkeys(signer_keys), - message_hash=get_block_root_at_slot(state, get_previous_slot(state)), + message_hash=get_block_root_at_slot(state, get_previous_slot(state.slot)), signature=block_body.light_client_signature, domain=DOMAIN_LIGHT_CLIENT ) From a8276f683ef31c00ef8db897859783b7c660cb9b Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 20:20:20 +0100 Subject: [PATCH 076/194] don't hardcode shard count, bad example, need upgradeability --- specs/core/1_beacon-chain.md | 22 ++++++++++++++++------ specs/core/1_phase1-fork.md | 3 ++- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index a4b79df38..118689cf0 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -34,6 +34,7 @@ - [`committee_to_compact_committee`](#committee_to_compact_committee) - [`chunks_to_body_root`](#chunks_to_body_root) - [Beacon state accessors](#beacon-state-accessors) + - [`get_active_shard_count`](#get_active_shard_count) - [`get_online_validator_indices`](#get_online_validator_indices) - [`get_shard_committee`](#get_shard_committee) - [`get_shard_proposer_index`](#get_shard_proposer_index) @@ -420,6 +421,13 @@ def chunks_to_body_root(chunks: List[Bytes32, MAX_SHARD_BLOCK_CHUNKS]) -> Root: ### Beacon state accessors +#### `get_active_shard_count` + +```python +def get_active_shard_count(state: BeaconState) -> uint64: + return len(state.shard_states) # May adapt in the future, or change over time. 
+``` + #### `get_online_validator_indices` ```python @@ -437,7 +445,7 @@ def get_shard_committee(beacon_state: BeaconState, epoch: Epoch, shard: Shard) - source_epoch -= SHARD_COMMITTEE_PERIOD active_validator_indices = get_active_validator_indices(beacon_state, source_epoch) seed = get_seed(beacon_state, source_epoch, DOMAIN_SHARD_COMMITTEE) - return compute_committee(active_validator_indices, seed, shard, ACTIVE_SHARDS) + return compute_committee(active_validator_indices, seed, shard, get_active_shard_count(beacon_state)) ``` #### `get_shard_proposer_index` @@ -458,7 +466,8 @@ def get_light_client_committee(beacon_state: BeaconState, epoch: Epoch) -> Seque source_epoch -= LIGHT_CLIENT_COMMITTEE_PERIOD active_validator_indices = get_active_validator_indices(beacon_state, source_epoch) seed = get_seed(beacon_state, source_epoch, DOMAIN_LIGHT_CLIENT) - return compute_committee(active_validator_indices, seed, 0, ACTIVE_SHARDS)[:TARGET_COMMITTEE_SIZE] + active_shards = get_active_shard_count(beacon_state) + return compute_committee(active_validator_indices, seed, 0, active_shards)[:TARGET_COMMITTEE_SIZE] ``` #### `get_indexed_attestation` @@ -498,7 +507,8 @@ def get_start_shard(state: BeaconState, slot: Slot) -> Shard: ```python def get_shard(state: BeaconState, attestation: Attestation) -> Shard: - return Shard((attestation.data.index + get_start_shard(state, attestation.data.slot)) % ACTIVE_SHARDS) + active_shards = get_active_shard_count(state) + return Shard((attestation.data.index + get_start_shard(state, attestation.data.slot)) % active_shards) ``` #### `get_next_slot_for_shard` @@ -601,7 +611,7 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: def validate_attestation(state: BeaconState, attestation: Attestation) -> None: data = attestation.data assert data.index < get_committee_count_at_slot(state, data.slot) - assert data.index < ACTIVE_SHARDS + assert data.index < get_active_shard_count(state) assert data.target.epoch in 
(get_previous_epoch(state), get_current_epoch(state)) assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot <= data.slot + SLOTS_PER_EPOCH @@ -753,7 +763,7 @@ def process_crosslinks(state: BeaconState, block_body: BeaconBlockBody, attestations: Sequence[Attestation]) -> Set[Tuple[Shard, Root]]: winners: Set[Tuple[Shard, Root]] = set() - for shard in map(Shard, range(ACTIVE_SHARDS)): + for shard in range(get_active_shard_count(state)): # All attestations in the block for this shard shard_attestations = [ attestation for attestation in attestations @@ -839,7 +849,7 @@ def process_attester_slashing(state: BeaconState, attester_slashing: AttesterSla ```python def verify_shard_transition_false_positives(state: BeaconState, block_body: BeaconBlockBody) -> None: # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it - for shard in range(ACTIVE_SHARDS): + for shard in range(get_active_shard_count(state)): if state.shard_states[shard].slot != state.slot - 1: assert block_body.shard_transition[shard] == ShardTransition() ``` diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index 6d87a30b6..1b3fddcd8 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -35,6 +35,7 @@ TODO: very unstable/experimental. PLACEHOLDER. 
| Name | Value | Unit | | - | - | - | | `PHASE_1_FORK_VERSION` | `0x00000001` | `Version` | +| `INITIAL_ACTIVE_SHARDS` | `2**6` (= 64) | `uint64` | | `INITIAL_GASPRICE` | `10` | `Gwei` | ## Fork to Phase 1 @@ -104,7 +105,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: gasprice=INITIAL_GASPRICE, data=Root(), latest_block_root=Root(), - ) for i in range(ACTIVE_SHARDS) + ) for i in range(INITIAL_ACTIVE_SHARDS) ), online_countdown=ByteList[VALIDATOR_REGISTRY_LIMIT]( ONLINE_PERIOD for i in range(len(pre.validators)) From 11011f2544336327aecebbc891ebc01fc0a482b7 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 23:37:05 +0100 Subject: [PATCH 077/194] test context can handle multiple phases better now --- test_libs/pyspec/eth2spec/test/context.py | 84 ++++++++++++++++++----- 1 file changed, 66 insertions(+), 18 deletions(-) diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py index 195d1e5fa..650f1ec81 100644 --- a/test_libs/pyspec/eth2spec/test/context.py +++ b/test_libs/pyspec/eth2spec/test/context.py @@ -6,7 +6,7 @@ from .helpers.genesis import create_genesis_state from .utils import vector_test, with_meta_tags -from typing import Any, Callable, Sequence +from typing import Any, Callable, Sequence, TypedDict, Protocol from importlib import reload @@ -16,21 +16,48 @@ def reload_specs(): reload(spec_phase1) +# Some of the Spec module functionality is exposed here to deal with phase-specific changes. + +# TODO: currently phases are defined as python modules. +# It would be better if they would be more well-defined interfaces for stronger typing. +class Spec(Protocol): + version: str + + +class Phase0(Spec): + ... + + +class Phase1(Spec): + def upgrade_to_phase1(self, state: spec_phase0.BeaconState) -> spec_phase1.BeaconState: ... + + +# add transfer, bridge, etc. 
as the spec evolves +class SpecForks(TypedDict, total=False): + phase0: Phase0 + phase1: Phase1 + + def with_custom_state(balances_fn: Callable[[Any], Sequence[int]], threshold_fn: Callable[[Any], int]): def deco(fn): - def entry(*args, **kw): + def entry(*args, spec: Spec, phases: SpecForks, **kw): try: - spec = kw['spec'] + p0 = phases["phase0"] + balances = balances_fn(p0) + activation_threshold = threshold_fn(p0) - balances = balances_fn(spec) - activation_threshold = threshold_fn(spec) + state = create_genesis_state(spec=p0, validator_balances=balances, + activation_threshold=activation_threshold) + if spec.version == 'phase1': + # TODO: instead of upgrading a test phase0 genesis state we can also write a phase1 state helper. + # Decide based on performance/consistency results later. + state = phases["phase1"].upgrade_to_phase1(state) - kw['state'] = create_genesis_state(spec=spec, validator_balances=balances, - activation_threshold=activation_threshold) + kw['state'] = state except KeyError: raise TypeError('Spec decorator must come within state decorator to inject spec into state.') - return fn(*args, **kw) + return fn(*args, spec=spec, phases=phases, **kw) return entry return deco @@ -76,6 +103,19 @@ def misc_balances(spec): return [spec.MAX_EFFECTIVE_BALANCE] * num_validators + [spec.MIN_DEPOSIT_AMOUNT] * num_misc_validators +def single_phase(fn): + """ + Decorator that filters out the phases data. + most state tests only focus on behavior of a single phase (the "spec"). + This decorator is applied as part of spec_state_test(fn). + """ + def entry(*args, **kw): + if 'phases' in kw: + kw.pop('phases') + fn(*args, **kw) + return entry + + # BLS is turned off by default *for performance purposes during TESTING*. # The runner of the test can indicate the preferred setting (test generators prefer BLS to be ON). # - Some tests are marked as BLS-requiring, and ignore this setting. 
@@ -95,9 +135,9 @@ def spec_test(fn): return vector_test()(bls_switch(fn)) -# shorthand for decorating @spectest() @with_state +# shorthand for decorating @spectest() @with_state @single_phase def spec_state_test(fn): - return spec_test(with_state(fn)) + return spec_test(with_state(single_phase(fn))) def expect_assertion_error(fn): @@ -176,15 +216,12 @@ def with_all_phases_except(exclusion_phases): return decorator -def with_phases(phases): +def with_phases(phases, other_phases=None): """ - Decorator factory that returns a decorator that runs a test for the appropriate phases + Decorator factory that returns a decorator that runs a test for the appropriate phases. + Additional phases that do not initially run, but are made available through the test, are optional. """ def decorator(fn): - def run_with_spec_version(spec, *args, **kw): - kw['spec'] = spec - return fn(*args, **kw) - def wrapper(*args, **kw): run_phases = phases @@ -195,10 +232,21 @@ def with_phases(phases): return run_phases = [phase] + available_phases = set(run_phases) + if other_phases is not None: + available_phases += set(other_phases) + + phase_dir = {} + if 'phase0' in available_phases: + phase_dir['phase0'] = spec_phase0 + if 'phase1' in available_phases: + phase_dir['phase1'] = spec_phase1 + + # return is ignored whenever multiple phases are ran. 
If if 'phase0' in run_phases: - ret = run_with_spec_version(spec_phase0, *args, **kw) + ret = fn(spec=spec_phase0, phases=phase_dir, *args, **kw) if 'phase1' in run_phases: - ret = run_with_spec_version(spec_phase1, *args, **kw) + ret = fn(spec=spec_phase1, phases=phase_dir, *args, **kw) return ret return wrapper return decorator From 2e6090fcd8d47ddbb18eccdb96bb89002270fadf Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 23:37:42 +0100 Subject: [PATCH 078/194] bugfixes for experimental phase0 -> phase1 fork py code --- specs/core/1_phase1-fork.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index 1b3fddcd8..97ad3252b 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -30,13 +30,13 @@ This document describes the process of moving from Phase 0 to Phase 1 of Ethereu ## Configuration -TODO: very unstable/experimental. PLACEHOLDER. +Warning: this configuration is not definitive. 
-| Name | Value | Unit | +| Name | Value | | - | - | - | -| `PHASE_1_FORK_VERSION` | `0x00000001` | `Version` | -| `INITIAL_ACTIVE_SHARDS` | `2**6` (= 64) | `uint64` | -| `INITIAL_GASPRICE` | `10` | `Gwei` | +| `PHASE_1_FORK_VERSION` | `Version('0x00000001')` | +| `INITIAL_ACTIVE_SHARDS` | `2**6` (= 64) | +| `INITIAL_GASPRICE` | `Gwei(10)` | ## Fork to Phase 1 @@ -55,7 +55,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: genesis_time=pre.genesis_time, slot=pre.slot, fork=Fork( - previous_version=pre.current_version, + previous_version=pre.fork.current_version, current_version=PHASE_1_FORK_VERSION, epoch=epoch, ), @@ -114,10 +114,10 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: next_light_committee=CompactCommittee(), # Custody game custody_challenge_index=0, - exposed_derived_secrets=Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH], - EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]() + # exposed_derived_secrets will fully default to zeroes ) - post.current_light_committee = committee_to_compact_committee(post, get_light_client_committee(post, post.epoch)) - post.next_light_committee = committee_to_compact_committee(post, get_light_client_committee(post, post.epoch + 1)) + epoch = get_current_epoch(post) + post.current_light_committee = committee_to_compact_committee(post, get_light_client_committee(post, epoch)) + post.next_light_committee = committee_to_compact_committee(post, get_light_client_committee(post, epoch + 1)) return post ``` From 3995bd633c42201bf5656ab6190169118d35dd19 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 23:38:06 +0100 Subject: [PATCH 079/194] coerce container types, since classes do not match between phases --- test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py b/test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py index 
4ddff1b5e..823a4a8d5 100644 --- a/test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py +++ b/test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py @@ -119,6 +119,8 @@ def coerce_type_maybe(v, typ: SSZType, strict: bool = False): return typ(v) elif isinstance(v, GeneratorType): return typ(v) + elif issubclass(typ, Container) and not isinstance(v, typ): + return typ(**{field_name: getattr(v, field_name) for field_name in typ.get_field_names()}) # just return as-is, Value-checkers will take care of it not being coerced, if we are not strict. if strict and not isinstance(v, typ): @@ -192,7 +194,7 @@ class Container(Series, metaclass=SSZType): return dict(cls.__annotations__) @classmethod - def get_field_names(cls) -> Iterable[SSZType]: + def get_field_names(cls) -> Iterable[str]: if not hasattr(cls, '__annotations__'): # no container fields return () return list(cls.__annotations__.keys()) From 752eeecaf4f065a87c9079ec6dfd0dc48419d6d3 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 23:44:20 +0100 Subject: [PATCH 080/194] old challenge based custody game tests are not relevant anymore --- .../test_process_bit_challenge.py | 350 ------------------ 1 file changed, 350 deletions(-) delete mode 100644 test_libs/pyspec/eth2spec/test/phase_1/block_processing/test_process_bit_challenge.py diff --git a/test_libs/pyspec/eth2spec/test/phase_1/block_processing/test_process_bit_challenge.py b/test_libs/pyspec/eth2spec/test/phase_1/block_processing/test_process_bit_challenge.py deleted file mode 100644 index ae6ff258c..000000000 --- a/test_libs/pyspec/eth2spec/test/phase_1/block_processing/test_process_bit_challenge.py +++ /dev/null @@ -1,350 +0,0 @@ -from eth2spec.test.helpers.custody import ( - get_valid_bit_challenge, - get_valid_custody_response, - get_custody_test_vector, - get_custody_merkle_root -) -from eth2spec.test.helpers.attestations import ( - get_valid_attestation, -) -from eth2spec.utils.ssz.ssz_impl import hash_tree_root -from eth2spec.test.helpers.state 
import next_epoch, get_balance -from eth2spec.test.helpers.block import apply_empty_block -from eth2spec.test.context import ( - with_all_phases_except, - spec_state_test, - expect_assertion_error, -) -from eth2spec.test.phase_0.block_processing.test_process_attestation import run_attestation_processing - - -def run_bit_challenge_processing(spec, state, custody_bit_challenge, valid=True): - """ - Run ``process_bit_challenge``, yielding: - - pre-state ('pre') - - CustodyBitChallenge ('custody_bit_challenge') - - post-state ('post'). - If ``valid == False``, run expecting ``AssertionError`` - """ - yield 'pre', state - yield 'custody_bit_challenge', custody_bit_challenge - - if not valid: - expect_assertion_error(lambda: spec.process_bit_challenge(state, custody_bit_challenge)) - yield 'post', None - return - - spec.process_bit_challenge(state, custody_bit_challenge) - - assert state.custody_bit_challenge_records[state.custody_challenge_index - 1].chunk_bits_merkle_root == \ - hash_tree_root(custody_bit_challenge.chunk_bits) - assert state.custody_bit_challenge_records[state.custody_challenge_index - 1].challenger_index == \ - custody_bit_challenge.challenger_index - assert state.custody_bit_challenge_records[state.custody_challenge_index - 1].responder_index == \ - custody_bit_challenge.responder_index - - yield 'post', state - - -def run_custody_response_processing(spec, state, custody_response, valid=True): - """ - Run ``process_bit_challenge_response``, yielding: - - pre-state ('pre') - - CustodyResponse ('custody_response') - - post-state ('post'). 
- If ``valid == False``, run expecting ``AssertionError`` - """ - yield 'pre', state - yield 'custody_response', custody_response - - if not valid: - expect_assertion_error(lambda: spec.process_custody_response(state, custody_response)) - yield 'post', None - return - - # TODO: Add capability to also process chunk challenges, not only bit challenges - challenge = state.custody_bit_challenge_records[custody_response.challenge_index] - pre_slashed_balance = get_balance(state, challenge.challenger_index) - - spec.process_custody_response(state, custody_response) - - slashed_validator = state.validators[challenge.challenger_index] - - assert slashed_validator.slashed - assert slashed_validator.exit_epoch < spec.FAR_FUTURE_EPOCH - assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH - - assert get_balance(state, challenge.challenger_index) < pre_slashed_balance - yield 'post', state - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_challenge_appended(spec, state): - state.slot = spec.SLOTS_PER_EPOCH - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - _, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * spec.EPOCHS_PER_CUSTODY_PERIOD - - challenge = get_valid_bit_challenge(spec, state, attestation) - - yield from run_bit_challenge_processing(spec, state, challenge) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_multiple_epochs_custody(spec, state): - state.slot = spec.SLOTS_PER_EPOCH * 3 - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - 
spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - _, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * (spec.EPOCHS_PER_CUSTODY_PERIOD - 1) - - challenge = get_valid_bit_challenge(spec, state, attestation) - - yield from run_bit_challenge_processing(spec, state, challenge) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_many_epochs_custody(spec, state): - state.slot = spec.SLOTS_PER_EPOCH * 100 - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - _, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * (spec.EPOCHS_PER_CUSTODY_PERIOD - 1) - - challenge = get_valid_bit_challenge(spec, state, attestation) - - yield from run_bit_challenge_processing(spec, state, challenge) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_off_chain_attestation(spec, state): - state.slot = spec.SLOTS_PER_EPOCH - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - state.slot += spec.SLOTS_PER_EPOCH * spec.EPOCHS_PER_CUSTODY_PERIOD - - challenge = 
get_valid_bit_challenge(spec, state, attestation) - - yield from run_bit_challenge_processing(spec, state, challenge) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_invalid_custody_bit_challenge(spec, state): - state.slot = spec.SLOTS_PER_EPOCH - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - _, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * spec.EPOCHS_PER_CUSTODY_PERIOD - - challenge = get_valid_bit_challenge(spec, state, attestation, invalid_custody_bit=True) - - yield from run_bit_challenge_processing(spec, state, challenge, valid=False) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_max_reveal_lateness_1(spec, state): - next_epoch(spec, state) - apply_empty_block(spec, state) - - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - next_epoch(spec, state) - apply_empty_block(spec, state) - - _, _, _ = run_attestation_processing(spec, state, attestation) - - challenge = get_valid_bit_challenge(spec, state, attestation) - - responder_index = challenge.responder_index - target_epoch = attestation.data.target.epoch - - state.validators[responder_index].max_reveal_lateness = 3 - - latest_reveal_epoch = spec.get_randao_epoch_for_custody_period( - spec.get_custody_period_for_validator(state, responder_index, target_epoch), - responder_index - ) + 2 * 
spec.EPOCHS_PER_CUSTODY_PERIOD + state.validators[responder_index].max_reveal_lateness - - while spec.get_current_epoch(state) < latest_reveal_epoch - 2: - next_epoch(spec, state) - apply_empty_block(spec, state) - - yield from run_bit_challenge_processing(spec, state, challenge) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_max_reveal_lateness_2(spec, state): - next_epoch(spec, state) - apply_empty_block(spec, state) - - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - next_epoch(spec, state) - apply_empty_block(spec, state) - - _, _, _ = run_attestation_processing(spec, state, attestation) - - challenge = get_valid_bit_challenge(spec, state, attestation) - - responder_index = challenge.responder_index - - state.validators[responder_index].max_reveal_lateness = 3 - - for i in range(spec.get_randao_epoch_for_custody_period( - spec.get_custody_period_for_validator(state, responder_index), - responder_index - ) + 2 * spec.EPOCHS_PER_CUSTODY_PERIOD + state.validators[responder_index].max_reveal_lateness - 1): - next_epoch(spec, state) - apply_empty_block(spec, state) - - yield from run_bit_challenge_processing(spec, state, challenge, False) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_custody_response(spec, state): - state.slot = spec.SLOTS_PER_EPOCH - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - 
_, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * spec.EPOCHS_PER_CUSTODY_PERIOD - - challenge = get_valid_bit_challenge(spec, state, attestation) - - _, _, _ = run_bit_challenge_processing(spec, state, challenge) - - bit_challenge_index = state.custody_challenge_index - 1 - - custody_response = get_valid_custody_response(spec, state, challenge, test_vector, bit_challenge_index) - - yield from run_custody_response_processing(spec, state, custody_response) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_custody_response_multiple_epochs(spec, state): - state.slot = spec.SLOTS_PER_EPOCH * 3 - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - _, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * spec.EPOCHS_PER_CUSTODY_PERIOD - - challenge = get_valid_bit_challenge(spec, state, attestation) - - _, _, _ = run_bit_challenge_processing(spec, state, challenge) - - bit_challenge_index = state.custody_challenge_index - 1 - - custody_response = get_valid_custody_response(spec, state, challenge, test_vector, bit_challenge_index) - - yield from run_custody_response_processing(spec, state, custody_response) - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_custody_response_many_epochs(spec, state): - state.slot = spec.SLOTS_PER_EPOCH * 100 - attestation = get_valid_attestation(spec, state, signed=True) - - test_vector = get_custody_test_vector( - spec.get_custody_chunk_count(attestation.data.crosslink) * spec.BYTES_PER_CUSTODY_CHUNK) - shard_root = get_custody_merkle_root(test_vector) - 
attestation.data.crosslink.data_root = shard_root - attestation.custody_bits[0] = 0 - - state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY - - _, _, _ = run_attestation_processing(spec, state, attestation) - - state.slot += spec.SLOTS_PER_EPOCH * spec.EPOCHS_PER_CUSTODY_PERIOD - - challenge = get_valid_bit_challenge(spec, state, attestation) - - _, _, _ = run_bit_challenge_processing(spec, state, challenge) - - bit_challenge_index = state.custody_challenge_index - 1 - - custody_response = get_valid_custody_response(spec, state, challenge, test_vector, bit_challenge_index) - - yield from run_custody_response_processing(spec, state, custody_response) From bf86a716ee6b4ce1d01011dbafad943f0b2083dd Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 23:51:02 +0100 Subject: [PATCH 081/194] force-add in phase0 in test context, since state creation is not independent yet --- test_libs/pyspec/eth2spec/test/context.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py index 650f1ec81..149836787 100644 --- a/test_libs/pyspec/eth2spec/test/context.py +++ b/test_libs/pyspec/eth2spec/test/context.py @@ -236,6 +236,10 @@ def with_phases(phases, other_phases=None): if other_phases is not None: available_phases += set(other_phases) + # TODO: test state is dependent on phase0 but is immediately transitioned to phase1. 
+ # A new state-creation helper for phase 1 may be in place, and then phase1+ tests can run without phase0 + available_phases.add('phase0') + phase_dir = {} if 'phase0' in available_phases: phase_dir['phase0'] = spec_phase0 From 2f3919c176a3228843e163f4ccc799910e37f17e Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 5 Jan 2020 23:51:24 +0100 Subject: [PATCH 082/194] bugfix: pass block body to sub processing functions --- specs/core/1_beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 118689cf0..f7f133589 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -571,8 +571,8 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None: process_block_header(state, block) process_randao(state, block.body) process_eth1_data(state, block.body) - verify_shard_transition_false_positives(state, block) - process_light_client_signatures(state, block) + verify_shard_transition_false_positives(state, block.body) + process_light_client_signatures(state, block.body) process_operations(state, block.body) ``` From 0cf866e7993e86a12a3c32fdedd447a3ad922419 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:06:57 +0100 Subject: [PATCH 083/194] fix bug, test should run properly, forward generator in return --- test_libs/pyspec/eth2spec/test/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py index 149836787..0add9f638 100644 --- a/test_libs/pyspec/eth2spec/test/context.py +++ b/test_libs/pyspec/eth2spec/test/context.py @@ -112,7 +112,7 @@ def single_phase(fn): def entry(*args, **kw): if 'phases' in kw: kw.pop('phases') - fn(*args, **kw) + return fn(*args, **kw) return entry From 6b872da3ecc92747c6af28de3eda34bbb131b27e Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:17:48 +0100 Subject: [PATCH 084/194] 
light client bitfield is vector now, small size anyway, better separation of committee complexity from data init --- specs/core/1_beacon-chain.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index f7f133589..384c75f51 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -219,7 +219,7 @@ class BeaconBlockBody(Container): # Shards shard_transitions: Vector[ShardTransition, MAX_SHARDS] # Light clients - light_client_signature_bitfield: Bitlist[LIGHT_CLIENT_COMMITTEE_SIZE] + light_client_signature_bitfield: Bitvector[LIGHT_CLIENT_COMMITTEE_SIZE] light_client_signature: BLSSignature ``` @@ -862,11 +862,11 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB assert len(block_body.light_client_signature_bitfield) == len(committee) total_reward = Gwei(0) signer_keys = [] - for i, participant_bit in enumerate(block_body.light_client_signature_bitfield): - if participant_bit: - signer_keys.append(state.validators[committee[i]].pubkey) - increase_balance(state, committee[i], get_base_reward(state, committee[i])) - total_reward += get_base_reward(state, committee[i]) + for bit_index, participant_index in enumerate(committee): + if block_body.light_client_signature_bitfield[bit_index]: + signer_keys.append(state.validators[participant_index].pubkey) + increase_balance(state, participant_index, get_base_reward(state, participant_index)) + total_reward += get_base_reward(state, participant_index) increase_balance(state, get_beacon_proposer_index(state), Gwei(total_reward // PROPOSER_REWARD_QUOTIENT)) From ce84ddd20763bb46e8d40bb856f96375eef09c4c Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:24:06 +0100 Subject: [PATCH 085/194] initial update of phase1 section of configs --- configs/mainnet.yaml | 5 +++++ configs/minimal.yaml | 8 ++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git 
a/configs/mainnet.yaml b/configs/mainnet.yaml index 99bba1d4e..078b2cced 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -146,3 +146,8 @@ DOMAIN_SHARD_PROPOSAL: 0x80000000 DOMAIN_SHARD_COMMITTEE: 0x81000000 DOMAIN_LIGHT_CLIENT: 0x82000000 DOMAIN_CUSTODY_BIT_SLASHING: 0x83000000 + + +# Phase 1 +# --------------------------------------------------------------- +INITIAL_ACTIVE_SHARDS: 64 diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 4aacf17e1..90af6605f 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -153,9 +153,5 @@ DOMAIN_CUSTODY_BIT_SLASHING: 0x83000000 # Phase 1 # --------------------------------------------------------------- -SHARD_SLOTS_PER_BEACON_SLOT: 2 -EPOCHS_PER_SHARD_PERIOD: 4 -# PHASE_1_FORK_EPOCH >= EPOCHS_PER_SHARD_PERIOD * 2 -PHASE_1_FORK_EPOCH: 8 -# PHASE_1_FORK_SLOT = PHASE_1_FORK_EPOCH * SLOTS_PER_EPOCH -PHASE_1_FORK_SLOT: 64 +# [customized] reduced for testing +INITIAL_ACTIVE_SHARDS: 4 From 9c07e26a4d8693b32c13d050aba89b7314551210 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:25:18 +0100 Subject: [PATCH 086/194] length assert not applicable anymore --- specs/core/1_beacon-chain.md | 1 - 1 file changed, 1 deletion(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 384c75f51..af7e6c299 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -859,7 +859,6 @@ def verify_shard_transition_false_positives(state: BeaconState, block_body: Beac ```python def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockBody) -> None: committee = get_light_client_committee(state, get_current_epoch(state)) - assert len(block_body.light_client_signature_bitfield) == len(committee) total_reward = Gwei(0) signer_keys = [] for bit_index, participant_index in enumerate(committee): From 68d93f945c6a10af3eb96743e6f7ddb3eef6e2d7 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:44:22 +0100 Subject: [PATCH 
087/194] online countdown fixes --- specs/core/1_beacon-chain.md | 9 +++++---- specs/core/1_phase1-fork.md | 4 +--- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index af7e6c299..0e66e2472 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -88,6 +88,7 @@ We define the following Python custom types for type hinting and readability: | Name | SSZ equivalent | Description | | - | - | - | | `Shard` | `uint64` | a shard number | +| `OnlineEpochs` | `uint8` | online countdown epochs | ## Configuration @@ -99,7 +100,7 @@ Configuration is not namespaced. Instead it is strictly an extension; | Name | Value | Unit | Duration | | - | - | - | - | | `MAX_SHARDS` | `2**10` (= 1024) | -| `ONLINE_PERIOD` | `Epoch(2**3)` (= 8) | epochs | ~51 min | +| `ONLINE_PERIOD` | `OnlineEpochs(2**3)` (= 8) | online epochs | ~51 min | | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | | `SHARD_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | @@ -281,7 +282,7 @@ class BeaconState(Container): finalized_checkpoint: Checkpoint # Phase 1 shard_states: List[ShardState, MAX_SHARDS] - online_countdown: ByteList[VALIDATOR_REGISTRY_LIMIT] + online_countdown: List[OnlineEpochs, VALIDATOR_REGISTRY_LIMIT] # not a raw byte array, considered its large size. 
current_light_committee: CompactCommittee next_light_committee: CompactCommittee # Custody game @@ -515,7 +516,7 @@ def get_shard(state: BeaconState, attestation: Attestation) -> Shard: ```python def get_next_slot_for_shard(state: BeaconState, shard: Shard) -> Slot: - return Slot(state.shard_transitions[shard].slot + 1) + return Slot(state.shard_states[shard].slot + 1) ``` @@ -851,7 +852,7 @@ def verify_shard_transition_false_positives(state: BeaconState, block_body: Beac # Verify that a `shard_transition` in a block is empty if an attestation was not processed for it for shard in range(get_active_shard_count(state)): if state.shard_states[shard].slot != state.slot - 1: - assert block_body.shard_transition[shard] == ShardTransition() + assert block_body.shard_transitions[shard] == ShardTransition() ``` #### Light client processing diff --git a/specs/core/1_phase1-fork.md b/specs/core/1_phase1-fork.md index 97ad3252b..fdd97a24e 100644 --- a/specs/core/1_phase1-fork.md +++ b/specs/core/1_phase1-fork.md @@ -107,9 +107,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState: latest_block_root=Root(), ) for i in range(INITIAL_ACTIVE_SHARDS) ), - online_countdown=ByteList[VALIDATOR_REGISTRY_LIMIT]( - ONLINE_PERIOD for i in range(len(pre.validators)) - ), + online_countdown=[ONLINE_PERIOD] * len(pre.validators), # all online current_light_committee=CompactCommittee(), # computed after state creation next_light_committee=CompactCommittee(), # Custody game From ba10046cc1eab2e5baa7e66fcc31bcbf18f665c9 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:49:32 +0100 Subject: [PATCH 088/194] minor lint fixes --- test_libs/pyspec/eth2spec/test/context.py | 3 ++- test_libs/pyspec/eth2spec/test/helpers/attestations.py | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py index 0add9f638..6134243df 100644 --- a/test_libs/pyspec/eth2spec/test/context.py +++ 
b/test_libs/pyspec/eth2spec/test/context.py @@ -29,7 +29,8 @@ class Phase0(Spec): class Phase1(Spec): - def upgrade_to_phase1(self, state: spec_phase0.BeaconState) -> spec_phase1.BeaconState: ... + def upgrade_to_phase1(self, state: spec_phase0.BeaconState) -> spec_phase1.BeaconState: + ... # add transfer, bridge, etc. as the spec evolves diff --git a/test_libs/pyspec/eth2spec/test/helpers/attestations.py b/test_libs/pyspec/eth2spec/test/helpers/attestations.py index 0c64a0316..27014a347 100644 --- a/test_libs/pyspec/eth2spec/test/helpers/attestations.py +++ b/test_libs/pyspec/eth2spec/test/helpers/attestations.py @@ -95,7 +95,6 @@ def sign_indexed_attestation(spec, state, indexed_attestation): indexed_attestation.attestation.signature = sign_aggregate_attestation(spec, state, data, participants) - def sign_attestation(spec, state, attestation): participants = spec.get_attesting_indices( state, From cd7a2149f9da0aa388722f2cd0f4647606c705fc Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 00:57:36 +0100 Subject: [PATCH 089/194] fix genesis tests: only run on single phase, ignore phases extra data --- .../pyspec/eth2spec/test/genesis/test_initialization.py | 4 +++- test_libs/pyspec/eth2spec/test/genesis/test_validity.py | 7 ++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/test_libs/pyspec/eth2spec/test/genesis/test_initialization.py b/test_libs/pyspec/eth2spec/test/genesis/test_initialization.py index 462065bb9..65b9a41e7 100644 --- a/test_libs/pyspec/eth2spec/test/genesis/test_initialization.py +++ b/test_libs/pyspec/eth2spec/test/genesis/test_initialization.py @@ -1,4 +1,4 @@ -from eth2spec.test.context import spec_test, with_phases +from eth2spec.test.context import spec_test, with_phases, single_phase from eth2spec.test.helpers.deposits import ( prepare_genesis_deposits, ) @@ -6,6 +6,7 @@ from eth2spec.test.helpers.deposits import ( @with_phases(['phase0']) @spec_test +@single_phase def 
test_initialize_beacon_state_from_eth1(spec): deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT deposits, deposit_root, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True) @@ -33,6 +34,7 @@ def test_initialize_beacon_state_from_eth1(spec): @with_phases(['phase0']) @spec_test +@single_phase def test_initialize_beacon_state_some_small_balances(spec): main_deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT main_deposits, _, deposit_data_list = prepare_genesis_deposits(spec, main_deposit_count, diff --git a/test_libs/pyspec/eth2spec/test/genesis/test_validity.py b/test_libs/pyspec/eth2spec/test/genesis/test_validity.py index a003938e7..a90b4a695 100644 --- a/test_libs/pyspec/eth2spec/test/genesis/test_validity.py +++ b/test_libs/pyspec/eth2spec/test/genesis/test_validity.py @@ -1,4 +1,4 @@ -from eth2spec.test.context import spec_test, with_phases +from eth2spec.test.context import spec_test, with_phases, single_phase from eth2spec.test.helpers.deposits import ( prepare_genesis_deposits, ) @@ -27,6 +27,7 @@ def run_is_valid_genesis_state(spec, state, valid=True): @with_phases(['phase0']) @spec_test +@single_phase def test_is_valid_genesis_state_true(spec): state = create_valid_beacon_state(spec) @@ -35,6 +36,7 @@ def test_is_valid_genesis_state_true(spec): @with_phases(['phase0']) @spec_test +@single_phase def test_is_valid_genesis_state_false_invalid_timestamp(spec): state = create_valid_beacon_state(spec) state.genesis_time = spec.MIN_GENESIS_TIME - 1 @@ -44,6 +46,7 @@ def test_is_valid_genesis_state_false_invalid_timestamp(spec): @with_phases(['phase0']) @spec_test +@single_phase def test_is_valid_genesis_state_true_more_balance(spec): state = create_valid_beacon_state(spec) state.validators[0].effective_balance = spec.MAX_EFFECTIVE_BALANCE + 1 @@ -63,6 +66,7 @@ def test_is_valid_genesis_state_true_more_balance(spec): @with_phases(['phase0']) @spec_test +@single_phase def 
test_is_valid_genesis_state_true_one_more_validator(spec): deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT + 1 deposits, _, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True) @@ -76,6 +80,7 @@ def test_is_valid_genesis_state_true_one_more_validator(spec): @with_phases(['phase0']) @spec_test +@single_phase def test_is_valid_genesis_state_false_not_enough_validator(spec): deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT - 1 deposits, _, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True) From 9dd8d1d880d0a7c476fd674cbfad13ae9021faa7 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 6 Jan 2020 01:18:52 +0100 Subject: [PATCH 090/194] add two missing decorators --- .../test/phase_0/block_processing/test_process_attestation.py | 3 ++- .../epoch_processing/test_process_rewards_and_penalties.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attestation.py b/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attestation.py index d48386fd4..7937614a4 100644 --- a/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attestation.py +++ b/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attestation.py @@ -6,7 +6,7 @@ from eth2spec.test.context import ( spec_test, low_balances, with_custom_state, -) + single_phase) from eth2spec.test.helpers.attestations import ( get_valid_attestation, sign_aggregate_attestation, @@ -66,6 +66,7 @@ def test_success(spec, state): @with_all_phases @spec_test @with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.EJECTION_BALANCE) +@single_phase def test_success_multi_proposer_index_iterations(spec, state): state.slot += spec.SLOTS_PER_EPOCH * 2 attestation = get_valid_attestation(spec, state, signed=True) diff --git 
a/test_libs/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py b/test_libs/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py index b4fc46b7d..dd3dae50a 100644 --- a/test_libs/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py +++ b/test_libs/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py @@ -1,7 +1,7 @@ from copy import deepcopy from eth2spec.test.context import spec_state_test, with_all_phases, spec_test, \ - misc_balances, with_custom_state, default_activation_threshold + misc_balances, with_custom_state, default_activation_threshold, single_phase from eth2spec.test.helpers.state import ( next_epoch, next_slot, @@ -96,6 +96,7 @@ def test_full_attestations(spec, state): @with_all_phases @spec_test @with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold) +@single_phase def test_full_attestations_misc_balances(spec, state): attestations = prepare_state_with_full_attestations(spec, state) From f810e6b9c2b2eeefe4b37dd9bdf08cf731646bbb Mon Sep 17 00:00:00 2001 From: protolambda Date: Thu, 9 Jan 2020 15:16:46 +0100 Subject: [PATCH 091/194] update to python v3.8 --- .circleci/config.yml | 22 +++++++++++----------- test_libs/pyspec/setup.py | 1 + 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f3870d922..afa5b82d9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -35,32 +35,32 @@ commands: description: "Restore the cache with pyspec keys" steps: - restore_cached_venv: - venv_name: v5-pyspec + venv_name: v6-pyspec reqs_checksum: cache-{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }} save_pyspec_cached_venv: description: Save a venv into a cache with pyspec keys" steps: - save_cached_venv: - venv_name: v5-pyspec + venv_name: v6-pyspec reqs_checksum: cache-{{ checksum 
"test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }} venv_path: ./test_libs/pyspec/venv restore_deposit_contract_cached_venv: description: "Restore the cache with deposit_contract keys" steps: - restore_cached_venv: - venv_name: v8-deposit-contract + venv_name: v9-deposit-contract reqs_checksum: cache-{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} save_deposit_contract_cached_venv: description: Save a venv into a cache with deposit_contract keys" steps: - save_cached_venv: - venv_name: v8-deposit-contract + venv_name: v9-deposit-contract reqs_checksum: cache-{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} venv_path: ./deposit_contract/venv jobs: checkout_specs: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: # Restore git repo at point close to target branch/revision, to speed up checkout @@ -80,7 +80,7 @@ jobs: - ~/specs-repo install_pyspec_test: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: @@ -92,7 +92,7 @@ jobs: - save_pyspec_cached_venv test: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: @@ -114,7 +114,7 @@ jobs: command: sudo npm install -g doctoc && make check_toc codespell: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - checkout @@ -123,7 +123,7 @@ jobs: command: pip install codespell --user && make codespell lint: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: @@ -134,7 +134,7 @@ jobs: command: make lint install_deposit_contract_test: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: 
~/specs-repo steps: - restore_cache: @@ -146,7 +146,7 @@ jobs: - save_deposit_contract_cached_venv deposit_contract: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: diff --git a/test_libs/pyspec/setup.py b/test_libs/pyspec/setup.py index 5be0db7f8..204187efc 100644 --- a/test_libs/pyspec/setup.py +++ b/test_libs/pyspec/setup.py @@ -3,6 +3,7 @@ from setuptools import setup, find_packages setup( name='pyspec', packages=find_packages(), + python_requires=">=3.8, <4", tests_require=["pytest"], install_requires=[ "eth-utils>=1.3.0,<2", From 68ff136b5de5213093f0c9c4e8057eaaf8cceea5 Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 10 Jan 2020 00:00:10 +0100 Subject: [PATCH 092/194] warn about broken attestation validation, fix most attester slashings tests --- specs/core/1_beacon-chain.md | 13 ++-- specs/core/1_custody-game.md | 4 +- .../test/helpers/attester_slashings.py | 37 ++++++++++++ .../test_process_attester_slashing.py | 59 +++++++++++-------- .../eth2spec/test/sanity/test_blocks.py | 4 +- 5 files changed, 82 insertions(+), 35 deletions(-) diff --git a/specs/core/1_beacon-chain.md b/specs/core/1_beacon-chain.md index 23748277b..a10382ff4 100644 --- a/specs/core/1_beacon-chain.md +++ b/specs/core/1_beacon-chain.md @@ -142,7 +142,7 @@ class AttestationData(Container): class Attestation(Container): aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] data: AttestationData - custody_bits: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_SHARD_BLOCKS_PER_ATTESTATION] + custody_bits_blocks: List[Bitlist[MAX_VALIDATORS_PER_COMMITTEE], MAX_SHARD_BLOCKS_PER_ATTESTATION] signature: BLSSignature ``` @@ -536,7 +536,7 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe domain=get_domain(state, DOMAIN_BEACON_ATTESTER, attestation.data.target.epoch) aggregation_bits = attestation.aggregation_bits assert len(aggregation_bits) == len(indexed_attestation.committee) - 
for i, custody_bits in enumerate(attestation.custody_bits): + for i, custody_bits in enumerate(attestation.custody_bits_blocks): assert len(custody_bits) == len(indexed_attestation.committee) for participant, abit, cbit in zip(indexed_attestation.committee, aggregation_bits, custody_bits): if abit: @@ -546,7 +546,10 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe AttestationCustodyBitWrapper(hash_tree_root(attestation.data), i, cbit), domain)) else: assert not cbit - + # WARNING: this is BROKEN. If no custody_bits_blocks, + # a valid empty signature can pass validation, even though aggregate bits are set. + # Decide between: force at least 1 shard block (even if empty data), + # or fast-aggregate-verify with attestation data with empty shard data as message (alike to phase0) return bls.AggregateVerify(zip(all_pubkeys, all_signing_roots), signature=attestation.signature) ``` @@ -616,11 +619,11 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: # Signature check assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation)) # Type 1: on-time attestations - if attestation.custody_bits != []: + if attestation.custody_bits_blocks != []: # Correct slot assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY == state.slot # Correct data root count - assert len(attestation.custody_bits) == len(get_offset_slots(state, shard_start_slot)) + assert len(attestation.custody_bits_blocks) == len(get_offset_slots(state, shard_start_slot)) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, get_previous_slot(state.slot)) # Type 2: delayed attestations diff --git a/specs/core/1_custody-game.md b/specs/core/1_custody-game.md index 5de4997ae..3ca0a29b7 100644 --- a/specs/core/1_custody-game.md +++ b/specs/core/1_custody-game.md @@ -91,7 +91,7 @@ The following types are defined, mapping into `DomainType` (little endian): ```python class 
CustodySlashing(Container): - # Attestation.custody_bits[data_index][committee.index(malefactor_index)] is the target custody bit to check. + # Attestation.custody_bits_blocks[data_index][committee.index(malefactor_index)] is the target custody bit to check. # (Attestation.data.shard_transition_root as ShardTransition).shard_data_roots[data_index] is the root of the data. data_index: uint64 malefactor_index: ValidatorIndex @@ -378,7 +378,7 @@ def process_custody_slashing(state: BeaconState, signed_custody_slashing: Signed assert bls.Verify(malefactor.pubkey, signing_root, custody_slashing.malefactor_secret) # Get the custody bit - custody_bits = attestation.custody_bits[custody_slashing.data_index] + custody_bits = attestation.custody_bits_blocks[custody_slashing.data_index] committee = get_beacon_committee(state, attestation.data.slot, attestation.data.index) claimed_custody_bit = custody_bits[committee.index(custody_slashing.malefactor_index)] diff --git a/test_libs/pyspec/eth2spec/test/helpers/attester_slashings.py b/test_libs/pyspec/eth2spec/test/helpers/attester_slashings.py index 20abcacfb..8a342dd4d 100644 --- a/test_libs/pyspec/eth2spec/test/helpers/attester_slashings.py +++ b/test_libs/pyspec/eth2spec/test/helpers/attester_slashings.py @@ -16,3 +16,40 @@ def get_valid_attester_slashing(spec, state, signed_1=False, signed_2=False): attestation_1=spec.get_indexed_attestation(state, attestation_1), attestation_2=spec.get_indexed_attestation(state, attestation_2), ) + + +def get_indexed_attestation_participants(spec, indexed_att): + """ + Wrapper around index-attestation to return the list of participant indices, regardless of spec phase. 
+ """ + if spec.version == "phase1": + return list(spec.get_indices_from_committee( + indexed_att.committee, + indexed_att.attestation.aggregation_bits, + )) + else: + return list(indexed_att.attesting_indices) + + +def set_indexed_attestation_participants(spec, indexed_att, participants): + """ + Wrapper around index-attestation to return the list of participant indices, regardless of spec phase. + """ + if spec.version == "phase1": + indexed_att.attestation.aggregation_bits = [bool(i in participants) for i in indexed_att.committee] + else: + indexed_att.attesting_indices = participants + + +def get_attestation_1_data(spec, att_slashing): + if spec.version == "phase1": + return att_slashing.attestation_1.attestation.data + else: + return att_slashing.attestation_1.data + + +def get_attestation_2_data(spec, att_slashing): + if spec.version == "phase1": + return att_slashing.attestation_2.attestation.data + else: + return att_slashing.attestation_2.data diff --git a/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py b/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py index 50cd7f706..4bd3a96b5 100644 --- a/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py +++ b/test_libs/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py @@ -1,6 +1,7 @@ -from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases +from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases, with_phases from eth2spec.test.helpers.attestations import sign_indexed_attestation -from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing +from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing, \ + get_indexed_attestation_participants, get_attestation_2_data, get_attestation_1_data from eth2spec.test.helpers.block import 
apply_empty_block from eth2spec.test.helpers.state import ( get_balance, @@ -25,7 +26,7 @@ def run_attester_slashing_processing(spec, state, attester_slashing, valid=True) yield 'post', None return - slashed_indices = attester_slashing.attestation_1.attesting_indices + slashed_indices = get_indexed_attestation_participants(spec, attester_slashing.attestation_1) proposer_index = spec.get_beacon_proposer_index(state) pre_proposer_balance = get_balance(state, proposer_index) @@ -92,12 +93,12 @@ def test_success_surround(spec, state): state.current_justified_checkpoint.epoch += 1 attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) - attestation_1 = attester_slashing.attestation_1 - attestation_2 = attester_slashing.attestation_2 + att_1_data = get_attestation_1_data(spec, attester_slashing) + att_2_data = get_attestation_2_data(spec, attester_slashing) # set attestion1 to surround attestation 2 - attestation_1.data.source.epoch = attestation_2.data.source.epoch - 1 - attestation_1.data.target.epoch = attestation_2.data.target.epoch + 1 + att_1_data.source.epoch = att_2_data.source.epoch - 1 + att_1_data.target.epoch = att_2_data.target.epoch + 1 sign_indexed_attestation(spec, state, attester_slashing.attestation_1) @@ -109,7 +110,7 @@ def test_success_surround(spec, state): @always_bls def test_success_already_exited_recent(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True) - slashed_indices = attester_slashing.attestation_1.attesting_indices + slashed_indices = get_indexed_attestation_participants(spec, attester_slashing.attestation_1) for index in slashed_indices: spec.initiate_validator_exit(state, index) @@ -121,7 +122,7 @@ def test_success_already_exited_recent(spec, state): @always_bls def test_success_already_exited_long_ago(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True) - slashed_indices = 
attester_slashing.attestation_1.attesting_indices + slashed_indices = get_indexed_attestation_participants(spec, attester_slashing.attestation_1) for index in slashed_indices: spec.initiate_validator_exit(state, index) state.validators[index].withdrawable_epoch = spec.get_current_epoch(state) + 2 @@ -158,7 +159,12 @@ def test_invalid_sig_1_and_2(spec, state): def test_same_data(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) - attester_slashing.attestation_1.data = attester_slashing.attestation_2.data + indexed_att_1 = attester_slashing.attestation_1 + att_2_data = get_attestation_2_data(spec, attester_slashing) + if spec.version == 'phase1': + indexed_att_1.attestation.data = att_2_data + else: + indexed_att_1.data = att_2_data sign_indexed_attestation(spec, state, attester_slashing.attestation_1) yield from run_attester_slashing_processing(spec, state, attester_slashing, False) @@ -169,10 +175,8 @@ def test_same_data(spec, state): def test_no_double_or_surround(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) - if spec.version == 'phase0': - attester_slashing.attestation_1.data.target.epoch += 1 - else: - attester_slashing.attestation_1.attestation.data.target.epoch += 1 + att_1_data = get_attestation_1_data(spec, attester_slashing) + att_1_data.target.epoch += 1 sign_indexed_attestation(spec, state, attester_slashing.attestation_1) @@ -185,20 +189,23 @@ def test_participants_already_slashed(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True) # set all indices to slashed - validator_indices = attester_slashing.attestation_1.attesting_indices + validator_indices = get_indexed_attestation_participants(spec, attester_slashing.attestation_1) for index in validator_indices: state.validators[index].slashed = True yield from run_attester_slashing_processing(spec, state, attester_slashing, False) 
-@with_all_phases +# Some of the following tests are phase0 only: phase 1 lists participants with bitfields instead of index list. + + +@with_phases(['phase0']) @spec_state_test @always_bls def test_att1_bad_extra_index(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True) - indices = attester_slashing.attestation_1.attesting_indices + indices = get_indexed_attestation_participants(spec, attester_slashing.attestation_1) options = list(set(range(len(state.validators))) - set(indices)) indices.append(options[len(options) // 2]) # add random index, not previously in attestation. attester_slashing.attestation_1.attesting_indices = sorted(indices) @@ -208,7 +215,7 @@ def test_att1_bad_extra_index(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test @always_bls def test_att1_bad_replaced_index(spec, state): @@ -224,7 +231,7 @@ def test_att1_bad_replaced_index(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test @always_bls def test_att2_bad_extra_index(spec, state): @@ -240,7 +247,7 @@ def test_att2_bad_extra_index(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test @always_bls def test_att2_bad_replaced_index(spec, state): @@ -256,7 +263,7 @@ def test_att2_bad_replaced_index(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test @always_bls def test_att1_duplicate_index_normal_signed(spec, state): @@ -276,7 +283,7 @@ def test_att1_duplicate_index_normal_signed(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) 
@spec_state_test @always_bls def test_att2_duplicate_index_normal_signed(spec, state): @@ -296,7 +303,7 @@ def test_att2_duplicate_index_normal_signed(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test @always_bls def test_att1_duplicate_index_double_signed(spec, state): @@ -311,7 +318,7 @@ def test_att1_duplicate_index_double_signed(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test @always_bls def test_att2_duplicate_index_double_signed(spec, state): @@ -326,7 +333,7 @@ def test_att2_duplicate_index_double_signed(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test def test_unsorted_att_1(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) @@ -339,7 +346,7 @@ def test_unsorted_att_1(spec, state): yield from run_attester_slashing_processing(spec, state, attester_slashing, False) -@with_all_phases +@with_phases(['phase0']) @spec_state_test def test_unsorted_att_2(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=False) diff --git a/test_libs/pyspec/eth2spec/test/sanity/test_blocks.py b/test_libs/pyspec/eth2spec/test/sanity/test_blocks.py index b386d36b4..9027660ab 100644 --- a/test_libs/pyspec/eth2spec/test/sanity/test_blocks.py +++ b/test_libs/pyspec/eth2spec/test/sanity/test_blocks.py @@ -6,7 +6,7 @@ from eth2spec.test.helpers.state import get_balance, state_transition_and_sign_b from eth2spec.test.helpers.block import build_empty_block_for_next_slot, build_empty_block, sign_block, \ transition_unsigned_block from eth2spec.test.helpers.keys import privkeys, pubkeys -from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing 
+from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing, get_indexed_attestation_participants from eth2spec.test.helpers.proposer_slashings import get_valid_proposer_slashing from eth2spec.test.helpers.attestations import get_valid_attestation from eth2spec.test.helpers.deposits import prepare_state_and_deposit @@ -220,7 +220,7 @@ def test_attester_slashing(spec, state): pre_state = deepcopy(state) attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True) - validator_index = attester_slashing.attestation_1.attesting_indices[0] + validator_index = get_indexed_attestation_participants(spec, attester_slashing.attestation_1)[0] assert not state.validators[validator_index].slashed From efbea8c53c114dc93771a6cf11eb2a0f9415ef13 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Mon, 13 Jan 2020 12:49:03 +0800 Subject: [PATCH 093/194] Fix BLS API description --- specs/phase0/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 503286e48..9b4d7e492 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -588,8 +588,8 @@ Eth2 makes use of BLS signatures as specified in the [IETF draft BLS specificati - `def Sign(SK: int, message: Bytes) -> BLSSignature` - `def Verify(PK: BLSPubkey, message: Bytes, signature: BLSSignature) -> bool` - `def Aggregate(signatures: Sequence[BLSSignature]) -> BLSSignature` -- `def FastAggregateVerify(PKs: Sequence[BLSSignature], message: Bytes, signature: BLSSignature) -> bool` -- `def AggregateVerify(pairs: Sequence[PK: BLSSignature, message: Bytes], signature: BLSSignature) -> bool` +- `def FastAggregateVerify(PKs: Sequence[BLSPubkey], message: Bytes, signature: BLSSignature) -> bool` +- `def AggregateVerify(pairs: Sequence[PK: BLSPubkey, message: Bytes], signature: BLSSignature) -> bool` Within these specifications, BLS signatures are treated as a module for 
notational clarity, thus to verify a signature `bls.Verify(...)` is used. From b001f250db3ca785a21a233e2a87d6056df4a86c Mon Sep 17 00:00:00 2001 From: Ben Edgington Date: Mon, 13 Jan 2020 09:32:34 +0000 Subject: [PATCH 094/194] Correct typo in heading level --- specs/phase0/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 503286e48..9d784ddf4 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -76,7 +76,7 @@ - [`compute_start_slot_at_epoch`](#compute_start_slot_at_epoch) - [`compute_activation_exit_epoch`](#compute_activation_exit_epoch) - [`compute_domain`](#compute_domain) - - [`compute_signing_root`](#compute_signing_root) + - [`compute_signing_root`](#compute_signing_root) - [Beacon state accessors](#beacon-state-accessors) - [`get_current_epoch`](#get_current_epoch) - [`get_previous_epoch`](#get_previous_epoch) @@ -795,7 +795,7 @@ def compute_domain(domain_type: DomainType, fork_version: Version=GENESIS_FORK_V return Domain(domain_type + fork_version) ``` -### `compute_signing_root` +#### `compute_signing_root` ```python def compute_signing_root(ssz_object: SSZObject, domain: Domain) -> Root: From 065b4ef856aeb7f84f1bed5c4a2cd4d6ac1edc87 Mon Sep 17 00:00:00 2001 From: Chih Cheng Liang Date: Mon, 13 Jan 2020 20:42:23 +0800 Subject: [PATCH 095/194] Fix p2p interface indentation --- specs/phase0/p2p-interface.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index c04d2ae4c..674f2e2b8 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -80,7 +80,7 @@ It consists of four main sections: - [How do we upgrade gossip channels (e.g. 
changes in encoding, compression)?](#how-do-we-upgrade-gossip-channels-eg-changes-in-encoding-compression) - [Why must all clients use the same gossip topic instead of one negotiated between each peer pair?](#why-must-all-clients-use-the-same-gossip-topic-instead-of-one-negotiated-between-each-peer-pair) - [Why are the topics strings and not hashes?](#why-are-the-topics-strings-and-not-hashes) - - [Why are we overriding the default libp2p pubsub `message-id`?](#why-are-we-overriding-the-default-libp2p-pubsub-message-id) + - [Why are we overriding the default libp2p pubsub `message-id`?](#why-are-we-overriding-the-default-libp2p-pubsub-message-id) - [Why is there `MAXIMUM_GOSSIP_CLOCK_DISPARITY` when validating slot ranges of messages in gossip subnets?](#why-is-there-maximum_gossip_clock_disparity-when-validating-slot-ranges-of-messages-in-gossip-subnets) - [Why are there `ATTESTATION_SUBNET_COUNT` attestation subnets?](#why-are-there-attestation_subnet_count-attestation-subnets) - [Why are attestations limited to be broadcast on gossip channels within `SLOTS_PER_EPOCH` slots?](#why-are-attestations-limited-to-be-broadcast-on-gossip-channels-within-slots_per_epoch-slots) @@ -759,7 +759,7 @@ No security or privacy guarantees are lost as a result of choosing plaintext top Furthermore, the Eth2 topic names are shorter than their digest equivalents (assuming SHA-256 hash), so hashing topics would bloat messages unnecessarily. -## Why are we overriding the default libp2p pubsub `message-id`? +### Why are we overriding the default libp2p pubsub `message-id`? For our current purposes, there is no need to address messages based on source peer, and it seems likely we might even override the message `from` to obfuscate the peer. By overriding the default `message-id` to use content-addressing we can filter unnecessary duplicates before hitting the application layer. 
From 507a9afbfb0d434f4c2cd59ef444860f9dce70e8 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 13 Jan 2020 18:57:56 +0100 Subject: [PATCH 096/194] apply custody bit fix suggestion from Dankrad --- specs/phase1/custody-game.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase1/custody-game.md b/specs/phase1/custody-game.md index d8c4954f3..fd804a722 100644 --- a/specs/phase1/custody-game.md +++ b/specs/phase1/custody-game.md @@ -188,7 +188,7 @@ def get_custody_atoms(bytez: bytes) -> Sequence[bytes]: def compute_custody_bit(key: BLSSignature, data: bytes) -> bit: full_G2_element = bls.signature_to_G2(key) s = full_G2_element[0].coeffs - bits = [legendre_bit((i + 1) * s[i % 2] + int.from_bytes(atom, "little"), BLS12_381_Q) + bits = [legendre_bit(sum(s[i % 2]**i * int.from_bytes(atom, "little")), BLS12_381_Q) for i, atom in enumerate(get_custody_atoms(data))] # XOR all atom bits return bit(sum(bits) % 2) From 419b6a3250b5f33d88b8f4f847701ad13c599b03 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 13 Jan 2020 19:00:24 +0100 Subject: [PATCH 097/194] config change, need more space for worst-case reveals --- specs/phase1/custody-game.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase1/custody-game.md b/specs/phase1/custody-game.md index fd804a722..1e7dc0216 100644 --- a/specs/phase1/custody-game.md +++ b/specs/phase1/custody-game.md @@ -65,7 +65,7 @@ This document details the beacon chain additions and changes in Phase 1 of Ether | Name | Value | | - | - | -| `MAX_CUSTODY_KEY_REVEALS` | `2**4` (= 16) | +| `MAX_CUSTODY_KEY_REVEALS` | `2**8` (= 256) | | `MAX_EARLY_DERIVED_SECRET_REVEALS` | `1` | | `MAX_CUSTODY_SLASHINGS` | `1` | From 702b253361ace68d0130d87c4309fc095567e017 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 13 Jan 2020 19:47:33 +0100 Subject: [PATCH 098/194] update configs for phase1 --- configs/mainnet.yaml | 66 +++++++++++++++++++++++++++++-- configs/minimal.yaml | 75 
+++++++++++++++++++++++++++++++----- specs/phase1/beacon-chain.md | 4 +- specs/phase1/custody-game.md | 7 +++- specs/phase1/phase1-fork.md | 2 +- 5 files changed, 135 insertions(+), 19 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 2d8de5390..6eb5641d0 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -94,9 +94,6 @@ PERSISTENT_COMMITTEE_PERIOD: 2048 MAX_EPOCHS_PER_CROSSLINK: 64 # 2**2 (= 4) epochs 25.6 minutes MIN_EPOCHS_TO_INACTIVITY_PENALTY: 4 -# 2**14 (= 16,384) epochs ~73 days -EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 16384 - # State vector lengths @@ -146,12 +143,73 @@ DOMAIN_BEACON_ATTESTER: 0x01000000 DOMAIN_RANDAO: 0x02000000 DOMAIN_DEPOSIT: 0x03000000 DOMAIN_VOLUNTARY_EXIT: 0x04000000 +# Phase 1 DOMAIN_SHARD_PROPOSAL: 0x80000000 DOMAIN_SHARD_COMMITTEE: 0x81000000 DOMAIN_LIGHT_CLIENT: 0x82000000 DOMAIN_CUSTODY_BIT_SLASHING: 0x83000000 -# Phase 1 +# Phase 1: Upgrade from Phase 0 # --------------------------------------------------------------- +PHASE_1_FORK_VERSION: 0x01000000 INITIAL_ACTIVE_SHARDS: 64 +# Placeholder +INITIAL_GASPRICE: 10 + + +# Phase 1: General +# --------------------------------------------------------------- +# 2**10` (= 1024) +MAX_SHARDS: 1024 +# 2**3 (= 8) | online epochs | ~51 min +ONLINE_PERIOD: 8 +# 2**7 (= 128) +LIGHT_CLIENT_COMMITTEE_SIZE: 128 +# 2**8 (= 256) | epochs | ~27 hours +LIGHT_CLIENT_COMMITTEE_PERIOD: 256 +# 2**8 (= 256) | epochs | ~27 hours +SHARD_COMMITTEE_PERIOD: 256 +# 2**18 (= 262,144) +SHARD_BLOCK_CHUNK_SIZE: 262144 +# 2**2 (= 4) +MAX_SHARD_BLOCK_CHUNKS: 4 +# 3 * 2**16` (= 196,608) +TARGET_SHARD_BLOCK_SIZE: 196608 +# Note: MAX_SHARD_BLOCKS_PER_ATTESTATION is derived from the list length. 
+SHARD_BLOCK_OFFSETS: [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233] +# len(SHARD_BLOCK_OFFSETS) +MAX_SHARD_BLOCKS_PER_ATTESTATION: 12 +# 2**14 (= 16,384) Gwei +MAX_GASPRICE: 16384 +# 2**5 (= 32) Gwei +MIN_GASPRICE: 32 +# 2**3 (= 8) +GASPRICE_ADJUSTMENT_COEFFICIENT: 8 + + +# Phase 1: Custody Game +# --------------------------------------------------------------- + +# Time parameters +# 2**1 (= 2) epochs, 12.8 minutes +RANDAO_PENALTY_EPOCHS: 2 +# 2**14 (= 16,384) epochs ~73 days +EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 16384 +# 2**11 (= 2,048) epochs, ~9 days +EPOCHS_PER_CUSTODY_PERIOD: 2048 +# 2**11 (= 2,048) epochs, ~9 days +CUSTODY_PERIOD_TO_RANDAO_PADDING: 2048 +# 2**7 (= 128) epochs, ~14 hours +MAX_REVEAL_LATENESS_DECREMENT: 128 + +# Max operations +# 2**8 (= 256) +MAX_CUSTODY_KEY_REVEALS: 256 +MAX_EARLY_DERIVED_SECRET_REVEALS: 1 +MAX_CUSTODY_SLASHINGS: 1 + +# Reward and penalty quotients +EARLY_DERIVED_SECRET_REVEAL_SLOT_REWARD_MULTIPLE: 2 +# 2**8 (= 256) +MINOR_REWARD_QUOTIENT: 256 diff --git a/configs/minimal.yaml b/configs/minimal.yaml index d45bdbd09..486803467 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -21,14 +21,13 @@ MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 64 # Jan 3, 2020 MIN_GENESIS_TIME: 1578009600 -# -# + # Fork Choice # --------------------------------------------------------------- # 2**1 (= 1) SAFE_SLOTS_TO_UPDATE_JUSTIFIED: 2 -# + # Validator # --------------------------------------------------------------- # [customized] process deposits more quickly, but insecure @@ -94,12 +93,6 @@ PERSISTENT_COMMITTEE_PERIOD: 2048 MAX_EPOCHS_PER_CROSSLINK: 4 # 2**2 (= 4) epochs MIN_EPOCHS_TO_INACTIVITY_PENALTY: 4 -# [customized] 2**12 (= 4,096) epochs -EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 4096 -# 2**2 (= 4) epochs -EPOCHS_PER_CUSTODY_PERIOD: 4 -# 2**2 (= 4) epochs -CUSTODY_PERIOD_TO_RANDAO_PADDING: 4 # State vector lengths @@ -149,13 +142,75 @@ DOMAIN_BEACON_ATTESTER: 0x01000000 DOMAIN_RANDAO: 0x02000000 DOMAIN_DEPOSIT: 
0x03000000 DOMAIN_VOLUNTARY_EXIT: 0x04000000 +# Phase 1 DOMAIN_SHARD_PROPOSAL: 0x80000000 DOMAIN_SHARD_COMMITTEE: 0x81000000 DOMAIN_LIGHT_CLIENT: 0x82000000 DOMAIN_CUSTODY_BIT_SLASHING: 0x83000000 -# Phase 1 +# Phase 1: Upgrade from Phase 0 # --------------------------------------------------------------- +# [customized] for testnet distinction +PHASE_1_FORK_VERSION: 0x01000001 # [customized] reduced for testing INITIAL_ACTIVE_SHARDS: 4 +# Placeholder +INITIAL_GASPRICE: 10 + + +# Phase 1: General +# --------------------------------------------------------------- +# [customized] reduced for testing +MAX_SHARDS: 8 +# 2**3 (= 8) | online epochs +ONLINE_PERIOD: 8 +# 2**7 (= 128) +LIGHT_CLIENT_COMMITTEE_SIZE: 128 +# 2**8 (= 256) | epochs +LIGHT_CLIENT_COMMITTEE_PERIOD: 256 +# 2**8 (= 256) | epochs +SHARD_COMMITTEE_PERIOD: 256 +# 2**18 (= 262,144) +SHARD_BLOCK_CHUNK_SIZE: 262144 +# 2**2 (= 4) +MAX_SHARD_BLOCK_CHUNKS: 4 +# 3 * 2**16` (= 196,608) +TARGET_SHARD_BLOCK_SIZE: 196608 +# Note: MAX_SHARD_BLOCKS_PER_ATTESTATION is derived from the list length. 
+SHARD_BLOCK_OFFSETS: [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233] +# len(SHARD_BLOCK_OFFSETS) +MAX_SHARD_BLOCKS_PER_ATTESTATION: 12 +# 2**14 (= 16,384) Gwei +MAX_GASPRICE: 16384 +# 2**5 (= 32) Gwei +MIN_GASPRICE: 32 +# 2**3 (= 8) +GASPRICE_ADJUSTMENT_COEFFICIENT: 8 + + +# Phase 1: Custody Game +# --------------------------------------------------------------- + +# Time parameters +# 2**1 (= 2) epochs +RANDAO_PENALTY_EPOCHS: 2 +# [customized] quicker for testing +EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 4096 +# 2**11 (= 2,048) epochs, +EPOCHS_PER_CUSTODY_PERIOD: 2048 +# 2**11 (= 2,048) epochs, +CUSTODY_PERIOD_TO_RANDAO_PADDING: 2048 +# 2**7 (= 128) epochs, +MAX_REVEAL_LATENESS_DECREMENT: 128 + +# Max operations +# 2**8 (= 256) +MAX_CUSTODY_KEY_REVEALS: 256 +MAX_EARLY_DERIVED_SECRET_REVEALS: 1 +MAX_CUSTODY_SLASHINGS: 1 + +# Reward and penalty quotients +EARLY_DERIVED_SECRET_REVEAL_SLOT_REWARD_MULTIPLE: 2 +# 2**8 (= 256) +MINOR_REWARD_QUOTIENT: 256 diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 14b83dd03..b53ce1de1 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -107,7 +107,6 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | `TARGET_SHARD_BLOCK_SIZE` | `3 * 2**16` (= 196,608) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | -| `EMPTY_CHUNK_ROOT` | `hash_tree_root(ByteList[SHARD_BLOCK_CHUNK_SIZE]())` | | | `MAX_GASPRICE` | `Gwei(2**14)` (= 16,384) | Gwei | | | `MIN_GASPRICE` | `Gwei(2**5)` (= 32) | Gwei | | | `GASPRICE_ADJUSTMENT_COEFFICIENT` | `2**3` (= 8) | | @@ -406,8 +405,9 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid ```python def chunks_to_body_root(chunks: List[Bytes32, MAX_SHARD_BLOCK_CHUNKS]) -> Root: + empty_chunk_root = hash_tree_root(ByteList[SHARD_BLOCK_CHUNK_SIZE]()) return hash_tree_root(Vector[Bytes32, MAX_SHARD_BLOCK_CHUNKS]( - chunks + [EMPTY_CHUNK_ROOT] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) + chunks + [empty_chunk_root] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) )) ``` diff --git a/specs/phase1/custody-game.md b/specs/phase1/custody-game.md index 1e7dc0216..54a3f4332 100644 --- a/specs/phase1/custody-game.md +++ b/specs/phase1/custody-game.md @@ -45,18 +45,20 @@ This document details the beacon chain additions and changes in Phase 1 of Ether ## Constants ### Misc + | Name | Value | Unit | | - | - | | `BLS12_381_Q` | `4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787` | -| `MINOR_REWARD_QUOTIENT` | `2**8` (= 256) | | `BYTES_PER_CUSTODY_ATOM` | `48` | bytes | +## Configuration + ### Time parameters | Name | Value | Unit | Duration | | - | - | :-: | :-: | | `RANDAO_PENALTY_EPOCHS` | `2**1` (= 2) | epochs | 12.8 minutes | -| `EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS` | `2**14` | epochs | ~73 days | +| `EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS` | `2**14` (= 16,384) | epochs | ~73 days | | `EPOCHS_PER_CUSTODY_PERIOD` | `2**11` (= 2,048) | epochs | ~9 days | | `CUSTODY_PERIOD_TO_RANDAO_PADDING` | `2**11` (= 2,048) | 
epochs | ~9 days | | `MAX_REVEAL_LATENESS_DECREMENT` | `2**7` (= 128) | epochs | ~14 hours | @@ -74,6 +76,7 @@ This document details the beacon chain additions and changes in Phase 1 of Ether | Name | Value | | - | - | | `EARLY_DERIVED_SECRET_REVEAL_SLOT_REWARD_MULTIPLE` | `2**1` (= 2) | +| `MINOR_REWARD_QUOTIENT` | `2**8` (= 256) | ### Signature domain types diff --git a/specs/phase1/phase1-fork.md b/specs/phase1/phase1-fork.md index c6bbc7363..56eee410b 100644 --- a/specs/phase1/phase1-fork.md +++ b/specs/phase1/phase1-fork.md @@ -34,7 +34,7 @@ Warning: this configuration is not definitive. | Name | Value | | - | - | - | -| `PHASE_1_FORK_VERSION` | `Version('0x00000001')` | +| `PHASE_1_FORK_VERSION` | `Version('0x01000000')` | | `INITIAL_ACTIVE_SHARDS` | `2**6` (= 64) | | `INITIAL_GASPRICE` | `Gwei(10)` | From 46be6aed1d26a0227d2f66dfca7b132bafe1e428 Mon Sep 17 00:00:00 2001 From: protolambda Date: Mon, 13 Jan 2020 20:01:42 +0100 Subject: [PATCH 099/194] fix config loader to support list inputs --- tests/core/config_helpers/preset_loader/loader.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/core/config_helpers/preset_loader/loader.py b/tests/core/config_helpers/preset_loader/loader.py index 9d75932df..95f147f6e 100644 --- a/tests/core/config_helpers/preset_loader/loader.py +++ b/tests/core/config_helpers/preset_loader/loader.py @@ -18,7 +18,9 @@ def load_presets(configs_dir, presets_name) -> Dict[str, Any]: loaded = yaml.load(path) out = dict() for k, v in loaded.items(): - if v.startswith("0x"): + if isinstance(v, list): + out[k] = v + elif isinstance(v, str) and v.startswith("0x"): out[k] = bytes.fromhex(v[2:]) else: out[k] = int(v) From 6c744681422573dec89e3e8909a7b8f5deb71459 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:02:02 +0100 Subject: [PATCH 100/194] forkchoice store on top of any state now --- specs/phase0/fork-choice.md | 34 +++++++++++-------- .../test/fork_choice/test_get_head.py | 10 +++--- 
.../test/fork_choice/test_on_attestation.py | 20 +++++------ .../test/fork_choice/test_on_block.py | 14 ++++---- .../eth2spec/test/fork_choice/test_on_tick.py | 12 +++---- 5 files changed, 48 insertions(+), 42 deletions(-) diff --git a/specs/phase0/fork-choice.md b/specs/phase0/fork-choice.md index 0d9823fcd..feab5bb7a 100644 --- a/specs/phase0/fork-choice.md +++ b/specs/phase0/fork-choice.md @@ -14,7 +14,7 @@ - [Helpers](#helpers) - [`LatestMessage`](#latestmessage) - [`Store`](#store) - - [`get_genesis_store`](#get_genesis_store) + - [`get_forkchoice_store`](#get_forkchoice_store) - [`get_slots_since_genesis`](#get_slots_since_genesis) - [`get_current_slot`](#get_current_slot) - [`compute_slots_since_epoch_start`](#compute_slots_since_epoch_start) @@ -38,7 +38,7 @@ This document is the beacon chain fork choice spec, part of Ethereum 2.0 Phase 0 ## Fork choice -The head block root associated with a `store` is defined as `get_head(store)`. At genesis, let `store = get_genesis_store(genesis_state)` and update `store` by running: +The head block root associated with a `store` is defined as `get_head(store)`. 
At genesis, let `store = get_forkchoice_store(genesis_state)` and update `store` by running: - `on_tick(time)` whenever `time > store.time` where `time` is the current Unix time - `on_block(block)` whenever a block `block: SignedBeaconBlock` is received @@ -79,29 +79,35 @@ class Store(object): justified_checkpoint: Checkpoint finalized_checkpoint: Checkpoint best_justified_checkpoint: Checkpoint - blocks: Dict[Root, BeaconBlock] = field(default_factory=dict) + blocks: Dict[Root, BeaconBlockHeader] = field(default_factory=dict) block_states: Dict[Root, BeaconState] = field(default_factory=dict) checkpoint_states: Dict[Checkpoint, BeaconState] = field(default_factory=dict) latest_messages: Dict[ValidatorIndex, LatestMessage] = field(default_factory=dict) ``` -#### `get_genesis_store` +#### `get_forkchoice_store` + +The provided anchor-state will be regarded as a trusted state, to not roll back beyond. +This should be the genesis state for a full client. ```python -def get_genesis_store(genesis_state: BeaconState) -> Store: - genesis_block = BeaconBlock(state_root=hash_tree_root(genesis_state)) - root = hash_tree_root(genesis_block) - justified_checkpoint = Checkpoint(epoch=GENESIS_EPOCH, root=root) - finalized_checkpoint = Checkpoint(epoch=GENESIS_EPOCH, root=root) +def get_forkchoice_store(anchor_state: BeaconState) -> Store: + anchor_block_header = anchor_state.latest_block_header.copy() + if anchor_block_header.state_root == Bytes32(): + anchor_block_header.state_root = hash_tree_root(anchor_state) + anchor_root = hash_tree_root(anchor_block_header) + anchor_epoch = get_current_epoch(anchor_state) + justified_checkpoint = Checkpoint(epoch=anchor_epoch, root=anchor_root) + finalized_checkpoint = Checkpoint(epoch=anchor_epoch, root=anchor_root) return Store( - time=genesis_state.genesis_time, - genesis_time=genesis_state.genesis_time, + time=anchor_state.genesis_time, + genesis_time=anchor_state.genesis_time, justified_checkpoint=justified_checkpoint, 
finalized_checkpoint=finalized_checkpoint, best_justified_checkpoint=justified_checkpoint, - blocks={root: genesis_block}, - block_states={root: genesis_state.copy()}, - checkpoint_states={justified_checkpoint: genesis_state.copy()}, + blocks={anchor_root: anchor_block_header}, + block_states={anchor_root: anchor_state.copy()}, + checkpoint_states={justified_checkpoint: anchor_state.copy()}, ) ``` diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py b/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py index 1f412e787..a5a6b2fe0 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py @@ -34,7 +34,7 @@ def add_attestation_to_store(spec, store, attestation): @spec_state_test def test_genesis(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) assert spec.get_head(store) == spec.hash_tree_root(genesis_block) @@ -43,7 +43,7 @@ def test_genesis(spec, state): @spec_state_test def test_chain_no_attestations(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) assert spec.get_head(store) == spec.hash_tree_root(genesis_block) @@ -66,7 +66,7 @@ def test_split_tie_breaker_no_attestations(spec, state): genesis_state = state.copy() # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) assert spec.get_head(store) == spec.hash_tree_root(genesis_block) @@ -94,7 +94,7 @@ def test_shorter_chain_but_heavier_weight(spec, state): genesis_state = state.copy() # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) genesis_block = 
spec.BeaconBlock(state_root=state.hash_tree_root()) assert spec.get_head(store) == spec.hash_tree_root(genesis_block) @@ -123,7 +123,7 @@ def test_shorter_chain_but_heavier_weight(spec, state): def test_filtered_block_tree(spec, state): # Initialization genesis_state_root = state.hash_tree_root() - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) genesis_block = spec.BeaconBlock(state_root=genesis_state_root) # transition state past initial couple of epochs diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py index d7fbc4777..0fa6809ab 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py @@ -27,7 +27,7 @@ def run_on_attestation(spec, state, store, attestation, valid=True): @with_all_phases @spec_state_test def test_on_attestation_current_epoch(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) spec.on_tick(store, store.time + spec.SECONDS_PER_SLOT * 2) block = build_empty_block_for_next_slot(spec, state) @@ -46,7 +46,7 @@ def test_on_attestation_current_epoch(spec, state): @with_all_phases @spec_state_test def test_on_attestation_previous_epoch(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) spec.on_tick(store, store.time + spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH) block = build_empty_block_for_next_slot(spec, state) @@ -65,7 +65,7 @@ def test_on_attestation_previous_epoch(spec, state): @with_all_phases @spec_state_test def test_on_attestation_past_epoch(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) # move time forward 2 epochs time = store.time + 2 * spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH @@ -87,7 +87,7 @@ def test_on_attestation_past_epoch(spec, state): @with_all_phases @spec_state_test 
def test_on_attestation_mismatched_target_and_slot(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) spec.on_tick(store, store.time + spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH) block = build_empty_block_for_next_slot(spec, state) @@ -110,7 +110,7 @@ def test_on_attestation_mismatched_target_and_slot(spec, state): @with_all_phases @spec_state_test def test_on_attestation_target_not_in_store(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH spec.on_tick(store, time) @@ -131,7 +131,7 @@ def test_on_attestation_target_not_in_store(spec, state): @with_all_phases @spec_state_test def test_on_attestation_beacon_block_not_in_store(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH spec.on_tick(store, time) @@ -159,7 +159,7 @@ def test_on_attestation_beacon_block_not_in_store(spec, state): @with_all_phases @spec_state_test def test_on_attestation_future_epoch(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 3 * spec.SECONDS_PER_SLOT spec.on_tick(store, time) @@ -179,7 +179,7 @@ def test_on_attestation_future_epoch(spec, state): @with_all_phases @spec_state_test def test_on_attestation_future_block(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = spec.SECONDS_PER_SLOT * 5 spec.on_tick(store, time) @@ -199,7 +199,7 @@ def test_on_attestation_future_block(spec, state): @with_all_phases @spec_state_test def test_on_attestation_same_slot(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 1 * spec.SECONDS_PER_SLOT spec.on_tick(store, time) @@ -215,7 +215,7 @@ def test_on_attestation_same_slot(spec, state): @with_all_phases @spec_state_test def 
test_on_attestation_invalid_attestation(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 3 * spec.SECONDS_PER_SLOT spec.on_tick(store, time) diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_block.py b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_block.py index 10d1c0011..6a72d61e1 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_block.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_block.py @@ -36,7 +36,7 @@ def apply_next_epoch_with_attestations(spec, state, store): @spec_state_test def test_basic(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 100 spec.on_tick(store, time) assert store.time == time @@ -60,7 +60,7 @@ def test_basic(spec, state): @spec_state_test def test_on_block_checkpoints(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 100 spec.on_tick(store, time) @@ -86,7 +86,7 @@ def test_on_block_checkpoints(spec, state): @spec_state_test def test_on_block_future_block(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) # do not tick time @@ -100,7 +100,7 @@ def test_on_block_future_block(spec, state): @spec_state_test def test_on_block_bad_parent_root(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 100 spec.on_tick(store, time) @@ -120,7 +120,7 @@ def test_on_block_bad_parent_root(spec, state): @spec_state_test def test_on_block_before_finalized(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 100 spec.on_tick(store, time) @@ -139,7 +139,7 @@ def test_on_block_before_finalized(spec, state): @spec_state_test def test_on_block_update_justified_checkpoint_within_safe_slots(spec, state): # Initialization - 
store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 100 spec.on_tick(store, time) @@ -170,7 +170,7 @@ def test_on_block_update_justified_checkpoint_within_safe_slots(spec, state): @spec_state_test def test_on_block_outside_safe_slots_and_multiple_better_justified(spec, state): # Initialization - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) time = 100 spec.on_tick(store, time) diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_tick.py b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_tick.py index 77222f65c..27b64ac09 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_tick.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_tick.py @@ -19,14 +19,14 @@ def run_on_tick(spec, store, time, new_justified_checkpoint=False): @with_all_phases @spec_state_test def test_basic(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) run_on_tick(spec, store, store.time + 1) @with_all_phases @spec_state_test def test_update_justified_single(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) seconds_per_epoch = spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH store.best_justified_checkpoint = spec.Checkpoint( @@ -40,7 +40,7 @@ def test_update_justified_single(spec, state): @with_all_phases @spec_state_test def test_no_update_same_slot_at_epoch_boundary(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) seconds_per_epoch = spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH store.best_justified_checkpoint = spec.Checkpoint( @@ -57,7 +57,7 @@ def test_no_update_same_slot_at_epoch_boundary(spec, state): @with_all_phases @spec_state_test def test_no_update_not_epoch_boundary(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) store.best_justified_checkpoint = spec.Checkpoint( 
epoch=store.justified_checkpoint.epoch + 1, @@ -70,7 +70,7 @@ def test_no_update_not_epoch_boundary(spec, state): @with_all_phases @spec_state_test def test_no_update_new_justified_equal_epoch(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) seconds_per_epoch = spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH store.best_justified_checkpoint = spec.Checkpoint( @@ -89,7 +89,7 @@ def test_no_update_new_justified_equal_epoch(spec, state): @with_all_phases @spec_state_test def test_no_update_new_justified_later_epoch(spec, state): - store = spec.get_genesis_store(state) + store = spec.get_forkchoice_store(state) seconds_per_epoch = spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH store.best_justified_checkpoint = spec.Checkpoint( From fee3baaf8ccc094c78772bae02fb031e7b801eef Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:02:24 +0100 Subject: [PATCH 101/194] fix attesting indices error --- .../epoch_processing/test_process_rewards_and_penalties.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py index dd3dae50a..fa394df56 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py @@ -10,6 +10,7 @@ from eth2spec.test.helpers.attestations import ( add_attestations_to_state, get_valid_attestation, ) +from eth2spec.test.helpers.attester_slashings import get_indexed_attestation_participants from eth2spec.test.phase_0.epoch_processing.run_epoch_process_base import run_epoch_processing_with @@ -142,7 +143,7 @@ def test_duplicate_attestation(spec, state): attestation = get_valid_attestation(spec, state, signed=True) indexed_attestation = 
spec.get_indexed_attestation(state, attestation) - participants = indexed_attestation.attesting_indices + participants = get_indexed_attestation_participants(spec, indexed_attestation) assert len(participants) > 0 From e8654bff1077f3c2580376c73b71f9359e4d1285 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:04:15 +0100 Subject: [PATCH 102/194] merkle proofs out, moving to ssz spec --- .../eth2spec/test/merkle_proofs/__init__.py | 0 .../test/merkle_proofs/test_merkle_proofs.py | 152 ------------------ 2 files changed, 152 deletions(-) delete mode 100644 tests/core/pyspec/eth2spec/test/merkle_proofs/__init__.py delete mode 100644 tests/core/pyspec/eth2spec/test/merkle_proofs/test_merkle_proofs.py diff --git a/tests/core/pyspec/eth2spec/test/merkle_proofs/__init__.py b/tests/core/pyspec/eth2spec/test/merkle_proofs/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/core/pyspec/eth2spec/test/merkle_proofs/test_merkle_proofs.py b/tests/core/pyspec/eth2spec/test/merkle_proofs/test_merkle_proofs.py deleted file mode 100644 index 62a2f6379..000000000 --- a/tests/core/pyspec/eth2spec/test/merkle_proofs/test_merkle_proofs.py +++ /dev/null @@ -1,152 +0,0 @@ -import re -from eth_utils import ( - to_tuple, -) - -from eth2spec.test.context import ( - expect_assertion_error, - spec_state_test, - with_all_phases_except, -) -from eth2spec.utils.ssz.ssz_typing import ( - Bytes32, - Container, - List, - uint64, -) - - -class Foo(Container): - x: uint64 - y: List[Bytes32, 2] - -# Tree -# root -# / \ -# x y_root -# / \ -# y_data_root len(y) -# / \ -# / \ / \ -# -# Generalized indices -# 1 -# / \ -# 2 (x) 3 (y_root) -# / \ -# 6 7 -# / \ -# 12 13 - - -@to_tuple -def ssz_object_to_path(start, end): - is_len = False - len_findall = re.findall(r"(?<=len\().*(?=\))", end) - if len_findall: - is_len = True - end = len_findall[0] - - route = '' - if end.startswith(start): - route = end[len(start):] - - segments = route.split('.') - for word in 
segments: - index_match = re.match(r"(\w+)\[(\d+)]", word) - if index_match: - yield from index_match.groups() - elif len(word): - yield word - if is_len: - yield '__len__' - - -to_path_test_cases = [ - ('foo', 'foo.x', ('x',)), - ('foo', 'foo.x[100].y', ('x', '100', 'y')), - ('foo', 'foo.x[100].y[1].z[2]', ('x', '100', 'y', '1', 'z', '2')), - ('foo', 'len(foo.x[100].y[1].z[2])', ('x', '100', 'y', '1', 'z', '2', '__len__')), -] - - -def test_to_path(): - for test_case in to_path_test_cases: - start, end, expected = test_case - assert ssz_object_to_path(start, end) == expected - - -generalized_index_cases = [ - (Foo, ('x',), 2), - (Foo, ('y',), 3), - (Foo, ('y', 0), 12), - (Foo, ('y', 1), 13), - (Foo, ('y', '__len__'), None), -] - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_get_generalized_index(spec, state): - for typ, path, generalized_index in generalized_index_cases: - if generalized_index is not None: - assert spec.get_generalized_index( - typ=typ, - path=path, - ) == generalized_index - else: - expect_assertion_error(lambda: spec.get_generalized_index(typ=typ, path=path)) - - yield 'typ', typ - yield 'path', path - yield 'generalized_index', generalized_index - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_verify_merkle_proof(spec, state): - h = spec.hash - a = b'\x11' * 32 - b = b'\x22' * 32 - c = b'\x33' * 32 - d = b'\x44' * 32 - root = h(h(a + b) + h(c + d)) - leaf = a - generalized_index = 4 - proof = [b, h(c + d)] - - is_valid = spec.verify_merkle_proof( - leaf=leaf, - proof=proof, - index=generalized_index, - root=root, - ) - assert is_valid - - yield 'proof', proof - yield 'is_valid', is_valid - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_verify_merkle_multiproof(spec, state): - h = spec.hash - a = b'\x11' * 32 - b = b'\x22' * 32 - c = b'\x33' * 32 - d = b'\x44' * 32 - root = h(h(a + b) + h(c + d)) - leaves = [a, d] - generalized_indices = [4, 7] - proof = [c, b] # helper_indices = [6, 
5] - - is_valid = spec.verify_merkle_multiproof( - leaves=leaves, - proof=proof, - indices=generalized_indices, - root=root, - ) - assert is_valid - - yield 'proof', proof - yield 'is_valid', is_valid From f6f8bd535046cd783d40e1a941791e7bae5bedaa Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:34:38 +0100 Subject: [PATCH 103/194] no custody bits fallback --- specs/phase1/beacon-chain.md | 31 +++++++++++-------- .../eth2spec/test/helpers/attestations.py | 1 + .../pyspec/eth2spec/test/helpers/custody.py | 3 +- .../pyspec/eth2spec/utils/ssz/ssz_impl.py | 8 ++--- 4 files changed, 24 insertions(+), 19 deletions(-) diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index b53ce1de1..fdd3d5c01 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -536,21 +536,26 @@ def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: Indexe domain = get_domain(state, DOMAIN_BEACON_ATTESTER, attestation.data.target.epoch) aggregation_bits = attestation.aggregation_bits assert len(aggregation_bits) == len(indexed_attestation.committee) - for i, custody_bits in enumerate(attestation.custody_bits_blocks): - assert len(custody_bits) == len(indexed_attestation.committee) - for participant, abit, cbit in zip(indexed_attestation.committee, aggregation_bits, custody_bits): + + if len(attestation.custody_bits_blocks) == 0: + # fall back on phase0 behavior if there is no shard data. + for participant, abit in zip(indexed_attestation.committee, aggregation_bits): if abit: all_pubkeys.append(state.validators[participant].pubkey) - # Note: only 2N distinct message hashes - all_signing_roots.append(compute_signing_root( - AttestationCustodyBitWrapper(hash_tree_root(attestation.data), i, cbit), domain)) - else: - assert not cbit - # WARNING: this is BROKEN. If no custody_bits_blocks, - # a valid empty signature can pass validation, even though aggregate bits are set. 
- # Decide between: force at least 1 shard block (even if empty data), - # or fast-aggregate-verify with attestation data with empty shard data as message (alike to phase0) - return bls.AggregateVerify(zip(all_pubkeys, all_signing_roots), signature=attestation.signature) + signing_root = compute_signing_root(indexed_attestation.attestation.data, domain) + return bls.FastAggregateVerify(all_pubkeys, signing_root, signature=attestation.signature) + else: + for i, custody_bits in enumerate(attestation.custody_bits_blocks): + assert len(custody_bits) == len(indexed_attestation.committee) + for participant, abit, cbit in zip(indexed_attestation.committee, aggregation_bits, custody_bits): + if abit: + all_pubkeys.append(state.validators[participant].pubkey) + # Note: only 2N distinct message hashes + all_signing_roots.append(compute_signing_root( + AttestationCustodyBitWrapper(hash_tree_root(attestation.data), i, cbit), domain)) + else: + assert not cbit + return bls.AggregateVerify(zip(all_pubkeys, all_signing_roots), signature=attestation.signature) ``` diff --git a/tests/core/pyspec/eth2spec/test/helpers/attestations.py b/tests/core/pyspec/eth2spec/test/helpers/attestations.py index 1cd9c07c4..b8733705a 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/attestations.py +++ b/tests/core/pyspec/eth2spec/test/helpers/attestations.py @@ -77,6 +77,7 @@ def sign_aggregate_attestation(spec, state, attestation_data, participants: List privkey ) ) + # TODO: we should try signing custody bits if spec.version == 'phase1' return bls.Aggregate(signatures) diff --git a/tests/core/pyspec/eth2spec/test/helpers/custody.py b/tests/core/pyspec/eth2spec/test/helpers/custody.py index e00d64a17..bcf2c199b 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/custody.py +++ b/tests/core/pyspec/eth2spec/test/helpers/custody.py @@ -1,6 +1,5 @@ from eth2spec.test.helpers.keys import privkeys from eth2spec.utils import bls -from eth2spec.utils.hash_function import hash from 
eth2spec.utils.ssz.ssz_typing import Bitlist, ByteVector, Bitvector from eth2spec.utils.ssz.ssz_impl import chunkify, pack, hash_tree_root from eth2spec.utils.merkle_minimal import get_merkle_tree, get_merkle_proof @@ -21,7 +20,7 @@ def get_valid_early_derived_secret_reveal(spec, state, epoch=None): signing_root = spec.compute_signing_root(spec.Epoch(epoch), domain) reveal = bls.Sign(privkeys[revealed_index], signing_root) # Generate the mask (any random 32 bytes that don't reveal the masker's secret will do) - mask = hash(reveal) + mask = spec.hash(reveal) # Generate masker's signature on the mask signing_root = spec.compute_signing_root(mask, domain) masker_signature = bls.Sign(privkeys[masker_index], signing_root) diff --git a/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py b/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py index c7a144ec2..113bcf169 100644 --- a/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py +++ b/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py @@ -2,7 +2,7 @@ from ..merkle_minimal import merkleize_chunks from ..hash_function import hash from .ssz_typing import ( SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bits, boolean, Container, List, ByteList, - Bitlist, Bitvector, uint, + Bitlist, Bitvector, uint, Bytes32 ) # SSZ Serialization @@ -140,7 +140,7 @@ def chunk_count(typ: SSZType) -> int: raise Exception(f"Type not supported: {typ}") -def hash_tree_root(obj: SSZValue): +def hash_tree_root(obj: SSZValue) -> Bytes32: if isinstance(obj, Series): if is_bottom_layer_kind(obj.type()): leaves = chunkify(pack(obj)) @@ -152,6 +152,6 @@ def hash_tree_root(obj: SSZValue): raise Exception(f"Type not supported: {type(obj)}") if isinstance(obj, (List, ByteList, Bitlist)): - return mix_in_length(merkleize_chunks(leaves, limit=chunk_count(obj.type())), len(obj)) + return Bytes32(mix_in_length(merkleize_chunks(leaves, limit=chunk_count(obj.type())), len(obj))) else: - return merkleize_chunks(leaves) + return Bytes32(merkleize_chunks(leaves)) 
From f04a686db7043cb4d8a1f3c7f079ab9fb0ed0ca9 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:42:19 +0100 Subject: [PATCH 104/194] doctoc --- specs/phase1/beacon-chain.md | 4 +++- specs/phase1/custody-game.md | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index fdd3d5c01..c557bf123 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -12,6 +12,8 @@ - [Extended `AttestationData`](#extended-attestationdata) - [Extended `Attestation`](#extended-attestation) - [Extended `PendingAttestation`](#extended-pendingattestation) + - [`IndexedAttestation`](#indexedattestation) + - [Extended `AttesterSlashing`](#extended-attesterslashing) - [Extended `Validator`](#extended-validator) - [Extended `BeaconBlockBody`](#extended-beaconblockbody) - [Extended `BeaconBlock`](#extended-beaconblock) @@ -22,7 +24,6 @@ - [`ShardSignableHeader`](#shardsignableheader) - [`ShardState`](#shardstate) - [`ShardTransition`](#shardtransition) - - [`AttestationAndCommittee`](#attestationandcommittee) - [`CompactCommittee`](#compactcommittee) - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) - [Helper functions](#helper-functions) @@ -53,6 +54,7 @@ - [`process_crosslink_for_shard`](#process_crosslink_for_shard) - [`process_crosslinks`](#process_crosslinks) - [`process_attestations`](#process_attestations) + - [New Attester slashing processing](#new-attester-slashing-processing) - [Shard transition false positives](#shard-transition-false-positives) - [Light client processing](#light-client-processing) - [Epoch transition](#epoch-transition) diff --git a/specs/phase1/custody-game.md b/specs/phase1/custody-game.md index 54a3f4332..fd35e6515 100644 --- a/specs/phase1/custody-game.md +++ b/specs/phase1/custody-game.md @@ -11,6 +11,7 @@ - [Introduction](#introduction) - [Constants](#constants) - [Misc](#misc) +- [Configuration](#configuration) - [Time 
parameters](#time-parameters) - [Max operations per block](#max-operations-per-block) - [Reward and penalty quotients](#reward-and-penalty-quotients) From d98c50a7b1f69f9ecc2809efed2b26467ce771bd Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:43:58 +0100 Subject: [PATCH 105/194] update ci caches --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8189166d3..23425a179 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -35,26 +35,26 @@ commands: description: "Restore the cache with pyspec keys" steps: - restore_cached_venv: - venv_name: v8-pyspec + venv_name: v9-pyspec reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} save_pyspec_cached_venv: description: Save a venv into a cache with pyspec keys" steps: - save_cached_venv: - venv_name: v8-pyspec + venv_name: v9-pyspec reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} venv_path: ./tests/core/pyspec/venv restore_deposit_contract_cached_venv: description: "Restore the cache with deposit_contract keys" steps: - restore_cached_venv: - venv_name: v9-deposit-contract + venv_name: v10-deposit-contract reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} save_deposit_contract_cached_venv: description: Save a venv into a cache with deposit_contract keys" steps: - save_cached_venv: - venv_name: v9-deposit-contract + venv_name: v10-deposit-contract reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} venv_path: ./deposit_contract/venv jobs: From 5785b4fc5bcf0337fb073a9b21312379d1f228f1 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:59:01 +0100 Subject: [PATCH 
106/194] custody bits temporary solution --- specs/phase1/beacon-chain.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index c557bf123..185d27f52 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -633,9 +633,9 @@ def validate_attestation(state: BeaconState, attestation: Attestation) -> None: assert len(attestation.custody_bits_blocks) == len(get_offset_slots(state, shard_start_slot)) # Correct parent block root assert data.beacon_block_root == get_block_root_at_slot(state, get_previous_slot(state.slot)) - # Type 2: delayed attestations + # Type 2: no shard transition, no custody bits # TODO: could only allow for older attestations. else: - assert state.slot - compute_start_slot_at_epoch(compute_epoch_at_slot(data.slot)) < SLOTS_PER_EPOCH + # assert state.slot - compute_start_slot_at_epoch(compute_epoch_at_slot(data.slot)) < SLOTS_PER_EPOCH assert data.shard_transition_root == Root() ``` From f16d74d703e782280aa413e8c94604d629fc444d Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 01:59:27 +0100 Subject: [PATCH 107/194] fix custody key reveal tests not running due to decorator order problem --- .../test_process_custody_key_reveal.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/phase_1/block_processing/test_process_custody_key_reveal.py b/tests/core/pyspec/eth2spec/test/phase_1/block_processing/test_process_custody_key_reveal.py index f8860cf87..fb9157f2f 100644 --- a/tests/core/pyspec/eth2spec/test/phase_1/block_processing/test_process_custody_key_reveal.py +++ b/tests/core/pyspec/eth2spec/test/phase_1/block_processing/test_process_custody_key_reveal.py @@ -55,8 +55,8 @@ def run_custody_key_reveal_processing(spec, state, custody_key_reveal, valid=Tru @with_all_phases_except(['phase0']) -@always_bls @spec_state_test +@always_bls def test_success(spec, state): state.slot += 
spec.EPOCHS_PER_CUSTODY_PERIOD * spec.SLOTS_PER_EPOCH custody_key_reveal = get_valid_custody_key_reveal(spec, state) @@ -65,8 +65,8 @@ def test_success(spec, state): @with_all_phases_except(['phase0']) -@always_bls @spec_state_test +@always_bls def test_reveal_too_early(spec, state): custody_key_reveal = get_valid_custody_key_reveal(spec, state) @@ -74,8 +74,8 @@ def test_reveal_too_early(spec, state): @with_all_phases_except(['phase0']) -@always_bls @spec_state_test +@always_bls def test_wrong_period(spec, state): custody_key_reveal = get_valid_custody_key_reveal(spec, state, period=5) @@ -83,8 +83,8 @@ def test_wrong_period(spec, state): @with_all_phases_except(['phase0']) -@always_bls @spec_state_test +@always_bls def test_late_reveal(spec, state): state.slot += spec.EPOCHS_PER_CUSTODY_PERIOD * spec.SLOTS_PER_EPOCH * 3 + 150 custody_key_reveal = get_valid_custody_key_reveal(spec, state) @@ -93,8 +93,8 @@ def test_late_reveal(spec, state): @with_all_phases_except(['phase0']) -@always_bls @spec_state_test +@always_bls def test_double_reveal(spec, state): state.slot += spec.EPOCHS_PER_CUSTODY_PERIOD * spec.SLOTS_PER_EPOCH * 2 custody_key_reveal = get_valid_custody_key_reveal(spec, state) @@ -105,8 +105,8 @@ def test_double_reveal(spec, state): @with_all_phases_except(['phase0']) -@always_bls @spec_state_test +@always_bls def test_max_decrement(spec, state): state.slot += spec.EPOCHS_PER_CUSTODY_PERIOD * spec.SLOTS_PER_EPOCH * 3 + 150 custody_key_reveal = get_valid_custody_key_reveal(spec, state) From 0d8a2ef92a21a5b9f2d881b8f698f503f8204c81 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 02:25:59 +0100 Subject: [PATCH 108/194] fix broken fork choice tests; compute anchor root for state, not just any genesis block --- .../test/fork_choice/test_get_head.py | 28 +++++++++++-------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py 
b/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py index a5a6b2fe0..e34c32c0e 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_get_head.py @@ -30,13 +30,20 @@ def add_attestation_to_store(spec, store, attestation): spec.on_attestation(store, attestation) +def get_anchor_root(spec, state): + anchor_block_header = state.latest_block_header.copy() + if anchor_block_header.state_root == spec.Bytes32(): + anchor_block_header.state_root = spec.hash_tree_root(state) + return spec.hash_tree_root(anchor_block_header) + + @with_all_phases @spec_state_test def test_genesis(spec, state): # Initialization store = spec.get_forkchoice_store(state) - genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) - assert spec.get_head(store) == spec.hash_tree_root(genesis_block) + anchor_root = get_anchor_root(spec, state) + assert spec.get_head(store) == anchor_root @with_all_phases @@ -44,8 +51,8 @@ def test_genesis(spec, state): def test_chain_no_attestations(spec, state): # Initialization store = spec.get_forkchoice_store(state) - genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) - assert spec.get_head(store) == spec.hash_tree_root(genesis_block) + anchor_root = get_anchor_root(spec, state) + assert spec.get_head(store) == anchor_root # On receiving a block of `GENESIS_SLOT + 1` slot block_1 = build_empty_block_for_next_slot(spec, state) @@ -67,8 +74,8 @@ def test_split_tie_breaker_no_attestations(spec, state): # Initialization store = spec.get_forkchoice_store(state) - genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) - assert spec.get_head(store) == spec.hash_tree_root(genesis_block) + anchor_root = get_anchor_root(spec, state) + assert spec.get_head(store) == anchor_root # block at slot 1 block_1_state = genesis_state.copy() @@ -95,8 +102,8 @@ def test_shorter_chain_but_heavier_weight(spec, state): # Initialization store = 
spec.get_forkchoice_store(state) - genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root()) - assert spec.get_head(store) == spec.hash_tree_root(genesis_block) + anchor_root = get_anchor_root(spec, state) + assert spec.get_head(store) == anchor_root # build longer tree long_state = genesis_state.copy() @@ -122,15 +129,14 @@ def test_shorter_chain_but_heavier_weight(spec, state): @spec_state_test def test_filtered_block_tree(spec, state): # Initialization - genesis_state_root = state.hash_tree_root() store = spec.get_forkchoice_store(state) - genesis_block = spec.BeaconBlock(state_root=genesis_state_root) + anchor_root = get_anchor_root(spec, state) # transition state past initial couple of epochs next_epoch(spec, state) next_epoch(spec, state) - assert spec.get_head(store) == spec.hash_tree_root(genesis_block) + assert spec.get_head(store) == anchor_root # fill in attestations for entire epoch, justifying the recent epoch prev_state, signed_blocks, state = next_epoch_with_attestations(spec, state, True, False) From e6f98eb041693ab68d635bd645352ac339125ab5 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 02:26:15 +0100 Subject: [PATCH 109/194] minor config style fixes --- configs/minimal.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 486803467..a8ea385e7 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -197,11 +197,11 @@ GASPRICE_ADJUSTMENT_COEFFICIENT: 8 RANDAO_PENALTY_EPOCHS: 2 # [customized] quicker for testing EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 4096 -# 2**11 (= 2,048) epochs, +# 2**11 (= 2,048) epochs EPOCHS_PER_CUSTODY_PERIOD: 2048 -# 2**11 (= 2,048) epochs, +# 2**11 (= 2,048) epochs CUSTODY_PERIOD_TO_RANDAO_PADDING: 2048 -# 2**7 (= 128) epochs, +# 2**7 (= 128) epochs MAX_REVEAL_LATENESS_DECREMENT: 128 # Max operations From 90bc1f558b7f4d7654add5917ab24d907edf12e7 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 
2020 02:26:29 +0100 Subject: [PATCH 110/194] Fix config loading problem --- deposit_contract/requirements-testing.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/deposit_contract/requirements-testing.txt b/deposit_contract/requirements-testing.txt index cbf6983c1..b8ebe3b84 100644 --- a/deposit_contract/requirements-testing.txt +++ b/deposit_contract/requirements-testing.txt @@ -2,4 +2,5 @@ eth-tester[py-evm]==0.1.0b39 git+https://github.com/vyperlang/vyper@1761-HOTFIX-v0.1.0-beta.13 web3==5.0.0b2 pytest==3.6.1 +../tests/core/config_helpers ../tests/core/pyspec From 8d0e1bda6e339322c89fbd57bd449e4a4644cb30 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 14 Jan 2020 02:32:26 +0100 Subject: [PATCH 111/194] downgrade deposit contract back to python 3.6, vyper syntax parsing is broken on v3.8 --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 23425a179..83a82f62e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -48,13 +48,13 @@ commands: description: "Restore the cache with deposit_contract keys" steps: - restore_cached_venv: - venv_name: v10-deposit-contract + venv_name: v11-deposit-contract reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} save_deposit_contract_cached_venv: description: Save a venv into a cache with deposit_contract keys" steps: - save_cached_venv: - venv_name: v10-deposit-contract + venv_name: v11-deposit-contract reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} venv_path: ./deposit_contract/venv jobs: @@ -134,7 +134,7 @@ jobs: command: make lint install_deposit_contract_test: docker: - - image: circleci/python:3.8 + - image: circleci/python:3.6 working_directory: ~/specs-repo steps: - restore_cache: @@ -146,7 +146,7 @@ jobs: - save_deposit_contract_cached_venv 
deposit_contract: docker: - - image: circleci/python:3.8 + - image: circleci/python:3.6 working_directory: ~/specs-repo steps: - restore_cache: From 3c07b2c954909d2c5f4d3b8ea35a22bd659103a4 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 15 Jan 2020 16:03:07 -0700 Subject: [PATCH 112/194] Make phase 0 fork choice more modular to more easily adopt for slight modifications in phase 1 --- Makefile | 2 +- scripts/build_spec.py | 11 ++- specs/phase0/fork-choice.md | 97 ++++++++++++------- specs/phase1/fork-choice.md | 52 ++++++++++ .../test/fork_choice/test_on_attestation.py | 11 ++- 5 files changed, 133 insertions(+), 40 deletions(-) create mode 100644 specs/phase1/fork-choice.md diff --git a/Makefile b/Makefile index b2ea88e2f..93f1a9bda 100644 --- a/Makefile +++ b/Makefile @@ -107,7 +107,7 @@ $(PY_SPEC_PHASE_0_TARGETS): $(PY_SPEC_PHASE_0_DEPS) python3 $(SCRIPT_DIR)/build_spec.py -p0 $(PHASE0_SPEC_DIR)/beacon-chain.md $(PHASE0_SPEC_DIR)/fork-choice.md $(PHASE0_SPEC_DIR)/validator.md $@ $(PY_SPEC_DIR)/eth2spec/phase1/spec.py: $(PY_SPEC_PHASE_1_DEPS) - python3 $(SCRIPT_DIR)/build_spec.py -p1 $(PHASE0_SPEC_DIR)/beacon-chain.md $(PHASE0_SPEC_DIR)/fork-choice.md $(PHASE1_SPEC_DIR)/custody-game.md $(PHASE1_SPEC_DIR)/beacon-chain.md $(PHASE1_SPEC_DIR)/fraud-proofs.md $(PHASE1_SPEC_DIR)/phase1-fork.md $@ + python3 $(SCRIPT_DIR)/build_spec.py -p1 $(PHASE0_SPEC_DIR)/beacon-chain.md $(PHASE0_SPEC_DIR)/fork-choice.md $(PHASE1_SPEC_DIR)/custody-game.md $(PHASE1_SPEC_DIR)/beacon-chain.md $(PHASE1_SPEC_DIR)/fraud-proofs.md $(PHASE1_SPEC_DIR)/fork-choice.md $(PHASE1_SPEC_DIR)/phase1-fork.md $@ # TODO: also build validator spec and light-client-sync diff --git a/scripts/build_spec.py b/scripts/build_spec.py index 1831cfa34..90e9b3fb4 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -230,6 +230,7 @@ def build_phase1_spec(phase0_beacon_sourcefile: str, phase1_custody_sourcefile: str, phase1_beacon_sourcefile: str, phase1_fraud_sourcefile: str, + 
phase1_fork_choice_sourcefile: str, phase1_fork_sourcefile: str, outfile: str=None) -> Optional[str]: all_sourcefiles = ( @@ -238,6 +239,7 @@ def build_phase1_spec(phase0_beacon_sourcefile: str, phase1_custody_sourcefile, phase1_beacon_sourcefile, phase1_fraud_sourcefile, + phase1_fork_choice_sourcefile, phase1_fork_sourcefile, ) all_spescs = [get_spec(spec) for spec in all_sourcefiles] @@ -267,8 +269,9 @@ If building phase 1: 3rd argument is input phase1/custody-game.md 4th argument is input phase1/beacon-chain.md 5th argument is input phase1/fraud-proofs.md - 6th argument is input phase1/phase1-fork.md - 7th argument is output spec.py + 6th argument is input phase1/fork-choice.md + 7th argument is input phase1/phase1-fork.md + 8th argument is output spec.py ''' parser = ArgumentParser(description=description) parser.add_argument("-p", "--phase", dest="phase", type=int, default=0, help="Build for phase #") @@ -281,13 +284,13 @@ If building phase 1: else: print(" Phase 0 requires spec, forkchoice, and v-guide inputs as well as an output file.") elif args.phase == 1: - if len(args.files) == 7: + if len(args.files) == 8: build_phase1_spec(*args.files) else: print( " Phase 1 requires input files as well as an output file:\n" "\t phase0: (beacon-chain.md, fork-choice.md)\n" - "\t phase1: (custody-game.md, beacon-chain.md, fraud-proofs.md, phase1-fork.md)\n" + "\t phase1: (custody-game.md, beacon-chain.md, fraud-proofs.md, fork-choice.md, phase1-fork.md)\n" "\t and output.py" ) else: diff --git a/specs/phase0/fork-choice.md b/specs/phase0/fork-choice.md index feab5bb7a..e2f24705e 100644 --- a/specs/phase0/fork-choice.md +++ b/specs/phase0/fork-choice.md @@ -24,6 +24,10 @@ - [`get_filtered_block_tree`](#get_filtered_block_tree) - [`get_head`](#get_head) - [`should_update_justified_checkpoint`](#should_update_justified_checkpoint) + - [`on_attestation` helpers](#on_attestation-helpers) + - [`validate_on_attestation`](#validate_on_attestation) + - 
[`store_target_checkpoint_state`](#store_target_checkpoint_state) + - [`update_latest_messages`](#update_latest_messages) - [Handlers](#handlers) - [`on_tick`](#on_tick) - [`on_block`](#on_block) @@ -257,6 +261,59 @@ def should_update_justified_checkpoint(store: Store, new_justified_checkpoint: C return True ``` +#### `on_attestation` helpers + +##### `validate_on_attestation` + +```python +def validate_on_attestation(store: Store, attestation: Attestation) -> None: + target = attestation.data.target + + # Attestations must be from the current or previous epoch + current_epoch = compute_epoch_at_slot(get_current_slot(store)) + # Use GENESIS_EPOCH for previous when genesis to avoid underflow + previous_epoch = current_epoch - 1 if current_epoch > GENESIS_EPOCH else GENESIS_EPOCH + assert target.epoch in [current_epoch, previous_epoch] + assert target.epoch == compute_epoch_at_slot(attestation.data.slot) + + # Attestations target be for a known block. If target block is unknown, delay consideration until the block is found + assert target.root in store.blocks + # Attestations cannot be from future epochs. If they are, delay consideration until the epoch arrives + assert get_current_slot(store) >= compute_start_slot_at_epoch(target.epoch) + + # Attestations must be for a known block. If block is unknown, delay consideration until the block is found + assert attestation.data.beacon_block_root in store.blocks + # Attestations must not be for blocks in the future. If not, the attestation should not be considered + assert store.blocks[attestation.data.beacon_block_root].slot <= attestation.data.slot + + # Attestations can only affect the fork choice of subsequent slots. + # Delay consideration in the fork choice until their slot is in the past. 
+ assert get_current_slot(store) >= attestation.data.slot + 1 +``` + +##### `store_target_checkpoint_state` + +```python +def store_target_checkpoint_state(store: Store, target: Checkpoint) -> None: + # Store target checkpoint state if not yet seen + if target not in store.checkpoint_states: + base_state = store.block_states[target.root].copy() + process_slots(base_state, compute_start_slot_at_epoch(target.epoch)) + store.checkpoint_states[target] = base_state +``` + +##### `update_latest_messages` + +```python +def update_latest_messages(store: Store, attesting_indices: Sequence[ValidatorIndex], attestation: Attestation) -> None: + target = attestation.data.target + beacon_block_root = attestation.data.beacon_block_root + for i in attesting_indices: + if i not in store.latest_messages or target.epoch > store.latest_messages[i].epoch: + store.latest_messages[i] = LatestMessage(epoch=target.epoch, root=beacon_block_root) +``` + + ### Handlers #### `on_tick` @@ -323,42 +380,14 @@ def on_attestation(store: Store, attestation: Attestation) -> None: An ``attestation`` that is asserted as invalid may be valid at a later time, consider scheduling it for later processing in such case. """ - target = attestation.data.target + validate_on_attestation(store, attestation) + store_target_checkpoint_state(store, attestation.data.target) - # Attestations must be from the current or previous epoch - current_epoch = compute_epoch_at_slot(get_current_slot(store)) - # Use GENESIS_EPOCH for previous when genesis to avoid underflow - previous_epoch = current_epoch - 1 if current_epoch > GENESIS_EPOCH else GENESIS_EPOCH - assert target.epoch in [current_epoch, previous_epoch] - assert target.epoch == compute_epoch_at_slot(attestation.data.slot) - - # Attestations target be for a known block. If target block is unknown, delay consideration until the block is found - assert target.root in store.blocks - # Attestations cannot be from future epochs. 
If they are, delay consideration until the epoch arrives - base_state = store.block_states[target.root].copy() - assert get_current_slot(store) >= compute_start_slot_at_epoch(target.epoch) - - # Attestations must be for a known block. If block is unknown, delay consideration until the block is found - assert attestation.data.beacon_block_root in store.blocks - # Attestations must not be for blocks in the future. If not, the attestation should not be considered - assert store.blocks[attestation.data.beacon_block_root].slot <= attestation.data.slot - - # Store target checkpoint state if not yet seen - if target not in store.checkpoint_states: - process_slots(base_state, compute_start_slot_at_epoch(target.epoch)) - store.checkpoint_states[target] = base_state - target_state = store.checkpoint_states[target] - - # Attestations can only affect the fork choice of subsequent slots. - # Delay consideration in the fork choice until their slot is in the past. - assert get_current_slot(store) >= attestation.data.slot + 1 - - # Get state at the `target` to validate attestation and calculate the committees + # Get state at the `target` to fully validate attestation + target_state = store.checkpoint_states[attestation.data.target] indexed_attestation = get_indexed_attestation(target_state, attestation) assert is_valid_indexed_attestation(target_state, indexed_attestation) - # Update latest messages - for i in indexed_attestation.attesting_indices: - if i not in store.latest_messages or target.epoch > store.latest_messages[i].epoch: - store.latest_messages[i] = LatestMessage(epoch=target.epoch, root=attestation.data.beacon_block_root) + # Update latest messages for attesting indices + update_latest_messages(store, indexed_attestation.attesting_indices, attestation) ``` diff --git a/specs/phase1/fork-choice.md b/specs/phase1/fork-choice.md new file mode 100644 index 000000000..d8bf7fa09 --- /dev/null +++ b/specs/phase1/fork-choice.md @@ -0,0 +1,52 @@ +# Ethereum 2.0 Phase 1 -- 
Beacon Chain Fork Choice + +**Notice**: This document is a work-in-progress for researchers and implementers. + +## Table of contents + + + + + +- [Introduction](#introduction) +- [Fork choice](#fork-choice) + - [Handlers](#handlers) + + + + +## Introduction + +This document is the beacon chain fork choice spec for part of Ethereum 2.0 Phase 1. + +## Fork choice + +Due to the changes in the structure of `IndexedAttestation` in Phase 1, `on_attestation` must be re-specified to handle this. The bulk of `on_attestation` has been moved out into a few helpers to reduce code duplication where possible. + +The rest of the fork choice remains stable. + +### Handlers + +```python +def on_attestation(store: Store, attestation: Attestation) -> None: + """ + Run ``on_attestation`` upon receiving a new ``attestation`` from either within a block or directly on the wire. + + An ``attestation`` that is asserted as invalid may be valid at a later time, + consider scheduling it for later processing in such case. 
+ """ + validate_on_attestation(store, attestation) + store_target_checkpoint_state(store, attestation.data.target) + + # Get state at the `target` to fully validate attestation + target_state = store.checkpoint_states[attestation.data.target] + indexed_attestation = get_indexed_attestation(target_state, attestation) + assert is_valid_indexed_attestation(target_state, indexed_attestation) + + # Update latest messages for attesting indices + attesting_indices = [ + index for i, index in enumerate(indexed_attestation.committee) + if attestation.aggregation_bits[i] + ] + update_latest_messages(store, attesting_indices, attestation) +``` \ No newline at end of file diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py index 0fa6809ab..a0a33ca50 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py @@ -15,8 +15,17 @@ def run_on_attestation(spec, state, store, attestation, valid=True): indexed_attestation = spec.get_indexed_attestation(state, attestation) spec.on_attestation(store, attestation) + + if spec.version == 'phase0': + sample_index = indexed_attestation.attesting_indices[0] + else: + attesting_indices = [ + index for i, index in enumerate(indexed_attestation.committee) + if attestation.aggregation_bits[i] + ] + sample_index = attesting_indices[0] assert ( - store.latest_messages[indexed_attestation.attesting_indices[0]] == + store.latest_messages[sample_index] == spec.LatestMessage( epoch=attestation.data.target.epoch, root=attestation.data.beacon_block_root, From c0b69e531f3d10aa5a90a5a6461b8178e7b62c0f Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 15 Jan 2020 17:43:11 -0700 Subject: [PATCH 113/194] cycle through committee indexes instead of through active shards when forming crosslinks --- specs/phase1/beacon-chain.md | 15 ++++++++++++--- 1 file changed, 12 
insertions(+), 3 deletions(-) diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 185d27f52..06b66eca5 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -413,6 +413,14 @@ def chunks_to_body_root(chunks: List[Bytes32, MAX_SHARD_BLOCK_CHUNKS]) -> Root: )) ``` +#### `compute_shard_from_committee_index` + +```python +def compute_shard_from_committee_index(state: BeaconState, index: CommitteeIndex, slot: Slot) -> Shard: + active_shards = get_active_shard_count(state) + return Shard((index + get_start_shard(state, slot)) % active_shards) +``` + ### Beacon state accessors #### `get_active_shard_count` @@ -501,8 +509,7 @@ def get_start_shard(state: BeaconState, slot: Slot) -> Shard: ```python def get_shard(state: BeaconState, attestation: Attestation) -> Shard: - active_shards = get_active_shard_count(state) - return Shard((attestation.data.index + get_start_shard(state, attestation.data.slot)) % active_shards) + return compute_shard_from_committee_index(state, attestation.data.index, attestation.data.slot) ``` #### `get_next_slot_for_shard` @@ -760,7 +767,9 @@ def process_crosslinks(state: BeaconState, block_body: BeaconBlockBody, attestations: Sequence[Attestation]) -> Set[Tuple[Shard, Root]]: winners: Set[Tuple[Shard, Root]] = set() - for shard in map(Shard, range(get_active_shard_count(state))): + committee_count = get_committee_count_at_slot(state, state.slot) + for committee_index in map(CommitteeIndex, range(committee_count)): + shard = compute_shard_from_committee_index(state, committee_index, state.slot) # All attestations in the block for this shard shard_attestations = [ attestation for attestation in attestations From 7a412534d94a8c0c66a0630450192f32fb4d1ca8 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 15 Jan 2020 18:16:09 -0700 Subject: [PATCH 114/194] remove test_shard_blocks (outdated) and reduce PERSISTENT_COMMITTEE_PERIOD in minimal config --- configs/minimal.yaml | 4 +- 
specs/phase1/beacon-chain.md | 1 + .../test/phase_1/sanity/test_shard_blocks.py | 177 ------------------ 3 files changed, 3 insertions(+), 179 deletions(-) delete mode 100644 tests/core/pyspec/eth2spec/test/phase_1/sanity/test_shard_blocks.py diff --git a/configs/minimal.yaml b/configs/minimal.yaml index a8ea385e7..03ffa90e3 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -87,8 +87,8 @@ SLOTS_PER_ETH1_VOTING_PERIOD: 16 SLOTS_PER_HISTORICAL_ROOT: 64 # 2**8 (= 256) epochs MIN_VALIDATOR_WITHDRAWABILITY_DELAY: 256 -# 2**11 (= 2,048) epochs -PERSISTENT_COMMITTEE_PERIOD: 2048 +# [customized] higher frequency of committee turnover and faster time to acceptable voluntary exit +PERSISTENT_COMMITTEE_PERIOD: 128 # [customized] fast catchup crosslinks MAX_EPOCHS_PER_CROSSLINK: 4 # 2**2 (= 4) epochs diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 06b66eca5..294459673 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -32,6 +32,7 @@ - [`pack_compact_validator`](#pack_compact_validator) - [`committee_to_compact_committee`](#committee_to_compact_committee) - [`chunks_to_body_root`](#chunks_to_body_root) + - [`compute_shard_from_committee_index`](#compute_shard_from_committee_index) - [Beacon state accessors](#beacon-state-accessors) - [`get_active_shard_count`](#get_active_shard_count) - [`get_online_validator_indices`](#get_online_validator_indices) diff --git a/tests/core/pyspec/eth2spec/test/phase_1/sanity/test_shard_blocks.py b/tests/core/pyspec/eth2spec/test/phase_1/sanity/test_shard_blocks.py deleted file mode 100644 index 43b0c56c2..000000000 --- a/tests/core/pyspec/eth2spec/test/phase_1/sanity/test_shard_blocks.py +++ /dev/null @@ -1,177 +0,0 @@ -from copy import deepcopy - -from eth2spec.test.helpers.phase1.shard_block import ( - build_empty_shard_block, - sign_shard_block, -) -from eth2spec.test.helpers.phase1.shard_state import ( - configure_shard_state, - shard_state_transition_and_sign_block, 
-) -from eth2spec.test.context import ( - always_bls, - expect_assertion_error, - spec_state_test, - with_all_phases_except, -) - - -@with_all_phases_except(['phase0']) -@spec_state_test -@always_bls -def test_process_empty_shard_block(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - block = build_empty_shard_block( - spec, - beacon_state, - shard_state, - slot=shard_state.slot + 1, - signed=True, - full_attestation=False, - ) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - - shard_state_transition_and_sign_block(spec, beacon_state, shard_state, block) - - yield 'blocks', [block] - yield 'post', shard_state - - -@with_all_phases_except(['phase0']) -@spec_state_test -@always_bls -def test_process_full_attestation_shard_block(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - block = build_empty_shard_block( - spec, - beacon_state, - shard_state, - slot=shard_state.slot + 1, - signed=True, - full_attestation=True, - ) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - - shard_state_transition_and_sign_block(spec, beacon_state, shard_state, block) - - yield 'blocks', [block] - yield 'post', shard_state - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_prev_slot_block_transition(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - # Go to clean slot - spec.process_shard_slots(shard_state, shard_state.slot + 1) - # Make a block for it - block = build_empty_shard_block(spec, beacon_state, shard_state, slot=shard_state.slot, signed=True) - # Transition to next slot, above block will not be invalid on top of new state. 
- spec.process_shard_slots(shard_state, shard_state.slot + 1) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - expect_assertion_error( - lambda: spec.shard_state_transition(beacon_state, shard_state, block) - ) - yield 'blocks', [block] - yield 'post', None - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_same_slot_block_transition(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - # Same slot on top of pre-state, but move out of slot 0 first. - spec.process_shard_slots(shard_state, shard_state.slot + 1) - block = build_empty_shard_block(spec, beacon_state, shard_state, slot=shard_state.slot, signed=True) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - - shard_state_transition_and_sign_block(spec, beacon_state, shard_state, block) - - yield 'blocks', [block] - yield 'post', shard_state - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_invalid_state_root(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - spec.process_shard_slots(shard_state, shard_state.slot + 1) - block = build_empty_shard_block(spec, beacon_state, shard_state, slot=shard_state.slot) - block.state_root = b'\x36' * 32 - sign_shard_block(spec, beacon_state, shard_state, block) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - expect_assertion_error( - lambda: spec.shard_state_transition(beacon_state, shard_state, block, validate_state_root=True) - ) - yield 'blocks', [block] - yield 'post', None - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_skipped_slots(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - block = build_empty_shard_block(spec, beacon_state, shard_state, slot=shard_state.slot + 3, signed=True) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - - shard_state_transition_and_sign_block(spec, beacon_state, shard_state, block) - - yield 'blocks', [block] - yield 
'post', shard_state - - assert shard_state.slot == block.slot - latest_block_header = deepcopy(shard_state.latest_block_header) - latest_block_header.state_root = shard_state.hash_tree_root() - assert latest_block_header.hash_tree_root() == block.hash_tree_root() - - -@with_all_phases_except(['phase0']) -@spec_state_test -def test_empty_shard_period_transition(spec, state): - beacon_state, shard_state = configure_shard_state(spec, state) - - # modify some of the deltas to ensure the period transition works properly - stub_delta = 10 - shard_state.newer_committee_positive_deltas[0] = stub_delta - shard_state.newer_committee_negative_deltas[0] = stub_delta - - slot = shard_state.slot + spec.SHARD_SLOTS_PER_EPOCH * spec.EPOCHS_PER_SHARD_PERIOD - beacon_state.slot = spec.compute_epoch_of_shard_slot(slot) * spec.SLOTS_PER_EPOCH - 4 - spec.process_slots(beacon_state, spec.compute_epoch_of_shard_slot(slot) * spec.SLOTS_PER_EPOCH) - - # all validators get slashed for not revealing keys - # undo this to allow for a block proposal - for index in range(len(beacon_state.validators)): - beacon_state.validators[index].slashed = False - block = build_empty_shard_block(spec, beacon_state, shard_state, slot=slot, signed=True) - - yield 'pre', shard_state - yield 'beacon_state', beacon_state - - shard_state_transition_and_sign_block(spec, beacon_state, shard_state, block) - - yield 'blocks', [block] - yield 'post', shard_state - - shard_state.older_committee_positive_deltas[0] == stub_delta - shard_state.older_committee_negative_deltas[0] == stub_delta - shard_state.newer_committee_positive_deltas[0] == 0 - shard_state.newer_committee_negative_deltas[0] == 0 From 01f7c6c1e23bc4d20f6b4f5d5e8bac68519d5eb6 Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 22 Jan 2020 18:09:40 +0100 Subject: [PATCH 115/194] separate deposit contract compiler and tester, pin compiler to python 3.7. 
Upgrade others to 3.8 --- .circleci/config.yml | 100 +++++++++++++----- Makefile | 35 +++--- deposit_contract/README.md | 16 ++- .../deposit_contract}/__init__.py | 0 .../deposit_contract/compile.py} | 4 +- .../compiler/deposit_contract/test_compile.py | 29 +++++ deposit_contract/compiler/requirements.txt | 7 ++ deposit_contract/compiler/setup.py | 10 ++ deposit_contract/requirements-testing.txt | 5 - .../deposit_contract}/__init__.py | 0 .../deposit_contract}/conftest.py | 27 +++-- .../deposit_contract}/test_deposit.py | 21 ++-- deposit_contract/tester/requirements.txt | 4 + deposit_contract/tester/setup.py | 9 ++ deposit_contract/tests/contracts/__init__.py | 0 .../tests/contracts/test_compile.py | 19 ---- deposit_contract/tests/contracts/utils.py | 16 --- tests/core/pyspec/setup.py | 1 + 18 files changed, 190 insertions(+), 113 deletions(-) rename deposit_contract/{contracts => compiler/deposit_contract}/__init__.py (100%) rename deposit_contract/{tool/compile_deposit_contract.py => compiler/deposit_contract/compile.py} (96%) create mode 100644 deposit_contract/compiler/deposit_contract/test_compile.py create mode 100644 deposit_contract/compiler/requirements.txt create mode 100644 deposit_contract/compiler/setup.py delete mode 100644 deposit_contract/requirements-testing.txt rename deposit_contract/{tests => tester/deposit_contract}/__init__.py (100%) rename deposit_contract/{tests/contracts => tester/deposit_contract}/conftest.py (81%) rename deposit_contract/{tests/contracts => tester/deposit_contract}/test_deposit.py (96%) create mode 100644 deposit_contract/tester/requirements.txt create mode 100644 deposit_contract/tester/setup.py delete mode 100644 deposit_contract/tests/contracts/__init__.py delete mode 100644 deposit_contract/tests/contracts/test_compile.py delete mode 100644 deposit_contract/tests/contracts/utils.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 45cb958a3..1c8aa4e80 100644 --- a/.circleci/config.yml +++ 
b/.circleci/config.yml @@ -35,32 +35,45 @@ commands: description: "Restore the cache with pyspec keys" steps: - restore_cached_venv: - venv_name: v7-pyspec + venv_name: v9-pyspec reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} save_pyspec_cached_venv: description: Save a venv into a cache with pyspec keys" steps: - save_cached_venv: - venv_name: v7-pyspec + venv_name: v9-pyspec reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} venv_path: ./tests/core/pyspec/venv - restore_deposit_contract_cached_venv: - description: "Restore the cache with deposit_contract keys" + restore_deposit_contract_compiler_cached_venv: + description: "Restore the venv from cache for the deposit contract compiler" steps: - restore_cached_venv: - venv_name: v9-deposit-contract - reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} - save_deposit_contract_cached_venv: - description: Save a venv into a cache with deposit_contract keys" + venv_name: v15-deposit-contract-compiler + reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }} + save_deposit_contract_compiler_cached_venv: + description: "Save the venv to cache for later use of the deposit contract compiler" steps: - save_cached_venv: - venv_name: v9-deposit-contract - reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/requirements-testing.txt" }} - venv_path: ./deposit_contract/venv + venv_name: v15-deposit-contract-compiler + reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }} + venv_path: ./deposit_contract/compiler/venv + restore_deposit_contract_tester_cached_venv: + description: "Restore the venv from cache for the deposit contract tester" + steps: + - restore_cached_venv: + venv_name: 
v15-deposit-contract-tester + reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} + save_deposit_contract_tester_cached_venv: + description: "Save the venv to cache for later use of the deposit contract tester" + steps: + - save_cached_venv: + venv_name: v15-deposit-contract-tester + reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} + venv_path: ./deposit_contract/tester/venv jobs: checkout_specs: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: # Restore git repo at point close to target branch/revision, to speed up checkout @@ -80,7 +93,7 @@ jobs: - ~/specs-repo install_pyspec_test: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: @@ -92,7 +105,7 @@ jobs: - save_pyspec_cached_venv test: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: @@ -114,7 +127,7 @@ jobs: command: sudo npm install -g doctoc && make check_toc codespell: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - checkout @@ -123,7 +136,7 @@ jobs: command: pip install codespell --user && make codespell lint: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: @@ -132,29 +145,54 @@ jobs: - run: name: Run linter command: make lint - install_deposit_contract_test: + install_deposit_contract_compiler: docker: - - image: circleci/python:3.6 + # The deposit contract compiler is pinned to python 3.7 because of the vyper version pin. 
+ - image: circleci/python:3.7 working_directory: ~/specs-repo steps: - restore_cache: key: v2-specs-repo-{{ .Branch }}-{{ .Revision }} - - restore_deposit_contract_cached_venv + - restore_deposit_contract_compiler_cached_venv - run: - name: Install deposit contract requirements - command: make install_deposit_contract_test - - save_deposit_contract_cached_venv - deposit_contract: + name: Install deposit contract compiler requirements + command: make install_deposit_contract_compiler + - save_deposit_contract_compiler_cached_venv + install_deposit_contract_tester: docker: - - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/specs-repo steps: - restore_cache: key: v2-specs-repo-{{ .Branch }}-{{ .Revision }} - - restore_deposit_contract_cached_venv + - restore_deposit_contract_tester_cached_venv + - run: + name: Install deposit contract tester requirements + command: make install_deposit_contract_tester + - save_deposit_contract_tester_cached_venv + test_compile_deposit_contract: + docker: + - image: circleci/python:3.7 + working_directory: ~/specs-repo + steps: + - restore_cache: + key: v2-specs-repo-{{ .Branch }}-{{ .Revision }} + - restore_deposit_contract_compiler_cached_venv + - run: + name: Run deposit contract compile test + command: make test_compile_deposit_contract + test_deposit_contract: + docker: + - image: circleci/python:3.8 + working_directory: ~/specs-repo + steps: + - restore_cache: + key: v2-specs-repo-{{ .Branch }}-{{ .Revision }} + - restore_deposit_contract_tester_cached_venv - run: name: Run deposit contract test command: make test_deposit_contract + workflows: version: 2.1 test_spec: @@ -171,9 +209,15 @@ workflows: - lint: requires: - test - - install_deposit_contract_test: + - install_deposit_contract_compiler: requires: - checkout_specs - - deposit_contract: + - test_compile_deposit_contract: requires: - - install_deposit_contract_test + - install_deposit_contract_compiler + - install_deposit_contract_tester: + 
requires: + - checkout_specs + - test_deposit_contract: + requires: + - install_deposit_contract_tester diff --git a/Makefile b/Makefile index e91a686f1..0d9fd9ae5 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,8 @@ TEST_LIBS_DIR = ./tests/core PY_SPEC_DIR = $(TEST_LIBS_DIR)/pyspec TEST_VECTOR_DIR = ./eth2.0-spec-tests/tests GENERATOR_DIR = ./tests/generators -DEPOSIT_CONTRACT_DIR = ./deposit_contract +DEPOSIT_CONTRACT_COMPILER_DIR = ./deposit_contract/compiler +DEPOSIT_CONTRACT_TESTER_DIR = ./deposit_contract/tester CONFIGS_DIR = ./configs # Collect a list of generator names @@ -35,7 +36,8 @@ COV_HTML_OUT=.htmlcov COV_INDEX_FILE=$(PY_SPEC_DIR)/$(COV_HTML_OUT)/index.html .PHONY: clean partial_clean all test citest lint generate_tests pyspec phase0 phase1 install_test open_cov \ - install_deposit_contract_test test_deposit_contract compile_deposit_contract check_toc + install_deposit_contract_tester test_deposit_contract install_deposit_contract_compiler \ + compile_deposit_contract test_compile_deposit_contract check_toc all: $(PY_SPEC_ALL_TARGETS) @@ -45,14 +47,16 @@ partial_clean: rm -rf $(GENERATOR_VENVS) rm -rf $(PY_SPEC_DIR)/.pytest_cache rm -rf $(PY_SPEC_ALL_TARGETS) - rm -rf $(DEPOSIT_CONTRACT_DIR)/.pytest_cache + rm -rf $(DEPOSIT_CONTRACT_COMPILER_DIR)/.pytest_cache + rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/.pytest_cache rm -rf $(PY_SPEC_DIR)/$(COV_HTML_OUT) rm -rf $(PY_SPEC_DIR)/.coverage rm -rf $(PY_SPEC_DIR)/test-reports clean: partial_clean rm -rf $(PY_SPEC_DIR)/venv - rm -rf $(DEPOSIT_CONTRACT_DIR)/venv + rm -rf $(DEPOSIT_CONTRACT_COMPILER_DIR)/venv + rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/venv # "make generate_tests" to run all generators generate_tests: $(PY_SPEC_ALL_TARGETS) $(GENERATOR_TARGETS) @@ -66,7 +70,7 @@ test: $(PY_SPEC_ALL_TARGETS) python -m pytest -n 4 --cov=eth2spec.phase0.spec --cov=eth2spec.phase1.spec --cov-report="html:$(COV_HTML_OUT)" --cov-branch eth2spec citest: $(PY_SPEC_ALL_TARGETS) - cd $(PY_SPEC_DIR); mkdir -p 
test-reports/eth2spec; . venv/bin/activate; \ + cd $(PY_SPEC_DIR); mkdir -p test-reports/eth2spec; . venv/bin/activate; export PYTHONPATH="./"; \ python -m pytest -n 4 --junitxml=test-reports/eth2spec/test_results.xml eth2spec open_cov: @@ -89,17 +93,24 @@ lint: $(PY_SPEC_ALL_TARGETS) && cd ./eth2spec && mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase0 \ && mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase1; -install_deposit_contract_test: $(PY_SPEC_ALL_TARGETS) - cd $(DEPOSIT_CONTRACT_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements-testing.txt - -compile_deposit_contract: - cd $(DEPOSIT_CONTRACT_DIR); . venv/bin/activate; \ - python tool/compile_deposit_contract.py contracts/validator_registration.vy; +install_deposit_contract_tester: $(PY_SPEC_ALL_TARGETS) + cd $(DEPOSIT_CONTRACT_TESTER_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements.txt test_deposit_contract: - cd $(DEPOSIT_CONTRACT_DIR); . venv/bin/activate; \ + cd $(DEPOSIT_CONTRACT_TESTER_DIR); . venv/bin/activate; \ python -m pytest . +install_deposit_contract_compiler: + cd $(DEPOSIT_CONTRACT_COMPILER_DIR); python3.7 -m venv venv; . venv/bin/activate; pip3.7 install -r requirements.txt + +compile_deposit_contract: + cd $(DEPOSIT_CONTRACT_COMPILER_DIR); . venv/bin/activate; \ + python3.7 deposit_contract/compile.py contracts/validator_registration.vy + +test_compile_deposit_contract: + cd $(DEPOSIT_CONTRACT_COMPILER_DIR); . venv/bin/activate; \ + python3.7 -m pytest . + # "make pyspec" to create the pyspec for all phases. 
pyspec: $(PY_SPEC_ALL_TARGETS) diff --git a/deposit_contract/README.md b/deposit_contract/README.md index 16779e777..e7ec591e5 100644 --- a/deposit_contract/README.md +++ b/deposit_contract/README.md @@ -5,7 +5,7 @@ Under the `eth2.0-specs` directory, execute: ```sh -make install_deposit_contract_test +make install_deposit_contract_tester ``` ## How to compile the contract? @@ -14,11 +14,25 @@ make install_deposit_contract_test make compile_deposit_contract ``` +The compiler dependencies can be installed with: + +```sh +make install_deposit_contract_compiler +``` + +Note that this requires python 3.7 to be installed. The pinned vyper version will not work on 3.8. + The ABI and bytecode will be updated at [`contracts/validator_registration.json`](./contracts/validator_registration.json). ## How to run tests? +For running the contract tests: ```sh make test_deposit_contract ``` + +For testing the compiler output against the expected formally-verified bytecode: +```sh +make test_compile_deposit_contract +``` diff --git a/deposit_contract/contracts/__init__.py b/deposit_contract/compiler/deposit_contract/__init__.py similarity index 100% rename from deposit_contract/contracts/__init__.py rename to deposit_contract/compiler/deposit_contract/__init__.py diff --git a/deposit_contract/tool/compile_deposit_contract.py b/deposit_contract/compiler/deposit_contract/compile.py similarity index 96% rename from deposit_contract/tool/compile_deposit_contract.py rename to deposit_contract/compiler/deposit_contract/compile.py index 58f974b8d..6d6781878 100644 --- a/deposit_contract/tool/compile_deposit_contract.py +++ b/deposit_contract/compiler/deposit_contract/compile.py @@ -2,9 +2,7 @@ import argparse import json import os -from vyper import ( - compiler, -) +from vyper import compiler DIR = os.path.dirname(__file__) diff --git a/deposit_contract/compiler/deposit_contract/test_compile.py b/deposit_contract/compiler/deposit_contract/test_compile.py new file mode 100644 index 
000000000..6922cf80c --- /dev/null +++ b/deposit_contract/compiler/deposit_contract/test_compile.py @@ -0,0 +1,29 @@ +from vyper import compiler + +import json +import os + +DIR = os.path.dirname(__file__) + + +def get_deposit_contract_code(): + file_path = os.path.join(DIR, '../../contracts/validator_registration.vy') + deposit_contract_code = open(file_path).read() + return deposit_contract_code + + +def get_deposit_contract_json(): + file_path = os.path.join(DIR, '../../contracts/validator_registration.json') + deposit_contract_json = open(file_path).read() + return json.loads(deposit_contract_json) + + +def test_compile_deposit_contract(): + compiled_deposit_contract_json = get_deposit_contract_json() + + deposit_contract_code = get_deposit_contract_code() + abi = compiler.mk_full_signature(deposit_contract_code) + bytecode = compiler.compile_code(deposit_contract_code)['bytecode'] + + assert abi == compiled_deposit_contract_json["abi"] + assert bytecode == compiled_deposit_contract_json["bytecode"] diff --git a/deposit_contract/compiler/requirements.txt b/deposit_contract/compiler/requirements.txt new file mode 100644 index 000000000..209d43012 --- /dev/null +++ b/deposit_contract/compiler/requirements.txt @@ -0,0 +1,7 @@ +# Vyper beta version used to generate the bytecode that was then formally verified. +# On top of this beta version, a later change was backported, and included in the formal verification: +# https://github.com/vyperlang/vyper/issues/1761 +# The resulting vyper version is pinned and maintained as protected branch. 
+git+https://github.com/vyperlang/vyper@1761-HOTFIX-v0.1.0-beta.13 + +pytest==3.6.1 diff --git a/deposit_contract/compiler/setup.py b/deposit_contract/compiler/setup.py new file mode 100644 index 000000000..add6d8043 --- /dev/null +++ b/deposit_contract/compiler/setup.py @@ -0,0 +1,10 @@ +from distutils.core import setup + +setup( + name='deposit_contract_compiler', + packages=['deposit_contract'], + package_dir={"": "."}, + python_requires="3.7", # pinned vyper compiler stops working after 3.7. See vyper issue 1835. + tests_requires=["pytest==3.6.1"], + install_requires=[], # see requirements.txt file +) diff --git a/deposit_contract/requirements-testing.txt b/deposit_contract/requirements-testing.txt deleted file mode 100644 index cbf6983c1..000000000 --- a/deposit_contract/requirements-testing.txt +++ /dev/null @@ -1,5 +0,0 @@ -eth-tester[py-evm]==0.1.0b39 -git+https://github.com/vyperlang/vyper@1761-HOTFIX-v0.1.0-beta.13 -web3==5.0.0b2 -pytest==3.6.1 -../tests/core/pyspec diff --git a/deposit_contract/tests/__init__.py b/deposit_contract/tester/deposit_contract/__init__.py similarity index 100% rename from deposit_contract/tests/__init__.py rename to deposit_contract/tester/deposit_contract/__init__.py diff --git a/deposit_contract/tests/contracts/conftest.py b/deposit_contract/tester/deposit_contract/conftest.py similarity index 81% rename from deposit_contract/tests/contracts/conftest.py rename to deposit_contract/tester/deposit_contract/conftest.py index d4c7da9aa..c20501b11 100644 --- a/deposit_contract/tests/contracts/conftest.py +++ b/deposit_contract/tester/deposit_contract/conftest.py @@ -1,8 +1,3 @@ -from random import ( - randint, -) -import re - import pytest import eth_tester @@ -10,17 +5,19 @@ from eth_tester import ( EthereumTester, PyEVMBackend, ) -from vyper import ( - compiler, -) from web3 import Web3 -from web3.providers.eth_tester import ( - EthereumTesterProvider, -) -from .utils import ( - get_deposit_contract_code, - 
get_deposit_contract_json, -) +from web3.providers.eth_tester import EthereumTesterProvider + +import json +import os + +DIR = os.path.dirname(__file__) + + +def get_deposit_contract_json(): + file_path = os.path.join(DIR, '../../contracts/validator_registration.json') + deposit_contract_json = open(file_path).read() + return json.loads(deposit_contract_json) # Constants diff --git a/deposit_contract/tests/contracts/test_deposit.py b/deposit_contract/tester/deposit_contract/test_deposit.py similarity index 96% rename from deposit_contract/tests/contracts/test_deposit.py rename to deposit_contract/tester/deposit_contract/test_deposit.py index 01586d070..5fa98e232 100644 --- a/deposit_contract/tests/contracts/test_deposit.py +++ b/deposit_contract/tester/deposit_contract/test_deposit.py @@ -1,23 +1,16 @@ -from random import ( - randint, -) - +from random import randint import pytest - import eth_utils -from tests.contracts.conftest import ( + +from eth2spec.phase0.spec import DepositData +from eth2spec.utils.ssz.ssz_typing import List +from eth2spec.utils.ssz.ssz_impl import hash_tree_root + +from deposit_contract.conftest import ( FULL_DEPOSIT_AMOUNT, MIN_DEPOSIT_AMOUNT, ) -from eth2spec.phase0.spec import ( - DepositData, -) -from eth2spec.utils.ssz.ssz_typing import List -from eth2spec.utils.ssz.ssz_impl import ( - hash_tree_root, -) - SAMPLE_PUBKEY = b'\x11' * 48 SAMPLE_WITHDRAWAL_CREDENTIALS = b'\x22' * 32 diff --git a/deposit_contract/tester/requirements.txt b/deposit_contract/tester/requirements.txt new file mode 100644 index 000000000..545894f9a --- /dev/null +++ b/deposit_contract/tester/requirements.txt @@ -0,0 +1,4 @@ +eth-tester[py-evm]>=0.3.0b1,<0.4 +web3==5.4.0 +pytest==3.6.1 +../../tests/core/pyspec diff --git a/deposit_contract/tester/setup.py b/deposit_contract/tester/setup.py new file mode 100644 index 000000000..7e14c343b --- /dev/null +++ b/deposit_contract/tester/setup.py @@ -0,0 +1,9 @@ +from distutils.core import setup + +setup( + 
name='deposit_contract_tester', + packages=['deposit_contract'], + package_dir={"": "."}, + tests_requires=[], + install_requires=[] # see requirements.txt file +) diff --git a/deposit_contract/tests/contracts/__init__.py b/deposit_contract/tests/contracts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/deposit_contract/tests/contracts/test_compile.py b/deposit_contract/tests/contracts/test_compile.py deleted file mode 100644 index fc732a6db..000000000 --- a/deposit_contract/tests/contracts/test_compile.py +++ /dev/null @@ -1,19 +0,0 @@ -from vyper import ( - compiler, -) - -from .utils import ( - get_deposit_contract_code, - get_deposit_contract_json, -) - - -def test_compile_deposit_contract(): - compiled_deposit_contract_json = get_deposit_contract_json() - - deposit_contract_code = get_deposit_contract_code() - abi = compiler.mk_full_signature(deposit_contract_code) - bytecode = compiler.compile_code(deposit_contract_code)['bytecode'] - - assert abi == compiled_deposit_contract_json["abi"] - assert bytecode == compiled_deposit_contract_json["bytecode"] diff --git a/deposit_contract/tests/contracts/utils.py b/deposit_contract/tests/contracts/utils.py deleted file mode 100644 index 12eac5832..000000000 --- a/deposit_contract/tests/contracts/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -import json -import os - -DIR = os.path.dirname(__file__) - - -def get_deposit_contract_code(): - file_path = os.path.join(DIR, './../../contracts/validator_registration.vy') - deposit_contract_code = open(file_path).read() - return deposit_contract_code - - -def get_deposit_contract_json(): - file_path = os.path.join(DIR, './../../contracts/validator_registration.json') - deposit_contract_json = open(file_path).read() - return json.loads(deposit_contract_json) diff --git a/tests/core/pyspec/setup.py b/tests/core/pyspec/setup.py index 5be0db7f8..0364ccfd8 100644 --- a/tests/core/pyspec/setup.py +++ b/tests/core/pyspec/setup.py @@ -11,5 +11,6 @@ setup( 
"py_ecc==2.0.0", "ssz==0.1.3", "dataclasses==0.6", + "pytest" ] ) From ebac60374571a758676a4575e82a34071583bb33 Mon Sep 17 00:00:00 2001 From: Jim McDonald Date: Wed, 22 Jan 2020 21:15:23 +0000 Subject: [PATCH 116/194] Update for renamed constant --- specs/phase0/validator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 75b9f9311..694bce993 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -128,7 +128,7 @@ To submit a deposit: ### Process deposit -Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `ETH1_DATA_VOTING_PERIOD` epochs (~1.7 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated. +Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `SLOTS_PER_ETH1_VOTING_PERIOD` slots (~1.7 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated. 
### Validator index From d9f62f9303cdebca198e60e6b8d0f7de72e88a9e Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 25 Jan 2020 00:43:43 +0100 Subject: [PATCH 117/194] Remerkleable - merkle tree based ssz for better and faster spec --- .circleci/config.yml | 12 +- .gitignore | 3 + deposit_contract/tester/requirements.txt | 1 + scripts/build_spec.py | 50 +- specs/phase1/beacon-chain.md | 132 +++-- tests/core/pyspec/eth2spec/debug/decode.py | 14 +- tests/core/pyspec/eth2spec/debug/encode.py | 13 +- .../pyspec/eth2spec/debug/random_value.py | 67 +-- .../core/pyspec/eth2spec/fuzzing/__init__.py | 0 tests/core/pyspec/eth2spec/fuzzing/decoder.py | 87 --- .../pyspec/eth2spec/fuzzing/test_decoder.py | 35 -- tests/core/pyspec/eth2spec/test/conftest.py | 1 + .../test/helpers/attester_slashings.py | 4 +- .../pyspec/eth2spec/test/helpers/block.py | 8 +- .../pyspec/eth2spec/test/helpers/custody.py | 15 +- .../pyspec/eth2spec/test/helpers/genesis.py | 3 +- .../test/helpers/phase1/shard_block.py | 6 +- .../test/helpers/proposer_slashings.py | 6 +- .../pyspec/eth2spec/test/helpers/state.py | 4 +- .../test_process_attester_slashing.py | 8 +- tests/core/pyspec/eth2spec/test/utils.py | 8 +- .../pyspec/eth2spec/utils/ssz/ssz_impl.py | 159 +----- .../pyspec/eth2spec/utils/ssz/ssz_typing.py | 526 +----------------- .../eth2spec/utils/ssz/test_ssz_impl.py | 264 --------- .../eth2spec/utils/ssz/test_ssz_typing.py | 233 -------- tests/core/pyspec/requirements.txt | 2 +- tests/core/pyspec/setup.py | 3 +- 27 files changed, 199 insertions(+), 1465 deletions(-) delete mode 100644 tests/core/pyspec/eth2spec/fuzzing/__init__.py delete mode 100644 tests/core/pyspec/eth2spec/fuzzing/decoder.py delete mode 100644 tests/core/pyspec/eth2spec/fuzzing/test_decoder.py delete mode 100644 tests/core/pyspec/eth2spec/utils/ssz/test_ssz_impl.py delete mode 100644 tests/core/pyspec/eth2spec/utils/ssz/test_ssz_typing.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 1c8aa4e80..bd77f2e5e 
100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -35,39 +35,39 @@ commands: description: "Restore the cache with pyspec keys" steps: - restore_cached_venv: - venv_name: v9-pyspec + venv_name: v17-pyspec reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} save_pyspec_cached_venv: description: Save a venv into a cache with pyspec keys" steps: - save_cached_venv: - venv_name: v9-pyspec + venv_name: v17-pyspec reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} venv_path: ./tests/core/pyspec/venv restore_deposit_contract_compiler_cached_venv: description: "Restore the venv from cache for the deposit contract compiler" steps: - restore_cached_venv: - venv_name: v15-deposit-contract-compiler + venv_name: v16-deposit-contract-compiler reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }} save_deposit_contract_compiler_cached_venv: description: "Save the venv to cache for later use of the deposit contract compiler" steps: - save_cached_venv: - venv_name: v15-deposit-contract-compiler + venv_name: v16-deposit-contract-compiler reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }} venv_path: ./deposit_contract/compiler/venv restore_deposit_contract_tester_cached_venv: description: "Restore the venv from cache for the deposit contract tester" steps: - restore_cached_venv: - venv_name: v15-deposit-contract-tester + venv_name: v17-deposit-contract-tester reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} save_deposit_contract_tester_cached_venv: description: "Save the venv to cache for later use of the deposit contract tester" steps: - save_cached_venv: - venv_name: v15-deposit-contract-tester + venv_name: v17-deposit-contract-tester reqs_checksum: cache-{{ checksum 
"tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} venv_path: ./deposit_contract/tester/venv jobs: diff --git a/.gitignore b/.gitignore index ff1f2d9f8..c4256032c 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,9 @@ tests/core/pyspec/eth2spec/phase1/spec.py # coverage reports .htmlcov .coverage +.coverage.* # local CI testing output tests/core/pyspec/test-reports + +*.egg-info diff --git a/deposit_contract/tester/requirements.txt b/deposit_contract/tester/requirements.txt index 545894f9a..e6acaf825 100644 --- a/deposit_contract/tester/requirements.txt +++ b/deposit_contract/tester/requirements.txt @@ -2,3 +2,4 @@ eth-tester[py-evm]>=0.3.0b1,<0.4 web3==5.4.0 pytest==3.6.1 ../../tests/core/pyspec +../../tests/core/config_helpers \ No newline at end of file diff --git a/scripts/build_spec.py b/scripts/build_spec.py index 1a67c4c31..fa351db2f 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -25,14 +25,14 @@ from dataclasses import ( from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( - boolean, Container, List, Vector, uint64, SSZType, + View, boolean, Container, List, Vector, uint64, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, ) from eth2spec.utils import bls from eth2spec.utils.hash_function import hash -SSZObject = TypeVar('SSZObject', bound=SSZType) +SSZObject = TypeVar('SSZObject', bound=View) ''' PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0 from eth2spec.config.apply_config import apply_constants_preset @@ -47,9 +47,8 @@ from dataclasses import ( from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( - SSZType, Container, List, Vector, ByteList, ByteVector, Bitlist, Bitvector, - Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, - uint64, uint8, bit, boolean, + View, boolean, Container, List, Vector, uint64, uint8, bit, + ByteVector, ByteList, Bytes1, Bytes4, 
Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, ) from eth2spec.utils import bls @@ -58,7 +57,7 @@ from eth2spec.utils.hash_function import hash SSZVariableName = str GeneralizedIndex = NewType('GeneralizedIndex', int) -SSZObject = TypeVar('SSZObject', bound=SSZType) +SSZObject = TypeVar('SSZObject', bound=View) ''' SUNDRY_CONSTANTS_FUNCTIONS = ''' def ceillog2(x: uint64) -> int: @@ -80,27 +79,40 @@ def hash(x: bytes) -> Bytes32: # type: ignore return hash_cache[x] -# Monkey patch validator compute committee code -_compute_committee = compute_committee -committee_cache: Dict[Tuple[Bytes32, Bytes32, int, int], Sequence[ValidatorIndex]] = {} +def cache_this(key_fn, value_fn): # type: ignore + cache_dict = {} # type: ignore + + def wrapper(*args, **kw): # type: ignore + key = key_fn(*args, **kw) + nonlocal cache_dict + if key not in cache_dict: + cache_dict[key] = value_fn(*args, **kw) + return cache_dict[key] + return wrapper -def compute_committee(indices: Sequence[ValidatorIndex], # type: ignore - seed: Bytes32, - index: int, - count: int) -> Sequence[ValidatorIndex]: - param_hash = (hash(b''.join(index.to_bytes(length=4, byteorder='little') for index in indices)), seed, index, count) +get_base_reward = cache_this( + lambda state, index: (state.validators.hash_tree_root(), state.slot), + get_base_reward) - if param_hash not in committee_cache: - committee_cache[param_hash] = _compute_committee(indices, seed, index, count) - return committee_cache[param_hash]''' +get_committee_count_at_slot = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + get_committee_count_at_slot) + +get_active_validator_indices = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + get_active_validator_indices) + +get_beacon_committee = cache_this( + lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), + get_beacon_committee)''' def objects_to_spec(functions: 
Dict[str, str], custom_types: Dict[str, str], constants: Dict[str, str], ssz_objects: Dict[str, str], - imports: Dict[str, str], + imports: str, version: str, ) -> str: """ @@ -201,7 +213,7 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: custom_types = combine_constants(custom_types0, custom_types1) constants = combine_constants(constants0, constants1) ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types) - return functions, custom_types, constants, ssz_objects + return SpecObject((functions, custom_types, constants, ssz_objects)) def dependency_order_spec(objs: SpecObject): diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 294459673..858e840b3 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -1,81 +1,73 @@ - - -**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - -- [Ethereum 2.0 Phase 1 -- The Beacon Chain for Shards](#ethereum-20-phase-1----the-beacon-chain-for-shards) - - [Table of contents](#table-of-contents) - - [Introduction](#introduction) - - [Custom types](#custom-types) - - [Configuration](#configuration) - - [Misc](#misc) - - [Updated containers](#updated-containers) - - [Extended `AttestationData`](#extended-attestationdata) - - [Extended `Attestation`](#extended-attestation) - - [Extended `PendingAttestation`](#extended-pendingattestation) - - [`IndexedAttestation`](#indexedattestation) - - [Extended `AttesterSlashing`](#extended-attesterslashing) - - [Extended `Validator`](#extended-validator) - - [Extended `BeaconBlockBody`](#extended-beaconblockbody) - - [Extended `BeaconBlock`](#extended-beaconblock) - - [Extended `SignedBeaconBlock`](#extended-signedbeaconblock) - - [Extended `BeaconState`](#extended-beaconstate) - - [New containers](#new-containers) - - [`ShardBlockWrapper`](#shardblockwrapper) - - [`ShardSignableHeader`](#shardsignableheader) - - [`ShardState`](#shardstate) - - 
[`ShardTransition`](#shardtransition) - - [`CompactCommittee`](#compactcommittee) - - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) - - [Helper functions](#helper-functions) - - [Misc](#misc-1) - - [`get_previous_slot`](#get_previous_slot) - - [`pack_compact_validator`](#pack_compact_validator) - - [`committee_to_compact_committee`](#committee_to_compact_committee) - - [`chunks_to_body_root`](#chunks_to_body_root) - - [`compute_shard_from_committee_index`](#compute_shard_from_committee_index) - - [Beacon state accessors](#beacon-state-accessors) - - [`get_active_shard_count`](#get_active_shard_count) - - [`get_online_validator_indices`](#get_online_validator_indices) - - [`get_shard_committee`](#get_shard_committee) - - [`get_shard_proposer_index`](#get_shard_proposer_index) - - [`get_light_client_committee`](#get_light_client_committee) - - [`get_indexed_attestation`](#get_indexed_attestation) - - [`get_updated_gasprice`](#get_updated_gasprice) - - [`get_start_shard`](#get_start_shard) - - [`get_shard`](#get_shard) - - [`get_next_slot_for_shard`](#get_next_slot_for_shard) - - [`get_offset_slots`](#get_offset_slots) - - [Predicates](#predicates) - - [Updated `is_valid_indexed_attestation`](#updated-is_valid_indexed_attestation) - - [Block processing](#block-processing) - - [Operations](#operations) - - [New Attestation processing](#new-attestation-processing) - - [`validate_attestation`](#validate_attestation) - - [`apply_shard_transition`](#apply_shard_transition) - - [`process_crosslink_for_shard`](#process_crosslink_for_shard) - - [`process_crosslinks`](#process_crosslinks) - - [`process_attestations`](#process_attestations) - - [New Attester slashing processing](#new-attester-slashing-processing) - - [Shard transition false positives](#shard-transition-false-positives) - - [Light client processing](#light-client-processing) - - [Epoch transition](#epoch-transition) - - [Custody game updates](#custody-game-updates) - - 
[Online-tracking](#online-tracking) - - [Light client committee updates](#light-client-committee-updates) - - - # Ethereum 2.0 Phase 1 -- The Beacon Chain for Shards **Notice**: This document is a work-in-progress for researchers and implementers. ## Table of contents - + + +**Table of Contents** -TODO +- [Introduction](#introduction) +- [Custom types](#custom-types) +- [Configuration](#configuration) + - [Misc](#misc) +- [Updated containers](#updated-containers) + - [Extended `AttestationData`](#extended-attestationdata) + - [Extended `Attestation`](#extended-attestation) + - [Extended `PendingAttestation`](#extended-pendingattestation) + - [`IndexedAttestation`](#indexedattestation) + - [Extended `AttesterSlashing`](#extended-attesterslashing) + - [Extended `Validator`](#extended-validator) + - [Extended `BeaconBlockBody`](#extended-beaconblockbody) + - [Extended `BeaconBlock`](#extended-beaconblock) + - [Extended `SignedBeaconBlock`](#extended-signedbeaconblock) + - [Extended `BeaconState`](#extended-beaconstate) +- [New containers](#new-containers) + - [`ShardBlockWrapper`](#shardblockwrapper) + - [`ShardSignableHeader`](#shardsignableheader) + - [`ShardState`](#shardstate) + - [`ShardTransition`](#shardtransition) + - [`CompactCommittee`](#compactcommittee) + - [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper) +- [Helper functions](#helper-functions) + - [Misc](#misc-1) + - [`get_previous_slot`](#get_previous_slot) + - [`pack_compact_validator`](#pack_compact_validator) + - [`committee_to_compact_committee`](#committee_to_compact_committee) + - [`chunks_to_body_root`](#chunks_to_body_root) + - [`compute_shard_from_committee_index`](#compute_shard_from_committee_index) + - [Beacon state accessors](#beacon-state-accessors) + - [`get_active_shard_count`](#get_active_shard_count) + - [`get_online_validator_indices`](#get_online_validator_indices) + - [`get_shard_committee`](#get_shard_committee) + - 
[`get_shard_proposer_index`](#get_shard_proposer_index) + - [`get_light_client_committee`](#get_light_client_committee) + - [`get_indexed_attestation`](#get_indexed_attestation) + - [`get_updated_gasprice`](#get_updated_gasprice) + - [`get_start_shard`](#get_start_shard) + - [`get_shard`](#get_shard) + - [`get_next_slot_for_shard`](#get_next_slot_for_shard) + - [`get_offset_slots`](#get_offset_slots) + - [Predicates](#predicates) + - [Updated `is_valid_indexed_attestation`](#updated-is_valid_indexed_attestation) + - [Block processing](#block-processing) + - [Operations](#operations) + - [New Attestation processing](#new-attestation-processing) + - [`validate_attestation`](#validate_attestation) + - [`apply_shard_transition`](#apply_shard_transition) + - [`process_crosslink_for_shard`](#process_crosslink_for_shard) + - [`process_crosslinks`](#process_crosslinks) + - [`process_attestations`](#process_attestations) + - [New Attester slashing processing](#new-attester-slashing-processing) + - [Shard transition false positives](#shard-transition-false-positives) + - [Light client processing](#light-client-processing) + - [Epoch transition](#epoch-transition) + - [Custody game updates](#custody-game-updates) + - [Online-tracking](#online-tracking) + - [Light client committee updates](#light-client-committee-updates) - + ## Introduction diff --git a/tests/core/pyspec/eth2spec/debug/decode.py b/tests/core/pyspec/eth2spec/debug/decode.py index da2682276..871748d54 100644 --- a/tests/core/pyspec/eth2spec/debug/decode.py +++ b/tests/core/pyspec/eth2spec/debug/decode.py @@ -1,21 +1,23 @@ from typing import Any from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( - SSZType, SSZValue, uint, Container, ByteList, List, boolean, - Vector, ByteVector + uint, Container, List, boolean, + Vector, ByteVector, ByteList ) -def decode(data: Any, typ: SSZType) -> SSZValue: +def decode(data: Any, typ): if issubclass(typ, (uint, boolean)): return 
typ(data) elif issubclass(typ, (List, Vector)): - return typ(decode(element, typ.elem_type) for element in data) - elif issubclass(typ, (ByteList, ByteVector)): + return typ(decode(element, typ.element_cls()) for element in data) + elif issubclass(typ, ByteVector): + return typ(bytes.fromhex(data[2:])) + elif issubclass(typ, ByteList): return typ(bytes.fromhex(data[2:])) elif issubclass(typ, Container): temp = {} - for field_name, field_type in typ.get_fields().items(): + for field_name, field_type in typ.fields().items(): temp[field_name] = decode(data[field_name], field_type) if field_name + "_hash_tree_root" in data: assert (data[field_name + "_hash_tree_root"][2:] == diff --git a/tests/core/pyspec/eth2spec/debug/encode.py b/tests/core/pyspec/eth2spec/debug/encode.py index 4857f7114..10bf4bedd 100644 --- a/tests/core/pyspec/eth2spec/debug/encode.py +++ b/tests/core/pyspec/eth2spec/debug/encode.py @@ -1,27 +1,30 @@ from eth2spec.utils.ssz.ssz_impl import hash_tree_root, serialize from eth2spec.utils.ssz.ssz_typing import ( uint, boolean, - Bitlist, Bitvector, Container + Bitlist, Bitvector, Container, Vector, List ) def encode(value, include_hash_tree_roots=False): if isinstance(value, uint): # Larger uints are boxed and the class declares their byte length - if value.type().byte_len > 8: + if value.__class__.type_byte_length() > 8: return str(int(value)) return int(value) elif isinstance(value, boolean): return value == 1 elif isinstance(value, (Bitlist, Bitvector)): return '0x' + serialize(value).hex() - elif isinstance(value, list): # normal python lists, ssz-List, Vector + elif isinstance(value, list): # normal python lists return [encode(element, include_hash_tree_roots) for element in value] - elif isinstance(value, bytes): # both bytes and ByteVector + elif isinstance(value, (List, Vector)): + return [encode(element, include_hash_tree_roots) for element in value] + elif isinstance(value, bytes): # bytes, ByteList, ByteVector return '0x' + value.hex() elif 
isinstance(value, Container): ret = {} - for field_value, field_name in zip(value, value.get_fields().keys()): + for field_name in value.fields().keys(): + field_value = getattr(value, field_name) ret[field_name] = encode(field_value, include_hash_tree_roots) if include_hash_tree_roots: ret[field_name + "_hash_tree_root"] = '0x' + hash_tree_root(field_value).hex() diff --git a/tests/core/pyspec/eth2spec/debug/random_value.py b/tests/core/pyspec/eth2spec/debug/random_value.py index 8055312d7..9fc3be978 100644 --- a/tests/core/pyspec/eth2spec/debug/random_value.py +++ b/tests/core/pyspec/eth2spec/debug/random_value.py @@ -1,9 +1,11 @@ from random import Random from enum import Enum +from typing import Type + from eth2spec.utils.ssz.ssz_typing import ( - SSZType, SSZValue, BasicValue, BasicType, uint, Container, ByteList, List, boolean, - Vector, ByteVector, Bitlist, Bitvector + View, BasicView, uint, Container, List, boolean, + Vector, ByteVector, ByteList, Bitlist, Bitvector ) # in bytes @@ -34,11 +36,11 @@ class RandomizationMode(Enum): def get_random_ssz_object(rng: Random, - typ: SSZType, + typ: Type[View], max_bytes_length: int, max_list_length: int, mode: RandomizationMode, - chaos: bool) -> SSZValue: + chaos: bool) -> View: """ Create an object for a given type, filled with random data. :param rng: The random number generator to use. 
@@ -56,26 +58,26 @@ def get_random_ssz_object(rng: Random, if mode == RandomizationMode.mode_nil_count: return typ(b'') elif mode == RandomizationMode.mode_max_count: - return typ(get_random_bytes_list(rng, min(max_bytes_length, typ.length))) + return typ(get_random_bytes_list(rng, min(max_bytes_length, typ.limit()))) elif mode == RandomizationMode.mode_one_count: - return typ(get_random_bytes_list(rng, min(1, typ.length))) + return typ(get_random_bytes_list(rng, min(1, typ.limit()))) elif mode == RandomizationMode.mode_zero: - return typ(b'\x00' * min(1, typ.length)) + return typ(b'\x00' * min(1, typ.limit())) elif mode == RandomizationMode.mode_max: - return typ(b'\xff' * min(1, typ.length)) + return typ(b'\xff' * min(1, typ.limit())) else: - return typ(get_random_bytes_list(rng, rng.randint(0, min(max_bytes_length, typ.length)))) - elif issubclass(typ, ByteVector): + return typ(get_random_bytes_list(rng, rng.randint(0, min(max_bytes_length, typ.limit())))) + if issubclass(typ, ByteVector): # Sanity, don't generate absurdly big random values # If a client is aiming to performance-test, they should create a benchmark suite. 
- assert typ.length <= max_bytes_length + assert typ.type_byte_length() <= max_bytes_length if mode == RandomizationMode.mode_zero: - return typ(b'\x00' * typ.length) + return typ(b'\x00' * typ.type_byte_length()) elif mode == RandomizationMode.mode_max: - return typ(b'\xff' * typ.length) + return typ(b'\xff' * typ.type_byte_length()) else: - return typ(get_random_bytes_list(rng, typ.length)) - elif issubclass(typ, BasicValue): + return typ(get_random_bytes_list(rng, typ.type_byte_length())) + elif issubclass(typ, (boolean, uint)): # Basic types if mode == RandomizationMode.mode_zero: return get_min_basic_value(typ) @@ -83,13 +85,14 @@ def get_random_ssz_object(rng: Random, return get_max_basic_value(typ) else: return get_random_basic_value(rng, typ) - elif issubclass(typ, Vector) or issubclass(typ, Bitvector): + elif issubclass(typ, (Vector, Bitvector)): + elem_type = typ.element_cls() if issubclass(typ, Vector) else boolean return typ( - get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, chaos) - for _ in range(typ.length) + get_random_ssz_object(rng, elem_type, max_bytes_length, max_list_length, mode, chaos) + for _ in range(typ.vector_length()) ) elif issubclass(typ, List) or issubclass(typ, Bitlist): - length = rng.randint(0, min(typ.length, max_list_length)) + length = rng.randint(0, min(typ.limit(), max_list_length)) if mode == RandomizationMode.mode_one_count: length = 1 elif mode == RandomizationMode.mode_max_count: @@ -97,19 +100,21 @@ def get_random_ssz_object(rng: Random, elif mode == RandomizationMode.mode_nil_count: length = 0 - if typ.length < length: # SSZ imposes a hard limit on lists, we can't put in more than that - length = typ.length + if typ.limit() < length: # SSZ imposes a hard limit on lists, we can't put in more than that + length = typ.limit() + elem_type = typ.element_cls() if issubclass(typ, List) else boolean return typ( - get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, 
chaos) + get_random_ssz_object(rng, elem_type, max_bytes_length, max_list_length, mode, chaos) for _ in range(length) ) elif issubclass(typ, Container): + fields = typ.fields() # Container return typ(**{ field_name: get_random_ssz_object(rng, field_type, max_bytes_length, max_list_length, mode, chaos) - for field_name, field_type in typ.get_fields().items() + for field_name, field_type in fields.items() }) else: raise Exception(f"Type not recognized: typ={typ}") @@ -119,31 +124,31 @@ def get_random_bytes_list(rng: Random, length: int) -> bytes: return bytes(rng.getrandbits(8) for _ in range(length)) -def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue: +def get_random_basic_value(rng: Random, typ) -> BasicView: if issubclass(typ, boolean): return typ(rng.choice((True, False))) elif issubclass(typ, uint): - assert typ.byte_len in UINT_BYTE_SIZES - return typ(rng.randint(0, 256 ** typ.byte_len - 1)) + assert typ.type_byte_length() in UINT_BYTE_SIZES + return typ(rng.randint(0, 256 ** typ.type_byte_length() - 1)) else: raise ValueError(f"Not a basic type: typ={typ}") -def get_min_basic_value(typ: BasicType) -> BasicValue: +def get_min_basic_value(typ) -> BasicView: if issubclass(typ, boolean): return typ(False) elif issubclass(typ, uint): - assert typ.byte_len in UINT_BYTE_SIZES + assert typ.type_byte_length() in UINT_BYTE_SIZES return typ(0) else: raise ValueError(f"Not a basic type: typ={typ}") -def get_max_basic_value(typ: BasicType) -> BasicValue: +def get_max_basic_value(typ) -> BasicView: if issubclass(typ, boolean): return typ(True) elif issubclass(typ, uint): - assert typ.byte_len in UINT_BYTE_SIZES - return typ(256 ** typ.byte_len - 1) + assert typ.type_byte_length() in UINT_BYTE_SIZES + return typ(256 ** typ.type_byte_length() - 1) else: raise ValueError(f"Not a basic type: typ={typ}") diff --git a/tests/core/pyspec/eth2spec/fuzzing/__init__.py b/tests/core/pyspec/eth2spec/fuzzing/__init__.py deleted file mode 100644 index 
e69de29bb..000000000 diff --git a/tests/core/pyspec/eth2spec/fuzzing/decoder.py b/tests/core/pyspec/eth2spec/fuzzing/decoder.py deleted file mode 100644 index 007e96845..000000000 --- a/tests/core/pyspec/eth2spec/fuzzing/decoder.py +++ /dev/null @@ -1,87 +0,0 @@ -from eth2spec.utils.ssz import ssz_typing as spec_ssz -import ssz - - -def translate_typ(typ) -> ssz.BaseSedes: - """ - Translates a spec type to a Py-SSZ type description (sedes). - :param typ: The spec type, a class. - :return: The Py-SSZ equivalent. - """ - if issubclass(typ, spec_ssz.Container): - return ssz.Container( - [translate_typ(field_typ) for field_name, field_typ in typ.get_fields().items()]) - elif issubclass(typ, spec_ssz.ByteVector): - return ssz.ByteVector(typ.length) - elif issubclass(typ, spec_ssz.ByteList): - return ssz.ByteList() - elif issubclass(typ, spec_ssz.Vector): - return ssz.Vector(translate_typ(typ.elem_type), typ.length) - elif issubclass(typ, spec_ssz.List): - return ssz.List(translate_typ(typ.elem_type), typ.length) - elif issubclass(typ, spec_ssz.Bitlist): - return ssz.Bitlist(typ.length) - elif issubclass(typ, spec_ssz.Bitvector): - return ssz.Bitvector(typ.length) - elif issubclass(typ, spec_ssz.boolean): - return ssz.boolean - elif issubclass(typ, spec_ssz.uint): - if typ.byte_len == 1: - return ssz.uint8 - elif typ.byte_len == 2: - return ssz.uint16 - elif typ.byte_len == 4: - return ssz.uint32 - elif typ.byte_len == 8: - return ssz.uint64 - elif typ.byte_len == 16: - return ssz.uint128 - elif typ.byte_len == 32: - return ssz.uint256 - else: - raise TypeError("invalid uint size") - else: - raise TypeError("Type not supported: {}".format(typ)) - - -def translate_value(value, typ): - """ - Translate a value output from Py-SSZ deserialization into the given spec type. 
- :param value: The PySSZ value - :param typ: The type from the spec to translate into - :return: the translated value - """ - if issubclass(typ, spec_ssz.uint): - if typ.byte_len == 1: - return spec_ssz.uint8(value) - elif typ.byte_len == 2: - return spec_ssz.uint16(value) - elif typ.byte_len == 4: - return spec_ssz.uint32(value) - elif typ.byte_len == 8: - return spec_ssz.uint64(value) - elif typ.byte_len == 16: - return spec_ssz.uint128(value) - elif typ.byte_len == 32: - return spec_ssz.uint256(value) - else: - raise TypeError("invalid uint size") - elif issubclass(typ, spec_ssz.List): - return [translate_value(elem, typ.elem_type) for elem in value] - elif issubclass(typ, spec_ssz.boolean): - return value - elif issubclass(typ, spec_ssz.Vector): - return typ(*(translate_value(elem, typ.elem_type) for elem in value)) - elif issubclass(typ, spec_ssz.Bitlist): - return typ(value) - elif issubclass(typ, spec_ssz.Bitvector): - return typ(value) - elif issubclass(typ, spec_ssz.ByteVector): - return typ(value) - elif issubclass(typ, spec_ssz.ByteList): - return value - if issubclass(typ, spec_ssz.Container): - return typ(**{f_name: translate_value(f_val, f_typ) for (f_val, (f_name, f_typ)) - in zip(value, typ.get_fields().items())}) - else: - raise TypeError("Type not supported: {}".format(typ)) diff --git a/tests/core/pyspec/eth2spec/fuzzing/test_decoder.py b/tests/core/pyspec/eth2spec/fuzzing/test_decoder.py deleted file mode 100644 index 9cabefb13..000000000 --- a/tests/core/pyspec/eth2spec/fuzzing/test_decoder.py +++ /dev/null @@ -1,35 +0,0 @@ -from eth2spec.fuzzing.decoder import translate_typ, translate_value -from eth2spec.phase0 import spec -from eth2spec.utils.ssz import ssz_impl as spec_ssz_impl -from random import Random -from eth2spec.debug import random_value - - -def test_decoder(): - rng = Random(123) - - # check these types only, Block covers a lot of operation types already. 
- for typ in [spec.Attestation, spec.BeaconState, spec.BeaconBlock]: - # create a random pyspec value - original = random_value.get_random_ssz_object(rng, typ, 100, 10, - mode=random_value.RandomizationMode.mode_random, - chaos=True) - # serialize it, using pyspec - pyspec_data = spec_ssz_impl.serialize(original) - # get the py-ssz type for it - block_sedes = translate_typ(typ) - # try decoding using the py-ssz type - raw_value = block_sedes.deserialize(pyspec_data) - - # serialize it using py-ssz - pyssz_data = block_sedes.serialize(raw_value) - # now check if the serialized form is equal. If so, we confirmed decoding and encoding to work. - assert pyspec_data == pyssz_data - - # now translate the py-ssz value in a pyspec-value - block = translate_value(raw_value, typ) - - # and see if the hash-tree-root of the original matches the hash-tree-root of the decoded & translated value. - original_hash_tree_root = spec_ssz_impl.hash_tree_root(original) - assert original_hash_tree_root == spec_ssz_impl.hash_tree_root(block) - assert original_hash_tree_root == block_sedes.get_hash_tree_root(raw_value) diff --git a/tests/core/pyspec/eth2spec/test/conftest.py b/tests/core/pyspec/eth2spec/test/conftest.py index 9e9252d2e..08cd850ab 100644 --- a/tests/core/pyspec/eth2spec/test/conftest.py +++ b/tests/core/pyspec/eth2spec/test/conftest.py @@ -1,6 +1,7 @@ from eth2spec.config import apply_config from eth2spec.test.context import reload_specs + # We import pytest only when it's present, i.e. when we are running tests. # The test-cases themselves can be generated without installing pytest. 
diff --git a/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py b/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py index 8a342dd4d..70654759e 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py +++ b/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py @@ -1,12 +1,10 @@ -from copy import deepcopy - from eth2spec.test.helpers.attestations import get_valid_attestation, sign_attestation def get_valid_attester_slashing(spec, state, signed_1=False, signed_2=False): attestation_1 = get_valid_attestation(spec, state, signed=signed_1) - attestation_2 = deepcopy(attestation_1) + attestation_2 = attestation_1.copy() attestation_2.data.target.root = b'\x01' * 32 if signed_2: diff --git a/tests/core/pyspec/eth2spec/test/helpers/block.py b/tests/core/pyspec/eth2spec/test/helpers/block.py index dda03cbf1..488e051bd 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/block.py +++ b/tests/core/pyspec/eth2spec/test/helpers/block.py @@ -1,5 +1,3 @@ -from copy import deepcopy - from eth2spec.test.helpers.keys import privkeys from eth2spec.utils import bls from eth2spec.utils.bls import only_with_bls @@ -16,7 +14,7 @@ def get_proposer_index_maybe(spec, state, slot, proposer_index=None): print("warning: block slot far away, and no proposer index manually given." 
" Signing block is slow due to transition for proposer index calculation.") # use stub state to get proposer index of future slot - stub_state = deepcopy(state) + stub_state = state.copy() spec.process_slots(stub_state, slot) proposer_index = spec.get_beacon_proposer_index(stub_state) return proposer_index @@ -72,9 +70,9 @@ def build_empty_block(spec, state, slot=None): empty_block = spec.BeaconBlock() empty_block.slot = slot empty_block.body.eth1_data.deposit_count = state.eth1_deposit_index - previous_block_header = deepcopy(state.latest_block_header) + previous_block_header = state.latest_block_header.copy() if previous_block_header.state_root == spec.Root(): - previous_block_header.state_root = state.hash_tree_root() + previous_block_header.state_root = hash_tree_root(state) empty_block.parent_root = hash_tree_root(previous_block_header) apply_randao_reveal(spec, state, empty_block) return empty_block diff --git a/tests/core/pyspec/eth2spec/test/helpers/custody.py b/tests/core/pyspec/eth2spec/test/helpers/custody.py index bcf2c199b..7c51675cd 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/custody.py +++ b/tests/core/pyspec/eth2spec/test/helpers/custody.py @@ -1,8 +1,10 @@ from eth2spec.test.helpers.keys import privkeys from eth2spec.utils import bls from eth2spec.utils.ssz.ssz_typing import Bitlist, ByteVector, Bitvector -from eth2spec.utils.ssz.ssz_impl import chunkify, pack, hash_tree_root +from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.merkle_minimal import get_merkle_tree, get_merkle_proof +from remerkleable.core import pack_bits_to_chunks +from remerkleable.tree import subtree_fill_to_contents, get_depth BYTES_PER_CHUNK = 32 @@ -119,10 +121,11 @@ def get_valid_custody_response(spec, state, bit_challenge, custody_data, challen data_branch = get_merkle_proof(data_tree, chunk_index) bitlist_chunk_index = chunk_index // BYTES_PER_CHUNK - bitlist_chunks = chunkify(pack(bit_challenge.chunk_bits)) - bitlist_tree = 
get_merkle_tree(bitlist_chunks, pad_to=spec.MAX_CUSTODY_CHUNKS // 256) - bitlist_chunk_branch = get_merkle_proof(bitlist_tree, chunk_index // 256) + \ - [len(bit_challenge.chunk_bits).to_bytes(32, "little")] + print(bitlist_chunk_index) + bitlist_chunk_nodes = pack_bits_to_chunks(bit_challenge.chunk_bits) + bitlist_tree = subtree_fill_to_contents(bitlist_chunk_nodes, get_depth(spec.MAX_CUSTODY_CHUNKS)) + print(bitlist_tree) + bitlist_chunk_branch = None # TODO; extract proof from merkle tree bitlist_chunk_index = chunk_index // 256 @@ -145,4 +148,4 @@ def get_custody_test_vector(bytelength): def get_custody_merkle_root(data): - return get_merkle_tree(chunkify(data))[-1][0] + return None # get_merkle_tree(chunkify(data))[-1][0] diff --git a/tests/core/pyspec/eth2spec/test/helpers/genesis.py b/tests/core/pyspec/eth2spec/test/helpers/genesis.py index d96b2ceb2..c60787b92 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/genesis.py +++ b/tests/core/pyspec/eth2spec/test/helpers/genesis.py @@ -1,4 +1,3 @@ -import copy from eth2spec.test.helpers.keys import pubkeys @@ -35,7 +34,7 @@ def create_genesis_state(spec, validator_balances, activation_threshold): # We "hack" in the initial validators, # as it is much faster than creating and processing genesis deposits for every single test case. 
- state.balances = copy.deepcopy(validator_balances) + state.balances = validator_balances state.validators = [build_mock_validator(spec, i, state.balances[i]) for i in range(len(validator_balances))] # Process genesis activations diff --git a/tests/core/pyspec/eth2spec/test/helpers/phase1/shard_block.py b/tests/core/pyspec/eth2spec/test/helpers/phase1/shard_block.py index 6e1fba8dc..6ef0cf79b 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/phase1/shard_block.py +++ b/tests/core/pyspec/eth2spec/test/helpers/phase1/shard_block.py @@ -1,5 +1,3 @@ -from copy import deepcopy - from eth2spec.test.helpers.keys import privkeys from eth2spec.utils import bls from eth2spec.utils.bls import only_with_bls @@ -32,12 +30,12 @@ def build_empty_shard_block(spec, if slot is None: slot = shard_state.slot - previous_beacon_header = deepcopy(beacon_state.latest_block_header) + previous_beacon_header = beacon_state.latest_block_header.copy() if previous_beacon_header.state_root == spec.Bytes32(): previous_beacon_header.state_root = beacon_state.hash_tree_root() beacon_block_root = hash_tree_root(previous_beacon_header) - previous_block_header = deepcopy(shard_state.latest_block_header) + previous_block_header = shard_state.latest_block_header.copy() if previous_block_header.state_root == spec.Bytes32(): previous_block_header.state_root = shard_state.hash_tree_root() parent_root = hash_tree_root(previous_block_header) diff --git a/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py b/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py index 59eaa56cf..79a0b9009 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py +++ b/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py @@ -1,5 +1,3 @@ -from copy import deepcopy - from eth2spec.test.helpers.block_header import sign_block_header from eth2spec.test.helpers.keys import pubkey_to_privkey @@ -14,9 +12,9 @@ def get_valid_proposer_slashing(spec, state, signed_1=False, signed_2=False): 
slot=slot, parent_root=b'\x33' * 32, state_root=b'\x44' * 32, - block_body_root=b'\x55' * 32, + body_root=b'\x55' * 32, ) - header_2 = deepcopy(header_1) + header_2 = header_1.copy() header_2.parent_root = b'\x99' * 32 if signed_1: diff --git a/tests/core/pyspec/eth2spec/test/helpers/state.py b/tests/core/pyspec/eth2spec/test/helpers/state.py index 059469f6c..aad329ff4 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/state.py +++ b/tests/core/pyspec/eth2spec/test/helpers/state.py @@ -1,5 +1,3 @@ -from copy import deepcopy - from eth2spec.test.context import expect_assertion_error from eth2spec.test.helpers.attestations import get_valid_attestation from eth2spec.test.helpers.block import sign_block, build_empty_block_for_next_slot, transition_unsigned_block @@ -61,7 +59,7 @@ def next_epoch_with_attestations(spec, fill_prev_epoch): assert state.slot % spec.SLOTS_PER_EPOCH == 0 - post_state = deepcopy(state) + post_state = state.copy() signed_blocks = [] for _ in range(spec.SLOTS_PER_EPOCH): block = build_empty_block_for_next_slot(spec, post_state) diff --git a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py index 4bd3a96b5..c2fada6ba 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py @@ -269,7 +269,7 @@ def test_att2_bad_replaced_index(spec, state): def test_att1_duplicate_index_normal_signed(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) - indices = attester_slashing.attestation_1.attesting_indices + indices = list(attester_slashing.attestation_1.attesting_indices) indices.pop(1) # remove an index, make room for the additional duplicate index. 
attester_slashing.attestation_1.attesting_indices = sorted(indices) @@ -289,7 +289,7 @@ def test_att1_duplicate_index_normal_signed(spec, state): def test_att2_duplicate_index_normal_signed(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=False) - indices = attester_slashing.attestation_2.attesting_indices + indices = list(attester_slashing.attestation_2.attesting_indices) indices.pop(2) # remove an index, make room for the additional duplicate index. attester_slashing.attestation_2.attesting_indices = sorted(indices) @@ -309,7 +309,7 @@ def test_att2_duplicate_index_normal_signed(spec, state): def test_att1_duplicate_index_double_signed(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) - indices = attester_slashing.attestation_1.attesting_indices + indices = list(attester_slashing.attestation_1.attesting_indices) indices.pop(1) # remove an index, make room for the additional duplicate index. indices.append(indices[2]) # add one of the indices a second time attester_slashing.attestation_1.attesting_indices = sorted(indices) @@ -324,7 +324,7 @@ def test_att1_duplicate_index_double_signed(spec, state): def test_att2_duplicate_index_double_signed(spec, state): attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=False) - indices = attester_slashing.attestation_2.attesting_indices + indices = list(attester_slashing.attestation_2.attesting_indices) indices.pop(1) # remove an index, make room for the additional duplicate index. 
indices.append(indices[2]) # add one of the indices a second time attester_slashing.attestation_2.attesting_indices = sorted(indices) diff --git a/tests/core/pyspec/eth2spec/test/utils.py b/tests/core/pyspec/eth2spec/test/utils.py index f02e4153b..12ff68443 100644 --- a/tests/core/pyspec/eth2spec/test/utils.py +++ b/tests/core/pyspec/eth2spec/test/utils.py @@ -1,6 +1,6 @@ from typing import Dict, Any from eth2spec.debug.encode import encode -from eth2spec.utils.ssz.ssz_typing import SSZValue +from eth2spec.utils.ssz.ssz_typing import View from eth2spec.utils.ssz.ssz_impl import serialize @@ -38,15 +38,15 @@ def vector_test(description: str = None): (key, value) = data if value is None: continue - if isinstance(value, SSZValue): + if isinstance(value, View): yield key, 'data', encode(value) yield key, 'ssz', serialize(value) elif isinstance(value, bytes): yield key, 'data', encode(value) yield key, 'ssz', value - elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]): + elif isinstance(value, list) and all([isinstance(el, (View, bytes)) for el in value]): for i, el in enumerate(value): - if isinstance(el, SSZValue): + if isinstance(el, View): yield f'{key}_{i}', 'data', encode(el) yield f'{key}_{i}', 'ssz', serialize(el) elif isinstance(el, bytes): diff --git a/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py b/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py index 113bcf169..34e6c4ee8 100644 --- a/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py +++ b/tests/core/pyspec/eth2spec/utils/ssz/ssz_impl.py @@ -1,157 +1,10 @@ -from ..merkle_minimal import merkleize_chunks -from ..hash_function import hash -from .ssz_typing import ( - SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bits, boolean, Container, List, ByteList, - Bitlist, Bitvector, uint, Bytes32 -) - -# SSZ Serialization -# ----------------------------- - -BYTES_PER_LENGTH_OFFSET = 4 +from remerkleable.core import View +from remerkleable.byte_arrays import Bytes32 
-def serialize_basic(value: SSZValue): - if isinstance(value, uint): - return value.to_bytes(value.type().byte_len, 'little') - elif isinstance(value, boolean): - if value: - return b'\x01' - else: - return b'\x00' - else: - raise Exception(f"Type not supported: {type(value)}") +def serialize(obj: View) -> bytes: + return obj.encode_bytes() -def deserialize_basic(value, typ: BasicType): - if issubclass(typ, uint): - return typ(int.from_bytes(value, 'little')) - elif issubclass(typ, boolean): - assert value in (b'\x00', b'\x01') - return typ(value == b'\x01') - else: - raise Exception(f"Type not supported: {typ}") - - -def is_zero(obj: SSZValue): - return type(obj).default() == obj - - -def serialize(obj: SSZValue): - if isinstance(obj, BasicValue): - return serialize_basic(obj) - elif isinstance(obj, Bitvector): - return obj.as_bytes() - elif isinstance(obj, Bitlist): - as_bytearray = list(obj.as_bytes()) - if len(obj) % 8 == 0: - as_bytearray.append(1) - else: - as_bytearray[len(obj) // 8] |= 1 << (len(obj) % 8) - return bytes(as_bytearray) - elif isinstance(obj, Series): - return encode_series(obj) - else: - raise Exception(f"Type not supported: {type(obj)}") - - -def encode_series(values: Series): - if isinstance(values, bytes): # ByteList and ByteVector are already like serialized output - return values - - # Recursively serialize - parts = [(v.type().is_fixed_size(), serialize(v)) for v in values] - - # Compute and check lengths - fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET - for (constant_size, serialized) in parts] - variable_lengths = [len(serialized) if not constant_size else 0 - for (constant_size, serialized) in parts] - - # Check if integer is not out of bounds (Python) - assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8) - - # Interleave offsets of variable-size parts with fixed-size parts. - # Avoid quadratic complexity in calculation of offsets. 
- offset = sum(fixed_lengths) - variable_parts = [] - fixed_parts = [] - for (constant_size, serialized) in parts: - if constant_size: - fixed_parts.append(serialized) - else: - fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little')) - variable_parts.append(serialized) - offset += len(serialized) - - # Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts - return b''.join(fixed_parts + variable_parts) - - -# SSZ Hash-tree-root -# ----------------------------- - - -def pack(values: Series): - if isinstance(values, bytes): # ByteList and ByteVector are already packed - return values - elif isinstance(values, Bits): - # packs the bits in bytes, left-aligned. - # Exclusive length delimiting bits for bitlists. - return values.as_bytes() - return b''.join([serialize_basic(value) for value in values]) - - -def chunkify(bytez): - # pad `bytez` to nearest 32-byte multiple - bytez += b'\x00' * (-len(bytez) % 32) - return [bytez[i:i + 32] for i in range(0, len(bytez), 32)] - - -def mix_in_length(root, length): - return hash(root + length.to_bytes(32, 'little')) - - -def is_bottom_layer_kind(typ: SSZType): - return ( - isinstance(typ, BasicType) or - (issubclass(typ, Elements) and isinstance(typ.elem_type, BasicType)) - ) - - -def item_length(typ: SSZType) -> int: - if issubclass(typ, BasicValue): - return typ.byte_len - else: - return 32 - - -def chunk_count(typ: SSZType) -> int: - # note that for lists, .length *on the type* describes the list limit. 
- if isinstance(typ, BasicType): - return 1 - elif issubclass(typ, Bits): - return (typ.length + 255) // 256 - elif issubclass(typ, Elements): - return (typ.length * item_length(typ.elem_type) + 31) // 32 - elif issubclass(typ, Container): - return len(typ.get_fields()) - else: - raise Exception(f"Type not supported: {typ}") - - -def hash_tree_root(obj: SSZValue) -> Bytes32: - if isinstance(obj, Series): - if is_bottom_layer_kind(obj.type()): - leaves = chunkify(pack(obj)) - else: - leaves = [hash_tree_root(value) for value in obj] - elif isinstance(obj, BasicValue): - leaves = chunkify(serialize_basic(obj)) - else: - raise Exception(f"Type not supported: {type(obj)}") - - if isinstance(obj, (List, ByteList, Bitlist)): - return Bytes32(mix_in_length(merkleize_chunks(leaves, limit=chunk_count(obj.type())), len(obj))) - else: - return Bytes32(merkleize_chunks(leaves)) +def hash_tree_root(obj: View) -> Bytes32: + return Bytes32(obj.get_backing().merkle_root()) diff --git a/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py b/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py index 823a4a8d5..e6536c748 100644 --- a/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py +++ b/tests/core/pyspec/eth2spec/utils/ssz/ssz_typing.py @@ -1,518 +1,8 @@ -from typing import Dict, Iterator, Iterable -import copy -from types import GeneratorType - - -class DefaultingTypeMeta(type): - def default(cls): - raise Exception("Not implemented") - - -class SSZType(DefaultingTypeMeta): - - def is_fixed_size(cls): - raise Exception("Not implemented") - - -class SSZValue(object, metaclass=SSZType): - - def type(self): - return self.__class__ - - -class BasicType(SSZType): - byte_len = 0 - - def is_fixed_size(cls): - return True - - -class BasicValue(int, SSZValue, metaclass=BasicType): - pass - - -class boolean(BasicValue): # can't subclass bool. - byte_len = 1 - - def __new__(cls, value: int): # int value, but can be any subclass of int (bool, Bit, Bool, etc...) 
- if value < 0 or value > 1: - raise ValueError(f"value {value} out of bounds for bit") - return super().__new__(cls, value) - - @classmethod - def default(cls): - return cls(0) - - def __bool__(self): - return self > 0 - - -# Alias for Bool -class bit(boolean): - pass - - -class uint(BasicValue, metaclass=BasicType): - - def __new__(cls, value: int): - if value < 0: - raise ValueError("unsigned types must not be negative") - if cls.byte_len and value.bit_length() > (cls.byte_len << 3): - raise ValueError("value out of bounds for uint{}".format(cls.byte_len * 8)) - return super().__new__(cls, value) - - def __add__(self, other): - return self.__class__(super().__add__(coerce_type_maybe(other, self.__class__, strict=True))) - - def __sub__(self, other): - return self.__class__(super().__sub__(coerce_type_maybe(other, self.__class__, strict=True))) - - @classmethod - def default(cls): - return cls(0) - - -class uint8(uint): - byte_len = 1 - - -# Alias for uint8 -class byte(uint8): - pass - - -class uint16(uint): - byte_len = 2 - - -class uint32(uint): - byte_len = 4 - - -class uint64(uint): - byte_len = 8 - - -class uint128(uint): - byte_len = 16 - - -class uint256(uint): - byte_len = 32 - - -def coerce_type_maybe(v, typ: SSZType, strict: bool = False): - v_typ = type(v) - # shortcut if it's already the type we are looking for - if v_typ == typ: - return v - elif isinstance(v, int): - if isinstance(v, uint): # do not coerce from one uintX to another uintY - if issubclass(typ, uint) and v.type().byte_len == typ.byte_len: - return typ(v) - # revert to default behavior below if-else. 
(ValueError/bare) - else: - return typ(v) - elif isinstance(v, (list, tuple)): - return typ(*v) - elif isinstance(v, (bytes, ByteVector, ByteList)): - return typ(v) - elif isinstance(v, GeneratorType): - return typ(v) - elif issubclass(typ, Container) and not isinstance(v, typ): - return typ(**{field_name: getattr(v, field_name) for field_name in typ.get_field_names()}) - - # just return as-is, Value-checkers will take care of it not being coerced, if we are not strict. - if strict and not isinstance(v, typ): - raise ValueError("Type coercion of {} to {} failed".format(v, typ)) - return v - - -class Series(SSZValue): - - def __iter__(self) -> Iterator[SSZValue]: - raise Exception("Not implemented") - - -# Note: importing ssz functionality locally, to avoid import loop - -class Container(Series, metaclass=SSZType): - - def __init__(self, **kwargs): - cls = self.__class__ - for f, t in cls.get_fields().items(): - if f not in kwargs: - setattr(self, f, t.default()) - else: - value = coerce_type_maybe(kwargs[f], t) - if not isinstance(value, t): - raise ValueError(f"Bad input for class {self.__class__}:" - f" field: {f} type: {t} value: {value} value type: {type(value)}") - setattr(self, f, value) - - def serialize(self): - from .ssz_impl import serialize - return serialize(self) - - def hash_tree_root(self): - from .ssz_impl import hash_tree_root - return hash_tree_root(self) - - def __setattr__(self, name, value): - if name not in self.__class__.__annotations__: - raise AttributeError("Cannot change non-existing SSZ-container attribute") - field_typ = self.__class__.__annotations__[name] - value = coerce_type_maybe(value, field_typ) - if not isinstance(value, field_typ): - raise ValueError(f"Cannot set field of {self.__class__}:" - f" field: {name} type: {field_typ} value: {value} value type: {type(value)}") - super().__setattr__(name, value) - - def __repr__(self): - return repr({field: (getattr(self, field) if hasattr(self, field) else 'unset') - for field in 
self.get_fields().keys()}) - - def __str__(self): - output = [f'{self.__class__.__name__}'] - for field in self.get_fields().keys(): - output.append(f' {field}: {getattr(self, field)}') - return "\n".join(output) - - def __eq__(self, other): - return self.hash_tree_root() == other.hash_tree_root() - - def __hash__(self): - return hash(self.hash_tree_root()) - - def copy(self): - return copy.deepcopy(self) - - @classmethod - def get_fields(cls) -> Dict[str, SSZType]: - if not hasattr(cls, '__annotations__'): # no container fields - return {} - return dict(cls.__annotations__) - - @classmethod - def get_field_names(cls) -> Iterable[str]: - if not hasattr(cls, '__annotations__'): # no container fields - return () - return list(cls.__annotations__.keys()) - - @classmethod - def default(cls): - return cls(**{f: t.default() for f, t in cls.get_fields().items()}) - - @classmethod - def is_fixed_size(cls): - return all(t.is_fixed_size() for t in cls.get_fields().values()) - - def __iter__(self) -> Iterator[SSZValue]: - return iter([getattr(self, field) for field in self.get_fields().keys()]) - - -class ParamsBase(Series): - _has_params = False - - def __new__(cls, *args, **kwargs): - if not cls._has_params: - raise Exception("cannot init bare type without params") - return super().__new__(cls, **kwargs) - - -class ParamsMeta(SSZType): - - def __new__(cls, class_name, parents, attrs): - out = type.__new__(cls, class_name, parents, attrs) - if hasattr(out, "_has_params") and getattr(out, "_has_params"): - for k, v in attrs.items(): - setattr(out, k, v) - return out - - def __getitem__(self, params): - o = self.__class__(self.__name__, (self,), self.attr_from_params(params)) - return o - - def __str__(self): - return f"{self.__name__}~{self.__class__.__name__}" - - def __repr__(self): - return f"{self.__name__}~{self.__class__.__name__}" - - def attr_from_params(self, p): - # single key params are valid too. Wrap them in a tuple. 
- params = p if isinstance(p, tuple) else (p,) - res = {'_has_params': True} - i = 0 - for (name, typ) in self.__annotations__.items(): - if hasattr(self.__class__, name): - res[name] = getattr(self.__class__, name) - else: - if i >= len(params): - i += 1 - continue - param = params[i] - if not isinstance(param, typ): - raise TypeError( - "cannot create parametrized class with param {} as {} of type {}".format(param, name, typ)) - res[name] = param - i += 1 - if len(params) != i: - raise TypeError("provided parameters {} mismatch required parameter count {}".format(params, i)) - return res - - def __subclasscheck__(self, subclass): - # check regular class system if we can, solves a lot of the normal cases. - if super().__subclasscheck__(subclass): - return True - # if they are not normal subclasses, they are of the same class. - # then they should have the same name - if subclass.__name__ != self.__name__: - return False - # If they do have the same name, they should also have the same params. 
- for name, typ in self.__annotations__.items(): - if hasattr(self, name) and hasattr(subclass, name) \ - and getattr(subclass, name) != getattr(self, name): - return False - return True - - def __instancecheck__(self, obj): - return self.__subclasscheck__(obj.__class__) - - -class ElementsType(ParamsMeta): - elem_type: SSZType - length: int - - -class Elements(ParamsBase, metaclass=ElementsType): - pass - - -class BaseList(list, Elements): - - def __init__(self, *args): - items = self.extract_args(*args) - - if not self.value_check(items): - raise ValueError(f"Bad input for class {self.__class__}: {items}") - super().__init__(items) - - @classmethod - def value_check(cls, value): - return all(isinstance(v, cls.elem_type) for v in value) and len(value) <= cls.length - - @classmethod - def extract_args(cls, *args): - x = list(args) - if len(x) == 1 and isinstance(x[0], (GeneratorType, list, tuple)): - x = list(x[0]) - x = [coerce_type_maybe(v, cls.elem_type) for v in x] - return x - - def __str__(self): - cls = self.__class__ - return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})" - - def __repr__(self): - cls = self.__class__ - return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})" - - def __getitem__(self, k) -> SSZValue: - if isinstance(k, int): # check if we are just doing a lookup, and not slicing - if k < 0: - raise IndexError(f"cannot get item in type {self.__class__} at negative index {k}") - if k > len(self): - raise IndexError(f"cannot get item in type {self.__class__}" - f" at out of bounds index {k}") - return super().__getitem__(k) - - def __setitem__(self, k, v): - if type(k) == slice: - if (k.start is not None and k.start < 0) or (k.stop is not None and k.stop > len(self)): - raise IndexError(f"cannot set item in type {self.__class__}" - f" at out of bounds slice {k} (to {v}, bound: {len(self)})") - super().__setitem__(k, [coerce_type_maybe(x, 
self.__class__.elem_type) for x in v]) - else: - if k < 0: - raise IndexError(f"cannot set item in type {self.__class__} at negative index {k} (to {v})") - if k > len(self): - raise IndexError(f"cannot set item in type {self.__class__}" - f" at out of bounds index {k} (to {v}, bound: {len(self)})") - super().__setitem__(k, coerce_type_maybe(v, self.__class__.elem_type, strict=True)) - - def append(self, v): - super().append(coerce_type_maybe(v, self.__class__.elem_type, strict=True)) - - def __iter__(self) -> Iterator[SSZValue]: - return super().__iter__() - - def last(self): - # be explicit about getting the last item, for the non-python readers, and negative-index safety - return self[len(self) - 1] - - -class BitElementsType(ElementsType): - elem_type: SSZType = boolean - length: int - - -class Bits(BaseList, metaclass=BitElementsType): - - def as_bytes(self): - as_bytearray = [0] * ((len(self) + 7) // 8) - for i in range(len(self)): - as_bytearray[i // 8] |= int(self[i]) << (i % 8) - return bytes(as_bytearray) - - -class Bitlist(Bits): - @classmethod - def is_fixed_size(cls): - return False - - @classmethod - def default(cls): - return cls() - - -class Bitvector(Bits): - - @classmethod - def extract_args(cls, *args): - if len(args) == 0: - return cls.default() - else: - return super().extract_args(*args) - - @classmethod - def value_check(cls, value): - # check length limit strictly - return len(value) == cls.length and super().value_check(value) - - @classmethod - def is_fixed_size(cls): - return True - - @classmethod - def default(cls): - return cls(0 for _ in range(cls.length)) - - -class List(BaseList): - - @classmethod - def default(cls): - return cls() - - @classmethod - def is_fixed_size(cls): - return False - - -class Vector(BaseList): - - @classmethod - def value_check(cls, value): - # check length limit strictly - return len(value) == cls.length and super().value_check(value) - - @classmethod - def default(cls): - return cls(cls.elem_type.default() 
for _ in range(cls.length)) - - @classmethod - def is_fixed_size(cls): - return cls.elem_type.is_fixed_size() - - def append(self, v): - # Deep-copy and other utils like to change the internals during work. - # Only complain if we had the right size. - if len(self) == self.__class__.length: - raise Exception("cannot modify vector length") - else: - super().append(v) - - def pop(self, *args): - raise Exception("cannot modify vector length") - - -class BytesType(ElementsType): - elem_type: SSZType = byte - length: int - - -class BaseBytes(bytes, Elements, metaclass=BytesType): - - def __new__(cls, *args) -> "BaseBytes": - extracted_val = cls.extract_args(*args) - if not cls.value_check(extracted_val): - raise ValueError(f"Bad input for class {cls}: {extracted_val}") - return super().__new__(cls, extracted_val) - - @classmethod - def extract_args(cls, *args): - x = args - if len(x) == 1 and isinstance(x[0], (GeneratorType, bytes, str)): - x = x[0] - if isinstance(x, bytes): # Includes BytesLike - return x - if isinstance(x, str): - if x[:2] == '0x': - return bytes.fromhex(x[2:]) - else: - return bytes.fromhex(x) - else: - return bytes(x) # E.g. GeneratorType put into bytes. 
- - @classmethod - def value_check(cls, value): - # check type and virtual length limit - return isinstance(value, bytes) and len(value) <= cls.length - - def __str__(self): - cls = self.__class__ - return f"{cls.__name__}[{cls.length}]: {self.hex()}" - - -class ByteList(BaseBytes): - - @classmethod - def default(cls): - return b'' - - @classmethod - def is_fixed_size(cls): - return False - - -class ByteVector(BaseBytes): - - @classmethod - def extract_args(cls, *args): - if len(args) == 0: - return cls.default() - else: - return super().extract_args(*args) - - @classmethod - def default(cls): - return b'\x00' * cls.length - - @classmethod - def value_check(cls, value): - # check length limit strictly - return len(value) == cls.length and super().value_check(value) - - @classmethod - def is_fixed_size(cls): - return True - - -# Helpers for common ByteVector types -Bytes1: BytesType = ByteVector[1] -Bytes4: BytesType = ByteVector[4] -Bytes8: BytesType = ByteVector[8] -Bytes32: BytesType = ByteVector[32] -Bytes48: BytesType = ByteVector[48] -Bytes96: BytesType = ByteVector[96] +# flake8: noqa +# Ignore linter: This module makes importing SSZ types easy, and hides away the underlying library from the spec. 
+ +from remerkleable.complex import Container, Vector, List +from remerkleable.basic import boolean, bit, uint, byte, uint8, uint16, uint32, uint64, uint128, uint256 +from remerkleable.bitfields import Bitvector, Bitlist +from remerkleable.byte_arrays import ByteVector, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, ByteList +from remerkleable.core import BasicView, View, TypeDef diff --git a/tests/core/pyspec/eth2spec/utils/ssz/test_ssz_impl.py b/tests/core/pyspec/eth2spec/utils/ssz/test_ssz_impl.py deleted file mode 100644 index 6166d14ee..000000000 --- a/tests/core/pyspec/eth2spec/utils/ssz/test_ssz_impl.py +++ /dev/null @@ -1,264 +0,0 @@ -from typing import Iterable -from .ssz_impl import serialize, hash_tree_root -from .ssz_typing import ( - bit, boolean, Container, List, Vector, ByteList, ByteVector, - Bitlist, Bitvector, - uint8, uint16, uint32, uint64, uint256, byte -) -from ..hash_function import hash as bytes_hash - -import pytest - - -class EmptyTestStruct(Container): - pass - - -class SingleFieldTestStruct(Container): - A: byte - - -class SmallTestStruct(Container): - A: uint16 - B: uint16 - - -class FixedTestStruct(Container): - A: uint8 - B: uint64 - C: uint32 - - -class VarTestStruct(Container): - A: uint16 - B: List[uint16, 1024] - C: uint8 - - -class ComplexTestStruct(Container): - A: uint16 - B: List[uint16, 128] - C: uint8 - D: ByteList[256] - E: VarTestStruct - F: Vector[FixedTestStruct, 4] - G: Vector[VarTestStruct, 2] - - -sig_test_data = [0 for i in range(96)] -for k, v in {0: 1, 32: 2, 64: 3, 95: 0xff}.items(): - sig_test_data[k] = v - - -def chunk(hex: str) -> str: - return (hex + ("00" * 32))[:64] # just pad on the right, to 32 bytes (64 hex chars) - - -def h(a: str, b: str) -> str: - return bytes_hash(bytes.fromhex(a) + bytes.fromhex(b)).hex() - - -# zero hashes, as strings, for -zero_hashes = [chunk("")] -for layer in range(1, 32): - zero_hashes.append(h(zero_hashes[layer - 1], zero_hashes[layer - 1])) - - -def merge(a: str, branch: 
Iterable[str]) -> str: - """ - Merge (out on left, branch on right) leaf a with branch items, branch is from bottom to top. - """ - out = a - for b in branch: - out = h(out, b) - return out - - -test_data = [ - ("bit F", bit(False), "00", chunk("00")), - ("bit T", bit(True), "01", chunk("01")), - ("boolean F", boolean(False), "00", chunk("00")), - ("boolean T", boolean(True), "01", chunk("01")), - ("bitvector TTFTFTFF", Bitvector[8](1, 1, 0, 1, 0, 1, 0, 0), "2b", chunk("2b")), - ("bitlist TTFTFTFF", Bitlist[8](1, 1, 0, 1, 0, 1, 0, 0), "2b01", h(chunk("2b"), chunk("08"))), - ("bitvector FTFT", Bitvector[4](0, 1, 0, 1), "0a", chunk("0a")), - ("bitlist FTFT", Bitlist[4](0, 1, 0, 1), "1a", h(chunk("0a"), chunk("04"))), - ("bitvector FTF", Bitvector[3](0, 1, 0), "02", chunk("02")), - ("bitlist FTF", Bitlist[3](0, 1, 0), "0a", h(chunk("02"), chunk("03"))), - ("bitvector TFTFFFTTFT", Bitvector[10](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c502", chunk("c502")), - ("bitlist TFTFFFTTFT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c506", h(chunk("c502"), chunk("0A"))), - ("bitvector TFTFFFTTFTFFFFTT", Bitvector[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1), - "c5c2", chunk("c5c2")), - ("bitlist TFTFFFTTFTFFFFTT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1), - "c5c201", h(chunk("c5c2"), chunk("10"))), - ("long bitvector", Bitvector[512](1 for i in range(512)), - "ff" * 64, h("ff" * 32, "ff" * 32)), - ("long bitlist", Bitlist[512](1), - "03", h(h(chunk("01"), chunk("")), chunk("01"))), - ("long bitlist", Bitlist[512](1 for i in range(512)), - "ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), chunk("0002"))), - ("odd bitvector", Bitvector[513](1 for i in range(513)), - "ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk("")))), - ("odd bitlist", Bitlist[513](1 for i in range(513)), - "ff" * 64 + "03", h(h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk(""))), chunk("0102"))), - ("uint8 00", uint8(0x00), "00", chunk("00")), - ("uint8 01", uint8(0x01), "01", 
chunk("01")), - ("uint8 ab", uint8(0xab), "ab", chunk("ab")), - ("byte 00", byte(0x00), "00", chunk("00")), - ("byte 01", byte(0x01), "01", chunk("01")), - ("byte ab", byte(0xab), "ab", chunk("ab")), - ("uint16 0000", uint16(0x0000), "0000", chunk("0000")), - ("uint16 abcd", uint16(0xabcd), "cdab", chunk("cdab")), - ("uint32 00000000", uint32(0x00000000), "00000000", chunk("00000000")), - ("uint32 01234567", uint32(0x01234567), "67452301", chunk("67452301")), - ("small (4567, 0123)", SmallTestStruct(A=0x4567, B=0x0123), "67452301", h(chunk("6745"), chunk("2301"))), - ("small [4567, 0123]::2", Vector[uint16, 2](uint16(0x4567), uint16(0x0123)), "67452301", chunk("67452301")), - ("uint32 01234567", uint32(0x01234567), "67452301", chunk("67452301")), - ("uint64 0000000000000000", uint64(0x00000000), "0000000000000000", chunk("0000000000000000")), - ("uint64 0123456789abcdef", uint64(0x0123456789abcdef), "efcdab8967452301", chunk("efcdab8967452301")), - ("sig", ByteVector[96](*sig_test_data), - "0100000000000000000000000000000000000000000000000000000000000000" - "0200000000000000000000000000000000000000000000000000000000000000" - "03000000000000000000000000000000000000000000000000000000000000ff", - h(h(chunk("01"), chunk("02")), - h("03000000000000000000000000000000000000000000000000000000000000ff", chunk("")))), - ("emptyTestStruct", EmptyTestStruct(), "", chunk("")), - ("singleFieldTestStruct", SingleFieldTestStruct(A=0xab), "ab", chunk("ab")), - ("uint16 list", List[uint16, 32](uint16(0xaabb), uint16(0xc0ad), uint16(0xeeff)), "bbaaadc0ffee", - h(h(chunk("bbaaadc0ffee"), chunk("")), chunk("03000000")) # max length: 32 * 2 = 64 bytes = 2 chunks - ), - ("uint32 list", List[uint32, 128](uint32(0xaabb), uint32(0xc0ad), uint32(0xeeff)), "bbaa0000adc00000ffee0000", - # max length: 128 * 4 = 512 bytes = 16 chunks - h(merge(chunk("bbaa0000adc00000ffee0000"), zero_hashes[0:4]), chunk("03000000")) - ), - ("uint256 list", List[uint256, 32](uint256(0xaabb), uint256(0xc0ad), 
uint256(0xeeff)), - "bbaa000000000000000000000000000000000000000000000000000000000000" - "adc0000000000000000000000000000000000000000000000000000000000000" - "ffee000000000000000000000000000000000000000000000000000000000000", - h(merge(h(h(chunk("bbaa"), chunk("adc0")), h(chunk("ffee"), chunk(""))), zero_hashes[2:5]), chunk("03000000")) - ), - ("uint256 list long", List[uint256, 128](i for i in range(1, 20)), - "".join([i.to_bytes(length=32, byteorder='little').hex() for i in range(1, 20)]), - h(merge( - h( - h( - h( - h(h(chunk("01"), chunk("02")), h(chunk("03"), chunk("04"))), - h(h(chunk("05"), chunk("06")), h(chunk("07"), chunk("08"))), - ), - h( - h(h(chunk("09"), chunk("0a")), h(chunk("0b"), chunk("0c"))), - h(h(chunk("0d"), chunk("0e")), h(chunk("0f"), chunk("10"))), - ) - ), - h( - h( - h(h(chunk("11"), chunk("12")), h(chunk("13"), chunk(""))), - zero_hashes[2] - ), - zero_hashes[3] - ) - ), - zero_hashes[5:7]), chunk("13000000")) # 128 chunks = 7 deep - ), - ("fixedTestStruct", FixedTestStruct(A=0xab, B=0xaabbccdd00112233, C=0x12345678), "ab33221100ddccbbaa78563412", - h(h(chunk("ab"), chunk("33221100ddccbbaa")), h(chunk("78563412"), chunk("")))), - ("varTestStruct nil", VarTestStruct(A=0xabcd, C=0xff), "cdab07000000ff", - h(h(chunk("cdab"), h(zero_hashes[6], chunk("00000000"))), h(chunk("ff"), chunk("")))), - ("varTestStruct empty", VarTestStruct(A=0xabcd, B=List[uint16, 1024](), C=0xff), "cdab07000000ff", - h(h(chunk("cdab"), h(zero_hashes[6], chunk("00000000"))), h(chunk("ff"), chunk("")))), # log2(1024*2/32)= 6 deep - ("varTestStruct some", VarTestStruct(A=0xabcd, B=List[uint16, 1024](1, 2, 3), C=0xff), - "cdab07000000ff010002000300", - h( - h( - chunk("cdab"), - h( - merge( - chunk("010002000300"), - zero_hashes[0:6] - ), - chunk("03000000") # length mix in - ) - ), - h(chunk("ff"), chunk("")) - )), - ("complexTestStruct", - ComplexTestStruct( - A=0xaabb, - B=List[uint16, 128](0x1122, 0x3344), - C=0xff, - D=ByteList[256](b"foobar"), - 
E=VarTestStruct(A=0xabcd, B=List[uint16, 1024](1, 2, 3), C=0xff), - F=Vector[FixedTestStruct, 4]( - FixedTestStruct(A=0xcc, B=0x4242424242424242, C=0x13371337), - FixedTestStruct(A=0xdd, B=0x3333333333333333, C=0xabcdabcd), - FixedTestStruct(A=0xee, B=0x4444444444444444, C=0x00112233), - FixedTestStruct(A=0xff, B=0x5555555555555555, C=0x44556677)), - G=Vector[VarTestStruct, 2]( - VarTestStruct(A=0xdead, B=List[uint16, 1024](1, 2, 3), C=0x11), - VarTestStruct(A=0xbeef, B=List[uint16, 1024](4, 5, 6), C=0x22)), - ), - "bbaa" - "47000000" # offset of B, []uint16 - "ff" - "4b000000" # offset of foobar - "51000000" # offset of E - "cc424242424242424237133713" - "dd3333333333333333cdabcdab" - "ee444444444444444433221100" - "ff555555555555555577665544" - "5e000000" # pointer to G - "22114433" # contents of B - "666f6f626172" # foobar - "cdab07000000ff010002000300" # contents of E - "08000000" "15000000" # [start G]: local offsets of [2]varTestStruct - "adde0700000011010002000300" - "efbe0700000022040005000600", - h( - h( - h( # A and B - chunk("bbaa"), - h(merge(chunk("22114433"), zero_hashes[0:3]), chunk("02000000")) # 2*128/32 = 8 chunks - ), - h( # C and D - chunk("ff"), - h(merge(chunk("666f6f626172"), zero_hashes[0:3]), chunk("06000000")) # 256/32 = 8 chunks - ) - ), - h( - h( # E and F - h(h(chunk("cdab"), h(merge(chunk("010002000300"), zero_hashes[0:6]), chunk("03000000"))), - h(chunk("ff"), chunk(""))), - h( - h( - h(h(chunk("cc"), chunk("4242424242424242")), h(chunk("37133713"), chunk(""))), - h(h(chunk("dd"), chunk("3333333333333333")), h(chunk("cdabcdab"), chunk(""))), - ), - h( - h(h(chunk("ee"), chunk("4444444444444444")), h(chunk("33221100"), chunk(""))), - h(h(chunk("ff"), chunk("5555555555555555")), h(chunk("77665544"), chunk(""))), - ), - ) - ), - h( # G and padding - h( - h(h(chunk("adde"), h(merge(chunk("010002000300"), zero_hashes[0:6]), chunk("03000000"))), - h(chunk("11"), chunk(""))), - h(h(chunk("efbe"), h(merge(chunk("040005000600"), 
zero_hashes[0:6]), chunk("03000000"))), - h(chunk("22"), chunk(""))), - ), - chunk("") - ) - ) - )) -] - - -@pytest.mark.parametrize("name, value, serialized, _", test_data) -def test_serialize(name, value, serialized, _): - assert serialize(value) == bytes.fromhex(serialized) - - -@pytest.mark.parametrize("name, value, _, root", test_data) -def test_hash_tree_root(name, value, _, root): - assert hash_tree_root(value) == bytes.fromhex(root) diff --git a/tests/core/pyspec/eth2spec/utils/ssz/test_ssz_typing.py b/tests/core/pyspec/eth2spec/utils/ssz/test_ssz_typing.py deleted file mode 100644 index 21613b922..000000000 --- a/tests/core/pyspec/eth2spec/utils/ssz/test_ssz_typing.py +++ /dev/null @@ -1,233 +0,0 @@ -from .ssz_typing import ( - SSZValue, SSZType, BasicValue, BasicType, Series, ElementsType, - Elements, bit, boolean, Container, List, Vector, ByteList, ByteVector, - byte, uint, uint8, uint16, uint32, uint64, uint128, uint256, - Bytes32, Bytes48 -) - - -def expect_value_error(fn, msg): - try: - fn() - raise AssertionError(msg) - except ValueError: - pass - - -def test_subclasses(): - for u in [uint, uint8, uint16, uint32, uint64, uint128, uint256]: - assert issubclass(u, uint) - assert issubclass(u, int) - assert issubclass(u, BasicValue) - assert issubclass(u, SSZValue) - assert isinstance(u, SSZType) - assert isinstance(u, BasicType) - assert issubclass(boolean, BasicValue) - assert isinstance(boolean, BasicType) - - for c in [Container, List, Vector, ByteList, ByteVector]: - assert issubclass(c, Series) - assert issubclass(c, SSZValue) - assert isinstance(c, SSZType) - assert not issubclass(c, BasicValue) - assert not isinstance(c, BasicType) - - for c in [List, Vector, ByteList, ByteVector]: - assert issubclass(c, Elements) - assert isinstance(c, ElementsType) - - -def test_basic_instances(): - for u in [uint, uint8, byte, uint16, uint32, uint64, uint128, uint256]: - v = u(123) - assert isinstance(v, uint) - assert isinstance(v, int) - assert 
isinstance(v, BasicValue) - assert isinstance(v, SSZValue) - - assert isinstance(boolean(True), BasicValue) - assert isinstance(boolean(False), BasicValue) - assert isinstance(bit(True), boolean) - assert isinstance(bit(False), boolean) - - -def test_basic_value_bounds(): - max = { - boolean: 2 ** 1, - bit: 2 ** 1, - uint8: 2 ** (8 * 1), - byte: 2 ** (8 * 1), - uint16: 2 ** (8 * 2), - uint32: 2 ** (8 * 4), - uint64: 2 ** (8 * 8), - uint128: 2 ** (8 * 16), - uint256: 2 ** (8 * 32), - } - for k, v in max.items(): - # this should work - assert k(v - 1) == v - 1 - # but we do not allow overflows - expect_value_error(lambda: k(v), "no overflows allowed") - - for k, _ in max.items(): - # this should work - assert k(0) == 0 - # but we do not allow underflows - expect_value_error(lambda: k(-1), "no underflows allowed") - - -def test_container(): - class Foo(Container): - a: uint8 - b: uint32 - - empty = Foo() - assert empty.a == uint8(0) - assert empty.b == uint32(0) - - assert issubclass(Foo, Container) - assert issubclass(Foo, SSZValue) - assert issubclass(Foo, Series) - - assert Foo.is_fixed_size() - x = Foo(a=uint8(123), b=uint32(45)) - assert x.a == 123 - assert x.b == 45 - assert isinstance(x.a, uint8) - assert isinstance(x.b, uint32) - assert x.type().is_fixed_size() - - class Bar(Container): - a: uint8 - b: List[uint8, 1024] - - assert not Bar.is_fixed_size() - - y = Bar(a=123, b=List[uint8, 1024](uint8(1), uint8(2))) - assert y.a == 123 - assert isinstance(y.a, uint8) - assert len(y.b) == 2 - assert isinstance(y.a, uint8) - assert isinstance(y.b, List[uint8, 1024]) - assert not y.type().is_fixed_size() - assert y.b[0] == 1 - v: List = y.b - assert v.type().elem_type == uint8 - assert v.type().length == 1024 - - y.a = 42 - try: - y.a = 256 # out of bounds - assert False - except ValueError: - pass - - try: - y.a = uint16(255) # within bounds, wrong type - assert False - except ValueError: - pass - - try: - y.not_here = 5 - assert False - except AttributeError: - 
pass - - -def test_list(): - typ = List[uint64, 128] - assert issubclass(typ, List) - assert issubclass(typ, SSZValue) - assert issubclass(typ, Series) - assert issubclass(typ, Elements) - assert isinstance(typ, ElementsType) - - assert not typ.is_fixed_size() - - assert len(typ()) == 0 # empty - assert len(typ(uint64(0))) == 1 # single arg - assert len(typ(uint64(i) for i in range(10))) == 10 # generator - assert len(typ(uint64(0), uint64(1), uint64(2))) == 3 # args - assert isinstance(typ(1, 2, 3, 4, 5)[4], uint64) # coercion - assert isinstance(typ(i for i in range(10))[9], uint64) # coercion in generator - - v = typ(uint64(0)) - v[0] = uint64(123) - assert v[0] == 123 - assert isinstance(v[0], uint64) - - assert isinstance(v, List) - assert isinstance(v, List[uint64, 128]) - assert isinstance(v, typ) - assert isinstance(v, SSZValue) - assert isinstance(v, Series) - assert issubclass(v.type(), Elements) - assert isinstance(v.type(), ElementsType) - - assert len(typ([i for i in range(10)])) == 10 # cast py list to SSZ list - - foo = List[uint32, 128](0 for i in range(128)) - foo[0] = 123 - foo[1] = 654 - foo[127] = 222 - assert sum(foo) == 999 - try: - foo[3] = 2 ** 32 # out of bounds - except ValueError: - pass - - try: - foo[3] = uint64(2 ** 32 - 1) # within bounds, wrong type - assert False - except ValueError: - pass - - try: - foo[128] = 100 - assert False - except IndexError: - pass - - try: - foo[-1] = 100 # valid in normal python lists - assert False - except IndexError: - pass - - try: - foo[128] = 100 # out of bounds - assert False - except IndexError: - pass - - -def test_bytesn_subclass(): - assert isinstance(ByteVector[32](b'\xab' * 32), Bytes32) - assert not isinstance(ByteVector[32](b'\xab' * 32), Bytes48) - assert issubclass(ByteVector[32](b'\xab' * 32).type(), Bytes32) - assert issubclass(ByteVector[32], Bytes32) - - class Root(Bytes32): - pass - - assert isinstance(Root(b'\xab' * 32), Bytes32) - assert not isinstance(Root(b'\xab' * 32), Bytes48) 
- assert issubclass(Root(b'\xab' * 32).type(), Bytes32) - assert issubclass(Root, Bytes32) - - assert not issubclass(Bytes48, Bytes32) - - assert len(Bytes32() + Bytes48()) == 80 - - -def test_uint_math(): - assert uint8(0) + uint8(uint32(16)) == uint8(16) # allow explicit casting to make invalid addition valid - - expect_value_error(lambda: uint8(0) - uint8(1), "no underflows allowed") - expect_value_error(lambda: uint8(1) + uint8(255), "no overflows allowed") - expect_value_error(lambda: uint8(0) + 256, "no overflows allowed") - expect_value_error(lambda: uint8(42) + uint32(123), "no mixed types") - expect_value_error(lambda: uint32(42) + uint8(123), "no mixed types") - - assert type(uint32(1234) + 56) == uint32 diff --git a/tests/core/pyspec/requirements.txt b/tests/core/pyspec/requirements.txt index 60acb5d35..01f1caed7 100644 --- a/tests/core/pyspec/requirements.txt +++ b/tests/core/pyspec/requirements.txt @@ -3,4 +3,4 @@ eth-typing>=2.1.0,<3.0.0 pycryptodome==3.9.4 py_ecc==2.0.0 dataclasses==0.6 -ssz==0.1.3 +remerkleable==0.1.10 diff --git a/tests/core/pyspec/setup.py b/tests/core/pyspec/setup.py index a196c27a3..319b86953 100644 --- a/tests/core/pyspec/setup.py +++ b/tests/core/pyspec/setup.py @@ -10,8 +10,7 @@ setup( "eth-typing>=2.1.0,<3.0.0", "pycryptodome==3.9.4", "py_ecc==2.0.0", - "ssz==0.1.3", "dataclasses==0.6", - "pytest" + "remerkleable==0.1.10", ] ) From 675d404c380c48629e0705b06a5ea1f53e884f30 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 25 Jan 2020 01:26:10 +0100 Subject: [PATCH 118/194] Package eth2spec for tooling and experimentation See tests/core/pyspec/README.md for usage description. 
This commit: - refactors config loading to be part of the pyspec package - updates requirements and main files to use new config loading - cleans up the build script - converts the build script to a distutil command - runs pyspec build as part of build package command - provides pyspecdev command to get editable spec python files --- .circleci/config.yml | 22 +- .gitignore | 6 +- Makefile | 63 +-- deposit_contract/tester/requirements.txt | 4 +- scripts/README.md | 32 -- scripts/__init__.py | 0 scripts/build_spec.py | 309 ------------ scripts/function_puller.py | 72 --- setup.py | 465 ++++++++++++++++++ tests/core/config_helpers/README.md | 19 - .../config_helpers/preset_loader/__init__.py | 0 .../config_helpers/preset_loader/loader.py | 27 - tests/core/config_helpers/requirements.txt | 1 - tests/core/config_helpers/setup.py | 9 - tests/core/pyspec/README.md | 33 +- tests/core/pyspec/eth2spec/config/README.md | 20 + .../pyspec/eth2spec/config/apply_config.py | 22 - .../pyspec/eth2spec/config/config_util.py | 44 ++ tests/core/pyspec/eth2spec/phase0/__init__.py | 0 tests/core/pyspec/eth2spec/phase1/__init__.py | 0 tests/core/pyspec/eth2spec/test/conftest.py | 4 +- tests/core/pyspec/requirements-testing.txt | 7 - tests/core/pyspec/requirements.txt | 6 - tests/core/pyspec/setup.py | 16 - tests/generators/epoch_processing/main.py | 9 +- .../epoch_processing/requirements.txt | 3 +- tests/generators/genesis/main.py | 7 +- tests/generators/genesis/requirements.txt | 3 +- tests/generators/operations/main.py | 9 +- tests/generators/operations/requirements.txt | 3 +- tests/generators/sanity/main.py | 11 +- tests/generators/sanity/requirements.txt | 3 +- tests/generators/shuffling/main.py | 8 +- tests/generators/shuffling/requirements.txt | 3 +- tests/generators/ssz_generic/requirements.txt | 3 +- tests/generators/ssz_static/main.py | 10 +- tests/generators/ssz_static/requirements.txt | 3 +- 37 files changed, 632 insertions(+), 624 deletions(-) delete mode 100644 
scripts/README.md delete mode 100644 scripts/__init__.py delete mode 100644 scripts/build_spec.py delete mode 100644 scripts/function_puller.py create mode 100644 setup.py delete mode 100644 tests/core/config_helpers/README.md delete mode 100644 tests/core/config_helpers/preset_loader/__init__.py delete mode 100644 tests/core/config_helpers/preset_loader/loader.py delete mode 100644 tests/core/config_helpers/requirements.txt delete mode 100644 tests/core/config_helpers/setup.py create mode 100644 tests/core/pyspec/eth2spec/config/README.md delete mode 100644 tests/core/pyspec/eth2spec/config/apply_config.py create mode 100644 tests/core/pyspec/eth2spec/config/config_util.py delete mode 100644 tests/core/pyspec/eth2spec/phase0/__init__.py delete mode 100644 tests/core/pyspec/eth2spec/phase1/__init__.py delete mode 100644 tests/core/pyspec/requirements-testing.txt delete mode 100644 tests/core/pyspec/requirements.txt delete mode 100644 tests/core/pyspec/setup.py diff --git a/.circleci/config.yml b/.circleci/config.yml index bd77f2e5e..6b243921f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -35,40 +35,40 @@ commands: description: "Restore the cache with pyspec keys" steps: - restore_cached_venv: - venv_name: v17-pyspec - reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} + venv_name: v19-pyspec + reqs_checksum: cache-{{ checksum "setup.py" }} save_pyspec_cached_venv: description: Save a venv into a cache with pyspec keys" steps: - save_cached_venv: - venv_name: v17-pyspec - reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }} - venv_path: ./tests/core/pyspec/venv + venv_name: v19-pyspec + reqs_checksum: cache-{{ checksum "setup.py" }} + venv_path: ./venv restore_deposit_contract_compiler_cached_venv: description: "Restore the venv from cache for the deposit contract compiler" steps: - 
restore_cached_venv: - venv_name: v16-deposit-contract-compiler + venv_name: v18-deposit-contract-compiler reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }} save_deposit_contract_compiler_cached_venv: description: "Save the venv to cache for later use of the deposit contract compiler" steps: - save_cached_venv: - venv_name: v16-deposit-contract-compiler + venv_name: v18-deposit-contract-compiler reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }} venv_path: ./deposit_contract/compiler/venv restore_deposit_contract_tester_cached_venv: description: "Restore the venv from cache for the deposit contract tester" steps: - restore_cached_venv: - venv_name: v17-deposit-contract-tester - reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} + venv_name: v18-deposit-contract-tester + reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} save_deposit_contract_tester_cached_venv: description: "Save the venv to cache for later use of the deposit contract tester" steps: - save_cached_venv: - venv_name: v17-deposit-contract-tester - reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} + venv_name: v18-deposit-contract-tester + reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} venv_path: ./deposit_contract/tester/venv jobs: checkout_specs: diff --git a/.gitignore b/.gitignore index c4256032c..bcd96f885 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ venv build/ output/ +dist/ eth2.0-spec-tests/ @@ -14,8 +15,8 @@ eth2.0-spec-tests/ .mypy_cache # Dynamically built from Markdown spec -tests/core/pyspec/eth2spec/phase0/spec.py -tests/core/pyspec/eth2spec/phase1/spec.py +tests/core/pyspec/eth2spec/phase0/ +tests/core/pyspec/eth2spec/phase1/ # coverage reports 
.htmlcov @@ -24,5 +25,6 @@ tests/core/pyspec/eth2spec/phase1/spec.py # local CI testing output tests/core/pyspec/test-reports +tests/core/pyspec/eth2spec/test_results.xml *.egg-info diff --git a/Makefile b/Makefile index c779c5e8e..efa776997 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ SSZ_DIR = ./ssz SCRIPT_DIR = ./scripts TEST_LIBS_DIR = ./tests/core PY_SPEC_DIR = $(TEST_LIBS_DIR)/pyspec -TEST_VECTOR_DIR = ./eth2.0-spec-tests/tests +TEST_VECTOR_DIR = ../eth2.0-spec-tests/tests GENERATOR_DIR = ./tests/generators DEPOSIT_CONTRACT_COMPILER_DIR = ./deposit_contract/compiler DEPOSIT_CONTRACT_TESTER_DIR = ./deposit_contract/tester @@ -18,24 +18,12 @@ GENERATOR_VENVS = $(patsubst $(GENERATOR_DIR)/%, $(GENERATOR_DIR)/%venv, $(GENER # To check generator matching: #$(info $$GENERATOR_TARGETS is [${GENERATOR_TARGETS}]) -PHASE0_SPEC_DIR = $(SPEC_DIR)/phase0 -PY_SPEC_PHASE_0_TARGETS = $(PY_SPEC_DIR)/eth2spec/phase0/spec.py -PY_SPEC_PHASE_0_DEPS = $(wildcard $(SPEC_DIR)/phase0/*.md) - -PHASE1_SPEC_DIR = $(SPEC_DIR)/phase1 -PY_SPEC_PHASE_1_TARGETS = $(PY_SPEC_DIR)/eth2spec/phase1/spec.py -PY_SPEC_PHASE_1_DEPS = $(wildcard $(SPEC_DIR)/phase1/*.md) - -PY_SPEC_ALL_DEPS = $(PY_SPEC_PHASE_0_DEPS) $(PY_SPEC_PHASE_1_DEPS) - -PY_SPEC_ALL_TARGETS = $(PY_SPEC_PHASE_0_TARGETS) $(PY_SPEC_PHASE_1_TARGETS) - -MARKDOWN_FILES = $(PY_SPEC_ALL_DEPS) $(wildcard $(SPEC_DIR)/*.md) $(wildcard $(SSZ_DIR)/*.md) $(wildcard $(SPEC_DIR)/networking/*.md) $(wildcard $(SPEC_DIR)/validator/*.md) +MARKDOWN_FILES = $(wildcard $(SPEC_DIR)/phase0/*.md) $(wildcard $(SPEC_DIR)/phase1/*.md) $(wildcard $(SSZ_DIR)/*.md) $(wildcard $(SPEC_DIR)/networking/*.md) $(wildcard $(SPEC_DIR)/validator/*.md) COV_HTML_OUT=.htmlcov COV_INDEX_FILE=$(PY_SPEC_DIR)/$(COV_HTML_OUT)/index.html -.PHONY: clean partial_clean all test citest lint generate_tests pyspec phase0 phase1 install_test open_cov \ +.PHONY: clean partial_clean all test citest lint generate_tests pyspec install_test open_cov \ install_deposit_contract_tester 
test_deposit_contract install_deposit_contract_compiler \ compile_deposit_contract test_compile_deposit_contract check_toc @@ -45,33 +33,43 @@ all: $(PY_SPEC_ALL_TARGETS) partial_clean: rm -rf $(TEST_VECTOR_DIR) rm -rf $(GENERATOR_VENVS) + rm -rf .pytest_cache + rm -f .coverage rm -rf $(PY_SPEC_DIR)/.pytest_cache - rm -rf $(PY_SPEC_ALL_TARGETS) rm -rf $(DEPOSIT_CONTRACT_COMPILER_DIR)/.pytest_cache rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/.pytest_cache + rm -rf $(PY_SPEC_DIR)/phase0 + rm -rf $(PY_SPEC_DIR)/phase1 rm -rf $(PY_SPEC_DIR)/$(COV_HTML_OUT) rm -rf $(PY_SPEC_DIR)/.coverage rm -rf $(PY_SPEC_DIR)/test-reports + rm -rf eth2spec.egg-info dist build + clean: partial_clean + rm -rf venv rm -rf $(PY_SPEC_DIR)/venv rm -rf $(DEPOSIT_CONTRACT_COMPILER_DIR)/venv rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/venv # "make generate_tests" to run all generators -generate_tests: $(PY_SPEC_ALL_TARGETS) $(GENERATOR_TARGETS) +generate_tests: $(GENERATOR_TARGETS) + +# "make pyspec" to create the pyspec for all phases. +pyspec: + . venv/bin/activate; python3 setup.py pyspecdev # installs the packages to run pyspec tests install_test: - cd $(PY_SPEC_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements-testing.txt; + python3 -m venv venv; . venv/bin/activate; pip3 install .[testing] .[linting] -test: $(PY_SPEC_ALL_TARGETS) - cd $(PY_SPEC_DIR); . venv/bin/activate; export PYTHONPATH="./"; \ +test: pyspec + . venv/bin/activate; cd $(PY_SPEC_DIR); \ python -m pytest -n 4 --cov=eth2spec.phase0.spec --cov=eth2spec.phase1.spec --cov-report="html:$(COV_HTML_OUT)" --cov-branch eth2spec -citest: $(PY_SPEC_ALL_TARGETS) - cd $(PY_SPEC_DIR); mkdir -p test-reports/eth2spec; . venv/bin/activate; export PYTHONPATH="./"; \ - python -m pytest -n 4 --junitxml=test-reports/eth2spec/test_results.xml eth2spec +citest: pyspec + mkdir -p tests/core/pyspec/test-reports/eth2spec; . 
venv/bin/activate; cd $(PY_SPEC_DIR); \ + python -m pytest -n 4 --junitxml=eth2spec/test_results.xml eth2spec open_cov: ((open "$(COV_INDEX_FILE)" || xdg-open "$(COV_INDEX_FILE)") &> /dev/null) & @@ -87,13 +85,13 @@ check_toc: $(MARKDOWN_FILES:=.toc) codespell: codespell . --skip ./.git -I .codespell-whitelist -lint: $(PY_SPEC_ALL_TARGETS) - cd $(PY_SPEC_DIR); . venv/bin/activate; \ +lint: pyspec + . venv/bin/activate; cd $(PY_SPEC_DIR); \ flake8 --ignore=E252,W504,W503 --max-line-length=120 ./eth2spec \ && cd ./eth2spec && mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase0 \ && mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase1; -install_deposit_contract_tester: $(PY_SPEC_ALL_TARGETS) +install_deposit_contract_tester: cd $(DEPOSIT_CONTRACT_TESTER_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements.txt test_deposit_contract: @@ -111,17 +109,6 @@ test_compile_deposit_contract: cd $(DEPOSIT_CONTRACT_COMPILER_DIR); . venv/bin/activate; \ python3.7 -m pytest . -# "make pyspec" to create the pyspec for all phases. 
-pyspec: $(PY_SPEC_ALL_TARGETS) - -$(PY_SPEC_PHASE_0_TARGETS): $(PY_SPEC_PHASE_0_DEPS) - python3 $(SCRIPT_DIR)/build_spec.py -p0 $(PHASE0_SPEC_DIR)/beacon-chain.md $(PHASE0_SPEC_DIR)/fork-choice.md $(PHASE0_SPEC_DIR)/validator.md $@ - -$(PY_SPEC_DIR)/eth2spec/phase1/spec.py: $(PY_SPEC_PHASE_1_DEPS) - python3 $(SCRIPT_DIR)/build_spec.py -p1 $(PHASE0_SPEC_DIR)/beacon-chain.md $(PHASE0_SPEC_DIR)/fork-choice.md $(PHASE1_SPEC_DIR)/custody-game.md $(PHASE1_SPEC_DIR)/beacon-chain.md $(PHASE1_SPEC_DIR)/fraud-proofs.md $(PHASE1_SPEC_DIR)/fork-choice.md $(PHASE1_SPEC_DIR)/phase1-fork.md $@ - -# TODO: also build validator spec and light-client-sync - CURRENT_DIR = ${CURDIR} # Runs a generator, identified by param 1 @@ -154,5 +141,5 @@ $(TEST_VECTOR_DIR)/: # For any generator, build it using the run_generator function. # (creation of output dir is a dependency) -gen_%: $(PY_SPEC_ALL_TARGETS) $(TEST_VECTOR_DIR) +gen_%: $(TEST_VECTOR_DIR) $(call run_generator,$*) diff --git a/deposit_contract/tester/requirements.txt b/deposit_contract/tester/requirements.txt index e6acaf825..8dadab0bc 100644 --- a/deposit_contract/tester/requirements.txt +++ b/deposit_contract/tester/requirements.txt @@ -1,5 +1,5 @@ eth-tester[py-evm]>=0.3.0b1,<0.4 web3==5.4.0 pytest==3.6.1 -../../tests/core/pyspec -../../tests/core/config_helpers \ No newline at end of file +# The eth2spec +../../ diff --git a/scripts/README.md b/scripts/README.md deleted file mode 100644 index 9d5849053..000000000 --- a/scripts/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Building pyspecs from specs.md - -The benefit of the particular spec design is that the given Markdown files can be converted to a `spec.py` file for the purposes of testing and linting. As a result, bugs are discovered and patched more quickly. - -Specs can be built from either a single Markdown document or multiple files that must be combined in a given order. 
Given 2 spec objects, `build_spec.combine_spec_objects` will combine them into a single spec object which, subsequently, can be converted into a `specs.py`. - -## Usage - -For usage of the spec builder, run `python3 -m build_spec --help`. - -## `@Labels` and inserts - -The functioning of the spec combiner is largely automatic in that given `spec0.md` and `spec1.md`, SSZ Objects will be extended and old functions will be overwritten. Extra functionality is provided for more granular control over how files are combined. In the event that only a small portion of code is to be added to an existing function, insert functionality is provided. This saves having to completely redefine the old function from `spec0.md` in `spec1.md`. This is done by marking where the change is to occur in the old file and marking which code is to be inserted in the new file. This is done as follows: - -* In the old file, a label is added as a Python comment marking where the code is to be inserted. This would appear as follows in `spec0.md`: - -```python -def foo(x): - x << 1 - # @YourLabelHere - return x -``` - -* In spec1, the new code can then be inserted by having a code-block that looks as follows: - -```python -#begin insert @YourLabelHere - x += x -#end insert @YourLabelHere -``` - -*Note*: The code to be inserted has the **same level of indentation** as the surrounding code of its destination insert point. 
diff --git a/scripts/__init__.py b/scripts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/scripts/build_spec.py b/scripts/build_spec.py deleted file mode 100644 index fa351db2f..000000000 --- a/scripts/build_spec.py +++ /dev/null @@ -1,309 +0,0 @@ -import re -from function_puller import ( - get_spec, - SpecObject, -) -from argparse import ArgumentParser -from typing import ( - Dict, - Optional, -) - -CONFIG_LOADER = ''' -apply_constants_preset(globals()) -''' - -PHASE0_IMPORTS = '''from eth2spec.config.apply_config import apply_constants_preset -from typing import ( - Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar -) - -from dataclasses import ( - dataclass, - field, -) - -from eth2spec.utils.ssz.ssz_impl import hash_tree_root -from eth2spec.utils.ssz.ssz_typing import ( - View, boolean, Container, List, Vector, uint64, - Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, -) -from eth2spec.utils import bls - -from eth2spec.utils.hash_function import hash - -SSZObject = TypeVar('SSZObject', bound=View) -''' -PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0 -from eth2spec.config.apply_config import apply_constants_preset -from typing import ( - Any, Callable, Dict, Set, Sequence, NewType, Optional, Tuple, TypeVar -) - -from dataclasses import ( - dataclass, - field, -) - -from eth2spec.utils.ssz.ssz_impl import hash_tree_root -from eth2spec.utils.ssz.ssz_typing import ( - View, boolean, Container, List, Vector, uint64, uint8, bit, - ByteVector, ByteList, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, -) -from eth2spec.utils import bls - -from eth2spec.utils.hash_function import hash - - -SSZVariableName = str -GeneralizedIndex = NewType('GeneralizedIndex', int) -SSZObject = TypeVar('SSZObject', bound=View) -''' -SUNDRY_CONSTANTS_FUNCTIONS = ''' -def ceillog2(x: uint64) -> int: - return (x - 1).bit_length() -''' -SUNDRY_FUNCTIONS = ''' -# Monkey patch hash cache 
-_hash = hash -hash_cache: Dict[bytes, Bytes32] = {} - - -def get_eth1_data(distance: uint64) -> Bytes32: - return hash(distance) - - -def hash(x: bytes) -> Bytes32: # type: ignore - if x not in hash_cache: - hash_cache[x] = Bytes32(_hash(x)) - return hash_cache[x] - - -def cache_this(key_fn, value_fn): # type: ignore - cache_dict = {} # type: ignore - - def wrapper(*args, **kw): # type: ignore - key = key_fn(*args, **kw) - nonlocal cache_dict - if key not in cache_dict: - cache_dict[key] = value_fn(*args, **kw) - return cache_dict[key] - return wrapper - - -get_base_reward = cache_this( - lambda state, index: (state.validators.hash_tree_root(), state.slot), - get_base_reward) - -get_committee_count_at_slot = cache_this( - lambda state, epoch: (state.validators.hash_tree_root(), epoch), - get_committee_count_at_slot) - -get_active_validator_indices = cache_this( - lambda state, epoch: (state.validators.hash_tree_root(), epoch), - get_active_validator_indices) - -get_beacon_committee = cache_this( - lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), - get_beacon_committee)''' - - -def objects_to_spec(functions: Dict[str, str], - custom_types: Dict[str, str], - constants: Dict[str, str], - ssz_objects: Dict[str, str], - imports: str, - version: str, - ) -> str: - """ - Given all the objects that constitute a spec, combine them into a single pyfile. 
- """ - new_type_definitions = ( - '\n\n'.join( - [ - f"class {key}({value}):\n pass\n" - for key, value in custom_types.items() - ] - ) - ) - for k in list(functions): - if "ceillog2" in k: - del functions[k] - functions_spec = '\n\n'.join(functions.values()) - for k in list(constants.keys()): - if k == "BLS12_381_Q": - constants[k] += " # noqa: E501" - constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants)) - ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values()) - spec = ( - imports - + '\n\n' + f"version = \'{version}\'\n" - + '\n\n' + new_type_definitions - + '\n' + SUNDRY_CONSTANTS_FUNCTIONS - + '\n\n' + constants_spec - + '\n\n' + CONFIG_LOADER - + '\n\n' + ssz_objects_instantiation_spec - + '\n\n' + functions_spec - + '\n' + SUNDRY_FUNCTIONS - + '\n' - ) - return spec - - -def combine_functions(old_functions: Dict[str, str], new_functions: Dict[str, str]) -> Dict[str, str]: - for key, value in new_functions.items(): - old_functions[key] = value - return old_functions - - -def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, str]) -> Dict[str, str]: - for key, value in new_constants.items(): - old_constants[key] = value - return old_constants - - -ignored_dependencies = [ - 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', - 'Bytes1', 'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', - 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', - 'bytes', 'byte', 'ByteList', 'ByteVector' -] - - -def dependency_order_ssz_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None: - """ - Determines which SSZ Object is dependent on which other and orders them appropriately - """ - items = list(objects.items()) - for key, value in items: - dependencies = [] - for line in value.split('\n'): - if not re.match(r'\s+\w+: .+', line): - continue # skip whitespace etc. 
- line = line[line.index(':') + 1:] # strip of field name - if '#' in line: - line = line[:line.index('#')] # strip of comment - dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies - dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants - dependencies = filter(lambda x: x not in ignored_dependencies, dependencies) - dependencies = filter(lambda x: x not in custom_types, dependencies) - for dep in dependencies: - key_list = list(objects.keys()) - for item in [dep, key] + key_list[key_list.index(dep)+1:]: - objects[item] = objects.pop(item) - - -def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: - """ - Takes in old spec and new spec ssz objects, combines them, - and returns the newer versions of the objects in dependency order. - """ - for key, value in new_objects.items(): - old_objects[key] = value - return old_objects - - -def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: - """ - Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
- """ - functions0, custom_types0, constants0, ssz_objects0 = spec0 - functions1, custom_types1, constants1, ssz_objects1 = spec1 - functions = combine_functions(functions0, functions1) - custom_types = combine_constants(custom_types0, custom_types1) - constants = combine_constants(constants0, constants1) - ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types) - return SpecObject((functions, custom_types, constants, ssz_objects)) - - -def dependency_order_spec(objs: SpecObject): - functions, custom_types, constants, ssz_objects = objs - dependency_order_ssz_objects(ssz_objects, custom_types) - - -def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str, - v_guide_sourcefile: str, outfile: str=None) -> Optional[str]: - phase0_spec = get_spec(phase0_sourcefile) - fork_choice_spec = get_spec(fork_choice_sourcefile) - v_guide = get_spec(v_guide_sourcefile) - spec_objects = phase0_spec - for value in [fork_choice_spec, v_guide]: - spec_objects = combine_spec_objects(spec_objects, value) - dependency_order_spec(spec_objects) - spec = objects_to_spec(*spec_objects, PHASE0_IMPORTS, 'phase0') - if outfile is not None: - with open(outfile, 'w') as out: - out.write(spec) - return spec - - -def build_phase1_spec(phase0_beacon_sourcefile: str, - phase0_fork_choice_sourcefile: str, - phase1_custody_sourcefile: str, - phase1_beacon_sourcefile: str, - phase1_fraud_sourcefile: str, - phase1_fork_choice_sourcefile: str, - phase1_fork_sourcefile: str, - outfile: str=None) -> Optional[str]: - all_sourcefiles = ( - phase0_beacon_sourcefile, - phase0_fork_choice_sourcefile, - phase1_custody_sourcefile, - phase1_beacon_sourcefile, - phase1_fraud_sourcefile, - phase1_fork_choice_sourcefile, - phase1_fork_sourcefile, - ) - all_spescs = [get_spec(spec) for spec in all_sourcefiles] - spec_objects = all_spescs[0] - for value in all_spescs[1:]: - spec_objects = combine_spec_objects(spec_objects, value) - dependency_order_spec(spec_objects) - spec = 
objects_to_spec(*spec_objects, PHASE1_IMPORTS, 'phase1') - if outfile is not None: - with open(outfile, 'w') as out: - out.write(spec) - return spec - - -if __name__ == '__main__': - description = ''' -Build the specs from the md docs. -If building phase 0: - 1st argument is input phase0/beacon-chain.md - 2nd argument is input phase0/fork-choice.md - 3rd argument is input phase0/validator.md - 4th argument is output spec.py - -If building phase 1: - 1st argument is input phase0/beacon-chain.md - 2nd argument is input phase0/fork-choice.md - 3rd argument is input phase1/custody-game.md - 4th argument is input phase1/beacon-chain.md - 5th argument is input phase1/fraud-proofs.md - 6th argument is input phase1/fork-choice.md - 7th argument is input phase1/phase1-fork.md - 8th argument is output spec.py -''' - parser = ArgumentParser(description=description) - parser.add_argument("-p", "--phase", dest="phase", type=int, default=0, help="Build for phase #") - parser.add_argument(dest="files", help="Input and output files", nargs="+") - - args = parser.parse_args() - if args.phase == 0: - if len(args.files) == 4: - build_phase0_spec(*args.files) - else: - print(" Phase 0 requires spec, forkchoice, and v-guide inputs as well as an output file.") - elif args.phase == 1: - if len(args.files) == 8: - build_phase1_spec(*args.files) - else: - print( - " Phase 1 requires input files as well as an output file:\n" - "\t phase0: (beacon-chain.md, fork-choice.md)\n" - "\t phase1: (custody-game.md, beacon-chain.md, fraud-proofs.md, fork-choice.md, phase1-fork.md)\n" - "\t and output.py" - ) - else: - print("Invalid phase: {0}".format(args.phase)) diff --git a/scripts/function_puller.py b/scripts/function_puller.py deleted file mode 100644 index 1a134007e..000000000 --- a/scripts/function_puller.py +++ /dev/null @@ -1,72 +0,0 @@ -import re -from typing import Dict, Tuple, NewType - - -FUNCTION_REGEX = r'^def [\w_]*' - -SpecObject = NewType('SpecObjects', Tuple[Dict[str, str], 
Dict[str, str], Dict[str, str], Dict[str, str]]) - - -def get_spec(file_name: str) -> SpecObject: - """ - Takes in the file name of a spec.md file, opens it and returns the following objects: - functions = {function_name: function_code} - constants= {constant_name: constant_code} - ssz_objects= {object_name: object} - - Note: This function makes heavy use of the inherent ordering of dicts, - if this is not supported by your python version, it will not work. - """ - pulling_from = None # line number of start of latest object - current_name = None # most recent section title - functions: Dict[str, str] = {} - constants: Dict[str, str] = {} - ssz_objects: Dict[str, str] = {} - function_matcher = re.compile(FUNCTION_REGEX) - is_ssz = False - custom_types: Dict[str, str] = {} - for linenum, line in enumerate(open(file_name).readlines()): - line = line.rstrip() - if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`': - current_name = line[line[:-1].rfind('`') + 1: -1] - if line[:9] == '```python': - assert pulling_from is None - pulling_from = linenum + 1 - elif line[:3] == '```': - pulling_from = None - else: - # Handle function definitions & ssz_objects - if pulling_from is not None: - # SSZ Object - if len(line) > 18 and line[:6] == 'class ' and line[-12:] == '(Container):': - name = line[6:-12] - # Check consistency with markdown header - assert name == current_name - is_ssz = True - # function definition - elif function_matcher.match(line) is not None: - current_name = function_matcher.match(line).group(0) - is_ssz = False - if is_ssz: - ssz_objects[current_name] = ssz_objects.get(current_name, '') + line + '\n' - else: - functions[current_name] = functions.get(current_name, '') + line + '\n' - # Handle constant and custom types table entries - elif pulling_from is None and len(line) > 0 and line[0] == '|': - row = line[1:].split('|') - if len(row) >= 2: - for i in range(2): - row[i] = row[i].strip().strip('`') - if '`' in row[i]: - row[i] 
= row[i][:row[i].find('`')] - is_constant_def = True - if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_': - is_constant_def = False - for c in row[0]: - if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789': - is_constant_def = False - if is_constant_def: - constants[row[0]] = row[1].replace('**TBD**', '2**32') - elif row[1].startswith('uint') or row[1].startswith('Bytes'): - custom_types[row[0]] = row[1] - return SpecObject((functions, custom_types, constants, ssz_objects)) diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..17851d6a1 --- /dev/null +++ b/setup.py @@ -0,0 +1,465 @@ +from setuptools import setup, find_packages, Command +from setuptools.command.build_py import build_py +from distutils import dir_util +from distutils.util import convert_path +import os +import re +from typing import Dict, NamedTuple, List + +FUNCTION_REGEX = r'^def [\w_]*' + + +class SpecObject(NamedTuple): + functions: Dict[str, str] + custom_types: Dict[str, str] + constants: Dict[str, str] + ssz_objects: Dict[str, str] + + +def get_spec(file_name: str) -> SpecObject: + """ + Takes in the file name of a spec.md file, opens it and returns a parsed spec object. + + Note: This function makes heavy use of the inherent ordering of dicts, + if this is not supported by your python version, it will not work. 
+ """ + pulling_from = None # line number of start of latest object + current_name = None # most recent section title + functions: Dict[str, str] = {} + constants: Dict[str, str] = {} + ssz_objects: Dict[str, str] = {} + function_matcher = re.compile(FUNCTION_REGEX) + is_ssz = False + custom_types: Dict[str, str] = {} + for linenum, line in enumerate(open(file_name).readlines()): + line = line.rstrip() + if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`': + current_name = line[line[:-1].rfind('`') + 1: -1] + if line[:9] == '```python': + assert pulling_from is None + pulling_from = linenum + 1 + elif line[:3] == '```': + pulling_from = None + else: + # Handle function definitions & ssz_objects + if pulling_from is not None: + # SSZ Object + if len(line) > 18 and line[:6] == 'class ' and line[-12:] == '(Container):': + name = line[6:-12] + # Check consistency with markdown header + assert name == current_name + is_ssz = True + # function definition + elif function_matcher.match(line) is not None: + current_name = function_matcher.match(line).group(0) + is_ssz = False + if is_ssz: + ssz_objects[current_name] = ssz_objects.get(current_name, '') + line + '\n' + else: + functions[current_name] = functions.get(current_name, '') + line + '\n' + # Handle constant and custom types table entries + elif pulling_from is None and len(line) > 0 and line[0] == '|': + row = line[1:].split('|') + if len(row) >= 2: + for i in range(2): + row[i] = row[i].strip().strip('`') + if '`' in row[i]: + row[i] = row[i][:row[i].find('`')] + is_constant_def = True + if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_': + is_constant_def = False + for c in row[0]: + if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789': + is_constant_def = False + if is_constant_def: + constants[row[0]] = row[1].replace('**TBD**', '2**32') + elif row[1].startswith('uint') or row[1].startswith('Bytes'): + custom_types[row[0]] = row[1] + return SpecObject(functions, custom_types, 
constants, ssz_objects) + + +CONFIG_LOADER = ''' +apply_constants_config(globals()) +''' + +PHASE0_IMPORTS = '''from eth2spec.config.config_util import apply_constants_config +from typing import ( + Any, Callable, Dict, Set, Sequence, Tuple, Optional, TypeVar +) + +from dataclasses import ( + dataclass, + field, +) + +from eth2spec.utils.ssz.ssz_impl import hash_tree_root +from eth2spec.utils.ssz.ssz_typing import ( + View, boolean, Container, List, Vector, uint64, + Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, +) +from eth2spec.utils import bls + +from eth2spec.utils.hash_function import hash + +SSZObject = TypeVar('SSZObject', bound=View) +''' +PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0 +from eth2spec.config.config_util import apply_constants_config +from typing import ( + Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional +) + +from dataclasses import ( + dataclass, + field, +) + +from eth2spec.utils.ssz.ssz_impl import hash_tree_root +from eth2spec.utils.ssz.ssz_typing import ( + View, boolean, Container, List, Vector, uint64, uint8, bit, + ByteVector, ByteList, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, +) +from eth2spec.utils import bls + +from eth2spec.utils.hash_function import hash + +# Whenever phase 1 is loaded, make sure we have the latest phase0 +from importlib import reload +reload(phase0) + + +SSZVariableName = str +GeneralizedIndex = NewType('GeneralizedIndex', int) +SSZObject = TypeVar('SSZObject', bound=View) +''' +SUNDRY_CONSTANTS_FUNCTIONS = ''' +def ceillog2(x: uint64) -> int: + return (x - 1).bit_length() +''' +SUNDRY_FUNCTIONS = ''' +# Monkey patch hash cache +_hash = hash +hash_cache: Dict[bytes, Bytes32] = {} + + +def get_eth1_data(distance: uint64) -> Bytes32: + return hash(distance) + + +def hash(x: bytes) -> Bytes32: # type: ignore + if x not in hash_cache: + hash_cache[x] = Bytes32(_hash(x)) + return hash_cache[x] + + +def cache_this(key_fn, 
value_fn): # type: ignore + cache_dict = {} # type: ignore + + def wrapper(*args, **kw): # type: ignore + key = key_fn(*args, **kw) + nonlocal cache_dict + if key not in cache_dict: + cache_dict[key] = value_fn(*args, **kw) + return cache_dict[key] + return wrapper + + +get_base_reward = cache_this( + lambda state, index: (state.validators.hash_tree_root(), state.slot), + get_base_reward) + +get_committee_count_at_slot = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + get_committee_count_at_slot) + +get_active_validator_indices = cache_this( + lambda state, epoch: (state.validators.hash_tree_root(), epoch), + get_active_validator_indices) + +get_beacon_committee = cache_this( + lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), + get_beacon_committee)''' + + +def objects_to_spec(spec_object: SpecObject, imports: str, version: str) -> str: + """ + Given all the objects that constitute a spec, combine them into a single pyfile. 
+ """ + new_type_definitions = ( + '\n\n'.join( + [ + f"class {key}({value}):\n pass\n" + for key, value in spec_object.custom_types.items() + ] + ) + ) + for k in list(spec_object.functions): + if "ceillog2" in k: + del spec_object.functions[k] + functions_spec = '\n\n'.join(spec_object.functions.values()) + for k in list(spec_object.constants.keys()): + if k == "BLS12_381_Q": + spec_object.constants[k] += " # noqa: E501" + constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, spec_object.constants[x]), spec_object.constants)) + ssz_objects_instantiation_spec = '\n\n'.join(spec_object.ssz_objects.values()) + spec = ( + imports + + '\n\n' + f"version = \'{version}\'\n" + + '\n\n' + new_type_definitions + + '\n' + SUNDRY_CONSTANTS_FUNCTIONS + + '\n\n' + constants_spec + + '\n\n' + CONFIG_LOADER + + '\n\n' + ssz_objects_instantiation_spec + + '\n\n' + functions_spec + + '\n' + SUNDRY_FUNCTIONS + + '\n' + ) + return spec + + +def combine_functions(old_functions: Dict[str, str], new_functions: Dict[str, str]) -> Dict[str, str]: + for key, value in new_functions.items(): + old_functions[key] = value + return old_functions + + +def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, str]) -> Dict[str, str]: + for key, value in new_constants.items(): + old_constants[key] = value + return old_constants + + +ignored_dependencies = [ + 'bit', 'boolean', 'Vector', 'List', 'Container', 'BLSPubkey', 'BLSSignature', + 'Bytes1', 'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector', + 'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256', + 'bytes', 'byte', 'ByteList', 'ByteVector' +] + + +def dependency_order_ssz_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None: + """ + Determines which SSZ Object is dependent on which other and orders them appropriately + """ + items = list(objects.items()) + for key, value in items: + dependencies = [] + for line in value.split('\n'): + if not re.match(r'\s+\w+: .+', line): + 
continue # skip whitespace etc. + line = line[line.index(':') + 1:] # strip of field name + if '#' in line: + line = line[:line.index('#')] # strip of comment + dependencies.extend(re.findall(r'(\w+)', line)) # catch all legible words, potential dependencies + dependencies = filter(lambda x: '_' not in x and x.upper() != x, dependencies) # filter out constants + dependencies = filter(lambda x: x not in ignored_dependencies, dependencies) + dependencies = filter(lambda x: x not in custom_types, dependencies) + for dep in dependencies: + key_list = list(objects.keys()) + for item in [dep, key] + key_list[key_list.index(dep)+1:]: + objects[item] = objects.pop(item) + + +def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: + """ + Takes in old spec and new spec ssz objects, combines them, + and returns the newer versions of the objects in dependency order. + """ + for key, value in new_objects.items(): + old_objects[key] = value + return old_objects + + +def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: + """ + Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function. 
+ """ + functions0, custom_types0, constants0, ssz_objects0 = spec0 + functions1, custom_types1, constants1, ssz_objects1 = spec1 + functions = combine_functions(functions0, functions1) + custom_types = combine_constants(custom_types0, custom_types1) + constants = combine_constants(constants0, constants1) + ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types) + return SpecObject(functions, custom_types, constants, ssz_objects) + + +def dependency_order_spec(objs: SpecObject): + functions, custom_types, constants, ssz_objects = objs + dependency_order_ssz_objects(ssz_objects, custom_types) + + +version_imports = { + 'phase0': PHASE0_IMPORTS, + 'phase1': PHASE1_IMPORTS, +} + + +def build_spec(version: str, source_files: List[str]) -> str: + all_spescs = [get_spec(spec) for spec in source_files] + + spec_object = all_spescs[0] + for value in all_spescs[1:]: + spec_object = combine_spec_objects(spec_object, value) + + dependency_order_spec(spec_object) + + return objects_to_spec(spec_object, version_imports[version], version) + + +class PySpecCommand(Command): + """Convert spec markdown files to a spec python file""" + + description = "Convert spec markdown files to a spec python file" + + spec_version: str + md_doc_paths: str + parsed_md_doc_paths: List[str] + out_dir: str + + # The format is (long option, short option, description). + user_options = [ + ('spec-version=', None, "Spec version to tag build with. Used to select md-docs defaults."), + ('md-doc-paths=', None, "List of paths of markdown files to build spec with"), + ('out-dir=', None, "Output directory to write spec package to") + ] + + def initialize_options(self): + """Set default values for options.""" + # Each user option must be listed here with their default value. 
+ self.spec_version = 'phase0' + self.md_doc_paths = '' + self.out_dir = 'pyspec_output' + + def finalize_options(self): + """Post-process options.""" + if len(self.md_doc_paths) == 0: + if self.spec_version == "phase0": + self.md_doc_paths = """ + specs/phase0/beacon-chain.md + specs/phase0/fork-choice.md + specs/phase0/validator.md + """ + elif self.spec_version == "phase1": + self.md_doc_paths = """ + specs/phase0/beacon-chain.md + specs/phase0/fork-choice.md + specs/phase1/custody-game.md + specs/phase1/beacon-chain.md + specs/phase1/fraud-proofs.md + specs/phase1/fork-choice.md + specs/phase1/phase1-fork.md + """ + else: + raise Exception('no markdown files specified, and spec version "%s" is unknown', self.spec_version) + print("no paths were specified, using default markdown file paths for pyspec build (spec version: %s)" % self.spec_version) + + self.parsed_md_doc_paths = self.md_doc_paths.split() + + for filename in self.parsed_md_doc_paths: + if not os.path.exists(filename): + raise Exception('Pyspec markdown input file "%s" does not exist.' % filename) + + def run(self): + spec_str = build_spec(self.spec_version, self.parsed_md_doc_paths) + if self.dry_run: + self.announce('dry run successfully prepared contents for spec.' 
+ f' out dir: "{self.out_dir}", spec version: "{self.spec_version}"') + self.debug_print(spec_str) + else: + dir_util.mkpath(self.out_dir) + with open(os.path.join(self.out_dir, 'spec.py'), 'w') as out: + out.write(spec_str) + with open(os.path.join(self.out_dir, '__init__.py'), 'w') as out: + out.write("") + + +class BuildPyCommand(build_py): + """Customize the build command to run the spec-builder on setup.py build""" + + def initialize_options(self): + super(BuildPyCommand, self).initialize_options() + + def run_pyspec_cmd(self, spec_version: str, **opts): + cmd_obj: PySpecCommand = self.distribution.reinitialize_command("pyspec") + cmd_obj.spec_version = spec_version + cmd_obj.out_dir = os.path.join(self.build_lib, 'eth2spec', spec_version) + for k, v in opts.items(): + setattr(cmd_obj, k, v) + self.run_command('pyspec') + + def run(self): + self.run_pyspec_cmd(spec_version="phase0") + self.run_pyspec_cmd(spec_version="phase1") + + super(BuildPyCommand, self).run() + + +class PyspecDevCommand(Command): + """Build the markdown files in-place to their source location for testing.""" + description = "Build the markdown files in-place to their source location for testing." 
+ user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run_pyspec_cmd(self, spec_version: str, **opts): + cmd_obj: PySpecCommand = self.distribution.reinitialize_command("pyspec") + cmd_obj.spec_version = spec_version + eth2spec_dir = convert_path(self.distribution.package_dir['eth2spec']) + cmd_obj.out_dir = os.path.join(eth2spec_dir, spec_version) + for k, v in opts.items(): + setattr(cmd_obj, k, v) + self.run_command('pyspec') + + def run(self): + print("running build_py command") + self.run_pyspec_cmd(spec_version="phase0", package_inplace=False) + self.run_pyspec_cmd(spec_version="phase1", package_inplace=False) + + +commands = { + 'pyspec': PySpecCommand, + 'build_py': BuildPyCommand, + 'pyspecdev': PyspecDevCommand, +} + +with open("README.md", "rt", encoding="utf8") as f: + readme = f.read() + +setup( + name='eth2spec', + description="Eth2 spec, provided as Python package for tooling and testing", + long_description=readme, + long_description_content_type="text/markdown", + author="ethereum", + url="https://github.com/ethereum/eth2.0-specs", + include_package_data=False, + package_data={'configs': ['*.yaml'], + 'specs': ['**/*.md']}, + package_dir={ + "eth2spec": "tests/core/pyspec/eth2spec", + "configs": "configs", + "specs": "specs" + }, + packages=find_packages(where='tests/core/pyspec') + ['configs', 'specs'], + py_modules=["eth2spec"], + cmdclass=commands, + python_requires=">=3.8, <4", + tests_require=[], # avoid old style tests require. Enable explicit (re-)installs, e.g. 
`pip install .[testing]` + extras_require={ + "testing": ["pytest>=4.4", "pytest-cov", "pytest-xdist"], + "linting": ["flake8==3.7.7", "mypy==0.750"], + }, + install_requires=[ + "eth-utils>=1.3.0,<2", + "eth-typing>=2.1.0,<3.0.0", + "pycryptodome==3.9.4", + "py_ecc==2.0.0", + "dataclasses==0.6", + "remerkleable==0.1.10", + "ruamel.yaml==0.16.5" + ] +) diff --git a/tests/core/config_helpers/README.md b/tests/core/config_helpers/README.md deleted file mode 100644 index 85a9304d7..000000000 --- a/tests/core/config_helpers/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# Eth2 config helpers - -`preset_loader`: A util to load config-presets with. -See [Configs documentation](../../../configs/README.md). - -Usage: - -```python -configs_path = 'configs/' - -... - -import preset_loader -from eth2spec.phase0 import spec -my_presets = preset_loader.load_presets(configs_path, 'mainnet') -spec.apply_constants_preset(my_presets) -``` - -WARNING: this overwrites globals, make sure to prevent accidental collisions with other usage of the same imported specs package. diff --git a/tests/core/config_helpers/preset_loader/__init__.py b/tests/core/config_helpers/preset_loader/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/core/config_helpers/preset_loader/loader.py b/tests/core/config_helpers/preset_loader/loader.py deleted file mode 100644 index 95f147f6e..000000000 --- a/tests/core/config_helpers/preset_loader/loader.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Dict, Any - -from ruamel.yaml import ( - YAML, -) -from pathlib import Path -from os.path import join - - -def load_presets(configs_dir, presets_name) -> Dict[str, Any]: - """ - Loads the given preset - :param presets_name: The name of the presets. 
(lowercase snake_case) - :return: Dictionary, mapping of constant-name -> constant-value - """ - path = Path(join(configs_dir, presets_name+'.yaml')) - yaml = YAML(typ='base') - loaded = yaml.load(path) - out = dict() - for k, v in loaded.items(): - if isinstance(v, list): - out[k] = v - elif isinstance(v, str) and v.startswith("0x"): - out[k] = bytes.fromhex(v[2:]) - else: - out[k] = int(v) - return out diff --git a/tests/core/config_helpers/requirements.txt b/tests/core/config_helpers/requirements.txt deleted file mode 100644 index 6c7334268..000000000 --- a/tests/core/config_helpers/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -ruamel.yaml==0.16.5 diff --git a/tests/core/config_helpers/setup.py b/tests/core/config_helpers/setup.py deleted file mode 100644 index 3f893f3d4..000000000 --- a/tests/core/config_helpers/setup.py +++ /dev/null @@ -1,9 +0,0 @@ -from distutils.core import setup - -setup( - name='config_helpers', - packages=['preset_loader'], - install_requires=[ - "ruamel.yaml==0.16.5" - ] -) diff --git a/tests/core/pyspec/README.md b/tests/core/pyspec/README.md index 2e596520c..4b2269a2b 100644 --- a/tests/core/pyspec/README.md +++ b/tests/core/pyspec/README.md @@ -7,22 +7,31 @@ With this executable spec, test-generators can easily create test-vectors for client implementations, and the spec itself can be verified to be consistent and coherent through sanity tests implemented with pytest. - ## Building -All the dynamic parts of the spec can be build at once with `make pyspec`. +Building the pyspec is simply: `python setup.py build` + (or `pip install .`, but beware that ignored files will still be copied over to a temporary dir, due to pip issue 2195). +This outputs the build files to the `./build/lib/eth2spec/...` dir, and can't be used for local test running. Instead, use the dev-install as described below. -Alternatively, you can build a sub-set of the pyspec: `make phase0`. +## Dev Install -Or, to build a single file, specify the path, e.g. 
`make test_libs/pyspec/eth2spec/phase0/spec.py`. +All the dynamic parts of the spec are automatically built with `python setup.py pyspecdev`. +Unlike the regular install, this outputs spec files to their original source location, instead of build output only. +Alternatively, you can build a sub-set of the pyspec with the distutil command: +```bash +python setup.py pyspec --spec-version=phase0 --md-doc-paths="specs/phase0/beacon-chain.md specs/phase0/fork-choice.md" --out-dir=my_spec_dir +``` ## Py-tests -After building, you can install the dependencies for running the `pyspec` tests with `make install_test`. +After installing, you can install the optional dependencies for testing and linting. +With makefile: `make install_test`. +Or manually: run `pip install .[testing]` and `pip install .[linting]`. These tests are not intended for client-consumption. -These tests are sanity tests, to verify if the spec itself is consistent. +These tests are testing the spec itself, to verify consistency and provide feedback on modifications of the spec. +However, most of the tests can be run in generator-mode, to output test vectors for client-consumption. ### How to run tests @@ -32,23 +41,19 @@ Run `make test` from the root of the specs repository (after running `make insta #### Manual -From within the `pyspec` folder: +From the repository root: -Install dependencies: +Install venv and install: ```bash python3 -m venv venv . venv/bin/activate -pip3 install -r requirements-testing.txt +python setup.py pyspecdev ``` -*Note*: Make sure to run `make -B pyspec` from the root of the specs repository, - to build the parts of the pyspec module derived from the markdown specs. -The `-B` flag may be helpful to force-overwrite the `pyspec` output after you made a change to the markdown source files. -Run the tests: +Run the test command from the `tests/core/pyspec` directory: ``` pytest --config=minimal eth2spec ``` -Note the package-name, this is to locate the tests. 
### How to view code coverage report diff --git a/tests/core/pyspec/eth2spec/config/README.md b/tests/core/pyspec/eth2spec/config/README.md new file mode 100644 index 000000000..ea2b2ccd8 --- /dev/null +++ b/tests/core/pyspec/eth2spec/config/README.md @@ -0,0 +1,20 @@ +# Eth2 config util + +For configuration, see [Configs documentation](../../../../../configs/README.md). + +## Usage: + +```python +configs_path = 'configs/' + +... + +from eth2spec.config import config_util +from eth2spec.phase0 import spec +from importlib import reload +my_presets = config_util.prepare_config(configs_path, 'mainnet') +# reload spec to make loaded config effective +reload(spec) +``` + +WARNING: this overwrites globals, make sure to prevent accidental collisions with other usage of the same imported specs package. diff --git a/tests/core/pyspec/eth2spec/config/apply_config.py b/tests/core/pyspec/eth2spec/config/apply_config.py deleted file mode 100644 index 2f0ce5902..000000000 --- a/tests/core/pyspec/eth2spec/config/apply_config.py +++ /dev/null @@ -1,22 +0,0 @@ -from preset_loader import loader -from typing import Dict, Any - -presets: Dict[str, Any] = {} - - -# Access to overwrite spec constants based on configuration -# This is called by the spec module after declaring its globals, and applies the loaded presets. -def apply_constants_preset(spec_globals: Dict[str, Any]) -> None: - global presets - for k, v in presets.items(): - if k.startswith('DOMAIN_'): - spec_globals[k] = spec_globals['DomainType'](v) # domain types are defined as bytes in the configs - else: - spec_globals[k] = v - - -# Load presets from a file. This does not apply the presets. -# To apply the presets, reload the spec module (it will re-initialize with the presets taken from here). 
-def load_presets(configs_path, config_name): - global presets - presets = loader.load_presets(configs_path, config_name) diff --git a/tests/core/pyspec/eth2spec/config/config_util.py b/tests/core/pyspec/eth2spec/config/config_util.py new file mode 100644 index 000000000..42ad76d69 --- /dev/null +++ b/tests/core/pyspec/eth2spec/config/config_util.py @@ -0,0 +1,44 @@ +from ruamel.yaml import YAML +from pathlib import Path +from os.path import join +from typing import Dict, Any + +config: Dict[str, Any] = {} + + +# Access to overwrite spec constants based on configuration +# This is called by the spec module after declaring its globals, and applies the loaded presets. +def apply_constants_config(spec_globals: Dict[str, Any]) -> None: + global config + for k, v in config.items(): + if k.startswith('DOMAIN_'): + spec_globals[k] = spec_globals['DomainType'](v) # domain types are defined as bytes in the configs + else: + spec_globals[k] = v + + +# Load presets from a file, and then prepares the global config setting. This does not apply the config. +# To apply the config, reload the spec module (it will re-initialize with the config taken from here). +def prepare_config(configs_path, config_name): + global config + config = load_config_file(configs_path, config_name) + + +def load_config_file(configs_dir, presets_name) -> Dict[str, Any]: + """ + Loads the given preset + :param presets_name: The name of the presets. 
(lowercase snake_case) + :return: Dictionary, mapping of constant-name -> constant-value + """ + path = Path(join(configs_dir, presets_name + '.yaml')) + yaml = YAML(typ='base') + loaded = yaml.load(path) + out = dict() + for k, v in loaded.items(): + if isinstance(v, list): + out[k] = v + elif isinstance(v, str) and v.startswith("0x"): + out[k] = bytes.fromhex(v[2:]) + else: + out[k] = int(v) + return out diff --git a/tests/core/pyspec/eth2spec/phase0/__init__.py b/tests/core/pyspec/eth2spec/phase0/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/core/pyspec/eth2spec/phase1/__init__.py b/tests/core/pyspec/eth2spec/phase1/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/core/pyspec/eth2spec/test/conftest.py b/tests/core/pyspec/eth2spec/test/conftest.py index 08cd850ab..01187b05f 100644 --- a/tests/core/pyspec/eth2spec/test/conftest.py +++ b/tests/core/pyspec/eth2spec/test/conftest.py @@ -1,4 +1,4 @@ -from eth2spec.config import apply_config +from eth2spec.config import config_util from eth2spec.test.context import reload_specs @@ -34,6 +34,6 @@ def pytest_addoption(parser): @fixture(autouse=True) def config(request): config_name = request.config.getoption("--config") - apply_config.load_presets('../../../configs/', config_name) + config_util.prepare_config('../../../configs/', config_name) # now that the presets are loaded, reload the specs to apply them reload_specs() diff --git a/tests/core/pyspec/requirements-testing.txt b/tests/core/pyspec/requirements-testing.txt deleted file mode 100644 index e8ecd12a6..000000000 --- a/tests/core/pyspec/requirements-testing.txt +++ /dev/null @@ -1,7 +0,0 @@ --r requirements.txt -pytest>=4.4 -../config_helpers -flake8==3.7.7 -mypy==0.750 -pytest-cov -pytest-xdist diff --git a/tests/core/pyspec/requirements.txt b/tests/core/pyspec/requirements.txt deleted file mode 100644 index 01f1caed7..000000000 --- a/tests/core/pyspec/requirements.txt +++ /dev/null @@ -1,6 
+0,0 @@ -eth-utils>=1.3.0,<2 -eth-typing>=2.1.0,<3.0.0 -pycryptodome==3.9.4 -py_ecc==2.0.0 -dataclasses==0.6 -remerkleable==0.1.10 diff --git a/tests/core/pyspec/setup.py b/tests/core/pyspec/setup.py deleted file mode 100644 index 319b86953..000000000 --- a/tests/core/pyspec/setup.py +++ /dev/null @@ -1,16 +0,0 @@ -from setuptools import setup, find_packages - -setup( - name='pyspec', - packages=find_packages(), - python_requires=">=3.8, <4", - tests_require=["pytest"], - install_requires=[ - "eth-utils>=1.3.0,<2", - "eth-typing>=2.1.0,<3.0.0", - "pycryptodome==3.9.4", - "py_ecc==2.0.0", - "dataclasses==0.6", - "remerkleable==0.1.10", - ] -) diff --git a/tests/generators/epoch_processing/main.py b/tests/generators/epoch_processing/main.py index b6e3d6c04..8f2a6e94f 100644 --- a/tests/generators/epoch_processing/main.py +++ b/tests/generators/epoch_processing/main.py @@ -11,15 +11,16 @@ from eth2spec.test.phase_0.epoch_processing import ( ) from gen_base import gen_runner, gen_typing from gen_from_tests.gen import generate_from_tests -from preset_loader import loader +from importlib import reload +from eth2spec.config import config_util def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: def prepare_fn(configs_path: str) -> str: - presets = loader.load_presets(configs_path, config_name) - spec_phase0.apply_constants_preset(presets) - spec_phase1.apply_constants_preset(presets) + config_util.prepare_config(configs_path, config_name) + reload(spec_phase0) + reload(spec_phase1) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: diff --git a/tests/generators/epoch_processing/requirements.txt b/tests/generators/epoch_processing/requirements.txt index 3c318f56b..b82314298 100644 --- a/tests/generators/epoch_processing/requirements.txt +++ b/tests/generators/epoch_processing/requirements.txt @@ -1,3 +1,2 @@ ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec \ No newline at end of file +../../../ 
\ No newline at end of file diff --git a/tests/generators/genesis/main.py b/tests/generators/genesis/main.py index 9a91afbfd..3563c3fd9 100644 --- a/tests/generators/genesis/main.py +++ b/tests/generators/genesis/main.py @@ -4,15 +4,16 @@ from eth2spec.test.genesis import test_initialization, test_validity from gen_base import gen_runner, gen_typing from gen_from_tests.gen import generate_from_tests -from preset_loader import loader from eth2spec.phase0 import spec as spec +from importlib import reload +from eth2spec.config import config_util def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: def prepare_fn(configs_path: str) -> str: - presets = loader.load_presets(configs_path, config_name) - spec.apply_constants_preset(presets) + config_util.prepare_config(configs_path, config_name) + reload(spec) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: diff --git a/tests/generators/genesis/requirements.txt b/tests/generators/genesis/requirements.txt index 3c318f56b..b82314298 100644 --- a/tests/generators/genesis/requirements.txt +++ b/tests/generators/genesis/requirements.txt @@ -1,3 +1,2 @@ ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec \ No newline at end of file +../../../ \ No newline at end of file diff --git a/tests/generators/operations/main.py b/tests/generators/operations/main.py index de3eb7cf1..6906c9df7 100644 --- a/tests/generators/operations/main.py +++ b/tests/generators/operations/main.py @@ -11,7 +11,8 @@ from eth2spec.test.phase_0.block_processing import ( from gen_base import gen_runner, gen_typing from gen_from_tests.gen import generate_from_tests -from preset_loader import loader +from importlib import reload +from eth2spec.config import config_util from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase1 import spec as spec_phase1 @@ -19,9 +20,9 @@ from eth2spec.phase1 import spec as spec_phase1 def create_provider(handler_name: str, tests_src, 
config_name: str) -> gen_typing.TestProvider: def prepare_fn(configs_path: str) -> str: - presets = loader.load_presets(configs_path, config_name) - spec_phase0.apply_constants_preset(presets) - spec_phase1.apply_constants_preset(presets) + config_util.prepare_config(configs_path, config_name) + reload(spec_phase0) + reload(spec_phase1) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: diff --git a/tests/generators/operations/requirements.txt b/tests/generators/operations/requirements.txt index f34243cf4..a6ea61aea 100644 --- a/tests/generators/operations/requirements.txt +++ b/tests/generators/operations/requirements.txt @@ -1,4 +1,3 @@ eth-utils==1.6.0 ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec \ No newline at end of file +../../../ \ No newline at end of file diff --git a/tests/generators/sanity/main.py b/tests/generators/sanity/main.py index 712f51c07..051f4877f 100644 --- a/tests/generators/sanity/main.py +++ b/tests/generators/sanity/main.py @@ -4,7 +4,10 @@ from eth2spec.test.sanity import test_blocks, test_slots from gen_base import gen_runner, gen_typing from gen_from_tests.gen import generate_from_tests -from preset_loader import loader + +from importlib import reload +from eth2spec.config import config_util + from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase1 import spec as spec_phase1 @@ -12,9 +15,9 @@ from eth2spec.phase1 import spec as spec_phase1 def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: def prepare_fn(configs_path: str) -> str: - presets = loader.load_presets(configs_path, config_name) - spec_phase0.apply_constants_preset(presets) - spec_phase1.apply_constants_preset(presets) + config_util.prepare_config(configs_path, config_name) + reload(spec_phase0) + reload(spec_phase1) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: diff --git a/tests/generators/sanity/requirements.txt b/tests/generators/sanity/requirements.txt 
index 3c318f56b..b82314298 100644 --- a/tests/generators/sanity/requirements.txt +++ b/tests/generators/sanity/requirements.txt @@ -1,3 +1,2 @@ ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec \ No newline at end of file +../../../ \ No newline at end of file diff --git a/tests/generators/shuffling/main.py b/tests/generators/shuffling/main.py index 6425c708a..63284db2c 100644 --- a/tests/generators/shuffling/main.py +++ b/tests/generators/shuffling/main.py @@ -1,9 +1,11 @@ from eth2spec.phase0 import spec as spec from eth_utils import to_tuple from gen_base import gen_runner, gen_typing -from preset_loader import loader from typing import Iterable +from importlib import reload +from eth2spec.config import config_util + def shuffling_case_fn(seed, count): yield 'mapping', 'data', { @@ -27,8 +29,8 @@ def shuffling_test_cases(): def create_provider(config_name: str) -> gen_typing.TestProvider: def prepare_fn(configs_path: str) -> str: - presets = loader.load_presets(configs_path, config_name) - spec.apply_constants_preset(presets) + config_util.prepare_config(configs_path, config_name) + reload(spec) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: diff --git a/tests/generators/shuffling/requirements.txt b/tests/generators/shuffling/requirements.txt index f34243cf4..a6ea61aea 100644 --- a/tests/generators/shuffling/requirements.txt +++ b/tests/generators/shuffling/requirements.txt @@ -1,4 +1,3 @@ eth-utils==1.6.0 ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec \ No newline at end of file +../../../ \ No newline at end of file diff --git a/tests/generators/ssz_generic/requirements.txt b/tests/generators/ssz_generic/requirements.txt index 6b11d61af..af061a3b1 100644 --- a/tests/generators/ssz_generic/requirements.txt +++ b/tests/generators/ssz_generic/requirements.txt @@ -1,4 +1,3 @@ eth-utils==1.6.0 ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec +../../../ diff --git 
a/tests/generators/ssz_static/main.py b/tests/generators/ssz_static/main.py index 334f45fa7..bae911a0e 100644 --- a/tests/generators/ssz_static/main.py +++ b/tests/generators/ssz_static/main.py @@ -10,7 +10,11 @@ from eth2spec.utils.ssz.ssz_impl import ( serialize, ) from gen_base import gen_runner, gen_typing -from preset_loader import loader + + +from importlib import reload +from eth2spec.config import config_util + MAX_BYTES_LENGTH = 100 MAX_LIST_LENGTH = 10 @@ -54,8 +58,8 @@ def create_provider(config_name: str, seed: int, mode: random_value.Randomizatio cases_if_random: int) -> gen_typing.TestProvider: def prepare_fn(configs_path: str) -> str: # Apply changes to presets, this affects some of the vector types. - presets = loader.load_presets(configs_path, config_name) - spec.apply_constants_preset(presets) + config_util.prepare_config(configs_path, config_name) + reload(spec) return config_name def cases_fn() -> Iterable[gen_typing.TestCase]: diff --git a/tests/generators/ssz_static/requirements.txt b/tests/generators/ssz_static/requirements.txt index 3c318f56b..b82314298 100644 --- a/tests/generators/ssz_static/requirements.txt +++ b/tests/generators/ssz_static/requirements.txt @@ -1,3 +1,2 @@ ../../core/gen_helpers -../../core/config_helpers -../../core/pyspec \ No newline at end of file +../../../ \ No newline at end of file From 7848500ea9c239b027899335f846eca82d7347e0 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 25 Jan 2020 22:10:03 +0100 Subject: [PATCH 119/194] spec packaging; implement review suggestions from hww --- Makefile | 3 +-- setup.py | 42 ++++++++++++++--------------- tests/core/pyspec/README.md | 2 +- tests/generators/sanity/main.py | 6 ++--- tests/generators/shuffling/main.py | 7 ++--- tests/generators/ssz_static/main.py | 9 +++---- 6 files changed, 32 insertions(+), 37 deletions(-) diff --git a/Makefile b/Makefile index efa776997..abd240b51 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,5 @@ SPEC_DIR = ./specs SSZ_DIR = ./ssz 
-SCRIPT_DIR = ./scripts TEST_LIBS_DIR = ./tests/core PY_SPEC_DIR = $(TEST_LIBS_DIR)/pyspec TEST_VECTOR_DIR = ../eth2.0-spec-tests/tests @@ -61,7 +60,7 @@ pyspec: # installs the packages to run pyspec tests install_test: - python3 -m venv venv; . venv/bin/activate; pip3 install .[testing] .[linting] + python3 -m venv venv; . venv/bin/activate; pip3 install .[test] .[lint] test: pyspec . venv/bin/activate; cd $(PY_SPEC_DIR); \ diff --git a/setup.py b/setup.py index 17851d6a1..328df6bef 100644 --- a/setup.py +++ b/setup.py @@ -164,21 +164,25 @@ def cache_this(key_fn, value_fn): # type: ignore return wrapper +_get_base_reward = get_base_reward get_base_reward = cache_this( lambda state, index: (state.validators.hash_tree_root(), state.slot), - get_base_reward) + _get_base_reward) +_get_committee_count_at_slot = get_committee_count_at_slot get_committee_count_at_slot = cache_this( lambda state, epoch: (state.validators.hash_tree_root(), epoch), - get_committee_count_at_slot) + _get_committee_count_at_slot) +_get_active_validator_indices = get_active_validator_indices get_active_validator_indices = cache_this( lambda state, epoch: (state.validators.hash_tree_root(), epoch), - get_active_validator_indices) + _get_active_validator_indices) +_get_beacon_committee = get_beacon_committee get_beacon_committee = cache_this( lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), - get_beacon_committee)''' + _get_beacon_committee)''' def objects_to_spec(spec_object: SpecObject, imports: str, version: str) -> str: @@ -283,11 +287,6 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: return SpecObject(functions, custom_types, constants, ssz_objects) -def dependency_order_spec(objs: SpecObject): - functions, custom_types, constants, ssz_objects = objs - dependency_order_ssz_objects(ssz_objects, custom_types) - - version_imports = { 'phase0': PHASE0_IMPORTS, 'phase1': PHASE1_IMPORTS, @@ -295,13 
+294,13 @@ version_imports = { def build_spec(version: str, source_files: List[str]) -> str: - all_spescs = [get_spec(spec) for spec in source_files] + all_specs = [get_spec(spec) for spec in source_files] - spec_object = all_spescs[0] - for value in all_spescs[1:]: + spec_object = all_specs[0] + for value in all_specs[1:]: spec_object = combine_spec_objects(spec_object, value) - dependency_order_spec(spec_object) + dependency_order_ssz_objects(spec_object.ssz_objects, spec_object.custom_types) return objects_to_spec(spec_object, version_imports[version], version) @@ -333,6 +332,8 @@ class PySpecCommand(Command): def finalize_options(self): """Post-process options.""" if len(self.md_doc_paths) == 0: + print("no paths were specified, using default markdown file paths for pyspec" + " build (spec version: %s)" % self.spec_version) if self.spec_version == "phase0": self.md_doc_paths = """ specs/phase0/beacon-chain.md @@ -351,7 +352,6 @@ class PySpecCommand(Command): """ else: raise Exception('no markdown files specified, and spec version "%s" is unknown', self.spec_version) - print("no paths were specified, using default markdown file paths for pyspec build (spec version: %s)" % self.spec_version) self.parsed_md_doc_paths = self.md_doc_paths.split() @@ -388,8 +388,8 @@ class BuildPyCommand(build_py): self.run_command('pyspec') def run(self): - self.run_pyspec_cmd(spec_version="phase0") - self.run_pyspec_cmd(spec_version="phase1") + for spec_version in version_imports: + self.run_pyspec_cmd(spec_version=spec_version) super(BuildPyCommand, self).run() @@ -416,9 +416,8 @@ class PyspecDevCommand(Command): def run(self): print("running build_py command") - self.run_pyspec_cmd(spec_version="phase0", package_inplace=False) - self.run_pyspec_cmd(spec_version="phase1", package_inplace=False) - + for spec_version in version_imports: + self.run_pyspec_cmd(spec_version=spec_version) commands = { 'pyspec': PySpecCommand, @@ -448,10 +447,9 @@ setup( py_modules=["eth2spec"], 
cmdclass=commands, python_requires=">=3.8, <4", - tests_require=[], # avoid old style tests require. Enable explicit (re-)installs, e.g. `pip install .[testing]` extras_require={ - "testing": ["pytest>=4.4", "pytest-cov", "pytest-xdist"], - "linting": ["flake8==3.7.7", "mypy==0.750"], + "test": ["pytest>=4.4", "pytest-cov", "pytest-xdist"], + "lint": ["flake8==3.7.7", "mypy==0.750"], }, install_requires=[ "eth-utils>=1.3.0,<2", diff --git a/tests/core/pyspec/README.md b/tests/core/pyspec/README.md index 4b2269a2b..1f0bb6642 100644 --- a/tests/core/pyspec/README.md +++ b/tests/core/pyspec/README.md @@ -9,7 +9,7 @@ With this executable spec, ## Building -Building the pyspec is simply: `python setup.py build` +To build the pyspec: `python setup.py build` (or `pip install .`, but beware that ignored files will still be copied over to a temporary dir, due to pip issue 2195). This outputs the build files to the `./build/lib/eth2spec/...` dir, and can't be used for local test running. Instead, use the dev-install as described below. 
diff --git a/tests/generators/sanity/main.py b/tests/generators/sanity/main.py index 051f4877f..cfcbcfdb6 100644 --- a/tests/generators/sanity/main.py +++ b/tests/generators/sanity/main.py @@ -1,13 +1,11 @@ from typing import Iterable - -from eth2spec.test.sanity import test_blocks, test_slots +from importlib import reload from gen_base import gen_runner, gen_typing from gen_from_tests.gen import generate_from_tests -from importlib import reload +from eth2spec.test.sanity import test_blocks, test_slots from eth2spec.config import config_util - from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase1 import spec as spec_phase1 diff --git a/tests/generators/shuffling/main.py b/tests/generators/shuffling/main.py index 63284db2c..0ef2657c4 100644 --- a/tests/generators/shuffling/main.py +++ b/tests/generators/shuffling/main.py @@ -1,10 +1,11 @@ -from eth2spec.phase0 import spec as spec from eth_utils import to_tuple -from gen_base import gen_runner, gen_typing from typing import Iterable - from importlib import reload + +from gen_base import gen_runner, gen_typing + from eth2spec.config import config_util +from eth2spec.phase0 import spec as spec def shuffling_case_fn(seed, count): diff --git a/tests/generators/ssz_static/main.py b/tests/generators/ssz_static/main.py index bae911a0e..b7c948767 100644 --- a/tests/generators/ssz_static/main.py +++ b/tests/generators/ssz_static/main.py @@ -1,19 +1,18 @@ from random import Random from typing import Iterable +from importlib import reload from inspect import getmembers, isclass +from gen_base import gen_runner, gen_typing + from eth2spec.debug import random_value, encode +from eth2spec.config import config_util from eth2spec.phase0 import spec from eth2spec.utils.ssz.ssz_typing import Container from eth2spec.utils.ssz.ssz_impl import ( hash_tree_root, serialize, ) -from gen_base import gen_runner, gen_typing - - -from importlib import reload -from eth2spec.config import config_util MAX_BYTES_LENGTH = 100 From 
e118045a59923f4cf5ca42770950a31fcd940174 Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 28 Jan 2020 02:31:39 +0100 Subject: [PATCH 120/194] update remerkleable dependency --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 328df6bef..1c652e0eb 100644 --- a/setup.py +++ b/setup.py @@ -457,7 +457,7 @@ setup( "pycryptodome==3.9.4", "py_ecc==2.0.0", "dataclasses==0.6", - "remerkleable==0.1.10", + "remerkleable==0.1.11", "ruamel.yaml==0.16.5" ] ) From 2a91b43eaf6d883750216d82e921227b03be3223 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Wed, 8 Jan 2020 18:19:18 +0800 Subject: [PATCH 121/194] Remove shard block chunking Only store a 32 byte root for every shard block Rationale: originally, I added shard block chunking (store 4 chunks for every shard block instead of one root) to facilitate construction of data availability roots. However, it turns out that there is an easier technique. Set the width of the data availability rectangle's rows to be 1/4 the max size of a shard block, so each block would fill multiple rows. Then, non-full blocks will generally create lots of zero rows. For example if the block bodies are `31415926535` and `897932` with a max size of 24 bytes, the rows might look like this: ``` 31415926 53500000 00000000 89793200 00000000 00000000 ``` Zero rows would extend rightward to complete zero rows, and when extending downward we can count the number of zero rows, and reduce the number of extra rows that we make, so we only make a new row for every nonzero row in the original data. This way we get only a close-to-optimal ~4-5x blowup in the data even if the data has zero rows in the middle. 
--- scripts/build_spec.py | 2 +- specs/phase1/beacon-chain.md | 30 +++++++----------------------- specs/phase1/custody-game.md | 5 ++--- specs/phase1/fraud-proofs.md | 2 +- 4 files changed, 11 insertions(+), 28 deletions(-) diff --git a/scripts/build_spec.py b/scripts/build_spec.py index fa351db2f..6306a13c9 100644 --- a/scripts/build_spec.py +++ b/scripts/build_spec.py @@ -48,7 +48,7 @@ from dataclasses import ( from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( View, boolean, Container, List, Vector, uint64, uint8, bit, - ByteVector, ByteList, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, + ByteList, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, ) from eth2spec.utils import bls diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 858e840b3..d1b7d52a0 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -97,9 +97,8 @@ Configuration is not namespaced. 
Instead it is strictly an extension; | `LIGHT_CLIENT_COMMITTEE_SIZE` | `2**7` (= 128) | | `LIGHT_CLIENT_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | | `SHARD_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours | -| `SHARD_BLOCK_CHUNK_SIZE` | `2**18` (= 262,144) | | -| `MAX_SHARD_BLOCK_CHUNKS` | `2**2` (= 4) | | -| `TARGET_SHARD_BLOCK_SIZE` | `3 * 2**16` (= 196,608) | | +| `MAX_SHARD_BLOCK_SIZE` | `2**20` (= 1,048,576) | | +| `TARGET_SHARD_BLOCK_SIZE` | `2**18` (= 262,144) | | | `SHARD_BLOCK_OFFSETS` | `[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233]` | | | `MAX_SHARD_BLOCKS_PER_ATTESTATION` | `len(SHARD_BLOCK_OFFSETS)` | | | `MAX_GASPRICE` | `Gwei(2**14)` (= 16,384) | Gwei | | @@ -297,7 +296,7 @@ class ShardBlockWrapper(Container): shard_parent_root: Root beacon_parent_root: Root slot: Slot - body: ByteList[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] + body: ByteList[MAX_SHARD_BLOCK_SIZE] signature: BLSSignature ``` @@ -330,7 +329,7 @@ class ShardTransition(Container): # Shard block lengths shard_block_lengths: List[uint64, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Shard data roots - shard_data_roots: List[List[Bytes32, MAX_SHARD_BLOCK_CHUNKS], MAX_SHARD_BLOCKS_PER_ATTESTATION] + shard_data_roots: List[Bytes32, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Intermediate shard states shard_states: List[ShardState, MAX_SHARD_BLOCKS_PER_ATTESTATION] # Proposer signature aggregate @@ -396,16 +395,6 @@ def committee_to_compact_committee(state: BeaconState, committee: Sequence[Valid return CompactCommittee(pubkeys=pubkeys, compact_validators=compact_validators) ``` -#### `chunks_to_body_root` - -```python -def chunks_to_body_root(chunks: List[Bytes32, MAX_SHARD_BLOCK_CHUNKS]) -> Root: - empty_chunk_root = hash_tree_root(ByteList[SHARD_BLOCK_CHUNK_SIZE]()) - return hash_tree_root(Vector[Bytes32, MAX_SHARD_BLOCK_CHUNKS]( - chunks + [empty_chunk_root] * (MAX_SHARD_BLOCK_CHUNKS - len(chunks)) - )) -``` - #### `compute_shard_from_committee_index` ```python @@ 
-666,20 +655,18 @@ def apply_shard_transition(state: BeaconState, shard: Shard, transition: ShardTr shard_parent_root=shard_parent_root, parent_hash=get_block_root_at_slot(state, get_previous_slot(state.slot)), slot=offset_slots[i], - body_root=chunks_to_body_root(transition.shard_data_roots[i]) + body_root=transition.shard_data_roots[i] )) proposers.append(get_shard_proposer_index(state, shard, offset_slots[i])) shard_parent_root = hash_tree_root(headers[-1]) - # Verify correct calculation of gas prices and slots and chunk roots + # Verify correct calculation of gas prices and slots prev_gasprice = state.shard_states[shard].gasprice for i in range(len(offset_slots)): shard_state = transition.shard_states[i] block_length = transition.shard_block_lengths[i] - chunks = transition.shard_data_roots[i] assert shard_state.gasprice == get_updated_gasprice(prev_gasprice, block_length) assert shard_state.slot == offset_slots[i] - assert len(chunks) == block_length // SHARD_BLOCK_CHUNK_SIZE prev_gasprice = shard_state.gasprice pubkeys = [state.validators[proposer].pubkey for proposer in proposers] @@ -724,10 +711,7 @@ def process_crosslink_for_shard(state: BeaconState, # Attestation <-> shard transition consistency assert shard_transition_root == hash_tree_root(shard_transition) - assert ( - attestation.data.head_shard_root - == chunks_to_body_root(shard_transition.shard_data_roots[-1]) - ) + assert attestation.data.head_shard_root == shard_transition.shard_data_roots[-1] # Apply transition apply_shard_transition(state, shard, shard_transition) diff --git a/specs/phase1/custody-game.md b/specs/phase1/custody-game.md index fd35e6515..121f91f97 100644 --- a/specs/phase1/custody-game.md +++ b/specs/phase1/custody-game.md @@ -103,7 +103,7 @@ class CustodySlashing(Container): whistleblower_index: ValidatorIndex shard_transition: ShardTransition attestation: Attestation - data: ByteList[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE] + data: ByteList[MAX_SHARD_BLOCK_SIZE] ``` 
#### `SignedCustodySlashing` @@ -366,8 +366,7 @@ def process_custody_slashing(state: BeaconState, signed_custody_slashing: Signed shard_transition = custody_slashing.shard_transition assert hash_tree_root(shard_transition) == attestation.shard_transition_root # Verify that the provided data matches the shard-transition - shard_chunk_roots = shard_transition.shard_data_roots[custody_slashing.data_index] - assert hash_tree_root(custody_slashing.data) == chunks_to_body_root(shard_chunk_roots) + assert hash_tree_root(custody_slashing.data) == shard_transition.shard_data_roots[custody_slashing.data_index] # Verify existence and participation of claimed malefactor attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bits) diff --git a/specs/phase1/fraud-proofs.md b/specs/phase1/fraud-proofs.md index c9368ad2c..0688f5f47 100644 --- a/specs/phase1/fraud-proofs.md +++ b/specs/phase1/fraud-proofs.md @@ -50,7 +50,7 @@ def shard_state_transition(shard: Shard, pre_state: Root, previous_beacon_root: Root, proposer_pubkey: BLSPubkey, - block_data: ByteVector[MAX_SHARD_BLOCK_CHUNKS * SHARD_BLOCK_CHUNK_SIZE]) -> Root: + block_data: ByteList[MAX_SHARD_BLOCK_SIZE]) -> Root: # We will add something more substantive in phase 2 return hash(pre_state + hash_tree_root(previous_beacon_root) + hash_tree_root(block_data)) ``` From 52fb929978685b10ef3b807eba6b461fc1744d08 Mon Sep 17 00:00:00 2001 From: vbuterin Date: Sat, 11 Jan 2020 12:42:34 +0800 Subject: [PATCH 122/194] Update specs/core/1_beacon-chain.md --- specs/phase1/beacon-chain.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index d1b7d52a0..78b3b3d25 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -35,7 +35,6 @@ - [`get_previous_slot`](#get_previous_slot) - [`pack_compact_validator`](#pack_compact_validator) - [`committee_to_compact_committee`](#committee_to_compact_committee) - - 
[`chunks_to_body_root`](#chunks_to_body_root) - [`compute_shard_from_committee_index`](#compute_shard_from_committee_index) - [Beacon state accessors](#beacon-state-accessors) - [`get_active_shard_count`](#get_active_shard_count) @@ -905,4 +904,3 @@ def process_light_client_committee_updates(state: BeaconState) -> None: new_committee = get_light_client_committee(state, get_current_epoch(state) + LIGHT_CLIENT_COMMITTEE_PERIOD) state.next_light_committee = committee_to_compact_committee(state, new_committee) ``` - From 51f2974678bd0711439ec0f8a7a066f9d9f18bb3 Mon Sep 17 00:00:00 2001 From: Herman Junge Date: Wed, 29 Jan 2020 13:51:38 +0000 Subject: [PATCH 123/194] Update validator.md Minor edit --- specs/phase0/validator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 494035bda..5816e0a74 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -189,7 +189,7 @@ def is_proposer(state: BeaconState, *Note*: To see if a validator is assigned to propose during the slot, the beacon state must be in the epoch in question. At the epoch boundaries, the validator must run an epoch transition into the epoch to successfully check the proposal assignment of the first slot. -*Note*: `BeaconBlock` proposal is distinct from beacon committee assignment, and in a given epoch each responsibility might occur at different a different slot. +*Note*: `BeaconBlock` proposal is distinct from beacon committee assignment, and in a given epoch each responsibility might occur at a different slot. 
### Lookahead From fe58c78da8ecef9f9bdeabacf87c90bbdb94e0e0 Mon Sep 17 00:00:00 2001 From: terence tsao Date: Wed, 29 Jan 2020 14:08:48 -0800 Subject: [PATCH 124/194] Fix table --- specs/phase1/phase1-fork.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase1/phase1-fork.md b/specs/phase1/phase1-fork.md index 56eee410b..adb0cd236 100644 --- a/specs/phase1/phase1-fork.md +++ b/specs/phase1/phase1-fork.md @@ -33,7 +33,7 @@ This document describes the process of moving from Phase 0 to Phase 1 of Ethereu Warning: this configuration is not definitive. | Name | Value | -| - | - | - | +| - | - | | `PHASE_1_FORK_VERSION` | `Version('0x01000000')` | | `INITIAL_ACTIVE_SHARDS` | `2**6` (= 64) | | `INITIAL_GASPRICE` | `Gwei(10)` | From 340549aed62caa15a9f04cf393805c2003bd5c8e Mon Sep 17 00:00:00 2001 From: protolambda Date: Fri, 31 Jan 2020 11:52:30 +0100 Subject: [PATCH 125/194] spec version -> spec fork --- setup.py | 50 +++++++++---------- tests/core/pyspec/eth2spec/test/context.py | 2 +- .../test/fork_choice/test_on_attestation.py | 2 +- .../eth2spec/test/helpers/attestations.py | 4 +- .../test/helpers/attester_slashings.py | 8 +-- .../test_process_attester_slashing.py | 2 +- 6 files changed, 34 insertions(+), 34 deletions(-) diff --git a/setup.py b/setup.py index 1c652e0eb..e12d7e65e 100644 --- a/setup.py +++ b/setup.py @@ -185,7 +185,7 @@ get_beacon_committee = cache_this( _get_beacon_committee)''' -def objects_to_spec(spec_object: SpecObject, imports: str, version: str) -> str: +def objects_to_spec(spec_object: SpecObject, imports: str, fork: str) -> str: """ Given all the objects that constitute a spec, combine them into a single pyfile. 
""" @@ -208,7 +208,7 @@ def objects_to_spec(spec_object: SpecObject, imports: str, version: str) -> str: ssz_objects_instantiation_spec = '\n\n'.join(spec_object.ssz_objects.values()) spec = ( imports - + '\n\n' + f"version = \'{version}\'\n" + + '\n\n' + f"fork = \'{fork}\'\n" + '\n\n' + new_type_definitions + '\n' + SUNDRY_CONSTANTS_FUNCTIONS + '\n\n' + constants_spec @@ -267,7 +267,7 @@ def dependency_order_ssz_objects(objects: Dict[str, str], custom_types: Dict[str def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]: """ Takes in old spec and new spec ssz objects, combines them, - and returns the newer versions of the objects in dependency order. + and returns the newer forks of the objects in dependency order. """ for key, value in new_objects.items(): old_objects[key] = value @@ -287,13 +287,13 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject: return SpecObject(functions, custom_types, constants, ssz_objects) -version_imports = { +fork_imports = { 'phase0': PHASE0_IMPORTS, 'phase1': PHASE1_IMPORTS, } -def build_spec(version: str, source_files: List[str]) -> str: +def build_spec(fork: str, source_files: List[str]) -> str: all_specs = [get_spec(spec) for spec in source_files] spec_object = all_specs[0] @@ -302,7 +302,7 @@ def build_spec(version: str, source_files: List[str]) -> str: dependency_order_ssz_objects(spec_object.ssz_objects, spec_object.custom_types) - return objects_to_spec(spec_object, version_imports[version], version) + return objects_to_spec(spec_object, fork_imports[fork], fork) class PySpecCommand(Command): @@ -310,14 +310,14 @@ class PySpecCommand(Command): description = "Convert spec markdown files to a spec python file" - spec_version: str + spec_fork: str md_doc_paths: str parsed_md_doc_paths: List[str] out_dir: str # The format is (long option, short option, description). user_options = [ - ('spec-version=', None, "Spec version to tag build with. 
Used to select md-docs defaults."), + ('spec-fork=', None, "Spec fork to tag build with. Used to select md-docs defaults."), ('md-doc-paths=', None, "List of paths of markdown files to build spec with"), ('out-dir=', None, "Output directory to write spec package to") ] @@ -325,7 +325,7 @@ class PySpecCommand(Command): def initialize_options(self): """Set default values for options.""" # Each user option must be listed here with their default value. - self.spec_version = 'phase0' + self.spec_fork = 'phase0' self.md_doc_paths = '' self.out_dir = 'pyspec_output' @@ -333,14 +333,14 @@ class PySpecCommand(Command): """Post-process options.""" if len(self.md_doc_paths) == 0: print("no paths were specified, using default markdown file paths for pyspec" - " build (spec version: %s)" % self.spec_version) - if self.spec_version == "phase0": + " build (spec fork: %s)" % self.spec_fork) + if self.spec_fork == "phase0": self.md_doc_paths = """ specs/phase0/beacon-chain.md specs/phase0/fork-choice.md specs/phase0/validator.md """ - elif self.spec_version == "phase1": + elif self.spec_fork == "phase1": self.md_doc_paths = """ specs/phase0/beacon-chain.md specs/phase0/fork-choice.md @@ -351,7 +351,7 @@ class PySpecCommand(Command): specs/phase1/phase1-fork.md """ else: - raise Exception('no markdown files specified, and spec version "%s" is unknown', self.spec_version) + raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork) self.parsed_md_doc_paths = self.md_doc_paths.split() @@ -360,10 +360,10 @@ class PySpecCommand(Command): raise Exception('Pyspec markdown input file "%s" does not exist.' % filename) def run(self): - spec_str = build_spec(self.spec_version, self.parsed_md_doc_paths) + spec_str = build_spec(self.spec_fork, self.parsed_md_doc_paths) if self.dry_run: self.announce('dry run successfully prepared contents for spec.' 
- f' out dir: "{self.out_dir}", spec version: "{self.spec_version}"') + f' out dir: "{self.out_dir}", spec fork: "{self.spec_fork}"') self.debug_print(spec_str) else: dir_util.mkpath(self.out_dir) @@ -379,17 +379,17 @@ class BuildPyCommand(build_py): def initialize_options(self): super(BuildPyCommand, self).initialize_options() - def run_pyspec_cmd(self, spec_version: str, **opts): + def run_pyspec_cmd(self, spec_fork: str, **opts): cmd_obj: PySpecCommand = self.distribution.reinitialize_command("pyspec") - cmd_obj.spec_version = spec_version - cmd_obj.out_dir = os.path.join(self.build_lib, 'eth2spec', spec_version) + cmd_obj.spec_fork = spec_fork + cmd_obj.out_dir = os.path.join(self.build_lib, 'eth2spec', spec_fork) for k, v in opts.items(): setattr(cmd_obj, k, v) self.run_command('pyspec') def run(self): - for spec_version in version_imports: - self.run_pyspec_cmd(spec_version=spec_version) + for spec_fork in fork_imports: + self.run_pyspec_cmd(spec_fork=spec_fork) super(BuildPyCommand, self).run() @@ -405,19 +405,19 @@ class PyspecDevCommand(Command): def finalize_options(self): pass - def run_pyspec_cmd(self, spec_version: str, **opts): + def run_pyspec_cmd(self, spec_fork: str, **opts): cmd_obj: PySpecCommand = self.distribution.reinitialize_command("pyspec") - cmd_obj.spec_version = spec_version + cmd_obj.spec_fork = spec_fork eth2spec_dir = convert_path(self.distribution.package_dir['eth2spec']) - cmd_obj.out_dir = os.path.join(eth2spec_dir, spec_version) + cmd_obj.out_dir = os.path.join(eth2spec_dir, spec_fork) for k, v in opts.items(): setattr(cmd_obj, k, v) self.run_command('pyspec') def run(self): print("running build_py command") - for spec_version in version_imports: - self.run_pyspec_cmd(spec_version=spec_version) + for spec_fork in fork_imports: + self.run_pyspec_cmd(spec_fork=spec_fork) commands = { 'pyspec': PySpecCommand, diff --git a/tests/core/pyspec/eth2spec/test/context.py b/tests/core/pyspec/eth2spec/test/context.py index 
6134243df..5338ccb9d 100644 --- a/tests/core/pyspec/eth2spec/test/context.py +++ b/tests/core/pyspec/eth2spec/test/context.py @@ -50,7 +50,7 @@ def with_custom_state(balances_fn: Callable[[Any], Sequence[int]], state = create_genesis_state(spec=p0, validator_balances=balances, activation_threshold=activation_threshold) - if spec.version == 'phase1': + if spec.fork == 'phase1': # TODO: instead of upgrading a test phase0 genesis state we can also write a phase1 state helper. # Decide based on performance/consistency results later. state = phases["phase1"].upgrade_to_phase1(state) diff --git a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py index a0a33ca50..09248944c 100644 --- a/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py +++ b/tests/core/pyspec/eth2spec/test/fork_choice/test_on_attestation.py @@ -16,7 +16,7 @@ def run_on_attestation(spec, state, store, attestation, valid=True): indexed_attestation = spec.get_indexed_attestation(state, attestation) spec.on_attestation(store, attestation) - if spec.version == 'phase0': + if spec.fork == 'phase0': sample_index = indexed_attestation.attesting_indices[0] else: attesting_indices = [ diff --git a/tests/core/pyspec/eth2spec/test/helpers/attestations.py b/tests/core/pyspec/eth2spec/test/helpers/attestations.py index b8733705a..047966890 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/attestations.py +++ b/tests/core/pyspec/eth2spec/test/helpers/attestations.py @@ -77,12 +77,12 @@ def sign_aggregate_attestation(spec, state, attestation_data, participants: List privkey ) ) - # TODO: we should try signing custody bits if spec.version == 'phase1' + # TODO: we should try signing custody bits if spec.fork == 'phase1' return bls.Aggregate(signatures) def sign_indexed_attestation(spec, state, indexed_attestation): - if spec.version == 'phase0': + if spec.fork == 'phase0': participants = 
indexed_attestation.attesting_indices data = indexed_attestation.data indexed_attestation.signature = sign_aggregate_attestation(spec, state, data, participants) diff --git a/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py b/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py index 70654759e..5dfedc200 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py +++ b/tests/core/pyspec/eth2spec/test/helpers/attester_slashings.py @@ -20,7 +20,7 @@ def get_indexed_attestation_participants(spec, indexed_att): """ Wrapper around index-attestation to return the list of participant indices, regardless of spec phase. """ - if spec.version == "phase1": + if spec.fork == "phase1": return list(spec.get_indices_from_committee( indexed_att.committee, indexed_att.attestation.aggregation_bits, @@ -33,21 +33,21 @@ def set_indexed_attestation_participants(spec, indexed_att, participants): """ Wrapper around index-attestation to return the list of participant indices, regardless of spec phase. 
""" - if spec.version == "phase1": + if spec.fork == "phase1": indexed_att.attestation.aggregation_bits = [bool(i in participants) for i in indexed_att.committee] else: indexed_att.attesting_indices = participants def get_attestation_1_data(spec, att_slashing): - if spec.version == "phase1": + if spec.fork == "phase1": return att_slashing.attestation_1.attestation.data else: return att_slashing.attestation_1.data def get_attestation_2_data(spec, att_slashing): - if spec.version == "phase1": + if spec.fork == "phase1": return att_slashing.attestation_2.attestation.data else: return att_slashing.attestation_2.data diff --git a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py index c2fada6ba..e9665e714 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_attester_slashing.py @@ -161,7 +161,7 @@ def test_same_data(spec, state): indexed_att_1 = attester_slashing.attestation_1 att_2_data = get_attestation_2_data(spec, attester_slashing) - if spec.version == 'phase1': + if spec.fork == 'phase1': indexed_att_1.attestation.data = att_2_data else: indexed_att_1.data = att_2_data From c943b5832e252851bcc85a872e515fd7507d1f9e Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 31 Jan 2020 21:59:37 +0100 Subject: [PATCH 126/194] drop `head_block_root` from BeaconBlocksByRange This change simplifies the protocol and removes a race condition between block request and response. In the case of honest server, this helps serve the canonical / fork-chosen chain better while dishonest or broken servers still need to be handled the same way. Might as well get started on versions and upgrade it to 2, since the change is backwards incompatible. 
--- specs/phase0/p2p-interface.md | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 674f2e2b8..fa4ecb66b 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -94,6 +94,8 @@ It consists of four main sections: - [Why do we version protocol strings with ordinals instead of semver?](#why-do-we-version-protocol-strings-with-ordinals-instead-of-semver) - [Why is it called Req/Resp and not RPC?](#why-is-it-called-reqresp-and-not-rpc) - [Why do we allow empty responses in block requests?](#why-do-we-allow-empty-responses-in-block-requests) + - [Why does `BeaconBlocksByRange` let the server choose which chain to send blocks from?](#why-does-beaconblocksbyrange-let-the-server-choose-which-chain-to-send-blocks-from) + - [What's the effect of empty slots on the sync algorithm?](#whats-the-effect-of-empty-slots-on-the-sync-algorithm) - [Discovery](#discovery) - [Why are we using discv5 and not libp2p Kademlia DHT?](#why-are-we-using-discv5-and-not-libp2p-kademlia-dht) - [What is the difference between an ENR and a multiaddr, and why are we using ENRs?](#what-is-the-difference-between-an-enr-and-a-multiaddr-and-why-are-we-using-enrs) @@ -478,12 +480,11 @@ The response MUST consist of a single `response_chunk`. #### BeaconBlocksByRange -**Protocol ID:** `/eth2/beacon_chain/req/beacon_blocks_by_range/1/` +**Protocol ID:** `/eth2/beacon_chain/req/beacon_blocks_by_range/2/` Request Content: ``` ( - head_block_root: Bytes32 start_slot: uint64 count: uint64 step: uint64 @@ -497,22 +498,23 @@ Response Content: ) ``` -Requests count beacon blocks from the peer starting from `start_slot` on the chain defined by `head_block_root` (= `hash_tree_root(SignedBeaconBlock.message)`). The response MUST contain no more than count blocks. `step` defines the slot increment between blocks. 
For example, requesting blocks starting at `start_slot` 2 with a step value of 2 would return the blocks at [2, 4, 6, …]. In cases where a slot is empty for a given slot number, no block is returned. For example, if slot 4 were empty in the previous example, the returned array would contain [2, 6, …]. A step value of 1 returns all blocks on the range `[start_slot, start_slot + count)`. +Requests count beacon blocks from the peer starting from `start_slot`, leading up to the current head block as selected by fork choice. `step` defines the slot increment between blocks. For example, requesting blocks starting at `start_slot` 2 with a step value of 2 would return the blocks at slots [2, 4, 6, …]. In cases where a slot is empty for a given slot number, no block is returned. For example, if slot 4 were empty in the previous example, the returned array would contain [2, 6, …]. A step value of 1 returns all blocks on the range `[start_slot, start_slot + count)`. + +`BeaconBlocksByRange` is primarily used to sync historical blocks. The request MUST be encoded as an SSZ-container. The response MUST consist of zero or more `response_chunk`. Each _successful_ `response_chunk` MUST contain a single `SignedBeaconBlock` payload. - -`BeaconBlocksByRange` is primarily used to sync historical blocks. - Clients MUST support requesting blocks since the start of the weak subjectivity period and up to the given `head_block_root`. -Clients MUST support `head_block_root` values since the latest finalized epoch. - Clients MUST respond with at least one block, if they have it and it exists in the range. Clients MAY limit the number of blocks in the response. +The response MUST contain no more than `count` blocks. + Clients MUST order blocks by increasing slot number. +Clients MUST respond with blocks from what they consider to be the canonical chain as selected by fork choice.
In particular, blocks from slots before finalization MUST lead to the finalized block reported in the `Status` handshake. + +#### BeaconBlocksByRoot **Protocol ID:** `/eth2/beacon_chain/req/beacon_blocks_by_root/1/` @@ -886,6 +888,18 @@ Assuming option 0 with no special `null` encoding, consider a request for slots Failing to provide blocks that nodes "should" have is reason to trust a peer less - for example, if a particular peer gossips a block, it should have access to its parent. If a request for the parent fails, it's indicative of poor peer quality since peers should validate blocks before gossiping them. +### Why does `BeaconBlocksByRange` let the server choose which chain to send blocks from? + +When connecting, the `Status` message gives an idea about the sync status of a particular peer, but this changes over time. By the time a subsequent `BeaconBlocksByRange` request is processed, the information may be stale, and the responding side might have moved on to a new finalization point and pruned blocks around the previous head and finalized blocks. + +To avoid this race condition, we allow the responding side to choose which chain to send to the requesting client. The requesting client then goes on to validate the blocks and incorporate them in their own database - because they follow the same rules, they should at this point arrive at the same chain. + +### What's the effect of empty slots on the sync algorithm? + +When syncing one can only tell that a slot has been skipped on a particular chain by examining subsequent blocks and analyzing the graph formed by the parent root. Because the server side may choose to omit blocks in the response for any reason, clients must validate the graph and be prepared to fill in gaps. 
+ +For example, if a peer responds with blocks [2, 3] when asked for [2, 3, 4], clients may not assume that block 4 doesn't exist - it merely means that the responding peer did not send it (they may not have it yet or may maliciously be trying to hide it) and successive blocks will be needed to determine if there exists a block at slot 4 in this particular chain. + ## Discovery ### Why are we using discv5 and not libp2p Kademlia DHT? From 3184ade0f34d53da18c6c81aa778b1b1b9814e8d Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 1 Feb 2020 11:22:01 +0100 Subject: [PATCH 127/194] version stub --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index b4621d79c..23cad7dfa 100644 --- a/setup.py +++ b/setup.py @@ -430,6 +430,7 @@ with open("README.md", "rt", encoding="utf8") as f: setup( name='eth2spec', + version='0.0.1', # initial version, see #1584 and #1596 description="Eth2 spec, provided as Python package for tooling and testing", long_description=readme, long_description_content_type="text/markdown", From 74c75fd6f18b396a958c8c10de7ba44b9a073c4c Mon Sep 17 00:00:00 2001 From: Jim McDonald Date: Sun, 2 Feb 2020 22:08:11 +0000 Subject: [PATCH 128/194] Remove extraneous word --- specs/phase0/validator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 5816e0a74..a61524f1b 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -356,7 +356,7 @@ A validator is expected to create, sign, and broadcast an attestation during eac A validator should create and broadcast the `attestation` to the associated attestation subnet when either (a) the validator has received a valid block from the expected block proposer for the assigned `slot` or (b) one-third of the `slot` hash transpired (`SECONDS_PER_SLOT / 3` seconds after the start of `slot`) -- whichever comes _first_. 
-*Note*: Although attestations during `GENESIS_EPOCH` do not count toward FFG finality, these initial attestations do give weight to the fork choice, are rewarded fork, and should be made. +*Note*: Although attestations during `GENESIS_EPOCH` do not count toward FFG finality, these initial attestations do give weight to the fork choice, are rewarded, and should be made. #### Attestation data From 93249aadda184289b716484399b00797a7cb7bea Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 4 Feb 2020 13:56:32 +0100 Subject: [PATCH 129/194] Proposal to focus on length-encoding SSZ contents, enable streaming of chunk contents, and put stricter DOS limits in place --- specs/phase0/p2p-interface.md | 44 +++++++++++++++++++++-------------- 1 file changed, 27 insertions(+), 17 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 674f2e2b8..8ac00cf06 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -403,9 +403,28 @@ The token of the negotiated protocol ID specifies the type of encoding to be use #### SSZ-encoding strategy (with or without Snappy) -The [SimpleSerialize (SSZ) specification](../../ssz/simple-serialize.md) outlines how objects are SSZ-encoded. If the Snappy variant is selected, we feed the serialized form to the Snappy compressor on encoding. The inverse happens on decoding. +The [SimpleSerialize (SSZ) specification](../../ssz/simple-serialize.md) outlines how objects are SSZ-encoded. -**Encoding-dependent header:** Req/Resp protocols using the `ssz` or `ssz_snappy` encoding strategies MUST prefix all encoded and compressed (if applicable) payloads with an unsigned [protobuf varint](https://developers.google.com/protocol-buffers/docs/encoding#varints). +If the Snappy variant is selected, we feed the serialized form of the object to the Snappy compressor on encoding. The inverse happens on decoding. + +Snappy has two formats: "block" and "frames" (streaming). 
To support large requests and response chunks, snappy-framing is used. + +Since snappy frame contents [have a maximum size of `65536` bytes](https://github.com/google/snappy/blob/master/framing_format.txt#L104) + and frame headers are just `identifier (1) + checksum (4)` bytes, the expected buffering of a single frame is acceptable. + +**Encoding-dependent header:** Req/Resp protocols using the `ssz` or `ssz_snappy` encoding strategies MUST encode the length of the raw SSZ bytes, encoded as an unsigned [protobuf varint](https://developers.google.com/protocol-buffers/docs/encoding#varints). + +*Writing*: By first computing and writing the SSZ byte length the SSZ encoder can then directly write the chunk contents to the stream. +If Snappy is applied, it can be passed through a buffered Snappy writer to compress frame by frame. + +*Reading*: After reading the expected SSZ byte length, the SSZ decoder can directly read the contents from the stream. +If Snappy is applied, it can be passed through a buffered Snappy reader to decompress frame by frame. + +A reader: +- SHOULD NOT read more than `max_encoded_len(n)` bytes (`32 + n + n/6`) after reading the SSZ length prefix `n` from the header, [this is considered the worst-case compression result by Snappy](https://github.com/google/snappy/blob/537f4ad6240e586970fe554614542e9717df7902/snappy.cc#L98). +- SHOULD NOT accept an SSZ length prefix that is bigger than the expected maximum length for the SSZ type (derived from SSZ type information such as vector lengths and list limits). +- MUST consider remaining bytes, after having read the `n` SSZ bytes, as an invalid input. An EOF is expected. +- MUST consider an early EOF, before fully reading the declared length prefix worth of SSZ bytes, as an invalid input. All messages that contain only a single field MUST be encoded directly as the type of that field and MUST NOT be encoded as an SSZ container. 
@@ -829,23 +848,14 @@ Requests are segregated by protocol ID to: We are using single-use streams where each stream is closed at the end of the message. Thus, libp2p transparently handles message delimiting in the underlying stream. libp2p streams are full-duplex, and each party is responsible for closing their write side (like in TCP). We can therefore use stream closure to mark the end of the request and response independently. -Nevertheless, messages are still length-prefixed—this is now being considered for removal. - -Advantages of length-prefixing include: - -* Reader can prepare a correctly sized buffer before reading message +Nevertheless, in the case of `ssz` and `ssz_snappy`, messages are still length-prefixed with the length of the underlying data: +* A basic reader can prepare a correctly sized buffer before reading the message +* A more advanced reader can stream-decode SSZ given the length of the SSZ data. * Alignment with protocols like gRPC over HTTP/2 that prefix with length -* Sanity checking of stream closure / message length +* Sanity checking of message length, and enabling much stricter message length limiting based on SSZ type information, + to provide even more DOS protection than the global message length already does. E.g. a small `Status` message does not nearly require `MAX_CHUNK_SIZE` bytes. -Disadvantages include: - -* Redundant methods of message delimiting—both stream end marker and length prefix -* Harder to stream as length must be known up-front -* Additional code path required to verify length - -In some protocols, adding a length prefix serves as a form of DoS protection against very long messages, allowing the client to abort if an overlong message is about to be sent. In this protocol, we are globally limiting message sizes using `MAX_CHUNK_SIZE`, thus the length prefix does not afford any additional protection. 
- -[Protobuf varint](https://developers.google.com/protocol-buffers/docs/encoding#varints) is an efficient technique to encode variable-length ints. Instead of reserving a fixed-size field of as many bytes as necessary to convey the maximum possible value, this field is elastic in exchange for 1-bit overhead per byte. +[Protobuf varint](https://developers.google.com/protocol-buffers/docs/encoding#varints) is an efficient technique to encode variable-length (unsigned here) ints. Instead of reserving a fixed-size field of as many bytes as necessary to convey the maximum possible value, this field is elastic in exchange for 1-bit overhead per byte. ### Why do we version protocol strings with ordinals instead of semver? From 9355a6ebca5dcbfa3bb11da875b5ad4c7a3e18df Mon Sep 17 00:00:00 2001 From: Age Manning Date: Wed, 5 Feb 2020 14:33:00 +1100 Subject: [PATCH 130/194] Noise libp2p specification --- specs/phase0/p2p-interface.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 674f2e2b8..caf528b03 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -149,9 +149,11 @@ The following SecIO parameters MUST be supported by all stacks: #### Mainnet -[Noise Framework](http://www.noiseprotocol.org/) handshakes will be used for mainnet. libp2p Noise support [is in the process of being standardized](https://github.com/libp2p/specs/issues/195) in the libp2p project. +The [Libp2p-noise](https://github.com/libp2p/specs/tree/master/noise) secure +channel handshake with `secp256k1` identities will be used for mainnet. -Noise support will presumably include IX, IK, and XX handshake patterns, and may rely on Curve25519 keys, ChaCha20 and Poly1305 ciphers, and SHA-256 as a hash function. These aspects are being actively debated in the referenced issue (Eth2 implementers are welcome to comment and contribute to the discussion). 
+As specified in the libp2p specification, clients MUST support the `XX` handshake pattern and +can optionally implement the `IK` and `XXfallback` patterns for optimistic 0-RTT. ## Protocol Negotiation From 1a16e9a3ab972fff0ed0fdb363df04a39a2e4ac0 Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 5 Feb 2020 18:38:21 +0100 Subject: [PATCH 131/194] version eth2spec --- Makefile | 11 +++++++++++ setup.py | 16 ++++++++++++++-- tests/core/pyspec/eth2spec/VERSION.txt | 1 + tests/core/pyspec/eth2spec/__init__.py | 3 +++ 4 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 tests/core/pyspec/eth2spec/VERSION.txt diff --git a/Makefile b/Makefile index abd240b51..b468e648c 100644 --- a/Makefile +++ b/Makefile @@ -51,6 +51,17 @@ clean: partial_clean rm -rf $(DEPOSIT_CONTRACT_COMPILER_DIR)/venv rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/venv +# The pyspec is rebuilt to enforce the /specs being part of eth2specs source distribution. It could be forgotten otherwise. +dist_build: pyspec + python3 setup.py sdist bdist_wheel + +dist_check: + python3 -m twine check dist/* + +dist_upload: + python3 -m twine upload dist/* + + # "make generate_tests" to run all generators generate_tests: $(GENERATOR_TARGETS) diff --git a/setup.py b/setup.py index 23cad7dfa..444489d2e 100644 --- a/setup.py +++ b/setup.py @@ -428,9 +428,20 @@ commands = { with open("README.md", "rt", encoding="utf8") as f: readme = f.read() +# How to use "VERSION.txt" file: +# - dev branch contains "X.Y.Z.dev", where "X.Y.Z" is the target version to release dev into. +# -> Changed as part of 'master' backport to 'dev' +# - master branch contains "X.Y.Z", where "X.Y.Z" is the current version. +# -> Changed as part of 'dev' release (or other branch) into 'master' +# -> In case of a commit on master without git tag, target the next version +# with ".postN" (release candidate, numbered) suffixed. 
+# See https://www.python.org/dev/peps/pep-0440/#public-version-identifiers +with open(os.path.join('tests', 'core', 'pyspec', 'eth2spec', 'VERSION.txt')) as f: + spec_version = f.read().strip() + setup( name='eth2spec', - version='0.0.1', # initial version, see #1584 and #1596 + version=spec_version, description="Eth2 spec, provided as Python package for tooling and testing", long_description=readme, long_description_content_type="text/markdown", @@ -438,7 +449,8 @@ setup( url="https://github.com/ethereum/eth2.0-specs", include_package_data=False, package_data={'configs': ['*.yaml'], - 'specs': ['**/*.md']}, + 'specs': ['**/*.md'], + 'eth2spec': ['VERSION.txt']}, package_dir={ "eth2spec": "tests/core/pyspec/eth2spec", "configs": "configs", diff --git a/tests/core/pyspec/eth2spec/VERSION.txt b/tests/core/pyspec/eth2spec/VERSION.txt new file mode 100644 index 000000000..9f20097b6 --- /dev/null +++ b/tests/core/pyspec/eth2spec/VERSION.txt @@ -0,0 +1 @@ +0.10.2.dev0 \ No newline at end of file diff --git a/tests/core/pyspec/eth2spec/__init__.py b/tests/core/pyspec/eth2spec/__init__.py index e69de29bb..6941b68f0 100644 --- a/tests/core/pyspec/eth2spec/__init__.py +++ b/tests/core/pyspec/eth2spec/__init__.py @@ -0,0 +1,3 @@ +# See setup.py about usage of VERSION.txt +with open('VERSION.txt') as f: + __version__ = f.read().strip() From f5723cdd9374b469df6243a9a59c836faefac99d Mon Sep 17 00:00:00 2001 From: Chih Cheng Liang Date: Wed, 5 Feb 2020 13:04:51 +0800 Subject: [PATCH 132/194] fix confusing directory name --- tests/generators/bls/main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/generators/bls/main.py b/tests/generators/bls/main.py index 19468f041..50b18806a 100644 --- a/tests/generators/bls/main.py +++ b/tests/generators/bls/main.py @@ -171,7 +171,7 @@ def case05_aggregate_verify(): sigs.append(sig) aggregate_signature = bls.G2ProofOfPossession.Aggregate(sigs) - yield f'fast_aggregate_verify_valid', { + yield 
f'aggregate_verify_valid', { 'input': { 'pairs': pairs, 'signature': encode_hex(aggregate_signature), @@ -180,7 +180,7 @@ } tampered_signature = aggregate_signature[:4] + b'\xff\xff\xff\xff' - yield f'fast_aggregate_verify_tampered_signature', { + yield f'aggregate_verify_tampered_signature', { 'input': { 'pairs': pairs, 'signature': encode_hex(tampered_signature), From b57b4f5e0d8274c4e2cfd0517bf62eb1fc57381f Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 6 Feb 2020 13:02:16 -0600 Subject: [PATCH 133/194] move description to human readable name --- tests/generators/bls/main.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/generators/bls/main.py b/tests/generators/bls/main.py index 50b18806a..76135f282 100644 --- a/tests/generators/bls/main.py +++ b/tests/generators/bls/main.py @@ -69,8 +69,8 @@ def case02_verify(): # Valid signature signature = bls.G2ProofOfPossession.Sign(privkey, message) pubkey = bls.G2ProofOfPossession.PrivToPub(privkey) - full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}_valid' - yield f'verify_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}' + yield f'verify_valid_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { 'input': { 'pubkey': encode_hex(pubkey), 'message': encode_hex(message), @@ -81,8 +81,8 @@ def case02_verify(): # Invalid signatures -- wrong pubkey wrong_pubkey = bls.G2ProofOfPossession.PrivToPub(PRIVKEYS[(i + 1) % len(PRIVKEYS)]) - full_name = f'{encode_hex(wrong_pubkey)}_{encode_hex(message)}_wrong_pubkey' - yield f'verify_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + full_name = f'{encode_hex(wrong_pubkey)}_{encode_hex(message)}' + yield f'verify_wrong_pubkey_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { 'input': { 'pubkey': encode_hex(wrong_pubkey), 'message': encode_hex(message), @@ -93,8 +93,8 @@ def case02_verify(): # Invalid signature -- 
tampered with signature tampered_signature = signature[:-4] + b'\xFF\xFF\xFF\xFF' - full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}_tampered_signature' - yield f'verify_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}' + yield f'verify_tampered_signature_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { 'input': { 'pubkey': encode_hex(pubkey), 'message': encode_hex(message), @@ -122,8 +122,8 @@ def case04_fast_aggregate_verify(): pubkeys_serial = [encode_hex(pubkey) for pubkey in pubkeys] # Valid signature - full_name = f'{pubkeys_serial}_{encode_hex(message)}_valid' - yield f'fast_aggregate_verify_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + full_name = f'{pubkeys_serial}_{encode_hex(message)}' + yield f'fast_aggregate_verify_valid_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { 'input': { 'pubkeys': pubkeys_serial, 'message': encode_hex(message), @@ -135,8 +135,8 @@ def case04_fast_aggregate_verify(): # Invalid signature -- extra pubkey pubkeys_extra = pubkeys + [bls.G2ProofOfPossession.PrivToPub(PRIVKEYS[-1])] pubkeys_extra_serial = [encode_hex(pubkey) for pubkey in pubkeys_extra] - full_name = f'{pubkeys_extra_serial}_{encode_hex(message)}_extra_pubkey' - yield f'fast_aggregate_verify_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + full_name = f'{pubkeys_extra_serial}_{encode_hex(message)}' + yield f'fast_aggregate_verify_extra_pubkey_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { 'input': { 'pubkeys': pubkeys_extra_serial, 'message': encode_hex(message), @@ -147,8 +147,8 @@ def case04_fast_aggregate_verify(): # Invalid signature -- tampered with signature tampered_signature = aggregate_signature[:-4] + b'\xff\xff\xff\xff' - full_name = f'{pubkeys_serial}_{encode_hex(message)}_tampered_signature' - yield f'fast_aggregate_verify_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + full_name = f'{pubkeys_serial}_{encode_hex(message)}' + yield 
f'fast_aggregate_verify_tampered_signature_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { 'input': { 'pubkeys': pubkeys_serial, 'message': encode_hex(message), From 7aeeb372a782c0fea47720afcce65f2fafc26562 Mon Sep 17 00:00:00 2001 From: Alex Stokes Date: Thu, 6 Feb 2020 16:03:40 -0800 Subject: [PATCH 134/194] [validator-guide] typo fix --- specs/phase0/validator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index a61524f1b..c10b2442a 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -354,7 +354,7 @@ def get_block_signature(state: BeaconState, header: BeaconBlockHeader, privkey: A validator is expected to create, sign, and broadcast an attestation during each epoch. The `committee`, assigned `index`, and assigned `slot` for which the validator performs this role during an epoch are defined by `get_committee_assignment(state, epoch, validator_index)`. -A validator should create and broadcast the `attestation` to the associated attestation subnet when either (a) the validator has received a valid block from the expected block proposer for the assigned `slot` or (b) one-third of the `slot` hash transpired (`SECONDS_PER_SLOT / 3` seconds after the start of `slot`) -- whichever comes _first_. +A validator should create and broadcast the `attestation` to the associated attestation subnet when either (a) the validator has received a valid block from the expected block proposer for the assigned `slot` or (b) one-third of the `slot` has transpired (`SECONDS_PER_SLOT / 3` seconds after the start of `slot`) -- whichever comes _first_. *Note*: Although attestations during `GENESIS_EPOCH` do not count toward FFG finality, these initial attestations do give weight to the fork choice, are rewarded, and should be made. 
From b398e42439e1d608f4ccb51cecd3f6047c016f8d Mon Sep 17 00:00:00 2001 From: Alex Stokes Date: Thu, 6 Feb 2020 16:41:53 -0800 Subject: [PATCH 135/194] another typo in pubsub channel name --- specs/phase0/validator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index c10b2442a..be151447d 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -411,7 +411,7 @@ def get_signed_attestation_data(state: BeaconState, attestation: IndexedAttestat #### Broadcast attestation -Finally, the validator broadcasts `attestation` to the associated attestation subnet -- the `index{attestation.data.index % ATTESTATION_SUBNET_COUNT}_beacon_attestation` pubsub topic. +Finally, the validator broadcasts `attestation` to the associated attestation subnet -- the `committee_index{attestation.data.index % ATTESTATION_SUBNET_COUNT}_beacon_attestation` pubsub topic. ### Attestation aggregation From 97d931b7051d978f92da80c3d4e9b7ccc5226682 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 7 Feb 2020 19:03:09 +0100 Subject: [PATCH 136/194] rephrase fork choice requirement --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index fa4ecb66b..053009513 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -513,7 +513,7 @@ The response MUST contain no more than `count` blocks. Clients MUST order blocks by increasing slot number. -Clients MUST respond with blocks from what they consider to be the canonical chain as select be fork choice. In particular, blocks from slots before the finalization MUST lead to the finalized block reported in the `Status` handshake. +Clients MUST respond with blocks from their view of the current fork choice. In particular, blocks from slots before the finalization MUST lead to the finalized block reported in the `Status` handshake. 
#### BeaconBlocksByRoot From 6188f350f6173e9485fc2cdb2454f201f9bf30e1 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Fri, 7 Feb 2020 19:03:33 +0100 Subject: [PATCH 137/194] it's just a number --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 053009513..852315e94 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -480,7 +480,7 @@ The response MUST consist of a single `response_chunk`. #### BeaconBlocksByRange -**Protocol ID:** `/eth2/beacon_chain/req/beacon_blocks_by_range/2/` +**Protocol ID:** `/eth2/beacon_chain/req/beacon_blocks_by_range/1/` Request Content: ``` From 305a54847c59a570f15e5605c5cd4ef507b22169 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sat, 8 Feb 2020 23:59:08 +0100 Subject: [PATCH 138/194] more explicit about version file location --- tests/core/pyspec/eth2spec/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/__init__.py b/tests/core/pyspec/eth2spec/__init__.py index 6941b68f0..1ff584383 100644 --- a/tests/core/pyspec/eth2spec/__init__.py +++ b/tests/core/pyspec/eth2spec/__init__.py @@ -1,3 +1,4 @@ # See setup.py about usage of VERSION.txt -with open('VERSION.txt') as f: +import os +with open(os.path.join(os.path.dirname(__file__), 'VERSION.txt')) as f: __version__ = f.read().strip() From 501bac8ece40fe75d31d029b53b04935300ab018 Mon Sep 17 00:00:00 2001 From: protolambda Date: Sun, 9 Feb 2020 00:07:08 +0100 Subject: [PATCH 139/194] update deposit tests cache to read eth2spec version correctly --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6b243921f..5c4b77e78 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -61,13 +61,13 @@ commands: description: "Restore the venv from cache for the deposit contract tester" steps: - 
restore_cached_venv: - venv_name: v18-deposit-contract-tester + venv_name: v19-deposit-contract-tester reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} save_deposit_contract_tester_cached_venv: description: "Save the venv to cache for later use of the deposit contract tester" steps: - save_cached_venv: - venv_name: v18-deposit-contract-tester + venv_name: v19-deposit-contract-tester reqs_checksum: cache-{{ checksum "setup.py" }}-{{ checksum "deposit_contract/tester/requirements.txt" }} venv_path: ./deposit_contract/tester/venv jobs: From 43cacc3fe92720bcd5fe7d04491c2c69bf826c8c Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Sat, 8 Feb 2020 16:58:05 -0700 Subject: [PATCH 140/194] fix validator guide to show that block slashing is per slot rather than per epoch --- specs/phase0/validator.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index a61524f1b..cf7f85da0 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -495,13 +495,13 @@ Because Phase 0 does not have shards and thus does not have Shard Committees, th ### Proposer slashing -To avoid "proposer slashings", a validator must not sign two conflicting [`BeaconBlock`](./beacon-chain.md#beaconblock) where conflicting is defined as two distinct blocks within the same epoch. +To avoid "proposer slashings", a validator must not sign two conflicting [`BeaconBlock`](./beacon-chain.md#beaconblock) where conflicting is defined as two distinct blocks within the same slot. 
-*In Phase 0, as long as the validator does not sign two different beacon blocks for the same epoch, the validator is safe against proposer slashings.* +*In Phase 0, as long as the validator does not sign two different beacon blocks for the same slot, the validator is safe against proposer slashings.* Specifically, when signing a `BeaconBlock`, a validator should perform the following steps in the following order: -1. Save a record to hard disk that a beacon block has been signed for the `epoch=compute_epoch_at_slot(block.slot)`. +1. Save a record to hard disk that a beacon block has been signed for the `slot=block.slot`. 2. Generate and broadcast the block. If the software crashes at some point within this routine, then when the validator comes back online, the hard disk has the record of the *potentially* signed/broadcast block and can effectively avoid slashing. From 0a429a479b86ae0ab31253944bc1582d153507db Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Sat, 8 Feb 2020 17:04:19 -0700 Subject: [PATCH 141/194] 'full_name' -> 'identifier' for bls generators --- tests/generators/bls/main.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/generators/bls/main.py b/tests/generators/bls/main.py index 76135f282..bad4aab06 100644 --- a/tests/generators/bls/main.py +++ b/tests/generators/bls/main.py @@ -53,8 +53,8 @@ def case01_sign(): for privkey in PRIVKEYS: for message in MESSAGES: sig = bls.G2ProofOfPossession.Sign(privkey, message) - full_name = f'{int_to_hex(privkey)}_{encode_hex(message)}' - yield f'sign_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{int_to_hex(privkey)}_{encode_hex(message)}' + yield f'sign_case_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'privkey': int_to_hex(privkey), 'message': encode_hex(message), @@ -69,8 +69,8 @@ def case02_verify(): # Valid signature signature = bls.G2ProofOfPossession.Sign(privkey, message) pubkey = 
bls.G2ProofOfPossession.PrivToPub(privkey) - full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}' - yield f'verify_valid_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{encode_hex(pubkey)}_{encode_hex(message)}' + yield f'verify_valid_case_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'pubkey': encode_hex(pubkey), 'message': encode_hex(message), @@ -81,8 +81,8 @@ def case02_verify(): # Invalid signatures -- wrong pubkey wrong_pubkey = bls.G2ProofOfPossession.PrivToPub(PRIVKEYS[(i + 1) % len(PRIVKEYS)]) - full_name = f'{encode_hex(wrong_pubkey)}_{encode_hex(message)}' - yield f'verify_wrong_pubkey_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{encode_hex(wrong_pubkey)}_{encode_hex(message)}' + yield f'verify_wrong_pubkey_case_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'pubkey': encode_hex(wrong_pubkey), 'message': encode_hex(message), @@ -93,8 +93,8 @@ def case02_verify(): # Invalid signature -- tampered with signature tampered_signature = signature[:-4] + b'\xFF\xFF\xFF\xFF' - full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}' - yield f'verify_tampered_signature_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{encode_hex(pubkey)}_{encode_hex(message)}' + yield f'verify_tampered_signature_case_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'pubkey': encode_hex(pubkey), 'message': encode_hex(message), @@ -122,8 +122,8 @@ def case04_fast_aggregate_verify(): pubkeys_serial = [encode_hex(pubkey) for pubkey in pubkeys] # Valid signature - full_name = f'{pubkeys_serial}_{encode_hex(message)}' - yield f'fast_aggregate_verify_valid_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{pubkeys_serial}_{encode_hex(message)}' + yield f'fast_aggregate_verify_valid_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'pubkeys': pubkeys_serial, 'message': encode_hex(message), @@ -135,8 +135,8 @@ def 
case04_fast_aggregate_verify(): # Invalid signature -- extra pubkey pubkeys_extra = pubkeys + [bls.G2ProofOfPossession.PrivToPub(PRIVKEYS[-1])] pubkeys_extra_serial = [encode_hex(pubkey) for pubkey in pubkeys_extra] - full_name = f'{pubkeys_extra_serial}_{encode_hex(message)}' - yield f'fast_aggregate_verify_extra_pubkey_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{pubkeys_extra_serial}_{encode_hex(message)}' + yield f'fast_aggregate_verify_extra_pubkey_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'pubkeys': pubkeys_extra_serial, 'message': encode_hex(message), @@ -147,8 +147,8 @@ def case04_fast_aggregate_verify(): # Invalid signature -- tampered with signature tampered_signature = aggregate_signature[:-4] + b'\xff\xff\xff\xff' - full_name = f'{pubkeys_serial}_{encode_hex(message)}' - yield f'fast_aggregate_verify_tampered_signature_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', { + identifier = f'{pubkeys_serial}_{encode_hex(message)}' + yield f'fast_aggregate_verify_tampered_signature_{(hash(bytes(identifier, "utf-8"))[:8]).hex()}', { 'input': { 'pubkeys': pubkeys_serial, 'message': encode_hex(message), From c1a51a9358ebbe841faebf5b8a91bf68b92a0410 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 10 Feb 2020 18:53:26 -0700 Subject: [PATCH 142/194] Add no repeat attestation condition for committee_index_beacon_attestation gossip channel --- specs/phase0/p2p-interface.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index d47bac734..1fadd153a 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -273,9 +273,11 @@ Attestation subnets are used to propagate unaggregated attestations to subsectio - `committee_index{subnet_id}_beacon_attestation` - These topics are used to propagate unaggregated attestations to the subnet `subnet_id` (typically beacon and persistent committees) to be aggregated before being 
gossiped to `beacon_aggregate_and_proof`. The following validations MUST pass before forwarding the `attestation` on the subnet. - The attestation's committee index (`attestation.data.index`) is for the correct subnet. - - The attestation is unaggregated -- that is, it has exactly one participating validator (`len([bit for bit in attestation.aggregation_bits if bit == 0b1]) == 1`). - - The block being voted for (`attestation.data.beacon_block_root`) passes validation. + - The attestation is the first attestation received for the participating validator for the slot `attestation.data.slot` - `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (within a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot`. + - The attestation is unaggregated -- that is, it has exactly one participating validator (`len([bit for bit in attestation.aggregation_bits if bit == 0b1]) == 1`). + - This is the first attestation for the participating validator at this slot. + - The block being voted for (`attestation.data.beacon_block_root`) passes validation. - The signature of `attestation` is valid. 
#### Interop From 118a755bbb704668cd44836ea16cbad3d0ec4470 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 10 Feb 2020 19:13:50 -0700 Subject: [PATCH 143/194] create SignedAggregateAndProof to prevent DoS attacks --- specs/phase0/p2p-interface.md | 38 ++++++++++++++++++----------------- specs/phase0/validator.md | 14 +++++++++++-- 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 1fadd153a..38adfb316 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -229,15 +229,15 @@ where `base64` is the [URL-safe base64 alphabet](https://tools.ietf.org/html/rfc The payload is carried in the `data` field of a gossipsub message, and varies depending on the topic: -| Topic | Message Type | -|------------------------------------------------|----------------------| -| beacon_block | SignedBeaconBlock | -| beacon_aggregate_and_proof | AggregateAndProof | -| beacon_attestation\* | Attestation | -| committee_index{subnet_id}\_beacon_attestation | Attestation | -| voluntary_exit | SignedVoluntaryExit | -| proposer_slashing | ProposerSlashing | -| attester_slashing | AttesterSlashing | +| Topic | Message Type | +|------------------------------------------------|-------------------------| +| beacon_block | SignedBeaconBlock | +| beacon_aggregate_and_proof | SignedAggregateAndProof | +| beacon_attestation\* | Attestation | +| committee_index{subnet_id}\_beacon_attestation | Attestation | +| voluntary_exit | SignedVoluntaryExit | +| proposer_slashing | ProposerSlashing | +| attester_slashing | AttesterSlashing | Clients MUST reject (fail validation) messages containing an incorrect type, or invalid payload. @@ -250,16 +250,18 @@ When processing incoming gossip, clients MAY descore or disconnect peers who fai There are two primary global topics used to propagate beacon blocks and aggregate attestations to all nodes on the network. 
Their `TopicName`s are: - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network - - The proposer signature, `signed_beacon_block.signature` is valid. + - The proposer signature, `signed_beacon_block.signature`, is valid. - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). -- `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `AggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `aggregate_and_proof` on the network. - - The aggregate attestation defined by `hash_tree_root(aggregate_and_proof.aggregate)` has _not_ already been seen (via aggregate gossip, within a block, or through the creation of an equivalent aggregate locally). - - The block being voted for (`aggregate_and_proof.aggregate.data.beacon_block_root`) passes validation. - - `aggregate_and_proof.aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate_and_proof.aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate_and_proof.aggregate.data.slot`. - - The validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate_and_proof.aggregate.data, aggregate_and_proof.aggregate.aggregation_bits)`. - - `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. 
`is_aggregator(state, aggregate_and_proof.aggregate.data.slot, aggregate_and_proof.aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. - - The `aggregate_and_proof.selection_proof` is a valid signature of the `aggregate_and_proof.aggregate.data.slot` by the validator with index `aggregate_and_proof.aggregator_index`. - - The signature of `aggregate_and_proof.aggregate` is valid. +- `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `SignedAggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network. (We define the following for convenience -- `aggregate_and_proof = signed_aggregate_and_proof.message` and `aggregate = aggregate_and_proof.aggregate`) + - The aggregate attestation defined by `hash_tree_root(aggregate)` has _not_ already been seen (via aggregate gossip, within a block, or through the creation of an equivalent aggregate locally). + - The block being voted for (`aggregate.data.beacon_block_root`) passes validation. + - `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot`. + - The validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. + - `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. 
+ - The `signed_aggregate_and_proof` is the first aggregate received for the aggregator for the slot `aggregate.data.slot` + - The `aggregate_and_proof.selection_proof` is a valid signature of the `aggregate.data.slot` by the validator with index `aggregate_and_proof.aggregator_index`. + - The aggregator signature, `signed_aggregate_and_proof.signature`, is valid. + - The signature of `aggregate` is valid. Additional global topics are used to propagate lower frequency validator messages. Their `TopicName`s are: diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index be151447d..3cf3fb2bd 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -461,9 +461,11 @@ def get_aggregate_signature(attestations: Sequence[Attestation]) -> BLSSignature #### Broadcast aggregate -If the validator is selected to aggregate (`is_aggregator`), then they broadcast their best aggregate to the global aggregate channel (`beacon_aggregate_and_proof`) two-thirds of the way through the `slot`-that is, `SECONDS_PER_SLOT * 2 / 3` seconds after the start of `slot`. +If the validator is selected to aggregate (`is_aggregator`), then they broadcast their best aggregate as a `SignedAggregateAndProof` to the global aggregate channel (`beacon_aggregate_and_proof`) two-thirds of the way through the `slot`-that is, `SECONDS_PER_SLOT * 2 / 3` seconds after the start of `slot`. -Aggregate attestations are broadcast as `AggregateAndProof` objects to prove to the gossip channel that the validator has been selected as an aggregator. +Selection proofs are provided in `AggregateAndProof` to prove to the gossip channel that the validator has been selected as an aggregator. + +`AggregateAndProof` messages are signed and broadcast inside of `SignedAggregateAndProof` objects to prevent a class of DoS attacks and message forgeries. ##### `AggregateAndProof` @@ -479,6 +481,14 @@ Where * `aggregate` is the `aggregate_attestation` constructed in the previous section. 
* `selection_proof` is the signature of the slot (`get_slot_signature()`). +##### `SignedAggregateAndProof` + +```python +class SignedAggregateAndProof(Container): + message: AggregateAndProof + signature: BLSSignature +``` + ## Phase 0 attestation subnet stability Because Phase 0 does not have shards and thus does not have Shard Committees, there is no stable backbone to the attestation subnets (`committee_index{subnet_id}_beacon_attestation`). To provide this stability, each validator must: From 1ca4e3538cc97f0f5894e64e9dcbacb57b3173d4 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 10 Feb 2020 19:28:37 -0700 Subject: [PATCH 144/194] prevent multiple beacon blocks from same proposer in a given slot --- specs/phase0/p2p-interface.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 38adfb316..9979cbb14 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -250,6 +250,7 @@ When processing incoming gossip, clients MAY descore or disconnect peers who fai There are two primary global topics used to propagate beacon blocks and aggregate attestations to all nodes on the network. Their `TopicName`s are: - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network + - The block is the first block received for the proposer for the slot, `signed_beacon_block.message.slot`. - The proposer signature, `signed_beacon_block.signature`, is valid. - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). 
- `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `SignedAggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network. (We define the following for convenience -- `aggregate_and_proof = signed_aggregate_and_proof.message` and `aggregate = aggregate_and_proof.aggregate`) @@ -258,7 +259,7 @@ There are two primary global topics used to propagate beacon blocks and aggregat - `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot`. - The validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. - `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. - - The `signed_aggregate_and_proof` is the first aggregate received for the aggregator for the slot `aggregate.data.slot` + - The `signed_aggregate_and_proof` is the first aggregate received for the aggregator for the slot, `aggregate.data.slot` - The `aggregate_and_proof.selection_proof` is a valid signature of the `aggregate.data.slot` by the validator with index `aggregate_and_proof.aggregator_index`. - The aggregator signature, `signed_aggregate_and_proof.signature`, is valid. - The signature of `aggregate` is valid. 
@@ -275,7 +276,7 @@ Attestation subnets are used to propagate unaggregated attestations to subsectio - `committee_index{subnet_id}_beacon_attestation` - These topics are used to propagate unaggregated attestations to the subnet `subnet_id` (typically beacon and persistent committees) to be aggregated before being gossiped to `beacon_aggregate_and_proof`. The following validations MUST pass before forwarding the `attestation` on the subnet. - The attestation's committee index (`attestation.data.index`) is for the correct subnet. - - The attestation is the first attestation received for the participating validator for the slot `attestation.data.slot` + - The attestation is the first attestation received for the participating validator for the slot, `attestation.data.slot` - `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (within a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot`. - The attestation is unaggregated -- that is, it has exactly one participating validator (`len([bit for bit in attestation.aggregation_bits if bit == 0b1]) == 1`). - This is the first attestation for the participating validator at this slot. 
From 8a2a689c8070876ec4a983ce4d4d015a93b0bbb4 Mon Sep 17 00:00:00 2001
From: Danny Ryan 
Date: Mon, 10 Feb 2020 19:35:36 -0700
Subject: [PATCH 145/194] add clarifying note to signedaggregateandproof in
 validator guide and fix tocs

---
 specs/phase0/validator.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md
index 3cf3fb2bd..3b9e85d16 100644
--- a/specs/phase0/validator.md
+++ b/specs/phase0/validator.md
@@ -59,6 +59,7 @@
     - [Aggregate signature](#aggregate-signature-1)
     - [Broadcast aggregate](#broadcast-aggregate)
       - [`AggregateAndProof`](#aggregateandproof)
+      - [`SignedAggregateAndProof`](#signedaggregateandproof)
 - [Phase 0 attestation subnet stability](#phase-0-attestation-subnet-stability)
 - [How to avoid slashing](#how-to-avoid-slashing)
   - [Proposer slashing](#proposer-slashing)
@@ -465,7 +466,7 @@ If the validator is selected to aggregate (`is_aggregator`), then they broadcast
 
 Selection proofs are provided in `AggregateAndProof` to prove to the gossip channel that the validator has been selected as an aggregator.
 
-`AggregateAndProof` messages are signed and broadcast inside of `SignedAggregateAndProof` objects to prevent a class of DoS attacks and message forgeries.
+`AggregateAndProof` messages are signed by the aggregator and broadcast inside of `SignedAggregateAndProof` objects to prevent a class of DoS attacks and message forgeries.
##### `AggregateAndProof` From 8d16d428e0142389dfb3e7cac7f97a375865a15f Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 10 Feb 2020 21:35:39 -0700 Subject: [PATCH 146/194] fix up some p2p validation conditions based on PR feedback --- specs/phase0/p2p-interface.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 9979cbb14..c60ba560c 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -276,10 +276,9 @@ Attestation subnets are used to propagate unaggregated attestations to subsectio - `committee_index{subnet_id}_beacon_attestation` - These topics are used to propagate unaggregated attestations to the subnet `subnet_id` (typically beacon and persistent committees) to be aggregated before being gossiped to `beacon_aggregate_and_proof`. The following validations MUST pass before forwarding the `attestation` on the subnet. - The attestation's committee index (`attestation.data.index`) is for the correct subnet. - - The attestation is the first attestation received for the participating validator for the slot, `attestation.data.slot` - `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (within a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot`. - The attestation is unaggregated -- that is, it has exactly one participating validator (`len([bit for bit in attestation.aggregation_bits if bit == 0b1]) == 1`). - - This is the first attestation for the participating validator at this slot. + - The attestation is the first attestation received for the participating validator for the slot, `attestation.data.slot`. - The block being voted for (`attestation.data.beacon_block_root`) passes validation. - The signature of `attestation` is valid. 
From eec57f29b77e6737d0e2b2a216643229fd040d39 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 11 Feb 2020 15:45:51 -0700 Subject: [PATCH 147/194] add explicit instrucutions for construction of signed_aggregate_and_proof. add DOMAIN_SELECTION_PROOF and DOMAIN_AGGREGATE_AND_PROOF --- configs/mainnet.yaml | 2 ++ configs/minimal.yaml | 2 ++ specs/phase0/beacon-chain.md | 13 ++++++++----- specs/phase0/validator.md | 28 +++++++++++++++++++++++++++- 4 files changed, 39 insertions(+), 6 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 6eb5641d0..74f062d9b 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -143,6 +143,8 @@ DOMAIN_BEACON_ATTESTER: 0x01000000 DOMAIN_RANDAO: 0x02000000 DOMAIN_DEPOSIT: 0x03000000 DOMAIN_VOLUNTARY_EXIT: 0x04000000 +DOMAIN_SELECTION_PROOF: 0x05000000 +DOMAIN_AGGREGATE_AND_PROOF: 0x06000000 # Phase 1 DOMAIN_SHARD_PROPOSAL: 0x80000000 DOMAIN_SHARD_COMMITTEE: 0x81000000 diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 03ffa90e3..42c63e301 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -142,6 +142,8 @@ DOMAIN_BEACON_ATTESTER: 0x01000000 DOMAIN_RANDAO: 0x02000000 DOMAIN_DEPOSIT: 0x03000000 DOMAIN_VOLUNTARY_EXIT: 0x04000000 +DOMAIN_SELECTION_PROOF: 0x05000000 +DOMAIN_AGGREGATE_AND_PROOF: 0x06000000 # Phase 1 DOMAIN_SHARD_PROPOSAL: 0x80000000 DOMAIN_SHARD_COMMITTEE: 0x81000000 diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 96ea351c8..2ab51079d 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -253,11 +253,14 @@ The following values are (non-configurable) constants used throughout the specif | Name | Value | | - | - | -| `DOMAIN_BEACON_PROPOSER` | `DomainType('0x00000000')` | -| `DOMAIN_BEACON_ATTESTER` | `DomainType('0x01000000')` | -| `DOMAIN_RANDAO` | `DomainType('0x02000000')` | -| `DOMAIN_DEPOSIT` | `DomainType('0x03000000')` | -| `DOMAIN_VOLUNTARY_EXIT` | `DomainType('0x04000000')` | +| `DOMAIN_BEACON_PROPOSER` | 
`DomainType('0x00000000')` | +| `DOMAIN_BEACON_ATTESTER` | `DomainType('0x01000000')` | +| `DOMAIN_RANDAO` | `DomainType('0x02000000')` | +| `DOMAIN_DEPOSIT` | `DomainType('0x03000000')` | +| `DOMAIN_VOLUNTARY_EXIT` | `DomainType('0x04000000')` | +| `DOMAIN_SELECTION_PROOF` | `DomainType('0x05000000')` | +| `DOMAIN_AGGREGATE_AND_PROOF` | `DomainType('0x06000000')` | + ## Containers diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 3b9e85d16..9229a34f4 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -424,7 +424,7 @@ A validator is selected to aggregate based upon the return value of `is_aggregat ```python def get_slot_signature(state: BeaconState, slot: Slot, privkey: int) -> BLSSignature: - domain = get_domain(state, DOMAIN_BEACON_ATTESTER, compute_epoch_at_slot(slot)) + domain = get_domain(state, DOMAIN_SELECTION_PROOF, compute_epoch_at_slot(slot)) signing_root = compute_signing_root(slot, domain) return bls.Sign(privkey, signing_root) ``` @@ -468,6 +468,32 @@ Selection proofs are provided in `AggregateAndProof` to prove to the gossip chan `AggregateAndProof` messages are signed by the aggregator and broadcast inside of `SignedAggregateAndProof` objects to prevent a class of DoS attacks and message forgeries. +First, `aggregate_and_proof = get_aggregate_and_proof(state, aggregate_attestation, validator_index, privkey)` is contructed. + +```python +def get_aggregate_and_proof(state: BeaconState, + aggregate: Attestation, + aggregator_index: ValidatorIndex, + privkey: int) -> AggregateAndProof: + return AggregateAndProof( + aggregator_index=aggregator_index, + aggregate=aggregate, + selection_proof=get_slot_signature(state, aggregate.data.slot, privkey), + ) +``` + +Then `signed_aggregate_and_proof = SignedAggregateAndProof(message=aggregate_and_proof, signature=signature)` is constructed and broadast. 
Where `signature` is obtained from: + +```python +def get_aggregate_and_proof_signature(state: BeaconState, + aggregate_and_proof: AggregateAndProof, + privkey: int) -> BLSSignature: + aggregate = aggregate_and_proof.aggregate + domain = get_domain(state, DOMAIN_AGGREGATE_AND_PROOF, compute_epoch_at_slot(aggregate.data.slot)) + signing_root = compute_signing_root(aggregate_and_proof, domain) + return bls.Sign(privkey, signing_root) +``` + ##### `AggregateAndProof` ```python From bf8252aeb66d02443d5bb2fabccbe5f1a916dfab Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 11 Feb 2020 15:46:30 -0700 Subject: [PATCH 148/194] subnet validation PR feedback --- specs/phase0/p2p-interface.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index c60ba560c..c0dd88643 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -257,9 +257,9 @@ There are two primary global topics used to propagate beacon blocks and aggregat - The aggregate attestation defined by `hash_tree_root(aggregate)` has _not_ already been seen (via aggregate gossip, within a block, or through the creation of an equivalent aggregate locally). - The block being voted for (`aggregate.data.beacon_block_root`) passes validation. - `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot`. - - The validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. + - The aggregator's validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. 
- `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. - - The `signed_aggregate_and_proof` is the first aggregate received for the aggregator for the slot, `aggregate.data.slot` + - The `aggregate` is the first aggregate received for the aggregator with index `aggregate_and_proof.aggregator_index` for the slot `aggregate.data.slot`. - The `aggregate_and_proof.selection_proof` is a valid signature of the `aggregate.data.slot` by the validator with index `aggregate_and_proof.aggregator_index`. - The aggregator signature, `signed_aggregate_and_proof.signature`, is valid. - The signature of `aggregate` is valid. From 90476388feed9581ff517a59b77298bc2f515eca Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 11 Feb 2020 15:49:45 -0700 Subject: [PATCH 149/194] reverse params in get_aggregate_and_proof to match ssz type --- specs/phase0/validator.md | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 9229a34f4..3368201b6 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -468,12 +468,12 @@ Selection proofs are provided in `AggregateAndProof` to prove to the gossip chan `AggregateAndProof` messages are signed by the aggregator and broadcast inside of `SignedAggregateAndProof` objects to prevent a class of DoS attacks and message forgeries. -First, `aggregate_and_proof = get_aggregate_and_proof(state, aggregate_attestation, validator_index, privkey)` is contructed. +First, `aggregate_and_proof = get_aggregate_and_proof(state, validator_index, aggregate_attestation, privkey)` is constructed. 
```python def get_aggregate_and_proof(state: BeaconState, - aggregate: Attestation, aggregator_index: ValidatorIndex, + aggregate: Attestation, privkey: int) -> AggregateAndProof: return AggregateAndProof( aggregator_index=aggregator_index, @@ -503,11 +503,6 @@ class AggregateAndProof(Container): selection_proof: BLSSignature ``` -Where -* `aggregator_index` is the validator's `ValidatorIndex`. -* `aggregate` is the `aggregate_attestation` constructed in the previous section. -* `selection_proof` is the signature of the slot (`get_slot_signature()`). - ##### `SignedAggregateAndProof` ```python From 714a7de8a2a3d8706c0f7035e793f3ac3045aec4 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 12 Feb 2020 11:59:00 -0700 Subject: [PATCH 150/194] reorder gossip conditions to put cheap checks before signature verifications --- specs/phase0/p2p-interface.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index c0dd88643..f5a82bad4 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -250,16 +250,16 @@ When processing incoming gossip, clients MAY descore or disconnect peers who fai There are two primary global topics used to propagate beacon blocks and aggregate attestations to all nodes on the network. Their `TopicName`s are: - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network + - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). - The block is the first block received for the proposer for the slot, `signed_beacon_block.message.slot`. 
- The proposer signature, `signed_beacon_block.signature`, is valid. - - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). - `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `SignedAggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network. (We define the following for convenience -- `aggregate_and_proof = signed_aggregate_and_proof.message` and `aggregate = aggregate_and_proof.aggregate`) + - `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot` (a client MAY queue future aggregates for processing at the appropriate slot). - The aggregate attestation defined by `hash_tree_root(aggregate)` has _not_ already been seen (via aggregate gossip, within a block, or through the creation of an equivalent aggregate locally). - - The block being voted for (`aggregate.data.beacon_block_root`) passes validation. - - `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot`. - - The aggregator's validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. - - `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. 
`is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. - The `aggregate` is the first aggregate received for the aggregator with index `aggregate_and_proof.aggregator_index` for the slot `aggregate.data.slot`. + - The block being voted for (`aggregate.data.beacon_block_root`) passes validation. + - `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. + - The aggregator's validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. - The `aggregate_and_proof.selection_proof` is a valid signature of the `aggregate.data.slot` by the validator with index `aggregate_and_proof.aggregator_index`. - The aggregator signature, `signed_aggregate_and_proof.signature`, is valid. - The signature of `aggregate` is valid. @@ -276,7 +276,7 @@ Attestation subnets are used to propagate unaggregated attestations to subsectio - `committee_index{subnet_id}_beacon_attestation` - These topics are used to propagate unaggregated attestations to the subnet `subnet_id` (typically beacon and persistent committees) to be aggregated before being gossiped to `beacon_aggregate_and_proof`. The following validations MUST pass before forwarding the `attestation` on the subnet. - The attestation's committee index (`attestation.data.index`) is for the correct subnet. - - `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (within a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot`. + - `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (within a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. 
`attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot` (a client MAY queue future attestations for processing at the appropriate slot). - The attestation is unaggregated -- that is, it has exactly one participating validator (`len([bit for bit in attestation.aggregation_bits if bit == 0b1]) == 1`). - The attestation is the first attestation received for the participating validator for the slot, `attestation.data.slot`. - The block being voted for (`attestation.data.beacon_block_root`) passes validation. From f8b72f5057e1dd526a7ab665bc1de26bd92263c2 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 12 Feb 2020 12:07:57 -0700 Subject: [PATCH 151/194] add lower bound condition on block gossip --- specs/phase0/p2p-interface.md | 1 + 1 file changed, 1 insertion(+) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index f5a82bad4..81536a314 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -251,6 +251,7 @@ There are two primary global topics used to propagate beacon blocks and aggregat - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). + - The block is not from later than the latest finalized slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot > compute_start_slot_at_epoch(state.finalized_checkpoint.epoch)` (a client MAY choose to validate and store such blocks for additional purposes -- e.g. slashing detection, archive nodes, etc). 
- The block is the first block received for the proposer for the slot, `signed_beacon_block.message.slot`. - The proposer signature, `signed_beacon_block.signature`, is valid. - `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `SignedAggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network. (We define the following for convenience -- `aggregate_and_proof = signed_aggregate_and_proof.message` and `aggregate = aggregate_and_proof.aggregate`) From f441fadfff2f0050bf78e5a2503ceac552bc3892 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 12 Feb 2020 15:46:48 -0700 Subject: [PATCH 152/194] pr feedback Co-Authored-By: Diederik Loerakker --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 81536a314..d7fbbf8a7 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -251,7 +251,7 @@ There are two primary global topics used to propagate beacon blocks and aggregat - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). - - The block is not from later than the latest finalized slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. 
validate that `signed_beacon_block.message.slot > compute_start_slot_at_epoch(state.finalized_checkpoint.epoch)` (a client MAY choose to validate and store such blocks for additional purposes -- e.g. slashing detection, archive nodes, etc). + - The block is from a slot greater than the latest finalized slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot > compute_start_slot_at_epoch(state.finalized_checkpoint.epoch)` (a client MAY choose to validate and store such blocks for additional purposes -- e.g. slashing detection, archive nodes, etc). - The block is the first block received for the proposer for the slot, `signed_beacon_block.message.slot`. - The proposer signature, `signed_beacon_block.signature`, is valid. - `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `SignedAggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network. 
(We define the following for convenience -- `aggregate_and_proof = signed_aggregate_and_proof.message` and `aggregate = aggregate_and_proof.aggregate`) From a7ef70eb9b48b0d15a0511566e1f8c3cb9514efe Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 12 Feb 2020 15:48:49 -0700 Subject: [PATCH 153/194] add DoS prevention validation conditions to voluntary_exit, proposer_slashing, and attester_slashing gossipsub channels --- specs/phase0/p2p-interface.md | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index d7fbbf8a7..54db92539 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -252,12 +252,12 @@ There are two primary global topics used to propagate beacon blocks and aggregat - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). - The block is from a slot greater than the latest finalized slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot > compute_start_slot_at_epoch(state.finalized_checkpoint.epoch)` (a client MAY choose to validate and store such blocks for additional purposes -- e.g. slashing detection, archive nodes, etc). - - The block is the first block received for the proposer for the slot, `signed_beacon_block.message.slot`. + - The block is the first block with valid signature received for the proposer for the slot, `signed_beacon_block.message.slot`. - The proposer signature, `signed_beacon_block.signature`, is valid. 
- `beacon_aggregate_and_proof` - This topic is used to propagate aggregated attestations (as `SignedAggregateAndProof`s) to subscribing nodes (typically validators) to be included in future blocks. The following validations MUST pass before forwarding the `signed_aggregate_and_proof` on the network. (We define the following for convenience -- `aggregate_and_proof = signed_aggregate_and_proof.message` and `aggregate = aggregate_and_proof.aggregate`) - `aggregate.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `aggregate.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= aggregate.data.slot` (a client MAY queue future aggregates for processing at the appropriate slot). - The aggregate attestation defined by `hash_tree_root(aggregate)` has _not_ already been seen (via aggregate gossip, within a block, or through the creation of an equivalent aggregate locally). - - The `aggregate` is the first aggregate received for the aggregator with index `aggregate_and_proof.aggregator_index` for the slot `aggregate.data.slot`. + - The `aggregate` is the first valid aggregate received for the aggregator with index `aggregate_and_proof.aggregator_index` for the slot `aggregate.data.slot`. - The block being voted for (`aggregate.data.beacon_block_root`) passes validation. - `aggregate_and_proof.selection_proof` selects the validator as an aggregator for the slot -- i.e. `is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof)` returns `True`. - The aggregator's validator index is within the aggregate's committee -- i.e. `aggregate_and_proof.aggregator_index in get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)`. @@ -267,9 +267,16 @@ There are two primary global topics used to propagate beacon blocks and aggregat Additional global topics are used to propagate lower frequency validator messages. 
Their `TopicName`s are: -- `voluntary_exit` - This topic is used solely for propagating signed voluntary validator exits to proposers on the network. Signed voluntary exits are sent in their entirety. Clients who receive a signed voluntary exit on this topic MUST validate the conditions within `process_voluntary_exit` before forwarding it across the network. -- `proposer_slashing` - This topic is used solely for propagating proposer slashings to proposers on the network. Proposer slashings are sent in their entirety. Clients who receive a proposer slashing on this topic MUST validate the conditions within `process_proposer_slashing` before forwarding it across the network. +- `voluntary_exit` - This topic is used solely for propagating signed voluntary validator exits to proposers on the network. Signed voluntary exits are sent in their entirety. The following validations MUST pass before forwarding the `signed_voluntary_exit` on to the network + - The voluntary exit is the first valid voluntary exit received for the validator with index `signed_voluntary_exit.message.validator_index`. + - All of the conditions within `process_voluntary_exit` pass validation. +- `proposer_slashing` - This topic is used solely for propagating proposer slashings to proposers on the network. Proposer slashings are sent in their entirety. The following validations MUST pass before forwarding the `proposer_slashing` on to the network + - The proposer slashing is the first valid proposer slashing received for the proposer with index `proposer_slashing.index`. + - All of the conditions within `process_proposer_slashing` pass validation. - `attester_slashing` - This topic is used solely for propagating attester slashings to proposers on the network. Attester slashings are sent in their entirety. Clients who receive an attester slashing on this topic MUST validate the conditions within `process_attester_slashing` before forwarding it across the network. 
+ - At least one index in the intersection of the attesting indices of each attestation has not yet been seen in any prior `attester_slashing` (i.e. `any((set(attestation_1.attesting_indices).intersection(attestation_2.attesting_indices)).difference(prior_seen_attester_slashed_indices))`). + - All of the conditions within `process_attester_slashing` pass validation. + #### Attestation subnets @@ -279,7 +286,7 @@ Attestation subnets are used to propagate unaggregated attestations to subsectio - The attestation's committee index (`attestation.data.index`) is for the correct subnet. - `attestation.data.slot` is within the last `ATTESTATION_PROPAGATION_SLOT_RANGE` slots (within a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. `attestation.data.slot + ATTESTATION_PROPAGATION_SLOT_RANGE >= current_slot >= attestation.data.slot` (a client MAY queue future attestations for processing at the appropriate slot). - The attestation is unaggregated -- that is, it has exactly one participating validator (`len([bit for bit in attestation.aggregation_bits if bit == 0b1]) == 1`). - - The attestation is the first attestation received for the participating validator for the slot, `attestation.data.slot`. + - The attestation is the first valid attestation received for the participating validator for the slot, `attestation.data.slot`. - The block being voted for (`attestation.data.beacon_block_root`) passes validation. - The signature of `attestation` is valid. 
From 40b4931f0176f4b7f6527e793becd833278cface Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 13 Feb 2020 11:17:19 -0700 Subject: [PATCH 154/194] cleanup attester slashing conditon code snippet Co-Authored-By: Diederik Loerakker --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 54db92539..7ad8759d3 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -274,7 +274,7 @@ Additional global topics are used to propagate lower frequency validator message - The proposer slashing is the first valid proposer slashing received for the proposer with index `proposer_slashing.index`. - All of the conditions within `process_proposer_slashing` pass validation. - `attester_slashing` - This topic is used solely for propagating attester slashings to proposers on the network. Attester slashings are sent in their entirety. Clients who receive an attester slashing on this topic MUST validate the conditions within `process_attester_slashing` before forwarding it across the network. - - At least one index in the intersection of the attesting indices of each attestation has not yet been seen in any prior `attester_slashing` (i.e. `any((set(attestation_1.attesting_indices).intersection(attestation_2.attesting_indices)).difference(prior_seen_attester_slashed_indices))`). + - At least one index in the intersection of the attesting indices of each attestation has not yet been seen in any prior `attester_slashing` (i.e. `attester_slashed_indices = set(attestation_1.attesting_indices).intersection(attestation_2.attesting_indices)`, verify if `any(attester_slashed_indices.difference(prior_seen_attester_slashed_indices))`). - All of the conditions within `process_attester_slashing` pass validation. 
From 8620adcdf1ad3dea4751aa883627eeddd360a254 Mon Sep 17 00:00:00 2001 From: Ben Edgington Date: Fri, 14 Feb 2020 14:29:45 +0000 Subject: [PATCH 155/194] Correct the duration of HISTORICAL_ROOTS_LIMIT The duration of HISTORICAL_ROOTS_LIMIT is: SECONDS_PER_SLOT * SLOTS_PER_HISTORICAL_ROOT * HISTORICAL_ROOTS_LIMIT which is 12s * 2^13 * 2^24 = 1.65e12s = 52,262 years --- specs/phase0/beacon-chain.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 2ab51079d..acf098663 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -224,7 +224,7 @@ The following values are (non-configurable) constants used throughout the specif | - | - | :-: | :-: | | `EPOCHS_PER_HISTORICAL_VECTOR` | `2**16` (= 65,536) | epochs | ~0.8 years | | `EPOCHS_PER_SLASHINGS_VECTOR` | `2**13` (= 8,192) | epochs | ~36 days | -| `HISTORICAL_ROOTS_LIMIT` | `2**24` (= 16,777,216) | historical roots | ~26,131 years | +| `HISTORICAL_ROOTS_LIMIT` | `2**24` (= 16,777,216) | historical roots | ~52,262 years | | `VALIDATOR_REGISTRY_LIMIT` | `2**40` (= 1,099,511,627,776) | validators | ### Rewards and penalties From 7ad710e2f36f50a6df32128a2f92fad5dee81131 Mon Sep 17 00:00:00 2001 From: nathaniel gentile Date: Sat, 15 Feb 2020 15:40:06 -0700 Subject: [PATCH 156/194] fix dev install example distutil command the option is now --spec-fork, not --spec-version see: 340549aed62caa15a9f04cf393805c2003bd5c8e --- tests/core/pyspec/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/README.md b/tests/core/pyspec/README.md index 1f0bb6642..a9ee80105 100644 --- a/tests/core/pyspec/README.md +++ b/tests/core/pyspec/README.md @@ -20,7 +20,7 @@ Unlike the regular install, this outputs spec files to their original source loc Alternatively, you can build a sub-set of the pyspec with the distutil command: ```bash -python setup.py pyspec --spec-version=phase0 
--md-doc-paths="specs/phase0/beacon-chain.md specs/phase0/fork-choice.md" --out-dir=my_spec_dir +python setup.py pyspec --spec-fork=phase0 --md-doc-paths="specs/phase0/beacon-chain.md specs/phase0/fork-choice.md" --out-dir=my_spec_dir ``` ## Py-tests From dc7286113617ef382aa341b26e15b1bb057b0b37 Mon Sep 17 00:00:00 2001 From: ethers <6937903+ethers@users.noreply.github.com> Date: Mon, 17 Feb 2020 00:14:07 +0000 Subject: [PATCH 157/194] add Gasper paper --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 49d851197..3f58b3dc4 100644 --- a/README.md +++ b/README.md @@ -59,6 +59,7 @@ The following are the broad design goals for Ethereum 2.0: * [Design Rationale](https://notes.ethereum.org/s/rkhCgQteN#) * [Phase 0 Onboarding Document](https://notes.ethereum.org/s/Bkn3zpwxB) +* [Gasper paper](https://github.com/ethereum/research/blob/master/papers/ffg%2Bghost/paper.pdf) ## For spec contributors From 52b45ab9de7a08c5a218a44d417aa7ba31e213ea Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Mon, 17 Feb 2020 10:03:32 +0100 Subject: [PATCH 158/194] Add fork version to topic Gossipsub peers are separate from the ETH2 RPC protocol, and thus cannot rely on the application-level `Status` negotiation to establish if they're on the same network. Segregating gossipsub topics by fork version decouples RPC from gossip further and allows peers to more easily listen only to the traffic of the network they're interested in, without further negotiation. 
--- specs/phase0/p2p-interface.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 7ad8759d3..40c9055ac 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -216,7 +216,17 @@ The following gossipsub [parameters](https://github.com/libp2p/specs/tree/master ### Topics and messages -Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/TopicName/TopicEncoding`. This defines both the type of data being sent on the topic and how the data field of the message is encoded. +Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/ForkVersion/Name/Encoding`. This defines both the type of data being sent on the topic and how the data field of the message is encoded. + +- `ForkVersion` - the hex-encoded bytes from `state.fork.current_version` of the head state of the client, as also seen in `Status.head_fork_version`. +- `Name` - see table below +- `Encoding` - the encoding strategy describes a specific representation of bytes that will be transmitted over the wire. See the [Encodings](#Encoding-strategies) section for further details. + +The fork version is hex-encoded using the following scheme: +```python + ForkVersion = ''.join('{:02x}'.format(x) for x in state.fork.current_version) +``` +For example, the fork version `[0, 1, 2, 10]` will be encoded as `0001020a`. Each gossipsub [message](https://github.com/libp2p/go-libp2p-pubsub/blob/master/pb/rpc.proto#L17-L24) has a maximum size of `GOSSIP_MAX_SIZE`. Clients MUST reject (fail validation) messages that are over this size limit. Likewise, clients MUST NOT emit or propagate messages larger than this limit. 
@@ -229,7 +239,7 @@ where `base64` is the [URL-safe base64 alphabet](https://tools.ietf.org/html/rfc The payload is carried in the `data` field of a gossipsub message, and varies depending on the topic: -| Topic | Message Type | +| Name | Message Type | |------------------------------------------------|-------------------------| | beacon_block | SignedBeaconBlock | | beacon_aggregate_and_proof | SignedAggregateAndProof | From cfcb7b2f015ddf637dae03b7bf262a6d8349a9e0 Mon Sep 17 00:00:00 2001 From: Anton Nashatyrev Date: Tue, 18 Feb 2020 16:12:43 +0300 Subject: [PATCH 159/194] Measure eth1 voting period in epochs instead of slots --- specs/phase0/beacon-chain.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index acf098663..0bf953c43 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -213,7 +213,7 @@ The following values are (non-configurable) constants used throughout the specif | `MIN_SEED_LOOKAHEAD` | `2**0` (= 1) | epochs | 6.4 minutes | | `MAX_SEED_LOOKAHEAD` | `2**2` (= 4) | epochs | 25.6 minutes | | `MIN_EPOCHS_TO_INACTIVITY_PENALTY` | `2**2` (= 4) | epochs | 25.6 minutes | -| `SLOTS_PER_ETH1_VOTING_PERIOD` | `2**10` (= 1,024) | slots | ~3.4 hours | +| `EPOCHS_PER_ETH1_VOTING_PERIOD` | `2**5` (= 32) | epochs | ~3.4 hours | | `SLOTS_PER_HISTORICAL_ROOT` | `2**13` (= 8,192) | slots | ~27 hours | | `MIN_VALIDATOR_WITHDRAWABILITY_DELAY` | `2**8` (= 256) | epochs | ~27 hours | | `PERSISTENT_COMMITTEE_PERIOD` | `2**11` (= 2,048) | epochs | 9 days | @@ -478,7 +478,7 @@ class BeaconState(Container): historical_roots: List[Root, HISTORICAL_ROOTS_LIMIT] # Eth1 eth1_data: Eth1Data - eth1_data_votes: List[Eth1Data, SLOTS_PER_ETH1_VOTING_PERIOD] + eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH] eth1_deposit_index: uint64 # Registry validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] @@ -1397,7 +1397,7 @@ def 
process_final_updates(state: BeaconState) -> None: current_epoch = get_current_epoch(state) next_epoch = Epoch(current_epoch + 1) # Reset eth1 data votes - if (state.slot + 1) % SLOTS_PER_ETH1_VOTING_PERIOD == 0: + if next_epoch % EPOCHS_PER_ETH1_VOTING_PERIOD == 0: state.eth1_data_votes = [] # Update effective balances with hysteresis for index, validator in enumerate(state.validators): @@ -1468,7 +1468,7 @@ def process_randao(state: BeaconState, body: BeaconBlockBody) -> None: ```python def process_eth1_data(state: BeaconState, body: BeaconBlockBody) -> None: state.eth1_data_votes.append(body.eth1_data) - if state.eth1_data_votes.count(body.eth1_data) * 2 > SLOTS_PER_ETH1_VOTING_PERIOD: + if state.eth1_data_votes.count(body.eth1_data) * 2 > EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH: state.eth1_data = body.eth1_data ``` From 757f5a31ddad30381f53ca07bddab0a3156b3aec Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 17 Feb 2020 12:02:24 -0700 Subject: [PATCH 160/194] add proposer index and add/modify tests --- specs/phase0/beacon-chain.md | 23 +++++++---- specs/phase0/validator.md | 9 ++-- specs/phase1/beacon-chain.md | 1 + .../pyspec/eth2spec/test/helpers/block.py | 12 ++++++ .../test/helpers/proposer_slashings.py | 2 +- .../test_process_block_header.py | 12 ++++++ .../test_process_proposer_slashing.py | 41 +++++++++++++------ .../eth2spec/test/sanity/test_blocks.py | 2 +- 8 files changed, 78 insertions(+), 24 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index acf098663..fc0a5f53b 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -377,6 +377,7 @@ class DepositData(Container): ```python class BeaconBlockHeader(Container): slot: Slot + proposer_index: ValidatorIndex parent_root: Root state_root: Root body_root: Root @@ -396,7 +397,6 @@ class SigningRoot(Container): ```python class ProposerSlashing(Container): - proposer_index: ValidatorIndex signed_header_1: SignedBeaconBlockHeader 
signed_header_2: SignedBeaconBlockHeader ``` @@ -456,6 +456,7 @@ class BeaconBlockBody(Container): ```python class BeaconBlock(Container): slot: Slot + proposer_index: ValidatorIndex parent_root: Root state_root: Root body: BeaconBlockBody @@ -1163,7 +1164,7 @@ def state_transition(state: BeaconState, signed_block: SignedBeaconBlock, valida ```python def verify_block_signature(state: BeaconState, signed_block: SignedBeaconBlock) -> bool: - proposer = state.validators[get_beacon_proposer_index(state)] + proposer = state.validators[signed_block.message.proposer_index] signing_root = compute_signing_root(signed_block.message, get_domain(state, DOMAIN_BEACON_PROPOSER)) return bls.Verify(proposer.pubkey, signing_root, signed_block.signature) ``` @@ -1434,18 +1435,21 @@ def process_block(state: BeaconState, block: BeaconBlock) -> None: def process_block_header(state: BeaconState, block: BeaconBlock) -> None: # Verify that the slots match assert block.slot == state.slot + # Verify that proposer index is the correct index + assert block.proposer_index == get_beacon_proposer_index(state) # Verify that the parent matches assert block.parent_root == hash_tree_root(state.latest_block_header) # Cache current block as the new latest block state.latest_block_header = BeaconBlockHeader( slot=block.slot, + proposer_index=block.proposer_index, parent_root=block.parent_root, state_root=Bytes32(), # Overwritten in the next process_slot call body_root=hash_tree_root(block.body), ) # Verify proposer is not slashed - proposer = state.validators[get_beacon_proposer_index(state)] + proposer = state.validators[block.proposer_index] assert not proposer.slashed ``` @@ -1494,12 +1498,17 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None: ```python def process_proposer_slashing(state: BeaconState, proposer_slashing: ProposerSlashing) -> None: + header_1 = proposer_slashing.signed_header_1.message + header_2 = proposer_slashing.signed_header_2.message + # Verify header 
slots match - assert proposer_slashing.signed_header_1.message.slot == proposer_slashing.signed_header_2.message.slot + assert header_1.slot == header_2.slot + # Verify header proposer indices match + assert header_1.proposer_index == header_2.proposer_index # Verify the headers are different - assert proposer_slashing.signed_header_1 != proposer_slashing.signed_header_2 + assert header_1 != header_2 # Verify the proposer is slashable - proposer = state.validators[proposer_slashing.proposer_index] + proposer = state.validators[header_1.proposer_index] assert is_slashable_validator(proposer, get_current_epoch(state)) # Verify signatures for signed_header in (proposer_slashing.signed_header_1, proposer_slashing.signed_header_2): @@ -1507,7 +1516,7 @@ def process_proposer_slashing(state: BeaconState, proposer_slashing: ProposerSla signing_root = compute_signing_root(signed_header.message, domain) assert bls.Verify(proposer.pubkey, signing_root, signed_header.signature) - slash_validator(state, proposer_slashing.proposer_index) + slash_validator(state, header_1.proposer_index) ``` ##### Attester slashings diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 0bde81e60..ffd75caf4 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -27,6 +27,7 @@ - [Block proposal](#block-proposal) - [Preparing for a `BeaconBlock`](#preparing-for-a-beaconblock) - [Slot](#slot) + - [Proposer index](#proposer-index) - [Parent root](#parent-root) - [Constructing the `BeaconBlockBody`](#constructing-the-beaconblockbody) - [Randao reveal](#randao-reveal) @@ -183,8 +184,7 @@ def get_committee_assignment(state: BeaconState, A validator can use the following function to see if they are supposed to propose during a slot. This function can only be run with a `state` of the slot in question. Proposer selection is only stable within the context of the current epoch. 
```python -def is_proposer(state: BeaconState, - validator_index: ValidatorIndex) -> bool: +def is_proposer(state: BeaconState, validator_index: ValidatorIndex) -> bool: return get_beacon_proposer_index(state) == validator_index ``` @@ -224,11 +224,14 @@ Set `block.slot = slot` where `slot` is the current slot at which the validator *Note*: There might be "skipped" slots between the `parent` and `block`. These skipped slots are processed in the state transition function without per-block processing. +##### Proposer index + +Set `block.proposer_index = validator_index` where `validator_index` is the validator chosen to propose at this slot. The private key mapping to `state.validators[validator_index].pubkey` is used to sign the block. + ##### Parent root Set `block.parent_root = hash_tree_root(parent)`. - #### Constructing the `BeaconBlockBody` ##### Randao reveal diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 78b3b3d25..f5084ef21 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -221,6 +221,7 @@ Note that the `body` has a new `BeaconBlockBody` definition. ```python class BeaconBlock(Container): slot: Slot + proposer_index: ValidatorIndex parent_root: Root state_root: Root body: BeaconBlockBody diff --git a/tests/core/pyspec/eth2spec/test/helpers/block.py b/tests/core/pyspec/eth2spec/test/helpers/block.py index 488e051bd..96cc30e35 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/block.py +++ b/tests/core/pyspec/eth2spec/test/helpers/block.py @@ -65,10 +65,22 @@ def apply_empty_block(spec, state): def build_empty_block(spec, state, slot=None): + """ + Build empty block for ``slot``, built upon the latest block header seen by ``state``. + Slot must be greater than or equal to the current slot in ``state``. 
+ """ if slot is None: slot = state.slot + if slot < state.slot: + raise Exception("build_empty_block cannot build blocks for past slots") + if slot > state.slot: + # transition forward in copied state to grab relevant data from state + state = state.copy() + spec.process_slots(state, slot) + empty_block = spec.BeaconBlock() empty_block.slot = slot + empty_block.proposer_index = spec.get_beacon_proposer_index(state) empty_block.body.eth1_data.deposit_count = state.eth1_deposit_index previous_block_header = state.latest_block_header.copy() if previous_block_header.state_root == spec.Root(): diff --git a/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py b/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py index 79a0b9009..ac2ebcf9c 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py +++ b/tests/core/pyspec/eth2spec/test/helpers/proposer_slashings.py @@ -10,6 +10,7 @@ def get_valid_proposer_slashing(spec, state, signed_1=False, signed_2=False): header_1 = spec.BeaconBlockHeader( slot=slot, + proposer_index=validator_index, parent_root=b'\x33' * 32, state_root=b'\x44' * 32, body_root=b'\x55' * 32, @@ -27,7 +28,6 @@ def get_valid_proposer_slashing(spec, state, signed_1=False, signed_2=False): signed_header_2 = spec.SignedBeaconBlockHeader(message=header_2) return spec.ProposerSlashing( - proposer_index=validator_index, signed_header_1=signed_header_1, signed_header_2=signed_header_2, ) diff --git a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_block_header.py b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_block_header.py index b51584ce5..a2eb744b9 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_block_header.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_block_header.py @@ -47,6 +47,18 @@ def test_invalid_slot_block_header(spec, state): yield from run_block_header_processing(spec, state, block, valid=False) 
+@with_all_phases +@spec_state_test +def test_invalid_proposer_index(spec, state): + block = build_empty_block_for_next_slot(spec, state) + + active_indices = spec.get_active_validator_indices(state, spec.get_current_epoch(state)) + active_indices = [i for i in active_indices if i != block.proposer_index] + block.proposer_index = active_indices[0] # invalid proposer index + + yield from run_block_header_processing(spec, state, block, valid=False) + + @with_all_phases @spec_state_test def test_invalid_parent_root(spec, state): diff --git a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_proposer_slashing.py b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_proposer_slashing.py index 30b3c1fdd..5f1fca969 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_proposer_slashing.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/block_processing/test_process_proposer_slashing.py @@ -22,22 +22,20 @@ def run_proposer_slashing_processing(spec, state, proposer_slashing, valid=True) yield 'post', None return - pre_proposer_balance = get_balance(state, proposer_slashing.proposer_index) + proposer_index = proposer_slashing.signed_header_1.message.proposer_index + pre_proposer_balance = get_balance(state, proposer_index) spec.process_proposer_slashing(state, proposer_slashing) yield 'post', state # check if slashed - slashed_validator = state.validators[proposer_slashing.proposer_index] + slashed_validator = state.validators[proposer_index] assert slashed_validator.slashed assert slashed_validator.exit_epoch < spec.FAR_FUTURE_EPOCH assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH # lost whistleblower reward - assert ( - get_balance(state, proposer_slashing.proposer_index) < - pre_proposer_balance - ) + assert get_balance(state, proposer_index) < pre_proposer_balance @with_all_phases @@ -77,7 +75,24 @@ def test_invalid_sig_1_and_2(spec, state): def test_invalid_proposer_index(spec, state): 
proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True) # Index just too high (by 1) - proposer_slashing.proposer_index = len(state.validators) + proposer_slashing.signed_header_1.message.proposer_index = len(state.validators) + proposer_slashing.signed_header_2.message.proposer_index = len(state.validators) + + yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False) + + +@with_all_phases +@spec_state_test +def test_invalid_different_proposer_indices(spec, state): + proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True) + # set different index and sign + header_1 = proposer_slashing.signed_header_1.message + header_2 = proposer_slashing.signed_header_2.message + active_indices = spec.get_active_validator_indices(state, spec.get_current_epoch(state)) + active_indices = [i for i in active_indices if i != header_1.proposer_index] + + header_2.proposer_index = active_indices[0] + proposer_slashing.signed_header_2 = sign_block_header(spec, state, header_2, privkeys[header_2.proposer_index]) yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False) @@ -89,9 +104,9 @@ def test_epochs_are_different(spec, state): # set slots to be in different epochs header_2 = proposer_slashing.signed_header_2.message + proposer_index = header_2.proposer_index header_2.slot += spec.SLOTS_PER_EPOCH - proposer_slashing.signed_header_2 = sign_block_header( - spec, state, header_2, privkeys[proposer_slashing.proposer_index]) + proposer_slashing.signed_header_2 = sign_block_header(spec, state, header_2, privkeys[proposer_index]) yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False) @@ -113,7 +128,8 @@ def test_proposer_is_not_activated(spec, state): proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True) # set proposer to be not active yet - state.validators[proposer_slashing.proposer_index].activation_epoch = 
spec.get_current_epoch(state) + 1 + proposer_index = proposer_slashing.signed_header_1.message.proposer_index + state.validators[proposer_index].activation_epoch = spec.get_current_epoch(state) + 1 yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False) @@ -124,7 +140,8 @@ def test_proposer_is_slashed(spec, state): proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True) # set proposer to slashed - state.validators[proposer_slashing.proposer_index].slashed = True + proposer_index = proposer_slashing.signed_header_1.message.proposer_index + state.validators[proposer_index].slashed = True yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False) @@ -138,7 +155,7 @@ def test_proposer_is_withdrawn(spec, state): state.slot += spec.SLOTS_PER_EPOCH # set proposer withdrawable_epoch in past current_epoch = spec.get_current_epoch(state) - proposer_index = proposer_slashing.proposer_index + proposer_index = proposer_slashing.signed_header_1.message.proposer_index state.validators[proposer_index].withdrawable_epoch = current_epoch - 1 yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False) diff --git a/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py index 9027660ab..acfef9cd7 100644 --- a/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py @@ -187,7 +187,7 @@ def test_proposer_slashing(spec, state): # copy for later balance lookups. 
pre_state = deepcopy(state) proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=True) - validator_index = proposer_slashing.proposer_index + validator_index = proposer_slashing.signed_header_1.message.proposer_index assert not state.validators[validator_index].slashed From a02aac43c290b82d0d647e7a54e439257fdab1bf Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 18 Feb 2020 12:36:20 -0600 Subject: [PATCH 161/194] adjust hysteresis to avoid initial over-deposit incentive --- specs/phase0/beacon-chain.md | 7 +++++-- .../test_process_final_updates.py | 19 +++++++++++-------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index acf098663..2e6c64be5 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -1402,8 +1402,11 @@ def process_final_updates(state: BeaconState) -> None: # Update effective balances with hysteresis for index, validator in enumerate(state.validators): balance = state.balances[index] - HALF_INCREMENT = EFFECTIVE_BALANCE_INCREMENT // 2 - if balance < validator.effective_balance or validator.effective_balance + 3 * HALF_INCREMENT < balance: + QUARTER_INCREMENT = EFFECTIVE_BALANCE_INCREMENT // 4 + if ( + balance + QUARTER_INCREMENT < validator.effective_balance + or validator.effective_balance + 7 * QUARTER_INCREMENT < balance + ): validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE) # Reset slashings state.slashings[next_epoch % EPOCHS_PER_SLASHINGS_VECTOR] = Gwei(0) diff --git a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py index 58882a44f..b0d05427a 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py +++ 
b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py @@ -51,19 +51,22 @@ def test_effective_balance_hysteresis(spec, state): max = spec.MAX_EFFECTIVE_BALANCE min = spec.EJECTION_BALANCE inc = spec.EFFECTIVE_BALANCE_INCREMENT - half_inc = inc // 2 + quar_inc = inc // 4 cases = [ (max, max, max, "as-is"), - (max, max - 1, max - inc, "round down, step lower"), + (max, max - 1, max, "round up"), (max, max + 1, max, "round down"), + (max, max - quar_inc, max, "lower balance, but not low enough"), + (max, max - quar_inc, max, "lower balance, step down"), + (max, max + (3 * quar_inc) + 1, max, "already at max, as is"), (max, max - inc, max - inc, "exactly 1 step lower"), - (max, max - inc - 1, max - (2 * inc), "just 1 over 1 step lower"), + (max, max - inc - 1, max - (2 * inc), "past 1 step lower, double step"), (max, max - inc + 1, max - inc, "close to 1 step lower"), - (min, min + (half_inc * 3), min, "bigger balance, but not high enough"), - (min, min + (half_inc * 3) + 1, min + inc, "bigger balance, high enough, but small step"), - (min, min + (half_inc * 4) - 1, min + inc, "bigger balance, high enough, close to double step"), - (min, min + (half_inc * 4), min + (2 * inc), "exact two step balance increment"), - (min, min + (half_inc * 4) + 1, min + (2 * inc), "over two steps, round down"), + (min, min + (quar_inc * 7), min, "bigger balance, but not high enough"), + (min, min + (quar_inc * 7) + 1, min + inc, "bigger balance, high enough, but small step"), + (min, min + (quar_inc * 8) - 1, min + inc, "bigger balance, high enough, close to double step"), + (min, min + (quar_inc * 8), min + (2 * inc), "exact two step balance increment"), + (min, min + (quar_inc * 8) + 1, min + (2 * inc), "over two steps, round down"), ] current_epoch = spec.get_current_epoch(state) for i, (pre_eff, bal, _, _) in enumerate(cases): From 71be8940b60dcf6958907f796666464acc51df5b Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 18 Feb 2020 12:56:37 -0600 
Subject: [PATCH 162/194] add a couple more sanity block tests for added rpoposer_index --- .../eth2spec/test/sanity/test_blocks.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py index acfef9cd7..ad7c20802 100644 --- a/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py @@ -119,6 +119,49 @@ def test_invalid_block_sig(spec, state): yield 'post', None +@with_all_phases +@spec_state_test +@always_bls +def test_invalid_proposer_index_sig_from_expected_proposer(spec, state): + yield 'pre', state + + block = build_empty_block_for_next_slot(spec, state) + expect_proposer_index = block.proposer_index + + # Set invalid proposer index but correct signature wrt expected proposer + active_indices = spec.get_active_validator_indices(state, spec.get_current_epoch(state)) + active_indices = [i for i in active_indices if i != block.proposer_index] + block.proposer_index = active_indices[0] # invalid proposer index + + invalid_signed_block = sign_block(spec, state, block, expect_proposer_index) + + expect_assertion_error(lambda: spec.state_transition(state, invalid_signed_block)) + + yield 'blocks', [invalid_signed_block] + yield 'post', None + + +@with_all_phases +@spec_state_test +@always_bls +def test_invalid_proposer_index_sig_from_proposer_index(spec, state): + yield 'pre', state + + block = build_empty_block_for_next_slot(spec, state) + + # Set invalid proposer index but correct signature wrt proposer_index + active_indices = spec.get_active_validator_indices(state, spec.get_current_epoch(state)) + active_indices = [i for i in active_indices if i != block.proposer_index] + block.proposer_index = active_indices[0] # invalid proposer index + + invalid_signed_block = sign_block(spec, state, block, block.proposer_index) + + expect_assertion_error(lambda: spec.state_transition(state, 
invalid_signed_block)) + + yield 'blocks', [invalid_signed_block] + yield 'post', None + + @with_all_phases @spec_state_test def test_skipped_slots(spec, state): From 61f661b3ecbc3e2895c4400a9f1362ae43fb6441 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Thu, 20 Feb 2020 08:23:09 +0100 Subject: [PATCH 163/194] Update specs/phase0/p2p-interface.md Co-Authored-By: Danny Ryan --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 40c9055ac..c5607c0c5 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -226,7 +226,7 @@ The fork version is hex-encoded using the following scheme: ```python ForkVersion = ''.join('{:02x}'.format(x) for x in state.fork.current_version) ``` -For example, the fork version `[0, 1, 2, 10]` will be encoded as `0001020a`. +For example, the fork version `Version('0x0001020a')` will be encoded as `0001020a`. Each gossipsub [message](https://github.com/libp2p/go-libp2p-pubsub/blob/master/pb/rpc.proto#L17-L24) has a maximum size of `GOSSIP_MAX_SIZE`. Clients MUST reject (fail validation) messages that are over this size limit. Likewise, clients MUST NOT emit or propagate messages larger than this limit. 
From 4d72dcf3abf80511b496d5b7c1540cb4e55e810e Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 27 Feb 2020 12:00:55 -0600 Subject: [PATCH 164/194] @hwwhww feedback Co-Authored-By: Hsiao-Wei Wang --- specs/phase0/p2p-interface.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 8ac00cf06..06e8191cd 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -414,15 +414,15 @@ Since snappy frame contents [have a maximum size of `65536` bytes](https://githu **Encoding-dependent header:** Req/Resp protocols using the `ssz` or `ssz_snappy` encoding strategies MUST encode the length of the raw SSZ bytes, encoded as an unsigned [protobuf varint](https://developers.google.com/protocol-buffers/docs/encoding#varints). -*Writing*: By first computing and writing the SSZ byte length the SSZ encoder can then directly write the chunk contents to the stream. +*Writing*: By first computing and writing the SSZ byte length, the SSZ encoder can then directly write the chunk contents to the stream. If Snappy is applied, it can be passed through a buffered Snappy writer to compress frame by frame. *Reading*: After reading the expected SSZ byte length, the SSZ decoder can directly read the contents from the stream. If snappy is applied, it can be passed through a buffered Snappy reader to decompress frame by frame. A reader: -- SHOULD not read more than `max_encoded_len(n)` bytes (`32 + n + n/6`) after reading the SSZ length prefix `n` from the header, [this is considered the worst-case compression result by Snappy](https://github.com/google/snappy/blob/537f4ad6240e586970fe554614542e9717df7902/snappy.cc#L98). -- SHOULD not accept a SSZ length prefix that is bigger than the expected maximum length for the SSZ type (derived from SSZ type information such as vector lengths and list limits). 
+- SHOULD NOT read more than `max_encoded_len(n)` bytes (`32 + n + n // 6`) after reading the SSZ length prefix `n` from the header, [this is considered the worst-case compression result by Snappy](https://github.com/google/snappy/blob/537f4ad6240e586970fe554614542e9717df7902/snappy.cc#L98). +- SHOULD NOT accept an SSZ length prefix that is bigger than the expected maximum length for the SSZ type (derived from SSZ type information such as vector lengths and list limits). - MUST consider remaining bytes, after having read the `n` SSZ bytes, as an invalid input. An EOF is expected. - MUST consider an early EOF, before fully reading the declared length prefix worth of SSZ bytes, as an invalid input. From bb82a051ff439b466ffe66ce374968778f82bdec Mon Sep 17 00:00:00 2001 From: protolambda Date: Thu, 27 Feb 2020 19:39:34 +0000 Subject: [PATCH 165/194] clean up, add invalid input handling --- specs/phase0/p2p-interface.md | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 06e8191cd..377f98869 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -420,11 +420,20 @@ If Snappy is applied, it can be passed through a buffered Snappy writer to compr *Reading*: After reading the expected SSZ byte length, the SSZ decoder can directly read the contents from the stream. If snappy is applied, it can be passed through a buffered Snappy reader to decompress frame by frame. -A reader: -- SHOULD NOT read more than `max_encoded_len(n)` bytes (`32 + n + n // 6`) after reading the SSZ length prefix `n` from the header, [this is considered the worst-case compression result by Snappy](https://github.com/google/snappy/blob/537f4ad6240e586970fe554614542e9717df7902/snappy.cc#L98). -- SHOULD NOT accept an SSZ length prefix that is bigger than the expected maximum length for the SSZ type (derived from SSZ type information such as vector lengths and list limits). 
-- MUST consider remaining bytes, after having read the `n` SSZ bytes, as an invalid input. An EOF is expected. -- MUST consider an early EOF, before fully reading the declared length prefix worth of SSZ bytes, as an invalid input. +A reader SHOULD NOT read more than `max_encoded_len(n)` bytes after reading the SSZ length prefix `n` from the header. +- For `ssz` this is: `n` +- For `ssz_snappy` this is: `32 + n + n // 6`. This is considered the [worst-case compression result](https://github.com/google/snappy/blob/537f4ad6240e586970fe554614542e9717df7902/snappy.cc#L98) by Snappy. + +A reader SHOULD consider the following cases as invalid input: +- A SSZ length prefix that, compared against the SSZ type information (vector lengths, list limits, integer sizes, etc.), is: + - Smaller than the expected minimum serialized length. + - Bigger than the expected maximum serialized length. +- Any remaining bytes, after having read the `n` SSZ bytes. An EOF is expected. +- An early EOF, before fully reading the declared length prefix worth of SSZ bytes. + +In case of an invalid input, a reader MUST: +- From requests: send back an error message, response code `InvalidRequest`. The request itself is ignored. +- From responses: ignore the response, the response MUST be considered bad server behavior. All messages that contain only a single field MUST be encoded directly as the type of that field and MUST NOT be encoded as an SSZ container. 
From 38323d8186793ba1cbbfa6c8000a4a4b60aa97b9 Mon Sep 17 00:00:00 2001 From: Jacek Sieka Date: Sun, 1 Mar 2020 17:17:29 +0100 Subject: [PATCH 166/194] Add faq --- specs/phase0/p2p-interface.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index c5607c0c5..85d9a1e9f 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -763,9 +763,9 @@ For future extensibility with almost zero overhead now (besides the extra bytes ### How do we upgrade gossip channels (e.g. changes in encoding, compression)? -Changing gossipsub/broadcasts requires a coordinated upgrade where all clients start publishing to the new topic together, for example during a hard fork. +Changing gossipsub/broadcasts requires a coordinated upgrade where all clients start publishing to the new topic together, during a hard fork. -One can envision a two-phase deployment as well where clients start listening to the new topic in the first phase then start publishing some time later, letting the traffic naturally move over to the new topic. +When a node is preparing for upcoming tasks (e.g. validator duty lookahead) on a gossipsub topic, the node should join the topic of the future epoch in which the task is to occur in addition to listening to the topics for the current epoch. ### Why must all clients use the same gossip topic instead of one negotiated between each peer pair? 
From 0122081d05cea01dbe70ceec383a8fc75b414f3c Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 2 Mar 2020 15:55:01 -0700 Subject: [PATCH 167/194] hysteresis to -0.25/+1.25 --- specs/phase0/beacon-chain.md | 2 +- .../phase_0/epoch_processing/test_process_final_updates.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 2e6c64be5..aa573ff16 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -1405,7 +1405,7 @@ def process_final_updates(state: BeaconState) -> None: QUARTER_INCREMENT = EFFECTIVE_BALANCE_INCREMENT // 4 if ( balance + QUARTER_INCREMENT < validator.effective_balance - or validator.effective_balance + 7 * QUARTER_INCREMENT < balance + or validator.effective_balance + 5 * QUARTER_INCREMENT < balance ): validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE) # Reset slashings diff --git a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py index b0d05427a..36dae8fb7 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py @@ -62,8 +62,8 @@ def test_effective_balance_hysteresis(spec, state): (max, max - inc, max - inc, "exactly 1 step lower"), (max, max - inc - 1, max - (2 * inc), "past 1 step lower, double step"), (max, max - inc + 1, max - inc, "close to 1 step lower"), - (min, min + (quar_inc * 7), min, "bigger balance, but not high enough"), - (min, min + (quar_inc * 7) + 1, min + inc, "bigger balance, high enough, but small step"), + (min, min + (quar_inc * 5), min, "bigger balance, but not high enough"), + (min, min + (quar_inc * 5) + 1, min + inc, "bigger balance, high enough, but small step"), (min, min + (quar_inc * 8) - 1, min 
+ inc, "bigger balance, high enough, close to double step"), (min, min + (quar_inc * 8), min + (2 * inc), "exact two step balance increment"), (min, min + (quar_inc * 8) + 1, min + (2 * inc), "over two steps, round down"), From b4c7481b35260587eb00c00f2da3b477f1780a85 Mon Sep 17 00:00:00 2001 From: Hsiao-Wei Wang Date: Tue, 3 Mar 2020 01:28:58 +0100 Subject: [PATCH 168/194] Fix the misc table --- specs/phase1/custody-game.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase1/custody-game.md b/specs/phase1/custody-game.md index 121f91f97..af3aadc96 100644 --- a/specs/phase1/custody-game.md +++ b/specs/phase1/custody-game.md @@ -48,8 +48,8 @@ This document details the beacon chain additions and changes in Phase 1 of Ether ### Misc | Name | Value | Unit | -| - | - | -| `BLS12_381_Q` | `4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787` | +| - | - | - | +| `BLS12_381_Q` | `4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787` | - | | `BYTES_PER_CUSTODY_ATOM` | `48` | bytes | ## Configuration From 2d4ec7d52f0759b5e1d2ea8337d369e300a8f301 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 3 Mar 2020 09:29:59 -0700 Subject: [PATCH 169/194] add REWARD_OVERFLOW_INCREMENT to avoid overflow in rewards calculation --- specs/phase0/beacon-chain.md | 5 ++++- .../epoch_processing/test_process_rewards_and_penalties.py | 7 +++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index acf098663..7c14550bb 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -194,6 +194,7 @@ The following values are (non-configurable) constants used throughout the specif | `MAX_EFFECTIVE_BALANCE` | `Gwei(2**5 * 10**9)` (= 32,000,000,000) | | `EJECTION_BALANCE` | `Gwei(2**4 * 10**9)` (= 16,000,000,000) | | `EFFECTIVE_BALANCE_INCREMENT` | 
`Gwei(2**0 * 10**9)` (= 1,000,000,000) | +| `REWARD_OVERFLOW_INCREMENT` | `Gwei(2**6)` (= 64) | ### Initial values @@ -1313,7 +1314,9 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence attesting_balance = get_total_balance(state, unslashed_attesting_indices) for index in eligible_validator_indices: if index in unslashed_attesting_indices: - rewards[index] += get_base_reward(state, index) * attesting_balance // total_balance + increment = REWARD_OVERFLOW_INCREMENT # Factored out from reward numerator to avoid uint64 overflow + reward_numerator = get_base_reward(state, index) // increment * attesting_balance + rewards[index] = reward_numerator // total_balance * increment else: penalties[index] += get_base_reward(state, index) diff --git a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py index fa394df56..b4f50179e 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py @@ -1,7 +1,10 @@ from copy import deepcopy -from eth2spec.test.context import spec_state_test, with_all_phases, spec_test, \ - misc_balances, with_custom_state, default_activation_threshold, single_phase +from eth2spec.test.context import ( + spec_state_test, with_all_phases, spec_test, + misc_balances, with_custom_state, default_activation_threshold, + single_phase, +) from eth2spec.test.helpers.state import ( next_epoch, next_slot, From 33e768083679999d4ec4b289c9a15b37c7f101dd Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 3 Mar 2020 10:58:47 -0700 Subject: [PATCH 170/194] make hysteresis calculations configurable --- configs/mainnet.yaml | 6 ++++++ configs/minimal.yaml | 7 +++++++ specs/phase0/beacon-chain.md | 12 ++++++++--- .../test_process_final_updates.py | 21 
+++++++++++-------- 4 files changed, 34 insertions(+), 12 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 74f062d9b..46977f087 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -21,6 +21,12 @@ SHUFFLE_ROUND_COUNT: 90 MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 16384 # Jan 3, 2020 MIN_GENESIS_TIME: 1578009600 +# 4 +HYSTERESIS_QUOTIENT: 4 +# 1 (minus 0.25) +HYSTERESIS_DOWNWARD_MULTIPLIER: 1 +# 5 (plus 1.25) +HYSTERESIS_UPWARD_MULTIPLIER: 5 # Fork Choice diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 42c63e301..202da8237 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -20,6 +20,13 @@ SHUFFLE_ROUND_COUNT: 10 MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 64 # Jan 3, 2020 MIN_GENESIS_TIME: 1578009600 +# 4 +HYSTERESIS_QUOTIENT: 4 +# 1 (minus 0.25) +HYSTERESIS_DOWNWARD_MULTIPLIER: 1 +# 5 (plus 1.25) +HYSTERESIS_UPWARD_MULTIPLIER: 5 + # Fork Choice diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index aa573ff16..c7a76d066 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -183,6 +183,10 @@ The following values are (non-configurable) constants used throughout the specif | `SHUFFLE_ROUND_COUNT` | `90` | | `MIN_GENESIS_ACTIVE_VALIDATOR_COUNT` | `2**14` (= 16,384) | | `MIN_GENESIS_TIME` | `1578009600` (Jan 3, 2020) | +| `HYSTERESIS_QUOTIENT` | `4` | +| `HYSTERESIS_DOWNWARD_MULTIPLIER` | `1` | +| `HYSTERESIS_UPWARD_MULTIPLIER` | `5` | + - For the safety of committees, `TARGET_COMMITTEE_SIZE` exceeds [the recommended minimum committee size of 111](http://web.archive.org/web/20190504131341/https://vitalik.ca/files/Ithaca201807_Sharding.pdf); with sufficient active validators (at least `SLOTS_PER_EPOCH * TARGET_COMMITTEE_SIZE`), the shuffling algorithm ensures committee sizes of at least `TARGET_COMMITTEE_SIZE`. (Unbiasable randomness with a Verifiable Delay Function (VDF) will improve committee robustness and lower the safe minimum committee size.) 
@@ -1402,10 +1406,12 @@ def process_final_updates(state: BeaconState) -> None: # Update effective balances with hysteresis for index, validator in enumerate(state.validators): balance = state.balances[index] - QUARTER_INCREMENT = EFFECTIVE_BALANCE_INCREMENT // 4 + HYSTERESIS_INCREMENT = EFFECTIVE_BALANCE_INCREMENT // HYSTERESIS_QUOTIENT + DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_DOWNWARD_MULTIPLIER + UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_UPWARD_MULTIPLIER if ( - balance + QUARTER_INCREMENT < validator.effective_balance - or validator.effective_balance + 5 * QUARTER_INCREMENT < balance + balance + DOWNWARD_THRESHOLD < validator.effective_balance + or validator.effective_balance + UPWARD_THRESHOLD < balance ): validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE) # Reset slashings diff --git a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py index 36dae8fb7..d959ce16e 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py @@ -51,22 +51,25 @@ def test_effective_balance_hysteresis(spec, state): max = spec.MAX_EFFECTIVE_BALANCE min = spec.EJECTION_BALANCE inc = spec.EFFECTIVE_BALANCE_INCREMENT - quar_inc = inc // 4 + div = spec.HYSTERESIS_QUOTIENT + hys_inc = inc // div + down = spec.HYSTERESIS_DOWNWARD_MULTIPLIER + up = spec.HYSTERESIS_UPWARD_MULTIPLIER cases = [ (max, max, max, "as-is"), (max, max - 1, max, "round up"), (max, max + 1, max, "round down"), - (max, max - quar_inc, max, "lower balance, but not low enough"), - (max, max - quar_inc, max, "lower balance, step down"), - (max, max + (3 * quar_inc) + 1, max, "already at max, as is"), + (max, max - down * hys_inc, max, "lower balance, but not low enough"), + (max, max - 
down * hys_inc - 1, max - inc, "lower balance, step down"), + (max, max + (up * hys_inc) + 1, max, "already at max, as is"), (max, max - inc, max - inc, "exactly 1 step lower"), (max, max - inc - 1, max - (2 * inc), "past 1 step lower, double step"), (max, max - inc + 1, max - inc, "close to 1 step lower"), - (min, min + (quar_inc * 5), min, "bigger balance, but not high enough"), - (min, min + (quar_inc * 5) + 1, min + inc, "bigger balance, high enough, but small step"), - (min, min + (quar_inc * 8) - 1, min + inc, "bigger balance, high enough, close to double step"), - (min, min + (quar_inc * 8), min + (2 * inc), "exact two step balance increment"), - (min, min + (quar_inc * 8) + 1, min + (2 * inc), "over two steps, round down"), + (min, min + (hys_inc * up), min, "bigger balance, but not high enough"), + (min, min + (hys_inc * up) + 1, min + inc, "bigger balance, high enough, but small step"), + (min, min + (hys_inc * div * 2) - 1, min + inc, "bigger balance, high enough, close to double step"), + (min, min + (hys_inc * div * 2), min + (2 * inc), "exact two step balance increment"), + (min, min + (hys_inc * div * 2) + 1, min + (2 * inc), "over two steps, round down"), ] current_epoch = spec.get_current_epoch(state) for i, (pre_eff, bal, _, _) in enumerate(cases): From f082aa6ca9dfac0b1e03fe91b0cdf1e8886f3199 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 3 Mar 2020 15:34:02 -0700 Subject: [PATCH 171/194] use EFFECTIVE_BALANCE_INCREMENT to normalize reward calculations --- specs/phase0/beacon-chain.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 7c14550bb..cd1d765fe 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -194,7 +194,6 @@ The following values are (non-configurable) constants used throughout the specif | `MAX_EFFECTIVE_BALANCE` | `Gwei(2**5 * 10**9)` (= 32,000,000,000) | | `EJECTION_BALANCE` | `Gwei(2**4 * 10**9)` (= 
16,000,000,000) | | `EFFECTIVE_BALANCE_INCREMENT` | `Gwei(2**0 * 10**9)` (= 1,000,000,000) | -| `REWARD_OVERFLOW_INCREMENT` | `Gwei(2**6)` (= 64) | ### Initial values @@ -1314,9 +1313,9 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence attesting_balance = get_total_balance(state, unslashed_attesting_indices) for index in eligible_validator_indices: if index in unslashed_attesting_indices: - increment = REWARD_OVERFLOW_INCREMENT # Factored out from reward numerator to avoid uint64 overflow - reward_numerator = get_base_reward(state, index) // increment * attesting_balance - rewards[index] = reward_numerator // total_balance * increment + increment = EFFECTIVE_BALANCE_INCREMENT # Factored out from balance totals to avoid uint64 overflow + reward_numerator = get_base_reward(state, index) * (attesting_balance // increment) + rewards[index] = reward_numerator // (total_balance // increment) else: penalties[index] += get_base_reward(state, index) From 5dae252f56f4e780dceb35476407d00a0ada597b Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 10 Feb 2020 15:53:10 -0700 Subject: [PATCH 172/194] add eth2 key/value ENR to phase 0 p2p --- specs/phase0/p2p-interface.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 7ad8759d3..651938a54 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -604,6 +604,12 @@ Nonetheless, ENRs MUST carry a generic `eth2` key with nil value, denoting that #### Mainnet +ENRs MUST carry a generic `eth2` with a 4-byte value of the node's current fork version to ensure connections are made with peers on the intended eth2 network. + +| Key | Value | +|:-------------|:--------------------| +| `eth2` | SSZ `Bytes4` | + On mainnet, ENRs MUST include a structure enumerating the capabilities offered by the peer in an efficient manner. The concrete solution is currently undefined. 
Proposals include using namespaced bloom filters mapping capabilities to specific protocol IDs supported under that capability. ### Topic advertisement From 37b1fed8ff03db4ad6d0113ea69551609067fbf6 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 4 Mar 2020 14:01:50 -0700 Subject: [PATCH 173/194] update eth2 ENR field to use ENRForkID --- specs/phase0/p2p-interface.md | 39 +++++++++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 651938a54..c8d4bc1b4 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -55,6 +55,8 @@ It consists of four main sections: - [Attestation subnet bitfield](#attestation-subnet-bitfield) - [Interop](#interop-5) - [Mainnet](#mainnet-5) + - [`eth2` field](#eth2-field) + - [General capabilities](#general-capabilities) - [Topic advertisement](#topic-advertisement) - [Mainnet](#mainnet-6) - [Design decision rationale](#design-decision-rationale) @@ -88,6 +90,7 @@ It consists of four main sections: - [Why are we sending entire objects in the pubsub and not just hashes?](#why-are-we-sending-entire-objects-in-the-pubsub-and-not-just-hashes) - [Should clients gossip blocks if they *cannot* validate the proposer signature due to not yet being synced, not knowing the head block, etc?](#should-clients-gossip-blocks-if-they-cannot-validate-the-proposer-signature-due-to-not-yet-being-synced-not-knowing-the-head-block-etc) - [How are we going to discover peers in a gossipsub topic?](#how-are-we-going-to-discover-peers-in-a-gossipsub-topic) + - [How should fork version be used in practice?](#how-should-fork-version-be-used-in-practice) - [Req/Resp](#reqresp) - [Why segregate requests into dedicated protocol IDs?](#why-segregate-requests-into-dedicated-protocol-ids) - [Why are messages length-prefixed with a protobuf varint in the 
SSZ-encoding?](#why-are-messages-length-prefixed-with-a-protobuf-varint-in-the-ssz-encoding) @@ -604,11 +607,35 @@ Nonetheless, ENRs MUST carry a generic `eth2` key with nil value, denoting that #### Mainnet -ENRs MUST carry a generic `eth2` with a 4-byte value of the node's current fork version to ensure connections are made with peers on the intended eth2 network. +##### `eth2` field + +ENRs MUST carry a generic `eth2` key with an 16-byte value of the node's current fork version, next fork version, and next fork epoch to ensure connections are made with peers on the intended eth2 network. | Key | Value | |:-------------|:--------------------| -| `eth2` | SSZ `Bytes4` | +| `eth2` | SSZ `ENRForkID` | + +Specifically, the value of the `eth2` key MUST be the following SSZ encoded object (`ENRForkID`), where + +``` +( + current_fork_version: Fork + next_fork_version: Fork + next_fork_epoch: Epoch +) +``` + +where the fields of `ENRForkID` are defined as + +* `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) +* `next_fork_version` is the fork version corresponding to the next planned hard fork at a future epoch. If no future fork is planned, set `next_fork_version = current_fork_version` to signal this fact +* `next_fork_epoch` is the epoch at which the next fork is planned and the `current_fork_version` will be updated. If no future fork is planned, set `next_fork_epoch = FAR_FUTURE_EPOCH` to signal this fact + +Clients SHOULD connect to peers with `current_fork_version`, `next_fork_version`, and `next_fork_epoch` that match local values. + +Clients MAY connect to peers with the same `current_fork_version` but a different `next_fork_version`/`next_fork_epoch`. 
Unless `ENRForkID` is manually updated to matching prior to the earlier `next_fork_epoch` of the two clients, these type of connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`. + +##### General capabilities On mainnet, ENRs MUST include a structure enumerating the capabilities offered by the peer in an efficient manner. The concrete solution is currently undefined. Proposals include using namespaced bloom filters mapping capabilities to specific protocol IDs supported under that capability. @@ -829,6 +856,14 @@ In Phase 0, peers for attestation subnets will be found using the `attnets` entr Although this method will be sufficient for early phases of Eth2, we aim to use the more appropriate discv5 topics for this and other similar tasks in the future. ENRs should ultimately not be used for this purpose. They are best suited to store identity, location, and capability information, rather than more volatile advertisements. +### How should fork version be used in practice? + +Fork versions are to be manually updated (likely via incrementing or using the less collision-prone git spec hash) at each hard fork. This is to provide native domain separation for signatures as well as to aid in usefulness for identitying peers (via ENRs) and versioning network protocols (e.g. using fork version to naturally version gossipsub topics). + +To reap the full benefit of the native versioning scheme, networks SHOULD avoid collisions. For example, a testnet might us mainnet versioning but use a unique higher order byte to signal the testnet. + +A node locally stores all previous and future planned fork versions along with the each fork epoch. This allows for handling sync starting from past forks/epochs and for connections to safely be made with peers syncing from past forks/epochs. + ## Req/Resp ### Why segregate requests into dedicated protocol IDs? 
From 7e04989e29306c8d6049338457e3ba89c7959218 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Thu, 5 Mar 2020 09:21:32 -0700 Subject: [PATCH 174/194] add genesis_validators_root to beaconstate and utilize in sig domain separation as well as fork separation --- setup.py | 4 ++-- specs/phase0/beacon-chain.md | 14 +++++++---- specs/phase0/p2p-interface.md | 24 +++++++++++++++---- specs/phase1/beacon-chain.md | 1 + .../pyspec/eth2spec/test/helpers/genesis.py | 3 +++ 5 files changed, 35 insertions(+), 11 deletions(-) diff --git a/setup.py b/setup.py index 444489d2e..6dd4de861 100644 --- a/setup.py +++ b/setup.py @@ -95,7 +95,7 @@ from dataclasses import ( from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( View, boolean, Container, List, Vector, uint64, - Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, + Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, ) from eth2spec.utils import bls @@ -117,7 +117,7 @@ from dataclasses import ( from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_typing import ( View, boolean, Container, List, Vector, uint64, uint8, bit, - ByteList, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, + ByteList, Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector, ) from eth2spec.utils import bls diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index c7a76d066..731b02b1a 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -149,7 +149,7 @@ We define the following Python custom types for type hinting and readability: | `Root` | `Bytes32` | a Merkle root | | `Version` | `Bytes4` | a fork version number | | `DomainType` | `Bytes4` | a domain type | -| `Domain` | `Bytes8` | a signature domain | +| `Domain` | `Bytes32` | a signature domain | | `BLSPubkey` | `Bytes48` | a BLS12-381 public key | | `BLSSignature` | `Bytes96` | a BLS12-381 signature | @@ -473,6 +473,7 
@@ class BeaconBlock(Container): class BeaconState(Container): # Versioning genesis_time: uint64 + genesis_validators_root: Root slot: Slot fork: Fork # History @@ -795,13 +796,15 @@ def compute_activation_exit_epoch(epoch: Epoch) -> Epoch: #### `compute_domain` ```python -def compute_domain(domain_type: DomainType, fork_version: Optional[Version]=None) -> Domain: +def compute_domain(domain_type: DomainType, fork_version: Optional[Version]=None, genesis_root: Root=None) -> Domain: """ Return the domain for the ``domain_type`` and ``fork_version``. """ if fork_version is None: fork_version = GENESIS_FORK_VERSION - return Domain(domain_type + fork_version) + if genesis_root is None: + genesis_root = Root() + return Domain(domain_type + fork_version + genesis_root[:24]) ``` #### `compute_signing_root` @@ -977,7 +980,7 @@ def get_domain(state: BeaconState, domain_type: DomainType, epoch: Epoch=None) - """ epoch = get_current_epoch(state) if epoch is None else epoch fork_version = state.fork.previous_version if epoch < state.fork.epoch else state.fork.current_version - return compute_domain(domain_type, fork_version) + return compute_domain(domain_type, fork_version, state.genesis_validators_root) ``` #### `get_indexed_attestation` @@ -1122,6 +1125,9 @@ def initialize_beacon_state_from_eth1(eth1_block_hash: Bytes32, validator.activation_eligibility_epoch = GENESIS_EPOCH validator.activation_epoch = GENESIS_EPOCH + # Set genesis validators root for domain separation and chain versioning + state.genesis_validators_root = hash_tree_root(state.validators) + return state ``` diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index c8d4bc1b4..f2dcc2324 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -615,23 +615,37 @@ ENRs MUST carry a generic `eth2` key with an 16-byte value of the node's current |:-------------|:--------------------| | `eth2` | SSZ `ENRForkID` | -Specifically, the value of the `eth2` key MUST be the 
following SSZ encoded object (`ENRForkID`), where +First we define `current_fork` as the following SSZ encoded object ``` ( - current_fork_version: Fork - next_fork_version: Fork + current_fork_version: Version + genesis_validators_root: Root +) +``` + +where + +* `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) +* `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` + +Specifically, the value of the `eth2` key MUST be the following SSZ encoded object (`ENRForkID`) + +``` +( + current_fork_digest: Bytes4 + next_fork_version: Version next_fork_epoch: Epoch ) ``` where the fields of `ENRForkID` are defined as -* `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) +* `current_fork_digest` is `hash_tree_root(current_fork)[:4]` * `next_fork_version` is the fork version corresponding to the next planned hard fork at a future epoch. If no future fork is planned, set `next_fork_version = current_fork_version` to signal this fact * `next_fork_epoch` is the epoch at which the next fork is planned and the `current_fork_version` will be updated. If no future fork is planned, set `next_fork_epoch = FAR_FUTURE_EPOCH` to signal this fact -Clients SHOULD connect to peers with `current_fork_version`, `next_fork_version`, and `next_fork_epoch` that match local values. +Clients SHOULD connect to peers with `current_fork_digest`, `next_fork_version`, and `next_fork_epoch` that match local values. Clients MAY connect to peers with the same `current_fork_version` but a different `next_fork_version`/`next_fork_epoch`. Unless `ENRForkID` is manually updated to matching prior to the earlier `next_fork_epoch` of the two clients, these type of connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`. 
diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 78b3b3d25..095bbafc1 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -244,6 +244,7 @@ Note that aside from the new additions, `Validator` and `PendingAttestation` hav class BeaconState(Container): # Versioning genesis_time: uint64 + genesis_validators_root: Root slot: Slot fork: Fork # History diff --git a/tests/core/pyspec/eth2spec/test/helpers/genesis.py b/tests/core/pyspec/eth2spec/test/helpers/genesis.py index c60787b92..46bc62fe5 100644 --- a/tests/core/pyspec/eth2spec/test/helpers/genesis.py +++ b/tests/core/pyspec/eth2spec/test/helpers/genesis.py @@ -43,4 +43,7 @@ def create_genesis_state(spec, validator_balances, activation_threshold): validator.activation_eligibility_epoch = spec.GENESIS_EPOCH validator.activation_epoch = spec.GENESIS_EPOCH + # Set genesis validators root for domain separation and chain versioning + state.genesis_validators_root = spec.hash_tree_root(state.validators) + return state From c5aca062b435825cb33731b19c5ce933eafe5016 Mon Sep 17 00:00:00 2001 From: Herman Junge Date: Mon, 9 Mar 2020 17:16:02 +0000 Subject: [PATCH 175/194] Update reference to Gasper paper --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 3f58b3dc4..83dbbd3fe 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ The following are the broad design goals for Ethereum 2.0: * [Design Rationale](https://notes.ethereum.org/s/rkhCgQteN#) * [Phase 0 Onboarding Document](https://notes.ethereum.org/s/Bkn3zpwxB) -* [Gasper paper](https://github.com/ethereum/research/blob/master/papers/ffg%2Bghost/paper.pdf) +* [Gasper paper](https://arxiv.org/abs/2003.03052) ## For spec contributors @@ -67,4 +67,3 @@ The following are the broad design goals for Ethereum 2.0: Documentation on the different components used during spec writing can be found here: * [YAML Test Generators](tests/generators/README.md) * 
[Executable Python Spec, with Py-tests](tests/core/pyspec/README.md) - From 7d4997f0afa1bee02d5fc9922ad3f09b8c446363 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Mon, 9 Mar 2020 12:03:12 -0600 Subject: [PATCH 176/194] bump version to v0.11.0 for coming release --- tests/core/pyspec/eth2spec/VERSION.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/pyspec/eth2spec/VERSION.txt b/tests/core/pyspec/eth2spec/VERSION.txt index 9f20097b6..142464bf2 100644 --- a/tests/core/pyspec/eth2spec/VERSION.txt +++ b/tests/core/pyspec/eth2spec/VERSION.txt @@ -1 +1 @@ -0.10.2.dev0 \ No newline at end of file +0.11.0 \ No newline at end of file From da5720f9d1c3e5e1314aa34278cd4856e042f92a Mon Sep 17 00:00:00 2001 From: Sly Gryphon Date: Tue, 10 Mar 2020 21:31:03 +1000 Subject: [PATCH 177/194] Put back in a requirement to store recent signed blocks that was removed when SignedBeaconBlock was introduced (prior to that the signature was in BeaconBlock, which was recorded in Store). --- specs/phase0/p2p-interface.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 8bde2b80a..b214c458d 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -547,7 +547,8 @@ Requests count beacon blocks from the peer starting from `start_slot`, leading u The request MUST be encoded as an SSZ-container. The response MUST consist of zero or more `response_chunk`. Each _successful_ `response_chunk` MUST contain a single `SignedBeaconBlock` payload. -Clients MUST support requesting blocks since the start of the weak subjectivity period and up to the given `head_block_root`. + +Clients MUST keep a record of signed blocks seen since the since the start of the weak subjectivity period and MUST support requesting blocks up to the given `head_block_root`. Clients MUST respond with at least one block, if they have it and it exists in the range. 
Clients MAY limit the number of blocks in the response. From 2d7a292d3663b398ecb222b873ac700726ddf57a Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 10 Mar 2020 18:36:53 +0100 Subject: [PATCH 178/194] eth1 vote period constant in epochs: update configs, phase1, tests --- configs/mainnet.yaml | 4 +-- configs/minimal.yaml | 2 +- specs/phase0/validator.md | 4 +-- specs/phase1/beacon-chain.md | 2 +- .../test_process_final_updates.py | 4 +-- .../eth2spec/test/sanity/test_blocks.py | 26 +++++++++++-------- 6 files changed, 23 insertions(+), 19 deletions(-) diff --git a/configs/mainnet.yaml b/configs/mainnet.yaml index 74f062d9b..69bbbff22 100644 --- a/configs/mainnet.yaml +++ b/configs/mainnet.yaml @@ -82,8 +82,8 @@ SLOTS_PER_EPOCH: 32 MIN_SEED_LOOKAHEAD: 1 # 2**2 (= 4) epochs 25.6 minutes MAX_SEED_LOOKAHEAD: 4 -# 2**10 (= 1,024) slots ~1.7 hours -SLOTS_PER_ETH1_VOTING_PERIOD: 1024 +# 2**5 (= 32) epochs ~3.4 hours +EPOCHS_PER_ETH1_VOTING_PERIOD: 32 # 2**13 (= 8,192) slots ~13 hours SLOTS_PER_HISTORICAL_ROOT: 8192 # 2**8 (= 256) epochs ~27 hours diff --git a/configs/minimal.yaml b/configs/minimal.yaml index 42c63e301..23ebbfbbc 100644 --- a/configs/minimal.yaml +++ b/configs/minimal.yaml @@ -82,7 +82,7 @@ MIN_SEED_LOOKAHEAD: 1 # 2**2 (= 4) epochs MAX_SEED_LOOKAHEAD: 4 # [customized] higher frequency new deposits from eth1 for testing -SLOTS_PER_ETH1_VOTING_PERIOD: 16 +EPOCHS_PER_ETH1_VOTING_PERIOD: 2 # [customized] smaller state SLOTS_PER_HISTORICAL_ROOT: 64 # 2**8 (= 256) epochs diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 0bde81e60..7865e3921 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -129,7 +129,7 @@ To submit a deposit: ### Process deposit -Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. 
This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `SLOTS_PER_ETH1_VOTING_PERIOD` slots (~3.4 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated. +Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH` slots (~3.4 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated. ### Validator index @@ -269,7 +269,7 @@ def compute_time_at_slot(state: BeaconState, slot: Slot) -> uint64: ```python def voting_period_start_time(state: BeaconState) -> uint64: - eth1_voting_period_start_slot = Slot(state.slot - state.slot % SLOTS_PER_ETH1_VOTING_PERIOD) + eth1_voting_period_start_slot = Slot(state.slot - state.slot % (EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH)) return compute_time_at_slot(state, eth1_voting_period_start_slot) ``` diff --git a/specs/phase1/beacon-chain.md b/specs/phase1/beacon-chain.md index 78b3b3d25..259d18601 100644 --- a/specs/phase1/beacon-chain.md +++ b/specs/phase1/beacon-chain.md @@ -253,7 +253,7 @@ class BeaconState(Container): historical_roots: List[Root, HISTORICAL_ROOTS_LIMIT] # Eth1 eth1_data: Eth1Data - eth1_data_votes: List[Eth1Data, SLOTS_PER_ETH1_VOTING_PERIOD] + eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH] eth1_deposit_index: uint64 # Registry validators: List[Validator, VALIDATOR_REGISTRY_LIMIT] diff --git a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py 
b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py index 58882a44f..110bd35a7 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_final_updates.py @@ -11,7 +11,7 @@ def run_process_final_updates(spec, state): @with_all_phases @spec_state_test def test_eth1_vote_no_reset(spec, state): - assert spec.SLOTS_PER_ETH1_VOTING_PERIOD > spec.SLOTS_PER_EPOCH + assert spec.EPOCHS_PER_ETH1_VOTING_PERIOD > 1 # skip ahead to the end of the epoch state.slot = spec.SLOTS_PER_EPOCH - 1 for i in range(state.slot + 1): # add a vote for each skipped slot. @@ -29,7 +29,7 @@ def test_eth1_vote_no_reset(spec, state): @spec_state_test def test_eth1_vote_reset(spec, state): # skip ahead to the end of the voting period - state.slot = spec.SLOTS_PER_ETH1_VOTING_PERIOD - 1 + state.slot = (spec.EPOCHS_PER_ETH1_VOTING_PERIOD * spec.SLOTS_PER_EPOCH) - 1 for i in range(state.slot + 1): # add a vote for each skipped slot. state.eth1_data_votes.append( spec.Eth1Data(deposit_root=b'\xaa' * 32, diff --git a/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py b/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py index 9027660ab..c6f9d8576 100644 --- a/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py +++ b/tests/core/pyspec/eth2spec/test/sanity/test_blocks.py @@ -486,10 +486,12 @@ def test_historical_batch(spec, state): @spec_state_test def test_eth1_data_votes_consensus(spec, state): # Don't run when it will take very, very long to simulate. Minimal configuration suffices. 
- if spec.SLOTS_PER_ETH1_VOTING_PERIOD > 16: + if spec.EPOCHS_PER_ETH1_VOTING_PERIOD > 2: return - offset_block = build_empty_block(spec, state, slot=spec.SLOTS_PER_ETH1_VOTING_PERIOD - 1) + voting_period_slots = spec.EPOCHS_PER_ETH1_VOTING_PERIOD * spec.SLOTS_PER_EPOCH + + offset_block = build_empty_block(spec, state, slot=voting_period_slots - 1) state_transition_and_sign_block(spec, state, offset_block) yield 'pre', state @@ -499,14 +501,14 @@ def test_eth1_data_votes_consensus(spec, state): blocks = [] - for i in range(0, spec.SLOTS_PER_ETH1_VOTING_PERIOD): + for i in range(0, voting_period_slots): block = build_empty_block_for_next_slot(spec, state) # wait for over 50% for A, then start voting B - block.body.eth1_data.block_hash = b if i * 2 > spec.SLOTS_PER_ETH1_VOTING_PERIOD else a + block.body.eth1_data.block_hash = b if i * 2 > voting_period_slots else a signed_block = state_transition_and_sign_block(spec, state, block) blocks.append(signed_block) - assert len(state.eth1_data_votes) == spec.SLOTS_PER_ETH1_VOTING_PERIOD + assert len(state.eth1_data_votes) == voting_period_slots assert state.eth1_data.block_hash == a # transition to next eth1 voting period @@ -519,7 +521,7 @@ def test_eth1_data_votes_consensus(spec, state): yield 'post', state assert state.eth1_data.block_hash == a - assert state.slot % spec.SLOTS_PER_ETH1_VOTING_PERIOD == 0 + assert state.slot % voting_period_slots == 0 assert len(state.eth1_data_votes) == 1 assert state.eth1_data_votes[0].block_hash == c @@ -528,12 +530,14 @@ def test_eth1_data_votes_consensus(spec, state): @spec_state_test def test_eth1_data_votes_no_consensus(spec, state): # Don't run when it will take very, very long to simulate. Minimal configuration suffices. 
- if spec.SLOTS_PER_ETH1_VOTING_PERIOD > 16: + if spec.EPOCHS_PER_ETH1_VOTING_PERIOD > 2: return + voting_period_slots = spec.EPOCHS_PER_ETH1_VOTING_PERIOD * spec.SLOTS_PER_EPOCH + pre_eth1_hash = state.eth1_data.block_hash - offset_block = build_empty_block(spec, state, slot=spec.SLOTS_PER_ETH1_VOTING_PERIOD - 1) + offset_block = build_empty_block(spec, state, slot=voting_period_slots - 1) state_transition_and_sign_block(spec, state, offset_block) yield 'pre', state @@ -542,14 +546,14 @@ def test_eth1_data_votes_no_consensus(spec, state): blocks = [] - for i in range(0, spec.SLOTS_PER_ETH1_VOTING_PERIOD): + for i in range(0, voting_period_slots): block = build_empty_block_for_next_slot(spec, state) # wait for precisely 50% for A, then start voting B for other 50% - block.body.eth1_data.block_hash = b if i * 2 >= spec.SLOTS_PER_ETH1_VOTING_PERIOD else a + block.body.eth1_data.block_hash = b if i * 2 >= voting_period_slots else a signed_block = state_transition_and_sign_block(spec, state, block) blocks.append(signed_block) - assert len(state.eth1_data_votes) == spec.SLOTS_PER_ETH1_VOTING_PERIOD + assert len(state.eth1_data_votes) == voting_period_slots assert state.eth1_data.block_hash == pre_eth1_hash yield 'blocks', blocks From 55d436db517f95fa39dd202619c02adf92b61c8a Mon Sep 17 00:00:00 2001 From: protolambda Date: Tue, 10 Mar 2020 18:55:59 +0100 Subject: [PATCH 179/194] simplify description of voting period time --- specs/phase0/validator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 7865e3921..009816c2e 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -129,7 +129,7 @@ To submit a deposit: ### Process deposit -Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. 
This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH` slots (~3.4 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated. +Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `EPOCHS_PER_ETH1_VOTING_PERIOD` epochs (~3.4 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated. ### Validator index From 1818f349ad9200c0d9e1d8b9633fa4efc9330950 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 11:59:34 -0600 Subject: [PATCH 180/194] add ForkDigest type, clarify how genesis_validators_root is mixed into domains for chain isolation in p2p faq --- specs/phase0/beacon-chain.md | 3 ++- specs/phase0/p2p-interface.md | 10 +++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 731b02b1a..8c93df7ef 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -149,6 +149,7 @@ We define the following Python custom types for type hinting and readability: | `Root` | `Bytes32` | a Merkle root | | `Version` | `Bytes4` | a fork version number | | `DomainType` | `Bytes4` | a domain type | +| `ForkDigest` | `Bytes4` | a digest of the current fork data | | `Domain` | `Bytes32` | a signature domain | | `BLSPubkey` | `Bytes48` | a BLS12-381 public key | | `BLSSignature` | `Bytes96` | a BLS12-381 signature | @@ -803,7 +804,7 @@ def compute_domain(domain_type: DomainType, 
fork_version: Optional[Version]=None if fork_version is None: fork_version = GENESIS_FORK_VERSION if genesis_root is None: - genesis_root = Root() + genesis_root = Root() # all bytes zero by default return Domain(domain_type + fork_version + genesis_root[:24]) ``` diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index f2dcc2324..0b6a5798a 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -615,7 +615,7 @@ ENRs MUST carry a generic `eth2` key with an 16-byte value of the node's current |:-------------|:--------------------| | `eth2` | SSZ `ENRForkID` | -First we define `current_fork` as the following SSZ encoded object +First we define `current_fork_data` as the following SSZ encoded object ``` ( @@ -633,7 +633,7 @@ Specifically, the value of the `eth2` key MUST be the following SSZ encoded obje ``` ( - current_fork_digest: Bytes4 + current_fork_digest: ForkDigest next_fork_version: Version next_fork_epoch: Epoch ) @@ -641,7 +641,7 @@ Specifically, the value of the `eth2` key MUST be the following SSZ encoded obje where the fields of `ENRForkID` are defined as -* `current_fork_digest` is `hash_tree_root(current_fork)[:4]` +* `current_fork_digest` is `ForkDigest(hash_tree_root(current_fork_data)[:4])` * `next_fork_version` is the fork version corresponding to the next planned hard fork at a future epoch. If no future fork is planned, set `next_fork_version = current_fork_version` to signal this fact * `next_fork_epoch` is the epoch at which the next fork is planned and the `current_fork_version` will be updated. If no future fork is planned, set `next_fork_epoch = FAR_FUTURE_EPOCH` to signal this fact @@ -872,9 +872,9 @@ Although this method will be sufficient for early phases of Eth2, we aim to use ### How should fork version be used in practice? -Fork versions are to be manually updated (likely via incrementing or using the less collision-prone git spec hash) at each hard fork. 
This is to provide native domain separation for signatures as well as to aid in usefulness for identitying peers (via ENRs) and versioning network protocols (e.g. using fork version to naturally version gossipsub topics). +Fork versions are to be manually updated (likely via incrementing) at each hard fork. This is to provide native domain separation for signatures as well as to aid in usefulness for identitying peers (via ENRs) and versioning network protocols (e.g. using fork version to naturally version gossipsub topics). -To reap the full benefit of the native versioning scheme, networks SHOULD avoid collisions. For example, a testnet might us mainnet versioning but use a unique higher order byte to signal the testnet. +`BeaconState.genesis_validators_root` is mixed into signature and ENR fork domains to aid in the ease of domain separation between chains. This allows fork versions to safely be reused across chains except for the case of contentious forks using the same genesis. In these cases, extra care should be taken to isolate fork versions (e.g. flip a high order bit in all future versions of one of the chains). A node locally stores all previous and future planned fork versions along with the each fork epoch. This allows for handling sync starting from past forks/epochs and for connections to safely be made with peers syncing from past forks/epochs. From d6eedd95c034736ebee274a5bcf18c71b8ca893e Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 13:04:28 -0600 Subject: [PATCH 181/194] fix wording to be clear it is about serving blocks Co-Authored-By: Diederik Loerakker --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index b214c458d..c0580fafd 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -548,7 +548,7 @@ The request MUST be encoded as an SSZ-container. 
The response MUST consist of zero or more `response_chunk`. Each _successful_ `response_chunk` MUST contain a single `SignedBeaconBlock` payload. -Clients MUST keep a record of signed blocks seen since the since the start of the weak subjectivity period and MUST support requesting blocks up to the given `head_block_root`. +Clients MUST keep a record of signed blocks seen since the since the start of the weak subjectivity period and MUST support serving requests of blocks up to their own `head_block_root`. Clients MUST respond with at least one block, if they have it and it exists in the range. Clients MAY limit the number of blocks in the response. From 1579072e15e564904e41252f7db4ed5d1a04c057 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 13:12:17 -0600 Subject: [PATCH 182/194] add note about total balance overflowing --- specs/phase0/beacon-chain.md | 1 + 1 file changed, 1 insertion(+) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index a90a8aa1a..b2b96de9d 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -954,6 +954,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex: def get_total_balance(state: BeaconState, indices: Set[ValidatorIndex]) -> Gwei: """ Return the combined effective balance of the ``indices``. (1 Gwei minimum to avoid divisions by zero.) + Math safe up to ~10B ETH, afterwhich this overflows uint64. 
""" return Gwei(max(1, sum([state.validators[index].effective_balance for index in indices]))) ``` From c91eee6bdf046a800cd2c0155f8fd1dd5ff976bd Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 13:20:57 -0600 Subject: [PATCH 183/194] revert fork choice store.blocks to store BeaconBlock --- specs/phase0/fork-choice.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/fork-choice.md b/specs/phase0/fork-choice.md index 79d37f28d..0ccabec75 100644 --- a/specs/phase0/fork-choice.md +++ b/specs/phase0/fork-choice.md @@ -83,7 +83,7 @@ class Store(object): justified_checkpoint: Checkpoint finalized_checkpoint: Checkpoint best_justified_checkpoint: Checkpoint - blocks: Dict[Root, BeaconBlockHeader] = field(default_factory=dict) + blocks: Dict[Root, BeaconBlock] = field(default_factory=dict) block_states: Dict[Root, BeaconState] = field(default_factory=dict) checkpoint_states: Dict[Checkpoint, BeaconState] = field(default_factory=dict) latest_messages: Dict[ValidatorIndex, LatestMessage] = field(default_factory=dict) From 415544bf038315c82d40035c79a3872c38efec75 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 14:41:33 -0600 Subject: [PATCH 184/194] modify gossip topics to use ForkDigest --- specs/phase0/beacon-chain.md | 26 +++++++++++++++++++++- specs/phase0/p2p-interface.md | 42 +++++++++++------------------------ 2 files changed, 38 insertions(+), 30 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index ce29ceda3..489e9b55f 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -24,6 +24,7 @@ - [Containers](#containers) - [Misc dependencies](#misc-dependencies) - [`Fork`](#fork) + - [`ForkData`](#forkdata) - [`Checkpoint`](#checkpoint) - [`Validator`](#validator) - [`AttestationData`](#attestationdata) @@ -75,6 +76,7 @@ - [`compute_epoch_at_slot`](#compute_epoch_at_slot) - [`compute_start_slot_at_epoch`](#compute_start_slot_at_epoch) - 
[`compute_activation_exit_epoch`](#compute_activation_exit_epoch) + - [`compute_fork_data_root`](#compute_fork_data_root) - [`compute_domain`](#compute_domain) - [`compute_signing_root`](#compute_signing_root) - [Beacon state accessors](#beacon-state-accessors) @@ -286,6 +288,14 @@ class Fork(Container): epoch: Epoch # Epoch of latest fork ``` +#### `ForkData` + +```python +class ForkData(Container): + current_version: Version + genesis_root: Root +``` + #### `Checkpoint` ```python @@ -794,6 +804,19 @@ def compute_activation_exit_epoch(epoch: Epoch) -> Epoch: return Epoch(epoch + 1 + MAX_SEED_LOOKAHEAD) ``` +#### `compute_fork_data_root` + +```python +def compute_fork_data_root(current_version: Version, genesis_validators_root: Root) -> Root: + """ + Return the fork digest for the ``current_fork_version`` and ``genesis_validators_root`` + """ + return hash_tree_root(ForkData( + current_version=current_version, + genesis_validators_root=genesis_validators_root, + )) +``` + #### `compute_domain` ```python @@ -805,7 +828,8 @@ def compute_domain(domain_type: DomainType, fork_version: Optional[Version]=None fork_version = GENESIS_FORK_VERSION if genesis_root is None: genesis_root = Root() # all bytes zero by default - return Domain(domain_type + fork_version + genesis_root[:24]) + fork_data_root = compute_fork_data_root(fork_version, genesis_root) + return Domain(domain_type + fork_data_root[:28]) ``` #### `compute_signing_root` diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 7e7d38341..8b6423277 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -219,18 +219,14 @@ The following gossipsub [parameters](https://github.com/libp2p/specs/tree/master ### Topics and messages -Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/ForkVersion/Name/Encoding`. 
This defines both the type of data being sent on the topic and how the data field of the message is encoded. +Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/ForkDigest/Name/Encoding`. This defines both the type of data being sent on the topic and how the data field of the message is encoded. -- `ForkVersion` - the hex-encoded bytes from `state.fork.current_version` of the head state of the client, as also seen in `Status.head_fork_version`. +- `ForkDigest` - the hex-encoded bytes of `ForkDigest(compute_fork_data_root(current_fork_version, genesis_validators_root)[:4])` where + - `current_fork_version` is the fork version of the epoch of the message to be sent on the topic + - `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` - `Name` - see table below - `Encoding` - the encoding strategy describes a specific representation of bytes that will be transmitted over the wire. See the [Encodings](#Encoding-strategies) section for further details. -The fork version is hex-encoded using the following scheme: -```python - ForkVersion = ''.join('{:02x}'.format(x) for x in state.fork.current_version) -``` -For example, the fork version `Version('0x0001020a')` will be encoded as `0001020a`. - Each gossipsub [message](https://github.com/libp2p/go-libp2p-pubsub/blob/master/pb/rpc.proto#L17-L24) has a maximum size of `GOSSIP_MAX_SIZE`. Clients MUST reject (fail validation) messages that are over this size limit. Likewise, clients MUST NOT emit or propagate messages larger than this limit. The `message-id` of a gossipsub message MUST be: @@ -260,7 +256,7 @@ When processing incoming gossip, clients MAY descore or disconnect peers who fai #### Global topics -There are two primary global topics used to propagate beacon blocks and aggregate attestations to all nodes on the network. 
Their `TopicName`s are: +There are two primary global topics used to propagate beacon blocks and aggregate attestations to all nodes on the network. Their `Name`s are: - `beacon_block` - This topic is used solely for propagating new signed beacon blocks to all nodes on the networks. Signed blocks are sent in their entirety. The following validations MUST pass before forwarding the `signed_beacon_block` on the network - The block is not from a future slot (with a `MAXIMUM_GOSSIP_CLOCK_DISPARITY` allowance) -- i.e. validate that `signed_beacon_block.message.slot <= current_slot` (a client MAY queue future blocks for processing at the appropriate slot). @@ -278,7 +274,7 @@ There are two primary global topics used to propagate beacon blocks and aggregat - The aggregator signature, `signed_aggregate_and_proof.signature`, is valid. - The signature of `aggregate` is valid. -Additional global topics are used to propagate lower frequency validator messages. Their `TopicName`s are: +Additional global topics are used to propagate lower frequency validator messages. Their `Name`s are: - `voluntary_exit` - This topic is used solely for propagating signed voluntary validator exits to proposers on the network. Signed voluntary exits are sent in their entirety. The following validations MUST pass before forwarding the `signed_voluntary_exit` on to the network - The voluntary exit is the first valid voluntary exit received for the validator with index `signed_voluntary_exit.message.validator_index`. @@ -293,7 +289,7 @@ Additional global topics are used to propagate lower frequency validator message #### Attestation subnets -Attestation subnets are used to propagate unaggregated attestations to subsections of the network. Their `TopicName`s are: +Attestation subnets are used to propagate unaggregated attestations to subsections of the network. 
Their `Name`s are: - `committee_index{subnet_id}_beacon_attestation` - These topics are used to propagate unaggregated attestations to the subnet `subnet_id` (typically beacon and persistent committees) to be aggregated before being gossiped to `beacon_aggregate_and_proof`. The following validations MUST pass before forwarding the `attestation` on the subnet. - The attestation's committee index (`attestation.data.index`) is for the correct subnet. @@ -654,25 +650,11 @@ ENRs MUST carry a generic `eth2` key with an 16-byte value of the node's current |:-------------|:--------------------| | `eth2` | SSZ `ENRForkID` | -First we define `current_fork_data` as the following SSZ encoded object - -``` -( - current_fork_version: Version - genesis_validators_root: Root -) -``` - -where - -* `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) -* `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` - Specifically, the value of the `eth2` key MUST be the following SSZ encoded object (`ENRForkID`) ``` ( - current_fork_digest: ForkDigest + fork_digest: ForkDigest next_fork_version: Version next_fork_epoch: Epoch ) @@ -680,13 +662,15 @@ Specifically, the value of the `eth2` key MUST be the following SSZ encoded obje where the fields of `ENRForkID` are defined as -* `current_fork_digest` is `ForkDigest(hash_tree_root(current_fork_data)[:4])` +* `fork_digest` is `ForkDigest(compute_fork_data_root(current_fork_version, genesis_validators_root)[:4])` where + * `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) + * `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` * `next_fork_version` is the fork version corresponding to the next planned hard fork at a future epoch. 
If no future fork is planned, set `next_fork_version = current_fork_version` to signal this fact * `next_fork_epoch` is the epoch at which the next fork is planned and the `current_fork_version` will be updated. If no future fork is planned, set `next_fork_epoch = FAR_FUTURE_EPOCH` to signal this fact -Clients SHOULD connect to peers with `current_fork_digest`, `next_fork_version`, and `next_fork_epoch` that match local values. +Clients SHOULD connect to peers with `fork_digest`, `next_fork_version`, and `next_fork_epoch` that match local values. -Clients MAY connect to peers with the same `current_fork_version` but a different `next_fork_version`/`next_fork_epoch`. Unless `ENRForkID` is manually updated to matching prior to the earlier `next_fork_epoch` of the two clients, these type of connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`. +Clients MAY connect to peers with the same `fork_digest` but a different `next_fork_version`/`next_fork_epoch`. Unless `ENRForkID` is manually updated to matching prior to the earlier `next_fork_epoch` of the two clients, these type of connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`. 
##### General capabilities From fccd3ab1ce13bc54f35bbe760637176d8e71f973 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 15:04:44 -0600 Subject: [PATCH 185/194] clarify hex-encoded bytes string representation for fork digest in gossip topic Co-Authored-By: Diederik Loerakker --- specs/phase0/p2p-interface.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 8b6423277..580e42183 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -221,7 +221,7 @@ The following gossipsub [parameters](https://github.com/libp2p/specs/tree/master Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/ForkDigest/Name/Encoding`. This defines both the type of data being sent on the topic and how the data field of the message is encoded. -- `ForkDigest` - the hex-encoded bytes of `ForkDigest(compute_fork_data_root(current_fork_version, genesis_validators_root)[:4])` where +- `ForkDigest` - the lowercase hex-encoded (no "0x" prefix) bytes of `ForkDigest(compute_fork_data_root(current_fork_version, genesis_validators_root)[:4])` where - `current_fork_version` is the fork version of the epoch of the message to be sent on the topic - `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` - `Name` - see table below From baee6731241b21244ec5f9ca44567b96108cfe0d Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Tue, 10 Mar 2020 15:09:15 -0600 Subject: [PATCH 186/194] add note about preparing for subnet backbone for forks --- specs/phase0/validator.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index 009816c2e..b74a57486 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -519,6 +519,8 @@ Because Phase 0 does not have shards and thus does 
not have Shard Committees, th * Maintain advertisement of the randomly selected subnets in their node's ENR `attnets` entry by setting the randomly selected `subnet_id` bits to `True` (e.g. `ENR["attnets"][subnet_id] = True`) for all persistent attestation subnets * Set the lifetime of each random subscription to a random number of epochs between `EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION` and `2 * EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION]`. At the end of life for a subscription, select a new random subnet, update subnet subscriptions, and publish an updated ENR +*Note*: When preparing for a hard fork, a validator must select and subscribe to random subnets of the future fork versioning at least `EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION` epochs in advance of the fork. These new subnets for the fork are maintained in addition to those for the current fork until the fork occurs. After the fork occurs, let the subnets from the previous fork reach end of life with no replacements. + ## How to avoid slashing "Slashing" is the burning of some amount of validator funds and immediate ejection from the active validator set. In Phase 0, there are two ways in which funds can be slashed: [proposer slashing](#proposer-slashing) and [attester slashing](#attester-slashing). Although being slashed has serious repercussions, it is simple enough to avoid being slashed all together by remaining _consistent_ with respect to the messages a validator has previously signed. 
From 0881e21dc5461af1d04b5bea6b2729c03791b72a Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 11:51:31 -0600 Subject: [PATCH 187/194] cleanup gossip topic fork digest based on PR feedback --- setup.py | 2 +- specs/phase0/beacon-chain.md | 26 ++++++++++++++++++++------ specs/phase0/p2p-interface.md | 34 ++++++++++++++++++---------------- 3 files changed, 39 insertions(+), 23 deletions(-) diff --git a/setup.py b/setup.py index 6dd4de861..5edff0164 100644 --- a/setup.py +++ b/setup.py @@ -106,7 +106,7 @@ SSZObject = TypeVar('SSZObject', bound=View) PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0 from eth2spec.config.config_util import apply_constants_config from typing import ( - Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional + Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable ) from dataclasses import ( diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 489e9b55f..b14da493b 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -77,6 +77,7 @@ - [`compute_start_slot_at_epoch`](#compute_start_slot_at_epoch) - [`compute_activation_exit_epoch`](#compute_activation_exit_epoch) - [`compute_fork_data_root`](#compute_fork_data_root) + - [`compute_fork_digest`](#compute_fork_digest) - [`compute_domain`](#compute_domain) - [`compute_signing_root`](#compute_signing_root) - [Beacon state accessors](#beacon-state-accessors) @@ -293,7 +294,7 @@ class Fork(Container): ```python class ForkData(Container): current_version: Version - genesis_root: Root + genesis_validators_root: Root ``` #### `Checkpoint` @@ -809,7 +810,8 @@ def compute_activation_exit_epoch(epoch: Epoch) -> Epoch: ```python def compute_fork_data_root(current_version: Version, genesis_validators_root: Root) -> Root: """ - Return the fork digest for the ``current_fork_version`` and ``genesis_validators_root`` + Return the 32-byte fork data root for the ``current_version`` and 
``genesis_validators_root``. + This is used primarily in signature domains to avoid collisions across forks/chains. """ return hash_tree_root(ForkData( current_version=current_version, @@ -817,18 +819,30 @@ def compute_fork_data_root(current_version: Version, genesis_validators_root: Ro )) ``` +#### `compute_fork_digest` + +```python +def compute_fork_digest(current_version: Version, genesis_validators_root: Root) -> Root: + """ + Return the 4-byte fork digest for the ``current_version`` and ``genesis_validators_root``. + This is a digest primarily used for domain separation on the p2p layer. + 4-bytes suffices for practical separation of forks/chains. + """ + return ForkDigest(compute_fork_data_root(current_version, genesis_validators_root)[:4]) +``` + #### `compute_domain` ```python -def compute_domain(domain_type: DomainType, fork_version: Optional[Version]=None, genesis_root: Root=None) -> Domain: +def compute_domain(domain_type: DomainType, fork_version: Version=None, genesis_validators_root: Root=None) -> Domain: """ Return the domain for the ``domain_type`` and ``fork_version``. 
""" if fork_version is None: fork_version = GENESIS_FORK_VERSION - if genesis_root is None: - genesis_root = Root() # all bytes zero by default - fork_data_root = compute_fork_data_root(fork_version, genesis_root) + if genesis_validators_root is None: + genesis_validators_root = Root() # all bytes zero by default + fork_data_root = compute_fork_data_root(fork_version, genesis_validators_root) return Domain(domain_type + fork_data_root[:28]) ``` diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 580e42183..5a4464f5c 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -219,9 +219,9 @@ The following gossipsub [parameters](https://github.com/libp2p/specs/tree/master ### Topics and messages -Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/ForkDigest/Name/Encoding`. This defines both the type of data being sent on the topic and how the data field of the message is encoded. +Topics are plain UTF-8 strings and are encoded on the wire as determined by protobuf (gossipsub messages are enveloped in protobuf messages). Topic strings have form: `/eth2/ForkDigestValue/Name/Encoding`. This defines both the type of data being sent on the topic and how the data field of the message is encoded. -- `ForkDigest` - the lowercase hex-encoded (no "0x" prefix) bytes of `ForkDigest(compute_fork_data_root(current_fork_version, genesis_validators_root)[:4])` where +- `ForkDigestValue` - the lowercase hex-encoded (no "0x" prefix) bytes of `compute_fork_digest(current_fork_version, genesis_validators_root)` where - `current_fork_version` is the fork version of the epoch of the message to be sent on the topic - `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` - `Name` - see table below @@ -423,7 +423,7 @@ Here, `result` represents the 1-byte response code. 
The token of the negotiated protocol ID specifies the type of encoding to be used for the req/resp interaction. Two values are possible at this time: -- `ssz`: the contents are [SSZ-encoded](../../ssz/simple-serialize.md). This encoding type MUST be supported by all clients. For objects containing a single field, only the field is SSZ-encoded not a container with a single field. For example, the `BeaconBlocksByRoot` request is an SSZ-encoded list of `Bytes32`'s. +- `ssz`: the contents are [SSZ-encoded](../../ssz/simple-serialize.md). This encoding type MUST be supported by all clients. For objects containing a single field, only the field is SSZ-encoded not a container with a single field. For example, the `BeaconBlocksByRoot` request is an SSZ-encoded list of `Root`'s. - `ssz_snappy`: The contents are SSZ-encoded and then compressed with [Snappy](https://github.com/google/snappy). MAY be supported in the interoperability testnet; MUST be supported in mainnet. #### SSZ-encoding strategy (with or without Snappy) @@ -475,16 +475,18 @@ constituents individually as `response_chunk`s. For example, the Request, Response Content: ``` ( - head_fork_version: Bytes4 - finalized_root: Bytes32 - finalized_epoch: uint64 - head_root: Bytes32 - head_slot: uint64 + fork_digest: ForkDigest + finalized_root: Root + finalized_epoch: Epoch + head_root: Root + head_slot: Slot ) ``` The fields are, as seen by the client at the time of sending the message: -- `head_fork_version`: The beacon_state `Fork` version. +- `fork_digest`: The node's `ForkDigest` (`compute_fork_digest(current_fork_version, genesis_validators_root)`) where + - `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) + - `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` - `finalized_root`: `state.finalized_checkpoint.root` for the state corresponding to the head block. 
- `finalized_epoch`: `state.finalized_checkpoint.epoch` for the state corresponding to the head block. - `head_root`: The hash_tree_root root of the current head block. @@ -498,7 +500,7 @@ The response MUST consist of a single `response_chunk`. Clients SHOULD immediately disconnect from one another following the handshake above under the following conditions: -1. If `head_fork_version` does not match the expected fork version at the epoch of the `head_slot`, since the client’s chain is on another fork. `head_fork_version` can also be used to segregate testnets. +1. If `fork_digest` does not match the node's local `fork_digest`, since the client’s chain is on another fork. 2. If the (`finalized_root`, `finalized_epoch`) shared by the peer is not in the client's chain at the expected epoch. For example, if Peer 1 sends (root, epoch) of (A, 5) and Peer 2 sends (B, 3) but Peer 1 has root C at epoch 3, then Peer 1 would disconnect because it knows that their chains are irreparably disjoint. Once the handshake completes, the client with the lower `finalized_epoch` or `head_slot` (if the clients have equal `finalized_epoch`s) SHOULD request beacon blocks from its counterparty via the `BeaconBlocksByRange` request. @@ -536,7 +538,7 @@ The response MUST consist of a single `response_chunk`. 
Request Content: ``` ( - start_slot: uint64 + start_slot: Slot count: uint64 step: uint64 ) @@ -575,7 +577,7 @@ Request Content: ``` ( - []Bytes32 + []Root ) ``` @@ -662,7 +664,7 @@ Specifically, the value of the `eth2` key MUST be the following SSZ encoded obje where the fields of `ENRForkID` are defined as -* `fork_digest` is `ForkDigest(compute_fork_data_root(current_fork_version, genesis_validators_root)[:4])` where +* `fork_digest` is `compute_fork_digest(current_fork_version, genesis_validators_root)` where * `current_fork_version` is the fork version at the node's current epoch defined by the wall-clock time (not necessarily the epoch to which the node is sync) * `genesis_validators_root` is the static `Root` found in `state.genesis_validators_root` * `next_fork_version` is the fork version corresponding to the next planned hard fork at a future epoch. If no future fork is planned, set `next_fork_version = current_fork_version` to signal this fact @@ -670,7 +672,7 @@ where the fields of `ENRForkID` are defined as Clients SHOULD connect to peers with `fork_digest`, `next_fork_version`, and `next_fork_epoch` that match local values. -Clients MAY connect to peers with the same `fork_digest` but a different `next_fork_version`/`next_fork_epoch`. Unless `ENRForkID` is manually updated to matching prior to the earlier `next_fork_epoch` of the two clients, these type of connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`. +Clients MAY connect to peers with the same `fork_digest` but a different `next_fork_version`/`next_fork_epoch`. Unless `ENRForkID` is manually updated to matching prior to the earlier `next_fork_epoch` of the two clients, these connecting clients will be unable to successfully interact starting at the earlier `next_fork_epoch`. 
##### General capabilities @@ -897,9 +899,9 @@ Although this method will be sufficient for early phases of Eth2, we aim to use Fork versions are to be manually updated (likely via incrementing) at each hard fork. This is to provide native domain separation for signatures as well as to aid in usefulness for identitying peers (via ENRs) and versioning network protocols (e.g. using fork version to naturally version gossipsub topics). -`BeaconState.genesis_validators_root` is mixed into signature and ENR fork domains to aid in the ease of domain separation between chains. This allows fork versions to safely be reused across chains except for the case of contentious forks using the same genesis. In these cases, extra care should be taken to isolate fork versions (e.g. flip a high order bit in all future versions of one of the chains). +`BeaconState.genesis_validators_root` is mixed into signature and ENR fork domains (`ForkDigest`) to aid in the ease of domain separation between chains. This allows fork versions to safely be reused across chains except for the case of contentious forks using the same genesis. In these cases, extra care should be taken to isolate fork versions (e.g. flip a high order bit in all future versions of one of the chains). -A node locally stores all previous and future planned fork versions along with the each fork epoch. This allows for handling sync starting from past forks/epochs and for connections to safely be made with peers syncing from past forks/epochs. +A node locally stores all previous and future planned fork versions along with the each fork epoch. This allows for handling sync and processing messages starting from past forks/epochs. 
## Req/Resp From 36e48fba99be3db6fb64a25497a226f8bebab819 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 12:46:47 -0600 Subject: [PATCH 188/194] enforce must match target to match head to avoid perverse incentive path --- setup.py | 12 +++++++++- specs/phase0/beacon-chain.md | 2 +- .../test_process_rewards_and_penalties.py | 23 +++++++++++++++++++ 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 6dd4de861..084f72901 100644 --- a/setup.py +++ b/setup.py @@ -182,7 +182,17 @@ get_active_validator_indices = cache_this( _get_beacon_committee = get_beacon_committee get_beacon_committee = cache_this( lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index), - _get_beacon_committee)''' + _get_beacon_committee) + +_get_matching_target_attestations = get_matching_target_attestations +get_matching_target_attestations = cache_this( + lambda state, epoch: (state.hash_tree_root(), epoch), + _get_matching_target_attestations) + +_get_matching_head_attestations = get_matching_head_attestations +get_matching_head_attestations = cache_this( + lambda state, epoch: (state.hash_tree_root(), epoch), + _get_matching_head_attestations)''' def objects_to_spec(spec_object: SpecObject, imports: str, fork: str) -> str: diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index 91cb9f714..ff80360c6 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -1235,7 +1235,7 @@ def get_matching_target_attestations(state: BeaconState, epoch: Epoch) -> Sequen ```python def get_matching_head_attestations(state: BeaconState, epoch: Epoch) -> Sequence[PendingAttestation]: return [ - a for a in get_matching_source_attestations(state, epoch) + a for a in get_matching_target_attestations(state, epoch) if a.data.beacon_block_root == get_block_root_at_slot(state, a.data.slot) ] ``` diff --git 
a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py index b4f50179e..111033799 100644 --- a/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py +++ b/tests/core/pyspec/eth2spec/test/phase_0/epoch_processing/test_process_rewards_and_penalties.py @@ -97,6 +97,29 @@ def test_full_attestations(spec, state): assert state.balances[index] < pre_state.balances[index] +@with_all_phases +@spec_state_test +def test_full_attestations_random_incorrect_fields(spec, state): + attestations = prepare_state_with_full_attestations(spec, state) + for i, attestation in enumerate(state.previous_epoch_attestations): + if i % 3 == 0: + # Mess up some head votes + attestation.data.beacon_block_root = b'\x56' * 32 + if i % 3 == 1: + # Message up some target votes + attestation.data.target.root = b'\x23' * 32 + if i % 3 == 2: + # Keep some votes 100% correct + pass + + yield from run_process_rewards_and_penalties(spec, state) + + attesting_indices = spec.get_unslashed_attesting_indices(state, attestations) + assert len(attesting_indices) > 0 + # No balance checks, non-trivial base on group rewards + # Mainly for consensus tests + + @with_all_phases @spec_test @with_custom_state(balances_fn=misc_balances, threshold_fn=default_activation_threshold) From 4bcdf91e8b1cf0e36d26b30ddf6d52144b9eb2f1 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 13:24:30 -0600 Subject: [PATCH 189/194] Apply suggestions from code review PR feedback Co-Authored-By: Hsiao-Wei Wang --- specs/phase0/beacon-chain.md | 2 +- specs/phase0/validator.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/specs/phase0/beacon-chain.md b/specs/phase0/beacon-chain.md index b14da493b..5b5a13f93 100644 --- a/specs/phase0/beacon-chain.md +++ b/specs/phase0/beacon-chain.md @@ -822,7 +822,7 @@ def 
compute_fork_data_root(current_version: Version, genesis_validators_root: Ro #### `compute_fork_digest` ```python -def compute_fork_digest(current_version: Version, genesis_validators_root: Root) -> Root: +def compute_fork_digest(current_version: Version, genesis_validators_root: Root) -> ForkDigest: """ Return the 4-byte fork digest for the ``current_version`` and ``genesis_validators_root``. This is a digest primarily used for domain separation on the p2p layer. diff --git a/specs/phase0/validator.md b/specs/phase0/validator.md index b74a57486..d082ed04e 100644 --- a/specs/phase0/validator.md +++ b/specs/phase0/validator.md @@ -519,7 +519,7 @@ Because Phase 0 does not have shards and thus does not have Shard Committees, th * Maintain advertisement of the randomly selected subnets in their node's ENR `attnets` entry by setting the randomly selected `subnet_id` bits to `True` (e.g. `ENR["attnets"][subnet_id] = True`) for all persistent attestation subnets * Set the lifetime of each random subscription to a random number of epochs between `EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION` and `2 * EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION]`. At the end of life for a subscription, select a new random subnet, update subnet subscriptions, and publish an updated ENR -*Note*: When preparing for a hard fork, a validator must select and subscribe to random subnets of the future fork versioning at least `EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION` epochs in advance of the fork. These new subnets for the fork are maintained in addition to those for the current fork until the fork occurs. After the fork occurs, let the subnets from the previous fork reach end of life with no replacements. +*Note*: When preparing for a hard fork, a validator must select and subscribe to random subnets of the future fork versioning at least `EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION` epochs in advance of the fork. 
These new subnets for the fork are maintained in addition to those for the current fork until the fork occurs. After the fork occurs, let the subnets from the previous fork reach the end of life with no replacements. ## How to avoid slashing From a49fc814eb80c850c1161e81e8c0ad2cfe1f08e1 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 14:52:31 -0600 Subject: [PATCH 190/194] change 'gasper' to 'combining ghost and casper' for paper description --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 83dbbd3fe..b6e25d570 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ The following are the broad design goals for Ethereum 2.0: * [Design Rationale](https://notes.ethereum.org/s/rkhCgQteN#) * [Phase 0 Onboarding Document](https://notes.ethereum.org/s/Bkn3zpwxB) -* [Gasper paper](https://arxiv.org/abs/2003.03052) +* [Combining GHOST and Casper paper](https://arxiv.org/abs/2003.03052) ## For spec contributors From 47bbffa0d6a0c26a150f3fc61ebf8ba09f55ff8b Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 15:03:14 -0600 Subject: [PATCH 191/194] 'get_checkpoint_store' -> 'get_forkchoice_store' typo --- specs/phase0/fork-choice.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/specs/phase0/fork-choice.md b/specs/phase0/fork-choice.md index 0ccabec75..aeef277f2 100644 --- a/specs/phase0/fork-choice.md +++ b/specs/phase0/fork-choice.md @@ -42,7 +42,7 @@ This document is the beacon chain fork choice spec, part of Ethereum 2.0 Phase 0 ## Fork choice -The head block root associated with a `store` is defined as `get_head(store)`. At genesis, let `store = get_checkpoint_store(genesis_state)` and update `store` by running: +The head block root associated with a `store` is defined as `get_head(store)`. 
At genesis, let `store = get_forkchoice_store(genesis_state)` and update `store` by running: - `on_tick(time)` whenever `time > store.time` where `time` is the current Unix time - `on_block(block)` whenever a block `block: SignedBeaconBlock` is received From a612df1119c06921fa4ad11129a00f44b7056070 Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 15:51:07 -0600 Subject: [PATCH 192/194] minor typos and clarifications in fork choice --- specs/phase0/p2p-interface.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/specs/phase0/p2p-interface.md b/specs/phase0/p2p-interface.md index 5a4464f5c..77eeb21df 100644 --- a/specs/phase0/p2p-interface.md +++ b/specs/phase0/p2p-interface.md @@ -97,7 +97,7 @@ It consists of four main sections: - [Why do we version protocol strings with ordinals instead of semver?](#why-do-we-version-protocol-strings-with-ordinals-instead-of-semver) - [Why is it called Req/Resp and not RPC?](#why-is-it-called-reqresp-and-not-rpc) - [Why do we allow empty responses in block requests?](#why-do-we-allow-empty-responses-in-block-requests) - - [Why does `BeaconBlocksByRange` let the server choose which chain to send blocks from?](#why-does-beaconblocksbyrange-let-the-server-choose-which-chain-to-send-blocks-from) + - [Why does `BeaconBlocksByRange` let the server choose which branch to send blocks from?](#why-does-beaconblocksbyrange-let-the-server-choose-which-branch-to-send-blocks-from) - [What's the effect of empty slots on the sync algorithm?](#whats-the-effect-of-empty-slots-on-the-sync-algorithm) - [Discovery](#discovery) - [Why are we using discv5 and not libp2p Kademlia DHT?](#why-are-we-using-discv5-and-not-libp2p-kademlia-dht) @@ -646,7 +646,7 @@ Nonetheless, ENRs MUST carry a generic `eth2` key with nil value, denoting that ##### `eth2` field -ENRs MUST carry a generic `eth2` key with an 16-byte value of the node's current fork version, next fork version, and next fork epoch to ensure connections 
are made with peers on the intended eth2 network. +ENRs MUST carry a generic `eth2` key with an 16-byte value of the node's current fork digest, next fork version, and next fork epoch to ensure connections are made with peers on the intended eth2 network. | Key | Value | |:-------------|:--------------------| @@ -973,17 +973,17 @@ Assuming option 0 with no special `null` encoding, consider a request for slots Failing to provide blocks that nodes "should" have is reason to trust a peer less - for example, if a particular peer gossips a block, it should have access to its parent. If a request for the parent fails, it's indicative of poor peer quality since peers should validate blocks before gossiping them. -### Why does `BeaconBlocksByRange` let the server choose which chain to send blocks from? +### Why does `BeaconBlocksByRange` let the server choose which branch to send blocks from? When connecting, the `Status` message gives an idea about the sync status of a particular peer, but this changes over time. By the time a subsequent `BeaconBlockByRange` request is processed, the information may be stale, and the responding side might have moved on to a new finalization point and pruned blocks around the previous head and finalized blocks. -To avoid this race condition, we allow the responding side to choose which chain to send to the requesting client. The requesting client then goes on to validate the blocks and incorporate them in their own database - because they follow the same rules, they should at this point arrive at the same chain. +To avoid this race condition, we allow the responding side to choose which branch to send to the requesting client. The requesting client then goes on to validate the blocks and incorporate them in their own database - because they follow the same rules, they should at this point arrive at the same canonical chain. ### What's the effect of empty slots on the sync algorithm? 
-When syncing one can only tell that a slot has been skipped on a particular chain by examining subsequent blocks and analyzing the graph formed by the parent root. Because the server side may choose to omit blocks in the response for any reason, clients must validate the graph and be prepared to fill in gaps. +When syncing one can only tell that a slot has been skipped on a particular branch by examining subsequent blocks and analyzing the graph formed by the parent root. Because the server side may choose to omit blocks in the response for any reason, clients must validate the graph and be prepared to fill in gaps. -For example, if a peer responds with blocks [2, 3] when asked for [2, 3, 4], clients may not assume that block 4 doesn't exist - it merely means that the responding peer did not send it (they may not have it yet or may maliciously be trying to hide it) and successive blocks will be needed to determine if there exists a block at slot 4 in this particular chain. +For example, if a peer responds with blocks [2, 3] when asked for [2, 3, 4], clients may not assume that block 4 doesn't exist - it merely means that the responding peer did not send it (they may not have it yet or may maliciously be trying to hide it) and successive blocks will be needed to determine if there exists a block at slot 4 in this particular branch. 
## Discovery From 22620bfe5dec9b75acc686d025c375ad72dfeafc Mon Sep 17 00:00:00 2001 From: protolambda Date: Wed, 11 Mar 2020 23:18:06 +0100 Subject: [PATCH 193/194] Fix generic SSZ tests, update remerkleable with small bugfix --- setup.py | 2 +- .../ssz_generic/ssz_basic_vector.py | 17 +++++++---- tests/generators/ssz_generic/ssz_container.py | 8 ++--- tests/generators/ssz_generic/ssz_test_case.py | 4 +-- tests/generators/ssz_generic/ssz_uints.py | 29 +++++++++++-------- 5 files changed, 35 insertions(+), 25 deletions(-) diff --git a/setup.py b/setup.py index 6dd4de861..e0f2880d9 100644 --- a/setup.py +++ b/setup.py @@ -470,7 +470,7 @@ setup( "pycryptodome==3.9.4", "py_ecc==2.0.0", "dataclasses==0.6", - "remerkleable==0.1.11", + "remerkleable==0.1.12", "ruamel.yaml==0.16.5" ] ) diff --git a/tests/generators/ssz_generic/ssz_basic_vector.py b/tests/generators/ssz_generic/ssz_basic_vector.py index 6e7e08daa..51dfd4ba1 100644 --- a/tests/generators/ssz_generic/ssz_basic_vector.py +++ b/tests/generators/ssz_generic/ssz_basic_vector.py @@ -1,19 +1,19 @@ from ssz_test_case import invalid_test_case, valid_test_case -from eth2spec.utils.ssz.ssz_typing import boolean, uint8, uint16, uint32, uint64, uint128, uint256, Vector, BasicType +from eth2spec.utils.ssz.ssz_typing import boolean, uint8, uint16, uint32, uint64, uint128, uint256, Vector, BasicView from eth2spec.utils.ssz.ssz_impl import serialize from random import Random -from typing import Dict +from typing import Dict, Type from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object -def basic_vector_case_fn(rng: Random, mode: RandomizationMode, elem_type: BasicType, length: int): +def basic_vector_case_fn(rng: Random, mode: RandomizationMode, elem_type: Type[BasicView], length: int): return get_random_ssz_object(rng, Vector[elem_type, length], max_bytes_length=length * 8, max_list_length=length, mode=mode, chaos=False) -BASIC_TYPES: Dict[str, BasicType] = { +BASIC_TYPES: Dict[str, Type[BasicView]] 
= { 'bool': boolean, 'uint8': uint8, 'uint16': uint16, @@ -49,8 +49,13 @@ def invalid_cases(): for length in [1, 2, 3, 4, 5, 8, 16, 31, 512, 513]: yield f'vec_{name}_{length}_nil', invalid_test_case(lambda: b'') for mode in random_modes: - yield f'vec_{name}_{length}_{mode.to_name()}_one_less', \ - invalid_test_case(lambda: serialize(basic_vector_case_fn(rng, mode, typ, length - 1))) + if length == 1: + # empty bytes, no elements. It may seem valid, but empty fixed-size elements are not valid SSZ. + yield f'vec_{name}_{length}_{mode.to_name()}_one_less', \ + invalid_test_case(lambda: b"") + else: + yield f'vec_{name}_{length}_{mode.to_name()}_one_less', \ + invalid_test_case(lambda: serialize(basic_vector_case_fn(rng, mode, typ, length - 1))) yield f'vec_{name}_{length}_{mode.to_name()}_one_more', \ invalid_test_case(lambda: serialize(basic_vector_case_fn(rng, mode, typ, length + 1))) yield f'vec_{name}_{length}_{mode.to_name()}_one_byte_less', \ diff --git a/tests/generators/ssz_generic/ssz_container.py b/tests/generators/ssz_generic/ssz_container.py index ecb2d8c34..cf7c33839 100644 --- a/tests/generators/ssz_generic/ssz_container.py +++ b/tests/generators/ssz_generic/ssz_container.py @@ -1,9 +1,9 @@ from ssz_test_case import invalid_test_case, valid_test_case -from eth2spec.utils.ssz.ssz_typing import SSZType, Container, byte, uint8, uint16, \ +from eth2spec.utils.ssz.ssz_typing import View, Container, byte, uint8, uint16, \ uint32, uint64, List, ByteList, Vector, Bitvector, Bitlist from eth2spec.utils.ssz.ssz_impl import serialize from random import Random -from typing import Dict, Tuple, Sequence, Callable +from typing import Dict, Tuple, Sequence, Callable, Type from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object @@ -46,14 +46,14 @@ class BitsStruct(Container): E: Bitvector[8] -def container_case_fn(rng: Random, mode: RandomizationMode, typ: SSZType): +def container_case_fn(rng: Random, mode: RandomizationMode, typ: Type[View]): 
return get_random_ssz_object(rng, typ, max_bytes_length=2000, max_list_length=2000, mode=mode, chaos=False) -PRESET_CONTAINERS: Dict[str, Tuple[SSZType, Sequence[int]]] = { +PRESET_CONTAINERS: Dict[str, Tuple[Type[View], Sequence[int]]] = { 'SingleFieldTestStruct': (SingleFieldTestStruct, []), 'SmallTestStruct': (SmallTestStruct, []), 'FixedTestStruct': (FixedTestStruct, []), diff --git a/tests/generators/ssz_generic/ssz_test_case.py b/tests/generators/ssz_generic/ssz_test_case.py index 42955bd3e..6cef4960b 100644 --- a/tests/generators/ssz_generic/ssz_test_case.py +++ b/tests/generators/ssz_generic/ssz_test_case.py @@ -1,10 +1,10 @@ from eth2spec.utils.ssz.ssz_impl import serialize, hash_tree_root from eth2spec.debug.encode import encode -from eth2spec.utils.ssz.ssz_typing import SSZValue, Container +from eth2spec.utils.ssz.ssz_typing import View from typing import Callable -def valid_test_case(value_fn: Callable[[], SSZValue]): +def valid_test_case(value_fn: Callable[[], View]): def case_fn(): value = value_fn() yield "value", "data", encode(value) diff --git a/tests/generators/ssz_generic/ssz_uints.py b/tests/generators/ssz_generic/ssz_uints.py index b21fb251c..896443f4c 100644 --- a/tests/generators/ssz_generic/ssz_uints.py +++ b/tests/generators/ssz_generic/ssz_uints.py @@ -1,12 +1,13 @@ from ssz_test_case import invalid_test_case, valid_test_case -from eth2spec.utils.ssz.ssz_typing import BasicType, uint8, uint16, uint32, uint64, uint128, uint256 +from eth2spec.utils.ssz.ssz_typing import BasicView, uint8, uint16, uint32, uint64, uint128, uint256 from random import Random +from typing import Type from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object -def uint_case_fn(rng: Random, mode: RandomizationMode, typ: BasicType): +def uint_case_fn(rng: Random, mode: RandomizationMode, typ: Type[BasicView]): return get_random_ssz_object(rng, typ, - max_bytes_length=typ.byte_len, + max_bytes_length=typ.type_byte_length(), max_list_length=1, 
mode=mode, chaos=False) @@ -17,21 +18,25 @@ UINT_TYPES = [uint8, uint16, uint32, uint64, uint128, uint256] def valid_cases(): rng = Random(1234) for uint_type in UINT_TYPES: - yield f'uint_{uint_type.byte_len * 8}_last_byte_empty', \ - valid_test_case(lambda: uint_type((2 ** ((uint_type.byte_len - 1) * 8)) - 1)) + byte_len = uint_type.type_byte_length() + yield f'uint_{byte_len * 8}_last_byte_empty', \ + valid_test_case(lambda: uint_type((2 ** ((byte_len - 1) * 8)) - 1)) for variation in range(5): for mode in [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]: - yield f'uint_{uint_type.byte_len * 8}_{mode.to_name()}_{variation}', \ + yield f'uint_{byte_len * 8}_{mode.to_name()}_{variation}', \ valid_test_case(lambda: uint_case_fn(rng, mode, uint_type)) def invalid_cases(): for uint_type in UINT_TYPES: - yield f'uint_{uint_type.byte_len * 8}_one_too_high', \ - invalid_test_case(lambda: (2 ** (uint_type.byte_len * 8)).to_bytes(uint_type.byte_len + 1, 'little')) + byte_len = uint_type.type_byte_length() + yield f'uint_{byte_len * 8}_one_too_high', \ + invalid_test_case(lambda: (2 ** (byte_len * 8)).to_bytes(byte_len + 1, 'little')) for uint_type in [uint8, uint16, uint32, uint64, uint128, uint256]: - yield f'uint_{uint_type.byte_len * 8}_one_byte_longer', \ - invalid_test_case(lambda: (2 ** (uint_type.byte_len * 8) - 1).to_bytes(uint_type.byte_len + 1, 'little')) + byte_len = uint_type.type_byte_length() + yield f'uint_{byte_len * 8}_one_byte_longer', \ + invalid_test_case(lambda: (2 ** (byte_len * 8) - 1).to_bytes(byte_len + 1, 'little')) for uint_type in [uint8, uint16, uint32, uint64, uint128, uint256]: - yield f'uint_{uint_type.byte_len * 8}_one_byte_shorter', \ - invalid_test_case(lambda: (2 ** ((uint_type.byte_len - 1) * 8) - 1).to_bytes(uint_type.byte_len - 1, 'little')) + byte_len = uint_type.type_byte_length() + yield f'uint_{byte_len * 8}_one_byte_shorter', \ + invalid_test_case(lambda: (2 ** ((byte_len - 1) * 8) - 
1).to_bytes(byte_len - 1, 'little')) From c894f5ecece827425c29aed94d122bf7d8f3f70d Mon Sep 17 00:00:00 2001 From: Danny Ryan Date: Wed, 11 Mar 2020 16:41:27 -0600 Subject: [PATCH 194/194] fork choice error note --- specs/phase0/fork-choice.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/specs/phase0/fork-choice.md b/specs/phase0/fork-choice.md index aeef277f2..c42609be0 100644 --- a/specs/phase0/fork-choice.md +++ b/specs/phase0/fork-choice.md @@ -94,6 +94,10 @@ class Store(object): The provided anchor-state will be regarded as a trusted state, to not roll back beyond. This should be the genesis state for a full client. +*Note*: With regard to fork choice, block headers are interchangeable with blocks. The spec is likely to move to headers for reduced overhead in test vectors and better encapsulation. Full implementations store blocks as part of their database and will often use full blocks when dealing with production fork choice. + +_The block for `anchor_root` is incorrectly initialized to the block header, rather than the full block. This does not affect functionality but will be cleaned up in subsequent releases._ + ```python def get_forkchoice_store(anchor_state: BeaconState) -> Store: anchor_block_header = anchor_state.latest_block_header.copy()