Merge branch 'dev' into cov-hunt

protolambda 2019-06-29 02:30:53 +02:00
commit d31f1b2353
No known key found for this signature in database
GPG Key ID: EC89FDBB2B4C7623
35 changed files with 979 additions and 523 deletions

View File

@@ -76,7 +76,7 @@ MIN_EPOCHS_TO_INACTIVITY_PENALTY: 4
 # 2**16 (= 65,536) epochs ~0.8 years
 EPOCHS_PER_HISTORICAL_VECTOR: 65536
 # 2**13 (= 8,192) epochs ~36 days
-EPOCHS_PER_SLASHED_BALANCES_VECTOR: 8192
+EPOCHS_PER_SLASHINGS_VECTOR: 8192
 # 2**24 (= 16,777,216) historical roots, ~26,131 years
 HISTORICAL_ROOTS_LIMIT: 16777216
 # 2**40 (= 1,099,511,627,776) validator spots
@@ -88,7 +88,7 @@ VALIDATOR_REGISTRY_LIMIT: 1099511627776
 # 2**5 (= 32)
 BASE_REWARD_FACTOR: 32
 # 2**9 (= 512)
-WHISTLEBLOWING_REWARD_QUOTIENT: 512
+WHISTLEBLOWER_REWARD_QUOTIENT: 512
 # 2**3 (= 8)
 PROPOSER_REWARD_QUOTIENT: 8
 # 2**25 (= 33,554,432)

View File

@@ -77,7 +77,7 @@ EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS: 4096
 # [customized] smaller state
 EPOCHS_PER_HISTORICAL_VECTOR: 64
 # [customized] smaller state
-EPOCHS_PER_SLASHED_BALANCES_VECTOR: 64
+EPOCHS_PER_SLASHINGS_VECTOR: 64
 # 2**24 (= 16,777,216) historical roots
 HISTORICAL_ROOTS_LIMIT: 16777216
 # 2**40 (= 1,099,511,627,776) validator spots
@@ -89,7 +89,7 @@ VALIDATOR_REGISTRY_LIMIT: 1099511627776
 # 2**5 (= 32)
 BASE_REWARD_FACTOR: 32
 # 2**9 (= 512)
-WHISTLEBLOWING_REWARD_QUOTIENT: 512
+WHISTLEBLOWER_REWARD_QUOTIENT: 512
 # 2**3 (= 8)
 PROPOSER_REWARD_QUOTIENT: 8
 # 2**25 (= 33,554,432)

View File

@@ -25,8 +25,8 @@ from eth2spec.utils.ssz.ssz_impl import (
     signing_root,
 )
 from eth2spec.utils.ssz.ssz_typing import (
-    Bit, Bool, Container, List, Vector, Bytes, uint64,
-    Bytes4, Bytes32, Bytes48, Bytes96,
+    bit, boolean, Container, List, Vector, uint64,
+    Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
 )
 from eth2spec.utils.bls import (
     bls_aggregate_pubkeys,
@@ -52,8 +52,8 @@ from eth2spec.utils.ssz.ssz_impl import (
     is_empty,
 )
 from eth2spec.utils.ssz.ssz_typing import (
-    Bit, Bool, Container, List, Vector, Bytes, uint64,
-    Bytes4, Bytes32, Bytes48, Bytes96,
+    bit, boolean, Container, List, Vector, Bytes, uint64,
+    Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
 )
 from eth2spec.utils.bls import (
     bls_aggregate_pubkeys,
@@ -174,8 +174,8 @@ def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, st
     ignored_dependencies = [
-        'Bit', 'Bool', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', 'Bytes', 'BytesN'
-        'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96',
+        'bit', 'boolean', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', 'Bytes', 'BytesN'
+        'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
         'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
         'bytes'  # to be removed after updating spec doc
     ]

View File

@@ -71,7 +71,7 @@ We require:
 G2_cofactor = 305502333931268344200999753193121504214466019254188142667664032982267604182971884026507427359259977847832272839041616661285803823378372096355777062779109
 q = 4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787
-def hash_to_G2(message_hash: Bytes32, domain: uint64) -> [uint384]:
+def hash_to_G2(message_hash: Bytes32, domain: uint64) -> Tuple[uint384, uint384]:
     # Initial candidate x coordinate
     x_re = int.from_bytes(hash(message_hash + bytes8(domain) + b'\x01'), 'big')
     x_im = int.from_bytes(hash(message_hash + bytes8(domain) + b'\x02'), 'big')

View File

@@ -24,6 +24,7 @@
 - [Containers](#containers)
 - [Misc dependencies](#misc-dependencies)
 - [`Fork`](#fork)
+- [`Checkpoint`](#checkpoint)
 - [`Validator`](#validator)
 - [`Crosslink`](#crosslink)
 - [`AttestationData`](#attestationdata)
@@ -80,8 +81,6 @@
 - [`bytes_to_int`](#bytes_to_int)
 - [`get_total_balance`](#get_total_balance)
 - [`get_domain`](#get_domain)
-- [`get_bitfield_bit`](#get_bitfield_bit)
-- [`verify_bitfield`](#verify_bitfield)
 - [`convert_to_indexed`](#convert_to_indexed)
 - [`validate_indexed_attestation`](#validate_indexed_attestation)
 - [`is_slashable_attestation_data`](#is_slashable_attestation_data)
@@ -191,6 +190,7 @@ The following values are (non-configurable) constants used throughout the specif
 | `MIN_PER_EPOCH_CHURN_LIMIT` | `2**2` (= 4) |
 | `CHURN_LIMIT_QUOTIENT` | `2**16` (= 65,536) |
 | `SHUFFLE_ROUND_COUNT` | `90` |
+| `JUSTIFICATION_BITS_LENGTH` | `4` |
 * For the safety of crosslinks, `TARGET_COMMITTEE_SIZE` exceeds [the recommended minimum committee size of 111](https://vitalik.ca/files/Ithaca201807_Sharding.pdf); with sufficient active validators (at least `SLOTS_PER_EPOCH * TARGET_COMMITTEE_SIZE`), the shuffling algorithm ensures committee sizes of at least `TARGET_COMMITTEE_SIZE`. (Unbiasable randomness with a Verifiable Delay Function (VDF) will improve committee robustness and lower the safe minimum committee size.)
@@ -233,7 +233,7 @@ The following values are (non-configurable) constants used throughout the specif
 | Name | Value | Unit | Duration |
 | - | - | :-: | :-: |
 | `EPOCHS_PER_HISTORICAL_VECTOR` | `2**16` (= 65,536) | epochs | ~0.8 years |
-| `EPOCHS_PER_SLASHED_BALANCES_VECTOR` | `2**13` (= 8,192) | epochs | ~36 days |
+| `EPOCHS_PER_SLASHINGS_VECTOR` | `2**13` (= 8,192) | epochs | ~36 days |
 | `HISTORICAL_ROOTS_LIMIT` | `2**24` (= 16,777,216) | historical roots | ~26,131 years |
 | `VALIDATOR_REGISTRY_LIMIT` | `2**40` (= 1,099,511,627,776) | validator spots | |
@@ -242,7 +242,7 @@ The following values are (non-configurable) constants used throughout the specif
 | Name | Value |
 | - | - |
 | `BASE_REWARD_FACTOR` | `2**6` (= 64) |
-| `WHISTLEBLOWING_REWARD_QUOTIENT` | `2**9` (= 512) |
+| `WHISTLEBLOWER_REWARD_QUOTIENT` | `2**9` (= 512) |
 | `PROPOSER_REWARD_QUOTIENT` | `2**3` (= 8) |
 | `INACTIVITY_PENALTY_QUOTIENT` | `2**25` (= 33,554,432) |
 | `MIN_SLASHING_PENALTY_QUOTIENT` | `2**5` (= 32) |
@@ -290,6 +290,14 @@ class Fork(Container):
     epoch: Epoch  # Epoch of latest fork
 ```
+#### `Checkpoint`
+```python
+class Checkpoint(Container):
+    epoch: Epoch
+    root: Hash
+```
 #### `Validator`
 ```python
@@ -297,7 +305,7 @@ class Validator(Container):
     pubkey: BLSPubkey
     withdrawal_credentials: Hash  # Commitment to pubkey for withdrawals and transfers
     effective_balance: Gwei  # Balance at stake
-    slashed: Bool
+    slashed: boolean
     # Status epochs
     activation_eligibility_epoch: Epoch  # When criteria for activation were met
     activation_epoch: Epoch
@@ -324,10 +332,8 @@ class AttestationData(Container):
     # LMD GHOST vote
     beacon_block_root: Hash
     # FFG vote
-    source_epoch: Epoch
-    source_root: Hash
-    target_epoch: Epoch
-    target_root: Hash
+    source: Checkpoint
+    target: Checkpoint
     # Crosslink vote
     crosslink: Crosslink
 ```
@@ -337,7 +343,7 @@ class AttestationData(Container):
 ```python
 class AttestationDataAndCustodyBit(Container):
     data: AttestationData
-    custody_bit: Bit  # Challengeable bit (SSZ-bool, 1 byte) for the custody of crosslink data
+    custody_bit: bit  # Challengeable bit (SSZ-bool, 1 byte) for the custody of crosslink data
 ```
 #### `IndexedAttestation`
@@ -354,7 +360,7 @@ class IndexedAttestation(Container):
 ```python
 class PendingAttestation(Container):
-    aggregation_bitfield: Bytes[MAX_INDICES_PER_ATTESTATION // 8]
+    aggregation_bits: Bitlist[MAX_INDICES_PER_ATTESTATION]
     data: AttestationData
     inclusion_delay: Slot
     proposer_index: ValidatorIndex
@@ -421,9 +427,9 @@ class AttesterSlashing(Container):
 ```python
 class Attestation(Container):
-    aggregation_bitfield: Bytes[MAX_INDICES_PER_ATTESTATION // 8]
+    aggregation_bits: Bitlist[MAX_INDICES_PER_ATTESTATION]
     data: AttestationData
-    custody_bitfield: Bytes[MAX_INDICES_PER_ATTESTATION // 8]
+    custody_bits: Bitlist[MAX_INDICES_PER_ATTESTATION]
     signature: BLSSignature
 ```
@@ -513,22 +519,18 @@ class BeaconState(Container):
     randao_mixes: Vector[Hash, EPOCHS_PER_HISTORICAL_VECTOR]
     active_index_roots: Vector[Hash, EPOCHS_PER_HISTORICAL_VECTOR]  # Active registry digests for light clients
     # Slashings
-    slashed_balances: Vector[Gwei, EPOCHS_PER_SLASHED_BALANCES_VECTOR]  # Sums of slashed effective balances
+    slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR]  # Per-epoch sums of slashed effective balances
     # Attestations
     previous_epoch_attestations: List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH]
     current_epoch_attestations: List[PendingAttestation, MAX_ATTESTATIONS * SLOTS_PER_EPOCH]
     # Crosslinks
     previous_crosslinks: Vector[Crosslink, SHARD_COUNT]  # Previous epoch snapshot
     current_crosslinks: Vector[Crosslink, SHARD_COUNT]
-    # Justification
-    previous_justified_epoch: Epoch  # Previous epoch snapshot
-    previous_justified_root: Hash  # Previous epoch snapshot
-    current_justified_epoch: Epoch
-    current_justified_root: Hash
-    justification_bitfield: uint64  # Bit set for every recent justified epoch
     # Finality
-    finalized_epoch: Epoch
-    finalized_root: Hash
+    justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH]  # Bit set for every recent justified epoch
+    previous_justified_checkpoint: Checkpoint  # Previous epoch snapshot
+    current_justified_checkpoint: Checkpoint
+    finalized_checkpoint: Checkpoint
 ```
 ## Helper functions
@@ -702,9 +704,9 @@ def get_epoch_start_shard(state: BeaconState, epoch: Epoch) -> Shard:
 ```python
 def get_attestation_data_slot(state: BeaconState, data: AttestationData) -> Slot:
-    committee_count = get_epoch_committee_count(state, data.target_epoch)
-    offset = (data.crosslink.shard + SHARD_COUNT - get_epoch_start_shard(state, data.target_epoch)) % SHARD_COUNT
-    return Slot(get_epoch_start_slot(data.target_epoch) + offset // (committee_count // SLOTS_PER_EPOCH))
+    committee_count = get_epoch_committee_count(state, data.target.epoch)
+    offset = (data.crosslink.shard + SHARD_COUNT - get_epoch_start_shard(state, data.target.epoch)) % SHARD_COUNT
+    return Slot(get_epoch_start_slot(data.target.epoch) + offset // (committee_count // SLOTS_PER_EPOCH))
 ```
 ### `get_block_root_at_slot`
@@ -864,14 +866,13 @@ def get_crosslink_committee(state: BeaconState, epoch: Epoch, shard: Shard) -> S
 ```python
 def get_attesting_indices(state: BeaconState,
-                          attestation_data: AttestationData,
-                          bitfield: bytes) -> Sequence[ValidatorIndex]:
+                          data: AttestationData,
+                          bits: Bitlist[MAX_INDICES_PER_ATTESTATION]) -> Set[ValidatorIndex]:
     """
-    Return the sorted attesting indices corresponding to ``attestation_data`` and ``bitfield``.
+    Return the set of attesting indices corresponding to ``data`` and ``bitfield``.
     """
-    committee = get_crosslink_committee(state, attestation_data.target_epoch, attestation_data.crosslink.shard)
-    assert verify_bitfield(bitfield, len(committee))
-    return sorted([index for i, index in enumerate(committee) if get_bitfield_bit(bitfield, i) == 0b1])
+    committee = get_crosslink_committee(state, data.target.epoch, data.crosslink.shard)
+    return set(index for i, index in enumerate(committee) if bits[i])
 ```
 ### `int_to_bytes`
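
The hunk above replaces packed byte-array bitfields with SSZ `Bitlist` indexing. The following is a minimal, self-contained model (plain Python, not the spec's SSZ types) of how the new and the old forms select committee members; the committee values are made up for illustration:

```python
# Toy committee of validator indices and a per-member participation flag.
committee = [13, 7, 42, 99]
bits = [True, False, True, False]  # stands in for Bitlist[MAX_INDICES_PER_ATTESTATION]

# New form: set comprehension over (position, validator index) pairs.
attesting = {index for i, index in enumerate(committee) if bits[i]}
assert attesting == {13, 42}

# Old form: the same positions recovered from a packed little-endian bitfield.
bitfield = bytes([0b00000101])  # bits 0 and 2 set
old_attesting = sorted(index for i, index in enumerate(committee)
                       if (bitfield[i // 8] >> (i % 8)) % 2 == 0b1)
assert old_attesting == [13, 42]
```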
@@ -912,34 +913,6 @@ def get_domain(state: BeaconState,
     return bls_domain(domain_type, fork_version)
 ```
-### `get_bitfield_bit`
-```python
-def get_bitfield_bit(bitfield: bytes, i: int) -> int:
-    """
-    Extract the bit in ``bitfield`` at position ``i``.
-    """
-    return (bitfield[i // 8] >> (i % 8)) % 2
-```
-### `verify_bitfield`
-```python
-def verify_bitfield(bitfield: bytes, committee_size: int) -> bool:
-    """
-    Verify ``bitfield`` against the ``committee_size``.
-    """
-    if len(bitfield) != (committee_size + 7) // 8:
-        return False
-    # Check `bitfield` is padded with zero bits only
-    for i in range(committee_size, len(bitfield) * 8):
-        if get_bitfield_bit(bitfield, i) == 0b1:
-            return False
-    return True
-```
 ### `convert_to_indexed`
 ```python
@@ -947,14 +920,14 @@ def convert_to_indexed(state: BeaconState, attestation: Attestation) -> IndexedA
     """
     Convert ``attestation`` to (almost) indexed-verifiable form.
     """
-    attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bitfield)
-    custody_bit_1_indices = get_attesting_indices(state, attestation.data, attestation.custody_bitfield)
-    assert set(custody_bit_1_indices).issubset(attesting_indices)
-    custody_bit_0_indices = [index for index in attesting_indices if index not in custody_bit_1_indices]
+    attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits)
+    custody_bit_1_indices = get_attesting_indices(state, attestation.data, attestation.custody_bits)
+    assert custody_bit_1_indices.issubset(attesting_indices)
+    custody_bit_0_indices = attesting_indices.difference(custody_bit_1_indices)
     return IndexedAttestation(
-        custody_bit_0_indices=custody_bit_0_indices,
-        custody_bit_1_indices=custody_bit_1_indices,
+        custody_bit_0_indices=sorted(custody_bit_0_indices),
+        custody_bit_1_indices=sorted(custody_bit_1_indices),
         data=attestation.data,
         signature=attestation.signature,
     )
@@ -989,7 +962,7 @@ def validate_indexed_attestation(state: BeaconState, indexed_attestation: Indexe
             hash_tree_root(AttestationDataAndCustodyBit(data=indexed_attestation.data, custody_bit=0b1)),
         ],
         signature=indexed_attestation.signature,
-        domain=get_domain(state, DOMAIN_ATTESTATION, indexed_attestation.data.target_epoch),
+        domain=get_domain(state, DOMAIN_ATTESTATION, indexed_attestation.data.target.epoch),
     )
 ```
@@ -1002,9 +975,9 @@ def is_slashable_attestation_data(data_1: AttestationData, data_2: AttestationDa
     """
     return (
         # Double vote
-        (data_1 != data_2 and data_1.target_epoch == data_2.target_epoch) or
+        (data_1 != data_2 and data_1.target.epoch == data_2.target.epoch) or
         # Surround vote
-        (data_1.source_epoch < data_2.source_epoch and data_2.target_epoch < data_1.target_epoch)
+        (data_1.source.epoch < data_2.source.epoch and data_2.target.epoch < data_1.target.epoch)
     )
 ```
@@ -1096,21 +1069,22 @@ def slash_validator(state: BeaconState,
     """
     Slash the validator with index ``slashed_index``.
     """
-    current_epoch = get_current_epoch(state)
+    epoch = get_current_epoch(state)
     initiate_validator_exit(state, slashed_index)
-    state.validators[slashed_index].slashed = True
-    state.validators[slashed_index].withdrawable_epoch = Epoch(current_epoch + EPOCHS_PER_SLASHED_BALANCES_VECTOR)
-    slashed_balance = state.validators[slashed_index].effective_balance
-    state.slashed_balances[current_epoch % EPOCHS_PER_SLASHED_BALANCES_VECTOR] += slashed_balance
+    validator = state.validators[slashed_index]
+    validator.slashed = True
+    validator.withdrawable_epoch = max(validator.withdrawable_epoch, Epoch(epoch + EPOCHS_PER_SLASHINGS_VECTOR))
+    state.slashings[epoch % EPOCHS_PER_SLASHINGS_VECTOR] += validator.effective_balance
+    decrease_balance(state, slashed_index, validator.effective_balance // MIN_SLASHING_PENALTY_QUOTIENT)
+    # Apply proposer and whistleblower rewards
     proposer_index = get_beacon_proposer_index(state)
     if whistleblower_index is None:
         whistleblower_index = proposer_index
-    whistleblowing_reward = Gwei(slashed_balance // WHISTLEBLOWING_REWARD_QUOTIENT)
-    proposer_reward = Gwei(whistleblowing_reward // PROPOSER_REWARD_QUOTIENT)
+    whistleblower_reward = Gwei(validator.effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT)
+    proposer_reward = Gwei(whistleblower_reward // PROPOSER_REWARD_QUOTIENT)
     increase_balance(state, proposer_index, proposer_reward)
-    increase_balance(state, whistleblower_index, whistleblowing_reward - proposer_reward)
-    decrease_balance(state, slashed_index, whistleblowing_reward)
+    increase_balance(state, whistleblower_index, whistleblower_reward - proposer_reward)
 ```
 ## Genesis
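
For scale, an illustrative calculation of the split that the new `slash_validator` applies, assuming a 32 ETH effective balance and the quotients from the tables above (illustration only, not spec code):

```python
GWEI_PER_ETH = 10**9
MIN_SLASHING_PENALTY_QUOTIENT = 2**5   # 32
WHISTLEBLOWER_REWARD_QUOTIENT = 2**9   # 512
PROPOSER_REWARD_QUOTIENT = 2**3        # 8

effective_balance = 32 * GWEI_PER_ETH  # 32 ETH, in Gwei

# Immediate penalty applied inside slash_validator.
initial_penalty = effective_balance // MIN_SLASHING_PENALTY_QUOTIENT
# Reward pot; the proposer takes 1/8 of it, the whistleblower keeps the rest.
whistleblower_reward = effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT
proposer_reward = whistleblower_reward // PROPOSER_REWARD_QUOTIENT

assert initial_penalty == 1 * GWEI_PER_ETH        # 1 ETH
assert whistleblower_reward == 62_500_000         # 0.0625 ETH
assert proposer_reward == 7_812_500               # 0.0078125 ETH
```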
@@ -1173,7 +1147,7 @@ def get_genesis_beacon_state(deposits: Sequence[Deposit], genesis_time: int, eth
             validator.activation_eligibility_epoch = GENESIS_EPOCH
             validator.activation_epoch = GENESIS_EPOCH
     # Populate active_index_roots
     genesis_active_index_root = hash_tree_root(
         List[ValidatorIndex, VALIDATOR_REGISTRY_LIMIT](get_active_validator_indices(state, GENESIS_EPOCH))
     )
@@ -1256,7 +1230,7 @@ def get_total_active_balance(state: BeaconState) -> Gwei:
 ```python
 def get_matching_source_attestations(state: BeaconState, epoch: Epoch) -> Sequence[PendingAttestation]:
-    assert epoch in (get_current_epoch(state), get_previous_epoch(state))
+    assert epoch in (get_previous_epoch(state), get_current_epoch(state))
     return state.current_epoch_attestations if epoch == get_current_epoch(state) else state.previous_epoch_attestations
 ```
@@ -1264,7 +1238,7 @@ def get_matching_source_attestations(state: BeaconState, epoch: Epoch) -> Sequen
 def get_matching_target_attestations(state: BeaconState, epoch: Epoch) -> Sequence[PendingAttestation]:
     return [
         a for a in get_matching_source_attestations(state, epoch)
-        if a.data.target_root == get_block_root(state, epoch)
+        if a.data.target.root == get_block_root(state, epoch)
     ]
 ```
@@ -1281,7 +1255,7 @@ def get_unslashed_attesting_indices(state: BeaconState,
                                     attestations: Sequence[PendingAttestation]) -> Set[ValidatorIndex]:
     output = set()  # type: Set[ValidatorIndex]
     for a in attestations:
-        output = output.union(get_attesting_indices(state, a.data, a.aggregation_bitfield))
+        output = output.union(get_attesting_indices(state, a.data, a.aggregation_bits))
     return set(filter(lambda index: not state.validators[index].slashed, list(output)))
 ```
@@ -1316,46 +1290,38 @@ def process_justification_and_finalization(state: BeaconState) -> None:
     previous_epoch = get_previous_epoch(state)
     current_epoch = get_current_epoch(state)
-    old_previous_justified_epoch = state.previous_justified_epoch
-    old_current_justified_epoch = state.current_justified_epoch
+    old_previous_justified_checkpoint = state.previous_justified_checkpoint
+    old_current_justified_checkpoint = state.current_justified_checkpoint
     # Process justifications
-    state.previous_justified_epoch = state.current_justified_epoch
-    state.previous_justified_root = state.current_justified_root
-    state.justification_bitfield = (state.justification_bitfield << 1) % 2**64
-    previous_epoch_matching_target_balance = get_attesting_balance(
-        state, get_matching_target_attestations(state, previous_epoch)
-    )
-    if previous_epoch_matching_target_balance * 3 >= get_total_active_balance(state) * 2:
-        state.current_justified_epoch = previous_epoch
-        state.current_justified_root = get_block_root(state, state.current_justified_epoch)
-        state.justification_bitfield |= (1 << 1)
-    current_epoch_matching_target_balance = get_attesting_balance(
-        state, get_matching_target_attestations(state, current_epoch)
-    )
-    if current_epoch_matching_target_balance * 3 >= get_total_active_balance(state) * 2:
-        state.current_justified_epoch = current_epoch
-        state.current_justified_root = get_block_root(state, state.current_justified_epoch)
-        state.justification_bitfield |= (1 << 0)
+    state.previous_justified_checkpoint = state.current_justified_checkpoint
+    state.justification_bits[1:] = state.justification_bits[:-1]
+    state.justification_bits[0] = 0b0
+    matching_target_attestations = get_matching_target_attestations(state, previous_epoch)  # Previous epoch
+    if get_attesting_balance(state, matching_target_attestations) * 3 >= get_total_active_balance(state) * 2:
+        state.current_justified_checkpoint = Checkpoint(epoch=previous_epoch,
+                                                        root=get_block_root(state, previous_epoch))
+        state.justification_bits[1] = 0b1
+    matching_target_attestations = get_matching_target_attestations(state, current_epoch)  # Current epoch
+    if get_attesting_balance(state, matching_target_attestations) * 3 >= get_total_active_balance(state) * 2:
+        state.current_justified_checkpoint = Checkpoint(epoch=current_epoch,
+                                                        root=get_block_root(state, current_epoch))
+        state.justification_bits[0] = 0b1
     # Process finalizations
-    bitfield = state.justification_bitfield
+    bits = state.justification_bits
     # The 2nd/3rd/4th most recent epochs are justified, the 2nd using the 4th as source
-    if (bitfield >> 1) % 8 == 0b111 and old_previous_justified_epoch + 3 == current_epoch:
-        state.finalized_epoch = old_previous_justified_epoch
-        state.finalized_root = get_block_root(state, state.finalized_epoch)
+    if all(bits[1:4]) and old_previous_justified_checkpoint.epoch + 3 == current_epoch:
+        state.finalized_checkpoint = old_previous_justified_checkpoint
     # The 2nd/3rd most recent epochs are justified, the 2nd using the 3rd as source
-    if (bitfield >> 1) % 4 == 0b11 and old_previous_justified_epoch + 2 == current_epoch:
-        state.finalized_epoch = old_previous_justified_epoch
-        state.finalized_root = get_block_root(state, state.finalized_epoch)
+    if all(bits[1:3]) and old_previous_justified_checkpoint.epoch + 2 == current_epoch:
+        state.finalized_checkpoint = old_previous_justified_checkpoint
     # The 1st/2nd/3rd most recent epochs are justified, the 1st using the 3rd as source
-    if (bitfield >> 0) % 8 == 0b111 and old_current_justified_epoch + 2 == current_epoch:
-        state.finalized_epoch = old_current_justified_epoch
-        state.finalized_root = get_block_root(state, state.finalized_epoch)
+    if all(bits[0:3]) and old_current_justified_checkpoint.epoch + 2 == current_epoch:
+        state.finalized_checkpoint = old_current_justified_checkpoint
     # The 1st/2nd most recent epochs are justified, the 1st using the 2nd as source
-    if (bitfield >> 0) % 4 == 0b11 and old_current_justified_epoch + 1 == current_epoch:
-        state.finalized_epoch = old_current_justified_epoch
-        state.finalized_root = get_block_root(state, state.finalized_epoch)
+    if all(bits[0:2]) and old_current_justified_checkpoint.epoch + 1 == current_epoch:
+        state.finalized_checkpoint = old_current_justified_checkpoint
 ```
 #### Crosslinks
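
A simplified, non-normative model of the new `justification_bits` bookkeeping in the hunk above, using a plain Python list in place of `Bitvector[JUSTIFICATION_BITS_LENGTH]` (index 0 is the most recent epoch):

```python
bits = [0, 1, 1, 0]  # stands in for state.justification_bits

# Each epoch transition shifts the window one slot older and clears the newest slot,
# mirroring `state.justification_bits[1:] = state.justification_bits[:-1]`.
bits[1:] = bits[:-1]
bits[0] = 0
assert bits == [0, 0, 1, 1]

# The finalization rules then test contiguous runs of the window, e.g.
# "2nd/3rd/4th most recent epochs justified" is all(bits[1:4]).
assert all(bits[1:4]) is False
assert all(bits[2:4]) is True
```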
@@ -1410,7 +1376,7 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence
         index = ValidatorIndex(index)
         attestation = min([
             a for a in matching_source_attestations
-            if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
+            if index in get_attesting_indices(state, a.data, a.aggregation_bits)
         ], key=lambda a: a.inclusion_delay)
         proposer_reward = Gwei(get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT)
         rewards[attestation.proposer_index] += proposer_reward
@@ -1418,7 +1384,7 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence
         rewards[index] += Gwei(max_attester_reward * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay)
     # Inactivity penalty
-    finality_delay = previous_epoch - state.finalized_epoch
+    finality_delay = previous_epoch - state.finalized_checkpoint.epoch
     if finality_delay > MIN_EPOCHS_TO_INACTIVITY_PENALTY:
         matching_target_attesting_indices = get_unslashed_attesting_indices(state, matching_target_attestations)
         for index in eligible_validator_indices:
@@ -1483,7 +1449,7 @@ def process_registry_updates(state: BeaconState) -> None:
     activation_queue = sorted([
         index for index, validator in enumerate(state.validators) if
         validator.activation_eligibility_epoch != FAR_FUTURE_EPOCH and
-        validator.activation_epoch >= get_delayed_activation_exit_epoch(state.finalized_epoch)
+        validator.activation_epoch >= get_delayed_activation_exit_epoch(state.finalized_checkpoint.epoch)
     ], key=lambda index: state.validators[index].activation_eligibility_epoch)
     # Dequeued validators for activation up to churn limit (without resetting activation epoch)
     for index in activation_queue[:get_churn_limit(state)]:
@@ -1498,18 +1464,9 @@ def process_registry_updates(state: BeaconState) -> None:
 def process_slashings(state: BeaconState) -> None:
     epoch = get_current_epoch(state)
     total_balance = get_total_active_balance(state)
-    # Compute slashed balances in the current epoch
-    total_at_start = state.slashed_balances[(epoch + 1) % EPOCHS_PER_SLASHED_BALANCES_VECTOR]
-    total_at_end = state.slashed_balances[epoch % EPOCHS_PER_SLASHED_BALANCES_VECTOR]
-    total_penalties = total_at_end - total_at_start
     for index, validator in enumerate(state.validators):
-        if validator.slashed and epoch + EPOCHS_PER_SLASHED_BALANCES_VECTOR // 2 == validator.withdrawable_epoch:
-            penalty = max(
-                validator.effective_balance * min(total_penalties * 3, total_balance) // total_balance,
-                validator.effective_balance // MIN_SLASHING_PENALTY_QUOTIENT
-            )
+        if validator.slashed and epoch + EPOCHS_PER_SLASHINGS_VECTOR // 2 == validator.withdrawable_epoch:
+            penalty = validator.effective_balance * min(sum(state.slashings) * 3, total_balance) // total_balance
             decrease_balance(state, ValidatorIndex(index), penalty)
 ```
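
An illustrative evaluation of the new proportional penalty in `process_slashings`, with assumed balances (illustration only, not spec code):

```python
GWEI_PER_ETH = 10**9

effective_balance = 32 * GWEI_PER_ETH        # validator being penalized
total_balance = 1_000_000 * GWEI_PER_ETH     # assumed total active balance
recent_slashings_sum = 5_000 * GWEI_PER_ETH  # assumed sum(state.slashings) over the vector

# The penalty scales with the fraction of stake recently slashed, capped at the full balance.
penalty = effective_balance * min(recent_slashings_sum * 3, total_balance) // total_balance
assert penalty == 480_000_000  # 0.48 ETH: 32 ETH * (3 * 5,000 / 1,000,000)
```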
@@ -1537,10 +1494,8 @@ def process_final_updates(state: BeaconState) -> None:
             get_active_validator_indices(state, Epoch(next_epoch + ACTIVATION_EXIT_DELAY))
         )
     )
-    # Set total slashed balances
-    state.slashed_balances[next_epoch % EPOCHS_PER_SLASHED_BALANCES_VECTOR] = (
-        state.slashed_balances[current_epoch % EPOCHS_PER_SLASHED_BALANCES_VECTOR]
-    )
+    # Reset slashings
+    state.slashings[next_epoch % EPOCHS_PER_SLASHINGS_VECTOR] = Gwei(0)
     # Set randao mix
     state.randao_mixes[next_epoch % EPOCHS_PER_HISTORICAL_VECTOR] = get_randao_mix(state, current_epoch)
     # Set historical root accumulator
@@ -1684,35 +1639,35 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None:
     Process ``Attestation`` operation.
     """
     data = attestation.data
-    assert data.target_epoch in (get_previous_epoch(state), get_current_epoch(state))
+    assert data.crosslink.shard < SHARD_COUNT
+    assert data.target.epoch in (get_previous_epoch(state), get_current_epoch(state))
     attestation_slot = get_attestation_data_slot(state, data)
     assert attestation_slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot <= attestation_slot + SLOTS_PER_EPOCH
     pending_attestation = PendingAttestation(
         data=data,
-        aggregation_bitfield=attestation.aggregation_bitfield,
+        aggregation_bits=attestation.aggregation_bits,
         inclusion_delay=state.slot - attestation_slot,
         proposer_index=get_beacon_proposer_index(state),
     )
-    if data.target_epoch == get_current_epoch(state):
-        ffg_data = (state.current_justified_epoch, state.current_justified_root, get_current_epoch(state))
+    if data.target.epoch == get_current_epoch(state):
+        assert data.source == state.current_justified_checkpoint
         parent_crosslink = state.current_crosslinks[data.crosslink.shard]
         state.current_epoch_attestations.append(pending_attestation)
     else:
-        ffg_data = (state.previous_justified_epoch, state.previous_justified_root, get_previous_epoch(state))
+        assert data.source == state.previous_justified_checkpoint
         parent_crosslink = state.previous_crosslinks[data.crosslink.shard]
         state.previous_epoch_attestations.append(pending_attestation)
-    # Check FFG data, crosslink data, and signature
-    assert ffg_data == (data.source_epoch, data.source_root, data.target_epoch)
-    assert data.crosslink.start_epoch == parent_crosslink.end_epoch
-    assert data.crosslink.end_epoch == min(data.target_epoch, parent_crosslink.end_epoch + MAX_EPOCHS_PER_CROSSLINK)
+    # Check crosslink against expected parent crosslink
     assert data.crosslink.parent_root == hash_tree_root(parent_crosslink)
+    assert data.crosslink.start_epoch == parent_crosslink.end_epoch
+    assert data.crosslink.end_epoch == min(data.target.epoch, parent_crosslink.end_epoch + MAX_EPOCHS_PER_CROSSLINK)
     assert data.crosslink.data_root == ZERO_HASH  # [to be removed in phase 1]
+    # Check signature
     validate_indexed_attestation(state, convert_to_indexed(state, attestation))
 ```

View File

@@ -12,7 +12,7 @@
 - [Time parameters](#time-parameters)
 - [Fork choice](#fork-choice)
 - [Helpers](#helpers)
-- [`Target`](#target)
+- [`Checkpoint`](#checkpoint)
 - [`Store`](#store)
 - [`get_genesis_store`](#get_genesis_store)
 - [`get_ancestor`](#get_ancestor)
@@ -55,11 +55,11 @@ The head block root associated with a `store` is defined as `get_head(store)`. A
 ### Helpers
-#### `Target`
+#### `LatestMessage`
 ```python
-@dataclass
-class Target(object):
+@dataclass(eq=True, frozen=True)
+class LatestMessage(object):
     epoch: Epoch
     root: Hash
 ```
@@ -69,12 +69,13 @@ class Target(object):
 ```python
 @dataclass
 class Store(object):
+    time: int
+    justified_checkpoint: Checkpoint
+    finalized_checkpoint: Checkpoint
     blocks: Dict[Hash, BeaconBlock] = field(default_factory=dict)
-    states: Dict[Hash, BeaconState] = field(default_factory=dict)
-    time: int = 0
-    latest_targets: Dict[ValidatorIndex, Target] = field(default_factory=dict)
-    justified_root: Hash = ZERO_HASH
-    finalized_root: Hash = ZERO_HASH
+    block_states: Dict[Hash, BeaconState] = field(default_factory=dict)
+    checkpoint_states: Dict[Checkpoint, BeaconState] = field(default_factory=dict)
+    latest_messages: Dict[ValidatorIndex, LatestMessage] = field(default_factory=dict)
 ```
 #### `get_genesis_store`
@@ -83,12 +84,15 @@ class Store(object):
 def get_genesis_store(genesis_state: BeaconState) -> Store:
     genesis_block = BeaconBlock(state_root=hash_tree_root(genesis_state))
     root = signing_root(genesis_block)
+    justified_checkpoint = Checkpoint(epoch=GENESIS_EPOCH, root=root)
+    finalized_checkpoint = Checkpoint(epoch=GENESIS_EPOCH, root=root)
     return Store(
-        blocks={root: genesis_block},
-        states={root: genesis_state},
         time=genesis_state.genesis_time,
-        justified_root=root,
-        finalized_root=root,
+        justified_checkpoint=justified_checkpoint,
+        finalized_checkpoint=finalized_checkpoint,
+        blocks={root: genesis_block},
+        block_states={root: genesis_state.copy()},
+        checkpoint_states={justified_checkpoint: genesis_state.copy()},
     )
 ```
@@ -105,11 +109,12 @@ def get_ancestor(store: Store, root: Hash, slot: Slot) -> Hash:
 ```python
 def get_latest_attesting_balance(store: Store, root: Hash) -> Gwei:
-    state = store.states[store.justified_root]
-    active_indices = get_active_validator_indices(state.validator_registry, get_current_epoch(state))
+    state = store.checkpoint_states[store.justified_checkpoint]
+    active_indices = get_active_validator_indices(state, get_current_epoch(state))
     return Gwei(sum(
-        state.validator_registry[i].effective_balance for i in active_indices
-        if get_ancestor(store, store.latest_targets[i].root, store.blocks[root].slot) == root
+        state.validators[i].effective_balance for i in active_indices
+        if (i in store.latest_messages and
+            get_ancestor(store, store.latest_messages[i].root, store.blocks[root].slot) == root)
     ))
 ```
@@ -118,9 +123,13 @@ def get_latest_attesting_balance(store: Store, root: Hash) -> Gwei:
 ```python
 def get_head(store: Store) -> Hash:
     # Execute the LMD-GHOST fork choice
-    head = store.justified_root
+    head = store.justified_checkpoint.root
+    justified_slot = get_epoch_start_slot(store.justified_checkpoint.epoch)
     while True:
-        children = [root for root in store.blocks.keys() if store.blocks[root].parent_root == head]
+        children = [
+            root for root in store.blocks.keys()
+            if store.blocks[root].parent_root == head and store.blocks[root].slot > justified_slot
+        ]
         if len(children) == 0:
             return head
         # Sort by latest attesting balance with ties broken lexicographically
@@ -141,35 +150,65 @@ def on_tick(store: Store, time: int) -> None:
 ```python
 def on_block(store: Store, block: BeaconBlock) -> None:
     # Make a copy of the state to avoid mutability issues
-    pre_state = store.states[block.parent_root].copy()
+    assert block.parent_root in store.block_states
+    pre_state = store.block_states[block.parent_root].copy()
     # Blocks cannot be in the future. If they are, their consideration must be delayed until the are in the past.
     assert store.time >= pre_state.genesis_time + block.slot * SECONDS_PER_SLOT
     # Add new block to the store
     store.blocks[signing_root(block)] = block
     # Check block is a descendant of the finalized block
-    assert get_ancestor(store, signing_root(block), store.blocks[store.finalized_root].slot) == store.finalized_root
+    assert (
+        get_ancestor(store, signing_root(block), store.blocks[store.finalized_checkpoint.root].slot) ==
+        store.finalized_checkpoint.root
+    )
+    # Check that block is later than the finalized epoch slot
+    assert block.slot > get_epoch_start_slot(store.finalized_checkpoint.epoch)
     # Check the block is valid and compute the post-state
     state = state_transition(pre_state, block)
-    # Add new state to the store
-    store.states[signing_root(block)] = state
-    # Update justified block root
-    if state.current_justified_epoch > slot_to_epoch(store.blocks[store.justified_root].slot):
-        store.justified_root = state.current_justified_root
-    elif state.previous_justified_epoch > slot_to_epoch(store.blocks[store.justified_root].slot):
-        store.justified_root = state.previous_justified_root
-    # Update finalized block root
-    if state.finalized_epoch > slot_to_epoch(store.blocks[store.finalized_root].slot):
-        store.finalized_root = state.finalized_root
+    # Add new state for this block to the store
+    store.block_states[signing_root(block)] = state
+    # Update justified checkpoint
+    if state.current_justified_checkpoint.epoch > store.justified_checkpoint.epoch:
+        store.justified_checkpoint = state.current_justified_checkpoint
+    elif state.previous_justified_checkpoint.epoch > store.justified_checkpoint.epoch:
+        store.justified_checkpoint = state.previous_justified_checkpoint
+    # Update finalized checkpoint
+    if state.finalized_checkpoint.epoch > store.finalized_checkpoint.epoch:
+        store.finalized_checkpoint = state.finalized_checkpoint
 ```
 #### `on_attestation`
 ```python
 def on_attestation(store: Store, attestation: Attestation) -> None:
-    state = store.states[get_head(store)]
-    indexed_attestation = convert_to_indexed(state, attestation)
-    validate_indexed_attestation(state, indexed_attestation)
+    target = attestation.data.target
+    # Cannot calculate the current shuffling if have not seen the target
+    assert target.root in store.blocks
+    # Attestations cannot be from future epochs. If they are, delay consideration until the epoch arrivesr
+    base_state = store.block_states[target.root].copy()
+    assert store.time >= base_state.genesis_time + get_epoch_start_slot(target.epoch) * SECONDS_PER_SLOT
+    # Store target checkpoint state if not yet seen
+    if target not in store.checkpoint_states:
+        process_slots(base_state, get_epoch_start_slot(target.epoch))
+        store.checkpoint_states[target] = base_state
+    target_state = store.checkpoint_states[target]
+    # Attestations can only affect the fork choice of subsequent slots.
+    # Delay consideration in the fork choice until their slot is in the past.
+    attestation_slot = get_attestation_data_slot(target_state, attestation.data)
+    assert store.time >= (attestation_slot + 1) * SECONDS_PER_SLOT
+    # Get state at the `target` to validate attestation and calculate the committees
+    indexed_attestation = convert_to_indexed(target_state, attestation)
+    validate_indexed_attestation(target_state, indexed_attestation)
+    # Update latest messages
     for i in indexed_attestation.custody_bit_0_indices + indexed_attestation.custody_bit_1_indices:
-        if i not in store.latest_targets or attestation.data.target_epoch > store.latest_targets[i].epoch:
-            store.latest_targets[i] = Target(attestation.data.target_epoch, attestation.data.target_root)
+        if i not in store.latest_messages or target.epoch > store.latest_messages[i].epoch:
+            store.latest_messages[i] = LatestMessage(epoch=target.epoch, root=attestation.data.beacon_block_root)
 ```
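
A toy, non-normative model of the `latest_messages` bookkeeping at the end of `on_attestation` above: a validator's recorded vote is only replaced by one with a strictly newer target epoch.

```python
from dataclasses import dataclass
from typing import Dict

@dataclass(eq=True, frozen=True)
class LatestMessage:
    epoch: int
    root: bytes

latest_messages: Dict[int, LatestMessage] = {}

def update(validator_index: int, target_epoch: int, beacon_block_root: bytes) -> None:
    # Mirror of the spec's condition: keep only the newest-target vote per validator.
    if validator_index not in latest_messages or target_epoch > latest_messages[validator_index].epoch:
        latest_messages[validator_index] = LatestMessage(epoch=target_epoch, root=beacon_block_root)

update(7, 10, b'\x01' * 32)
update(7, 9, b'\x02' * 32)   # older target: ignored
update(7, 11, b'\x03' * 32)  # newer target: replaces
assert latest_messages[7] == LatestMessage(epoch=11, root=b'\x03' * 32)
```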

View File

@@ -36,7 +36,7 @@
 - [`get_custody_chunk_bit`](#get_custody_chunk_bit)
 - [`get_chunk_bits_root`](#get_chunk_bits_root)
 - [`get_randao_epoch_for_custody_period`](#get_randao_epoch_for_custody_period)
-- [`get_validators_custody_reveal_period`](#get_validators_custody_reveal_period)
+- [`get_reveal_period`](#get_reveal_period)
 - [`replace_empty_or_append`](#replace_empty_or_append)
 - [Per-block processing](#per-block-processing)
 - [Operations](#operations)
@@ -211,7 +211,7 @@ class EarlyDerivedSecretReveal(Container):
     # Index of the validator who revealed (whistleblower)
     masker_index: ValidatorIndex
     # Mask used to hide the actual reveal signature (prevent reveal from being stolen)
-    mask: Bytes32
+    mask: Hash
 ```
 ### Phase 0 container updates
@@ -224,7 +224,7 @@ Add the following fields to the end of the specified container objects. Fields w
 class Validator(Container):
     # next_custody_reveal_period is initialised to the custody period
     # (of the particular validator) in which the validator is activated
-    # = get_validators_custody_reveal_period(...)
+    # = get_reveal_period(...)
     next_custody_reveal_period: uint64
     max_reveal_lateness: uint64
 ```
@@ -272,22 +272,32 @@ def get_custody_chunk_count(crosslink: Crosslink) -> int:
     return crosslink_length * chunks_per_epoch
 ```
+### `get_bit`
+```python
+def get_bit(serialization: bytes, i: int) -> int:
+    """
+    Extract the bit in ``serialization`` at position ``i``.
+    """
+    return (serialization[i // 8] >> (i % 8)) % 2
+```
 ### `get_custody_chunk_bit`
 ```python
 def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool:
     # TODO: Replace with something MPC-friendly, e.g. the Legendre symbol
-    return bool(get_bitfield_bit(hash(key + chunk), 0))
+    return bool(get_bit(hash(key + chunk), 0))
 ```
 ### `get_chunk_bits_root`
 ```python
-def get_chunk_bits_root(chunk_bitfield: bytes) -> Bytes32:
+def get_chunk_bits_root(chunk_bits: bytes) -> Bytes32:
     aggregated_bits = bytearray([0] * 32)
-    for i in range(0, len(chunk_bitfield), 32):
+    for i in range(0, len(chunk_bits), 32):
         for j in range(32):
-            aggregated_bits[j] ^= chunk_bitfield[i + j]
+            aggregated_bits[j] ^= chunk_bits[i + j]
     return hash(aggregated_bits)
 ```
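
A quick worked example of the bit arithmetic used by the `get_bit` helper added above (byte `i // 8`, bit `i % 8`, little-endian within each byte), in plain Python:

```python
def get_bit(serialization: bytes, i: int) -> int:
    # Same arithmetic as the helper above.
    return (serialization[i // 8] >> (i % 8)) % 2

data = bytes([0b00000110, 0b00000001])
assert [get_bit(data, i) for i in range(4)] == [0, 1, 1, 0]
assert get_bit(data, 8) == 1  # first bit of the second byte
```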
@@ -299,17 +309,12 @@ def get_randao_epoch_for_custody_period(period: int, validator_index: ValidatorI
     return Epoch(next_period_start + CUSTODY_PERIOD_TO_RANDAO_PADDING)
 ```
-### `get_validators_custody_reveal_period`
+### `get_reveal_period`
 ```python
-def get_validators_custody_reveal_period(state: BeaconState,
-                                         validator_index: ValidatorIndex,
-                                         epoch: Epoch=None) -> int:
+def get_reveal_period(state: BeaconState, validator_index: ValidatorIndex, epoch: Epoch=None) -> int:
     '''
-    This function returns the reveal period for a given validator.
-    If no epoch is supplied, the current epoch is assumed.
-    Note: This function implicitly requires that validators are not removed from the
-    validator set in fewer than EPOCHS_PER_CUSTODY_PERIOD epochs
+    Return the reveal period for a given validator.
     '''
     epoch = get_current_epoch(state) if epoch is None else epoch
     return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD
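
For intuition, an illustrative evaluation of the reveal-period formula with an assumed small `EPOCHS_PER_CUSTODY_PERIOD` (the real constant is larger); the per-validator offset staggers period boundaries across the validator set:

```python
EPOCHS_PER_CUSTODY_PERIOD = 4  # assumed small value, for illustration only

def reveal_period(epoch: int, validator_index: int) -> int:
    # Offsets the epoch by the validator index before dividing into periods.
    return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD

assert [reveal_period(e, validator_index=0) for e in range(6)] == [0, 0, 0, 0, 1, 1]
assert [reveal_period(e, validator_index=1) for e in range(6)] == [0, 0, 0, 1, 1, 1]
```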
@@ -340,17 +345,15 @@ Verify that `len(block.body.custody_key_reveals) <= MAX_CUSTODY_KEY_REVEALS`.
 For each `reveal` in `block.body.custody_key_reveals`, run the following function:
 ```python
-def process_custody_key_reveal(state: BeaconState,
-                               reveal: CustodyKeyReveal) -> None:
+def process_custody_key_reveal(state: BeaconState, reveal: CustodyKeyReveal) -> None:
     """
     Process ``CustodyKeyReveal`` operation.
     Note that this function mutates ``state``.
     """
     revealer = state.validators[reveal.revealer_index]
     epoch_to_sign = get_randao_epoch_for_custody_period(revealer.next_custody_reveal_period, reveal.revealed_index)
-    assert revealer.next_custody_reveal_period < get_validators_custody_reveal_period(state, reveal.revealed_index)
+    assert revealer.next_custody_reveal_period < get_reveal_period(state, reveal.revealed_index)
     # Revealed validator is active or exited, but not withdrawn
     assert is_slashable_validator(revealer, get_current_epoch(state))
@@ -368,11 +371,11 @@ def process_custody_key_reveal(state: BeaconState,
     )
     # Decrement max reveal lateness if response is timely
-    if revealer.next_custody_reveal_period == get_validators_custody_reveal_period(state, reveal.revealer_index) - 2:
+    if revealer.next_custody_reveal_period == get_reveal_period(state, reveal.revealer_index) - 2:
         revealer.max_reveal_lateness -= MAX_REVEAL_LATENESS_DECREMENT
     revealer.max_reveal_lateness = max(
         revealer.max_reveal_lateness,
-        get_validators_custody_reveal_period(state, reveal.revealed_index) - revealer.next_custody_reveal_period
+        get_reveal_period(state, reveal.revealed_index) - revealer.next_custody_reveal_period
     )
     # Process reveal
@@ -394,13 +397,11 @@ Verify that `len(block.body.early_derived_secret_reveals) <= MAX_EARLY_DERIVED_S
 For each `reveal` in `block.body.early_derived_secret_reveals`, run the following function:
 ```python
-def process_early_derived_secret_reveal(state: BeaconState,
-                                        reveal: EarlyDerivedSecretReveal) -> None:
+def process_early_derived_secret_reveal(state: BeaconState, reveal: EarlyDerivedSecretReveal) -> None:
     """
     Process ``EarlyDerivedSecretReveal`` operation.
     Note that this function mutates ``state``.
     """
     revealed_validator = state.validators[reveal.revealed_index]
     derived_secret_location = reveal.epoch % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS
@@ -453,7 +454,7 @@ def process_early_derived_secret_reveal(state: BeaconState,
     # Apply penalty
     proposer_index = get_beacon_proposer_index(state)
     whistleblower_index = reveal.masker_index
-    whistleblowing_reward = Gwei(penalty // WHISTLEBLOWING_REWARD_QUOTIENT)
+    whistleblowing_reward = Gwei(penalty // WHISTLEBLOWER_REWARD_QUOTIENT)
     proposer_reward = Gwei(whistleblowing_reward // PROPOSER_REWARD_QUOTIENT)
     increase_balance(state, proposer_index, proposer_reward)
     increase_balance(state, whistleblower_index, whistleblowing_reward - proposer_reward)
@@ -470,8 +471,7 @@ Verify that `len(block.body.custody_chunk_challenges) <= MAX_CUSTODY_CHUNK_CHALL
 For each `challenge` in `block.body.custody_chunk_challenges`, run the following function:
 ```python
-def process_chunk_challenge(state: BeaconState,
-                            challenge: CustodyChunkChallenge) -> None:
+def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge) -> None:
     # Verify the attestation
     validate_indexed_attestation(state, convert_to_indexed(state, challenge.attestation))
     # Verify it is not too late to challenge
@@ -479,7 +479,7 @@ def process_chunk_challenge(state: BeaconState,
     responder = state.validators[challenge.responder_index]
     assert responder.exit_epoch >= get_current_epoch(state) - MAX_CHUNK_CHALLENGE_DELAY
     # Verify the responder participated in the attestation
-    attesters = get_attesting_indices(state, challenge.attestation.data, challenge.attestation.aggregation_bitfield)
+    attesters = get_attesting_indices(state, challenge.attestation.data, challenge.attestation.aggregation_bits)
     assert challenge.responder_index in attesters
     # Verify the challenge is not a duplicate
     for record in state.custody_chunk_challenge_records:
@@ -514,60 +514,42 @@ Verify that `len(block.body.custody_bit_challenges) <= MAX_CUSTODY_BIT_CHALLENGE
 For each `challenge` in `block.body.custody_bit_challenges`, run the following function:
 ```python
-def process_bit_challenge(state: BeaconState,
-                          challenge: CustodyBitChallenge) -> None:
+def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) -> None:
+    attestation = challenge.attestation
+    epoch = slot_to_epoch(attestation.data.slot)
+    shard = attestation.data.crosslink.shard
     # Verify challenge signature
     challenger = state.validators[challenge.challenger_index]
-    assert bls_verify(
-        pubkey=challenger.pubkey,
-        message_hash=signing_root(challenge),
-        signature=challenge.signature,
-        domain=get_domain(state, DOMAIN_CUSTODY_BIT_CHALLENGE, get_current_epoch(state)),
-    )
+    domain = get_domain(state, DOMAIN_CUSTODY_BIT_CHALLENGE, get_current_epoch(state))
+    assert bls_verify(challenger.pubkey, signing_root(challenge), challenge.signature, domain)
+    # Verify challenger is slashable
     assert is_slashable_validator(challenger, get_current_epoch(state))
-    # Verify the attestation
-    attestation = challenge.attestation
+    # Verify attestation
     validate_indexed_attestation(state, convert_to_indexed(state, attestation))
-    # Verify the attestation is eligible for challenging
+    # Verify attestation is eligible for challenging
     responder = state.validators[challenge.responder_index]
-    assert (slot_to_epoch(attestation.data.slot) + responder.max_reveal_lateness <=
-            get_validators_custody_reveal_period(state, challenge.responder_index))
+    assert epoch + responder.max_reveal_lateness <= get_reveal_period(state, challenge.responder_index)
     # Verify the responder participated in the attestation
-    attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bitfield)
+    attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bits)
     assert challenge.responder_index in attesters
-    # A validator can be the challenger for at most one challenge at a time
+    # Verifier challenger is not already challenging
     for record in state.custody_bit_challenge_records:
         assert record.challenger_index != challenge.challenger_index
-    # Verify the responder is a valid custody key
+    # Verify the responder custody key
     epoch_to_sign = get_randao_epoch_for_custody_period(
-        get_validators_custody_reveal_period(
-            state,
-            challenge.responder_index,
-            epoch=slot_to_epoch(attestation.data.slot)),
-        challenge.responder_index
+        get_reveal_period(state, challenge.responder_index, epoch),
+        challenge.responder_index,
     )
-    assert bls_verify(
-        pubkey=responder.pubkey,
-        message_hash=hash_tree_root(epoch_to_sign),
-        signature=challenge.responder_key,
-        domain=get_domain(
-            state=state,
-            domain_type=DOMAIN_RANDAO,
-            message_epoch=epoch_to_sign,
-        ),
-    )
+    domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign)
+    assert bls_verify(responder.pubkey, hash_tree_root(epoch_to_sign), challenge.responder_key, domain)
     # Verify the chunk count
     chunk_count = get_custody_chunk_count(attestation.data.crosslink)
-    assert verify_bitfield(challenge.chunk_bits, chunk_count)
     # Verify the first bit of the hash of the chunk bits does not equal the custody bit
-    custody_bit = get_bitfield_bit(attestation.custody_bitfield, attesters.index(challenge.responder_index))
-    assert custody_bit != get_bitfield_bit(get_chunk_bits_root(challenge.chunk_bits), 0)
+    committee = get_crosslink_committee(state, epoch, shard)
+    custody_bit = attestation.custody_bits[committee.index(challenge.responder_index)]
+    assert custody_bit != get_bit(get_chunk_bits_root(challenge.chunk_bits), 0)
     # Add new bit challenge record
     new_record = CustodyBitChallengeRecord(
         challenge_index=state.custody_challenge_index,
@@ -581,7 +563,6 @@ def process_bit_challenge(state: BeaconState,
     )
     replace_empty_or_append(state.custody_bit_challenge_records, new_record)
     state.custody_challenge_index += 1
     # Postpone responder withdrawability
     responder.withdrawable_epoch = FAR_FUTURE_EPOCH
 ```
@ -593,8 +574,7 @@ Verify that `len(block.body.custody_responses) <= MAX_CUSTODY_RESPONSES`.
For each `response` in `block.body.custody_responses`, run the following function: For each `response` in `block.body.custody_responses`, run the following function:
```python ```python
def process_custody_response(state: BeaconState, def process_custody_response(state: BeaconState, response: CustodyResponse) -> None:
response: CustodyResponse) -> None:
chunk_challenge = next((record for record in state.custody_chunk_challenge_records chunk_challenge = next((record for record in state.custody_chunk_challenge_records
if record.challenge_index == response.challenge_index), None) if record.challenge_index == response.challenge_index), None)
if chunk_challenge is not None: if chunk_challenge is not None:
@ -661,7 +641,7 @@ def process_bit_challenge_response(state: BeaconState,
) )
# Verify the chunk bit does not match the challenge chunk bit # Verify the chunk bit does not match the challenge chunk bit
assert (get_custody_chunk_bit(challenge.responder_key, response.chunk) assert (get_custody_chunk_bit(challenge.responder_key, response.chunk)
!= get_bitfield_bit(challenge.chunk_bits_leaf, response.chunk_index % 256)) != get_bit(challenge.chunk_bits_leaf, response.chunk_index % 256))
# Clear the challenge # Clear the challenge
records = state.custody_bit_challenge_records records = state.custody_bit_challenge_records
records[records.index(challenge)] = CustodyBitChallengeRecord() records[records.index(challenge)] = CustodyBitChallengeRecord()
@ -682,7 +662,7 @@ Run `process_reveal_deadlines(state)` immediately after `process_registry_update
def process_reveal_deadlines(state: BeaconState) -> None: def process_reveal_deadlines(state: BeaconState) -> None:
for index, validator in enumerate(state.validators): for index, validator in enumerate(state.validators):
deadline = validator.next_custody_reveal_period + (CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD) deadline = validator.next_custody_reveal_period + (CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD)
if get_validators_custody_reveal_period(state, ValidatorIndex(index)) > deadline: if get_reveal_period(state, ValidatorIndex(index)) > deadline:
slash_validator(state, ValidatorIndex(index)) slash_validator(state, ValidatorIndex(index))
``` ```

View File

@ -92,7 +92,7 @@ class ShardAttestation(Container):
slot: Slot slot: Slot
shard: Shard shard: Shard
shard_block_root: Bytes32 shard_block_root: Bytes32
aggregation_bitfield: Bytes[PLACEHOLDER] aggregation_bits: Bitlist[PLACEHOLDER]
aggregate_signature: BLSSignature aggregate_signature: BLSSignature
``` ```
@ -230,10 +230,9 @@ def verify_shard_attestation_signature(state: BeaconState,
attestation: ShardAttestation) -> None: attestation: ShardAttestation) -> None:
data = attestation.data data = attestation.data
persistent_committee = get_persistent_committee(state, data.shard, data.slot) persistent_committee = get_persistent_committee(state, data.shard, data.slot)
assert verify_bitfield(attestation.aggregation_bitfield, len(persistent_committee))
pubkeys = [] pubkeys = []
for i, index in enumerate(persistent_committee): for i, index in enumerate(persistent_committee):
if get_bitfield_bit(attestation.aggregation_bitfield, i) == 0b1: if attestation.aggregation_bits[i]:
validator = state.validators[index] validator = state.validators[index]
assert is_active_validator(validator, get_current_epoch(state)) assert is_active_validator(validator, get_current_epoch(state))
pubkeys.append(validator.pubkey) pubkeys.append(validator.pubkey)

View File

@ -168,7 +168,7 @@ If a client wants to update its `finalized_header` it asks the network for a `Bl
{ {
'header': BeaconBlockHeader, 'header': BeaconBlockHeader,
'shard_aggregate_signature': BLSSignature, 'shard_aggregate_signature': BLSSignature,
'shard_bitfield': 'bytes', 'shard_bits': Bitlist[PLACEHOLDER],
'shard_parent_block': ShardBlock, 'shard_parent_block': ShardBlock,
} }
``` ```
@ -180,13 +180,13 @@ def verify_block_validity_proof(proof: BlockValidityProof, validator_memory: Val
assert proof.shard_parent_block.beacon_chain_root == hash_tree_root(proof.header) assert proof.shard_parent_block.beacon_chain_root == hash_tree_root(proof.header)
committee = compute_committee(proof.header, validator_memory) committee = compute_committee(proof.header, validator_memory)
# Verify that we have >=50% support # Verify that we have >=50% support
support_balance = sum([v.effective_balance for i, v in enumerate(committee) if get_bitfield_bit(proof.shard_bitfield, i) is True]) support_balance = sum([v.effective_balance for i, v in enumerate(committee) if proof.shard_bits[i]])
total_balance = sum([v.effective_balance for i, v in enumerate(committee)]) total_balance = sum([v.effective_balance for i, v in enumerate(committee)])
assert support_balance * 2 > total_balance assert support_balance * 2 > total_balance
# Verify shard attestations # Verify shard attestations
group_public_key = bls_aggregate_pubkeys([ group_public_key = bls_aggregate_pubkeys([
v.pubkey for v, index in enumerate(committee) v.pubkey for v, index in enumerate(committee)
if get_bitfield_bit(proof.shard_bitfield, index) is True if proof.shard_bits[index]
]) ])
assert bls_verify( assert bls_verify(
pubkey=group_public_key, pubkey=group_public_key,
@ -196,4 +196,4 @@ def verify_block_validity_proof(proof: BlockValidityProof, validator_memory: Val
) )
``` ```
The size of this proof is only 200 (header) + 96 (signature) + 16 (bitfield) + 352 (shard block) = 664 bytes. It can be reduced further by replacing `ShardBlock` with `MerklePartial(lambda x: x.beacon_chain_root, ShardBlock)`, which would cut off ~220 bytes. The size of this proof is only 200 (header) + 96 (signature) + 16 (bits) + 352 (shard block) = 664 bytes. It can be reduced further by replacing `ShardBlock` with `MerklePartial(lambda x: x.beacon_chain_root, ShardBlock)`, which would cut off ~220 bytes.

View File

@ -15,9 +15,9 @@
- [Default values](#default-values) - [Default values](#default-values)
- [Illegal types](#illegal-types) - [Illegal types](#illegal-types)
- [Serialization](#serialization) - [Serialization](#serialization)
- [`"uintN"`](#uintn) - [`uintN`](#uintn)
- [`"bool"`](#bool) - [`boolean`](#boolean)
- [`"null`](#null) - [`null`](#null)
- [Vectors, containers, lists, unions](#vectors-containers-lists-unions) - [Vectors, containers, lists, unions](#vectors-containers-lists-unions)
- [Deserialization](#deserialization) - [Deserialization](#deserialization)
- [Merkleization](#merkleization) - [Merkleization](#merkleization)
@ -37,36 +37,45 @@
## Typing ## Typing
### Basic types ### Basic types
* `"uintN"`: `N`-bit unsigned integer (where `N in [8, 16, 32, 64, 128, 256]`) * `uintN`: `N`-bit unsigned integer (where `N in [8, 16, 32, 64, 128, 256]`)
* `"bool"`: `True` or `False` * `boolean`: `True` or `False`
### Composite types ### Composite types
* **container**: ordered heterogeneous collection of values * **container**: ordered heterogeneous collection of values
* key-pair curly bracket notation `{}`, e.g. `{"foo": "uint64", "bar": "bool"}` * python dataclass notation with key-type pairs, e.g.
* **vector**: ordered fixed-length homogeneous collection of values ```python
* angle bracket notation `[type, N]`, e.g. `["uint64", N]` class ContainerExample(Container):
* **list**: ordered variable-length homogeneous collection of values foo: uint64
* angle bracket notation `[type]`, e.g. `["uint64"]` bar: boolean
```
* **vector**: ordered fixed-length homogeneous collection, with `N` values
* notation `Vector[type, N]`, e.g. `Vector[uint64, N]`
* **list**: ordered variable-length homogeneous collection, limited to `N` values
* notation `List[type, N]`, e.g. `List[uint64, N]`
* **bitvector**: ordered fixed-length collection of `boolean` values, with `N` bits
* notation `Bitvector[N]`
* **bitlist**: ordered variable-length collection of `boolean` values, limited to `N` bits
* notation `Bitlist[N]`
* **union**: union type containing one of the given subtypes * **union**: union type containing one of the given subtypes
    * round bracket notation `(type_1, type_2, ...)`, e.g. `("null", "uint64")` * notation `Union[type_1, type_2, ...]`, e.g. `Union[null, uint64]` (see the sketch after this list)
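To make the notation above concrete, the following is a minimal sketch (hypothetical type and field names) using the pyspec SSZ typing helpers that this repository's tooling already imports; it is illustrative only, not part of the spec:

```python
from eth2spec.utils.ssz.ssz_typing import (
    Container, Vector, List, Bitvector, Bitlist, uint64, boolean,
)

class ExampleContainer(Container):
    fixed_values: Vector[uint64, 4]      # vector: exactly 4 uint64 values
    variable_values: List[uint64, 128]   # list: up to 128 uint64 values
    flag: boolean                        # basic boolean value
    fixed_bits: Bitvector[8]             # bitvector: exactly 8 bits
    variable_bits: Bitlist[2048]         # bitlist: up to 2048 bits
```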
### Variable-size and fixed-size ### Variable-size and fixed-size
We recursively define "variable-size" types to be lists and unions and all types that contain a variable-size type. All other types are said to be "fixed-size". We recursively define "variable-size" types to be lists, unions, `Bitlist` and all types that contain a variable-size type. All other types are said to be "fixed-size".
### Aliases ### Aliases
For convenience we alias: For convenience we alias:
* `"byte"` to `"uint8"` (this is a basic type) * `bit` to `boolean`
* `"bytes"` to `["byte"]` (this is *not* a basic type) * `byte` to `uint8` (this is a basic type)
* `"bytesN"` to `["byte", N]` (this is *not* a basic type) * `BytesN` to `Vector[byte, N]` (this is *not* a basic type)
* `"null"`: `{}`, i.e. the empty container * `null`: `{}`, i.e. the empty container
### Default values ### Default values
The default value of a type upon initialization is recursively defined using `0` for `"uintN"`, `False` for `"bool"`, and `[]` for lists. Unions default to the first type in the union (with type index zero), which is `"null"` if present in the union. The default value of a type upon initialization is recursively defined using `0` for `uintN`, `False` for `boolean` and the elements of `Bitvector`, and `[]` for lists and `Bitlist`. Unions default to the first type in the union (with type index zero), which is `null` if present in the union.
#### `is_empty` #### `is_empty`
@ -74,34 +83,50 @@ An SSZ object is called empty (and thus, `is_empty(object)` returns true) if it
### Illegal types ### Illegal types
Empty vector types (i.e. `[subtype, 0]` for some `subtype`) are not legal. The `"null"` type is only legal as the first type in a union subtype (i.e. with type index zero). Empty vector types (i.e. `Vector[subtype, 0]` for some `subtype`) are not legal. The `null` type is only legal as the first type in a union subtype (i.e. with type index zero).
## Serialization ## Serialization
We recursively define the `serialize` function which consumes an object `value` (of the type specified) and returns a bytestring of type `"bytes"`. We recursively define the `serialize` function which consumes an object `value` (of the type specified) and returns a bytestring of type `bytes`.
*Note*: In the function definitions below (`serialize`, `hash_tree_root`, `signing_root`, `is_variable_size`, etc.) objects implicitly carry their type. *Note*: In the function definitions below (`serialize`, `hash_tree_root`, `signing_root`, `is_variable_size`, etc.) objects implicitly carry their type.
### `"uintN"` ### `uintN`
```python ```python
assert N in [8, 16, 32, 64, 128, 256] assert N in [8, 16, 32, 64, 128, 256]
return value.to_bytes(N // 8, "little") return value.to_bytes(N // 8, "little")
``` ```
### `"bool"` ### `boolean`
```python ```python
assert value in (True, False) assert value in (True, False)
return b"\x01" if value is True else b"\x00" return b"\x01" if value is True else b"\x00"
``` ```
### `"null"` ### `null`
```python ```python
return b"" return b""
``` ```
### `Bitvector[N]`
```python
as_integer = sum([value[i] << i for i in range(len(value))])
return as_integer.to_bytes((N + 7) // 8, "little")
```
### `Bitlist[N]`
Note that from the offset coding, the length (in bytes) of the bitlist is known. An additional leading `1` bit is added so that the length in bits will also be known.
```python
as_integer = (1 << len(value)) + sum([value[i] << i for i in range(len(value))])
return as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little")
```
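As a sanity check, here is a small worked example (assumed value) of the encoding above, showing the delimiting bit:

```python
# Serialize the hypothetical Bitlist value [1, 0, 1]
value = [1, 0, 1]
as_integer = (1 << len(value)) + sum([value[i] << i for i in range(len(value))])
assert as_integer == 0b1101  # data bits 1, 0, 1 in the low positions, delimiting bit above them
assert as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little") == b"\x0d"
# An empty bitlist carries only the delimiting bit and serializes to b"\x01"
```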
### Vectors, containers, lists, unions ### Vectors, containers, lists, unions
```python ```python
@ -136,23 +161,47 @@ return serialized_type_index + serialized_bytes
Because serialization is an injective function (i.e. two distinct objects of the same type will serialize to different values) any bytestring has at most one object it could deserialize to. Efficient algorithms for computing this object can be found in [the implementations](#implementations). Because serialization is an injective function (i.e. two distinct objects of the same type will serialize to different values) any bytestring has at most one object it could deserialize to. Efficient algorithms for computing this object can be found in [the implementations](#implementations).
Note that deserialization requires hardening against invalid inputs. A non-exhaustive list:
- Offsets: out of order, out of range, mismatching minimum element size
- Scope: Extra unused bytes, not aligned with element size.
- More elements than a list limit allows. Part of enforcing consensus.
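For illustration only, a minimal (non-normative) sketch of the offset checks implied by the list above, assuming the offsets have already been read from the fixed-size part of a variable-size value:

```python
def offsets_are_sane(offsets: list, fixed_part_size: int, scope: int) -> bool:
    # Hypothetical helper: rejects the offset problems listed above
    if len(offsets) == 0:
        return True
    if offsets[0] != fixed_part_size:  # first offset must point just past the fixed-size part
        return False
    for prev, curr in zip(offsets, offsets[1:]):
        if curr < prev:  # offsets must not be out of order
            return False
    return offsets[-1] <= scope  # the last element must not run past the scope
```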
## Merkleization ## Merkleization
We first define helper functions: We first define helper functions:
* `pack`: Given ordered objects of the same basic type, serialize them, pack them into `BYTES_PER_CHUNK`-byte chunks, right-pad the last chunk with zero bytes, and return the chunks. * `pack`: Given ordered objects of the same basic type, serialize them, pack them into `BYTES_PER_CHUNK`-byte chunks, right-pad the last chunk with zero bytes, and return the chunks.
* `merkleize`: Given ordered `BYTES_PER_CHUNK`-byte chunks, if necessary append zero chunks so that the number of chunks is a power of two, Merkleize the chunks, and return the root. Note that `merkleize` on a single chunk is simply that chunk, i.e. the identity when the number of chunks is one. * `next_pow_of_two(i)`: get the next power of 2 of `i`, if not already a power of 2, with 0 mapping to 1. Examples: `0->1, 1->1, 2->2, 3->4, 4->4, 6->8, 9->16`
* `merkleize(data, pad_for)`: Given ordered `BYTES_PER_CHUNK`-byte chunks, if necessary append zero chunks so that the number of chunks is a power of two, Merkleize the chunks, and return the root.
The merkleization depends on the effective input, which can be padded: if `pad_for=L`, then pad the `data` with zeroed chunks to `next_pow_of_two(L)` (virtually for memory efficiency).
Then, merkleize the chunks (empty input is padded to 1 zero chunk):
- If `1` chunk: A single chunk is simply that chunk, i.e. the identity when the number of chunks is one.
- If `> 1` chunks: pad to `next_pow_of_two(len(chunks))`, merkleize as binary tree.
* `mix_in_length`: Given a Merkle root `root` and a length `length` (`"uint256"` little-endian serialization) return `hash(root + length)`. * `mix_in_length`: Given a Merkle root `root` and a length `length` (`"uint256"` little-endian serialization) return `hash(root + length)`.
* `mix_in_type`: Given a Merkle root `root` and a type_index `type_index` (`"uint256"` little-endian serialization) return `hash(root + type_index)`. * `mix_in_type`: Given a Merkle root `root` and a type_index `type_index` (`"uint256"` little-endian serialization) return `hash(root + type_index)`.
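A minimal reference sketch of these helpers, using eager padding rather than the virtual padding described above, and assuming SHA-256 as the hash function:

```python
from hashlib import sha256

BYTES_PER_CHUNK = 32
ZERO_CHUNK = b"\x00" * BYTES_PER_CHUNK

def hash_fn(data: bytes) -> bytes:
    return sha256(data).digest()

def next_pow_of_two(i: int) -> int:
    return 1 if i <= 1 else 1 << (i - 1).bit_length()

def merkleize(chunks: list, pad_for: int = 1) -> bytes:
    count = next_pow_of_two(max(len(chunks), pad_for, 1))
    padded = list(chunks) + [ZERO_CHUNK] * (count - len(chunks))
    while len(padded) > 1:  # hash pairs of chunks until a single root remains
        padded = [hash_fn(padded[i] + padded[i + 1]) for i in range(0, len(padded), 2)]
    return padded[0]

def mix_in_length(root: bytes, length: int) -> bytes:
    return hash_fn(root + length.to_bytes(32, "little"))

def mix_in_type(root: bytes, type_index: int) -> bytes:
    return hash_fn(root + type_index.to_bytes(32, "little"))
```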
We now define Merkleization `hash_tree_root(value)` of an object `value` recursively: We now define Merkleization `hash_tree_root(value)` of an object `value` recursively:
* `merkleize(pack(value))` if `value` is a basic object or a vector of basic objects * `merkleize(pack(value))` if `value` is a basic object or a vector of basic objects
* `mix_in_length(merkleize(pack(value)), len(value))` if `value` is a list of basic objects * `mix_in_length(merkleize(pack(value), pad_for=(N * elem_size / BYTES_PER_CHUNK)), len(value))` if `value` is a list of basic objects.
* `merkleize([hash_tree_root(element) for element in value])` if `value` is a vector of composite objects or a container * `merkleize([hash_tree_root(element) for element in value])` if `value` is a vector of composite objects or a container
* `mix_in_length(merkleize([hash_tree_root(element) for element in value]), len(value))` if `value` is a list of composite objects * `mix_in_length(merkleize([hash_tree_root(element) for element in value], pad_for=N), len(value))` if `value` is a list of composite objects.
* `mix_in_type(merkleize(value.value), value.type_index)` if `value` is of union type * `mix_in_type(merkleize(value.value), value.type_index)` if `value` is of union type
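Combining these rules with the helper sketch above, a worked (assumed-value) example for a `List[uint64, 4]` containing `[1, 2, 3]`: the four-element limit packs into a single 32-byte chunk, so `pad_for = N * elem_size / BYTES_PER_CHUNK = 1`:

```python
elements = [1, 2, 3]
packed = b"".join(v.to_bytes(8, "little") for v in elements)
chunks = [packed.ljust(BYTES_PER_CHUNK, b"\x00")]  # pack and right-pad the last chunk
root = mix_in_length(merkleize(chunks, pad_for=1), len(elements))
```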
### Merkleization of `Bitvector[N]`
```python
as_integer = sum([value[i] << i for i in range(len(value))])
return merkleize(as_integer.to_bytes((N + 7) // 8, "little"))
```
### Merkleization of `Bitlist[N]`
```python
as_integer = sum([value[i] << i for i in range(len(value))])
return mix_in_length(merkleize(as_integer.to_bytes((N + 7) // 8, "little")), len(value))
```
## Self-signed containers ## Self-signed containers
Let `value` be a self-signed container object. The convention is that the signature (e.g. a `"bytes96"` BLS12-381 signature) be the last field of `value`. Further, the signed message for `value` is `signing_root(value) = hash_tree_root(truncate_last(value))` where `truncate_last` truncates the last element of `value`. Let `value` be a self-signed container object. The convention is that the signature (e.g. a `"bytes96"` BLS12-381 signature) be the last field of `value`. Further, the signed message for `value` is `signing_root(value) = hash_tree_root(truncate_last(value))` where `truncate_last` truncates the last element of `value`.

View File

@ -44,8 +44,8 @@
- [Crosslink vote](#crosslink-vote) - [Crosslink vote](#crosslink-vote)
- [Construct attestation](#construct-attestation) - [Construct attestation](#construct-attestation)
- [Data](#data) - [Data](#data)
- [Aggregation bitfield](#aggregation-bitfield) - [Aggregation bits](#aggregation-bits)
- [Custody bitfield](#custody-bitfield) - [Custody bits](#custody-bits)
- [Aggregate signature](#aggregate-signature) - [Aggregate signature](#aggregate-signature)
- [How to avoid slashing](#how-to-avoid-slashing) - [How to avoid slashing](#how-to-avoid-slashing)
- [Proposer slashing](#proposer-slashing) - [Proposer slashing](#proposer-slashing)
@ -221,19 +221,26 @@ epoch_signature = bls_sign(
##### Eth1 Data ##### Eth1 Data
`block.eth1_data` is a mechanism used by block proposers vote on a recent Ethereum 1.0 block hash and an associated deposit root found in the Ethereum 1.0 deposit contract. When consensus is formed, `state.eth1_data` is updated, and validator deposits up to this root can be processed. The deposit root can be calculated by calling the `get_deposit_root()` function of the deposit contract using the post-state of the block hash. The `block.eth1_data` field is for block proposers to vote on recent Eth 1.0 data. This recent data contains an Eth 1.0 block hash as well as the associated deposit root (as calculated by the `get_deposit_root()` method of the deposit contract) and deposit count after execution of the corresponding Eth 1.0 block. If over half of the block proposers in the current Eth 1.0 voting period vote for the same `eth1_data` then `state.eth1_data` updates at the end of the voting period. Each deposit in `block.body.deposits` must verify against `state.eth1_data.deposit_root`.
* Let `D` be the list of `Eth1DataVote` objects `vote` in `state.eth1_data_votes` where: Let `get_eth1_data(distance: int) -> Eth1Data` be the (subjective) function that returns the Eth 1.0 data at distance `distance` relative to the Eth 1.0 head at the start of the current Eth 1.0 voting period. Let `previous_eth1_distance` be the distance relative to the Eth 1.0 block corresponding to `state.eth1_data.block_hash` at the start of the current Eth 1.0 voting period. An honest block proposer sets `block.eth1_data = get_eth1_vote(state, previous_eth1_distance)` where:
* `vote.eth1_data.block_hash` is the hash of an Eth 1.0 block that is (i) part of the canonical chain, (ii) >= `ETH1_FOLLOW_DISTANCE` blocks behind the head, and (iii) newer than `state.eth1_data.block_hash`.
* `vote.eth1_data.deposit_count` is the deposit count of the Eth 1.0 deposit contract at the block defined by `vote.eth1_data.block_hash`. ```python
* `vote.eth1_data.deposit_root` is the deposit root of the Eth 1.0 deposit contract at the block defined by `vote.eth1_data.block_hash`. def get_eth1_vote(state: BeaconState, previous_eth1_distance: uint64) -> Eth1Data:
* If `D` is empty: new_eth1_data = [get_eth1_data(distance) for distance in range(ETH1_FOLLOW_DISTANCE, 2 * ETH1_FOLLOW_DISTANCE)]
* Let `block_hash` be the block hash of the `ETH1_FOLLOW_DISTANCE`'th ancestor of the head of the canonical Eth 1.0 chain. all_eth1_data = [get_eth1_data(distance) for distance in range(ETH1_FOLLOW_DISTANCE, previous_eth1_distance)]
* Let `deposit_root` and `deposit_count` be the deposit root and deposit count of the Eth 1.0 deposit contract in the post-state of the block referenced by `block_hash`
* Let `best_vote_data = Eth1Data(block_hash=block_hash, deposit_root=deposit_root, deposit_count=deposit_count)`. valid_votes = []
* If `D` is nonempty: for slot, vote in enumerate(state.eth1_data_votes):
* Let `best_vote_data` be the `eth1_data` member of `D` that has the highest vote count (`D.count(eth1_data)`), breaking ties by favoring block hashes with higher associated block height. period_tail = slot % SLOTS_PER_ETH1_VOTING_PERIOD >= integer_square_root(SLOTS_PER_ETH1_VOTING_PERIOD)
* Set `block.eth1_data = best_vote_data`. if vote in new_eth1_data or (period_tail and vote in all_eth1_data):
valid_votes.append(vote)
return max(valid_votes,
key=lambda v: (valid_votes.count(v), -all_eth1_data.index(v)), # Tiebreak by smallest distance
default=get_eth1_data(ETH1_FOLLOW_DISTANCE),
)
```
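A hedged usage sketch: the proposer derives `previous_eth1_distance` from its own (subjective) view of the Eth 1.0 chain and plugs it in; `distance_to_eth1_block` is an assumed client-side helper, not part of the spec:

```python
previous_eth1_distance = distance_to_eth1_block(state.eth1_data.block_hash)  # assumed helper
block.eth1_data = get_eth1_vote(state, previous_eth1_distance)
```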
##### Signature ##### Signature
@ -322,19 +329,15 @@ Next, the validator creates `attestation`, an [`Attestation`](../core/0_beacon-c
Set `attestation.data = attestation_data` where `attestation_data` is the `AttestationData` object defined in the previous section, [attestation data](#attestation-data). Set `attestation.data = attestation_data` where `attestation_data` is the `AttestationData` object defined in the previous section, [attestation data](#attestation-data).
##### Aggregation bitfield ##### Aggregation bits
* Let `aggregation_bitfield` be a byte array filled with zeros of length `(len(committee) + 7) // 8`. * Let `attestation.aggregation_bits` be a `Bitlist[MAX_INDICES_PER_ATTESTATION]` of length `len(committee)`, where the bit at the index of the validator in `committee` is set to `0b1`.
* Let `index_into_committee` be the index into the validator's `committee` at which `validator_index` is located.
* Set `aggregation_bitfield[index_into_committee // 8] |= 2 ** (index_into_committee % 8)`.
* Set `attestation.aggregation_bitfield = aggregation_bitfield`.
*Note*: Calling `get_attesting_indices(state, attestation.data, attestation.aggregation_bitfield)` should return a list of length equal to 1, containing `validator_index`. *Note*: Calling `get_attesting_indices(state, attestation.data, attestation.aggregation_bits)` should return a list of length equal to 1, containing `validator_index`.
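A minimal sketch (assuming `state`, `attestation_data`, `attestation`, and `validator_index` from the surrounding context) of building this single-attester bitlist with the pyspec `Bitlist` type:

```python
from eth2spec.utils.ssz.ssz_typing import Bitlist

committee = get_crosslink_committee(state, attestation_data.target.epoch, attestation_data.crosslink.shard)
index_into_committee = committee.index(validator_index)
aggregation_bits = Bitlist[MAX_INDICES_PER_ATTESTATION](*([0b0] * len(committee)))
aggregation_bits[index_into_committee] = 0b1  # only this validator's bit is set
attestation.aggregation_bits = aggregation_bits
```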
##### Custody bitfield ##### Custody bits
* Let `custody_bitfield` be a byte array filled with zeros of length `(len(committee) + 7) // 8`. * Let `attestation.custody_bits` be a `Bitlist[MAX_INDICES_PER_ATTESTATION]` of length `len(committee)`, filled with zeros.
* Set `attestation.custody_bitfield = custody_bitfield`.
*Note*: This is a stub for Phase 0. *Note*: This is a stub for Phase 0.

View File

@ -415,16 +415,16 @@ components:
type: object type: object
description: "The [`Attestation`](https://github.com/ethereum/eth2.0-specs/blob/master/specs/core/0_beacon-chain.md#attestation) object from the Eth2.0 spec." description: "The [`Attestation`](https://github.com/ethereum/eth2.0-specs/blob/master/specs/core/0_beacon-chain.md#attestation) object from the Eth2.0 spec."
properties: properties:
aggregation_bitfield: aggregation_bits:
type: string type: string
format: byte format: byte
pattern: "^0x[a-fA-F0-9]+$" pattern: "^0x[a-fA-F0-9]+$"
description: "Attester aggregation bitfield." description: "Attester aggregation bits."
custody_bitfield: custody_bits:
type: string type: string
format: byte format: byte
pattern: "^0x[a-fA-F0-9]+$" pattern: "^0x[a-fA-F0-9]+$"
description: "Custody bitfield." description: "Custody bits."
signature: signature:
type: string type: string
format: byte format: byte

View File

@ -1,13 +1,13 @@
from typing import Any from typing import Any
from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import ( from eth2spec.utils.ssz.ssz_typing import (
SSZType, SSZValue, uint, Container, Bytes, List, Bool, SSZType, SSZValue, uint, Container, Bytes, List, boolean,
Vector, BytesN Vector, BytesN
) )
def decode(data: Any, typ: SSZType) -> SSZValue: def decode(data: Any, typ: SSZType) -> SSZValue:
if issubclass(typ, (uint, Bool)): if issubclass(typ, (uint, boolean)):
return typ(data) return typ(data)
elif issubclass(typ, (List, Vector)): elif issubclass(typ, (List, Vector)):
return typ(decode(element, typ.elem_type) for element in data) return typ(decode(element, typ.elem_type) for element in data)

View File

@ -1,6 +1,6 @@
from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import ( from eth2spec.utils.ssz.ssz_typing import (
SSZValue, uint, Container, Bool SSZValue, uint, Container, boolean
) )
@ -10,7 +10,7 @@ def encode(value: SSZValue, include_hash_tree_roots=False):
if value.type().byte_len > 8: if value.type().byte_len > 8:
return str(int(value)) return str(int(value))
return int(value) return int(value)
elif isinstance(value, Bool): elif isinstance(value, boolean):
return value == 1 return value == 1
elif isinstance(value, list): # normal python lists, ssz-List, Vector elif isinstance(value, list): # normal python lists, ssz-List, Vector
return [encode(element, include_hash_tree_roots) for element in value] return [encode(element, include_hash_tree_roots) for element in value]

View File

@ -2,8 +2,8 @@ from random import Random
from enum import Enum from enum import Enum
from eth2spec.utils.ssz.ssz_typing import ( from eth2spec.utils.ssz.ssz_typing import (
SSZType, SSZValue, BasicValue, BasicType, uint, Container, Bytes, List, Bool, SSZType, SSZValue, BasicValue, BasicType, uint, Container, Bytes, List, boolean,
Vector, BytesN Vector, BytesN, Bitlist, Bitvector
) )
# in bytes # in bytes
@ -83,12 +83,12 @@ def get_random_ssz_object(rng: Random,
return get_max_basic_value(typ) return get_max_basic_value(typ)
else: else:
return get_random_basic_value(rng, typ) return get_random_basic_value(rng, typ)
elif issubclass(typ, Vector): elif issubclass(typ, Vector) or issubclass(typ, Bitvector):
return typ( return typ(
get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, chaos) get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, chaos)
for _ in range(typ.length) for _ in range(typ.length)
) )
elif issubclass(typ, List): elif issubclass(typ, List) or issubclass(typ, Bitlist):
length = rng.randint(0, min(typ.length, max_list_length)) length = rng.randint(0, min(typ.length, max_list_length))
if mode == RandomizationMode.mode_one_count: if mode == RandomizationMode.mode_one_count:
length = 1 length = 1
@ -118,7 +118,7 @@ def get_random_bytes_list(rng: Random, length: int) -> bytes:
def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue: def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue:
if issubclass(typ, Bool): if issubclass(typ, boolean):
return typ(rng.choice((True, False))) return typ(rng.choice((True, False)))
elif issubclass(typ, uint): elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES assert typ.byte_len in UINT_BYTE_SIZES
@ -128,7 +128,7 @@ def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue:
def get_min_basic_value(typ: BasicType) -> BasicValue: def get_min_basic_value(typ: BasicType) -> BasicValue:
if issubclass(typ, Bool): if issubclass(typ, boolean):
return typ(False) return typ(False)
elif issubclass(typ, uint): elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES assert typ.byte_len in UINT_BYTE_SIZES
@ -138,7 +138,7 @@ def get_min_basic_value(typ: BasicType) -> BasicValue:
def get_max_basic_value(typ: BasicType) -> BasicValue: def get_max_basic_value(typ: BasicType) -> BasicValue:
if issubclass(typ, Bool): if issubclass(typ, boolean):
return typ(True) return typ(True)
elif issubclass(typ, uint): elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES assert typ.byte_len in UINT_BYTE_SIZES

View File

@ -18,8 +18,15 @@ def translate_typ(typ) -> ssz.BaseSedes:
elif issubclass(typ, spec_ssz.Vector): elif issubclass(typ, spec_ssz.Vector):
return ssz.Vector(translate_typ(typ.elem_type), typ.length) return ssz.Vector(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.List): elif issubclass(typ, spec_ssz.List):
# TODO: Make py-ssz List support the new length-limited List
return ssz.List(translate_typ(typ.elem_type)) return ssz.List(translate_typ(typ.elem_type))
elif issubclass(typ, spec_ssz.Bool): elif issubclass(typ, spec_ssz.Bitlist):
# TODO: Once Bitlist implemented in py-ssz, use appropriate type
return ssz.List(translate_typ(typ.elem_type))
elif issubclass(typ, spec_ssz.Bitvector):
# TODO: Once Bitvector implemented in py-ssz, use appropriate type
return ssz.Vector(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.boolean):
return ssz.boolean return ssz.boolean
elif issubclass(typ, spec_ssz.uint): elif issubclass(typ, spec_ssz.uint):
if typ.byte_len == 1: if typ.byte_len == 1:
@ -64,10 +71,14 @@ def translate_value(value, typ):
raise TypeError("invalid uint size") raise TypeError("invalid uint size")
elif issubclass(typ, spec_ssz.List): elif issubclass(typ, spec_ssz.List):
return [translate_value(elem, typ.elem_type) for elem in value] return [translate_value(elem, typ.elem_type) for elem in value]
elif issubclass(typ, spec_ssz.Bool): elif issubclass(typ, spec_ssz.boolean):
return value return value
elif issubclass(typ, spec_ssz.Vector): elif issubclass(typ, spec_ssz.Vector):
return typ(*(translate_value(elem, typ.elem_type) for elem in value)) return typ(*(translate_value(elem, typ.elem_type) for elem in value))
elif issubclass(typ, spec_ssz.Bitlist):
return typ(value)
elif issubclass(typ, spec_ssz.Bitvector):
return typ(value)
elif issubclass(typ, spec_ssz.BytesN): elif issubclass(typ, spec_ssz.BytesN):
return typ(value) return typ(value)
elif issubclass(typ, spec_ssz.Bytes): elif issubclass(typ, spec_ssz.Bytes):

View File

@ -9,7 +9,9 @@ def test_decoder():
rng = Random(123) rng = Random(123)
# check these types only, Block covers a lot of operation types already. # check these types only, Block covers a lot of operation types already.
for typ in [spec.BeaconBlock, spec.BeaconState, spec.IndexedAttestation, spec.AttestationDataAndCustodyBit]: # TODO: Once Bitlists and Bitvectors are supported, add back
# spec.BeaconState and spec.BeaconBlock
for typ in [spec.IndexedAttestation, spec.AttestationDataAndCustodyBit]:
# create a random pyspec value # create a random pyspec value
original = random_value.get_random_ssz_object(rng, typ, 100, 10, original = random_value.get_random_ssz_object(rng, typ, 100, 10,
mode=random_value.RandomizationMode.mode_random, mode=random_value.RandomizationMode.mode_random,

View File

@ -0,0 +1,118 @@
from eth2spec.test.context import with_all_phases, with_state, bls_switch
from eth2spec.test.helpers.attestations import get_valid_attestation
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.state import state_transition_and_sign_block
def add_block_to_store(spec, store, block):
pre_state = store.block_states[block.parent_root]
block_time = pre_state.genesis_time + block.slot * spec.SECONDS_PER_SLOT
if store.time < block_time:
spec.on_tick(store, block_time)
spec.on_block(store, block)
def add_attestation_to_store(spec, store, attestation):
parent_block = store.blocks[attestation.data.beacon_block_root]
pre_state = store.block_states[spec.signing_root(parent_block)]
block_time = pre_state.genesis_time + parent_block.slot * spec.SECONDS_PER_SLOT
next_epoch_time = block_time + spec.SLOTS_PER_EPOCH * spec.SECONDS_PER_SLOT
if store.time < next_epoch_time:
spec.on_tick(store, next_epoch_time)
spec.on_attestation(store, attestation)
@with_all_phases
@with_state
@bls_switch
def test_genesis(spec, state):
# Initialization
store = spec.get_genesis_store(state)
genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root())
assert spec.get_head(store) == spec.signing_root(genesis_block)
@with_all_phases
@with_state
@bls_switch
def test_chain_no_attestations(spec, state):
# Initialization
store = spec.get_genesis_store(state)
genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root())
assert spec.get_head(store) == spec.signing_root(genesis_block)
# On receiving a block of `GENESIS_SLOT + 1` slot
block_1 = build_empty_block_for_next_slot(spec, state)
state_transition_and_sign_block(spec, state, block_1)
add_block_to_store(spec, store, block_1)
# On receiving a block of next epoch
block_2 = build_empty_block_for_next_slot(spec, state)
state_transition_and_sign_block(spec, state, block_2)
add_block_to_store(spec, store, block_2)
assert spec.get_head(store) == spec.signing_root(block_2)
@with_all_phases
@with_state
@bls_switch
def test_split_tie_breaker_no_attestations(spec, state):
genesis_state = state.copy()
# Initialization
store = spec.get_genesis_store(state)
genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root())
assert spec.get_head(store) == spec.signing_root(genesis_block)
# block at slot 1
block_1_state = genesis_state.copy()
block_1 = build_empty_block_for_next_slot(spec, block_1_state)
state_transition_and_sign_block(spec, block_1_state, block_1)
add_block_to_store(spec, store, block_1)
# additional block at slot 1
block_2_state = genesis_state.copy()
block_2 = build_empty_block_for_next_slot(spec, block_2_state)
block_2.body.graffiti = b'\x42' * 32
state_transition_and_sign_block(spec, block_2_state, block_2)
add_block_to_store(spec, store, block_2)
highest_root = max(spec.signing_root(block_1), spec.signing_root(block_2))
assert spec.get_head(store) == highest_root
@with_all_phases
@with_state
@bls_switch
def test_shorter_chain_but_heavier_weight(spec, state):
genesis_state = state.copy()
# Initialization
store = spec.get_genesis_store(state)
genesis_block = spec.BeaconBlock(state_root=state.hash_tree_root())
assert spec.get_head(store) == spec.signing_root(genesis_block)
# build longer tree
long_state = genesis_state.copy()
for i in range(3):
long_block = build_empty_block_for_next_slot(spec, long_state)
state_transition_and_sign_block(spec, long_state, long_block)
add_block_to_store(spec, store, long_block)
# build short tree
short_state = genesis_state.copy()
short_block = build_empty_block_for_next_slot(spec, short_state)
short_block.body.graffiti = b'\x42' * 32
state_transition_and_sign_block(spec, short_state, short_block)
add_block_to_store(spec, store, short_block)
short_attestation = get_valid_attestation(spec, short_state, short_block.slot, signed=True)
add_attestation_to_store(spec, store, short_attestation)
assert spec.get_head(store) == spec.signing_root(short_block)

View File

@ -0,0 +1,122 @@
from eth2spec.test.context import with_all_phases, with_state, bls_switch
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.attestations import get_valid_attestation
from eth2spec.test.helpers.state import next_slot
def run_on_attestation(spec, state, store, attestation, valid=True):
if not valid:
try:
spec.on_attestation(store, attestation)
except AssertionError:
return
else:
assert False
indexed_attestation = spec.convert_to_indexed(state, attestation)
spec.on_attestation(store, attestation)
assert (
store.latest_messages[indexed_attestation.custody_bit_0_indices[0]] ==
spec.LatestMessage(
epoch=attestation.data.target.epoch,
root=attestation.data.beacon_block_root,
)
)
@with_all_phases
@with_state
@bls_switch
def test_on_attestation(spec, state):
store = spec.get_genesis_store(state)
time = 100
spec.on_tick(store, time)
block = build_empty_block_for_next_slot(spec, state, signed=True)
# store block in store
spec.on_block(store, block)
next_slot(spec, state)
attestation = get_valid_attestation(spec, state, slot=block.slot)
run_on_attestation(spec, state, store, attestation)
@with_all_phases
@with_state
@bls_switch
def test_on_attestation_target_not_in_store(spec, state):
store = spec.get_genesis_store(state)
time = 100
spec.on_tick(store, time)
# move to next epoch to make block new target
state.slot += spec.SLOTS_PER_EPOCH
block = build_empty_block_for_next_slot(spec, state, signed=True)
# do not add block to store
next_slot(spec, state)
attestation = get_valid_attestation(spec, state, slot=block.slot)
run_on_attestation(spec, state, store, attestation, False)
@with_all_phases
@with_state
@bls_switch
def test_on_attestation_future_epoch(spec, state):
store = spec.get_genesis_store(state)
time = 3 * spec.SECONDS_PER_SLOT
spec.on_tick(store, time)
block = build_empty_block_for_next_slot(spec, state, signed=True)
# store block in store
spec.on_block(store, block)
next_slot(spec, state)
# move state forward but not store
attestation_slot = block.slot + spec.SLOTS_PER_EPOCH
state.slot = attestation_slot
attestation = get_valid_attestation(spec, state, slot=state.slot)
run_on_attestation(spec, state, store, attestation, False)
@with_all_phases
@with_state
@bls_switch
def test_on_attestation_same_slot(spec, state):
store = spec.get_genesis_store(state)
time = 1 * spec.SECONDS_PER_SLOT
spec.on_tick(store, time)
block = build_empty_block_for_next_slot(spec, state, signed=True)
spec.on_block(store, block)
next_slot(spec, state)
attestation = get_valid_attestation(spec, state, slot=block.slot)
run_on_attestation(spec, state, store, attestation, False)
@with_all_phases
@with_state
@bls_switch
def test_on_attestation_invalid_attestation(spec, state):
store = spec.get_genesis_store(state)
time = 3 * spec.SECONDS_PER_SLOT
spec.on_tick(store, time)
block = build_empty_block_for_next_slot(spec, state, signed=True)
spec.on_block(store, block)
next_slot(spec, state)
attestation = get_valid_attestation(spec, state, slot=block.slot)
# make attestation invalid
attestation.custody_bits[0:8] = [0, 0, 0, 0, 1, 1, 1, 1]
run_on_attestation(spec, state, store, attestation, False)

View File

@ -0,0 +1,89 @@
from eth2spec.utils.ssz.ssz_impl import signing_root
from eth2spec.test.context import with_all_phases, with_state, bls_switch
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
def run_on_block(spec, state, store, block, valid=True):
if not valid:
try:
spec.on_block(store, block)
except AssertionError:
return
else:
assert False
spec.on_block(store, block)
assert store.blocks[signing_root(block)] == block
@with_all_phases
@with_state
@bls_switch
def test_basic(spec, state):
# Initialization
store = spec.get_genesis_store(state)
time = 100
spec.on_tick(store, time)
assert store.time == time
# On receiving a block of `GENESIS_SLOT + 1` slot
block = build_empty_block_for_next_slot(spec, state)
run_on_block(spec, state, store, block)
# On receiving a block of next epoch
store.time = time + spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH
block = build_empty_block_for_next_slot(spec, state)
block.slot += spec.SLOTS_PER_EPOCH
run_on_block(spec, state, store, block)
# TODO: add tests for justified_root and finalized_root
@with_all_phases
@with_state
@bls_switch
def test_on_block_future_block(spec, state):
# Initialization
store = spec.get_genesis_store(state)
# do not tick time
# Fail receiving block of `GENESIS_SLOT + 1` slot
block = build_empty_block_for_next_slot(spec, state)
run_on_block(spec, state, store, block, False)
@with_all_phases
@with_state
@bls_switch
def test_on_block_bad_parent_root(spec, state):
# Initialization
store = spec.get_genesis_store(state)
time = 100
spec.on_tick(store, time)
# Fail receiving block of `GENESIS_SLOT + 1` slot
block = build_empty_block_for_next_slot(spec, state)
block.parent_root = b'\x45' * 32
run_on_block(spec, state, store, block, False)
@with_all_phases
@with_state
@bls_switch
def test_on_block_before_finalized(spec, state):
# Initialization
store = spec.get_genesis_store(state)
time = 100
spec.on_tick(store, time)
store.finalized_checkpoint = spec.Checkpoint(
epoch=store.finalized_checkpoint.epoch + 2,
root=store.finalized_checkpoint.root
)
# Fail receiving block of `GENESIS_SLOT + 1` slot
block = build_empty_block_for_next_slot(spec, state)
run_on_block(spec, state, store, block, False)

View File

@ -1,10 +1,10 @@
from typing import List from typing import List
from eth2spec.test.helpers.bitfields import set_bitfield_bit
from eth2spec.test.helpers.block import build_empty_block_for_next_slot, sign_block from eth2spec.test.helpers.block import build_empty_block_for_next_slot, sign_block
from eth2spec.test.helpers.keys import privkeys from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils.bls import bls_sign, bls_aggregate_signatures from eth2spec.utils.bls import bls_sign, bls_aggregate_signatures
from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import Bitlist
def build_attestation_data(spec, state, slot, shard): def build_attestation_data(spec, state, slot, shard):
@ -24,11 +24,11 @@ def build_attestation_data(spec, state, slot, shard):
epoch_boundary_root = spec.get_block_root(state, spec.get_current_epoch(state)) epoch_boundary_root = spec.get_block_root(state, spec.get_current_epoch(state))
if slot < current_epoch_start_slot: if slot < current_epoch_start_slot:
justified_epoch = state.previous_justified_epoch source_epoch = state.previous_justified_checkpoint.epoch
justified_block_root = state.previous_justified_root source_root = state.previous_justified_checkpoint.root
else: else:
justified_epoch = state.current_justified_epoch source_epoch = state.current_justified_checkpoint.epoch
justified_block_root = state.current_justified_root source_root = state.current_justified_checkpoint.root
if spec.slot_to_epoch(slot) == spec.get_current_epoch(state): if spec.slot_to_epoch(slot) == spec.get_current_epoch(state):
parent_crosslink = state.current_crosslinks[shard] parent_crosslink = state.current_crosslinks[shard]
@ -37,10 +37,8 @@ def build_attestation_data(spec, state, slot, shard):
return spec.AttestationData( return spec.AttestationData(
beacon_block_root=block_root, beacon_block_root=block_root,
source_epoch=justified_epoch, source=spec.Checkpoint(epoch=source_epoch, root=source_root),
source_root=justified_block_root, target=spec.Checkpoint(epoch=spec.slot_to_epoch(slot), root=epoch_boundary_root),
target_epoch=spec.slot_to_epoch(slot),
target_root=epoch_boundary_root,
crosslink=spec.Crosslink( crosslink=spec.Crosslink(
shard=shard, shard=shard,
start_epoch=parent_crosslink.end_epoch, start_epoch=parent_crosslink.end_epoch,
@ -64,18 +62,17 @@ def get_valid_attestation(spec, state, slot=None, signed=False):
crosslink_committee = spec.get_crosslink_committee( crosslink_committee = spec.get_crosslink_committee(
state, state,
attestation_data.target_epoch, attestation_data.target.epoch,
attestation_data.crosslink.shard attestation_data.crosslink.shard,
) )
committee_size = len(crosslink_committee) committee_size = len(crosslink_committee)
bitfield_length = (committee_size + 7) // 8 aggregation_bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](*([0] * committee_size))
aggregation_bitfield = b'\x00' * bitfield_length custody_bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](*([0] * committee_size))
custody_bitfield = b'\x00' * bitfield_length
attestation = spec.Attestation( attestation = spec.Attestation(
aggregation_bitfield=aggregation_bitfield, aggregation_bits=aggregation_bits,
data=attestation_data, data=attestation_data,
custody_bitfield=custody_bitfield, custody_bits=custody_bits,
) )
fill_aggregate_attestation(spec, state, attestation) fill_aggregate_attestation(spec, state, attestation)
if signed: if signed:
@ -108,7 +105,7 @@ def sign_attestation(spec, state, attestation):
participants = spec.get_attesting_indices( participants = spec.get_attesting_indices(
state, state,
attestation.data, attestation.data,
attestation.aggregation_bitfield, attestation.aggregation_bits,
) )
attestation.signature = sign_aggregate_attestation(spec, state, attestation.data, participants) attestation.signature = sign_aggregate_attestation(spec, state, attestation.data, participants)
@ -126,7 +123,7 @@ def get_attestation_signature(spec, state, attestation_data, privkey, custody_bi
domain=spec.get_domain( domain=spec.get_domain(
state=state, state=state,
domain_type=spec.DOMAIN_ATTESTATION, domain_type=spec.DOMAIN_ATTESTATION,
message_epoch=attestation_data.target_epoch, message_epoch=attestation_data.target.epoch,
) )
) )
@ -134,11 +131,11 @@ def get_attestation_signature(spec, state, attestation_data, privkey, custody_bi
def fill_aggregate_attestation(spec, state, attestation): def fill_aggregate_attestation(spec, state, attestation):
crosslink_committee = spec.get_crosslink_committee( crosslink_committee = spec.get_crosslink_committee(
state, state,
attestation.data.target_epoch, attestation.data.target.epoch,
attestation.data.crosslink.shard, attestation.data.crosslink.shard,
) )
for i in range(len(crosslink_committee)): for i in range(len(crosslink_committee)):
attestation.aggregation_bitfield = set_bitfield_bit(attestation.aggregation_bitfield, i) attestation.aggregation_bits[i] = True
def add_attestation_to_state(spec, state, attestation, slot): def add_attestation_to_state(spec, state, attestation, slot):

View File

@ -7,7 +7,7 @@ def get_valid_attester_slashing(spec, state, signed_1=False, signed_2=False):
attestation_1 = get_valid_attestation(spec, state, signed=signed_1) attestation_1 = get_valid_attestation(spec, state, signed=signed_1)
attestation_2 = deepcopy(attestation_1) attestation_2 = deepcopy(attestation_1)
attestation_2.data.target_root = b'\x01' * 32 attestation_2.data.target.root = b'\x01' * 32
if signed_2: if signed_2:
sign_attestation(spec, state, attestation_2) sign_attestation(spec, state, attestation_2)

View File

@ -1,11 +0,0 @@
def set_bitfield_bit(bitfield, i):
"""
Set the bit in ``bitfield`` at position ``i`` to ``1``.
"""
byte_index = i // 8
bit_index = i % 8
return (
bitfield[:byte_index] +
bytes([bitfield[byte_index] | (1 << bit_index)]) +
bitfield[byte_index + 1:]
)

View File

@ -1,5 +1,6 @@
from eth2spec.test.helpers.keys import privkeys from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils.bls import bls_sign from eth2spec.utils.bls import bls_sign, bls_aggregate_signatures
from eth2spec.utils.hash_function import hash
def get_valid_early_derived_secret_reveal(spec, state, epoch=None): def get_valid_early_derived_secret_reveal(spec, state, epoch=None):
@ -10,6 +11,7 @@ def get_valid_early_derived_secret_reveal(spec, state, epoch=None):
if epoch is None: if epoch is None:
epoch = current_epoch + spec.CUSTODY_PERIOD_TO_RANDAO_PADDING epoch = current_epoch + spec.CUSTODY_PERIOD_TO_RANDAO_PADDING
# Generate the secret that is being revealed
reveal = bls_sign( reveal = bls_sign(
message_hash=spec.hash_tree_root(spec.Epoch(epoch)), message_hash=spec.hash_tree_root(spec.Epoch(epoch)),
privkey=privkeys[revealed_index], privkey=privkeys[revealed_index],
@ -19,20 +21,24 @@ def get_valid_early_derived_secret_reveal(spec, state, epoch=None):
message_epoch=epoch, message_epoch=epoch,
), ),
) )
mask = bls_sign( # Generate the mask (any random 32 bytes that don't reveal the masker's secret will do)
message_hash=spec.hash_tree_root(spec.Epoch(epoch)), mask = hash(reveal)
# Generate masker's signature on the mask
masker_signature = bls_sign(
message_hash=mask,
privkey=privkeys[masker_index], privkey=privkeys[masker_index],
domain=spec.get_domain( domain=spec.get_domain(
state=state, state=state,
domain_type=spec.DOMAIN_RANDAO, domain_type=spec.DOMAIN_RANDAO,
message_epoch=epoch, message_epoch=epoch,
), ),
)[:32] # TODO(Carl): mask is 32 bytes, and signature is 96? Correct to slice the first 32 out? )
masked_reveal = bls_aggregate_signatures([reveal, masker_signature])
return spec.EarlyDerivedSecretReveal( return spec.EarlyDerivedSecretReveal(
revealed_index=revealed_index, revealed_index=revealed_index,
epoch=epoch, epoch=epoch,
reveal=reveal, reveal=masked_reveal,
masker_index=masker_index, masker_index=masker_index,
mask=mask, mask=mask,
) )

View File

@ -28,7 +28,9 @@ def create_genesis_state(spec, num_validators):
deposit_root=deposit_root, deposit_root=deposit_root,
deposit_count=num_validators, deposit_count=num_validators,
block_hash=spec.ZERO_HASH, block_hash=spec.ZERO_HASH,
)) ),
latest_block_header=spec.BeaconBlockHeader(body_root=spec.hash_tree_root(spec.BeaconBlockBody())),
)
# We "hack" in the initial validators, # We "hack" in the initial validators,
# as it is much faster than creating and processing genesis deposits for every single test case. # as it is much faster than creating and processing genesis deposits for every single test case.

View File

@ -1,5 +1,3 @@
from copy import deepcopy
from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases, with_phases from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases, with_phases
from eth2spec.test.helpers.attestations import ( from eth2spec.test.helpers.attestations import (
get_valid_attestation, get_valid_attestation,
@ -10,6 +8,7 @@ from eth2spec.test.helpers.state import (
next_slot, next_slot,
) )
from eth2spec.test.helpers.block import apply_empty_block from eth2spec.test.helpers.block import apply_empty_block
from eth2spec.utils.ssz.ssz_typing import Bitlist
def run_attestation_processing(spec, state, attestation, valid=True): def run_attestation_processing(spec, state, attestation, valid=True):
@ -38,7 +37,7 @@ def run_attestation_processing(spec, state, attestation, valid=True):
spec.process_attestation(state, attestation) spec.process_attestation(state, attestation)
# Make sure the attestation has been processed # Make sure the attestation has been processed
if attestation.data.target_epoch == spec.get_current_epoch(state): if attestation.data.target.epoch == spec.get_current_epoch(state):
assert len(state.current_epoch_attestations) == current_epoch_count + 1 assert len(state.current_epoch_attestations) == current_epoch_count + 1
else: else:
assert len(state.previous_epoch_attestations) == previous_epoch_count + 1 assert len(state.previous_epoch_attestations) == previous_epoch_count + 1
@ -142,16 +141,16 @@ def test_after_epoch_slots(spec, state):
@spec_state_test @spec_state_test
def test_old_source_epoch(spec, state): def test_old_source_epoch(spec, state):
state.slot = spec.SLOTS_PER_EPOCH * 5 state.slot = spec.SLOTS_PER_EPOCH * 5
state.finalized_epoch = 2 state.finalized_checkpoint.epoch = 2
state.previous_justified_epoch = 3 state.previous_justified_checkpoint.epoch = 3
state.current_justified_epoch = 4 state.current_justified_checkpoint.epoch = 4
attestation = get_valid_attestation(spec, state, slot=(spec.SLOTS_PER_EPOCH * 3) + 1) attestation = get_valid_attestation(spec, state, slot=(spec.SLOTS_PER_EPOCH * 3) + 1)
# test logic sanity check: make sure the attestation is pointing to oldest known source epoch # test logic sanity check: make sure the attestation is pointing to oldest known source epoch
assert attestation.data.source_epoch == state.previous_justified_epoch assert attestation.data.source.epoch == state.previous_justified_checkpoint.epoch
# Now go beyond that, it will be invalid # Now go beyond that, it will be invalid
attestation.data.source_epoch -= 1 attestation.data.source.epoch -= 1
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
@ -218,7 +217,7 @@ def test_new_source_epoch(spec, state):
attestation = get_valid_attestation(spec, state) attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.data.source_epoch += 1 attestation.data.source.epoch += 1
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
@ -231,7 +230,7 @@ def test_source_root_is_target_root(spec, state):
attestation = get_valid_attestation(spec, state) attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.data.source_root = attestation.data.target_root attestation.data.source.root = attestation.data.target.root
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
@ -242,23 +241,20 @@ def test_source_root_is_target_root(spec, state):
@spec_state_test @spec_state_test
def test_invalid_current_source_root(spec, state): def test_invalid_current_source_root(spec, state):
state.slot = spec.SLOTS_PER_EPOCH * 5 state.slot = spec.SLOTS_PER_EPOCH * 5
state.finalized_epoch = 2 state.finalized_checkpoint.epoch = 2
state.previous_justified_epoch = 3 state.previous_justified_checkpoint = spec.Checkpoint(epoch=3, root=b'\x01' * 32)
state.previous_justified_root = b'\x01' * 32 state.current_justified_checkpoint = spec.Checkpoint(epoch=4, root=b'\x32' * 32)
state.current_justified_epoch = 4
state.current_justified_root = b'\xff' * 32
attestation = get_valid_attestation(spec, state, slot=(spec.SLOTS_PER_EPOCH * 3) + 1) attestation = get_valid_attestation(spec, state, slot=(spec.SLOTS_PER_EPOCH * 3) + 1)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
# Test logic sanity checks: # Test logic sanity checks:
assert state.current_justified_root != state.previous_justified_root assert state.current_justified_checkpoint.root != state.previous_justified_checkpoint.root
assert attestation.data.source_root == state.previous_justified_root assert attestation.data.source.root == state.previous_justified_checkpoint.root
# Make attestation source root invalid: should be previous justified, not current one # Make attestation source root invalid: should be previous justified, not current one
attestation.data.source_root = state.current_justified_root attestation.data.source.root = state.current_justified_checkpoint.root
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
@ -271,7 +267,7 @@ def test_bad_source_root(spec, state):
attestation = get_valid_attestation(spec, state) attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.data.source_root = b'\x42' * 32 attestation.data.source.root = b'\x42' * 32
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
@ -341,11 +337,14 @@ def test_bad_crosslink_end_epoch(spec, state):
@with_all_phases @with_all_phases
@spec_state_test @spec_state_test
def test_inconsistent_bitfields(spec, state): def test_inconsistent_bits(spec, state):
attestation = get_valid_attestation(spec, state) attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.custody_bitfield = deepcopy(attestation.aggregation_bitfield) + b'\x00' custody_bits = attestation.aggregation_bits[:]
custody_bits.append(False)
attestation.custody_bits = custody_bits
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
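In miniature, what this test provokes is only a length mismatch between the two bit lists; a plain-Python sketch with stand-in values, not the spec's Bitlist type:

aggregation_bits = [True, False, True]               # stand-in values
custody_bits = aggregation_bits[:]
custody_bits.append(False)                           # one extra bit, as in the test above
assert len(custody_bits) != len(aggregation_bits)    # the inconsistency that should make processing fail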
@ -354,11 +353,11 @@ def test_inconsistent_bitfields(spec, state):
@with_phases(['phase0']) @with_phases(['phase0'])
@spec_state_test @spec_state_test
def test_non_empty_custody_bitfield(spec, state): def test_non_empty_custody_bits(spec, state):
attestation = get_valid_attestation(spec, state) attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.custody_bitfield = deepcopy(attestation.aggregation_bitfield) attestation.custody_bits = attestation.aggregation_bits[:]
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)
@ -367,11 +366,12 @@ def test_non_empty_custody_bitfield(spec, state):
@with_all_phases @with_all_phases
@spec_state_test @spec_state_test
def test_empty_aggregation_bitfield(spec, state): def test_empty_aggregation_bits(spec, state):
attestation = get_valid_attestation(spec, state) attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.aggregation_bitfield = b'\x00' * len(attestation.aggregation_bitfield) attestation.aggregation_bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](
*([0b0] * len(attestation.aggregation_bits)))
sign_attestation(spec, state, attestation) sign_attestation(spec, state, attestation)

View File

@ -25,31 +25,56 @@ def run_attester_slashing_processing(spec, state, attester_slashing, valid=True)
yield 'post', None yield 'post', None
return return
slashed_index = attester_slashing.attestation_1.custody_bit_0_indices[0] slashed_indices = (
pre_slashed_balance = get_balance(state, slashed_index) attester_slashing.attestation_1.custody_bit_0_indices
+ attester_slashing.attestation_1.custody_bit_1_indices
)
proposer_index = spec.get_beacon_proposer_index(state) proposer_index = spec.get_beacon_proposer_index(state)
pre_proposer_balance = get_balance(state, proposer_index) pre_proposer_balance = get_balance(state, proposer_index)
pre_slashings = {slashed_index: get_balance(state, slashed_index) for slashed_index in slashed_indices}
pre_withdrawable_epochs = {
slashed_index: state.validators[slashed_index].withdrawable_epoch
for slashed_index in slashed_indices
}
total_proposer_rewards = sum(
balance // spec.WHISTLEBLOWER_REWARD_QUOTIENT
for balance in pre_slashings.values()
)
# Process slashing # Process slashing
spec.process_attester_slashing(state, attester_slashing) spec.process_attester_slashing(state, attester_slashing)
slashed_validator = state.validators[slashed_index] for slashed_index in slashed_indices:
pre_withdrawable_epoch = pre_withdrawable_epochs[slashed_index]
slashed_validator = state.validators[slashed_index]
# Check slashing # Check slashing
assert slashed_validator.slashed assert slashed_validator.slashed
assert slashed_validator.exit_epoch < spec.FAR_FUTURE_EPOCH assert slashed_validator.exit_epoch < spec.FAR_FUTURE_EPOCH
assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH if pre_withdrawable_epoch < spec.FAR_FUTURE_EPOCH:
expected_withdrawable_epoch = max(
pre_withdrawable_epoch,
spec.get_current_epoch(state) + spec.EPOCHS_PER_SLASHINGS_VECTOR
)
assert slashed_validator.withdrawable_epoch == expected_withdrawable_epoch
else:
assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH
assert get_balance(state, slashed_index) < pre_slashings[slashed_index]
if slashed_index != proposer_index: if proposer_index not in slashed_indices:
# lost whistleblower reward
assert get_balance(state, slashed_index) < pre_slashed_balance
# gained whistleblower reward # gained whistleblower reward
assert get_balance(state, proposer_index) > pre_proposer_balance assert get_balance(state, proposer_index) == pre_proposer_balance + total_proposer_rewards
else: else:
# gained rewards for all slashings, which may include others. And only lost that of themselves. # gained rewards for all slashings, which may include others. And only lost that of themselves.
# Netto at least 0, if more people where slashed, a balance increase. expected_balance = (
assert get_balance(state, slashed_index) >= pre_slashed_balance pre_proposer_balance
+ total_proposer_rewards
- pre_slashings[proposer_index] // spec.MIN_SLASHING_PENALTY_QUOTIENT
)
assert get_balance(state, proposer_index) == expected_balance
yield 'post', state yield 'post', state
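As a rough sanity check of the balance assertions in this runner, here is the arithmetic in isolation, with made-up balances and assumed preset quotients; a sketch, not the spec's exact accounting:

WHISTLEBLOWER_REWARD_QUOTIENT = 512    # assumed preset value
MIN_SLASHING_PENALTY_QUOTIENT = 32     # assumed preset value
GWEI = 10 ** 9

pre_slashings = {11: 32 * GWEI, 17: 32 * GWEI}   # hypothetical balances of two slashed validators
total_proposer_rewards = sum(b // WHISTLEBLOWER_REWARD_QUOTIENT for b in pre_slashings.values())
assert total_proposer_rewards == 2 * 62_500_000  # each 32 ETH balance yields 0.0625 ETH to the proposer

# Proposer not slashed: its balance grows by exactly total_proposer_rewards.
# Proposer itself slashed (say index 11): it still gains the rewards but pays its own penalty.
penalty = pre_slashings[11] // MIN_SLASHING_PENALTY_QUOTIENT
assert penalty == 1 * GWEI                       # so the net change is total_proposer_rewards - penalty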
@ -68,18 +93,51 @@ def test_success_surround(spec, state):
next_epoch(spec, state) next_epoch(spec, state)
apply_empty_block(spec, state) apply_empty_block(spec, state)
state.current_justified_epoch += 1 state.current_justified_checkpoint.epoch += 1
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True)
attestation_1 = attester_slashing.attestation_1
attestation_2 = attester_slashing.attestation_2
# set attestation 1 to surround attestation 2 # set attestation 1 to surround attestation 2
attester_slashing.attestation_1.data.source_epoch = attester_slashing.attestation_2.data.source_epoch - 1 attestation_1.data.source.epoch = attestation_2.data.source.epoch - 1
attester_slashing.attestation_1.data.target_epoch = attester_slashing.attestation_2.data.target_epoch + 1 attestation_1.data.target.epoch = attestation_2.data.target.epoch + 1
sign_indexed_attestation(spec, state, attester_slashing.attestation_1) sign_indexed_attestation(spec, state, attester_slashing.attestation_1)
yield from run_attester_slashing_processing(spec, state, attester_slashing) yield from run_attester_slashing_processing(spec, state, attester_slashing)
@with_all_phases
@always_bls
@spec_state_test
def test_success_already_exited_recent(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
slashed_indices = (
attester_slashing.attestation_1.custody_bit_0_indices
+ attester_slashing.attestation_1.custody_bit_1_indices
)
for index in slashed_indices:
spec.initiate_validator_exit(state, index)
yield from run_attester_slashing_processing(spec, state, attester_slashing)
@with_all_phases
@always_bls
@spec_state_test
def test_success_already_exited_long_ago(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
slashed_indices = (
attester_slashing.attestation_1.custody_bit_0_indices
+ attester_slashing.attestation_1.custody_bit_1_indices
)
for index in slashed_indices:
spec.initiate_validator_exit(state, index)
state.validators[index].withdrawable_epoch = spec.get_current_epoch(state) + 2
yield from run_attester_slashing_processing(spec, state, attester_slashing)
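The withdrawable-epoch expectation these already-exited cases feed into the runner reduces to a max(); a sketch with assumed preset values:

EPOCHS_PER_SLASHINGS_VECTOR = 64       # assumed minimal-preset value (8,192 in the mainnet preset)
current_epoch = 10

pre_withdrawable = current_epoch + 2   # as set on the already-exited validators above
expected = max(pre_withdrawable, current_epoch + EPOCHS_PER_SLASHINGS_VECTOR)
assert expected == current_epoch + EPOCHS_PER_SLASHINGS_VECTOR   # the slashing pushes withdrawability out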
@with_all_phases @with_all_phases
@always_bls @always_bls
@spec_state_test @spec_state_test
@ -120,7 +178,7 @@ def test_same_data(spec, state):
def test_no_double_or_surround(spec, state): def test_no_double_or_surround(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True) attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True)
attester_slashing.attestation_1.data.target_epoch += 1 attester_slashing.attestation_1.data.target.epoch += 1
sign_indexed_attestation(spec, state, attester_slashing.attestation_1) sign_indexed_attestation(spec, state, attester_slashing.attestation_1)
yield from run_attester_slashing_processing(spec, state, attester_slashing, False) yield from run_attester_slashing_processing(spec, state, attester_slashing, False)

View File

@ -74,7 +74,7 @@ def test_single_crosslink_update_from_previous_epoch(spec, state):
# ensure rewarded # ensure rewarded
for index in spec.get_crosslink_committee( for index in spec.get_crosslink_committee(
state, state,
attestation.data.target_epoch, attestation.data.target.epoch,
attestation.data.crosslink.shard): attestation.data.crosslink.shard):
assert crosslink_deltas[0][index] > 0 assert crosslink_deltas[0][index] > 0
assert crosslink_deltas[1][index] == 0 assert crosslink_deltas[1][index] == 0
@ -126,7 +126,7 @@ def test_double_late_crosslink(spec, state):
# ensure no reward, only penalties for the failed crosslink # ensure no reward, only penalties for the failed crosslink
for index in spec.get_crosslink_committee( for index in spec.get_crosslink_committee(
state, state,
attestation_2.data.target_epoch, attestation_2.data.target.epoch,
attestation_2.data.crosslink.shard): attestation_2.data.crosslink.shard):
assert crosslink_deltas[0][index] == 0 assert crosslink_deltas[0][index] == 0
assert crosslink_deltas[1][index] > 0 assert crosslink_deltas[1][index] > 0

View File

@ -1,7 +1,13 @@
from eth2spec.test.helpers.custody import get_valid_early_derived_secret_reveal from eth2spec.test.helpers.custody import get_valid_early_derived_secret_reveal
from eth2spec.test.helpers.block import apply_empty_block from eth2spec.test.helpers.block import apply_empty_block
from eth2spec.test.helpers.state import next_epoch, get_balance from eth2spec.test.helpers.state import next_epoch, get_balance
from eth2spec.test.context import with_all_phases_except, spec_state_test, expect_assertion_error from eth2spec.test.context import (
with_all_phases_except,
spec_state_test,
expect_assertion_error,
always_bls,
never_bls,
)
def run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, valid=True): def run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, valid=True):
@ -36,6 +42,7 @@ def run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, v
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@always_bls
@spec_state_test @spec_state_test
def test_success(spec, state): def test_success(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state) randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state)
@ -44,6 +51,7 @@ def test_success(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@never_bls
@spec_state_test @spec_state_test
def test_reveal_from_current_epoch(spec, state): def test_reveal_from_current_epoch(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state, spec.get_current_epoch(state)) randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state, spec.get_current_epoch(state))
@ -52,6 +60,7 @@ def test_reveal_from_current_epoch(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@never_bls
@spec_state_test @spec_state_test
def test_reveal_from_past_epoch(spec, state): def test_reveal_from_past_epoch(spec, state):
next_epoch(spec, state) next_epoch(spec, state)
@ -62,6 +71,7 @@ def test_reveal_from_past_epoch(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@always_bls
@spec_state_test @spec_state_test
def test_reveal_with_custody_padding(spec, state): def test_reveal_with_custody_padding(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal( randao_key_reveal = get_valid_early_derived_secret_reveal(
@ -73,6 +83,7 @@ def test_reveal_with_custody_padding(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@always_bls
@spec_state_test @spec_state_test
def test_reveal_with_custody_padding_minus_one(spec, state): def test_reveal_with_custody_padding_minus_one(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal( randao_key_reveal = get_valid_early_derived_secret_reveal(
@ -84,6 +95,7 @@ def test_reveal_with_custody_padding_minus_one(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@never_bls
@spec_state_test @spec_state_test
def test_double_reveal(spec, state): def test_double_reveal(spec, state):
randao_key_reveal1 = get_valid_early_derived_secret_reveal( randao_key_reveal1 = get_valid_early_derived_secret_reveal(
@ -108,6 +120,7 @@ def test_double_reveal(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@never_bls
@spec_state_test @spec_state_test
def test_revealer_is_slashed(spec, state): def test_revealer_is_slashed(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state, spec.get_current_epoch(state)) randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state, spec.get_current_epoch(state))
@ -117,6 +130,7 @@ def test_revealer_is_slashed(spec, state):
@with_all_phases_except(['phase0']) @with_all_phases_except(['phase0'])
@never_bls
@spec_state_test @spec_state_test
def test_far_future_epoch(spec, state): def test_far_future_epoch(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal( randao_key_reveal = get_valid_early_derived_secret_reveal(

View File

@ -13,25 +13,22 @@ def check_finality(spec,
previous_justified_changed, previous_justified_changed,
finalized_changed): finalized_changed):
if current_justified_changed: if current_justified_changed:
assert state.current_justified_epoch > prev_state.current_justified_epoch assert state.current_justified_checkpoint.epoch > prev_state.current_justified_checkpoint.epoch
assert state.current_justified_root != prev_state.current_justified_root assert state.current_justified_checkpoint.root != prev_state.current_justified_checkpoint.root
else: else:
assert state.current_justified_epoch == prev_state.current_justified_epoch assert state.current_justified_checkpoint == prev_state.current_justified_checkpoint
assert state.current_justified_root == prev_state.current_justified_root
if previous_justified_changed: if previous_justified_changed:
assert state.previous_justified_epoch > prev_state.previous_justified_epoch assert state.previous_justified_checkpoint.epoch > prev_state.previous_justified_checkpoint.epoch
assert state.previous_justified_root != prev_state.previous_justified_root assert state.previous_justified_checkpoint.root != prev_state.previous_justified_checkpoint.root
else: else:
assert state.previous_justified_epoch == prev_state.previous_justified_epoch assert state.previous_justified_checkpoint == prev_state.previous_justified_checkpoint
assert state.previous_justified_root == prev_state.previous_justified_root
if finalized_changed: if finalized_changed:
assert state.finalized_epoch > prev_state.finalized_epoch assert state.finalized_checkpoint.epoch > prev_state.finalized_checkpoint.epoch
assert state.finalized_root != prev_state.finalized_root assert state.finalized_checkpoint.root != prev_state.finalized_checkpoint.root
else: else:
assert state.finalized_epoch == prev_state.finalized_epoch assert state.finalized_checkpoint == prev_state.finalized_checkpoint
assert state.finalized_root == prev_state.finalized_root
def next_epoch_with_attestations(spec, def next_epoch_with_attestations(spec,
@ -107,8 +104,7 @@ def test_finality_rule_4(spec, state):
elif epoch == 1: elif epoch == 1:
# rule 4 of finality # rule 4 of finality
check_finality(spec, state, prev_state, True, True, True) check_finality(spec, state, prev_state, True, True, True)
assert state.finalized_epoch == prev_state.current_justified_epoch assert state.finalized_checkpoint == prev_state.current_justified_checkpoint
assert state.finalized_root == prev_state.current_justified_root
yield 'blocks', blocks yield 'blocks', blocks
yield 'post', state yield 'post', state
@ -138,8 +134,7 @@ def test_finality_rule_1(spec, state):
elif epoch == 2: elif epoch == 2:
# finalized by rule 1 # finalized by rule 1
check_finality(spec, state, prev_state, True, True, True) check_finality(spec, state, prev_state, True, True, True)
assert state.finalized_epoch == prev_state.previous_justified_epoch assert state.finalized_checkpoint == prev_state.previous_justified_checkpoint
assert state.finalized_root == prev_state.previous_justified_root
yield 'blocks', blocks yield 'blocks', blocks
yield 'post', state yield 'post', state
@ -169,8 +164,7 @@ def test_finality_rule_2(spec, state):
prev_state, new_blocks, state = next_epoch_with_attestations(spec, state, False, True) prev_state, new_blocks, state = next_epoch_with_attestations(spec, state, False, True)
# finalized by rule 2 # finalized by rule 2
check_finality(spec, state, prev_state, True, False, True) check_finality(spec, state, prev_state, True, False, True)
assert state.finalized_epoch == prev_state.previous_justified_epoch assert state.finalized_checkpoint == prev_state.previous_justified_checkpoint
assert state.finalized_root == prev_state.previous_justified_root
blocks += new_blocks blocks += new_blocks
@ -221,8 +215,7 @@ def test_finality_rule_3(spec, state):
blocks += new_blocks blocks += new_blocks
# rule 3 # rule 3
check_finality(spec, state, prev_state, True, True, True) check_finality(spec, state, prev_state, True, True, True)
assert state.finalized_epoch == prev_state.current_justified_epoch assert state.finalized_checkpoint == prev_state.current_justified_checkpoint
assert state.finalized_root == prev_state.current_justified_root
yield 'blocks', blocks yield 'blocks', blocks
yield 'post', state yield 'post', state

View File

@ -1,60 +0,0 @@
from eth2spec.utils.ssz.ssz_impl import signing_root, hash_tree_root
from eth2spec.test.context import with_all_phases, with_state, bls_switch
from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.attestations import get_valid_attestation
from eth2spec.test.helpers.state import next_slot
@with_all_phases
@with_state
@bls_switch
def test_basic(spec, state):
state.latest_block_header = spec.BeaconBlockHeader(body_root=hash_tree_root(spec.BeaconBlockBody()))
# Initialization
store = spec.get_genesis_store(state)
blocks = []
time = 100
spec.on_tick(store, time)
assert store.time == time
# On receiving a block of `GENESIS_SLOT + 1` slot
block = build_empty_block_for_next_slot(spec, state)
blocks.append(block)
spec.on_block(store, block)
assert store.blocks[signing_root(block)] == block
# On receiving a block of next epoch
store.time = time + spec.SECONDS_PER_SLOT * spec.SLOTS_PER_EPOCH
block = build_empty_block_for_next_slot(spec, state)
block.slot += spec.SLOTS_PER_EPOCH
blocks.append(block)
spec.on_block(store, block)
assert store.blocks[signing_root(block)] == block
# TODO: add tests for justified_root and finalized_root
@with_all_phases
@with_state
@bls_switch
def test_on_attestation(spec, state):
store = spec.get_genesis_store(state)
time = 100
spec.on_tick(store, time)
next_slot(spec, state)
attestation = get_valid_attestation(spec, state, slot=1)
indexed_attestation = spec.convert_to_indexed(state, attestation)
spec.on_attestation(store, attestation)
assert (
store.latest_targets[indexed_attestation.custody_bit_0_indices[0]] ==
spec.Target(
epoch=attestation.data.target_epoch,
root=attestation.data.target_root,
)
)

View File

@ -1,7 +1,8 @@
from ..merkle_minimal import merkleize_chunks from ..merkle_minimal import merkleize_chunks
from ..hash_function import hash from ..hash_function import hash
from .ssz_typing import ( from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bool, Container, List, Bytes, uint, SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bits, boolean, Container, List, Bytes,
Bitlist, Bitvector, uint,
) )
# SSZ Serialization # SSZ Serialization
@ -13,7 +14,7 @@ BYTES_PER_LENGTH_OFFSET = 4
def serialize_basic(value: SSZValue): def serialize_basic(value: SSZValue):
if isinstance(value, uint): if isinstance(value, uint):
return value.to_bytes(value.type().byte_len, 'little') return value.to_bytes(value.type().byte_len, 'little')
elif isinstance(value, Bool): elif isinstance(value, boolean):
if value: if value:
return b'\x01' return b'\x01'
else: else:
@ -25,7 +26,7 @@ def serialize_basic(value: SSZValue):
def deserialize_basic(value, typ: BasicType): def deserialize_basic(value, typ: BasicType):
if issubclass(typ, uint): if issubclass(typ, uint):
return typ(int.from_bytes(value, 'little')) return typ(int.from_bytes(value, 'little'))
elif issubclass(typ, Bool): elif issubclass(typ, boolean):
assert value in (b'\x00', b'\x01') assert value in (b'\x00', b'\x01')
return typ(value == b'\x01') return typ(value == b'\x01')
else: else:
@ -39,6 +40,12 @@ def is_empty(obj: SSZValue):
def serialize(obj: SSZValue): def serialize(obj: SSZValue):
if isinstance(obj, BasicValue): if isinstance(obj, BasicValue):
return serialize_basic(obj) return serialize_basic(obj)
elif isinstance(obj, Bitvector):
as_integer = sum([obj[i] << i for i in range(len(obj))])
return as_integer.to_bytes((len(obj) + 7) // 8, "little")
elif isinstance(obj, Bitlist):
as_integer = (1 << len(obj)) + sum([obj[i] << i for i in range(len(obj))])
return as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little")
elif isinstance(obj, Series): elif isinstance(obj, Series):
return encode_series(obj) return encode_series(obj)
else: else:
@ -85,6 +92,12 @@ def encode_series(values: Series):
def pack(values: Series): def pack(values: Series):
if isinstance(values, bytes): # Bytes and BytesN are already packed if isinstance(values, bytes): # Bytes and BytesN are already packed
return values return values
elif isinstance(values, Bitvector):
as_integer = sum([values[i] << i for i in range(len(values))])
return as_integer.to_bytes((values.length + 7) // 8, "little")
elif isinstance(values, Bitlist):
as_integer = sum([values[i] << i for i in range(len(values))])
return as_integer.to_bytes((values.length + 7) // 8, "little")
return b''.join([serialize_basic(value) for value in values]) return b''.join([serialize_basic(value) for value in values])
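A self-contained check of the packing above, under the assumption spelled out in the comments: bits pack little-endian within each byte, and Bitlist serialization additionally sets a single delimiter bit just past the last data bit:

bits = [1, 1, 0, 1, 0, 1, 0, 0]                        # TTFTFTFF
as_integer = sum(b << i for i, b in enumerate(bits))
packed = as_integer.to_bytes((len(bits) + 7) // 8, "little")
assert packed.hex() == "2b"                            # Bitvector-style packing, no delimiter

with_delimiter = (1 << len(bits)) + as_integer
serialized = with_delimiter.to_bytes((with_delimiter.bit_length() + 7) // 8, "little")
assert serialized.hex() == "2b01"                      # Bitlist serialization grows by the delimiter bit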
@ -115,6 +128,8 @@ def item_length(typ: SSZType) -> int:
def chunk_count(typ: SSZType) -> int: def chunk_count(typ: SSZType) -> int:
if isinstance(typ, BasicType): if isinstance(typ, BasicType):
return 1 return 1
elif issubclass(typ, Bits):
return (typ.length + 255) // 256
elif issubclass(typ, Elements): elif issubclass(typ, Elements):
return (typ.length * item_length(typ.elem_type) + 31) // 32 return (typ.length * item_length(typ.elem_type) + 31) // 32
elif issubclass(typ, Container): elif issubclass(typ, Container):
@ -134,7 +149,7 @@ def hash_tree_root(obj: SSZValue):
else: else:
raise Exception(f"Type not supported: {type(obj)}") raise Exception(f"Type not supported: {type(obj)}")
if isinstance(obj, (List, Bytes)): if isinstance(obj, (List, Bytes, Bitlist)):
return mix_in_length(merkleize_chunks(leaves, pad_to=chunk_count(obj.type())), len(obj)) return mix_in_length(merkleize_chunks(leaves, pad_to=chunk_count(obj.type())), len(obj))
else: else:
return merkleize_chunks(leaves) return merkleize_chunks(leaves)
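For the bit types the chunk arithmetic above follows from 256 bits fitting in one 32-byte chunk, with Bitlist (like List and Bytes) mixing its bit count into the root; a quick check of that branch in isolation:

def bits_chunk_count(length: int) -> int:
    # mirrors the `(typ.length + 255) // 256` branch added above
    return (length + 255) // 256

assert bits_chunk_count(8) == 1
assert bits_chunk_count(256) == 1
assert bits_chunk_count(257) == 2
assert bits_chunk_count(513) == 3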

View File

@ -31,7 +31,7 @@ class BasicValue(int, SSZValue, metaclass=BasicType):
pass pass
class Bool(BasicValue): # can't subclass bool. class boolean(BasicValue): # can't subclass bool.
byte_len = 1 byte_len = 1
def __new__(cls, value: int): # int value, but can be any subclass of int (bool, Bit, Bool, etc...) def __new__(cls, value: int): # int value, but can be any subclass of int (bool, Bit, Bool, etc...)
@ -48,7 +48,7 @@ class Bool(BasicValue): # can't subclass bool.
# Alias for Bool # Alias for Bool
class Bit(Bool): class bit(boolean):
pass pass
@ -233,7 +233,7 @@ class ParamsMeta(SSZType):
return f"{self.__name__}~{self.__class__.__name__}" return f"{self.__name__}~{self.__class__.__name__}"
def __repr__(self): def __repr__(self):
return self, self.__class__ return f"{self.__name__}~{self.__class__.__name__}"
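The __repr__ change above is a correctness fix, not cosmetics: __repr__ must return a string, and returning a tuple raises as soon as the type is printed. A quick illustration of the old failure mode:

class Broken:
    def __repr__(self):
        return self, self.__class__          # old behaviour: returns a tuple

try:
    repr(Broken())
except TypeError as exc:
    assert "__repr__ returned non-string" in str(exc)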
def attr_from_params(self, p): def attr_from_params(self, p):
# single key params are valid too. Wrap them in a tuple. # single key params are valid too. Wrap them in a tuple.
@ -310,6 +310,10 @@ class BaseList(list, Elements):
cls = self.__class__ cls = self.__class__
return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})" return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})"
def __repr__(self):
cls = self.__class__
return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})"
def __getitem__(self, k) -> SSZValue: def __getitem__(self, k) -> SSZValue:
if isinstance(k, int): # check if we are just doing a lookup, and not slicing if isinstance(k, int): # check if we are just doing a lookup, and not slicing
if k < 0: if k < 0:
@ -320,12 +324,18 @@ class BaseList(list, Elements):
return super().__getitem__(k) return super().__getitem__(k)
def __setitem__(self, k, v): def __setitem__(self, k, v):
if k < 0: if type(k) == slice:
raise IndexError(f"cannot set item in type {self.__class__} at negative index {k} (to {v})") if (k.start is not None and k.start < 0) or (k.stop is not None and k.stop > len(self)):
if k > len(self): raise IndexError(f"cannot set item in type {self.__class__}"
raise IndexError(f"cannot set item in type {self.__class__}" f" at out of bounds slice {k} (to {v}, bound: {len(self)})")
f" at out of bounds index {k} (to {v}, bound: {len(self)})") super().__setitem__(k, [coerce_type_maybe(x, self.__class__.elem_type) for x in v])
super().__setitem__(k, coerce_type_maybe(v, self.__class__.elem_type, strict=True)) else:
if k < 0:
raise IndexError(f"cannot set item in type {self.__class__} at negative index {k} (to {v})")
if k > len(self):
raise IndexError(f"cannot set item in type {self.__class__}"
f" at out of bounds index {k} (to {v}, bound: {len(self)})")
super().__setitem__(k, coerce_type_maybe(v, self.__class__.elem_type, strict=True))
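A usage sketch of the slice-assignment path added above, in plain Python (the real method additionally coerces every element to the list's SSZ element type):

def checked_set_slice(lst, sl, new_values):
    # bounds check mirroring the slice branch above
    if (sl.start is not None and sl.start < 0) or (sl.stop is not None and sl.stop > len(lst)):
        raise IndexError(f"cannot set out-of-bounds slice {sl}")
    lst[sl] = new_values

values = [0, 1, 2, 3]
checked_set_slice(values, slice(1, 3), [9, 9])
assert values == [0, 9, 9, 3]

try:
    checked_set_slice(values, slice(2, 10), [7] * 8)   # stop beyond len(values) is rejected
except IndexError:
    pass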
def append(self, v): def append(self, v):
super().append(coerce_type_maybe(v, self.__class__.elem_type, strict=True)) super().append(coerce_type_maybe(v, self.__class__.elem_type, strict=True))
@ -338,6 +348,48 @@ class BaseList(list, Elements):
return self[len(self) - 1] return self[len(self) - 1]
class BitElementsType(ElementsType):
elem_type: SSZType = boolean
length: int
class Bits(BaseList, metaclass=BitElementsType):
pass
class Bitlist(Bits):
@classmethod
def is_fixed_size(cls):
return False
@classmethod
def default(cls):
return cls()
class Bitvector(Bits):
@classmethod
def extract_args(cls, *args):
if len(args) == 0:
return cls.default()
else:
return super().extract_args(*args)
@classmethod
def value_check(cls, value):
# check length limit strictly
return len(value) == cls.length and super().value_check(value)
@classmethod
def is_fixed_size(cls):
return True
@classmethod
def default(cls):
return cls(0 for _ in range(cls.length))
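A usage sketch of the two new bit types, assuming they are importable from eth2spec.utils.ssz.ssz_typing as elsewhere in this change: Bitvector is fixed-size and defaults to all zero bits, Bitlist is bounded but variable-length and defaults to empty.

from eth2spec.utils.ssz.ssz_typing import Bitlist, Bitvector  # assumed import path

bv = Bitvector[4](0, 1, 0, 1)     # exactly 4 bits; is_fixed_size() is True
bl = Bitlist[4](0, 1)             # at most 4 bits; is_fixed_size() is False

assert len(bv) == 4 and len(bl) == 2
assert list(Bitvector[4].default()) == [0, 0, 0, 0]
assert list(Bitlist[4].default()) == []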
class List(BaseList): class List(BaseList):
@classmethod @classmethod

View File

@ -1,7 +1,8 @@
from typing import Iterable from typing import Iterable
from .ssz_impl import serialize, hash_tree_root from .ssz_impl import serialize, hash_tree_root
from .ssz_typing import ( from .ssz_typing import (
Bit, Bool, Container, List, Vector, Bytes, BytesN, bit, boolean, Container, List, Vector, Bytes, BytesN,
Bitlist, Bitvector,
uint8, uint16, uint32, uint64, uint256, byte uint8, uint16, uint32, uint64, uint256, byte
) )
from ..hash_function import hash as bytes_hash from ..hash_function import hash as bytes_hash
@ -74,10 +75,32 @@ def merge(a: str, branch: Iterable[str]) -> str:
test_data = [ test_data = [
("bit F", Bit(False), "00", chunk("00")), ("bit F", bit(False), "00", chunk("00")),
("bit T", Bit(True), "01", chunk("01")), ("bit T", bit(True), "01", chunk("01")),
("bool F", Bool(False), "00", chunk("00")), ("boolean F", boolean(False), "00", chunk("00")),
("bool T", Bool(True), "01", chunk("01")), ("boolean T", boolean(True), "01", chunk("01")),
("bitvector TTFTFTFF", Bitvector[8](1, 1, 0, 1, 0, 1, 0, 0), "2b", chunk("2b")),
("bitlist TTFTFTFF", Bitlist[8](1, 1, 0, 1, 0, 1, 0, 0), "2b01", h(chunk("2b"), chunk("08"))),
("bitvector FTFT", Bitvector[4](0, 1, 0, 1), "0a", chunk("0a")),
("bitlist FTFT", Bitlist[4](0, 1, 0, 1), "1a", h(chunk("0a"), chunk("04"))),
("bitvector FTF", Bitvector[3](0, 1, 0), "02", chunk("02")),
("bitlist FTF", Bitlist[3](0, 1, 0), "0a", h(chunk("02"), chunk("03"))),
("bitvector TFTFFFTTFT", Bitvector[10](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c502", chunk("c502")),
("bitlist TFTFFFTTFT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c506", h(chunk("c502"), chunk("0A"))),
("bitvector TFTFFFTTFTFFFFTT", Bitvector[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1),
"c5c2", chunk("c5c2")),
("bitlist TFTFFFTTFTFFFFTT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1),
"c5c201", h(chunk("c5c2"), chunk("10"))),
("long bitvector", Bitvector[512](1 for i in range(512)),
"ff" * 64, h("ff" * 32, "ff" * 32)),
("long bitlist", Bitlist[512](1),
"03", h(h(chunk("01"), chunk("")), chunk("01"))),
("long bitlist", Bitlist[512](1 for i in range(512)),
"ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), chunk("0002"))),
("odd bitvector", Bitvector[513](1 for i in range(513)),
"ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk("")))),
("odd bitlist", Bitlist[513](1 for i in range(513)),
"ff" * 64 + "03", h(h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk(""))), chunk("0102"))),
("uint8 00", uint8(0x00), "00", chunk("00")), ("uint8 00", uint8(0x00), "00", chunk("00")),
("uint8 01", uint8(0x01), "01", chunk("01")), ("uint8 01", uint8(0x01), "01", chunk("01")),
("uint8 ab", uint8(0xab), "ab", chunk("ab")), ("uint8 ab", uint8(0xab), "ab", chunk("ab")),

View File

@ -1,6 +1,6 @@
from .ssz_typing import ( from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, ElementsType, SSZValue, SSZType, BasicValue, BasicType, Series, ElementsType,
Elements, Bit, Bool, Container, List, Vector, Bytes, BytesN, Elements, bit, boolean, Container, List, Vector, Bytes, BytesN,
byte, uint, uint8, uint16, uint32, uint64, uint128, uint256, byte, uint, uint8, uint16, uint32, uint64, uint128, uint256,
Bytes32, Bytes48 Bytes32, Bytes48
) )
@ -22,8 +22,8 @@ def test_subclasses():
assert issubclass(u, SSZValue) assert issubclass(u, SSZValue)
assert isinstance(u, SSZType) assert isinstance(u, SSZType)
assert isinstance(u, BasicType) assert isinstance(u, BasicType)
assert issubclass(Bool, BasicValue) assert issubclass(boolean, BasicValue)
assert isinstance(Bool, BasicType) assert isinstance(boolean, BasicType)
for c in [Container, List, Vector, Bytes, BytesN]: for c in [Container, List, Vector, Bytes, BytesN]:
assert issubclass(c, Series) assert issubclass(c, Series)
@ -45,16 +45,16 @@ def test_basic_instances():
assert isinstance(v, BasicValue) assert isinstance(v, BasicValue)
assert isinstance(v, SSZValue) assert isinstance(v, SSZValue)
assert isinstance(Bool(True), BasicValue) assert isinstance(boolean(True), BasicValue)
assert isinstance(Bool(False), BasicValue) assert isinstance(boolean(False), BasicValue)
assert isinstance(Bit(True), Bool) assert isinstance(bit(True), boolean)
assert isinstance(Bit(False), Bool) assert isinstance(bit(False), boolean)
def test_basic_value_bounds(): def test_basic_value_bounds():
max = { max = {
Bool: 2 ** 1, boolean: 2 ** 1,
Bit: 2 ** 1, bit: 2 ** 1,
uint8: 2 ** (8 * 1), uint8: 2 ** (8 * 1),
byte: 2 ** (8 * 1), byte: 2 ** (8 * 1),
uint16: 2 ** (8 * 2), uint16: 2 ** (8 * 2),