Merge branch 'dev' into dev-merge

This commit is contained in:
Danny Ryan 2021-02-24 16:22:18 -07:00
commit 63e014791c
No known key found for this signature in database
GPG Key ID: 2765A792E42CE07A
94 changed files with 2761 additions and 688 deletions

View File

@ -35,13 +35,13 @@ commands:
description: "Restore the cache with pyspec keys" description: "Restore the cache with pyspec keys"
steps: steps:
- restore_cached_venv: - restore_cached_venv:
venv_name: v22-pyspec venv_name: v24-pyspec
reqs_checksum: cache-{{ checksum "setup.py" }} reqs_checksum: cache-{{ checksum "setup.py" }}
save_pyspec_cached_venv: save_pyspec_cached_venv:
description: "Save a venv into a cache with pyspec keys" description: "Save a venv into a cache with pyspec keys"
steps: steps:
- save_cached_venv: - save_cached_venv:
venv_name: v22-pyspec venv_name: v24-pyspec
reqs_checksum: cache-{{ checksum "setup.py" }} reqs_checksum: cache-{{ checksum "setup.py" }}
venv_path: ./venv venv_path: ./venv
restore_deposit_contract_tester_cached_venv: restore_deposit_contract_tester_cached_venv:
@ -216,15 +216,17 @@ workflows:
- lint: - lint:
requires: requires:
- test - test
- install_deposit_contract_web3_tester: # NOTE: Since phase 0 has been launched, we disabled the deposit contract tests.
requires: # - install_deposit_contract_web3_tester:
- checkout_specs # requires:
- test_deposit_contract_web3_tests: # - checkout_specs
requires: # - test_deposit_contract_web3_tests:
- install_deposit_contract_web3_tester # requires:
# - install_deposit_contract_web3_tester
build_and_test_deposit_contract: build_and_test_deposit_contract:
jobs: jobs:
- build_deposit_contract - build_deposit_contract
- test_deposit_contract: # NOTE: Since phase 0 has been launched, we disabled the deposit contract tests.
requires: # - test_deposit_contract:
- build_deposit_contract # requires:
# - build_deposit_contract

1
.gitignore vendored
View File

@ -17,6 +17,7 @@ eth2.0-spec-tests/
# Dynamically built from Markdown spec # Dynamically built from Markdown spec
tests/core/pyspec/eth2spec/phase0/ tests/core/pyspec/eth2spec/phase0/
tests/core/pyspec/eth2spec/phase1/ tests/core/pyspec/eth2spec/phase1/
tests/core/pyspec/eth2spec/lightclient_patch/
# coverage reports # coverage reports
.htmlcov .htmlcov

View File

@ -20,7 +20,7 @@ GENERATOR_VENVS = $(patsubst $(GENERATOR_DIR)/%, $(GENERATOR_DIR)/%venv, $(GENER
# To check generator matching: # To check generator matching:
#$(info $$GENERATOR_TARGETS is [${GENERATOR_TARGETS}]) #$(info $$GENERATOR_TARGETS is [${GENERATOR_TARGETS}])
MARKDOWN_FILES = $(wildcard $(SPEC_DIR)/phase0/*.md) $(wildcard $(SPEC_DIR)/phase1/*.md) $(wildcard $(SSZ_DIR)/*.md) $(wildcard $(SPEC_DIR)/networking/*.md) $(wildcard $(SPEC_DIR)/validator/*.md) MARKDOWN_FILES = $(wildcard $(SPEC_DIR)/phase0/*.md) $(wildcard $(SPEC_DIR)/phase1/*.md) $(wildcard $(SPEC_DIR)/lightclient/*.md) $(wildcard $(SSZ_DIR)/*.md) $(wildcard $(SPEC_DIR)/networking/*.md) $(wildcard $(SPEC_DIR)/validator/*.md)
COV_HTML_OUT=.htmlcov COV_HTML_OUT=.htmlcov
COV_INDEX_FILE=$(PY_SPEC_DIR)/$(COV_HTML_OUT)/index.html COV_INDEX_FILE=$(PY_SPEC_DIR)/$(COV_HTML_OUT)/index.html
@ -49,6 +49,7 @@ partial_clean:
rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/.pytest_cache rm -rf $(DEPOSIT_CONTRACT_TESTER_DIR)/.pytest_cache
rm -rf $(PY_SPEC_DIR)/phase0 rm -rf $(PY_SPEC_DIR)/phase0
rm -rf $(PY_SPEC_DIR)/phase1 rm -rf $(PY_SPEC_DIR)/phase1
rm -rf $(PY_SPEC_DIR)/lightclient
rm -rf $(PY_SPEC_DIR)/$(COV_HTML_OUT) rm -rf $(PY_SPEC_DIR)/$(COV_HTML_OUT)
rm -rf $(PY_SPEC_DIR)/.coverage rm -rf $(PY_SPEC_DIR)/.coverage
rm -rf $(PY_SPEC_DIR)/test-reports rm -rf $(PY_SPEC_DIR)/test-reports
@ -81,19 +82,19 @@ pyspec:
# installs the packages to run pyspec tests # installs the packages to run pyspec tests
install_test: install_test:
python3.8 -m venv venv; . venv/bin/activate; pip3 install .[lint]; pip3 install -e .[test] python3 -m venv venv; . venv/bin/activate; python3 -m pip install .[lint]; python3 -m pip install -e .[test]
test: pyspec test: pyspec
. venv/bin/activate; cd $(PY_SPEC_DIR); \ . venv/bin/activate; cd $(PY_SPEC_DIR); \
python -m pytest -n 4 --disable-bls --cov=eth2spec.phase0.spec --cov=eth2spec.phase1.spec --cov-report="html:$(COV_HTML_OUT)" --cov-branch eth2spec python3 -m pytest -n 4 --disable-bls --cov=eth2spec.phase0.spec --cov=eth2spec.phase1.spec --cov=eth2spec.lightclient_patch.spec --cov-report="html:$(COV_HTML_OUT)" --cov-branch eth2spec
find_test: pyspec find_test: pyspec
. venv/bin/activate; cd $(PY_SPEC_DIR); \ . venv/bin/activate; cd $(PY_SPEC_DIR); \
python -m pytest -k=$(K) --disable-bls --cov=eth2spec.phase0.spec --cov=eth2spec.phase1.spec --cov-report="html:$(COV_HTML_OUT)" --cov-branch eth2spec python3 -m pytest -k=$(K) --disable-bls --cov=eth2spec.phase0.spec --cov=eth2spec.phase1.spec --cov=eth2spec.lightclient_patch.spec --cov-report="html:$(COV_HTML_OUT)" --cov-branch eth2spec
citest: pyspec citest: pyspec
mkdir -p tests/core/pyspec/test-reports/eth2spec; . venv/bin/activate; cd $(PY_SPEC_DIR); \ mkdir -p tests/core/pyspec/test-reports/eth2spec; . venv/bin/activate; cd $(PY_SPEC_DIR); \
python -m pytest -n 4 --bls-type=milagro --junitxml=eth2spec/test_results.xml eth2spec python3 -m pytest -n 4 --bls-type=milagro --junitxml=eth2spec/test_results.xml eth2spec
open_cov: open_cov:
((open "$(COV_INDEX_FILE)" || xdg-open "$(COV_INDEX_FILE)") &> /dev/null) & ((open "$(COV_INDEX_FILE)" || xdg-open "$(COV_INDEX_FILE)") &> /dev/null) &
@ -112,7 +113,7 @@ codespell:
lint: pyspec lint: pyspec
. venv/bin/activate; cd $(PY_SPEC_DIR); \ . venv/bin/activate; cd $(PY_SPEC_DIR); \
flake8 --config $(LINTER_CONFIG_FILE) ./eth2spec \ flake8 --config $(LINTER_CONFIG_FILE) ./eth2spec \
&& mypy --config-file $(LINTER_CONFIG_FILE) -p eth2spec.phase0 -p eth2spec.phase1 && mypy --config-file $(LINTER_CONFIG_FILE) -p eth2spec.phase0 -p eth2spec.phase1 -p eth2spec.lightclient_patch
lint_generators: pyspec lint_generators: pyspec
. venv/bin/activate; cd $(TEST_GENERATORS_DIR); \ . venv/bin/activate; cd $(TEST_GENERATORS_DIR); \
@ -132,11 +133,11 @@ test_deposit_contract:
dapp test -v --fuzz-runs 5 dapp test -v --fuzz-runs 5
install_deposit_contract_web3_tester: install_deposit_contract_web3_tester:
cd $(DEPOSIT_CONTRACT_TESTER_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements.txt cd $(DEPOSIT_CONTRACT_TESTER_DIR); python3 -m venv venv; . venv/bin/activate; python3 -m pip install -r requirements.txt
test_deposit_contract_web3_tests: test_deposit_contract_web3_tests:
cd $(DEPOSIT_CONTRACT_TESTER_DIR); . venv/bin/activate; \ cd $(DEPOSIT_CONTRACT_TESTER_DIR); . venv/bin/activate; \
python -m pytest . python3 -m pytest .
# Runs a generator, identified by param 1 # Runs a generator, identified by param 1
define run_generator define run_generator

View File

@ -11,29 +11,28 @@ This repository hosts the current Eth2 specifications. Discussions about design
[![GitHub release](https://img.shields.io/github/v/release/ethereum/eth2.0-specs)](https://github.com/ethereum/eth2.0-specs/releases/) [![PyPI version](https://badge.fury.io/py/eth2spec.svg)](https://badge.fury.io/py/eth2spec) [![GitHub release](https://img.shields.io/github/v/release/ethereum/eth2.0-specs)](https://github.com/ethereum/eth2.0-specs/releases/) [![PyPI version](https://badge.fury.io/py/eth2spec.svg)](https://badge.fury.io/py/eth2spec)
Core specifications for Eth2 clients can be found in [specs](specs/). These are divided into phases. Each subsequent phase depends upon the prior. The current phases specified are: Core specifications for Eth2 clients can be found in [specs](specs/). These are divided into phases. Each subsequent phase depends upon the prior. The current phases specified are:
### Phase 0 ### Phase 0
* [The Beacon Chain](specs/phase0/beacon-chain.md) * [The Beacon Chain](specs/phase0/beacon-chain.md)
* [Beacon Chain Fork Choice](specs/phase0/fork-choice.md) * [Beacon Chain Fork Choice](specs/phase0/fork-choice.md)
* [Deposit Contract](specs/phase0/deposit-contract.md) * [Deposit Contract](specs/phase0/deposit-contract.md)
* [Honest Validator](specs/phase0/validator.md) * [Honest Validator](specs/phase0/validator.md)
* [P2P Networking](specs/phase0/p2p-interface.md) * [P2P Networking](specs/phase0/p2p-interface.md)
### Phase 1 ### Light clients
* [From Phase 0 to Phase 1](specs/phase1/phase1-fork.md)
* [The Beacon Chain for Shards](specs/phase1/beacon-chain.md)
* [Custody Game](specs/phase1/custody-game.md)
* [Shard Transition and Fraud Proofs](specs/phase1/shard-transition.md)
* [Light client syncing protocol](specs/phase1/light-client-sync.md)
* [Beacon Chain Fork Choice for Shards](specs/phase1/fork-choice.md)
### Phase 2 * [Beacon chain changes](specs/lightclient/beacon-chain.md)
* [Light client sync protocol](specs/lightclient/sync-protocol.md)
Phase 2 is still actively in R&D and does not yet have any formal specifications. ### Sharding
See the [Eth2 Phase 2 Wiki](https://hackmd.io/UzysWse1Th240HELswKqVA?view) for current progress, discussions, and definitions regarding this work. The sharding spec is still actively in R&D; see the most recent available pull request [here](https://github.com/ethereum/eth2.0-specs/pull/2146) and some technical details [here](https://hackmd.io/@HWeNw8hNRimMm2m2GH56Cw/B1YJPGkpD).
### Merge
The merge is still actively in R&D; see an [ethresear.ch](https://ethresear.ch) post describing the proposed basic mechanism [here](https://ethresear.ch/t/the-eth1-eth2-transition/6265) and the section of [ethereum.org](https://ethereum.org) describing the merge at a high level [here](https://ethereum.org/en/eth2/docking/).
### Accompanying documents can be found in [specs](specs) and include: ### Accompanying documents can be found in [specs](specs) and include:

View File

@ -0,0 +1,31 @@
# Mainnet preset - lightclient patch
CONFIG_NAME: "mainnet"
# Updated penalty values
# ---------------------------------------------------------------
# 3 * 2**24 (= 50,331,648)
HF1_INACTIVITY_PENALTY_QUOTIENT: 50331648
# 2**6 (= 64)
HF1_MIN_SLASHING_PENALTY_QUOTIENT: 64
# 2
HF1_PROPORTIONAL_SLASHING_MULTIPLIER: 2
# Misc
# ---------------------------------------------------------------
# 2**10 (=1,024)
SYNC_COMMITTEE_SIZE: 1024
# 2**6 (=64)
SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE: 64
# Time parameters
# ---------------------------------------------------------------
# 2**8 (= 256)
EPOCHS_PER_SYNC_COMMITTEE_PERIOD: 256
# Signature domains
# ---------------------------------------------------------------
DOMAIN_SYNC_COMMITTEE: 0x07000000

View File

@ -0,0 +1,31 @@
# Minimal preset - lightclient patch
CONFIG_NAME: "minimal"
# Updated penalty values
# ---------------------------------------------------------------
# 3 * 2**24 (= 50,331,648)
HF1_INACTIVITY_PENALTY_QUOTIENT: 50331648
# 2**6 (= 64)
HF1_MIN_SLASHING_PENALTY_QUOTIENT: 64
# 2
HF1_PROPORTIONAL_SLASHING_MULTIPLIER: 2
# Misc
# ---------------------------------------------------------------
# [customized]
SYNC_COMMITTEE_SIZE: 32
# [customized]
SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE: 16
# Time parameters
# ---------------------------------------------------------------
# [customized]
EPOCHS_PER_SYNC_COMMITTEE_PERIOD: 8
# Signature domains
# ---------------------------------------------------------------
DOMAIN_SYNC_COMMITTEE: 0x07000000

View File

@ -52,8 +52,9 @@ def get_spec(file_name: str) -> SpecObject:
else: else:
# Handle function definitions & ssz_objects # Handle function definitions & ssz_objects
if pulling_from is not None: if pulling_from is not None:
if len(line) > 18 and line[:6] == 'class ' and line[-12:] == '(Container):': if len(line) > 18 and line[:6] == 'class ' and (line[-12:] == '(Container):' or '(phase' in line):
name = line[6:-12] end = -12 if line[-12:] == '(Container):' else line.find('(')
name = line[6:end]
# Check consistency with markdown header # Check consistency with markdown header
assert name == current_name assert name == current_name
block_type = CodeBlockType.SSZ block_type = CodeBlockType.SSZ
@ -156,6 +157,40 @@ SSZObject = TypeVar('SSZObject', bound=View)
CONFIG_NAME = 'mainnet' CONFIG_NAME = 'mainnet'
''' '''
LIGHTCLIENT_IMPORT = '''from eth2spec.phase0 import spec as phase0
from eth2spec.config.config_util import apply_constants_config
from typing import (
Any, Dict, Set, Sequence, NewType, Tuple, TypeVar, Callable, Optional
)
from dataclasses import (
dataclass,
field,
)
from lru import LRU
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, copy, uint_to_bytes
from eth2spec.utils.ssz.ssz_typing import (
View, boolean, Container, List, Vector, uint8, uint32, uint64,
Bytes1, Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
)
from eth2spec.utils import bls
from eth2spec.utils.hash_function import hash
# Whenever lightclient is loaded, make sure we have the latest phase0
from importlib import reload
reload(phase0)
SSZVariableName = str
GeneralizedIndex = NewType('GeneralizedIndex', int)
SSZObject = TypeVar('SSZObject', bound=View)
CONFIG_NAME = 'mainnet'
'''
SUNDRY_CONSTANTS_FUNCTIONS = ''' SUNDRY_CONSTANTS_FUNCTIONS = '''
def ceillog2(x: int) -> uint64: def ceillog2(x: int) -> uint64:
if x < 1: if x < 1:
@ -351,6 +386,7 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
fork_imports = { fork_imports = {
'phase0': PHASE0_IMPORTS, 'phase0': PHASE0_IMPORTS,
'phase1': PHASE1_IMPORTS, 'phase1': PHASE1_IMPORTS,
'lightclient_patch': LIGHTCLIENT_IMPORT,
} }
@ -417,6 +453,16 @@ class PySpecCommand(Command):
specs/phase1/shard-fork-choice.md specs/phase1/shard-fork-choice.md
specs/phase1/validator.md specs/phase1/validator.md
""" """
elif self.spec_fork == "lightclient_patch":
self.md_doc_paths = """
specs/phase0/beacon-chain.md
specs/phase0/fork-choice.md
specs/phase0/validator.md
specs/phase0/weak-subjectivity.md
specs/lightclient/beacon-chain.md
specs/lightclient/lightclient-fork.md
"""
# TODO: add specs/lightclient/sync-protocol.md back when the GeneralizedIndex helpers are included.
else: else:
raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork) raise Exception('no markdown files specified, and spec fork "%s" is unknown', self.spec_fork)
@ -516,13 +562,12 @@ setup(
url="https://github.com/ethereum/eth2.0-specs", url="https://github.com/ethereum/eth2.0-specs",
include_package_data=False, include_package_data=False,
package_data={'configs': ['*.yaml'], package_data={'configs': ['*.yaml'],
'specs': ['**/*.md'], 'specs': ['**/*.md'],
'eth2spec': ['VERSION.txt']}, 'eth2spec': ['VERSION.txt']},
package_dir={ package_dir={
"eth2spec": "tests/core/pyspec/eth2spec", "eth2spec": "tests/core/pyspec/eth2spec",
"configs": "configs", "configs": "configs",
"specs": "specs" "specs": "specs",
}, },
packages=find_packages(where='tests/core/pyspec') + ['configs', 'specs'], packages=find_packages(where='tests/core/pyspec') + ['configs', 'specs'],
py_modules=["eth2spec"], py_modules=["eth2spec"],
@ -536,10 +581,10 @@ setup(
"eth-utils>=1.3.0,<2", "eth-utils>=1.3.0,<2",
"eth-typing>=2.1.0,<3.0.0", "eth-typing>=2.1.0,<3.0.0",
"pycryptodome==3.9.4", "pycryptodome==3.9.4",
"py_ecc==5.0.0", "py_ecc==5.1.0",
"milagro_bls_binding==1.5.0", "milagro_bls_binding==1.6.3",
"dataclasses==0.6", "dataclasses==0.6",
"remerkleable==0.1.17", "remerkleable==0.1.18",
"ruamel.yaml==0.16.5", "ruamel.yaml==0.16.5",
"lru-dict==1.1.6" "lru-dict==1.1.6"
] ]

View File

@ -0,0 +1,673 @@
# Ethereum 2.0 HF1
## Table of contents
<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Introduction](#introduction)
- [Custom types](#custom-types)
- [Constants](#constants)
- [Validator action flags](#validator-action-flags)
- [Participation rewards](#participation-rewards)
- [Misc](#misc)
- [Configuration](#configuration)
- [Updated penalty values](#updated-penalty-values)
- [Misc](#misc-1)
- [Time parameters](#time-parameters)
- [Domain types](#domain-types)
- [Containers](#containers)
- [Extended containers](#extended-containers)
- [`BeaconBlockBody`](#beaconblockbody)
- [`BeaconState`](#beaconstate)
- [New containers](#new-containers)
- [`SyncCommittee`](#synccommittee)
- [Helper functions](#helper-functions)
- [`Predicates`](#predicates)
- [`eth2_fast_aggregate_verify`](#eth2_fast_aggregate_verify)
- [Misc](#misc-2)
- [`flags_and_numerators`](#flags_and_numerators)
- [Beacon state accessors](#beacon-state-accessors)
- [`get_sync_committee_indices`](#get_sync_committee_indices)
- [`get_sync_committee`](#get_sync_committee)
- [`get_base_reward`](#get_base_reward)
- [`get_unslashed_participating_indices`](#get_unslashed_participating_indices)
- [`get_flag_deltas`](#get_flag_deltas)
- [New `get_inactivity_penalty_deltas`](#new-get_inactivity_penalty_deltas)
- [Beacon state mutators](#beacon-state-mutators)
- [New `slash_validator`](#new-slash_validator)
- [Block processing](#block-processing)
- [New `process_attestation`](#new-process_attestation)
- [New `process_deposit`](#new-process_deposit)
- [Sync committee processing](#sync-committee-processing)
- [Epoch processing](#epoch-processing)
- [New `process_justification_and_finalization`](#new-process_justification_and_finalization)
- [New `process_rewards_and_penalties`](#new-process_rewards_and_penalties)
- [New `process_slashings`](#new-process_slashings)
- [Sync committee updates](#sync-committee-updates)
- [Participation flags updates](#participation-flags-updates)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
## Introduction
This is a patch implementing the first hard fork to the beacon chain, tentatively named HF1 pending a permanent name.
It has four main features:
* Light client support via sync committees
* Incentive accounting reforms, reducing spec complexity
and [TODO] reducing the cost of processing chains that have very little or zero participation for a long span of epochs
* Update penalty configuration values, moving them toward their planned maximally punitive configuration
* Fork choice rule changes to address weaknesses recently discovered in the existing fork choice
## Custom types
| Name | SSZ equivalent | Description |
| - | - | - |
| `ValidatorFlag` | `uint8` | Bitflags to track validator actions with |
## Constants
### Validator action flags
This is formatted as an enum, with values `2**i` that can be combined as bit-flags.
The `0` value is reserved as default. Remaining bits in `ValidatorFlag` may be used in future hardforks.
**Note**: Unlike Phase0, a `TIMELY_TARGET_FLAG` does not necessarily imply a `TIMELY_SOURCE_FLAG`
due to the varying slot delay requirements of each.
| Name | Value |
| - | - |
| `TIMELY_HEAD_FLAG` | `ValidatorFlag(2**0)` (= 1) |
| `TIMELY_SOURCE_FLAG` | `ValidatorFlag(2**1)` (= 2) |
| `TIMELY_TARGET_FLAG` | `ValidatorFlag(2**2)` (= 4) |
### Participation rewards
| Name | Value |
| - | - |
| `TIMELY_HEAD_NUMERATOR` | `12` |
| `TIMELY_SOURCE_NUMERATOR` | `12` |
| `TIMELY_TARGET_NUMERATOR` | `32` |
| `REWARD_DENOMINATOR` | `64` |
The reward fractions add up to 7/8, leaving the remaining 1/8 for proposer rewards and other future micro-rewards.
### Misc
| Name | Value |
| - | - |
| `G2_POINT_AT_INFINITY` | `BLSSignature(b'\xc0' + b'\x00' * 95)` |
## Configuration
### Updated penalty values
This patch updates a few configuration values to move penalty constants toward their final, maximum security values.
*Note*: The spec does *not* override previous configuration values but instead creates new values and replaces usage throughout.
| Name | Value |
| - | - |
| `HF1_INACTIVITY_PENALTY_QUOTIENT` | `uint64(3 * 2**24)` (= 50,331,648) |
| `HF1_MIN_SLASHING_PENALTY_QUOTIENT` | `uint64(2**6)` (=64) |
| `HF1_PROPORTIONAL_SLASHING_MULTIPLIER` | `uint64(2)` |
### Misc
| Name | Value |
| - | - |
| `SYNC_COMMITTEE_SIZE` | `uint64(2**10)` (= 1024) |
| `SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE` | `uint64(2**6)` (= 64) |
### Time parameters
| Name | Value | Unit | Duration |
| - | - | :-: | :-: |
| `EPOCHS_PER_SYNC_COMMITTEE_PERIOD` | `Epoch(2**8)` (= 256) | epochs | ~27 hours |
### Domain types
| Name | Value |
| - | - |
| `DOMAIN_SYNC_COMMITTEE` | `DomainType('0x07000000')` |
## Containers
### Extended containers
*Note*: Extended SSZ containers inherit all fields from the parent in the original
order and append any additional fields to the end.
#### `BeaconBlockBody`
```python
# Extends the phase 0 block body by appending the sync committee aggregate fields
# (extended containers inherit all parent fields in order; see note above).
class BeaconBlockBody(phase0.BeaconBlockBody):
    # Sync committee aggregate signature
    sync_committee_bits: Bitvector[SYNC_COMMITTEE_SIZE]  # One bit per sync committee member
    sync_committee_signature: BLSSignature  # Aggregate signature over the participating members
```
#### `BeaconState`
```python
# BeaconState for this patch: replaces phase 0 pending attestations with per-epoch
# participation flag lists and appends the light-client sync committee fields.
class BeaconState(Container):
    # Versioning
    genesis_time: uint64
    genesis_validators_root: Root
    slot: Slot
    fork: Fork
    # History
    latest_block_header: BeaconBlockHeader
    block_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT]
    state_roots: Vector[Root, SLOTS_PER_HISTORICAL_ROOT]
    historical_roots: List[Root, HISTORICAL_ROOTS_LIMIT]
    # Eth1
    eth1_data: Eth1Data
    eth1_data_votes: List[Eth1Data, EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH]
    eth1_deposit_index: uint64
    # Registry
    validators: List[Validator, VALIDATOR_REGISTRY_LIMIT]
    balances: List[Gwei, VALIDATOR_REGISTRY_LIMIT]
    # Randomness
    randao_mixes: Vector[Bytes32, EPOCHS_PER_HISTORICAL_VECTOR]
    # Slashings
    slashings: Vector[Gwei, EPOCHS_PER_SLASHINGS_VECTOR]  # Per-epoch sums of slashed effective balances
    # Participation
    # One ValidatorFlag bitfield per registered validator (parallel to ``validators``)
    previous_epoch_participation: List[ValidatorFlag, VALIDATOR_REGISTRY_LIMIT]
    current_epoch_participation: List[ValidatorFlag, VALIDATOR_REGISTRY_LIMIT]
    # Finality
    justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH]  # Bit set for every recent justified epoch
    previous_justified_checkpoint: Checkpoint
    current_justified_checkpoint: Checkpoint
    finalized_checkpoint: Checkpoint
    # Light client sync committees
    current_sync_committee: SyncCommittee
    next_sync_committee: SyncCommittee
### New containers
#### `SyncCommittee`
```python
class SyncCommittee(Container):
    # Public keys of all committee members (may contain duplicates; see get_sync_committee_indices)
    pubkeys: Vector[BLSPubkey, SYNC_COMMITTEE_SIZE]
    # Aggregate public keys of fixed-size chunks of ``pubkeys`` (see get_sync_committee)
    pubkey_aggregates: Vector[BLSPubkey, SYNC_COMMITTEE_SIZE // SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE]
```
## Helper functions
### `Predicates`
#### `eth2_fast_aggregate_verify`
```python
def eth2_fast_aggregate_verify(pubkeys: Sequence[BLSPubkey], message: Bytes32, signature: BLSSignature) -> bool:
    """
    Like ``bls.FastAggregateVerify``, but additionally accepts the ``G2_POINT_AT_INFINITY``
    signature as valid when ``pubkeys`` is empty.
    """
    committee_is_empty = len(pubkeys) == 0
    if committee_is_empty and signature == G2_POINT_AT_INFINITY:
        return True
    return bls.FastAggregateVerify(pubkeys, message, signature)
```
### Misc
#### `flags_and_numerators`
```python
def get_flags_and_numerators() -> Sequence[Tuple[ValidatorFlag, int]]:
    """Return each participation flag paired with its reward numerator."""
    flag_numerator_pairs = (
        (TIMELY_HEAD_FLAG, TIMELY_HEAD_NUMERATOR),
        (TIMELY_SOURCE_FLAG, TIMELY_SOURCE_NUMERATOR),
        (TIMELY_TARGET_FLAG, TIMELY_TARGET_NUMERATOR),
    )
    return flag_numerator_pairs
```
```python
def add_validator_flags(flags: ValidatorFlag, add: ValidatorFlag) -> ValidatorFlag:
    """Return ``flags`` with every bit of ``add`` set (bitwise OR)."""
    combined = flags | add
    return combined
```
```python
def has_validator_flags(flags: ValidatorFlag, has: ValidatorFlag) -> bool:
    """Return True iff every bit set in ``has`` is also set in ``flags``."""
    masked = flags & has
    return masked == has
```
### Beacon state accessors
#### `get_sync_committee_indices`
```python
def get_sync_committee_indices(state: BeaconState, epoch: Epoch) -> Sequence[ValidatorIndex]:
    """
    Return the sequence of sync committee indices (which may include duplicate indices) for a given state and epoch.
    """
    MAX_RANDOM_BYTE = 2**8 - 1
    # Seed and active set are taken from the start of the *previous* sync committee period
    # (clamped so the first period uses epoch 0).
    base_epoch = Epoch((max(epoch // EPOCHS_PER_SYNC_COMMITTEE_PERIOD, 1) - 1) * EPOCHS_PER_SYNC_COMMITTEE_PERIOD)
    active_validator_indices = get_active_validator_indices(state, base_epoch)
    active_validator_count = uint64(len(active_validator_indices))
    seed = get_seed(state, base_epoch, DOMAIN_SYNC_COMMITTEE)
    i = 0
    sync_committee_indices: List[ValidatorIndex] = []
    # Draw shuffled candidates until the committee is full; a candidate is accepted with
    # probability effective_balance / MAX_EFFECTIVE_BALANCE, biasing selection toward full balances.
    while len(sync_committee_indices) < SYNC_COMMITTEE_SIZE:
        shuffled_index = compute_shuffled_index(uint64(i % active_validator_count), active_validator_count, seed)
        candidate_index = active_validator_indices[shuffled_index]
        # One hash yields 32 random bytes; consume them one per iteration (i // 32 selects the hash input)
        random_byte = hash(seed + uint_to_bytes(uint64(i // 32)))[i % 32]
        effective_balance = state.validators[candidate_index].effective_balance
        if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE * random_byte:  # Sample with replacement
            sync_committee_indices.append(candidate_index)
        i += 1
    return sync_committee_indices
```
#### `get_sync_committee`
```python
def get_sync_committee(state: BeaconState, epoch: Epoch) -> SyncCommittee:
    """
    Return the sync committee for a given state and epoch.
    """
    member_indices = get_sync_committee_indices(state, epoch)
    pubkeys = [state.validators[index].pubkey for index in member_indices]
    # Pre-aggregate fixed-size chunks of the member pubkeys
    chunk_size = SYNC_COMMITTEE_PUBKEY_AGGREGATES_SIZE
    aggregates = []
    for start in range(0, len(pubkeys), chunk_size):
        aggregates.append(bls.AggregatePKs(pubkeys[start:start + chunk_size]))
    return SyncCommittee(pubkeys=pubkeys, pubkey_aggregates=aggregates)
```
#### `get_base_reward`
*Note*: The function `get_base_reward` is modified with the removal of `BASE_REWARDS_PER_EPOCH`.
```python
def get_base_reward(state: BeaconState, index: ValidatorIndex) -> Gwei:
    """
    Return the per-flag base reward for validator ``index``, proportional to its
    effective balance and inversely proportional to sqrt(total active balance).
    (``BASE_REWARDS_PER_EPOCH`` no longer appears; see note above.)
    """
    effective_balance = state.validators[index].effective_balance
    total_balance = get_total_active_balance(state)
    return Gwei(effective_balance * BASE_REWARD_FACTOR // integer_squareroot(total_balance))
```
#### `get_unslashed_participating_indices`
```python
def get_unslashed_participating_indices(state: BeaconState, flags: ValidatorFlag, epoch: Epoch) -> Set[ValidatorIndex]:
    """
    Retrieve the active validator indices of the given epoch, which are not slashed, and have all of the given flags.
    """
    assert epoch in (get_previous_epoch(state), get_current_epoch(state))
    # Participation is tracked in separate lists for the current and previous epoch
    if epoch == get_current_epoch(state):
        epoch_participation = state.current_epoch_participation
    else:
        epoch_participation = state.previous_epoch_participation
    return {
        index for index in get_active_validator_indices(state, epoch)
        if has_validator_flags(epoch_participation[index], flags) and not state.validators[index].slashed
    }
```
#### `get_flag_deltas`
```python
def get_flag_deltas(state: BeaconState,
                    flag: ValidatorFlag,
                    numerator: uint64) -> Tuple[Sequence[Gwei], Sequence[Gwei]]:
    """
    Compute the rewards and penalties associated with a particular duty, by scanning through the participation
    flags to determine who participated and who did not and assigning them the appropriate rewards and penalties.
    """
    rewards = [Gwei(0)] * len(state.validators)
    penalties = [Gwei(0)] * len(state.validators)
    # Participation is judged against the previous epoch's flags
    unslashed_participating_indices = get_unslashed_participating_indices(state, flag, get_previous_epoch(state))
    increment = EFFECTIVE_BALANCE_INCREMENT  # Factored out from balances to avoid uint64 overflow
    unslashed_participating_increments = get_total_balance(state, unslashed_participating_indices) // increment
    active_increments = get_total_active_balance(state) // increment
    for index in get_eligible_validator_indices(state):
        base_reward = get_base_reward(state, index)
        if index in unslashed_participating_indices:
            if is_in_inactivity_leak(state):
                # Optimal participation is fully rewarded to cancel the inactivity penalty
                rewards[index] = base_reward * numerator // REWARD_DENOMINATOR
            else:
                # Reward scales with the fraction of active stake that participated
                rewards[index] = (
                    (base_reward * numerator * unslashed_participating_increments)
                    // (active_increments * REWARD_DENOMINATOR)
                )
        else:
            # Non-participants lose the full flag fraction of the base reward
            penalties[index] = base_reward * numerator // REWARD_DENOMINATOR
    return rewards, penalties
```
#### New `get_inactivity_penalty_deltas`
*Note*: The function `get_inactivity_penalty_deltas` is modified in the selection of matching target indices
and the removal of `BASE_REWARDS_PER_EPOCH`.
```python
def get_inactivity_penalty_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence[Gwei]]:
    """
    Compute the penalties associated with the inactivity leak, by scanning through the participation
    flags to determine who participated and who did not, applying the leak penalty globally and applying
    compensatory rewards to participants.
    """
    penalties = [Gwei(0) for _ in range(len(state.validators))]
    if is_in_inactivity_leak(state):
        # Sum of all flag numerators: the total attestation-reward fraction per epoch
        reward_numerator_sum = sum(numerator for (_, numerator) in get_flags_and_numerators())
        matching_target_attesting_indices = get_unslashed_participating_indices(
            state, TIMELY_TARGET_FLAG, get_previous_epoch(state)
        )
        for index in get_eligible_validator_indices(state):
            # If validator is performing optimally this cancels all attestation rewards for a neutral balance
            penalties[index] += Gwei(get_base_reward(state, index) * reward_numerator_sum // REWARD_DENOMINATOR)
            if index not in matching_target_attesting_indices:
                # Leak penalty grows with the finality delay, scaled by HF1_INACTIVITY_PENALTY_QUOTIENT
                effective_balance = state.validators[index].effective_balance
                penalties[index] += Gwei(
                    effective_balance * get_finality_delay(state)
                    // HF1_INACTIVITY_PENALTY_QUOTIENT
                )
    # This delta set never awards rewards
    rewards = [Gwei(0) for _ in range(len(state.validators))]
    return rewards, penalties
```
### Beacon state mutators
#### New `slash_validator`
*Note*: The function `slash_validator` is modified
with the substitution of `MIN_SLASHING_PENALTY_QUOTIENT` with `HF1_MIN_SLASHING_PENALTY_QUOTIENT`.
```python
def slash_validator(state: BeaconState,
                    slashed_index: ValidatorIndex,
                    whistleblower_index: ValidatorIndex=None) -> None:
    """
    Slash the validator with index ``slashed_index``.
    """
    epoch = get_current_epoch(state)
    initiate_validator_exit(state, slashed_index)
    validator = state.validators[slashed_index]
    validator.slashed = True
    # Extend withdrawability by at least the full slashings-vector length
    validator.withdrawable_epoch = max(validator.withdrawable_epoch, Epoch(epoch + EPOCHS_PER_SLASHINGS_VECTOR))
    state.slashings[epoch % EPOCHS_PER_SLASHINGS_VECTOR] += validator.effective_balance
    # HF1 change: initial penalty uses HF1_MIN_SLASHING_PENALTY_QUOTIENT (see note above)
    decrease_balance(state, slashed_index, validator.effective_balance // HF1_MIN_SLASHING_PENALTY_QUOTIENT)
    # Apply proposer and whistleblower rewards
    proposer_index = get_beacon_proposer_index(state)
    if whistleblower_index is None:
        # The proposer collects the whistleblower reward when no whistleblower is named
        whistleblower_index = proposer_index
    whistleblower_reward = Gwei(validator.effective_balance // WHISTLEBLOWER_REWARD_QUOTIENT)
    proposer_reward = Gwei(whistleblower_reward // PROPOSER_REWARD_QUOTIENT)
    increase_balance(state, proposer_index, proposer_reward)
    increase_balance(state, whistleblower_index, Gwei(whistleblower_reward - proposer_reward))
```
### Block processing
```python
def process_block(state: BeaconState, block: BeaconBlock) -> None:
    """Apply ``block`` to ``state``: the phase 0 steps plus sync committee processing."""
    process_block_header(state, block)
    process_randao(state, block.body)
    process_eth1_data(state, block.body)
    process_operations(state, block.body)
    # Light client support
    process_sync_committee(state, block.body)
```
#### New `process_attestation`
*Note*: The function `process_attestation` is modified to do incentive accounting with epoch participation flags.
```python
def process_attestation(state: BeaconState, attestation: Attestation) -> None:
    """
    Process ``attestation``, doing incentive accounting with epoch participation flags
    and paying the proposer for each newly set flag.
    """
    data = attestation.data
    # Target must be the current or previous epoch and agree with the attestation slot
    assert data.target.epoch in (get_previous_epoch(state), get_current_epoch(state))
    assert data.target.epoch == compute_epoch_at_slot(data.slot)
    # Inclusion window: at least MIN_ATTESTATION_INCLUSION_DELAY, at most one epoch after the slot
    assert data.slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot <= data.slot + SLOTS_PER_EPOCH
    assert data.index < get_committee_count_per_slot(state, data.target.epoch)
    committee = get_beacon_committee(state, data.slot, data.index)
    assert len(attestation.aggregation_bits) == len(committee)
    # Select the participation record and justified checkpoint for the target epoch
    if data.target.epoch == get_current_epoch(state):
        epoch_participation = state.current_epoch_participation
        justified_checkpoint = state.current_justified_checkpoint
    else:
        epoch_participation = state.previous_epoch_participation
        justified_checkpoint = state.previous_justified_checkpoint
    # Matching roots
    is_matching_head = data.beacon_block_root == get_block_root_at_slot(state, data.slot)
    is_matching_source = data.source == justified_checkpoint
    is_matching_target = data.target.root == get_block_root(state, data.target.epoch)
    assert is_matching_source
    # Verify signature
    assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation))
    # Participation flags
    # Each flag has its own inclusion-delay requirement (head is strictest, target is loosest)
    participation_flags = []
    if is_matching_head and is_matching_target and state.slot <= data.slot + MIN_ATTESTATION_INCLUSION_DELAY:
        participation_flags.append(TIMELY_HEAD_FLAG)
    if is_matching_source and state.slot <= data.slot + integer_squareroot(SLOTS_PER_EPOCH):
        participation_flags.append(TIMELY_SOURCE_FLAG)
    if is_matching_target and state.slot <= data.slot + SLOTS_PER_EPOCH:
        participation_flags.append(TIMELY_TARGET_FLAG)
    # Update epoch participation flags
    proposer_reward_numerator = 0
    for index in get_attesting_indices(state, data, attestation.aggregation_bits):
        for flag, numerator in get_flags_and_numerators():
            # Only newly set flags pay the proposer, so re-included attestations earn nothing extra
            if flag in participation_flags and not has_validator_flags(epoch_participation[index], flag):
                epoch_participation[index] = add_validator_flags(epoch_participation[index], flag)
                proposer_reward_numerator += get_base_reward(state, index) * numerator
    # Reward proposer
    proposer_reward = Gwei(proposer_reward_numerator // (REWARD_DENOMINATOR * PROPOSER_REWARD_QUOTIENT))
    increase_balance(state, get_beacon_proposer_index(state), proposer_reward)
```
#### New `process_deposit`
*Note*: The function `process_deposit` is modified to initialize `previous_epoch_participation` and `current_epoch_participation`.
```python
def process_deposit(state: BeaconState, deposit: Deposit) -> None:
    """
    Process an Eth1 ``deposit`` against ``state``: either register a brand-new
    validator or top up the balance of an existing one.

    [Modified in HF1] A newly added validator also receives zeroed entries in
    ``previous_epoch_participation`` / ``current_epoch_participation`` so those
    lists stay index-aligned with ``state.validators``.
    """
    # Verify the Merkle branch
    assert is_valid_merkle_branch(
        leaf=hash_tree_root(deposit.data),
        branch=deposit.proof,
        depth=DEPOSIT_CONTRACT_TREE_DEPTH + 1,  # Add 1 for the List length mix-in
        index=state.eth1_deposit_index,
        root=state.eth1_data.deposit_root,
    )
    # Deposits must be processed in order
    state.eth1_deposit_index += 1
    pubkey = deposit.data.pubkey
    amount = deposit.data.amount
    validator_pubkeys = [v.pubkey for v in state.validators]
    if pubkey not in validator_pubkeys:
        # Verify the deposit signature (proof of possession) which is not checked by the deposit contract
        deposit_message = DepositMessage(
            pubkey=deposit.data.pubkey,
            withdrawal_credentials=deposit.data.withdrawal_credentials,
            amount=deposit.data.amount,
        )
        domain = compute_domain(DOMAIN_DEPOSIT)  # Fork-agnostic domain since deposits are valid across forks
        signing_root = compute_signing_root(deposit_message, domain)
        # An invalid signature skips the deposit (no assert): the deposit index was
        # already consumed above, so chain processing continues past bad deposits.
        if not bls.Verify(pubkey, signing_root, deposit.data.signature):
            return
        # Add validator and balance entries
        state.validators.append(get_validator_from_deposit(state, deposit))
        state.balances.append(amount)
        # [Added in hf-1] Initialize empty participation flags for new validator
        state.previous_epoch_participation.append(ValidatorFlag(0))
        state.current_epoch_participation.append(ValidatorFlag(0))
    else:
        # Increase balance by deposit amount
        index = ValidatorIndex(validator_pubkeys.index(pubkey))
        increase_balance(state, index, amount)
```
#### Sync committee processing
```python
def process_sync_committee(state: BeaconState, body: BeaconBlockBody) -> None:
    """
    Verify the block body's sync committee aggregate signature — signed over the
    block root of the previous slot — then credit each participating committee
    member and the block proposer with their rewards.
    """
    # Verify sync committee aggregate signature signing over the previous slot block root
    previous_slot = Slot(max(int(state.slot), 1) - 1)  # clamp so slot 0 never underflows
    committee_indices = get_sync_committee_indices(state, get_current_epoch(state))
    participant_indices = [index for index, bit in zip(committee_indices, body.sync_committee_bits) if bit]
    committee_pubkeys = state.current_sync_committee.pubkeys
    participant_pubkeys = [pubkey for pubkey, bit in zip(committee_pubkeys, body.sync_committee_bits) if bit]
    domain = get_domain(state, DOMAIN_SYNC_COMMITTEE, compute_epoch_at_slot(previous_slot))
    signing_root = compute_signing_root(get_block_root_at_slot(state, previous_slot), domain)
    assert eth2_fast_aggregate_verify(participant_pubkeys, signing_root, body.sync_committee_signature)
    # Reward sync committee participants
    total_proposer_reward = Gwei(0)
    active_validator_count = uint64(len(get_active_validator_indices(state, get_current_epoch(state))))
    for participant_index in participant_indices:
        base_reward = get_base_reward(state, participant_index)
        proposer_reward = get_proposer_reward(state, participant_index)
        # Participant keeps the base reward minus the proposer's cut, scaled by
        # (active validators / committee size / slots per epoch).
        max_participant_reward = base_reward - proposer_reward
        reward = Gwei(max_participant_reward * active_validator_count // len(committee_indices) // SLOTS_PER_EPOCH)
        increase_balance(state, participant_index, reward)
        total_proposer_reward += proposer_reward
    # Reward beacon proposer
    increase_balance(state, get_beacon_proposer_index(state), total_proposer_reward)
```
### Epoch processing
```python
def process_epoch(state: BeaconState) -> None:
    """
    Per-epoch state transition for HF1.

    The ordering of the sub-transitions is normative.  Relative to phase 0,
    the pending-attestation rotation (``process_participation_record_updates``)
    is removed and replaced by participation-flag rotation plus sync committee
    rotation at the end.
    """
    process_justification_and_finalization(state)  # [Updated in HF1]
    process_rewards_and_penalties(state)  # [Updated in HF1]
    process_registry_updates(state)
    process_slashings(state)
    process_eth1_data_reset(state)
    process_effective_balance_updates(state)
    process_slashings_reset(state)
    process_randao_mixes_reset(state)
    process_historical_roots_update(state)
    # [Removed in HF1] -- process_participation_record_updates(state)
    # [Added in HF1]
    process_participation_flag_updates(state)
    process_sync_committee_updates(state)
```
#### New `process_justification_and_finalization`
*Note*: The function `process_justification_and_finalization` is modified with `matching_target_attestations` replaced by `matching_target_indices`.
```python
def process_justification_and_finalization(state: BeaconState) -> None:
    """
    Update the justification bitfield and justified checkpoints from the
    previous and current epochs' target participation, then derive finality
    via the standard 4-bit Casper FFG rules.

    [Modified in HF1] Target support is measured from the unslashed indices
    carrying ``TIMELY_TARGET_FLAG`` instead of from pending attestations.
    """
    # Initial FFG checkpoint values have a `0x00` stub for `root`.
    # Skip FFG updates in the first two epochs to avoid corner cases that might result in modifying this stub.
    if get_current_epoch(state) <= GENESIS_EPOCH + 1:
        return
    previous_epoch = get_previous_epoch(state)
    current_epoch = get_current_epoch(state)
    old_previous_justified_checkpoint = state.previous_justified_checkpoint
    old_current_justified_checkpoint = state.current_justified_checkpoint
    # Process justifications
    state.previous_justified_checkpoint = state.current_justified_checkpoint
    state.justification_bits[1:] = state.justification_bits[:JUSTIFICATION_BITS_LENGTH - 1]
    state.justification_bits[0] = 0b0
    # Previous epoch: justified if >= 2/3 of total active balance attested to the target
    matching_target_indices = get_unslashed_participating_indices(state, TIMELY_TARGET_FLAG, previous_epoch)
    if get_total_balance(state, matching_target_indices) * 3 >= get_total_active_balance(state) * 2:
        state.current_justified_checkpoint = Checkpoint(epoch=previous_epoch,
                                                        root=get_block_root(state, previous_epoch))
        state.justification_bits[1] = 0b1
    # Current epoch: same 2/3 supermajority check
    matching_target_indices = get_unslashed_participating_indices(state, TIMELY_TARGET_FLAG, current_epoch)
    if get_total_balance(state, matching_target_indices) * 3 >= get_total_active_balance(state) * 2:
        state.current_justified_checkpoint = Checkpoint(epoch=current_epoch,
                                                        root=get_block_root(state, current_epoch))
        state.justification_bits[0] = 0b1
    # Process finalizations
    bits = state.justification_bits
    # The 2nd/3rd/4th most recent epochs are justified, the 2nd using the 4th as source
    if all(bits[1:4]) and old_previous_justified_checkpoint.epoch + 3 == current_epoch:
        state.finalized_checkpoint = old_previous_justified_checkpoint
    # The 2nd/3rd most recent epochs are justified, the 2nd using the 3rd as source
    if all(bits[1:3]) and old_previous_justified_checkpoint.epoch + 2 == current_epoch:
        state.finalized_checkpoint = old_previous_justified_checkpoint
    # The 1st/2nd/3rd most recent epochs are justified, the 1st using the 3rd as source
    if all(bits[0:3]) and old_current_justified_checkpoint.epoch + 2 == current_epoch:
        state.finalized_checkpoint = old_current_justified_checkpoint
    # The 1st/2nd most recent epochs are justified, the 1st using the 2nd as source
    if all(bits[0:2]) and old_current_justified_checkpoint.epoch + 1 == current_epoch:
        state.finalized_checkpoint = old_current_justified_checkpoint
```
#### New `process_rewards_and_penalties`
*Note*: The function `process_rewards_and_penalties` is modified to use participation flag deltas.
```python
def process_rewards_and_penalties(state: BeaconState) -> None:
    """
    Apply the per-flag participation deltas plus the inactivity-penalty deltas
    to every validator's balance.

    [Modified in HF1] Deltas are computed from participation flags
    (``get_flag_deltas``) rather than from pending attestations.
    """
    # No rewards are applied at the end of `GENESIS_EPOCH` because rewards are for work done in the previous epoch
    if get_current_epoch(state) == GENESIS_EPOCH:
        return
    flag_deltas = [get_flag_deltas(state, flag, numerator) for (flag, numerator) in get_flags_and_numerators()]
    deltas = flag_deltas + [get_inactivity_penalty_deltas(state)]
    for (rewards, penalties) in deltas:
        for index in range(len(state.validators)):
            increase_balance(state, ValidatorIndex(index), rewards[index])
            decrease_balance(state, ValidatorIndex(index), penalties[index])
```
#### New `process_slashings`
*Note*: The function `process_slashings` is modified
with the substitution of `PROPORTIONAL_SLASHING_MULTIPLIER` with `HF1_PROPORTIONAL_SLASHING_MULTIPLIER`.
```python
def process_slashings(state: BeaconState) -> None:
    """
    Apply the proportional slashing penalty to each slashed validator at the
    midpoint of its withdrawability delay (``EPOCHS_PER_SLASHINGS_VECTOR // 2``
    epochs before ``withdrawable_epoch``).

    [Modified in HF1] Uses ``HF1_PROPORTIONAL_SLASHING_MULTIPLIER`` in place of
    phase 0's ``PROPORTIONAL_SLASHING_MULTIPLIER``.
    """
    epoch = get_current_epoch(state)
    total_balance = get_total_active_balance(state)
    adjusted_total_slashing_balance = min(sum(state.slashings) * HF1_PROPORTIONAL_SLASHING_MULTIPLIER, total_balance)
    for index, validator in enumerate(state.validators):
        if validator.slashed and epoch + EPOCHS_PER_SLASHINGS_VECTOR // 2 == validator.withdrawable_epoch:
            increment = EFFECTIVE_BALANCE_INCREMENT  # Factored out from penalty numerator to avoid uint64 overflow
            penalty_numerator = validator.effective_balance // increment * adjusted_total_slashing_balance
            # Integer division before multiplying back by `increment` rounds the
            # penalty down to a whole number of increments.
            penalty = penalty_numerator // total_balance * increment
            decrease_balance(state, ValidatorIndex(index), penalty)
```
#### Sync committee updates
```python
def process_sync_committee_updates(state: BeaconState) -> None:
    """
    Call to ``process_sync_committee_updates`` added to ``process_epoch`` in HF1.

    At each sync-committee-period boundary, promote the next sync committee to
    current and compute a fresh next committee one full period ahead.
    """
    next_epoch = get_current_epoch(state) + Epoch(1)
    if next_epoch % EPOCHS_PER_SYNC_COMMITTEE_PERIOD == 0:
        state.current_sync_committee = state.next_sync_committee
        state.next_sync_committee = get_sync_committee(state, next_epoch + EPOCHS_PER_SYNC_COMMITTEE_PERIOD)
```
#### Participation flags updates
```python
def process_participation_flag_updates(state: BeaconState) -> None:
    """
    Call to ``process_participation_flag_updates`` added to ``process_epoch`` in HF1.

    Rotate the current-epoch participation flags into the previous-epoch slot
    and reset the current-epoch flags to zero for every validator.
    """
    state.previous_epoch_participation = state.current_epoch_participation
    state.current_epoch_participation = [ValidatorFlag(0) for _ in range(len(state.validators))]
```

View File

@ -0,0 +1,82 @@
# Ethereum 2.0 Light Client Support -- From Phase 0 to Light Client Patch
**Notice**: This document is a work-in-progress for researchers and implementers.
## Table of contents
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Introduction](#introduction)
- [Configuration](#configuration)
- [Fork to Light-client patch](#fork-to-light-client-patch)
- [Fork trigger](#fork-trigger)
- [Upgrading the state](#upgrading-the-state)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Introduction
This document describes the process of moving from Phase 0 of Ethereum 2.0 to the light-client patch.
## Configuration
Warning: this configuration is not definitive.
| Name | Value |
| - | - |
| `LIGHTCLIENT_PATCH_FORK_VERSION` | `Version('0x01000000')` |
| `LIGHTCLIENT_PATCH_FORK_SLOT` | `Slot(0)` **TBD** |
## Fork to Light-client patch
### Fork trigger
TBD. Social consensus, along with state conditions such as epoch boundary, finality, deposits, active validator count, etc. may be part of the decision process to trigger the fork. For now we assume the condition will be triggered at slot `LIGHTCLIENT_PATCH_FORK_SLOT`, where `LIGHTCLIENT_PATCH_FORK_SLOT % SLOTS_PER_EPOCH == 0`.
### Upgrading the state
After `process_slots` of Phase 0 finishes, if `state.slot == LIGHTCLIENT_PATCH_FORK_SLOT`, an irregular state change is made to upgrade to light-client patch.
```python
def upgrade_to_lightclient_patch(pre: phase0.BeaconState) -> BeaconState:
    """
    One-off, irregular state upgrade from a phase-0 ``BeaconState`` to the
    light-client-patch ``BeaconState``.

    All phase-0 fields are carried over, the fork version is bumped to
    ``LIGHTCLIENT_PATCH_FORK_VERSION``, phase-0 pending attestations are
    dropped in favour of zeroed participation flags, and the current/next
    sync committees are filled in from the upgraded state.
    """
    epoch = get_current_epoch(pre)
    post = BeaconState(
        genesis_time=pre.genesis_time,
        genesis_validators_root=pre.genesis_validators_root,
        slot=pre.slot,
        fork=Fork(
            previous_version=pre.fork.current_version,
            current_version=LIGHTCLIENT_PATCH_FORK_VERSION,
            epoch=epoch,
        ),
        # History
        latest_block_header=pre.latest_block_header,
        block_roots=pre.block_roots,
        state_roots=pre.state_roots,
        historical_roots=pre.historical_roots,
        # Eth1
        eth1_data=pre.eth1_data,
        eth1_data_votes=pre.eth1_data_votes,
        eth1_deposit_index=pre.eth1_deposit_index,
        # Registry
        validators=pre.validators,
        balances=pre.balances,
        # Randomness
        randao_mixes=pre.randao_mixes,
        # Slashings
        slashings=pre.slashings,
        # Attestations
        # Participation flags start at zero: phase-0 pending attestations are not carried over.
        previous_epoch_participation=[ValidatorFlag(0) for _ in range(len(pre.validators))],
        current_epoch_participation=[ValidatorFlag(0) for _ in range(len(pre.validators))],
        # Finality
        justification_bits=pre.justification_bits,
        previous_justified_checkpoint=pre.previous_justified_checkpoint,
        current_justified_checkpoint=pre.current_justified_checkpoint,
        finalized_checkpoint=pre.finalized_checkpoint,
    )
    # Fill in sync committees
    post.current_sync_committee = get_sync_committee(post, get_current_epoch(post))
    post.next_sync_committee = get_sync_committee(post, get_current_epoch(post) + EPOCHS_PER_SYNC_COMMITTEE_PERIOD)
    return post
```

View File

@ -0,0 +1,184 @@
# Minimal Light Client
**Notice**: This document is a work-in-progress for researchers and implementers.
## Table of contents
<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Introduction](#introduction)
- [Constants](#constants)
- [Configuration](#configuration)
- [Misc](#misc)
- [Time parameters](#time-parameters)
- [Containers](#containers)
- [`LightClientSnapshot`](#lightclientsnapshot)
- [`LightClientUpdate`](#lightclientupdate)
- [`LightClientStore`](#lightclientstore)
- [Light client state updates](#light-client-state-updates)
- [`is_valid_light_client_update`](#is_valid_light_client_update)
- [`apply_light_client_update`](#apply_light_client_update)
- [`process_light_client_update`](#process_light_client_update)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
<!-- /TOC -->
## Introduction
Eth2 is designed to be light client friendly for constrained environments to
access Eth2 with reasonable safety and liveness.
Such environments include resource-constrained devices (e.g. phones for trust-minimised wallets)
and metered VMs (e.g. blockchain VMs for cross-chain bridges).
This document suggests a minimal light client design for the beacon chain that
uses sync committees introduced in [this beacon chain extension](./beacon-chain.md).
## Constants
| Name | Value |
| - | - |
| `FINALIZED_ROOT_INDEX` | `Index(BeaconState, 'finalized_checkpoint', 'root')` |
| `NEXT_SYNC_COMMITTEE_INDEX` | `Index(BeaconState, 'next_sync_committee')` |
## Configuration
### Misc
| Name | Value |
| - | - |
| `MIN_SYNC_COMMITTEE_PARTICIPANTS` | `1` |
| `MAX_VALID_LIGHT_CLIENT_UPDATES` | `uint64(2**64 - 1)` |
### Time parameters
| Name | Value | Unit | Duration |
| - | - | :-: | :-: |
| `LIGHT_CLIENT_UPDATE_TIMEOUT` | `Slot(2**13)` | slots | ~27 hours |
## Containers
#### `LightClientSnapshot`
```python
class LightClientSnapshot(Container):
    """A light client's trusted view: one header plus the two sync committees valid for it."""
    # Beacon block header
    header: BeaconBlockHeader
    # Sync committees corresponding to the header
    current_sync_committee: SyncCommittee
    next_sync_committee: SyncCommittee
```
#### `LightClientUpdate`
```python
class LightClientUpdate(Container):
    """
    A candidate advancement of a light client's snapshot: a new header, the
    Merkle proofs backing it, and the sync committee aggregate signature over it.
    """
    # Update beacon block header
    header: BeaconBlockHeader
    # Next sync committee corresponding to the header
    next_sync_committee: SyncCommittee
    next_sync_committee_branch: Vector[Bytes32, log2(NEXT_SYNC_COMMITTEE_INDEX)]
    # Finality proof for the update header
    # (an empty `finality_header` means the update is signed directly, without a finality proof)
    finality_header: BeaconBlockHeader
    finality_branch: Vector[Bytes32, log2(FINALIZED_ROOT_INDEX)]
    # Sync committee aggregate signature
    sync_committee_bits: Bitvector[SYNC_COMMITTEE_SIZE]
    sync_committee_signature: BLSSignature
    # Fork version for the aggregate signature
    fork_version: Version
```
#### `LightClientStore`
```python
class LightClientStore(Container):
    """Full light client state: the trusted snapshot plus validated-but-unapplied updates."""
    snapshot: LightClientSnapshot
    valid_updates: List[LightClientUpdate, MAX_VALID_LIGHT_CLIENT_UPDATES]
```
## Light client state updates
A light client maintains its state in a `store` object of type `LightClientStore` and receives `update` objects of type `LightClientUpdate`. Every `update` triggers `process_light_client_update(store, update, current_slot)` where `current_slot` is the current slot based on some local clock.
#### `is_valid_light_client_update`
```python
def is_valid_light_client_update(snapshot: LightClientSnapshot, update: LightClientUpdate) -> bool:
    """
    Validate ``update`` against the local ``snapshot``.

    Checks, in order: slot monotonicity; at most one sync-committee-period
    advance; the finality branch (when a non-empty finality header is given);
    the next-sync-committee branch (when the period advances); minimum
    participation; and the aggregate signature.  Raises ``AssertionError``
    on any failure, returns ``True`` otherwise.
    """
    # Verify update slot is larger than snapshot slot
    assert update.header.slot > snapshot.header.slot
    # Verify update does not skip a sync committee period
    snapshot_period = compute_epoch_at_slot(snapshot.header.slot) // EPOCHS_PER_SYNC_COMMITTEE_PERIOD
    update_period = compute_epoch_at_slot(update.header.slot) // EPOCHS_PER_SYNC_COMMITTEE_PERIOD
    assert update_period in (snapshot_period, snapshot_period + 1)
    # Verify update header root is the finalized root of the finality header, if specified
    if update.finality_header == BeaconBlockHeader():
        # No finality proof supplied: the header itself is what was signed,
        # and the finality branch must be all-zero.
        signed_header = update.header
        assert update.finality_branch == [ZERO_HASH for _ in range(log2(FINALIZED_ROOT_INDEX))]
    else:
        signed_header = update.finality_header
        assert is_valid_merkle_branch(
            leaf=hash_tree_root(update.header),
            branch=update.finality_branch,
            depth=log2(FINALIZED_ROOT_INDEX),
            index=FINALIZED_ROOT_INDEX % 2**log2(FINALIZED_ROOT_INDEX),
            root=update.finality_header.state_root,
        )
    # Verify update next sync committee if the update period incremented
    if update_period == snapshot_period:
        sync_committee = snapshot.current_sync_committee
        assert update.next_sync_committee_branch == [ZERO_HASH for _ in range(log2(NEXT_SYNC_COMMITTEE_INDEX))]
    else:
        sync_committee = snapshot.next_sync_committee
        assert is_valid_merkle_branch(
            leaf=hash_tree_root(update.next_sync_committee),
            branch=update.next_sync_committee_branch,
            depth=log2(NEXT_SYNC_COMMITTEE_INDEX),
            index=NEXT_SYNC_COMMITTEE_INDEX % 2**log2(NEXT_SYNC_COMMITTEE_INDEX),
            root=update.header.state_root,
        )
    # Verify sync committee has sufficient participants
    assert sum(update.sync_committee_bits) >= MIN_SYNC_COMMITTEE_PARTICIPANTS
    # Verify sync committee aggregate signature
    participant_pubkeys = [pubkey for (bit, pubkey) in zip(update.sync_committee_bits, sync_committee.pubkeys) if bit]
    domain = compute_domain(DOMAIN_SYNC_COMMITTEE, update.fork_version)
    signing_root = compute_signing_root(signed_header, domain)
    assert bls.FastAggregateVerify(participant_pubkeys, signing_root, update.sync_committee_signature)
    return True
```
#### `apply_light_client_update`
```python
def apply_light_client_update(snapshot: LightClientSnapshot, update: LightClientUpdate) -> None:
    """
    Advance ``snapshot`` to ``update.header``, rotating the sync committees
    when the update crosses into the next sync committee period.
    """
    snapshot_period = compute_epoch_at_slot(snapshot.header.slot) // EPOCHS_PER_SYNC_COMMITTEE_PERIOD
    update_period = compute_epoch_at_slot(update.header.slot) // EPOCHS_PER_SYNC_COMMITTEE_PERIOD
    if update_period == snapshot_period + 1:
        snapshot.current_sync_committee = snapshot.next_sync_committee
        snapshot.next_sync_committee = update.next_sync_committee
    snapshot.header = update.header
```
#### `process_light_client_update`
```python
def process_light_client_update(store: LightClientStore, update: LightClientUpdate, current_slot: Slot) -> None:
    """
    Entry point for every received ``update``: validate it against the stored
    snapshot, buffer it, and advance the snapshot when either (a) a 2/3
    supermajority signed a finality proof, or (b) the update timeout elapsed,
    in which case the best buffered update (most participation) is forced.

    Fixes relative to the draft text: ``apply_light_client_update`` takes a
    ``LightClientSnapshot`` (not the store), and the timeout check referenced
    an undefined name ``snapshot`` instead of ``store.snapshot``.
    """
    # Validate update
    assert is_valid_light_client_update(store.snapshot, update)
    store.valid_updates.append(update)
    if sum(update.sync_committee_bits) * 3 > len(update.sync_committee_bits) * 2 and update.header != update.finality_header:
        # Apply update if 2/3 quorum is reached and we have a finality proof
        apply_light_client_update(store.snapshot, update)
        store.valid_updates = []
    elif current_slot > store.snapshot.header.slot + LIGHT_CLIENT_UPDATE_TIMEOUT:
        # Forced best update when the update timeout has elapsed:
        # pick the buffered update with the highest participation
        apply_light_client_update(store.snapshot, max(store.valid_updates, key=lambda update: sum(update.sync_committee_bits)))
        store.valid_updates = []
```

View File

@ -58,7 +58,7 @@
- [Crypto](#crypto) - [Crypto](#crypto)
- [`hash`](#hash) - [`hash`](#hash)
- [`hash_tree_root`](#hash_tree_root) - [`hash_tree_root`](#hash_tree_root)
- [BLS Signatures](#bls-signatures) - [BLS signatures](#bls-signatures)
- [Predicates](#predicates) - [Predicates](#predicates)
- [`is_active_validator`](#is_active_validator) - [`is_active_validator`](#is_active_validator)
- [`is_eligible_for_activation_queue`](#is_eligible_for_activation_queue) - [`is_eligible_for_activation_queue`](#is_eligible_for_activation_queue)
@ -114,7 +114,12 @@
- [`process_rewards_and_penalties`](#process_rewards_and_penalties) - [`process_rewards_and_penalties`](#process_rewards_and_penalties)
- [Registry updates](#registry-updates) - [Registry updates](#registry-updates)
- [Slashings](#slashings) - [Slashings](#slashings)
- [Final updates](#final-updates) - [Eth1 data votes updates](#eth1-data-votes-updates)
- [Effective balances updates](#effective-balances-updates)
- [Slashings balances updates](#slashings-balances-updates)
- [Randao mixes updates](#randao-mixes-updates)
- [Historical roots updates](#historical-roots-updates)
- [Participation records rotation](#participation-records-rotation)
- [Block processing](#block-processing) - [Block processing](#block-processing)
- [Block header](#block-header) - [Block header](#block-header)
- [RANDAO](#randao) - [RANDAO](#randao)
@ -257,7 +262,7 @@ The following values are (non-configurable) constants used throughout the specif
- The `INACTIVITY_PENALTY_QUOTIENT` equals `INVERSE_SQRT_E_DROP_TIME**2` where `INVERSE_SQRT_E_DROP_TIME := 2**13` epochs (about 36 days) is the time it takes the inactivity penalty to reduce the balance of non-participating validators to about `1/sqrt(e) ~= 60.6%`. Indeed, the balance retained by offline validators after `n` epochs is about `(1 - 1/INACTIVITY_PENALTY_QUOTIENT)**(n**2/2)`; so after `INVERSE_SQRT_E_DROP_TIME` epochs, it is roughly `(1 - 1/INACTIVITY_PENALTY_QUOTIENT)**(INACTIVITY_PENALTY_QUOTIENT/2) ~= 1/sqrt(e)`. Note this value will be upgraded to `2**24` after Phase 0 mainnet stabilizes to provide a faster recovery in the event of an inactivity leak. - The `INACTIVITY_PENALTY_QUOTIENT` equals `INVERSE_SQRT_E_DROP_TIME**2` where `INVERSE_SQRT_E_DROP_TIME := 2**13` epochs (about 36 days) is the time it takes the inactivity penalty to reduce the balance of non-participating validators to about `1/sqrt(e) ~= 60.6%`. Indeed, the balance retained by offline validators after `n` epochs is about `(1 - 1/INACTIVITY_PENALTY_QUOTIENT)**(n**2/2)`; so after `INVERSE_SQRT_E_DROP_TIME` epochs, it is roughly `(1 - 1/INACTIVITY_PENALTY_QUOTIENT)**(INACTIVITY_PENALTY_QUOTIENT/2) ~= 1/sqrt(e)`. Note this value will be upgraded to `2**24` after Phase 0 mainnet stabilizes to provide a faster recovery in the event of an inactivity leak.
- The `PROPORTIONAL_SLASHING_MULTIPLIER` is set to `1` at initial mainnet launch, resulting in one-third of the minimum accountable safety margin in the event of a finality attack. After Phase 0 mainnet stablizes, this value will be upgraded to `3` to provide the maximal minimum accoutable safety margin. - The `PROPORTIONAL_SLASHING_MULTIPLIER` is set to `1` at initial mainnet launch, resulting in one-third of the minimum accountable safety margin in the event of a finality attack. After Phase 0 mainnet stablizes, this value will be upgraded to `3` to provide the maximal minimum accountable safety margin.
### Max operations per block ### Max operations per block
@ -607,17 +612,17 @@ def bytes_to_uint64(data: bytes) -> uint64:
`def hash_tree_root(object: SSZSerializable) -> Root` is a function for hashing objects into a single root by utilizing a hash tree structure, as defined in the [SSZ spec](../../ssz/simple-serialize.md#merkleization). `def hash_tree_root(object: SSZSerializable) -> Root` is a function for hashing objects into a single root by utilizing a hash tree structure, as defined in the [SSZ spec](../../ssz/simple-serialize.md#merkleization).
#### BLS Signatures #### BLS signatures
Eth2 makes use of BLS signatures as specified in the [IETF draft BLS specification draft-irtf-cfrg-bls-signature-04](https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04). Specifically, eth2 uses the `BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_` ciphersuite which implements the following interfaces: The [IETF BLS signature draft standard v4](https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04) with ciphersuite `BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_` defines the following functions:
- `def Sign(SK: int, message: Bytes) -> BLSSignature` - `def Sign(privkey: int, message: Bytes) -> BLSSignature`
- `def Verify(PK: BLSPubkey, message: Bytes, signature: BLSSignature) -> bool` - `def Verify(pubkey: BLSPubkey, message: Bytes, signature: BLSSignature) -> bool`
- `def Aggregate(signatures: Sequence[BLSSignature]) -> BLSSignature` - `def Aggregate(signatures: Sequence[BLSSignature]) -> BLSSignature`
- `def FastAggregateVerify(PKs: Sequence[BLSPubkey], message: Bytes, signature: BLSSignature) -> bool` - `def FastAggregateVerify(pubkeys: Sequence[BLSPubkey], message: Bytes, signature: BLSSignature) -> bool`
- `def AggregateVerify(PKs: Sequence[BLSPubkey], messages: Sequence[Bytes], signature: BLSSignature) -> bool` - `def AggregateVerify(pubkeys: Sequence[BLSPubkey], messages: Sequence[Bytes], signature: BLSSignature) -> bool`
Within these specifications, BLS signatures are treated as a module for notational clarity, thus to verify a signature `bls.Verify(...)` is used. The above functions are accessed through the `bls` module, e.g. `bls.Verify`.
### Predicates ### Predicates
@ -1257,7 +1262,12 @@ def process_epoch(state: BeaconState) -> None:
process_rewards_and_penalties(state) process_rewards_and_penalties(state)
process_registry_updates(state) process_registry_updates(state)
process_slashings(state) process_slashings(state)
process_final_updates(state) process_eth1_data_reset(state)
process_effective_balance_updates(state)
process_slashings_reset(state)
process_randao_mixes_reset(state)
process_historical_roots_update(state)
process_participation_record_updates(state)
``` ```
#### Helper functions #### Helper functions
@ -1564,15 +1574,19 @@ def process_slashings(state: BeaconState) -> None:
decrease_balance(state, ValidatorIndex(index), penalty) decrease_balance(state, ValidatorIndex(index), penalty)
``` ```
#### Final updates #### Eth1 data votes updates
```python ```python
def process_final_updates(state: BeaconState) -> None: def process_eth1_data_reset(state: BeaconState) -> None:
current_epoch = get_current_epoch(state) next_epoch = Epoch(get_current_epoch(state) + 1)
next_epoch = Epoch(current_epoch + 1)
# Reset eth1 data votes # Reset eth1 data votes
if next_epoch % EPOCHS_PER_ETH1_VOTING_PERIOD == 0: if next_epoch % EPOCHS_PER_ETH1_VOTING_PERIOD == 0:
state.eth1_data_votes = [] state.eth1_data_votes = []
```
#### Effective balances updates
```python
def process_effective_balance_updates(state: BeaconState) -> None:
# Update effective balances with hysteresis # Update effective balances with hysteresis
for index, validator in enumerate(state.validators): for index, validator in enumerate(state.validators):
balance = state.balances[index] balance = state.balances[index]
@ -1584,14 +1598,41 @@ def process_final_updates(state: BeaconState) -> None:
or validator.effective_balance + UPWARD_THRESHOLD < balance or validator.effective_balance + UPWARD_THRESHOLD < balance
): ):
validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE) validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE)
```
#### Slashings balances updates
```python
def process_slashings_reset(state: BeaconState) -> None:
next_epoch = Epoch(get_current_epoch(state) + 1)
# Reset slashings # Reset slashings
state.slashings[next_epoch % EPOCHS_PER_SLASHINGS_VECTOR] = Gwei(0) state.slashings[next_epoch % EPOCHS_PER_SLASHINGS_VECTOR] = Gwei(0)
```
#### Randao mixes updates
```python
def process_randao_mixes_reset(state: BeaconState) -> None:
current_epoch = get_current_epoch(state)
next_epoch = Epoch(current_epoch + 1)
# Set randao mix # Set randao mix
state.randao_mixes[next_epoch % EPOCHS_PER_HISTORICAL_VECTOR] = get_randao_mix(state, current_epoch) state.randao_mixes[next_epoch % EPOCHS_PER_HISTORICAL_VECTOR] = get_randao_mix(state, current_epoch)
```
#### Historical roots updates
```python
def process_historical_roots_update(state: BeaconState) -> None:
# Set historical root accumulator # Set historical root accumulator
next_epoch = Epoch(get_current_epoch(state) + 1)
if next_epoch % (SLOTS_PER_HISTORICAL_ROOT // SLOTS_PER_EPOCH) == 0: if next_epoch % (SLOTS_PER_HISTORICAL_ROOT // SLOTS_PER_EPOCH) == 0:
historical_batch = HistoricalBatch(block_roots=state.block_roots, state_roots=state.state_roots) historical_batch = HistoricalBatch(block_roots=state.block_roots, state_roots=state.state_roots)
state.historical_roots.append(hash_tree_root(historical_batch)) state.historical_roots.append(hash_tree_root(historical_batch))
```
#### Participation records rotation
```python
def process_participation_record_updates(state: BeaconState) -> None:
# Rotate current/previous epoch attestations # Rotate current/previous epoch attestations
state.previous_epoch_attestations = state.current_epoch_attestations state.previous_epoch_attestations = state.current_epoch_attestations
state.current_epoch_attestations = [] state.current_epoch_attestations = []

View File

@ -101,7 +101,7 @@ It consists of four main sections:
- [Compression/Encoding](#compressionencoding) - [Compression/Encoding](#compressionencoding)
- [Why are we using SSZ for encoding?](#why-are-we-using-ssz-for-encoding) - [Why are we using SSZ for encoding?](#why-are-we-using-ssz-for-encoding)
- [Why are we compressing, and at which layers?](#why-are-we-compressing-and-at-which-layers) - [Why are we compressing, and at which layers?](#why-are-we-compressing-and-at-which-layers)
- [Why are using Snappy for compression?](#why-are-using-snappy-for-compression) - [Why are we using Snappy for compression?](#why-are-we-using-snappy-for-compression)
- [Can I get access to unencrypted bytes on the wire for debugging purposes?](#can-i-get-access-to-unencrypted-bytes-on-the-wire-for-debugging-purposes) - [Can I get access to unencrypted bytes on the wire for debugging purposes?](#can-i-get-access-to-unencrypted-bytes-on-the-wire-for-debugging-purposes)
- [What are SSZ type size bounds?](#what-are-ssz-type-size-bounds) - [What are SSZ type size bounds?](#what-are-ssz-type-size-bounds)
- [libp2p implementations matrix](#libp2p-implementations-matrix) - [libp2p implementations matrix](#libp2p-implementations-matrix)
@ -313,6 +313,7 @@ The following validations MUST pass before forwarding the `signed_beacon_block`
(via both gossip and non-gossip sources) (via both gossip and non-gossip sources)
(a client MAY queue blocks for processing once the parent block is retrieved). (a client MAY queue blocks for processing once the parent block is retrieved).
- _[REJECT]_ The block's parent (defined by `block.parent_root`) passes validation. - _[REJECT]_ The block's parent (defined by `block.parent_root`) passes validation.
- _[REJECT]_ The block is from a higher slot than its parent.
- _[REJECT]_ The current `finalized_checkpoint` is an ancestor of `block` -- i.e. - _[REJECT]_ The current `finalized_checkpoint` is an ancestor of `block` -- i.e.
`get_ancestor(store, block.parent_root, compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)) `get_ancestor(store, block.parent_root, compute_start_slot_at_epoch(store.finalized_checkpoint.epoch))
== store.finalized_checkpoint.root` == store.finalized_checkpoint.root`
@ -419,7 +420,7 @@ The following validations MUST pass before forwarding the `attestation` on the s
- _[REJECT]_ The signature of `attestation` is valid. - _[REJECT]_ The signature of `attestation` is valid.
- _[IGNORE]_ The block being voted for (`attestation.data.beacon_block_root`) has been seen - _[IGNORE]_ The block being voted for (`attestation.data.beacon_block_root`) has been seen
(via both gossip and non-gossip sources) (via both gossip and non-gossip sources)
(a client MAY queue aggregates for processing once block is retrieved). (a client MAY queue attestations for processing once block is retrieved).
- _[REJECT]_ The block being voted for (`attestation.data.beacon_block_root`) passes validation. - _[REJECT]_ The block being voted for (`attestation.data.beacon_block_root`) passes validation.
- _[REJECT]_ The attestation's target block is an ancestor of the block named in the LMD vote -- i.e. - _[REJECT]_ The attestation's target block is an ancestor of the block named in the LMD vote -- i.e.
`get_ancestor(store, attestation.data.beacon_block_root, compute_start_slot_at_epoch(attestation.data.target.epoch)) == attestation.data.target.root` `get_ancestor(store, attestation.data.beacon_block_root, compute_start_slot_at_epoch(attestation.data.target.epoch)) == attestation.data.target.root`
@ -937,7 +938,7 @@ where the fields of `ENRForkID` are defined as
* `next_fork_epoch` is the epoch at which the next fork is planned and the `current_fork_version` will be updated. * `next_fork_epoch` is the epoch at which the next fork is planned and the `current_fork_version` will be updated.
If no future fork is planned, set `next_fork_epoch = FAR_FUTURE_EPOCH` to signal this fact If no future fork is planned, set `next_fork_epoch = FAR_FUTURE_EPOCH` to signal this fact
*Note*: `fork_digest` is composed of values that are not not known until the genesis block/state are available. *Note*: `fork_digest` is composed of values that are not known until the genesis block/state are available.
Due to this, clients SHOULD NOT form ENRs and begin peer discovery until genesis values are known. Due to this, clients SHOULD NOT form ENRs and begin peer discovery until genesis values are known.
One notable exception to this rule is the distribution of bootnode ENRs prior to genesis. One notable exception to this rule is the distribution of bootnode ENRs prior to genesis.
In this case, bootnode ENRs SHOULD be initially distributed with `eth2` field set as In this case, bootnode ENRs SHOULD be initially distributed with `eth2` field set as
@ -1223,7 +1224,7 @@ the node's fork choice prevents integration of these messages into the actual co
Depending on the number of validators, it may be more efficient to group shard subnets and might provide better stability for the gossipsub channel. Depending on the number of validators, it may be more efficient to group shard subnets and might provide better stability for the gossipsub channel.
The exact grouping will be dependent on more involved network tests. The exact grouping will be dependent on more involved network tests.
This constant allows for more flexibility in setting up the network topology for attestation aggregation (as aggregation should happen on each subnet). This constant allows for more flexibility in setting up the network topology for attestation aggregation (as aggregation should happen on each subnet).
The value is currently set to to be equal `MAX_COMMITTEES_PER_SLOT` if/until network tests indicate otherwise. The value is currently set to be equal to `MAX_COMMITTEES_PER_SLOT` if/until network tests indicate otherwise.
### Why are attestations limited to be broadcast on gossip channels within `SLOTS_PER_EPOCH` slots? ### Why are attestations limited to be broadcast on gossip channels within `SLOTS_PER_EPOCH` slots?
@ -1369,7 +1370,7 @@ Thus, it may happen that we need to transmit an empty list - there are several w
Semantically, it is not an error that a block is missing during a slot making option 2 unnatural. Semantically, it is not an error that a block is missing during a slot making option 2 unnatural.
Option 1 allows allows the responder to signal "no block", but this information may be wrong - for example in the case of a malicious node. Option 1 allows the responder to signal "no block", but this information may be wrong - for example in the case of a malicious node.
Under option 0, there is no way for a client to distinguish between a slot without a block and an incomplete response, Under option 0, there is no way for a client to distinguish between a slot without a block and an incomplete response,
but given that it already must contain logic to handle the uncertainty of a malicious peer, option 0 was chosen. but given that it already must contain logic to handle the uncertainty of a malicious peer, option 0 was chosen.
@ -1495,7 +1496,7 @@ This looks different depending on the interaction layer:
implementers are encouraged to encapsulate the encoding and compression logic behind implementers are encouraged to encapsulate the encoding and compression logic behind
MessageReader and MessageWriter components/strategies that can be layered on top of the raw byte streams. MessageReader and MessageWriter components/strategies that can be layered on top of the raw byte streams.
### Why are using Snappy for compression? ### Why are we using Snappy for compression?
Snappy is used in Ethereum 1.0. It is well maintained by Google, has good benchmarks, Snappy is used in Ethereum 1.0. It is well maintained by Google, has good benchmarks,
and can calculate the size of the uncompressed object without inflating it in memory. and can calculate the size of the uncompressed object without inflating it in memory.

View File

@ -356,7 +356,9 @@ def get_eth1_vote(state: BeaconState, eth1_chain: Sequence[Eth1Block]) -> Eth1Da
valid_votes = [vote for vote in state.eth1_data_votes if vote in votes_to_consider] valid_votes = [vote for vote in state.eth1_data_votes if vote in votes_to_consider]
# Default vote on latest eth1 block data in the period range unless eth1 chain is not live # Default vote on latest eth1 block data in the period range unless eth1 chain is not live
default_vote = votes_to_consider[len(votes_to_consider) - 1] if any(votes_to_consider) else state.eth1_data # Non-substantive casting for linter
state_eth1_data: Eth1Data = state.eth1_data
default_vote = votes_to_consider[len(votes_to_consider) - 1] if any(votes_to_consider) else state_eth1_data
return max( return max(
valid_votes, valid_votes,

View File

@ -8,13 +8,17 @@
- [Introduction](#introduction) - [Introduction](#introduction)
- [Prerequisites](#prerequisites) - [Prerequisites](#prerequisites)
- [Custom Types](#custom-types)
- [Constants](#constants) - [Constants](#constants)
- [Configuration](#configuration)
- [Weak Subjectivity Checkpoint](#weak-subjectivity-checkpoint) - [Weak Subjectivity Checkpoint](#weak-subjectivity-checkpoint)
- [Weak Subjectivity Period](#weak-subjectivity-period) - [Weak Subjectivity Period](#weak-subjectivity-period)
- [Calculating the Weak Subjectivity Period](#calculating-the-weak-subjectivity-period) - [Calculating the Weak Subjectivity Period](#calculating-the-weak-subjectivity-period)
- [`compute_weak_subjectivity_period`](#compute_weak_subjectivity_period)
- [Weak Subjectivity Sync](#weak-subjectivity-sync) - [Weak Subjectivity Sync](#weak-subjectivity-sync)
- [Weak Subjectivity Sync Procedure](#weak-subjectivity-sync-procedure) - [Weak Subjectivity Sync Procedure](#weak-subjectivity-sync-procedure)
- [Checking for Stale Weak Subjectivity Checkpoint](#checking-for-stale-weak-subjectivity-checkpoint) - [Checking for Stale Weak Subjectivity Checkpoint](#checking-for-stale-weak-subjectivity-checkpoint)
- [`is_within_weak_subjectivity_period`](#is_within_weak_subjectivity_period)
- [Distributing Weak Subjectivity Checkpoints](#distributing-weak-subjectivity-checkpoints) - [Distributing Weak Subjectivity Checkpoints](#distributing-weak-subjectivity-checkpoints)
<!-- END doctoc generated TOC please keep comment here to allow auto update --> <!-- END doctoc generated TOC please keep comment here to allow auto update -->
@ -34,15 +38,27 @@ For more information about weak subjectivity and why it is required, please refe
This document uses data structures, constants, functions, and terminology from This document uses data structures, constants, functions, and terminology from
[Phase 0 -- The Beacon Chain](./beacon-chain.md) and [Phase 0 -- Beacon Chain Fork Choice](./fork-choice.md). [Phase 0 -- The Beacon Chain](./beacon-chain.md) and [Phase 0 -- Beacon Chain Fork Choice](./fork-choice.md).
## Custom Types
| Name | SSZ Equivalent | Description |
|---|---|---|
| `Ether` | `uint64` | an amount in Ether |
## Constants ## Constants
| Name | Value | | Name | Value |
|----------------|--------------| |---|---|
| `ETH_TO_GWEI` | `uint64(10**9)` |
## Configuration
| Name | Value |
|---|---|
| `SAFETY_DECAY` | `uint64(10)` | | `SAFETY_DECAY` | `uint64(10)` |
## Weak Subjectivity Checkpoint ## Weak Subjectivity Checkpoint
Any `Checkpoint` can used be a Weak Subjectivity Checkpoint. Any `Checkpoint` object can be used as a Weak Subjectivity Checkpoint.
These Weak Subjectivity Checkpoints are distributed by providers, These Weak Subjectivity Checkpoints are distributed by providers,
downloaded by users and/or distributed as a part of clients, and used as input while syncing a client. downloaded by users and/or distributed as a part of clients, and used as input while syncing a client.
@ -59,38 +75,64 @@ a safety margin of at least `1/3 - SAFETY_DECAY/100`.
### Calculating the Weak Subjectivity Period ### Calculating the Weak Subjectivity Period
*Note*: `compute_weak_subjectivity_period()` is planned to be updated when a more accurate calculation is made. A detailed analysis of the calculation of the weak subjectivity period is made in [this report](https://github.com/runtimeverification/beacon-chain-verification/blob/master/weak-subjectivity/weak-subjectivity-analysis.pdf).
*Note*: The expressions in the report use fractions, whereas eth2.0-specs uses only `uint64` arithmetic. The expressions have been simplified to avoid computing fractions, and more details can be found [here](https://www.overleaf.com/read/wgjzjdjpvpsd).
*Note*: The calculations here use `Ether` instead of `Gwei`, because the large magnitude of balances in `Gwei` can cause an overflow while computing using `uint64` arithmetic operations. Using `Ether` reduces the magnitude of the multiplicative factors by an order of `ETH_TO_GWEI` (`= 10**9`) and avoid the scope for overflows in `uint64`.
#### `compute_weak_subjectivity_period`
```python ```python
def compute_weak_subjectivity_period(state: BeaconState) -> uint64: def compute_weak_subjectivity_period(state: BeaconState) -> uint64:
weak_subjectivity_period = MIN_VALIDATOR_WITHDRAWABILITY_DELAY """
validator_count = len(get_active_validator_indices(state, get_current_epoch(state))) Returns the weak subjectivity period for the current ``state``.
if validator_count >= MIN_PER_EPOCH_CHURN_LIMIT * CHURN_LIMIT_QUOTIENT: This computation takes into account the effect of:
weak_subjectivity_period += SAFETY_DECAY * CHURN_LIMIT_QUOTIENT // (2 * 100) - validator set churn (bounded by ``get_validator_churn_limit()`` per epoch), and
- validator balance top-ups (bounded by ``MAX_DEPOSITS * SLOTS_PER_EPOCH`` per epoch).
A detailed calculation can be found at:
https://github.com/runtimeverification/beacon-chain-verification/blob/master/weak-subjectivity/weak-subjectivity-analysis.pdf
"""
ws_period = MIN_VALIDATOR_WITHDRAWABILITY_DELAY
N = len(get_active_validator_indices(state, get_current_epoch(state)))
t = get_total_active_balance(state) // N // ETH_TO_GWEI
T = MAX_EFFECTIVE_BALANCE // ETH_TO_GWEI
delta = get_validator_churn_limit(state)
Delta = MAX_DEPOSITS * SLOTS_PER_EPOCH
D = SAFETY_DECAY
if T * (200 + 3 * D) < t * (200 + 12 * D):
epochs_for_validator_set_churn = (
N * (t * (200 + 12 * D) - T * (200 + 3 * D)) // (600 * delta * (2 * t + T))
)
epochs_for_balance_top_ups = (
N * (200 + 3 * D) // (600 * Delta)
)
ws_period += max(epochs_for_validator_set_churn, epochs_for_balance_top_ups)
else: else:
weak_subjectivity_period += SAFETY_DECAY * validator_count // (2 * 100 * MIN_PER_EPOCH_CHURN_LIMIT) ws_period += (
return weak_subjectivity_period 3 * N * D * t // (200 * Delta * (T - t))
)
return ws_period
``` ```
*Details about the calculation*: A brief reference for what these values look like in practice ([reference script](https://gist.github.com/adiasg/3aceab409b36aa9a9d9156c1baa3c248)):
- `100` appears in the denominator to get the actual percentage ratio from `SAFETY_DECAY`
- For more information about other terms in this equation, refer to
[Weak Subjectivity in Eth2.0](https://notes.ethereum.org/@adiasg/weak-subjectvity-eth2)
A brief reference for what these values look like in practice: | Safety Decay | Avg. Val. Balance (ETH) | Val. Count | Weak Sub. Period (Epochs) |
| ---- | ---- | ---- | ---- |
| `validator_count` | `weak_subjectivity_period` | | 10 | 28 | 32768 | 504 |
| ---- | ---- | | 10 | 28 | 65536 | 752 |
| 1024 | 268 | | 10 | 28 | 131072 | 1248 |
| 2048 | 281 | | 10 | 28 | 262144 | 2241 |
| 4096 | 307 | | 10 | 28 | 524288 | 2241 |
| 8192 | 358 | | 10 | 28 | 1048576 | 2241 |
| 16384 | 460 | | 10 | 32 | 32768 | 665 |
| 32768 | 665 | | 10 | 32 | 65536 | 1075 |
| 65536 | 1075 | | 10 | 32 | 131072 | 1894 |
| 131072 | 1894 | | 10 | 32 | 262144 | 3532 |
| 262144 | 3532 | | 10 | 32 | 524288 | 3532 |
| 524288 | 3532 | | 10 | 32 | 1048576 | 3532 |
## Weak Subjectivity Sync ## Weak Subjectivity Sync
@ -101,22 +143,28 @@ Clients should allow users to input a Weak Subjectivity Checkpoint at startup, a
1. Input a Weak Subjectivity Checkpoint as a CLI parameter in `block_root:epoch_number` format, 1. Input a Weak Subjectivity Checkpoint as a CLI parameter in `block_root:epoch_number` format,
where `block_root` (an "0x" prefixed 32-byte hex string) and `epoch_number` (an integer) represent a valid `Checkpoint`. where `block_root` (an "0x" prefixed 32-byte hex string) and `epoch_number` (an integer) represent a valid `Checkpoint`.
Example of the format: Example of the format:
``` ```
0x8584188b86a9296932785cc2827b925f9deebacce6d72ad8d53171fa046b43d9:9544 0x8584188b86a9296932785cc2827b925f9deebacce6d72ad8d53171fa046b43d9:9544
``` ```
2. - *IF* `epoch_number > store.finalized_checkpoint.epoch`,
then *ASSERT* during block sync that block with root `block_root` is in the sync path at epoch `epoch_number`. 2. Check the weak subjectivity requirements:
Emit descriptive critical error if this assert fails, then exit client process. - *IF* `epoch_number > store.finalized_checkpoint.epoch`,
then *ASSERT* during block sync that block with root `block_root` is in the sync path at epoch `epoch_number`.
Emit descriptive critical error if this assert fails, then exit client process.
- *IF* `epoch_number <= store.finalized_checkpoint.epoch`, - *IF* `epoch_number <= store.finalized_checkpoint.epoch`,
then *ASSERT* that the block in the canonical chain at epoch `epoch_number` has root `block_root`. then *ASSERT* that the block in the canonical chain at epoch `epoch_number` has root `block_root`.
Emit descriptive critical error if this assert fails, then exit client process. Emit descriptive critical error if this assert fails, then exit client process.
### Checking for Stale Weak Subjectivity Checkpoint ### Checking for Stale Weak Subjectivity Checkpoint
Clients may choose to validate that the input Weak Subjectivity Checkpoint is not stale at the time of startup. Clients may choose to validate that the input Weak Subjectivity Checkpoint is not stale at the time of startup.
To support this mechanism, the client needs to take the state at the Weak Subjectivity Checkpoint as To support this mechanism, the client needs to take the state at the Weak Subjectivity Checkpoint as
a CLI parameter input (or fetch the state associated with the input Weak Subjectivity Checkpoint from some source). a CLI parameter input (or fetch the state associated with the input Weak Subjectivity Checkpoint from some source).
The check can be implemented in the following way: The check can be implemented in the following way:
#### `is_within_weak_subjectivity_period`
```python ```python
def is_within_weak_subjectivity_period(store: Store, ws_state: BeaconState, ws_checkpoint: Checkpoint) -> bool: def is_within_weak_subjectivity_period(store: Store, ws_state: BeaconState, ws_checkpoint: Checkpoint) -> bool:
# Clients may choose to validate the input state against the input Weak Subjectivity Checkpoint # Clients may choose to validate the input state against the input Weak Subjectivity Checkpoint
@ -130,4 +178,5 @@ def is_within_weak_subjectivity_period(store: Store, ws_state: BeaconState, ws_c
``` ```
## Distributing Weak Subjectivity Checkpoints ## Distributing Weak Subjectivity Checkpoints
This section will be updated soon. This section will be updated soon.

View File

@ -1054,11 +1054,16 @@ def process_epoch(state: BeaconState) -> None:
process_justification_and_finalization(state) process_justification_and_finalization(state)
process_rewards_and_penalties(state) process_rewards_and_penalties(state)
process_registry_updates(state) process_registry_updates(state)
process_reveal_deadlines(state) process_reveal_deadlines(state) # Phase 1
process_challenge_deadlines(state) process_challenge_deadlines(state) # Phase 1
process_slashings(state) process_slashings(state)
process_final_updates(state) # phase 0 final updates process_eth1_data_reset(state)
process_phase_1_final_updates(state) process_effective_balance_updates(state)
process_slashings_reset(state)
process_randao_mixes_reset(state)
process_historical_roots_update(state)
process_participation_record_updates(state)
process_phase_1_final_updates(state) # Phase 1
``` ```
#### Phase 1 final updates #### Phase 1 final updates

View File

@ -67,7 +67,7 @@ A validator is an entity that participates in the consensus of the Ethereum 2.0
This document is an extension of the [Phase 0 -- Validator](../phase0/validator.md). All behaviors and definitions defined in the Phase 0 doc carry over unless explicitly noted or overridden. This document is an extension of the [Phase 0 -- Validator](../phase0/validator.md). All behaviors and definitions defined in the Phase 0 doc carry over unless explicitly noted or overridden.
All terminology, constants, functions, and protocol mechanics defined in the [Phase 1 -- The Beacon Chain](./beacon-chain.md) and [Phase 1 -- Custody Game](./custody-game.md) docs are requisite for this document and used throughout. Please see the Phase 1 docs before continuing and use as a reference throughout. All terminology, constants, functions, and protocol mechanics defined in the [Phase 1 -- The Beacon Chain](./beacon-chain.md) and [Phase 1 -- Custody Game](./custody-game.md) docs are requisite for this document and used throughout. Please see the Phase 1 docs before continuing and use them as a reference throughout.
## Constants ## Constants
@ -352,7 +352,7 @@ Aggregation selection and the core of this duty are largely unchanged from Phase
Note the timing of when to broadcast aggregates is altered in Phase 1+. Note the timing of when to broadcast aggregates is altered in Phase 1+.
If the validator is selected to aggregate (`is_aggregator`), then they broadcast their best aggregate as a `SignedAggregateAndProof` to the global aggregate channel (`beacon_aggregate_and_proof`) three-fourths of the way through the `slot`-that is, `SECONDS_PER_SLOT * 3 / 4` seconds after the start of `slot`. If the validator is selected to aggregate (`is_aggregator`), then they broadcast their best aggregate as a `SignedAggregateAndProof` to the global aggregate channel (`beacon_aggregate_and_proof`) three-fourths of the way through the `slot` -- that is, `SECONDS_PER_SLOT * 3 / 4` seconds after the start of `slot`.
##### `AggregateAndProof` ##### `AggregateAndProof`

View File

@ -246,16 +246,4 @@ We similarly define "summary types" and "expansion types". For example, [`Beacon
## Implementations ## Implementations
| Language | Project | Maintainer | Implementation | See https://github.com/ethereum/eth2.0-specs/issues/2138 for a list of current known implementations.
|-|-|-|-|
| Python | Ethereum 2.0 | Ethereum Foundation | [https://github.com/ethereum/py-ssz](https://github.com/ethereum/py-ssz) |
| Rust | Lighthouse | Sigma Prime | [https://github.com/sigp/lighthouse/tree/master/consensus/ssz](https://github.com/sigp/lighthouse/tree/master/consensus/ssz) |
| Nim | Nimbus | Status | [https://github.com/status-im/nim-beacon-chain/blob/master/beacon_chain/ssz.nim](https://github.com/status-im/nim-beacon-chain/blob/master/beacon_chain/ssz.nim) |
| Rust | Shasper | ParityTech | [https://github.com/paritytech/shasper/tree/master/utils/ssz](https://github.com/paritytech/shasper/tree/master/utils/ssz) |
| TypeScript | Lodestar | ChainSafe Systems | [https://github.com/ChainSafe/ssz-js](https://github.com/ChainSafe/ssz) |
| Java | Cava | ConsenSys | [https://www.github.com/ConsenSys/cava/tree/master/ssz](https://www.github.com/ConsenSys/cava/tree/master/ssz) |
| Go | Prysm | Prysmatic Labs | [https://github.com/prysmaticlabs/go-ssz](https://github.com/prysmaticlabs/go-ssz) |
| Swift | Yeeth | Dean Eigenmann | [https://github.com/yeeth/SimpleSerialize.swift](https://github.com/yeeth/SimpleSerialize.swift) |
| C# | | Jordan Andrews | [https://github.com/codingupastorm/csharp-ssz](https://github.com/codingupastorm/csharp-ssz) |
| C# | Cortex | Sly Gryphon | [https://www.nuget.org/packages/Cortex.SimpleSerialize](https://www.nuget.org/packages/Cortex.SimpleSerialize) |
| C++ | | Jiyun Kim | [https://github.com/NAKsir-melody/cpp_ssz](https://github.com/NAKsir-melody/cpp_ssz) |

View File

@ -1,40 +0,0 @@
from inspect import getmembers, isfunction
from typing import Any, Iterable
from gen_base.gen_typing import TestCase
def generate_from_tests(runner_name: str, handler_name: str, src: Any,
fork_name: str, bls_active: bool = True) -> Iterable[TestCase]:
"""
Generate a list of test cases by running tests from the given src in generator-mode.
:param runner_name: to categorize the test in general as.
:param handler_name: to categorize the test specialization as.
:param src: to retrieve tests from (discovered using inspect.getmembers).
:param fork_name: to run tests against particular phase and/or fork.
(if multiple forks are applicable, indicate the last fork)
:param bls_active: optional, to override BLS switch preference. Defaults to True.
:return: an iterable of test cases.
"""
fn_names = [
name for (name, _) in getmembers(src, isfunction)
if name.startswith('test_')
]
print("generating test vectors from tests source: %s" % src.__name__)
for name in fn_names:
tfn = getattr(src, name)
# strip off the `test_`
case_name = name
if case_name.startswith('test_'):
case_name = case_name[5:]
yield TestCase(
fork_name=fork_name,
runner_name=runner_name,
handler_name=handler_name,
suite_name='pyspec_tests',
case_name=case_name,
# TODO: with_all_phases and other per-phase tooling, should be replaced with per-fork equivalent.
case_fn=lambda: tfn(generator_mode=True, phase=fork_name, bls_active=bls_active)
)

View File

@ -1,3 +0,0 @@
ruamel.yaml==0.16.5
eth-utils==1.6.0
pytest>=4.4

View File

@ -1,11 +0,0 @@
from distutils.core import setup
setup(
name='gen_helpers',
packages=['gen_base', 'gen_from_tests'],
install_requires=[
"ruamel.yaml==0.16.5",
"eth-utils==1.6.0",
"pytest>=4.4",
]
)

View File

@ -27,7 +27,7 @@ python setup.py pyspec --spec-fork=phase0 --md-doc-paths="specs/phase0/beacon-ch
After installing, you can install the optional dependencies for testing and linting. After installing, you can install the optional dependencies for testing and linting.
With makefile: `make install_test`. With makefile: `make install_test`.
Or manually: run `pip install .[testing]` and `pip install .[linting]`. Or manually: run `pip install .[test]` and `pip install .[lint]`.
These tests are not intended for client-consumption. These tests are not intended for client-consumption.
These tests are testing the spec itself, to verify consistency and provide feedback on modifications of the spec. These tests are testing the spec itself, to verify consistency and provide feedback on modifications of the spec.

View File

@ -4,7 +4,7 @@
A util to quickly write new test suite generators with. A util to quickly write new test suite generators with.
See [Generators documentation](../../generators/README.md) for integration details. See [Generators documentation](../../../../generators/README.md) for integration details.
Options: Options:

View File

@ -8,11 +8,11 @@ from ruamel.yaml import (
YAML, YAML,
) )
from gen_base.gen_typing import TestProvider
from eth2spec.test import context from eth2spec.test import context
from eth2spec.test.exceptions import SkippedTest from eth2spec.test.exceptions import SkippedTest
from .gen_typing import TestProvider
# Flag that the runner does NOT run test via pytest # Flag that the runner does NOT run test via pytest
context.is_pytest = False context.is_pytest = False
@ -119,10 +119,11 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
print(f"generating tests with config '{config_name}' ...") print(f"generating tests with config '{config_name}' ...")
for test_case in tprov.make_cases(): for test_case in tprov.make_cases():
case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \ case_dir = (
/ Path(test_case.runner_name) / Path(test_case.handler_name) \ Path(output_dir) / Path(config_name) / Path(test_case.fork_name)
/ Path(test_case.suite_name) / Path(test_case.case_name) / Path(test_case.runner_name) / Path(test_case.handler_name)
/ Path(test_case.suite_name) / Path(test_case.case_name)
)
if case_dir.exists(): if case_dir.exists():
if not args.force: if not args.force:
print(f'Skipping already existing test: {case_dir}') print(f'Skipping already existing test: {case_dir}')

View File

@ -0,0 +1,99 @@
from importlib import reload, import_module
from inspect import getmembers, isfunction
from typing import Any, Callable, Dict, Iterable
from eth2spec.config import config_util
from eth2spec.utils import bls
from eth2spec.test.context import ALL_CONFIGS, TESTGEN_FORKS, SpecForkName, ConfigName
from eth2spec.gen_helpers.gen_base import gen_runner
from eth2spec.gen_helpers.gen_base.gen_typing import TestCase, TestProvider
def generate_from_tests(runner_name: str, handler_name: str, src: Any,
fork_name: SpecForkName, bls_active: bool = True) -> Iterable[TestCase]:
"""
Generate a list of test cases by running tests from the given src in generator-mode.
:param runner_name: to categorize the test in general as.
:param handler_name: to categorize the test specialization as.
:param src: to retrieve tests from (discovered using inspect.getmembers).
:param fork_name: to run tests against particular phase and/or fork.
(if multiple forks are applicable, indicate the last fork)
:param bls_active: optional, to override BLS switch preference. Defaults to True.
:return: an iterable of test cases.
"""
fn_names = [
name for (name, _) in getmembers(src, isfunction)
if name.startswith('test_')
]
print("generating test vectors from tests source: %s" % src.__name__)
for name in fn_names:
tfn = getattr(src, name)
# strip off the `test_`
case_name = name
if case_name.startswith('test_'):
case_name = case_name[5:]
yield TestCase(
fork_name=fork_name,
runner_name=runner_name,
handler_name=handler_name,
suite_name='pyspec_tests',
case_name=case_name,
# TODO: with_all_phases and other per-phase tooling, should be replaced with per-fork equivalent.
case_fn=lambda: tfn(generator_mode=True, phase=fork_name, bls_active=bls_active)
)
def get_provider(create_provider_fn: Callable[[SpecForkName, str, str, ConfigName], TestProvider],
config_name: ConfigName,
fork_name: SpecForkName,
all_mods: Dict[str, Dict[str, str]]) -> Iterable[TestProvider]:
for key, mod_name in all_mods[fork_name].items():
yield create_provider_fn(
fork_name=fork_name,
handler_name=key,
tests_src_mod_name=mod_name,
config_name=config_name,
)
def get_create_provider_fn(
runner_name: str, config_name: ConfigName, specs: Iterable[Any]
) -> Callable[[SpecForkName, str, str, ConfigName], TestProvider]:
def prepare_fn(configs_path: str) -> str:
config_util.prepare_config(configs_path, config_name)
for spec in specs:
reload(spec)
bls.use_milagro()
return config_name
def create_provider(fork_name: SpecForkName, handler_name: str,
tests_src_mod_name: str, config_name: ConfigName) -> TestProvider:
def cases_fn() -> Iterable[TestCase]:
tests_src = import_module(tests_src_mod_name)
return generate_from_tests(
runner_name=runner_name,
handler_name=handler_name,
src=tests_src,
fork_name=fork_name,
)
return TestProvider(prepare=prepare_fn, make_cases=cases_fn)
return create_provider
def run_state_test_generators(runner_name: str, specs: Iterable[Any], all_mods: Dict[str, Dict[str, str]]) -> None:
"""
Generate all available state tests of `TESTGEN_FORKS` forks of `ALL_CONFIGS` configs of the given runner.
"""
for config_name in ALL_CONFIGS:
for fork_name in TESTGEN_FORKS:
if fork_name in all_mods:
gen_runner.run_generator(runner_name, get_provider(
create_provider_fn=get_create_provider_fn(runner_name, config_name, specs),
config_name=config_name,
fork_name=fork_name,
all_mods=all_mods,
))

View File

@ -2,6 +2,7 @@ import pytest
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.utils import bls from eth2spec.utils import bls
from .exceptions import SkippedTest from .exceptions import SkippedTest
@ -19,6 +20,7 @@ from importlib import reload
def reload_specs(): def reload_specs():
reload(spec_phase0) reload(spec_phase0)
reload(spec_phase1) reload(spec_phase1)
reload(spec_lightclient_patch)
# Some of the Spec module functionality is exposed here to deal with phase-specific changes. # Some of the Spec module functionality is exposed here to deal with phase-specific changes.
@ -28,11 +30,17 @@ ConfigName = NewType("ConfigName", str)
PHASE0 = SpecForkName('phase0') PHASE0 = SpecForkName('phase0')
PHASE1 = SpecForkName('phase1') PHASE1 = SpecForkName('phase1')
ALL_PHASES = (PHASE0, PHASE1) LIGHTCLIENT_PATCH = SpecForkName('lightclient_patch')
ALL_PHASES = (PHASE0, PHASE1, LIGHTCLIENT_PATCH)
MAINNET = ConfigName('mainnet') MAINNET = ConfigName('mainnet')
MINIMAL = ConfigName('minimal') MINIMAL = ConfigName('minimal')
ALL_CONFIGS = (MINIMAL, MAINNET)
# The forks that output to the test vectors.
TESTGEN_FORKS = (PHASE0, LIGHTCLIENT_PATCH)
# TODO: currently phases are defined as python modules. # TODO: currently phases are defined as python modules.
# It would be better if they would be more well-defined interfaces for stronger typing. # It would be better if they would be more well-defined interfaces for stronger typing.
@ -47,14 +55,18 @@ class SpecPhase0(Spec):
class SpecPhase1(Spec): class SpecPhase1(Spec):
def upgrade_to_phase1(self, state: spec_phase0.BeaconState) -> spec_phase1.BeaconState: ...
...
class SpecLightclient(Spec):
...
# add transfer, bridge, etc. as the spec evolves # add transfer, bridge, etc. as the spec evolves
class SpecForks(TypedDict, total=False): class SpecForks(TypedDict, total=False):
PHASE0: SpecPhase0 PHASE0: SpecPhase0
PHASE1: SpecPhase1 PHASE1: SpecPhase1
LIGHTCLIENT_PATCH: SpecLightclient
def _prepare_state(balances_fn: Callable[[Any], Sequence[int]], threshold_fn: Callable[[Any], int], def _prepare_state(balances_fn: Callable[[Any], Sequence[int]], threshold_fn: Callable[[Any], int],
@ -70,6 +82,8 @@ def _prepare_state(balances_fn: Callable[[Any], Sequence[int]], threshold_fn: Ca
# TODO: instead of upgrading a test phase0 genesis state we can also write a phase1 state helper. # TODO: instead of upgrading a test phase0 genesis state we can also write a phase1 state helper.
# Decide based on performance/consistency results later. # Decide based on performance/consistency results later.
state = phases[PHASE1].upgrade_to_phase1(state) state = phases[PHASE1].upgrade_to_phase1(state)
elif spec.fork == LIGHTCLIENT_PATCH: # not generalizing this just yet, unclear final spec fork/patch order.
state = phases[LIGHTCLIENT_PATCH].upgrade_to_lightclient_patch(state)
return state return state
@ -337,12 +351,16 @@ def with_phases(phases, other_phases=None):
phase_dir[PHASE0] = spec_phase0 phase_dir[PHASE0] = spec_phase0
if PHASE1 in available_phases: if PHASE1 in available_phases:
phase_dir[PHASE1] = spec_phase1 phase_dir[PHASE1] = spec_phase1
if LIGHTCLIENT_PATCH in available_phases:
phase_dir[LIGHTCLIENT_PATCH] = spec_lightclient_patch
# return is ignored whenever multiple phases are ran. If # return is ignored whenever multiple phases are ran. If
if PHASE0 in run_phases: if PHASE0 in run_phases:
ret = fn(spec=spec_phase0, phases=phase_dir, *args, **kw) ret = fn(spec=spec_phase0, phases=phase_dir, *args, **kw)
if PHASE1 in run_phases: if PHASE1 in run_phases:
ret = fn(spec=spec_phase1, phases=phase_dir, *args, **kw) ret = fn(spec=spec_phase1, phases=phase_dir, *args, **kw)
if LIGHTCLIENT_PATCH in run_phases:
ret = fn(spec=spec_lightclient_patch, phases=phase_dir, *args, **kw)
return ret return ret
return wrapper return wrapper
return decorator return decorator
@ -376,3 +394,11 @@ def only_full_crosslink(fn):
return None return None
return fn(*args, spec=spec, state=state, **kw) return fn(*args, spec=spec, state=state, **kw)
return wrapper return wrapper
def is_post_lightclient_patch(spec):
if spec.fork in [PHASE0, PHASE1]:
# TODO: PHASE1 fork is temporarily parallel to LIGHTCLIENT_PATCH.
# Will make PHASE1 fork inherit LIGHTCLIENT_PATCH later.
return False
return True

View File

@ -2,7 +2,7 @@ from lru import LRU
from typing import List from typing import List
from eth2spec.test.context import expect_assertion_error, PHASE1 from eth2spec.test.context import expect_assertion_error, PHASE1, is_post_lightclient_patch
from eth2spec.test.helpers.state import state_transition_and_sign_block, next_epoch, next_slot from eth2spec.test.helpers.state import state_transition_and_sign_block, next_epoch, next_slot
from eth2spec.test.helpers.block import build_empty_block_for_next_slot from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.shard_transitions import get_shard_transition_of_committee from eth2spec.test.helpers.shard_transitions import get_shard_transition_of_committee
@ -30,17 +30,22 @@ def run_attestation_processing(spec, state, attestation, valid=True):
yield 'post', None yield 'post', None
return return
current_epoch_count = len(state.current_epoch_attestations) if not is_post_lightclient_patch(spec):
previous_epoch_count = len(state.previous_epoch_attestations) current_epoch_count = len(state.current_epoch_attestations)
previous_epoch_count = len(state.previous_epoch_attestations)
# process attestation # process attestation
spec.process_attestation(state, attestation) spec.process_attestation(state, attestation)
# Make sure the attestation has been processed # Make sure the attestation has been processed
if attestation.data.target.epoch == spec.get_current_epoch(state): if not is_post_lightclient_patch(spec):
assert len(state.current_epoch_attestations) == current_epoch_count + 1 if attestation.data.target.epoch == spec.get_current_epoch(state):
assert len(state.current_epoch_attestations) == current_epoch_count + 1
else:
assert len(state.previous_epoch_attestations) == previous_epoch_count + 1
else: else:
assert len(state.previous_epoch_attestations) == previous_epoch_count + 1 # After accounting reform, there are cases when processing an attestation does not result in any flag updates
pass
# yield post-state # yield post-state
yield 'post', state yield 'post', state
@ -315,7 +320,8 @@ def prepare_state_with_attestations(spec, state, participation_fn=None):
next_slot(spec, state) next_slot(spec, state)
assert state.slot == next_epoch_start_slot + spec.MIN_ATTESTATION_INCLUSION_DELAY assert state.slot == next_epoch_start_slot + spec.MIN_ATTESTATION_INCLUSION_DELAY
assert len(state.previous_epoch_attestations) == len(attestations) if not is_post_lightclient_patch(spec):
assert len(state.previous_epoch_attestations) == len(attestations)
return attestations return attestations

View File

@ -1,3 +1,4 @@
from eth2spec.test.context import is_post_lightclient_patch
from eth2spec.test.helpers.keys import privkeys from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils import bls from eth2spec.utils import bls
from eth2spec.utils.bls import only_with_bls from eth2spec.utils.bls import only_with_bls
@ -89,6 +90,10 @@ def build_empty_block(spec, state, slot=None):
empty_block.proposer_index = spec.get_beacon_proposer_index(state) empty_block.proposer_index = spec.get_beacon_proposer_index(state)
empty_block.body.eth1_data.deposit_count = state.eth1_deposit_index empty_block.body.eth1_data.deposit_count = state.eth1_deposit_index
empty_block.parent_root = parent_block_root empty_block.parent_root = parent_block_root
if is_post_lightclient_patch(spec):
empty_block.body.sync_committee_signature = spec.G2_POINT_AT_INFINITY
apply_randao_reveal(spec, state, empty_block) apply_randao_reveal(spec, state, empty_block)
return empty_block return empty_block

View File

@ -1,14 +1,29 @@
process_calls = [ from eth2spec.test.context import is_post_lightclient_patch
'process_justification_and_finalization',
'process_rewards_and_penalties',
'process_registry_updates', def get_process_calls(spec):
'process_reveal_deadlines', return [
'process_challenge_deadlines', # PHASE0
'process_slashings', 'process_justification_and_finalization',
'process_final_updates', 'process_rewards_and_penalties',
'after_process_final_updates', 'process_registry_updates',
] 'process_reveal_deadlines',
'process_challenge_deadlines',
'process_slashings',
'process_eth1_data_reset',
'process_effective_balance_updates',
'process_slashings_reset',
'process_randao_mixes_reset',
'process_historical_roots_update',
# HF1 replaced `process_participation_record_updates` with `process_participation_flag_updates`
'process_participation_flag_updates' if is_post_lightclient_patch(spec) else (
'process_participation_record_updates'
),
'process_sync_committee_updates',
# PHASE1
'process_phase_1_final_updates',
]
def run_epoch_processing_to(spec, state, process_name: str): def run_epoch_processing_to(spec, state, process_name: str):
@ -25,7 +40,7 @@ def run_epoch_processing_to(spec, state, process_name: str):
spec.process_slot(state) spec.process_slot(state)
# process components of epoch transition before final-updates # process components of epoch transition before final-updates
for name in process_calls: for name in get_process_calls(spec):
if name == process_name: if name == process_name:
break break
# only run when present. Later phases introduce more to the epoch-processing. # only run when present. Later phases introduce more to the epoch-processing.

View File

@ -1,8 +1,16 @@
from eth2spec.test.context import is_post_lightclient_patch
from eth2spec.test.helpers.block_header import sign_block_header from eth2spec.test.helpers.block_header import sign_block_header
from eth2spec.test.helpers.keys import pubkey_to_privkey from eth2spec.test.helpers.keys import pubkey_to_privkey
from eth2spec.test.helpers.state import get_balance from eth2spec.test.helpers.state import get_balance
def get_min_slashing_penalty_quotient(spec):
if is_post_lightclient_patch(spec):
return spec.HF1_MIN_SLASHING_PENALTY_QUOTIENT
else:
return spec.MIN_SLASHING_PENALTY_QUOTIENT
def check_proposer_slashing_effect(spec, pre_state, state, slashed_index): def check_proposer_slashing_effect(spec, pre_state, state, slashed_index):
slashed_validator = state.validators[slashed_index] slashed_validator = state.validators[slashed_index]
assert slashed_validator.slashed assert slashed_validator.slashed
@ -10,7 +18,7 @@ def check_proposer_slashing_effect(spec, pre_state, state, slashed_index):
assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH
proposer_index = spec.get_beacon_proposer_index(state) proposer_index = spec.get_beacon_proposer_index(state)
slash_penalty = state.validators[slashed_index].effective_balance // spec.MIN_SLASHING_PENALTY_QUOTIENT slash_penalty = state.validators[slashed_index].effective_balance // get_min_slashing_penalty_quotient(spec)
whistleblower_reward = state.validators[slashed_index].effective_balance // spec.WHISTLEBLOWER_REWARD_QUOTIENT whistleblower_reward = state.validators[slashed_index].effective_balance // spec.WHISTLEBLOWER_REWARD_QUOTIENT
if proposer_index != slashed_index: if proposer_index != slashed_index:
# slashed validator lost initial slash penalty # slashed validator lost initial slash penalty

View File

@ -2,6 +2,7 @@ from random import Random
from lru import LRU from lru import LRU
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.test.context import is_post_lightclient_patch
from eth2spec.test.helpers.attestations import cached_prepare_state_with_attestations from eth2spec.test.helpers.attestations import cached_prepare_state_with_attestations
from eth2spec.test.helpers.deposits import mock_deposit from eth2spec.test.helpers.deposits import mock_deposit
from eth2spec.test.helpers.state import next_epoch from eth2spec.test.helpers.state import next_epoch
@ -37,24 +38,35 @@ def run_deltas(spec, state):
- inactivity penalty deltas ('inactivity_penalty_deltas') - inactivity penalty deltas ('inactivity_penalty_deltas')
""" """
yield 'pre', state yield 'pre', state
if is_post_lightclient_patch(spec):
def get_source_deltas(state):
return spec.get_flag_deltas(state, spec.TIMELY_SOURCE_FLAG, spec.TIMELY_SOURCE_NUMERATOR)
def get_head_deltas(state):
return spec.get_flag_deltas(state, spec.TIMELY_HEAD_FLAG, spec.TIMELY_HEAD_NUMERATOR)
def get_target_deltas(state):
return spec.get_flag_deltas(state, spec.TIMELY_TARGET_FLAG, spec.TIMELY_TARGET_NUMERATOR)
yield from run_attestation_component_deltas( yield from run_attestation_component_deltas(
spec, spec,
state, state,
spec.get_source_deltas, spec.get_source_deltas if not is_post_lightclient_patch(spec) else get_source_deltas,
spec.get_matching_source_attestations, spec.get_matching_source_attestations,
'source_deltas', 'source_deltas',
) )
yield from run_attestation_component_deltas( yield from run_attestation_component_deltas(
spec, spec,
state, state,
spec.get_target_deltas, spec.get_target_deltas if not is_post_lightclient_patch(spec) else get_target_deltas,
spec.get_matching_target_attestations, spec.get_matching_target_attestations,
'target_deltas', 'target_deltas',
) )
yield from run_attestation_component_deltas( yield from run_attestation_component_deltas(
spec, spec,
state, state,
spec.get_head_deltas, spec.get_head_deltas if not is_post_lightclient_patch(spec) else get_head_deltas,
spec.get_matching_head_attestations, spec.get_matching_head_attestations,
'head_deltas', 'head_deltas',
) )
@ -62,6 +74,16 @@ def run_deltas(spec, state):
yield from run_get_inactivity_penalty_deltas(spec, state) yield from run_get_inactivity_penalty_deltas(spec, state)
def deltas_name_to_flag(spec, deltas_name):
if 'source' in deltas_name:
return spec.TIMELY_SOURCE_FLAG
elif 'head' in deltas_name:
return spec.TIMELY_HEAD_FLAG
elif 'target' in deltas_name:
return spec.TIMELY_TARGET_FLAG
raise ValueError("Wrong deltas_name %s" % deltas_name)
def run_attestation_component_deltas(spec, state, component_delta_fn, matching_att_fn, deltas_name): def run_attestation_component_deltas(spec, state, component_delta_fn, matching_att_fn, deltas_name):
""" """
Run ``component_delta_fn``, yielding: Run ``component_delta_fn``, yielding:
@ -71,8 +93,14 @@ def run_attestation_component_deltas(spec, state, component_delta_fn, matching_a
yield deltas_name, Deltas(rewards=rewards, penalties=penalties) yield deltas_name, Deltas(rewards=rewards, penalties=penalties)
matching_attestations = matching_att_fn(state, spec.get_previous_epoch(state)) if not is_post_lightclient_patch(spec):
matching_indices = spec.get_unslashed_attesting_indices(state, matching_attestations) matching_attestations = matching_att_fn(state, spec.get_previous_epoch(state))
matching_indices = spec.get_unslashed_attesting_indices(state, matching_attestations)
else:
matching_indices = spec.get_unslashed_participating_indices(
state, deltas_name_to_flag(spec, deltas_name), spec.get_previous_epoch(state)
)
eligible_indices = spec.get_eligible_validator_indices(state) eligible_indices = spec.get_eligible_validator_indices(state)
for index in range(len(state.validators)): for index in range(len(state.validators)):
if index not in eligible_indices: if index not in eligible_indices:
@ -101,6 +129,12 @@ def run_get_inclusion_delay_deltas(spec, state):
Run ``get_inclusion_delay_deltas``, yielding: Run ``get_inclusion_delay_deltas``, yielding:
- inclusion delay deltas ('inclusion_delay_deltas') - inclusion delay deltas ('inclusion_delay_deltas')
""" """
if is_post_lightclient_patch(spec):
# No inclusion_delay_deltas
yield 'inclusion_delay_deltas', Deltas(rewards=[0] * len(state.validators),
penalties=[0] * len(state.validators))
return
rewards, penalties = spec.get_inclusion_delay_deltas(state) rewards, penalties = spec.get_inclusion_delay_deltas(state)
yield 'inclusion_delay_deltas', Deltas(rewards=rewards, penalties=penalties) yield 'inclusion_delay_deltas', Deltas(rewards=rewards, penalties=penalties)
@ -148,8 +182,14 @@ def run_get_inactivity_penalty_deltas(spec, state):
yield 'inactivity_penalty_deltas', Deltas(rewards=rewards, penalties=penalties) yield 'inactivity_penalty_deltas', Deltas(rewards=rewards, penalties=penalties)
matching_attestations = spec.get_matching_target_attestations(state, spec.get_previous_epoch(state)) if not is_post_lightclient_patch(spec):
matching_attesting_indices = spec.get_unslashed_attesting_indices(state, matching_attestations) matching_attestations = spec.get_matching_target_attestations(state, spec.get_previous_epoch(state))
matching_attesting_indices = spec.get_unslashed_attesting_indices(state, matching_attestations)
else:
matching_attesting_indices = spec.get_unslashed_participating_indices(
state, spec.TIMELY_TARGET_FLAG, spec.get_previous_epoch(state)
)
reward_numerator_sum = sum(numerator for (_, numerator) in spec.get_flags_and_numerators())
eligible_indices = spec.get_eligible_validator_indices(state) eligible_indices = spec.get_eligible_validator_indices(state)
for index in range(len(state.validators)): for index in range(len(state.validators)):
@ -159,8 +199,14 @@ def run_get_inactivity_penalty_deltas(spec, state):
continue continue
if spec.is_in_inactivity_leak(state): if spec.is_in_inactivity_leak(state):
base_reward = spec.get_base_reward(state, index) # Compute base_penalty
base_penalty = spec.BASE_REWARDS_PER_EPOCH * base_reward - spec.get_proposer_reward(state, index) if not is_post_lightclient_patch(spec):
cancel_base_rewards_per_epoch = spec.BASE_REWARDS_PER_EPOCH
base_reward = spec.get_base_reward(state, index)
base_penalty = cancel_base_rewards_per_epoch * base_reward - spec.get_proposer_reward(state, index)
else:
base_penalty = spec.get_base_reward(state, index) * reward_numerator_sum // spec.REWARD_DENOMINATOR
if not has_enough_for_reward(spec, state, index): if not has_enough_for_reward(spec, state, index):
assert penalties[index] == 0 assert penalties[index] == 0
elif index in matching_attesting_indices: elif index in matching_attesting_indices:
@ -262,8 +308,13 @@ def run_test_full_all_correct(spec, state):
def run_test_full_but_partial_participation(spec, state, rng=Random(5522)): def run_test_full_but_partial_participation(spec, state, rng=Random(5522)):
cached_prepare_state_with_attestations(spec, state) cached_prepare_state_with_attestations(spec, state)
for a in state.previous_epoch_attestations: if not is_post_lightclient_patch(spec):
a.aggregation_bits = [rng.choice([True, False]) for _ in a.aggregation_bits] for a in state.previous_epoch_attestations:
a.aggregation_bits = [rng.choice([True, False]) for _ in a.aggregation_bits]
else:
for index in range(len(state.validators)):
if rng.choice([True, False]):
state.previous_epoch_participation[index] = spec.ValidatorFlag(0)
yield from run_deltas(spec, state) yield from run_deltas(spec, state)
@ -272,8 +323,12 @@ def run_test_partial(spec, state, fraction_filled):
cached_prepare_state_with_attestations(spec, state) cached_prepare_state_with_attestations(spec, state)
# Remove portion of attestations # Remove portion of attestations
num_attestations = int(len(state.previous_epoch_attestations) * fraction_filled) if not is_post_lightclient_patch(spec):
state.previous_epoch_attestations = state.previous_epoch_attestations[:num_attestations] num_attestations = int(len(state.previous_epoch_attestations) * fraction_filled)
state.previous_epoch_attestations = state.previous_epoch_attestations[:num_attestations]
else:
for index in range(int(len(state.validators) * fraction_filled)):
state.previous_epoch_participation[index] = spec.ValidatorFlag(0)
yield from run_deltas(spec, state) yield from run_deltas(spec, state)
@ -328,13 +383,18 @@ def run_test_some_very_low_effective_balances_that_attested(spec, state):
def run_test_some_very_low_effective_balances_that_did_not_attest(spec, state): def run_test_some_very_low_effective_balances_that_did_not_attest(spec, state):
cached_prepare_state_with_attestations(spec, state) cached_prepare_state_with_attestations(spec, state)
# Remove attestation if not is_post_lightclient_patch(spec):
attestation = state.previous_epoch_attestations[0] # Remove attestation
state.previous_epoch_attestations = state.previous_epoch_attestations[1:] attestation = state.previous_epoch_attestations[0]
# Set removed indices effective balance to very low amount state.previous_epoch_attestations = state.previous_epoch_attestations[1:]
indices = spec.get_unslashed_attesting_indices(state, [attestation]) # Set removed indices effective balance to very low amount
for i, index in enumerate(indices): indices = spec.get_unslashed_attesting_indices(state, [attestation])
state.validators[index].effective_balance = i for i, index in enumerate(indices):
state.validators[index].effective_balance = i
else:
index = 0
state.validators[index].effective_balance = 1
state.previous_epoch_participation[index] = spec.ValidatorFlag(0)
yield from run_deltas(spec, state) yield from run_deltas(spec, state)
@ -442,16 +502,42 @@ def run_test_full_random(spec, state, rng=Random(8020)):
cached_prepare_state_with_attestations(spec, state) cached_prepare_state_with_attestations(spec, state)
for pending_attestation in state.previous_epoch_attestations: if not is_post_lightclient_patch(spec):
# ~1/3 have bad target for pending_attestation in state.previous_epoch_attestations:
if rng.randint(0, 2) == 0: # ~1/3 have bad target
pending_attestation.data.target.root = b'\x55' * 32 if rng.randint(0, 2) == 0:
# ~1/3 have bad head pending_attestation.data.target.root = b'\x55' * 32
if rng.randint(0, 2) == 0: # ~1/3 have bad head
pending_attestation.data.beacon_block_root = b'\x66' * 32 if rng.randint(0, 2) == 0:
# ~50% participation pending_attestation.data.beacon_block_root = b'\x66' * 32
pending_attestation.aggregation_bits = [rng.choice([True, False]) for _ in pending_attestation.aggregation_bits] # ~50% participation
# Random inclusion delay pending_attestation.aggregation_bits = [rng.choice([True, False])
pending_attestation.inclusion_delay = rng.randint(1, spec.SLOTS_PER_EPOCH) for _ in pending_attestation.aggregation_bits]
# Random inclusion delay
pending_attestation.inclusion_delay = rng.randint(1, spec.SLOTS_PER_EPOCH)
else:
for index in range(len(state.validators)):
# ~1/3 have bad head or bad target or not timely enough
is_timely_correct_head = rng.randint(0, 2) != 0
flags = state.previous_epoch_participation[index]
def set_flag(f, v):
nonlocal flags
if v:
flags |= f
else:
flags &= 0xff ^ f
set_flag(spec.TIMELY_HEAD_FLAG, is_timely_correct_head)
if is_timely_correct_head:
# If timely head, then must be timely target
set_flag(spec.TIMELY_TARGET_FLAG, True)
# If timely head, then must be timely source
set_flag(spec.TIMELY_SOURCE_FLAG, True)
else:
# ~50% of remaining have bad target or not timely enough
set_flag(spec.TIMELY_TARGET_FLAG, rng.choice([True, False]))
# ~50% of remaining have bad source or not timely enough
set_flag(spec.TIMELY_SOURCE_FLAG, rng.choice([True, False]))
state.previous_epoch_participation[index] = flags
yield from run_deltas(spec, state) yield from run_deltas(spec, state)

View File

@ -0,0 +1,33 @@
from eth2spec.test.helpers.keys import privkeys
from eth2spec.test.helpers.block import (
build_empty_block_for_next_slot,
)
from eth2spec.utils import bls
def compute_sync_committee_signature(spec, state, slot, privkey):
domain = spec.get_domain(state, spec.DOMAIN_SYNC_COMMITTEE, spec.compute_epoch_at_slot(slot))
if slot == state.slot:
block_root = build_empty_block_for_next_slot(spec, state).parent_root
else:
block_root = spec.get_block_root_at_slot(state, slot)
signing_root = spec.compute_signing_root(block_root, domain)
return bls.Sign(privkey, signing_root)
def compute_aggregate_sync_committee_signature(spec, state, slot, participants):
if len(participants) == 0:
return spec.G2_POINT_AT_INFINITY
signatures = []
for validator_index in participants:
privkey = privkeys[validator_index]
signatures.append(
compute_sync_committee_signature(
spec,
state,
slot,
privkey,
)
)
return bls.Aggregate(signatures)

View File

@ -0,0 +1,319 @@
from collections import Counter
import random
from eth2spec.test.helpers.block import (
build_empty_block_for_next_slot,
transition_unsigned_block,
)
from eth2spec.test.helpers.state import (
state_transition_and_sign_block,
transition_to,
)
from eth2spec.test.helpers.sync_committee import (
compute_aggregate_sync_committee_signature,
)
from eth2spec.test.context import (
PHASE0, PHASE1,
MAINNET, MINIMAL,
expect_assertion_error,
with_all_phases_except,
with_configs,
spec_state_test,
always_bls,
)
from eth2spec.utils.hash_function import hash
def get_committee_indices(spec, state, duplicates=False):
'''
This utility function allows the caller to ensure there are or are not
duplicate validator indices in the returned committee based on
the boolean ``duplicates``.
'''
state = state.copy()
current_epoch = spec.get_current_epoch(state)
randao_index = current_epoch % spec.EPOCHS_PER_HISTORICAL_VECTOR
while True:
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
if duplicates:
if len(committee) != len(set(committee)):
return committee
else:
if len(committee) == len(set(committee)):
return committee
state.randao_mixes[randao_index] = hash(state.randao_mixes[randao_index])
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_invalid_signature_missing_participant(spec, state):
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
random_participant = random.choice(committee)
yield 'pre', state
block = build_empty_block_for_next_slot(spec, state)
# Exclude one participant whose signature was included.
block.body.sync_committee_bits = [index != random_participant for index in committee]
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
committee, # full committee signs
)
yield 'blocks', [block]
expect_assertion_error(lambda: spec.process_sync_committee(state, block.body))
yield 'post', None
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_invalid_signature_extra_participant(spec, state):
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
random_participant = random.choice(committee)
yield 'pre', state
block = build_empty_block_for_next_slot(spec, state)
# Exclude one signature even though the block claims the entire committee participated.
block.body.sync_committee_bits = [True] * len(committee)
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
[index for index in committee if index != random_participant],
)
yield 'blocks', [block]
expect_assertion_error(lambda: spec.process_sync_committee(state, block.body))
yield 'post', None
def compute_sync_committee_participant_reward(spec, state, participant_index, active_validator_count, committee_size):
base_reward = spec.get_base_reward(state, participant_index)
proposer_reward = spec.get_proposer_reward(state, participant_index)
max_participant_reward = base_reward - proposer_reward
return max_participant_reward * active_validator_count // committee_size // spec.SLOTS_PER_EPOCH
@with_all_phases_except([PHASE0, PHASE1])
@with_configs([MINIMAL], reason="to create nonduplicate committee")
@spec_state_test
def test_sync_committee_rewards_nonduplicate_committee(spec, state):
committee = get_committee_indices(spec, state, duplicates=False)
committee_size = len(committee)
active_validator_count = len(spec.get_active_validator_indices(state, spec.get_current_epoch(state)))
# Preconditions of this test case
assert active_validator_count >= spec.SYNC_COMMITTEE_SIZE
assert committee_size == len(set(committee))
yield 'pre', state
pre_balances = state.balances.copy()
block = build_empty_block_for_next_slot(spec, state)
block.body.sync_committee_bits = [True] * committee_size
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
committee,
)
signed_block = state_transition_and_sign_block(spec, state, block)
yield 'blocks', [signed_block]
yield 'post', state
for index in range(len(state.validators)):
expected_reward = 0
if index == block.proposer_index:
expected_reward += sum([spec.get_proposer_reward(state, index) for index in committee])
if index in committee:
expected_reward += compute_sync_committee_participant_reward(
spec,
state,
index,
active_validator_count,
committee_size
)
assert state.balances[index] == pre_balances[index] + expected_reward
@with_all_phases_except([PHASE0, PHASE1])
@with_configs([MAINNET], reason="to create duplicate committee")
@spec_state_test
def test_sync_committee_rewards_duplicate_committee(spec, state):
committee = get_committee_indices(spec, state, duplicates=True)
committee_size = len(committee)
active_validator_count = len(spec.get_active_validator_indices(state, spec.get_current_epoch(state)))
# Preconditions of this test case
assert active_validator_count < spec.SYNC_COMMITTEE_SIZE
assert committee_size > len(set(committee))
yield 'pre', state
pre_balances = state.balances.copy()
block = build_empty_block_for_next_slot(spec, state)
block.body.sync_committee_bits = [True] * committee_size
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
committee,
)
signed_block = state_transition_and_sign_block(spec, state, block)
yield 'blocks', [signed_block]
yield 'post', state
multiplicities = Counter(committee)
for index in range(len(state.validators)):
expected_reward = 0
if index == block.proposer_index:
expected_reward += sum([spec.get_proposer_reward(state, index) for index in committee])
if index in committee:
reward = compute_sync_committee_participant_reward(
spec,
state,
index,
active_validator_count,
committee_size,
)
expected_reward += reward * multiplicities[index]
assert state.balances[index] == pre_balances[index] + expected_reward
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
@always_bls
def test_invalid_signature_past_block(spec, state):
committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
yield 'pre', state
blocks = []
for _ in range(2):
# NOTE: need to transition twice to move beyond the degenerate case at genesis
block = build_empty_block_for_next_slot(spec, state)
# Valid sync committee signature here...
block.body.sync_committee_bits = [True] * len(committee)
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
committee,
)
signed_block = state_transition_and_sign_block(spec, state, block)
blocks.append(signed_block)
invalid_block = build_empty_block_for_next_slot(spec, state)
# Invalid signature from a slot other than the previous
invalid_block.body.sync_committee_bits = [True] * len(committee)
invalid_block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
invalid_block.slot - 2,
committee,
)
blocks.append(invalid_block)
expect_assertion_error(lambda: transition_unsigned_block(spec, state, invalid_block))
yield 'blocks', blocks
yield 'post', None
@with_all_phases_except([PHASE0, PHASE1])
@with_configs([MINIMAL], reason="to produce different committee sets")
@spec_state_test
@always_bls
def test_invalid_signature_previous_committee(spec, state):
# NOTE: the `state` provided is at genesis and the process to select
# sync committees currently returns the same committee for the first and second
# periods at genesis.
# To get a distinct committee so we can generate an "old" signature, we need to advance
# 2 EPOCHS_PER_SYNC_COMMITTEE_PERIOD periods.
current_epoch = spec.get_current_epoch(state)
old_sync_committee = state.next_sync_committee
epoch_in_future_sync_commitee_period = current_epoch + 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
slot_in_future_sync_committee_period = epoch_in_future_sync_commitee_period * spec.SLOTS_PER_EPOCH
transition_to(spec, state, slot_in_future_sync_committee_period)
yield 'pre', state
# Use the previous sync committee to produce the signature.
pubkeys = [validator.pubkey for validator in state.validators]
# Ensure that the pubkey sets are different.
assert set(old_sync_committee.pubkeys) != set(state.current_sync_committee.pubkeys)
committee = [pubkeys.index(pubkey) for pubkey in old_sync_committee.pubkeys]
block = build_empty_block_for_next_slot(spec, state)
block.body.sync_committee_bits = [True] * len(committee)
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
committee,
)
yield 'blocks', [block]
expect_assertion_error(lambda: spec.process_sync_committee(state, block.body))
yield 'post', None
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_valid_signature_future_committee(spec, state):
# NOTE: the `state` provided is at genesis and the process to select
# sync committees currently returns the same committee for the first and second
# periods at genesis.
# To get a distinct committee so we can generate an "old" signature, we need to advance
# 2 EPOCHS_PER_SYNC_COMMITTEE_PERIOD periods.
current_epoch = spec.get_current_epoch(state)
old_current_sync_committee = state.current_sync_committee
old_next_sync_committee = state.next_sync_committee
epoch_in_future_sync_committee_period = current_epoch + 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
slot_in_future_sync_committee_period = epoch_in_future_sync_committee_period * spec.SLOTS_PER_EPOCH
transition_to(spec, state, slot_in_future_sync_committee_period)
sync_committee = state.current_sync_committee
expected_sync_committee = spec.get_sync_committee(state, epoch_in_future_sync_committee_period)
assert sync_committee == expected_sync_committee
assert sync_committee != old_current_sync_committee
assert sync_committee != old_next_sync_committee
pubkeys = [validator.pubkey for validator in state.validators]
committee_indices = [pubkeys.index(pubkey) for pubkey in sync_committee.pubkeys]
yield 'pre', state
block = build_empty_block_for_next_slot(spec, state)
block.body.sync_committee_bits = [True] * len(committee_indices)
block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
spec,
state,
block.slot - 1,
committee_indices,
)
signed_block = state_transition_and_sign_block(spec, state, block)
yield 'blocks', [signed_block]
yield 'post', state

View File

@ -0,0 +1,37 @@
from eth2spec.test.context import (
PHASE0, PHASE1,
with_all_phases_except,
spec_state_test,
)
from eth2spec.test.helpers.state import transition_to
from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with,
)
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_sync_committees_progress(spec, state):
current_epoch = spec.get_current_epoch(state)
# NOTE: if not in the genesis epoch, period math below needs to be
# adjusted relative to the current epoch
assert current_epoch == 0
first_sync_committee = state.current_sync_committee
second_sync_committee = state.next_sync_committee
slot_at_end_of_current_period = spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH - 1
transition_to(spec, state, slot_at_end_of_current_period)
# Ensure assignments have not changed:
assert state.current_sync_committee == first_sync_committee
assert state.next_sync_committee == second_sync_committee
yield from run_epoch_processing_with(spec, state, 'process_sync_committee_updates')
# Can compute the third committee having computed final balances in the last epoch
# of this `EPOCHS_PER_SYNC_COMMITTEE_PERIOD`
third_sync_committee = spec.get_sync_committee(state, 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD)
assert state.current_sync_committee == second_sync_committee
assert state.next_sync_committee == third_sync_committee

View File

@ -0,0 +1,75 @@
import random
from eth2spec.test.helpers.state import (
state_transition_and_sign_block,
next_epoch,
)
from eth2spec.test.helpers.block import (
build_empty_block_for_next_slot,
)
from eth2spec.test.helpers.sync_committee import (
compute_aggregate_sync_committee_signature,
)
from eth2spec.test.context import (
PHASE0, PHASE1,
with_all_phases_except,
spec_state_test,
)
def run_sync_committee_sanity_test(spec, state, fraction_full=1.0):
    """Include a block whose sync committee aggregate is signed by a random
    `fraction_full` subset of the current sync committee, yielding the
    pre-state, signed block, and post-state for test-vector generation.

    NOTE(review): `random.sample` is unseeded, so the participant subset
    differs between runs — confirm whether deterministic vectors are required.
    """
    committee = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
    participants = random.sample(committee, int(len(committee) * fraction_full))
    # Set lookup keeps the bitfield construction linear in the committee size
    # (membership test on the list would be O(len(committee)^2)).
    participant_set = set(participants)

    yield 'pre', state

    block = build_empty_block_for_next_slot(spec, state)
    block.body.sync_committee_bits = [index in participant_set for index in committee]
    block.body.sync_committee_signature = compute_aggregate_sync_committee_signature(
        spec,
        state,
        block.slot - 1,  # signature is over the slot preceding the block
        participants,
    )
    signed_block = state_transition_and_sign_block(spec, state, block)

    yield 'blocks', [signed_block]
    yield 'post', state
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_full_sync_committee_committee(spec, state):
    # Full (100%) sync committee participation, one epoch after genesis.
    # NOTE(review): doubled "committee_committee" in the name looks like a typo,
    # but renaming would change the generated test-vector identifier — confirm.
    next_epoch(spec, state)
    yield from run_sync_committee_sanity_test(spec, state, fraction_full=1.0)
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_half_sync_committee_committee(spec, state):
    # Half (50%) sync committee participation, one epoch after genesis.
    next_epoch(spec, state)
    yield from run_sync_committee_sanity_test(spec, state, fraction_full=0.5)
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_empty_sync_committee_committee(spec, state):
    # Zero sync committee participation, one epoch after genesis.
    next_epoch(spec, state)
    yield from run_sync_committee_sanity_test(spec, state, fraction_full=0.0)
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_full_sync_committee_committee_genesis(spec, state):
    # Full (100%) sync committee participation, starting from the genesis epoch.
    yield from run_sync_committee_sanity_test(spec, state, fraction_full=1.0)
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_half_sync_committee_committee_genesis(spec, state):
    # Half (50%) sync committee participation, starting from the genesis epoch.
    yield from run_sync_committee_sanity_test(spec, state, fraction_full=0.5)
@with_all_phases_except([PHASE0, PHASE1])
@spec_state_test
def test_empty_sync_committee_committee_genesis(spec, state):
    # Zero sync committee participation, starting from the genesis epoch.
    yield from run_sync_committee_sanity_test(spec, state, fraction_full=0.0)

View File

@ -2,10 +2,13 @@ from eth2spec.test.context import (
spec_state_test, spec_state_test,
always_bls, never_bls, always_bls, never_bls,
with_all_phases, with_all_phases,
with_all_phases_except,
spec_test, spec_test,
low_balances, low_balances,
with_custom_state, with_custom_state,
single_phase) single_phase,
PHASE1,
)
from eth2spec.test.helpers.attestations import ( from eth2spec.test.helpers.attestations import (
run_attestation_processing, run_attestation_processing,
get_valid_attestation, get_valid_attestation,
@ -329,3 +332,212 @@ def test_too_few_aggregation_bits(spec, state):
attestation.aggregation_bits = attestation.aggregation_bits[:-1] attestation.aggregation_bits = attestation.aggregation_bits[:-1]
yield from run_attestation_processing(spec, state, attestation, False) yield from run_attestation_processing(spec, state, attestation, False)
#
# Full correct attestation contents at different slot inclusions
#
@with_all_phases
@spec_state_test
def test_correct_min_inclusion_delay(spec, state):
    # Fully correct, signed attestation included at the earliest allowed slot.
    attestation = get_valid_attestation(spec, state, signed=True)
    next_slots(spec, state, spec.MIN_ATTESTATION_INCLUSION_DELAY)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_correct_sqrt_epoch_delay(spec, state):
    # Correct attestation included after sqrt(SLOTS_PER_EPOCH) slots;
    # on_time=False since it is past the minimum inclusion delay.
    attestation = get_valid_attestation(spec, state, signed=True, on_time=False)
    next_slots(spec, state, spec.integer_squareroot(spec.SLOTS_PER_EPOCH))
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_correct_epoch_delay(spec, state):
    # Correct attestation included exactly one full epoch later —
    # the last slot still inside the inclusion window.
    attestation = get_valid_attestation(spec, state, signed=True, on_time=False)
    next_slots(spec, state, spec.SLOTS_PER_EPOCH)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_correct_after_epoch_delay(spec, state):
    # Correct attestation included one slot past the inclusion window:
    # processing is expected to fail (valid=False).
    attestation = get_valid_attestation(spec, state, signed=True, on_time=False)
    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    yield from run_attestation_processing(spec, state, attestation, False)
#
# Incorrect head but correct source/target at different slot inclusions
#
@with_all_phases_except([PHASE1])
@spec_state_test
def test_incorrect_head_min_inclusion_delay(spec, state):
    # Bogus head root, correct source/target, earliest inclusion slot.
    # NOTE(review): skipped for PHASE1 — presumably its stricter
    # beacon_block_root check rejects the mutated root; confirm.
    attestation = get_valid_attestation(spec, state, signed=False)
    next_slots(spec, state, spec.MIN_ATTESTATION_INCLUSION_DELAY)
    attestation.data.beacon_block_root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_head_sqrt_epoch_delay(spec, state):
    # Bogus head root, correct source/target, sqrt-epoch inclusion delay.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    next_slots(spec, state, spec.integer_squareroot(spec.SLOTS_PER_EPOCH))
    attestation.data.beacon_block_root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_head_epoch_delay(spec, state):
    # Bogus head root, correct source/target, one-epoch inclusion delay.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    next_slots(spec, state, spec.SLOTS_PER_EPOCH)
    attestation.data.beacon_block_root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_head_after_epoch_delay(spec, state):
    # Bogus head root AND past the inclusion window: expected to fail.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    attestation.data.beacon_block_root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation, False)
#
# Incorrect head and target but correct source at different slot inclusions
#
# Note: current phase 1 spec checks
# `assert data.beacon_block_root == get_block_root_at_slot(state, compute_previous_slot(state.slot))`
# so this test can't pass that until phase 1 refactor is merged
@with_all_phases_except([PHASE1])
@spec_state_test
def test_incorrect_head_and_target_min_inclusion_delay(spec, state):
    # Bogus head and target roots, correct source, earliest inclusion slot.
    # Skipped for PHASE1: its spec asserts beacon_block_root equals the
    # previous-slot block root, so the mutated root cannot pass there.
    attestation = get_valid_attestation(spec, state, signed=False)
    next_slots(spec, state, spec.MIN_ATTESTATION_INCLUSION_DELAY)
    attestation.data.beacon_block_root = b'\x42' * 32
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_head_and_target_sqrt_epoch_delay(spec, state):
    # Bogus head and target roots, correct source, sqrt-epoch delay.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    next_slots(spec, state, spec.integer_squareroot(spec.SLOTS_PER_EPOCH))
    attestation.data.beacon_block_root = b'\x42' * 32
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_head_and_target_epoch_delay(spec, state):
    # Bogus head and target roots, correct source, one-epoch delay.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    next_slots(spec, state, spec.SLOTS_PER_EPOCH)
    attestation.data.beacon_block_root = b'\x42' * 32
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_head_and_target_after_epoch_delay(spec, state):
    # Bogus head/target roots AND past the inclusion window: expected to fail.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    attestation.data.beacon_block_root = b'\x42' * 32
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation, False)
#
# Correct head and source but incorrect target at different slot inclusions
#
@with_all_phases_except([PHASE1])
@spec_state_test
def test_incorrect_target_min_inclusion_delay(spec, state):
    # Bogus target root, correct head/source, earliest inclusion slot.
    # NOTE(review): skipped for PHASE1 — presumably interacts with its
    # on-time attestation handling; confirm against the phase 1 spec.
    attestation = get_valid_attestation(spec, state, signed=False)
    next_slots(spec, state, spec.MIN_ATTESTATION_INCLUSION_DELAY)
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_target_sqrt_epoch_delay(spec, state):
    # Bogus target root, correct head/source, sqrt-epoch delay.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    next_slots(spec, state, spec.integer_squareroot(spec.SLOTS_PER_EPOCH))
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_target_epoch_delay(spec, state):
    # Bogus target root, correct head/source, one-epoch delay.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    next_slots(spec, state, spec.SLOTS_PER_EPOCH)
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_incorrect_target_after_epoch_delay(spec, state):
    # Bogus target root AND past the inclusion window: expected to fail.
    attestation = get_valid_attestation(spec, state, signed=False, on_time=False)
    # increment past latest inclusion slot
    next_slots(spec, state, spec.SLOTS_PER_EPOCH + 1)
    attestation.data.target.root = b'\x42' * 32
    sign_attestation(spec, state, attestation)
    yield from run_attestation_processing(spec, state, attestation, False)

View File

@ -4,6 +4,7 @@ from eth2spec.test.context import (
from eth2spec.test.helpers.attestations import sign_indexed_attestation from eth2spec.test.helpers.attestations import sign_indexed_attestation
from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing, \ from eth2spec.test.helpers.attester_slashings import get_valid_attester_slashing, \
get_indexed_attestation_participants, get_attestation_2_data, get_attestation_1_data get_indexed_attestation_participants, get_attestation_2_data, get_attestation_1_data
from eth2spec.test.helpers.proposer_slashings import get_min_slashing_penalty_quotient
from eth2spec.test.helpers.state import ( from eth2spec.test.helpers.state import (
get_balance, get_balance,
next_epoch_via_block, next_epoch_via_block,
@ -70,7 +71,7 @@ def run_attester_slashing_processing(spec, state, attester_slashing, valid=True)
expected_balance = ( expected_balance = (
pre_proposer_balance pre_proposer_balance
+ total_proposer_rewards + total_proposer_rewards
- pre_slashings[proposer_index] // spec.MIN_SLASHING_PENALTY_QUOTIENT - pre_slashings[proposer_index] // get_min_slashing_penalty_quotient(spec)
) )
assert get_balance(state, proposer_index) == expected_balance assert get_balance(state, proposer_index) == expected_balance

View File

@ -1,46 +1,11 @@
from eth2spec.test.context import spec_state_test, with_all_phases from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import ( from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with, run_epoch_processing_to run_epoch_processing_with, run_epoch_processing_to
) )
from eth2spec.test.helpers.state import transition_to
def run_process_final_updates(spec, state): def run_process_effective_balance_updates(spec, state):
yield from run_epoch_processing_with(spec, state, 'process_final_updates') yield from run_epoch_processing_with(spec, state, 'process_effective_balance_updates')
@with_all_phases
@spec_state_test
def test_eth1_vote_no_reset(spec, state):
assert spec.EPOCHS_PER_ETH1_VOTING_PERIOD > 1
# skip ahead to the end of the epoch
transition_to(spec, state, spec.SLOTS_PER_EPOCH - 1)
for i in range(state.slot + 1): # add a vote for each skipped slot.
state.eth1_data_votes.append(
spec.Eth1Data(deposit_root=b'\xaa' * 32,
deposit_count=state.eth1_deposit_index,
block_hash=b'\xbb' * 32))
yield from run_process_final_updates(spec, state)
assert len(state.eth1_data_votes) == spec.SLOTS_PER_EPOCH
@with_all_phases
@spec_state_test
def test_eth1_vote_reset(spec, state):
# skip ahead to the end of the voting period
state.slot = (spec.EPOCHS_PER_ETH1_VOTING_PERIOD * spec.SLOTS_PER_EPOCH) - 1
for i in range(state.slot + 1): # add a vote for each skipped slot.
state.eth1_data_votes.append(
spec.Eth1Data(deposit_root=b'\xaa' * 32,
deposit_count=state.eth1_deposit_index,
block_hash=b'\xbb' * 32))
yield from run_process_final_updates(spec, state)
assert len(state.eth1_data_votes) == 0
@with_all_phases @with_all_phases
@ -48,7 +13,7 @@ def test_eth1_vote_reset(spec, state):
def test_effective_balance_hysteresis(spec, state): def test_effective_balance_hysteresis(spec, state):
# Prepare state up to the final-updates. # Prepare state up to the final-updates.
# Then overwrite the balances, we only want to focus to be on the hysteresis based changes. # Then overwrite the balances, we only want to focus to be on the hysteresis based changes.
run_epoch_processing_to(spec, state, 'process_final_updates') run_epoch_processing_to(spec, state, 'process_effective_balance_updates')
# Set some edge cases for balances # Set some edge cases for balances
max = spec.MAX_EFFECTIVE_BALANCE max = spec.MAX_EFFECTIVE_BALANCE
min = spec.EJECTION_BALANCE min = spec.EJECTION_BALANCE
@ -79,21 +44,7 @@ def test_effective_balance_hysteresis(spec, state):
state.validators[i].effective_balance = pre_eff state.validators[i].effective_balance = pre_eff
state.balances[i] = bal state.balances[i] = bal
yield 'pre', state yield from run_process_effective_balance_updates(spec, state)
spec.process_final_updates(state)
yield 'post', state
for i, (_, _, post_eff, name) in enumerate(cases): for i, (_, _, post_eff, name) in enumerate(cases):
assert state.validators[i].effective_balance == post_eff, name assert state.validators[i].effective_balance == post_eff, name
@with_all_phases
@spec_state_test
def test_historical_root_accumulator(spec, state):
# skip ahead to near the end of the historical roots period (excl block before epoch processing)
state.slot = spec.SLOTS_PER_HISTORICAL_ROOT - 1
history_len = len(state.historical_roots)
yield from run_process_final_updates(spec, state)
assert len(state.historical_roots) == history_len + 1

View File

@ -0,0 +1,43 @@
from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with,
)
from eth2spec.test.helpers.state import transition_to
def run_process_eth1_data_reset(spec, state):
    # Thin wrapper: run epoch processing up to and including
    # `process_eth1_data_reset`, yielding the standard test-vector parts.
    yield from run_epoch_processing_with(spec, state, 'process_eth1_data_reset')
@with_all_phases
@spec_state_test
def test_eth1_vote_no_reset(spec, state):
    # Votes must survive an epoch boundary that is NOT a voting-period boundary.
    assert spec.EPOCHS_PER_ETH1_VOTING_PERIOD > 1
    # skip ahead to the end of the epoch
    transition_to(spec, state, spec.SLOTS_PER_EPOCH - 1)
    for i in range(state.slot + 1):  # add a vote for each skipped slot.
        state.eth1_data_votes.append(
            spec.Eth1Data(deposit_root=b'\xaa' * 32,
                          deposit_count=state.eth1_deposit_index,
                          block_hash=b'\xbb' * 32))
    yield from run_process_eth1_data_reset(spec, state)
    # All votes are retained — no reset mid-period.
    assert len(state.eth1_data_votes) == spec.SLOTS_PER_EPOCH
@with_all_phases
@spec_state_test
def test_eth1_vote_reset(spec, state):
    # Votes must be cleared at the end of a full eth1 voting period.
    # skip ahead to the end of the voting period
    state.slot = (spec.EPOCHS_PER_ETH1_VOTING_PERIOD * spec.SLOTS_PER_EPOCH) - 1
    for i in range(state.slot + 1):  # add a vote for each skipped slot.
        state.eth1_data_votes.append(
            spec.Eth1Data(deposit_root=b'\xaa' * 32,
                          deposit_count=state.eth1_deposit_index,
                          block_hash=b'\xbb' * 32))
    yield from run_process_eth1_data_reset(spec, state)
    assert len(state.eth1_data_votes) == 0

View File

@ -0,0 +1,20 @@
from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with
)
def run_process_historical_roots_update(spec, state):
    # Thin wrapper: run epoch processing up to and including
    # `process_historical_roots_update`, yielding the standard parts.
    yield from run_epoch_processing_with(spec, state, 'process_historical_roots_update')
@with_all_phases
@spec_state_test
def test_historical_root_accumulator(spec, state):
    # One new entry is appended to `historical_roots` at each
    # SLOTS_PER_HISTORICAL_ROOT boundary.
    # skip ahead to near the end of the historical roots period (excl block before epoch processing)
    state.slot = spec.SLOTS_PER_HISTORICAL_ROOT - 1
    history_len = len(state.historical_roots)
    yield from run_process_historical_roots_update(spec, state)
    assert len(state.historical_roots) == history_len + 1

View File

@ -1,6 +1,6 @@
from eth2spec.test.context import spec_state_test, with_all_phases from eth2spec.test.context import is_post_lightclient_patch, spec_state_test, with_all_phases
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import ( from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with run_epoch_processing_with,
) )
from eth2spec.test.helpers.state import transition_to from eth2spec.test.helpers.state import transition_to
@ -16,12 +16,20 @@ def add_mock_attestations(spec, state, epoch, source, target, sufficient_support
previous_epoch = spec.get_previous_epoch(state) previous_epoch = spec.get_previous_epoch(state)
current_epoch = spec.get_current_epoch(state) current_epoch = spec.get_current_epoch(state)
if current_epoch == epoch: if not is_post_lightclient_patch(spec):
attestations = state.current_epoch_attestations if current_epoch == epoch:
elif previous_epoch == epoch: attestations = state.current_epoch_attestations
attestations = state.previous_epoch_attestations elif previous_epoch == epoch:
attestations = state.previous_epoch_attestations
else:
raise Exception(f"cannot include attestations in epoch ${epoch} from epoch ${current_epoch}")
else: else:
raise Exception(f"cannot include attestations in epoch ${epoch} from epoch ${current_epoch}") if current_epoch == epoch:
epoch_participation = state.current_epoch_participation
elif previous_epoch == epoch:
epoch_participation = state.previous_epoch_participation
else:
raise Exception(f"cannot include attestations in epoch ${epoch} from epoch ${current_epoch}")
total_balance = spec.get_total_active_balance(state) total_balance = spec.get_total_active_balance(state)
remaining_balance = int(total_balance * 2 // 3) # can become negative remaining_balance = int(total_balance * 2 // 3) # can become negative
@ -52,19 +60,28 @@ def add_mock_attestations(spec, state, epoch, source, target, sufficient_support
for i in range(max(len(committee) // 5, 1)): for i in range(max(len(committee) // 5, 1)):
aggregation_bits[i] = 0 aggregation_bits[i] = 0
attestations.append(spec.PendingAttestation( # Update state
aggregation_bits=aggregation_bits, if not is_post_lightclient_patch(spec):
data=spec.AttestationData( attestations.append(spec.PendingAttestation(
slot=slot, aggregation_bits=aggregation_bits,
beacon_block_root=b'\xff' * 32, # irrelevant to testing data=spec.AttestationData(
source=source, slot=slot,
target=target, beacon_block_root=b'\xff' * 32, # irrelevant to testing
index=index, source=source,
), target=target,
inclusion_delay=1, index=index,
)) ),
if messed_up_target: inclusion_delay=1,
attestations[len(attestations) - 1].data.target.root = b'\x99' * 32 ))
if messed_up_target:
attestations[len(attestations) - 1].data.target.root = b'\x99' * 32
else:
for i, index in enumerate(committee):
if aggregation_bits[i]:
epoch_participation[index] |= spec.TIMELY_HEAD_FLAG
epoch_participation[index] |= spec.TIMELY_SOURCE_FLAG
if not messed_up_target:
epoch_participation[index] |= spec.TIMELY_TARGET_FLAG
def get_checkpoints(spec, epoch): def get_checkpoints(spec, epoch):

View File

@ -0,0 +1,21 @@
from eth2spec.test.context import PHASE0, spec_state_test, with_phases
from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with
)
def run_process_participation_record_updates(spec, state):
    # Thin wrapper: run epoch processing up to and including
    # `process_participation_record_updates`, yielding the standard parts.
    yield from run_epoch_processing_with(spec, state, 'process_participation_record_updates')
@with_phases([PHASE0])
@spec_state_test
def test_updated_participation_record(spec, state):
    # Current-epoch attestations rotate into previous-epoch attestations,
    # and the current list is emptied. Phase 0 only: these fields are
    # replaced by participation flags after the lightclient patch.
    state.previous_epoch_attestations = [spec.PendingAttestation(proposer_index=100)]
    current_epoch_attestations = [spec.PendingAttestation(proposer_index=200)]
    state.current_epoch_attestations = current_epoch_attestations
    yield from run_process_participation_record_updates(spec, state)
    assert state.previous_epoch_attestations == current_epoch_attestations
    assert state.current_epoch_attestations == []
assert state.current_epoch_attestations == []

View File

@ -0,0 +1,21 @@
from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with
)
def run_process_randao_mixes_reset(spec, state):
    # Thin wrapper: run epoch processing up to and including
    # `process_randao_mixes_reset`, yielding the standard parts.
    yield from run_epoch_processing_with(spec, state, 'process_randao_mixes_reset')
@with_all_phases
@spec_state_test
def test_updated_randao_mixes(spec, state):
    # The slot for the upcoming epoch in the circular `randao_mixes` buffer
    # is overwritten with the current epoch's mix at the epoch boundary.
    next_epoch = spec.get_current_epoch(state) + 1
    state.randao_mixes[next_epoch % spec.EPOCHS_PER_HISTORICAL_VECTOR] = b'\x56' * 32
    yield from run_process_randao_mixes_reset(spec, state)
    assert state.randao_mixes[next_epoch % spec.EPOCHS_PER_HISTORICAL_VECTOR] == spec.get_randao_mix(
        state, spec.get_current_epoch(state)
    )

View File

@ -1,7 +1,7 @@
from eth2spec.test.helpers.deposits import mock_deposit from eth2spec.test.helpers.deposits import mock_deposit
from eth2spec.test.helpers.state import next_epoch, next_slots from eth2spec.test.helpers.state import next_epoch, next_slots
from eth2spec.test.context import spec_state_test, with_all_phases from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import run_epoch_processing_with from eth2spec.test.helpers.epoch_processing import run_epoch_processing_with
def run_process_registry_updates(spec, state): def run_process_registry_updates(spec, state):

View File

@ -1,10 +1,11 @@
from eth2spec.test.context import ( from eth2spec.test.context import (
spec_state_test, spec_test, spec_state_test, spec_test,
with_all_phases, single_phase, with_all_phases, single_phase,
with_phases, PHASE0, with_phases, PHASE0, PHASE1,
with_custom_state, with_custom_state,
zero_activation_threshold, zero_activation_threshold,
misc_balances, low_single_balance, misc_balances, low_single_balance,
is_post_lightclient_patch,
) )
from eth2spec.test.helpers.state import ( from eth2spec.test.helpers.state import (
next_epoch, next_epoch,
@ -18,7 +19,7 @@ from eth2spec.test.helpers.attestations import (
) )
from eth2spec.test.helpers.rewards import leaking from eth2spec.test.helpers.rewards import leaking
from eth2spec.test.helpers.attester_slashings import get_indexed_attestation_participants from eth2spec.test.helpers.attester_slashings import get_indexed_attestation_participants
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import run_epoch_processing_with from eth2spec.test.helpers.epoch_processing import run_epoch_processing_with
from random import Random from random import Random
@ -65,7 +66,7 @@ def test_genesis_epoch_full_attestations_no_rewards(spec, state):
assert state.balances[index] == pre_state.balances[index] assert state.balances[index] == pre_state.balances[index]
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_attestations_random_incorrect_fields(spec, state): def test_full_attestations_random_incorrect_fields(spec, state):
attestations = prepare_state_with_attestations(spec, state) attestations = prepare_state_with_attestations(spec, state)
@ -158,10 +159,13 @@ def run_with_participation(spec, state, participation_fn):
return att_participants return att_participants
attestations = prepare_state_with_attestations(spec, state, participation_fn=participation_tracker) attestations = prepare_state_with_attestations(spec, state, participation_fn=participation_tracker)
proposer_indices = [a.proposer_index for a in state.previous_epoch_attestations]
pre_state = state.copy() pre_state = state.copy()
if not is_post_lightclient_patch(spec):
proposer_indices = [a.proposer_index for a in state.previous_epoch_attestations]
else:
sync_committee_indices = spec.get_sync_committee_indices(state, spec.get_current_epoch(state))
yield from run_process_rewards_and_penalties(spec, state) yield from run_process_rewards_and_penalties(spec, state)
attesting_indices = spec.get_unslashed_attesting_indices(state, attestations) attesting_indices = spec.get_unslashed_attesting_indices(state, attestations)
@ -169,12 +173,16 @@ def run_with_participation(spec, state, participation_fn):
for index in range(len(pre_state.validators)): for index in range(len(pre_state.validators)):
if spec.is_in_inactivity_leak(state): if spec.is_in_inactivity_leak(state):
# Proposers can still make money during a leak # Proposers can still make money during a leak before LIGHTCLIENT_PATCH
if index in proposer_indices and index in participated: if not is_post_lightclient_patch(spec) and index in proposer_indices and index in participated:
assert state.balances[index] > pre_state.balances[index] assert state.balances[index] > pre_state.balances[index]
# If not proposer but participated optimally, should have exactly neutral balance
elif index in attesting_indices: elif index in attesting_indices:
assert state.balances[index] == pre_state.balances[index] if is_post_lightclient_patch(spec) and index in sync_committee_indices:
# The sync committee reward has not been canceled, so the sync committee participants still earn it
assert state.balances[index] >= pre_state.balances[index]
else:
# If not proposer but participated optimally, should have exactly neutral balance
assert state.balances[index] == pre_state.balances[index]
else: else:
assert state.balances[index] < pre_state.balances[index] assert state.balances[index] < pre_state.balances[index]
else: else:
@ -420,7 +428,8 @@ def test_attestations_some_slashed(spec, state):
for i in range(spec.MIN_PER_EPOCH_CHURN_LIMIT): for i in range(spec.MIN_PER_EPOCH_CHURN_LIMIT):
spec.slash_validator(state, attesting_indices_before_slashings[i]) spec.slash_validator(state, attesting_indices_before_slashings[i])
assert len(state.previous_epoch_attestations) == len(attestations) if not is_post_lightclient_patch(spec):
assert len(state.previous_epoch_attestations) == len(attestations)
pre_state = state.copy() pre_state = state.copy()

View File

@ -1,5 +1,5 @@
from eth2spec.test.context import spec_state_test, with_all_phases from eth2spec.test.context import spec_state_test, with_all_phases, is_post_lightclient_patch
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import ( from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with, run_epoch_processing_to run_epoch_processing_with, run_epoch_processing_to
) )
from eth2spec.test.helpers.state import next_epoch from eth2spec.test.helpers.state import next_epoch
@ -23,12 +23,19 @@ def slash_validators(spec, state, indices, out_epochs):
] = total_slashed_balance ] = total_slashed_balance
def get_slashing_multiplier(spec):
    """Select the proportional-slashing multiplier constant for this fork."""
    return (
        spec.HF1_PROPORTIONAL_SLASHING_MULTIPLIER
        if is_post_lightclient_patch(spec)
        else spec.PROPORTIONAL_SLASHING_MULTIPLIER
    )
@with_all_phases @with_all_phases
@spec_state_test @spec_state_test
def test_max_penalties(spec, state): def test_max_penalties(spec, state):
# Slashed count to ensure that enough validators are slashed to induce maximum penalties # Slashed count to ensure that enough validators are slashed to induce maximum penalties
slashed_count = min( slashed_count = min(
(len(state.validators) // spec.PROPORTIONAL_SLASHING_MULTIPLIER) + 1, (len(state.validators) // get_slashing_multiplier(spec)) + 1,
# Can't slash more than validator count! # Can't slash more than validator count!
len(state.validators) len(state.validators)
) )
@ -40,7 +47,7 @@ def test_max_penalties(spec, state):
total_balance = spec.get_total_active_balance(state) total_balance = spec.get_total_active_balance(state)
total_penalties = sum(state.slashings) total_penalties = sum(state.slashings)
assert total_balance // spec.PROPORTIONAL_SLASHING_MULTIPLIER <= total_penalties assert total_balance // get_slashing_multiplier(spec) <= total_penalties
yield from run_process_slashings(spec, state) yield from run_process_slashings(spec, state)
@ -50,7 +57,30 @@ def test_max_penalties(spec, state):
@with_all_phases @with_all_phases
@spec_state_test @spec_state_test
def test_small_penalty(spec, state): def test_low_penalty(spec, state):
# Slashed count is one tenth of validator set
slashed_count = (len(state.validators) // 10) + 1
out_epoch = spec.get_current_epoch(state) + (spec.EPOCHS_PER_SLASHINGS_VECTOR // 2)
slashed_indices = list(range(slashed_count))
slash_validators(spec, state, slashed_indices, [out_epoch] * slashed_count)
pre_state = state.copy()
yield from run_process_slashings(spec, state)
for i in slashed_indices:
assert 0 < state.balances[i] < pre_state.balances[i]
@with_all_phases
@spec_state_test
def test_minimal_penalty(spec, state):
#
# When very few slashings, the resulting slashing penalty gets rounded down
# to zero so the result of `process_slashings` is null
#
# Just the bare minimum for this one validator # Just the bare minimum for this one validator
state.balances[0] = state.validators[0].effective_balance = spec.EJECTION_BALANCE state.balances[0] = state.validators[0].effective_balance = spec.EJECTION_BALANCE
# All the other validators get the maximum. # All the other validators get the maximum.
@ -74,11 +104,13 @@ def test_small_penalty(spec, state):
expected_penalty = ( expected_penalty = (
state.validators[0].effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT state.validators[0].effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT
* (3 * total_penalties) * (get_slashing_multiplier(spec) * total_penalties)
// total_balance // total_balance
* spec.EFFECTIVE_BALANCE_INCREMENT * spec.EFFECTIVE_BALANCE_INCREMENT
) )
assert state.balances[0] == pre_slash_balances[0] - expected_penalty
assert expected_penalty == 0
assert state.balances[0] == pre_slash_balances[0]
@with_all_phases @with_all_phases
@ -96,7 +128,7 @@ def test_scaled_penalties(spec, state):
state.slashings[5] = base + (incr * 6) state.slashings[5] = base + (incr * 6)
state.slashings[spec.EPOCHS_PER_SLASHINGS_VECTOR - 1] = base + (incr * 7) state.slashings[spec.EPOCHS_PER_SLASHINGS_VECTOR - 1] = base + (incr * 7)
slashed_count = len(state.validators) // (spec.PROPORTIONAL_SLASHING_MULTIPLIER + 1) slashed_count = len(state.validators) // (get_slashing_multiplier(spec) + 1)
assert slashed_count > 10 assert slashed_count > 10
@ -134,7 +166,7 @@ def test_scaled_penalties(spec, state):
v = state.validators[i] v = state.validators[i]
expected_penalty = ( expected_penalty = (
v.effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT v.effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT
* (spec.PROPORTIONAL_SLASHING_MULTIPLIER * total_penalties) * (get_slashing_multiplier(spec) * total_penalties)
// (total_balance) // (total_balance)
* spec.EFFECTIVE_BALANCE_INCREMENT * spec.EFFECTIVE_BALANCE_INCREMENT
) )

View File

@ -0,0 +1,20 @@
from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.helpers.epoch_processing import (
run_epoch_processing_with
)
def run_process_slashings_reset(spec, state):
    # Thin wrapper: run epoch processing up to and including
    # `process_slashings_reset`, yielding the standard parts.
    yield from run_epoch_processing_with(spec, state, 'process_slashings_reset')
@with_all_phases
@spec_state_test
def test_flush_slashings(spec, state):
    # The slot for the upcoming epoch in the circular `slashings` vector
    # is zeroed at the epoch boundary.
    next_epoch = spec.get_current_epoch(state) + 1
    state.slashings[next_epoch % spec.EPOCHS_PER_SLASHINGS_VECTOR] = 100
    assert state.slashings[next_epoch % spec.EPOCHS_PER_SLASHINGS_VECTOR] != 0
    yield from run_process_slashings_reset(spec, state)
    assert state.slashings[next_epoch % spec.EPOCHS_PER_SLASHINGS_VECTOR] == 0

View File

@ -1,4 +1,4 @@
from eth2spec.test.context import with_all_phases, spec_state_test from eth2spec.test.context import PHASE0, PHASE1, with_all_phases, with_phases, spec_state_test
import eth2spec.test.helpers.rewards as rewards_helpers import eth2spec.test.helpers.rewards as rewards_helpers
@ -32,7 +32,7 @@ def test_full_but_partial_participation(spec, state):
yield from rewards_helpers.run_test_full_but_partial_participation(spec, state) yield from rewards_helpers.run_test_full_but_partial_participation(spec, state)
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_one_attestation_one_correct(spec, state): def test_one_attestation_one_correct(spec, state):
yield from rewards_helpers.run_test_one_attestation_one_correct(spec, state) yield from rewards_helpers.run_test_one_attestation_one_correct(spec, state)
@ -75,7 +75,7 @@ def test_some_very_low_effective_balances_that_did_not_attest(spec, state):
# #
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_half_correct_target_incorrect_head(spec, state): def test_full_half_correct_target_incorrect_head(spec, state):
yield from rewards_helpers.run_test_full_fraction_incorrect( yield from rewards_helpers.run_test_full_fraction_incorrect(
@ -86,7 +86,7 @@ def test_full_half_correct_target_incorrect_head(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_correct_target_incorrect_head(spec, state): def test_full_correct_target_incorrect_head(spec, state):
yield from rewards_helpers.run_test_full_fraction_incorrect( yield from rewards_helpers.run_test_full_fraction_incorrect(
@ -97,7 +97,7 @@ def test_full_correct_target_incorrect_head(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_half_incorrect_target_incorrect_head(spec, state): def test_full_half_incorrect_target_incorrect_head(spec, state):
yield from rewards_helpers.run_test_full_fraction_incorrect( yield from rewards_helpers.run_test_full_fraction_incorrect(
@ -108,7 +108,7 @@ def test_full_half_incorrect_target_incorrect_head(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_half_incorrect_target_correct_head(spec, state): def test_full_half_incorrect_target_correct_head(spec, state):
yield from rewards_helpers.run_test_full_fraction_incorrect( yield from rewards_helpers.run_test_full_fraction_incorrect(
@ -119,31 +119,31 @@ def test_full_half_incorrect_target_correct_head(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_delay_one_slot(spec, state): def test_full_delay_one_slot(spec, state):
yield from rewards_helpers.run_test_full_delay_one_slot(spec, state) yield from rewards_helpers.run_test_full_delay_one_slot(spec, state)
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_delay_max_slots(spec, state): def test_full_delay_max_slots(spec, state):
yield from rewards_helpers.run_test_full_delay_max_slots(spec, state) yield from rewards_helpers.run_test_full_delay_max_slots(spec, state)
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_full_mixed_delay(spec, state): def test_full_mixed_delay(spec, state):
yield from rewards_helpers.run_test_full_mixed_delay(spec, state) yield from rewards_helpers.run_test_full_mixed_delay(spec, state)
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_proposer_not_in_attestations(spec, state): def test_proposer_not_in_attestations(spec, state):
yield from rewards_helpers.run_test_proposer_not_in_attestations(spec, state) yield from rewards_helpers.run_test_proposer_not_in_attestations(spec, state)
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
def test_duplicate_attestations_at_later_slots(spec, state): def test_duplicate_attestations_at_later_slots(spec, state):
yield from rewards_helpers.run_test_duplicate_attestations_at_later_slots(spec, state) yield from rewards_helpers.run_test_duplicate_attestations_at_later_slots(spec, state)

View File

@ -1,4 +1,4 @@
from eth2spec.test.context import with_all_phases, spec_state_test from eth2spec.test.context import PHASE0, PHASE1, with_all_phases, with_phases, spec_state_test
from eth2spec.test.helpers.rewards import leaking from eth2spec.test.helpers.rewards import leaking
import eth2spec.test.helpers.rewards as rewards_helpers import eth2spec.test.helpers.rewards as rewards_helpers
@ -38,7 +38,7 @@ def test_full_but_partial_participation_leak(spec, state):
yield from rewards_helpers.run_test_full_but_partial_participation(spec, state) yield from rewards_helpers.run_test_full_but_partial_participation(spec, state)
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
@leaking() @leaking()
def test_one_attestation_one_correct_leak(spec, state): def test_one_attestation_one_correct_leak(spec, state):
@ -87,7 +87,7 @@ def test_some_very_low_effective_balances_that_did_not_attest_leak(spec, state):
# #
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
@leaking() @leaking()
def test_full_half_correct_target_incorrect_head_leak(spec, state): def test_full_half_correct_target_incorrect_head_leak(spec, state):
@ -99,7 +99,7 @@ def test_full_half_correct_target_incorrect_head_leak(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
@leaking() @leaking()
def test_full_correct_target_incorrect_head_leak(spec, state): def test_full_correct_target_incorrect_head_leak(spec, state):
@ -111,7 +111,7 @@ def test_full_correct_target_incorrect_head_leak(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
@leaking() @leaking()
def test_full_half_incorrect_target_incorrect_head_leak(spec, state): def test_full_half_incorrect_target_incorrect_head_leak(spec, state):
@ -123,7 +123,7 @@ def test_full_half_incorrect_target_incorrect_head_leak(spec, state):
) )
@with_all_phases @with_phases([PHASE0, PHASE1])
@spec_state_test @spec_state_test
@leaking() @leaking()
def test_full_half_incorrect_target_correct_head_leak(spec, state): def test_full_half_incorrect_target_correct_head_leak(spec, state):

View File

@ -29,6 +29,12 @@ def test_full_random_2(spec, state):
yield from rewards_helpers.run_test_full_random(spec, state, rng=Random(3030)) yield from rewards_helpers.run_test_full_random(spec, state, rng=Random(3030))
@with_all_phases
@spec_state_test
def test_full_random_3(spec, state):
yield from rewards_helpers.run_test_full_random(spec, state, rng=Random(4040))
@with_all_phases @with_all_phases
@with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.EJECTION_BALANCE) @with_custom_state(balances_fn=low_balances, threshold_fn=lambda spec: spec.EJECTION_BALANCE)
@spec_test @spec_test

View File

@ -35,6 +35,7 @@ from eth2spec.test.context import (
with_configs, with_configs,
with_custom_state, with_custom_state,
large_validator_set, large_validator_set,
is_post_lightclient_patch,
) )
@ -780,15 +781,19 @@ def test_attestation(spec, state):
spec, state, shard_transition=shard_transition, index=index, signed=True, on_time=True spec, state, shard_transition=shard_transition, index=index, signed=True, on_time=True
) )
if not is_post_lightclient_patch(spec):
pre_current_attestations_len = len(state.current_epoch_attestations)
# Add to state via block transition # Add to state via block transition
pre_current_attestations_len = len(state.current_epoch_attestations)
attestation_block.body.attestations.append(attestation) attestation_block.body.attestations.append(attestation)
signed_attestation_block = state_transition_and_sign_block(spec, state, attestation_block) signed_attestation_block = state_transition_and_sign_block(spec, state, attestation_block)
assert len(state.current_epoch_attestations) == pre_current_attestations_len + 1 if not is_post_lightclient_patch(spec):
assert len(state.current_epoch_attestations) == pre_current_attestations_len + 1
# Epoch transition should move to previous_epoch_attestations # Epoch transition should move to previous_epoch_attestations
pre_current_attestations_root = spec.hash_tree_root(state.current_epoch_attestations) pre_current_attestations_root = spec.hash_tree_root(state.current_epoch_attestations)
else:
pre_current_epoch_participation_root = spec.hash_tree_root(state.current_epoch_participation)
epoch_block = build_empty_block(spec, state, state.slot + spec.SLOTS_PER_EPOCH) epoch_block = build_empty_block(spec, state, state.slot + spec.SLOTS_PER_EPOCH)
signed_epoch_block = state_transition_and_sign_block(spec, state, epoch_block) signed_epoch_block = state_transition_and_sign_block(spec, state, epoch_block)
@ -796,8 +801,13 @@ def test_attestation(spec, state):
yield 'blocks', [signed_attestation_block, signed_epoch_block] yield 'blocks', [signed_attestation_block, signed_epoch_block]
yield 'post', state yield 'post', state
assert len(state.current_epoch_attestations) == 0 if not is_post_lightclient_patch(spec):
assert spec.hash_tree_root(state.previous_epoch_attestations) == pre_current_attestations_root assert len(state.current_epoch_attestations) == 0
assert spec.hash_tree_root(state.previous_epoch_attestations) == pre_current_attestations_root
else:
for index in range(len(state.validators)):
assert state.current_epoch_participation[index] == 0
assert spec.hash_tree_root(state.previous_epoch_participation) == pre_current_epoch_participation_root
# In phase1 a committee is computed for SHARD_COMMITTEE_PERIOD slots ago, # In phase1 a committee is computed for SHARD_COMMITTEE_PERIOD slots ago,

View File

@ -1,4 +1,4 @@
from eth2spec.test.context import PHASE0, with_all_phases, spec_state_test from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH, with_all_phases, spec_state_test
from eth2spec.test.helpers.block import build_empty_block_for_next_slot from eth2spec.test.helpers.block import build_empty_block_for_next_slot
from eth2spec.test.helpers.attestations import get_valid_attestation, sign_attestation from eth2spec.test.helpers.attestations import get_valid_attestation, sign_attestation
from eth2spec.test.helpers.state import transition_to, state_transition_and_sign_block, next_epoch, next_slot from eth2spec.test.helpers.state import transition_to, state_transition_and_sign_block, next_epoch, next_slot
@ -18,12 +18,12 @@ def run_on_attestation(spec, state, store, attestation, valid=True):
spec.on_attestation(store, attestation) spec.on_attestation(store, attestation)
sample_index = indexed_attestation.attesting_indices[0] sample_index = indexed_attestation.attesting_indices[0]
if spec.fork == PHASE0: if spec.fork in (PHASE0, LIGHTCLIENT_PATCH):
latest_message = spec.LatestMessage( latest_message = spec.LatestMessage(
epoch=attestation.data.target.epoch, epoch=attestation.data.target.epoch,
root=attestation.data.beacon_block_root, root=attestation.data.beacon_block_root,
) )
else: elif spec.fork == PHASE1:
latest_message = spec.LatestMessage( latest_message = spec.LatestMessage(
epoch=attestation.data.target.epoch, epoch=attestation.data.target.epoch,
root=attestation.data.beacon_block_root, root=attestation.data.beacon_block_root,

View File

@ -1,5 +1,6 @@
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
with_all_phases_except, with_all_phases_except,
spec_state_test, spec_state_test,
always_bls, always_bls,
@ -12,7 +13,7 @@ from eth2spec.test.helpers.attestations import (
) )
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_on_time_success(spec, state): def test_on_time_success(spec, state):
@ -23,7 +24,7 @@ def test_on_time_success(spec, state):
yield from run_attestation_processing(spec, state, attestation) yield from run_attestation_processing(spec, state, attestation)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_late_success(spec, state): def test_late_success(spec, state):

View File

@ -9,6 +9,7 @@ from eth2spec.test.helpers.attestations import (
from eth2spec.test.helpers.state import transition_to, transition_to_valid_shard_slot from eth2spec.test.helpers.state import transition_to, transition_to_valid_shard_slot
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
MINIMAL, MINIMAL,
expect_assertion_error, expect_assertion_error,
disable_process_reveal_deadlines, disable_process_reveal_deadlines,
@ -68,7 +69,7 @@ def run_custody_chunk_response_processing(spec, state, custody_response, valid=T
yield 'post', state yield 'post', state
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@ -92,7 +93,7 @@ def test_challenge_appended(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge) yield from run_chunk_challenge_processing(spec, state, challenge)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -118,7 +119,7 @@ def test_challenge_empty_element_replaced(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge) yield from run_chunk_challenge_processing(spec, state, challenge)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -144,7 +145,7 @@ def test_duplicate_challenge(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge, valid=False) yield from run_chunk_challenge_processing(spec, state, challenge, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -172,7 +173,7 @@ def test_second_challenge(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge1) yield from run_chunk_challenge_processing(spec, state, challenge1)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -197,7 +198,7 @@ def test_multiple_epochs_custody(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge) yield from run_chunk_challenge_processing(spec, state, challenge)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -222,7 +223,7 @@ def test_many_epochs_custody(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge) yield from run_chunk_challenge_processing(spec, state, challenge)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -243,7 +244,7 @@ def test_off_chain_attestation(spec, state):
yield from run_chunk_challenge_processing(spec, state, challenge) yield from run_chunk_challenge_processing(spec, state, challenge)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -275,7 +276,7 @@ def test_custody_response(spec, state):
yield from run_custody_chunk_response_processing(spec, state, custody_response) yield from run_custody_chunk_response_processing(spec, state, custody_response)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -306,7 +307,7 @@ def test_custody_response_chunk_index_2(spec, state):
yield from run_custody_chunk_response_processing(spec, state, custody_response) yield from run_custody_chunk_response_processing(spec, state, custody_response)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -338,7 +339,7 @@ def test_custody_response_multiple_epochs(spec, state):
yield from run_custody_chunk_response_processing(spec, state, custody_response) yield from run_custody_chunk_response_processing(spec, state, custody_response)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")

View File

@ -1,6 +1,7 @@
from eth2spec.test.helpers.custody import get_valid_custody_key_reveal from eth2spec.test.helpers.custody import get_valid_custody_key_reveal
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
with_all_phases_except, with_all_phases_except,
spec_state_test, spec_state_test,
expect_assertion_error, expect_assertion_error,
@ -39,7 +40,7 @@ def run_custody_key_reveal_processing(spec, state, custody_key_reveal, valid=Tru
yield 'post', state yield 'post', state
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_success(spec, state): def test_success(spec, state):
@ -49,7 +50,7 @@ def test_success(spec, state):
yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal) yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_reveal_too_early(spec, state): def test_reveal_too_early(spec, state):
@ -58,7 +59,7 @@ def test_reveal_too_early(spec, state):
yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal, False) yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal, False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_wrong_period(spec, state): def test_wrong_period(spec, state):
@ -67,7 +68,7 @@ def test_wrong_period(spec, state):
yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal, False) yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal, False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_late_reveal(spec, state): def test_late_reveal(spec, state):
@ -77,7 +78,7 @@ def test_late_reveal(spec, state):
yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal) yield from run_custody_key_reveal_processing(spec, state, custody_key_reveal)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_double_reveal(spec, state): def test_double_reveal(spec, state):

View File

@ -11,6 +11,7 @@ from eth2spec.test.helpers.state import get_balance, transition_to
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
MINIMAL, MINIMAL,
LIGHTCLIENT_PATCH,
with_all_phases_except, with_all_phases_except,
spec_state_test, spec_state_test,
expect_assertion_error, expect_assertion_error,
@ -112,7 +113,7 @@ def run_standard_custody_slashing_test(spec,
yield from run_custody_slashing_processing(spec, state, slashing, valid=valid, correct=correct) yield from run_custody_slashing_processing(spec, state, slashing, valid=valid, correct=correct)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -120,7 +121,7 @@ def test_custody_slashing(spec, state):
yield from run_standard_custody_slashing_test(spec, state) yield from run_standard_custody_slashing_test(spec, state)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -128,7 +129,7 @@ def test_incorrect_custody_slashing(spec, state):
yield from run_standard_custody_slashing_test(spec, state, correct=False) yield from run_standard_custody_slashing_test(spec, state, correct=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -136,7 +137,7 @@ def test_multiple_epochs_custody(spec, state):
yield from run_standard_custody_slashing_test(spec, state, shard_lateness=spec.SLOTS_PER_EPOCH * 3) yield from run_standard_custody_slashing_test(spec, state, shard_lateness=spec.SLOTS_PER_EPOCH * 3)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
@ -144,7 +145,7 @@ def test_many_epochs_custody(spec, state):
yield from run_standard_custody_slashing_test(spec, state, shard_lateness=spec.SLOTS_PER_EPOCH * 5) yield from run_standard_custody_slashing_test(spec, state, shard_lateness=spec.SLOTS_PER_EPOCH * 5)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@disable_process_reveal_deadlines @disable_process_reveal_deadlines
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")

View File

@ -2,6 +2,7 @@ from eth2spec.test.helpers.custody import get_valid_early_derived_secret_reveal
from eth2spec.test.helpers.state import next_epoch_via_block, get_balance from eth2spec.test.helpers.state import next_epoch_via_block, get_balance
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
with_all_phases_except, with_all_phases_except,
spec_state_test, spec_state_test,
expect_assertion_error, expect_assertion_error,
@ -41,7 +42,7 @@ def run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, v
yield 'post', state yield 'post', state
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_success(spec, state): def test_success(spec, state):
@ -50,7 +51,7 @@ def test_success(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@never_bls @never_bls
def test_reveal_from_current_epoch(spec, state): def test_reveal_from_current_epoch(spec, state):
@ -59,7 +60,7 @@ def test_reveal_from_current_epoch(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, False) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@never_bls @never_bls
def test_reveal_from_past_epoch(spec, state): def test_reveal_from_past_epoch(spec, state):
@ -69,7 +70,7 @@ def test_reveal_from_past_epoch(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, False) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_reveal_with_custody_padding(spec, state): def test_reveal_with_custody_padding(spec, state):
@ -81,7 +82,7 @@ def test_reveal_with_custody_padding(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, True) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, True)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
def test_reveal_with_custody_padding_minus_one(spec, state): def test_reveal_with_custody_padding_minus_one(spec, state):
@ -93,7 +94,7 @@ def test_reveal_with_custody_padding_minus_one(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, True) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, True)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@never_bls @never_bls
def test_double_reveal(spec, state): def test_double_reveal(spec, state):
@ -114,7 +115,7 @@ def test_double_reveal(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal2, False) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal2, False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@never_bls @never_bls
def test_revealer_is_slashed(spec, state): def test_revealer_is_slashed(spec, state):
@ -124,7 +125,7 @@ def test_revealer_is_slashed(spec, state):
yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, False) yield from run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@never_bls @never_bls
def test_far_future_epoch(spec, state): def test_far_future_epoch(spec, state):

View File

@ -1,5 +1,6 @@
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
with_all_phases_except, with_all_phases_except,
only_full_crosslink, only_full_crosslink,
spec_state_test, spec_state_test,
@ -90,21 +91,21 @@ def run_successful_crosslink_tests(spec, state, target_len_offset_slot):
assert bool(pending_attestation.crosslink_success) is True assert bool(pending_attestation.crosslink_success) is True
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_basic_crosslinks(spec, state): def test_basic_crosslinks(spec, state):
yield from run_successful_crosslink_tests(spec, state, target_len_offset_slot=1) yield from run_successful_crosslink_tests(spec, state, target_len_offset_slot=1)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_multiple_offset_slots(spec, state): def test_multiple_offset_slots(spec, state):
yield from run_successful_crosslink_tests(spec, state, target_len_offset_slot=2) yield from run_successful_crosslink_tests(spec, state, target_len_offset_slot=2)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_no_winning_root(spec, state): def test_no_winning_root(spec, state):
@ -152,7 +153,7 @@ def test_no_winning_root(spec, state):
assert state.shard_states == pre_shard_states assert state.shard_states == pre_shard_states
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_wrong_shard_transition_root(spec, state): def test_wrong_shard_transition_root(spec, state):

View File

@ -8,13 +8,14 @@ from eth2spec.test.helpers.attestations import (
from eth2spec.test.helpers.state import transition_to, transition_to_valid_shard_slot from eth2spec.test.helpers.state import transition_to, transition_to_valid_shard_slot
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
MINIMAL, MINIMAL,
spec_state_test, spec_state_test,
with_all_phases_except, with_all_phases_except,
with_configs, with_configs,
) )
from eth2spec.test.phase0.block_processing.test_process_attestation import run_attestation_processing from eth2spec.test.phase0.block_processing.test_process_attestation import run_attestation_processing
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import run_epoch_processing_with from eth2spec.test.helpers.epoch_processing import run_epoch_processing_with
from eth2spec.test.phase1.block_processing.test_process_chunk_challenge import ( from eth2spec.test.phase1.block_processing.test_process_chunk_challenge import (
run_chunk_challenge_processing, run_chunk_challenge_processing,
@ -25,7 +26,7 @@ def run_process_challenge_deadlines(spec, state):
yield from run_epoch_processing_with(spec, state, 'process_challenge_deadlines') yield from run_epoch_processing_with(spec, state, 'process_challenge_deadlines')
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
def test_validator_slashed_after_chunk_challenge(spec, state): def test_validator_slashed_after_chunk_challenge(spec, state):

View File

@ -1,5 +1,6 @@
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
) )
from eth2spec.test.helpers.custody import ( from eth2spec.test.helpers.custody import (
get_valid_chunk_challenge, get_valid_chunk_challenge,
@ -16,7 +17,7 @@ from eth2spec.test.context import (
spec_state_test, spec_state_test,
) )
from eth2spec.test.phase0.block_processing.test_process_attestation import run_attestation_processing from eth2spec.test.phase0.block_processing.test_process_attestation import run_attestation_processing
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import run_epoch_processing_with from eth2spec.test.helpers.epoch_processing import run_epoch_processing_with
from eth2spec.test.phase1.block_processing.test_process_chunk_challenge import ( from eth2spec.test.phase1.block_processing.test_process_chunk_challenge import (
run_chunk_challenge_processing, run_chunk_challenge_processing,
@ -29,7 +30,7 @@ def run_process_custody_final_updates(spec, state):
yield from run_epoch_processing_with(spec, state, 'process_custody_final_updates') yield from run_epoch_processing_with(spec, state, 'process_custody_final_updates')
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_validator_withdrawal_delay(spec, state): def test_validator_withdrawal_delay(spec, state):
transition_to_valid_shard_slot(spec, state) transition_to_valid_shard_slot(spec, state)
@ -42,7 +43,7 @@ def test_validator_withdrawal_delay(spec, state):
assert state.validators[0].withdrawable_epoch == spec.FAR_FUTURE_EPOCH assert state.validators[0].withdrawable_epoch == spec.FAR_FUTURE_EPOCH
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_validator_withdrawal_reenable_after_custody_reveal(spec, state): def test_validator_withdrawal_reenable_after_custody_reveal(spec, state):
transition_to_valid_shard_slot(spec, state) transition_to_valid_shard_slot(spec, state)
@ -67,7 +68,7 @@ def test_validator_withdrawal_reenable_after_custody_reveal(spec, state):
assert state.validators[0].withdrawable_epoch < spec.FAR_FUTURE_EPOCH assert state.validators[0].withdrawable_epoch < spec.FAR_FUTURE_EPOCH
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_validator_withdrawal_suspend_after_chunk_challenge(spec, state): def test_validator_withdrawal_suspend_after_chunk_challenge(spec, state):
transition_to_valid_shard_slot(spec, state) transition_to_valid_shard_slot(spec, state)
@ -116,7 +117,7 @@ def test_validator_withdrawal_suspend_after_chunk_challenge(spec, state):
assert state.validators[validator_index].withdrawable_epoch == spec.FAR_FUTURE_EPOCH assert state.validators[validator_index].withdrawable_epoch == spec.FAR_FUTURE_EPOCH
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_validator_withdrawal_resume_after_chunk_challenge_response(spec, state): def test_validator_withdrawal_resume_after_chunk_challenge_response(spec, state):
transition_to_valid_shard_slot(spec, state) transition_to_valid_shard_slot(spec, state)

View File

@ -4,12 +4,13 @@ from eth2spec.test.helpers.custody import (
from eth2spec.test.helpers.state import transition_to from eth2spec.test.helpers.state import transition_to
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
MINIMAL, MINIMAL,
with_all_phases_except, with_all_phases_except,
with_configs, with_configs,
spec_state_test, spec_state_test,
) )
from eth2spec.test.phase0.epoch_processing.run_epoch_process_base import run_epoch_processing_with from eth2spec.test.helpers.epoch_processing import run_epoch_processing_with
from eth2spec.test.phase1.block_processing.test_process_custody_key_reveal import run_custody_key_reveal_processing from eth2spec.test.phase1.block_processing.test_process_custody_key_reveal import run_custody_key_reveal_processing
@ -17,7 +18,7 @@ def run_process_challenge_deadlines(spec, state):
yield from run_epoch_processing_with(spec, state, 'process_challenge_deadlines') yield from run_epoch_processing_with(spec, state, 'process_challenge_deadlines')
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
def test_validator_slashed_after_reveal_deadline(spec, state): def test_validator_slashed_after_reveal_deadline(spec, state):
@ -37,7 +38,7 @@ def test_validator_slashed_after_reveal_deadline(spec, state):
assert state.validators[0].slashed == 1 assert state.validators[0].slashed == 1
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@with_configs([MINIMAL], reason="too slow") @with_configs([MINIMAL], reason="too slow")
def test_validator_not_slashed_after_reveal(spec, state): def test_validator_not_slashed_after_reveal(spec, state):

View File

@ -1,7 +1,9 @@
from typing import Dict, Sequence from typing import Dict, Sequence
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, MINIMAL, PHASE0,
LIGHTCLIENT_PATCH,
MINIMAL,
with_all_phases_except, with_all_phases_except,
spec_state_test, spec_state_test,
only_full_crosslink, only_full_crosslink,
@ -98,7 +100,7 @@ def run_beacon_block_with_shard_blocks(spec, state, target_len_offset_slot, comm
assert post_shard_state.gasprice > pre_gasprice assert post_shard_state.gasprice > pre_gasprice
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_process_beacon_block_with_normal_shard_transition(spec, state): def test_process_beacon_block_with_normal_shard_transition(spec, state):
@ -112,7 +114,7 @@ def test_process_beacon_block_with_normal_shard_transition(spec, state):
yield from run_beacon_block_with_shard_blocks(spec, state, target_len_offset_slot, committee_index, shard) yield from run_beacon_block_with_shard_blocks(spec, state, target_len_offset_slot, committee_index, shard)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_process_beacon_block_with_empty_proposal_transition(spec, state): def test_process_beacon_block_with_empty_proposal_transition(spec, state):
@ -131,7 +133,7 @@ def test_process_beacon_block_with_empty_proposal_transition(spec, state):
# #
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_with_shard_transition_with_custody_challenge_and_response(spec, state): def test_with_shard_transition_with_custody_challenge_and_response(spec, state):
@ -165,7 +167,7 @@ def test_with_shard_transition_with_custody_challenge_and_response(spec, state):
yield from run_beacon_block(spec, state, block) yield from run_beacon_block(spec, state, block)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@with_configs([MINIMAL]) @with_configs([MINIMAL])
def test_custody_key_reveal(spec, state): def test_custody_key_reveal(spec, state):
@ -179,7 +181,7 @@ def test_custody_key_reveal(spec, state):
yield from run_beacon_block(spec, state, block) yield from run_beacon_block(spec, state, block)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_early_derived_secret_reveal(spec, state): def test_early_derived_secret_reveal(spec, state):
transition_to_valid_shard_slot(spec, state) transition_to_valid_shard_slot(spec, state)
@ -190,7 +192,7 @@ def test_early_derived_secret_reveal(spec, state):
yield from run_beacon_block(spec, state, block) yield from run_beacon_block(spec, state, block)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_custody_slashing(spec, state): def test_custody_slashing(spec, state):

View File

@ -1,5 +1,6 @@
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
always_bls, always_bls,
expect_assertion_error, expect_assertion_error,
spec_state_test, spec_state_test,
@ -43,7 +44,7 @@ def run_shard_blocks(spec, shard_state, signed_shard_block, beacon_parent_state,
shard_state.latest_block_root == pre_shard_state.latest_block_root shard_state.latest_block_root == pre_shard_state.latest_block_root
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink
@ -63,7 +64,7 @@ def test_valid_shard_block(spec, state):
# #
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_invalid_shard_parent_root(spec, state): def test_invalid_shard_parent_root(spec, state):
@ -79,7 +80,7 @@ def test_invalid_shard_parent_root(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_invalid_beacon_parent_root(spec, state): def test_invalid_beacon_parent_root(spec, state):
@ -94,7 +95,7 @@ def test_invalid_beacon_parent_root(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_invalid_slot(spec, state): def test_invalid_slot(spec, state):
@ -110,7 +111,7 @@ def test_invalid_slot(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_invalid_proposer_index(spec, state): def test_invalid_proposer_index(spec, state):
@ -130,7 +131,7 @@ def test_invalid_proposer_index(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink
@ -151,7 +152,7 @@ def test_out_of_bound_offset(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink
@ -170,7 +171,7 @@ def test_invalid_offset(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state, valid=False)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink
@ -189,7 +190,7 @@ def test_empty_block_body(spec, state):
# #
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink
@ -208,7 +209,7 @@ def test_invalid_signature(spec, state):
# #
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink
@ -225,7 +226,7 @@ def test_max_offset(spec, state):
yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state) yield from run_shard_blocks(spec, shard_state, signed_shard_block, beacon_state)
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@always_bls @always_bls
@only_full_crosslink @only_full_crosslink

View File

@ -1,6 +1,13 @@
from eth2spec.utils.ssz.ssz_impl import hash_tree_root from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.test.context import PHASE0, spec_state_test, with_all_phases_except, never_bls, only_full_crosslink from eth2spec.test.context import (
PHASE0,
LIGHTCLIENT_PATCH,
spec_state_test,
with_all_phases_except,
never_bls,
only_full_crosslink,
)
from eth2spec.test.helpers.attestations import get_valid_on_time_attestation from eth2spec.test.helpers.attestations import get_valid_on_time_attestation
from eth2spec.test.helpers.shard_block import ( from eth2spec.test.helpers.shard_block import (
build_shard_block, build_shard_block,
@ -145,7 +152,7 @@ def create_and_apply_beacon_and_shard_blocks(spec, state, store, shard, shard_bl
return has_shard_committee return has_shard_committee
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@never_bls # Set to never_bls for testing `check_pending_shard_blocks` @never_bls # Set to never_bls for testing `check_pending_shard_blocks`
def test_basic(spec, state): def test_basic(spec, state):
@ -206,7 +213,7 @@ def create_simple_fork(spec, state, store, shard):
return head_block, forking_block return head_block, forking_block
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_shard_simple_fork(spec, state): def test_shard_simple_fork(spec, state):
@ -231,7 +238,7 @@ def test_shard_simple_fork(spec, state):
assert spec.get_shard_head(store, shard) == forking_block.message.hash_tree_root() assert spec.get_shard_head(store, shard) == forking_block.message.hash_tree_root()
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
@only_full_crosslink @only_full_crosslink
def test_shard_latest_messages_for_different_shards(spec, state): def test_shard_latest_messages_for_different_shards(spec, state):

View File

@ -1,12 +1,13 @@
from eth2spec.test.context import ( from eth2spec.test.context import (
PHASE0, PHASE0,
LIGHTCLIENT_PATCH,
with_all_phases_except, with_all_phases_except,
spec_state_test, spec_state_test,
) )
from eth2spec.test.helpers.state import next_epoch from eth2spec.test.helpers.state import next_epoch
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_get_committee_count_delta(spec, state): def test_get_committee_count_delta(spec, state):
assert spec.get_committee_count_delta(state, 0, 0) == 0 assert spec.get_committee_count_delta(state, 0, 0) == 0
@ -23,7 +24,7 @@ def test_get_committee_count_delta(spec, state):
) )
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_get_start_shard_current_epoch_start(spec, state): def test_get_start_shard_current_epoch_start(spec, state):
assert state.current_epoch_start_shard == 0 assert state.current_epoch_start_shard == 0
@ -39,7 +40,7 @@ def test_get_start_shard_current_epoch_start(spec, state):
assert start_shard == state.current_epoch_start_shard assert start_shard == state.current_epoch_start_shard
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_get_start_shard_next_slot(spec, state): def test_get_start_shard_next_slot(spec, state):
next_epoch(spec, state) next_epoch(spec, state)
@ -57,7 +58,7 @@ def test_get_start_shard_next_slot(spec, state):
assert start_shard == expected_start_shard assert start_shard == expected_start_shard
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_get_start_shard_previous_slot(spec, state): def test_get_start_shard_previous_slot(spec, state):
next_epoch(spec, state) next_epoch(spec, state)
@ -76,7 +77,7 @@ def test_get_start_shard_previous_slot(spec, state):
assert start_shard == expected_start_shard assert start_shard == expected_start_shard
@with_all_phases_except([PHASE0]) @with_all_phases_except([PHASE0, LIGHTCLIENT_PATCH])
@spec_state_test @spec_state_test
def test_get_start_shard_far_past_epoch(spec, state): def test_get_start_shard_far_past_epoch(spec, state):
initial_epoch = spec.get_current_epoch(state) initial_epoch = spec.get_current_epoch(state)

View File

@ -37,10 +37,17 @@ The provided pre-state is already transitioned to just before the specific sub-t
Sub-transitions: Sub-transitions:
Sub-transitions:
- `justification_and_finalization` - `justification_and_finalization`
- `rewards_and_penalties` (limited to `minimal` config) - `rewards_and_penalties`
- `registry_updates` - `registry_updates`
- `slashings` - `slashings`
- `final_updates` - `eth1_data_reset`
- `effective_balance_updates`
- `slashings_reset`
- `randao_mixes_reset`
- `historical_roots_update`
- `participation_record_updates`
The resulting state should match the expected `post` state. The resulting state should match the expected `post` state.

View File

@ -36,7 +36,7 @@ Prerequisites:
### Cleaning ### Cleaning
This removes the existing virtual environments (`/test_generators/<generator>/venv`) and generated tests (`/yaml_tests/`). This removes the existing virtual environments (`/tests/generators/<generator>/venv`) and generated tests (`../eth2.0-spec-tests/tests`).
```bash ```bash
make clean make clean
@ -47,7 +47,7 @@ make clean
This runs all of the generators. This runs all of the generators.
```bash ```bash
make -j 4 gen_yaml_tests make -j 4 generate_tests
``` ```
The `-j N` flag makes the generators run in parallel, with `N` being the amount of cores. The `-j N` flag makes the generators run in parallel, with `N` being the amount of cores.
@ -55,10 +55,10 @@ The `-j N` flag makes the generators run in parallel, with `N` being the amount
### Running a single generator ### Running a single generator
The makefile auto-detects generators in the `test_generators` directory and provides a tests-gen target for each generator. See example: The makefile auto-detects generators in the `tests/generators` directory and provides a tests-gen target (gen_<generator_name>) for each generator. See example:
```bash ```bash
make ./eth2.0-spec-tests/tests/shuffling/ make gen_ssz_static
``` ```
## Developing a generator ## Developing a generator
@ -78,9 +78,8 @@ It's recommended to extend the base-generator.
Create a `requirements.txt` in the root of your generator directory: Create a `requirements.txt` in the root of your generator directory:
``` ```
../../core/gen_helpers pytest>=4.4
../../core/config_helpers ../../../
../../core/pyspec
``` ```
The config helper and pyspec is optional, but preferred. We encourage generators to derive tests from the spec itself in order to prevent code duplication and outdated tests. The config helper and pyspec is optional, but preferred. We encourage generators to derive tests from the spec itself in order to prevent code duplication and outdated tests.
@ -103,7 +102,7 @@ Write a `main.py` file. The shuffling test generator is a good minimal starting
```python ```python
from eth2spec.phase0 import spec as spec from eth2spec.phase0 import spec as spec
from eth_utils import to_tuple from eth_utils import to_tuple
from gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from preset_loader import loader from preset_loader import loader
from typing import Iterable from typing import Iterable
@ -163,35 +162,40 @@ To extend this, one could decide to parametrize the `shuffling_test_cases` funct
Another example, to generate tests from pytests: Another example, to generate tests from pytests:
```python ```python
def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH
def prepare_fn(configs_path: str) -> str: from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
presets = loader.load_presets(configs_path, config_name)
spec_phase0.apply_constants_preset(presets)
spec_phase1.apply_constants_preset(presets)
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='epoch_processing',
handler_name=handler_name,
src=tests_src,
fork_name='phase0'
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) specs = (spec_phase0, spec_lightclient_patch, spec_phase1)
if __name__ == "__main__": if __name__ == "__main__":
gen_runner.run_generator("epoch_processing", [ phase_0_mods = {key: 'eth2spec.test.phase0.sanity.test_' + key for key in [
create_provider('final_updates', test_process_final_updates, 'minimal'), 'blocks',
... 'slots',
]) ]}
lightclient_patch_mods = {**{key: 'eth2spec.test.lightclient_patch.sanity.test_' + key for key in [
'blocks',
]}, **phase_0_mods} # also run the previous phase 0 tests
phase_1_mods = {**{key: 'eth2spec.test.phase1.sanity.test_' + key for key in [
'blocks', # more phase 1 specific block tests
'shard_blocks',
]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec)
all_mods = {
PHASE0: phase_0_mods,
LIGHTCLIENT_PATCH: lightclient_patch_mods,
PHASE1: phase_1_mods,
}
run_state_test_generators(runner_name="sanity", specs=specs, all_mods=all_mods)
``` ```
Here multiple phases load the configuration, and the stream of test cases is derived from a pytest file using the `generate_from_tests` utility. Here multiple phases load the configuration, and the stream of test cases is derived from a pytest file using the `eth2spec.gen_helpers.gen_from_tests.gen.run_state_test_generators` utility. Note that this helper generates all available tests of `TESTGEN_FORKS` forks of `ALL_CONFIGS` configs of the given runner.
Recommendations: Recommendations:
- You can have more than just one test provider. - You can have more than just one test provider.
@ -200,14 +204,13 @@ Recommendations:
- Use config `minimal` for performance and simplicity, but also implement a suite with the `mainnet` config where necessary. - Use config `minimal` for performance and simplicity, but also implement a suite with the `mainnet` config where necessary.
- You may be able to write your test case provider in a way where it does not make assumptions on constants. - You may be able to write your test case provider in a way where it does not make assumptions on constants.
If so, you can generate test cases with different configurations for the same scenario (see example). If so, you can generate test cases with different configurations for the same scenario (see example).
- See [`tests/core/gen_helpers/README.md`](../core/gen_helpers/README.md) for command line options for generators. - See [`tests/core/gen_helpers/README.md`](../core/pyspec/eth2spec/gen_helpers/README.md) for command line options for generators.
## How to add a new test generator ## How to add a new test generator
To add a new test generator that builds `New Tests`: To add a new test generator that builds `New Tests`:
1. Create a new directory `new_tests` within the `test_generators` directory. 1. Create a new directory `new_tests` within the `tests/generators` directory.
Note that `new_tests` is also the name of the directory in which the tests will appear in the tests repository later. Note that `new_tests` is also the name of the directory in which the tests will appear in the tests repository later.
2. Your generator is assumed to have a `requirements.txt` file, 2. Your generator is assumed to have a `requirements.txt` file,
with any dependencies it may need. Leave it empty if your generator has none. with any dependencies it may need. Leave it empty if your generator has none.
@ -216,8 +219,8 @@ To add a new test generator that builds `New Tests`:
4. Your generator is called with `-o some/file/path/for_testing/can/be_anything -c some/other/path/to_configs/`. 4. Your generator is called with `-o some/file/path/for_testing/can/be_anything -c some/other/path/to_configs/`.
The base generator helps you handle this; you only have to define test case providers. The base generator helps you handle this; you only have to define test case providers.
5. Finally, add any linting or testing commands to the 5. Finally, add any linting or testing commands to the
[circleci config file](../.circleci/config.yml) if desired to increase code quality. [circleci config file](../../.circleci/config.yml) if desired to increase code quality.
Or add it to the [`Makefile`](../Makefile), if it can be run locally. Or add it to the [`Makefile`](../../Makefile), if it can be run locally.
*Note*: You do not have to change the makefile. *Note*: You do not have to change the makefile.
However, if necessary (e.g. not using Python, or mixing in other languages), submit an issue, and it can be a special case. However, if necessary (e.g. not using Python, or mixing in other languages), submit an issue, and it can be a special case.

View File

@ -13,7 +13,7 @@ import milagro_bls_binding as milagro_bls
from eth2spec.utils import bls from eth2spec.utils import bls
from eth2spec.test.context import PHASE0 from eth2spec.test.context import PHASE0
from gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
def to_bytes(i): def to_bytes(i):

View File

@ -1,4 +1,2 @@
py_ecc==5.0.0 pytest>=4.4
eth-utils==1.6.0
../../core/gen_helpers
../../../ ../../../

View File

@ -1,59 +1,42 @@
from typing import Iterable from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from importlib import reload, import_module
from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1 from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH
from eth2spec.utils import bls
def create_provider(fork_name: str, handler_name: str, specs = (spec_phase0, spec_lightclient_patch, spec_phase1)
tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
config_util.prepare_config(configs_path, config_name)
reload(spec_phase0)
reload(spec_phase1)
bls.use_milagro()
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
tests_src = import_module(tests_src_mod_name)
return generate_from_tests(
runner_name='epoch_processing',
handler_name=handler_name,
src=tests_src,
fork_name=fork_name,
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__": if __name__ == "__main__":
phase_0_mods = {key: 'eth2spec.test.phase0.epoch_processing.test_process_' + key for key in [ phase_0_mods = {key: 'eth2spec.test.phase0.epoch_processing.test_process_' + key for key in [
'final_updates',
'justification_and_finalization', 'justification_and_finalization',
'registry_updates',
'rewards_and_penalties', 'rewards_and_penalties',
'registry_updates',
'slashings', 'slashings',
'eth1_data_reset',
'effective_balance_updates',
'slashings_reset',
'randao_mixes_reset',
'historical_roots_update',
'participation_record_updates',
]} ]}
lightclient_patch_mods = {
**{key: 'eth2spec.test.lightclient_patch.epoch_processing.test_process_' + key for key in [
'sync_committee_updates',
]},
**phase_0_mods,
} # also run the previous phase 0 tests
phase_1_mods = {**{key: 'eth2spec.test.phase1.epoch_processing.test_process_' + key for key in [ phase_1_mods = {**{key: 'eth2spec.test.phase1.epoch_processing.test_process_' + key for key in [
'reveal_deadlines',
'challenge_deadlines', 'challenge_deadlines',
'custody_final_updates', 'custody_final_updates',
'reveal_deadlines',
]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec)
gen_runner.run_generator(f"epoch_processing", [ all_mods = {
create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() PHASE0: phase_0_mods,
]) LIGHTCLIENT_PATCH: lightclient_patch_mods,
gen_runner.run_generator(f"epoch_processing", [ PHASE1: phase_1_mods,
create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() }
])
gen_runner.run_generator(f"epoch_processing", [ run_state_test_generators(runner_name="epoch_processing", specs=specs, all_mods=all_mods)
create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items()
])
gen_runner.run_generator(f"epoch_processing", [
create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items()
])

View File

@ -1,2 +1,2 @@
../../core/gen_helpers pytest>=4.4
../../../ ../../../

View File

@ -1,43 +1,23 @@
from typing import Iterable from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
from importlib import reload
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from eth2spec.test.context import PHASE0, PHASE1
from eth2spec.test.phase0.finality import test_finality
from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.utils import bls from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH
def create_provider(fork_name: str, handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider: specs = (spec_phase0, spec_lightclient_patch, spec_phase1)
def prepare_fn(configs_path: str) -> str:
config_util.prepare_config(configs_path, config_name)
reload(spec_phase0)
reload(spec_phase1)
bls.use_milagro()
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='finality',
handler_name=handler_name,
src=tests_src,
fork_name=fork_name,
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__": if __name__ == "__main__":
# No additional phase 1 specific rewards tests, yet. phase_0_mods = {'finality': 'eth2spec.test.phase0.finality.test_finality'}
key = 'finality' # No additional lightclient_patch or phase 1 specific finality tests, yet.
gen_runner.run_generator("finality", [ lightclient_patch_mods = phase_0_mods
create_provider(PHASE0, 'finality', test_finality, 'minimal'), phase_1_mods = phase_0_mods
create_provider(PHASE0, 'finality', test_finality, 'mainnet'),
create_provider(PHASE1, 'finality', test_finality, 'minimal'), all_mods = {
create_provider(PHASE1, 'finality', test_finality, 'mainnet'), PHASE0: phase_0_mods,
]) LIGHTCLIENT_PATCH: lightclient_patch_mods,
PHASE1: phase_1_mods,
}
run_state_test_generators(runner_name="finality", specs=specs, all_mods=all_mods)

View File

@ -1,2 +1,2 @@
../../core/gen_helpers pytest>=4.4
../../../ ../../../

View File

@ -3,8 +3,8 @@ from typing import Iterable
from eth2spec.test.context import PHASE0 from eth2spec.test.context import PHASE0
from eth2spec.test.phase0.genesis import test_initialization, test_validity from eth2spec.test.phase0.genesis import test_initialization, test_validity
from gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests
from eth2spec.phase0 import spec as spec from eth2spec.phase0 import spec as spec
from importlib import reload from importlib import reload
from eth2spec.config import config_util from eth2spec.config import config_util

View File

@ -1,2 +1,2 @@
../../core/gen_helpers pytest>=4.4
../../../ ../../../

View File

@ -1,34 +1,11 @@
from typing import Iterable from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from importlib import reload, import_module
from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1 from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH
from eth2spec.utils import bls
def create_provider(fork_name: str, handler_name: str, specs = (spec_phase0, spec_lightclient_patch, spec_phase1)
tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
config_util.prepare_config(configs_path, config_name)
reload(spec_phase0)
reload(spec_phase1)
bls.use_milagro()
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
tests_src = import_module(tests_src_mod_name)
return generate_from_tests(
runner_name='operations',
handler_name=handler_name,
src=tests_src,
fork_name=fork_name,
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__": if __name__ == "__main__":
@ -40,6 +17,12 @@ if __name__ == "__main__":
'proposer_slashing', 'proposer_slashing',
'voluntary_exit', 'voluntary_exit',
]} ]}
lightclient_patch_mods = {
**{key: 'eth2spec.test.lightclient_patch.block_processing.test_process_' + key for key in [
'sync_committee',
]},
**phase_0_mods,
} # also run the previous phase 0 tests
phase_1_mods = {**{key: 'eth2spec.test.phase1.block_processing.test_process_' + key for key in [ phase_1_mods = {**{key: 'eth2spec.test.phase1.block_processing.test_process_' + key for key in [
'attestation', 'attestation',
'chunk_challenge', 'chunk_challenge',
@ -49,15 +32,10 @@ if __name__ == "__main__":
'shard_transition', 'shard_transition',
]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec)
gen_runner.run_generator(f"operations", [ all_mods = {
create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() PHASE0: phase_0_mods,
]) LIGHTCLIENT_PATCH: lightclient_patch_mods,
gen_runner.run_generator(f"operations", [ PHASE1: phase_1_mods,
create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() }
])
gen_runner.run_generator(f"operations", [ run_state_test_generators(runner_name="operations", specs=specs, all_mods=all_mods)
create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items()
])
gen_runner.run_generator(f"operations", [
create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items()
])

View File

@ -1,3 +1,2 @@
eth-utils==1.6.0 pytest>=4.4
../../core/gen_helpers
../../../ ../../../

View File

@ -1,34 +1,11 @@
from typing import Iterable from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from importlib import reload, import_module
from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1 from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH
from eth2spec.utils import bls
def create_provider(fork_name: str, handler_name: str, specs = (spec_phase0, spec_lightclient_patch, spec_phase1)
tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
config_util.prepare_config(configs_path, config_name)
reload(spec_phase0)
reload(spec_phase1)
bls.use_milagro()
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
tests_src = import_module(tests_src_mod_name)
return generate_from_tests(
runner_name='rewards',
handler_name=handler_name,
src=tests_src,
fork_name=fork_name,
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__": if __name__ == "__main__":
@ -37,18 +14,14 @@ if __name__ == "__main__":
'leak', 'leak',
'random', 'random',
]} ]}
# No additional phase 1 specific rewards tests, yet. # No additional lightclient_patch or phase 1 specific rewards tests, yet.
lightclient_patch_mods = phase_0_mods
phase_1_mods = phase_0_mods phase_1_mods = phase_0_mods
gen_runner.run_generator(f"rewards", [ all_mods = {
create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() PHASE0: phase_0_mods,
]) LIGHTCLIENT_PATCH: lightclient_patch_mods,
gen_runner.run_generator(f"rewards", [ PHASE1: phase_1_mods,
create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() }
])
gen_runner.run_generator(f"rewards", [ run_state_test_generators(runner_name="rewards", specs=specs, all_mods=all_mods)
create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items()
])
gen_runner.run_generator(f"rewards", [
create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items()
])

View File

@ -1,2 +1,2 @@
../../core/gen_helpers pytest>=4.4
../../../ ../../../

View File

@ -1,34 +1,12 @@
from typing import Iterable
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from importlib import reload, import_module
from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1 from eth2spec.test.context import PHASE0, PHASE1, LIGHTCLIENT_PATCH
from eth2spec.utils import bls
from eth2spec.gen_helpers.gen_from_tests.gen import run_state_test_generators
def create_provider(fork_name: str, handler_name: str, specs = (spec_phase0, spec_lightclient_patch, spec_phase1)
tests_src_mod_name: str, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
config_util.prepare_config(configs_path, config_name)
reload(spec_phase0)
reload(spec_phase1)
bls.use_milagro()
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
tests_src = import_module(tests_src_mod_name)
return generate_from_tests(
runner_name='sanity',
handler_name=handler_name,
src=tests_src,
fork_name=fork_name,
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__": if __name__ == "__main__":
@ -36,20 +14,18 @@ if __name__ == "__main__":
'blocks', 'blocks',
'slots', 'slots',
]} ]}
lightclient_patch_mods = {**{key: 'eth2spec.test.lightclient_patch.sanity.test_' + key for key in [
'blocks',
]}, **phase_0_mods} # also run the previous phase 0 tests
phase_1_mods = {**{key: 'eth2spec.test.phase1.sanity.test_' + key for key in [ phase_1_mods = {**{key: 'eth2spec.test.phase1.sanity.test_' + key for key in [
'blocks', # more phase 1 specific block tests 'blocks', # more phase 1 specific block tests
'shard_blocks', 'shard_blocks',
]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec) ]}, **phase_0_mods} # also run the previous phase 0 tests (but against phase 1 spec)
gen_runner.run_generator(f"sanity", [ all_mods = {
create_provider(PHASE0, key, mod_name, 'minimal') for key, mod_name in phase_0_mods.items() PHASE0: phase_0_mods,
]) LIGHTCLIENT_PATCH: lightclient_patch_mods,
gen_runner.run_generator(f"sanity", [ PHASE1: phase_1_mods,
create_provider(PHASE0, key, mod_name, 'mainnet') for key, mod_name in phase_0_mods.items() }
])
gen_runner.run_generator(f"sanity", [ run_state_test_generators(runner_name="sanity", specs=specs, all_mods=all_mods)
create_provider(PHASE1, key, mod_name, 'minimal') for key, mod_name in phase_1_mods.items()
])
gen_runner.run_generator(f"sanity", [
create_provider(PHASE1, key, mod_name, 'mainnet') for key, mod_name in phase_1_mods.items()
])

View File

@ -1,2 +1,2 @@
../../core/gen_helpers pytest>=4.4
../../../ ../../../

View File

@ -2,7 +2,7 @@ from eth_utils import to_tuple
from typing import Iterable from typing import Iterable
from importlib import reload from importlib import reload
from gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.config import config_util from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec from eth2spec.phase0 import spec as spec

View File

@ -1,3 +1,2 @@
eth-utils==1.6.0 pytest>=4.4
../../core/gen_helpers
../../../ ../../../

View File

@ -1,5 +1,5 @@
from typing import Iterable from typing import Iterable
from gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
import ssz_basic_vector import ssz_basic_vector
import ssz_bitlist import ssz_bitlist
import ssz_bitvector import ssz_bitvector

View File

@ -1,3 +1,2 @@
eth-utils==1.6.0 pytest>=4.4
../../core/gen_helpers
../../../ ../../../

View File

@ -3,13 +3,14 @@ from typing import Iterable
from importlib import reload from importlib import reload
from inspect import getmembers, isclass from inspect import getmembers, isclass
from gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.debug import random_value, encode from eth2spec.debug import random_value, encode
from eth2spec.config import config_util from eth2spec.config import config_util
from eth2spec.phase0 import spec as spec_phase0 from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1 from eth2spec.phase1 import spec as spec_phase1
from eth2spec.test.context import PHASE0, PHASE1 from eth2spec.lightclient_patch import spec as spec_lightclient_patch
from eth2spec.test.context import PHASE1, LIGHTCLIENT_PATCH, TESTGEN_FORKS, MINIMAL, MAINNET
from eth2spec.utils.ssz.ssz_typing import Container from eth2spec.utils.ssz.ssz_typing import Container
from eth2spec.utils.ssz.ssz_impl import ( from eth2spec.utils.ssz.ssz_impl import (
hash_tree_root, hash_tree_root,
@ -64,6 +65,7 @@ def create_provider(fork_name, config_name: str, seed: int, mode: random_value.R
config_util.prepare_config(configs_path, config_name) config_util.prepare_config(configs_path, config_name)
reload(spec_phase0) reload(spec_phase0)
reload(spec_phase1) reload(spec_phase1)
reload(spec_lightclient_patch)
return config_name return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]: def cases_fn() -> Iterable[gen_typing.TestCase]:
@ -71,6 +73,8 @@ def create_provider(fork_name, config_name: str, seed: int, mode: random_value.R
spec = spec_phase0 spec = spec_phase0
if fork_name == PHASE1: if fork_name == PHASE1:
spec = spec_phase1 spec = spec_phase1
if fork_name == LIGHTCLIENT_PATCH:
spec = spec_lightclient_patch
for (i, (name, ssz_type)) in enumerate(get_spec_ssz_types(spec)): for (i, (name, ssz_type)) in enumerate(get_spec_ssz_types(spec)):
yield from ssz_static_cases(fork_name, seed * 1000 + i, name, ssz_type, mode, chaos, count) yield from ssz_static_cases(fork_name, seed * 1000 + i, name, ssz_type, mode, chaos, count)
@ -83,14 +87,14 @@ if __name__ == "__main__":
settings = [] settings = []
seed = 1 seed = 1
for mode in random_value.RandomizationMode: for mode in random_value.RandomizationMode:
settings.append((seed, "minimal", mode, False, 30)) settings.append((seed, MINIMAL, mode, False, 30))
seed += 1 seed += 1
settings.append((seed, "minimal", random_value.RandomizationMode.mode_random, True, 30)) settings.append((seed, MINIMAL, random_value.RandomizationMode.mode_random, True, 30))
seed += 1 seed += 1
settings.append((seed, "mainnet", random_value.RandomizationMode.mode_random, False, 5)) settings.append((seed, MAINNET, random_value.RandomizationMode.mode_random, False, 5))
seed += 1 seed += 1
for fork in [PHASE0, PHASE1]: for fork in TESTGEN_FORKS:
gen_runner.run_generator("ssz_static", [ gen_runner.run_generator("ssz_static", [
create_provider(fork, config_name, seed, mode, chaos, cases_if_random) create_provider(fork, config_name, seed, mode, chaos, cases_if_random)
for (seed, config_name, mode, chaos, cases_if_random) in settings for (seed, config_name, mode, chaos, cases_if_random) in settings

View File

@ -1,2 +1,2 @@
../../core/gen_helpers pytest>=4.4
../../../ ../../../