Merge pull request #1333 from ethereum/v08x

v0.8.2 release into master
This commit is contained in:
Danny Ryan 2019-08-04 12:57:27 -06:00 committed by GitHub
commit 3e45bf4a67
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
86 changed files with 1750 additions and 1052 deletions

View File

@ -35,13 +35,13 @@ commands:
description: "Restore the cache with pyspec keys"
steps:
- restore_cached_venv:
venv_name: v3-pyspec-bump2
venv_name: v4-pyspec
reqs_checksum: cache-{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}
save_pyspec_cached_venv:
description: "Save a venv into a cache with pyspec keys"
steps:
- save_cached_venv:
venv_name: v3-pyspec-bump2
venv_name: v4-pyspec
reqs_checksum: cache-{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}
venv_path: ./test_libs/pyspec/venv
restore_deposit_contract_cached_venv:

View File

@ -2,17 +2,20 @@ SPEC_DIR = ./specs
SCRIPT_DIR = ./scripts
TEST_LIBS_DIR = ./test_libs
PY_SPEC_DIR = $(TEST_LIBS_DIR)/pyspec
YAML_TEST_DIR = ./eth2.0-spec-tests/tests
TEST_VECTOR_DIR = ./eth2.0-spec-tests/tests
GENERATOR_DIR = ./test_generators
DEPOSIT_CONTRACT_DIR = ./deposit_contract
CONFIGS_DIR = ./configs
# Collect a list of generator names
GENERATORS = $(sort $(dir $(wildcard $(GENERATOR_DIR)/*/)))
# Map this list of generator paths to a list of test output paths
YAML_TEST_TARGETS = $(patsubst $(GENERATOR_DIR)/%, $(YAML_TEST_DIR)/%, $(GENERATORS))
GENERATORS = $(sort $(dir $(wildcard $(GENERATOR_DIR)/*/.)))
# Map this list of generator paths to "gen_{generator name}" entries
GENERATOR_TARGETS = $(patsubst $(GENERATOR_DIR)/%/, gen_%, $(GENERATORS))
GENERATOR_VENVS = $(patsubst $(GENERATOR_DIR)/%, $(GENERATOR_DIR)/%venv, $(GENERATORS))
# To check generator matching:
#$(info $$GENERATOR_TARGETS is [${GENERATOR_TARGETS}])
PY_SPEC_PHASE_0_TARGETS = $(PY_SPEC_DIR)/eth2spec/phase0/spec.py
PY_SPEC_PHASE_0_DEPS = $(SPEC_DIR)/core/0_*.md
@ -24,14 +27,14 @@ PY_SPEC_ALL_TARGETS = $(PY_SPEC_PHASE_0_TARGETS) $(PY_SPEC_PHASE_1_TARGETS)
COV_HTML_OUT=.htmlcov
COV_INDEX_FILE=$(PY_SPEC_DIR)/$(COV_HTML_OUT)/index.html
.PHONY: clean all test citest lint gen_yaml_tests pyspec phase0 phase1 install_test open_cov \
.PHONY: clean partial_clean all test citest lint generate_tests pyspec phase0 phase1 install_test open_cov \
install_deposit_contract_test test_deposit_contract compile_deposit_contract
all: $(PY_SPEC_ALL_TARGETS) $(YAML_TEST_DIR) $(YAML_TEST_TARGETS)
all: $(PY_SPEC_ALL_TARGETS)
# deletes everything except the venvs
partial_clean:
rm -rf $(YAML_TEST_DIR)
rm -rf $(TEST_VECTOR_DIR)
rm -rf $(GENERATOR_VENVS)
rm -rf $(PY_SPEC_DIR)/.pytest_cache
rm -rf $(PY_SPEC_ALL_TARGETS)
@ -44,8 +47,8 @@ clean: partial_clean
rm -rf $(PY_SPEC_DIR)/venv
rm -rf $(DEPOSIT_CONTRACT_DIR)/venv
# "make gen_yaml_tests" to run generators
gen_yaml_tests: $(PY_SPEC_ALL_TARGETS) $(YAML_TEST_TARGETS)
# "make generate_tests" to run all generators
generate_tests: $(PY_SPEC_ALL_TARGETS) $(GENERATOR_TARGETS)
# installs the packages to run pyspec tests
install_test:
@ -90,8 +93,8 @@ $(PY_SPEC_DIR)/eth2spec/phase1/spec.py: $(PY_SPEC_PHASE_1_DEPS)
CURRENT_DIR = ${CURDIR}
# The function that builds a set of suite files, by calling a generator for the given type (param 1)
define build_yaml_tests
# Runs a generator, identified by param 1
define run_generator
# Started!
# Create output directory
# Navigate to the generator
@ -100,24 +103,25 @@ define build_yaml_tests
# Install all the necessary requirements
# Run the generator. The generator is assumed to have a "main.py" file.
# We output to the tests dir (the generator program should accept a "-o <filepath>" argument).
# `-l minimal general` can be added to the generator call to filter to smaller configs, when testing.
echo "generator $(1) started"; \
mkdir -p $(YAML_TEST_DIR)$(1); \
cd $(GENERATOR_DIR)$(1); \
mkdir -p $(TEST_VECTOR_DIR); \
cd $(GENERATOR_DIR)/$(1); \
if ! test -d venv; then python3 -m venv venv; fi; \
. venv/bin/activate; \
pip3 install -r requirements.txt; \
python3 main.py -o $(CURRENT_DIR)/$(YAML_TEST_DIR)$(1) -c $(CURRENT_DIR)/$(CONFIGS_DIR); \
python3 main.py -o $(CURRENT_DIR)/$(TEST_VECTOR_DIR) -c $(CURRENT_DIR)/$(CONFIGS_DIR); \
echo "generator $(1) finished"
endef
# The tests dir itself is simply built by creating the directory (recursively creating deeper directories if necessary)
$(YAML_TEST_DIR):
$(info creating directory, to output yaml targets to: ${YAML_TEST_TARGETS})
$(TEST_VECTOR_DIR):
$(info creating test output directory, for generators: ${GENERATOR_TARGETS})
mkdir -p $@
$(YAML_TEST_DIR)/:
$(info ignoring duplicate yaml tests dir)
$(TEST_VECTOR_DIR)/:
$(info ignoring duplicate tests dir)
# For any target within the tests dir, build it using the build_yaml_tests function.
# For any generator, build it using the run_generator function.
# (creation of output dir is a dependency)
$(YAML_TEST_DIR)%: $(PY_SPEC_ALL_TARGETS) $(YAML_TEST_DIR)
$(call build_yaml_tests,$*)
gen_%: $(PY_SPEC_ALL_TARGETS) $(TEST_VECTOR_DIR)
$(call run_generator,$*)

View File

@ -21,6 +21,12 @@ Core specifications for Eth 2.0 client validation can be found in [specs/core](s
* [Custody Game](specs/core/1_custody-game.md)
* [Shard Data Chains](specs/core/1_shard-data-chains.md)
### Phase 2
Phase 2 is still actively in R&D and does not yet have any formal specifications.
See the [Eth 2.0 Phase 2 Wiki](https://hackmd.io/UzysWse1Th240HELswKqVA?view) for current progress, discussions, and definitions regarding this work.
### Accompanying documents can be found in [specs](specs) and include:
* [SimpleSerialize (SSZ) spec](specs/simple-serialize.md)

36
configs/README.md Normal file
View File

@ -0,0 +1,36 @@
# Configs
This directory contains a set of constants presets used for testing, testnets, and mainnet.
A preset file contains all the constants known for its target.
Later-fork constants can be ignored, e.g. ignore phase1 constants as a client that only supports phase 0 currently.
## Forking
Configs are not replaced, but extended with forks. This is to support syncing from one state to the other over a fork boundary, without hot-swapping a config.
Instead, for forks that introduce changes in a constant, the constant name is prefixed with a short abbreviation of the fork.
Over time, the need to sync an older state may be deprecated.
In this case, the prefix on the new constant may be removed, and the old constant will keep a special name before completely being removed.
A previous iteration of forking made use of "timelines", but this collides with the definitions used in the spec (constants for special forking slots etc.),
and was not integrated sufficiently in any of the spec tools or implementations.
Instead, the config essentially doubles as fork definition now, changing the value for e.g. `PHASE_1_GENESIS_SLOT` changes the fork.
Another reason to prefer forking through constants is the ability to program a forking moment based on context, instead of being limited to a static slot number.
## Format
Each preset is a key-value mapping.
**Key**: an `UPPER_SNAKE_CASE` (a.k.a. "macro case") formatted string, name of the constant.
**Value** can be either:
- an unsigned integer number, can be up to 64 bits (incl.)
- a hexadecimal string, prefixed with `0x`
Presets may contain comments to describe the values.
See [`mainnet.yaml`](./mainnet.yaml) for a complete example.

View File

@ -1,20 +0,0 @@
# Constant Presets
This directory contains a set of constants presets used for testing, testnets, and mainnet.
A preset file contains all the constants known for its target.
Later-fork constants can be ignored, e.g. ignore phase1 constants as a client that only supports phase 0 currently.
## Format
Each preset is a key-value mapping.
**Key**: an `UPPER_SNAKE_CASE` (a.k.a. "macro case") formatted string, name of the constant.
**Value** can be either:
- an unsigned integer number, can be up to 64 bits (incl.)
- a hexadecimal string, prefixed with `0x`
Presets may contain comments to describe the values.
See [`mainnet.yaml`](./mainnet.yaml) for a complete example.

View File

@ -1,19 +0,0 @@
# Fork timelines
This directory contains a set of fork timelines used for testing, testnets, and mainnet.
A timeline file contains all the forks known for its target.
Later forks can be ignored, e.g. ignore fork `phase1` as a client that only supports Phase 0 currently.
## Format
Each preset is a key-value mapping.
**Key**: an `lower_snake_case` (a.k.a. "python case") formatted string, name of the fork.
**Value**: an unsigned integer number, epoch number of activation of the fork.
Timelines may contain comments to describe the values.
See [`mainnet.yaml`](./mainnet.yaml) for a complete example.

View File

@ -1,12 +0,0 @@
# Mainnet fork timeline
# Equal to GENESIS_EPOCH
phase0: 67108864
# Example 1:
# phase0_funny_fork_name: 67116000
# Example 2:
# Should be equal to PHASE_1_FORK_EPOCH
# (placeholder in example value here)
# phase1: 67163000

View File

@ -1,6 +0,0 @@
# Testing fork timeline
# Equal to GENESIS_EPOCH
phase0: 536870912
# No other forks considered in testing yet (to be implemented)

View File

@ -128,3 +128,6 @@ DOMAIN_ATTESTATION: 0x02000000
DOMAIN_DEPOSIT: 0x03000000
DOMAIN_VOLUNTARY_EXIT: 0x04000000
DOMAIN_TRANSFER: 0x05000000
DOMAIN_CUSTODY_BIT_CHALLENGE: 0x06000000
DOMAIN_SHARD_PROPOSER: 0x80000000
DOMAIN_SHARD_ATTESTER: 0x81000000

View File

@ -125,4 +125,7 @@ DOMAIN_RANDAO: 0x01000000
DOMAIN_ATTESTATION: 0x02000000
DOMAIN_DEPOSIT: 0x03000000
DOMAIN_VOLUNTARY_EXIT: 0x04000000
DOMAIN_TRANSFER: 0x05000000
DOMAIN_TRANSFER: 0x05000000
DOMAIN_CUSTODY_BIT_CHALLENGE: 0x06000000
DOMAIN_SHARD_PROPOSER: 0x80000000
DOMAIN_SHARD_ATTESTER: 0x81000000

View File

@ -49,7 +49,7 @@ from eth2spec.utils.ssz.ssz_impl import (
hash_tree_root,
signing_root,
serialize,
is_empty,
is_zero,
)
from eth2spec.utils.ssz.ssz_typing import (
bit, boolean, Container, List, Vector, Bytes, uint64,

View File

@ -207,6 +207,7 @@ The following values are (non-configurable) constants used throughout the specif
| Name | Value | Unit | Duration |
| - | - | :-: | :-: |
| `SECONDS_PER_SLOT` | `6` | seconds | 6 seconds |
| `MIN_ATTESTATION_INCLUSION_DELAY` | `2**0` (= 1) | slots | 6 seconds |
| `SLOTS_PER_EPOCH` | `2**6` (= 64) | slots | 6.4 minutes |
| `MIN_SEED_LOOKAHEAD` | `2**0` (= 1) | epochs | 6.4 minutes |
@ -1270,7 +1271,7 @@ def get_unslashed_attesting_indices(state: BeaconState,
output = set() # type: Set[ValidatorIndex]
for a in attestations:
output = output.union(get_attesting_indices(state, a.data, a.aggregation_bits))
return set(filter(lambda index: not state.validators[index].slashed, list(output)))
return set(filter(lambda index: not state.validators[index].slashed, output))
```
```python
@ -1283,10 +1284,10 @@ def get_winning_crosslink_and_attesting_indices(state: BeaconState,
epoch: Epoch,
shard: Shard) -> Tuple[Crosslink, Set[ValidatorIndex]]:
attestations = [a for a in get_matching_source_attestations(state, epoch) if a.data.crosslink.shard == shard]
crosslinks = list(filter(
crosslinks = filter(
lambda c: hash_tree_root(state.current_crosslinks[shard]) in (c.parent_root, hash_tree_root(c)),
[a.data.crosslink for a in attestations]
))
)
# Winning crosslink has the crosslink data root with the most balance voting for it (ties broken lexicographically)
winning_crosslink = max(crosslinks, key=lambda c: (
get_attesting_balance(state, [a for a in attestations if a.data.crosslink == c]), c.data_root
@ -1503,8 +1504,6 @@ def process_final_updates(state: BeaconState) -> None:
HALF_INCREMENT = EFFECTIVE_BALANCE_INCREMENT // 2
if balance < validator.effective_balance or validator.effective_balance + 3 * HALF_INCREMENT < balance:
validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE)
# Update start shard
state.start_shard = Shard((state.start_shard + get_shard_delta(state, current_epoch)) % SHARD_COUNT)
# Set active index root
index_epoch = Epoch(next_epoch + ACTIVATION_EXIT_DELAY)
index_root_position = index_epoch % EPOCHS_PER_HISTORICAL_VECTOR
@ -1521,6 +1520,8 @@ def process_final_updates(state: BeaconState) -> None:
if next_epoch % (SLOTS_PER_HISTORICAL_ROOT // SLOTS_PER_EPOCH) == 0:
historical_batch = HistoricalBatch(block_roots=state.block_roots, state_roots=state.state_roots)
state.historical_roots.append(hash_tree_root(historical_batch))
# Update start shard
state.start_shard = Shard((state.start_shard + get_shard_delta(state, current_epoch)) % SHARD_COUNT)
# Rotate current/previous epoch attestations
state.previous_epoch_attestations = state.current_epoch_attestations
state.current_epoch_attestations = []
@ -1653,6 +1654,9 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None:
attestation_slot = get_attestation_data_slot(state, data)
assert attestation_slot + MIN_ATTESTATION_INCLUSION_DELAY <= state.slot <= attestation_slot + SLOTS_PER_EPOCH
committee = get_crosslink_committee(state, data.target.epoch, data.crosslink.shard)
assert len(attestation.aggregation_bits) == len(attestation.custody_bits) == len(committee)
pending_attestation = PendingAttestation(
data=data,
aggregation_bits=attestation.aggregation_bits,

View File

@ -8,8 +8,6 @@
- [Ethereum 2.0 Phase 0 -- Beacon Chain Fork Choice](#ethereum-20-phase-0----beacon-chain-fork-choice)
- [Table of contents](#table-of-contents)
- [Introduction](#introduction)
- [Configuration](#configuration)
- [Time parameters](#time-parameters)
- [Fork choice](#fork-choice)
- [Helpers](#helpers)
- [`LatestMessage`](#latestmessage)
@ -29,14 +27,6 @@
This document is the beacon chain fork choice spec, part of Ethereum 2.0 Phase 0. It assumes the [beacon chain state transition function spec](./0_beacon-chain.md).
## Configuration
### Time parameters
| Name | Value | Unit | Duration |
| - | - | :-: | :-: |
| `SECONDS_PER_SLOT` | `6` | seconds | 6 seconds |
## Fork choice
The head block root associated with a `store` is defined as `get_head(store)`. At genesis, let `store = get_genesis_store(genesis_state)` and update `store` by running:
@ -101,8 +91,12 @@ def get_genesis_store(genesis_state: BeaconState) -> Store:
```python
def get_ancestor(store: Store, root: Hash, slot: Slot) -> Hash:
block = store.blocks[root]
assert block.slot >= slot
return root if block.slot == slot else get_ancestor(store, block.parent_root, slot)
if block.slot > slot:
return get_ancestor(store, block.parent_root, slot)
elif block.slot == slot:
return root
else:
return Bytes32() # root is older than queried slot: no results.
```
#### `get_latest_attesting_balance`

View File

@ -15,7 +15,7 @@
- [Time parameters](#time-parameters)
- [Max operations per block](#max-operations-per-block)
- [Reward and penalty quotients](#reward-and-penalty-quotients)
- [Signature domains](#signature-domains)
- [Signature domain types](#signature-domain-types)
- [TODO PLACEHOLDER](#todo-placeholder)
- [Data structures](#data-structures)
- [Custody objects](#custody-objects)
@ -156,7 +156,7 @@ class CustodyChunkChallengeRecord(Container):
challenger_index: ValidatorIndex
responder_index: ValidatorIndex
inclusion_epoch: Epoch
data_root: Bytes32
data_root: Hash
depth: uint64
chunk_index: uint64
```
@ -169,9 +169,9 @@ class CustodyBitChallengeRecord(Container):
challenger_index: ValidatorIndex
responder_index: ValidatorIndex
inclusion_epoch: Epoch
data_root: Bytes32
data_root: Hash
chunk_count: uint64
chunk_bits_merkle_root: Bytes32
chunk_bits_merkle_root: Hash
responder_key: BLSSignature
```
@ -182,9 +182,9 @@ class CustodyResponse(Container):
challenge_index: uint64
chunk_index: uint64
chunk: Vector[Bytes[PLACEHOLDER], BYTES_PER_CUSTODY_CHUNK]
data_branch: List[Bytes32, PLACEHOLDER]
chunk_bits_branch: List[Bytes32, PLACEHOLDER]
chunk_bits_leaf: Bytes32
data_branch: List[Hash, PLACEHOLDER]
chunk_bits_branch: List[Hash, PLACEHOLDER]
chunk_bits_leaf: Hash
```
### New beacon operations
@ -296,7 +296,7 @@ def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool:
### `get_chunk_bits_root`
```python
def get_chunk_bits_root(chunk_bits: bytes) -> Bytes32:
def get_chunk_bits_root(chunk_bits: bytes) -> Hash:
aggregated_bits = bytearray([0] * 32)
for i in range(0, len(chunk_bits), 32):
for j in range(32):
@ -328,7 +328,7 @@ def get_reveal_period(state: BeaconState, validator_index: ValidatorIndex, epoch
```python
def replace_empty_or_append(list: MutableSequence[Any], new_element: Any) -> int:
for i in range(len(list)):
if is_empty(list[i]):
if is_zero(list[i]):
list[i] = new_element
return i
list.append(new_element)

View File

@ -13,7 +13,7 @@
- [Misc](#misc)
- [Initial values](#initial-values)
- [Time parameters](#time-parameters)
- [Signature domains](#signature-domains)
- [Signature domain types](#signature-domain-types)
- [TODO PLACEHOLDER](#todo-placeholder)
- [Data structures](#data-structures)
- [`ShardBlockBody`](#shardblockbody)
@ -61,7 +61,6 @@ This document describes the shard data layer and the shard fork choice rule in P
| Name | Value | Unit | Duration |
| - | - | :-: | :-: |
| `CROSSLINK_LOOKBACK` | `2**0` (= 1) | epochs | 6.4 minutes |
| `PERSISTENT_COMMITTEE_PERIOD` | `2**11` (= 2,048) | epochs | ~9 days |
### Signature domain types
@ -94,7 +93,7 @@ class ShardAttestation(Container):
class data(Container):
slot: Slot
shard: Shard
shard_block_root: Bytes32
shard_block_root: Hash
aggregation_bits: Bitlist[PLACEHOLDER]
aggregate_signature: BLSSignature
```
@ -105,10 +104,10 @@ class ShardAttestation(Container):
class ShardBlock(Container):
slot: Slot
shard: Shard
beacon_chain_root: Bytes32
parent_root: Bytes32
beacon_chain_root: Hash
parent_root: Hash
data: ShardBlockBody
state_root: Bytes32
state_root: Hash
attestations: List[ShardAttestation, PLACEHOLDER]
signature: BLSSignature
```
@ -119,10 +118,10 @@ class ShardBlock(Container):
class ShardBlockHeader(Container):
slot: Slot
shard: Shard
beacon_chain_root: Bytes32
parent_root: Bytes32
body_root: Bytes32
state_root: Bytes32
beacon_chain_root: Hash
parent_root: Hash
body_root: Hash
state_root: Hash
attestations: List[ShardAttestation, PLACEHOLDER]
signature: BLSSignature
```
@ -250,7 +249,7 @@ def verify_shard_attestation_signature(state: BeaconState,
### `compute_crosslink_data_root`
```python
def compute_crosslink_data_root(blocks: Sequence[ShardBlock]) -> Bytes32:
def compute_crosslink_data_root(blocks: Sequence[ShardBlock]) -> Hash:
def is_power_of_two(value: uint64) -> bool:
return (value > 0) and (value & (value - 1) == 0)
@ -259,7 +258,7 @@ def compute_crosslink_data_root(blocks: Sequence[ShardBlock]) -> Bytes32:
values.append(b'\x00' * BYTES_PER_SHARD_BLOCK_BODY)
return values
def hash_tree_root_of_bytes(data: bytes) -> bytes:
def hash_tree_root_of_bytes(data: bytes) -> Hash:
return hash_tree_root([data[i:i + 32] for i in range(0, len(data), 32)])
def zpad(data: bytes, length: uint64) -> bytes:

View File

@ -14,7 +14,7 @@
- [Variable-size and fixed-size](#variable-size-and-fixed-size)
- [Aliases](#aliases)
- [Default values](#default-values)
- [`is_empty`](#is_empty)
- [`is_zero`](#is_zero)
- [Illegal types](#illegal-types)
- [Serialization](#serialization)
- [`uintN`](#uintn)
@ -75,19 +75,21 @@ For convenience we alias:
* `bit` to `boolean`
* `byte` to `uint8` (this is a basic type)
* `BytesN` to `Vector[byte, N]` (this is *not* a basic type)
* `null`: `{}`, i.e. the empty container
* `null`: `{}`
### Default values
The default value of a type upon initialization is recursively defined using `0` for `uintN`, `False` for `boolean` and the elements of `Bitvector`, and `[]` for lists and `Bitlist`. Unions default to the first type in the union (with type index zero), which is `null` if present in the union.
#### `is_empty`
#### `is_zero`
An SSZ object is called empty (and thus, `is_empty(object)` returns true) if it is equal to the default value for that type.
An SSZ object is called zeroed (and thus, `is_zero(object)` returns true) if it is equal to the default value for that type.
### Illegal types
Empty vector types (i.e. `[subtype, 0]` for some `subtype`) are not legal. The `null` type is only legal as the first type in a union subtype (i.e. with type index zero).
- Empty vector types (`Vector[type, 0]`, `Bitvector[0]`) are illegal.
- Containers with no fields are illegal.
- The `null` type is only legal as the first type in a union subtype (i.e. with type index zero).
## Serialization
@ -187,7 +189,7 @@ We first define helper functions:
* `List[B, N]` and `Vector[B, N]`, where `B` is a basic type: `(N * size_of(B) + 31) // 32` (dividing by chunk size, rounding up)
* `List[C, N]` and `Vector[C, N]`, where `C` is a composite type: `N`
* containers: `len(fields)`
* `bitfield_bytes(bits)`: return the bits of the bitlist or bitvector, packed in bytes, aligned to the start. Exclusive length-delimiting bit for bitlists.
* `bitfield_bytes(bits)`: return the bits of the bitlist or bitvector, packed in bytes, aligned to the start. Length-delimiting bit for bitlists is excluded.
* `pack`: Given ordered objects of the same basic type, serialize them, pack them into `BYTES_PER_CHUNK`-byte chunks, right-pad the last chunk with zero bytes, and return the chunks.
* `next_pow_of_two(i)`: get the next power of 2 of `i`, if not already a power of 2, with 0 mapping to 1. Examples: `0->1, 1->1, 2->2, 3->4, 4->4, 6->8, 9->16`
* `merkleize(chunks, limit=None)`: Given ordered `BYTES_PER_CHUNK`-byte chunks, merkleize the chunks, and return the root:

View File

@ -5,21 +5,25 @@ This document defines the YAML format and structure used for Eth 2.0 testing.
## Table of contents
<!-- TOC -->
- [General test format](#general-test-format)
- [Table of contents](#table-of-contents)
- [About](#about)
- [Test-case formats](#test-case-formats)
- [Glossary](#glossary)
- [Test format philosophy](#test-format-philosophy)
- [Config design](#config-design)
- [Fork config design](#fork-config-design)
- [Test completeness](#test-completeness)
- [Test suite](#test-suite)
- [Config](#config)
- [Fork-timeline](#fork-timeline)
- [Config sourcing](#config-sourcing)
- [Test structure](#test-structure)
- [Note for implementers](#note-for-implementers)
* [About](#about)
+ [Test-case formats](#test-case-formats)
* [Glossary](#glossary)
* [Test format philosophy](#test-format-philosophy)
+ [Config design](#config-design)
+ [Test completeness](#test-completeness)
* [Test structure](#test-structure)
+ [`<config name>/`](#--config-name---)
+ [`<fork or phase name>/`](#--fork-or-phase-name---)
+ [`<test runner name>/`](#--test-runner-name---)
+ [`<test handler name>/`](#--test-handler-name---)
+ [`<test suite name>/`](#--test-suite-name---)
+ [`<test case>/`](#--test-case---)
+ [`<output part>`](#--output-part--)
- [Special output parts](#special-output-parts)
* [`meta.yaml`](#-metayaml-)
* [Config](#config)
* [Config sourcing](#config-sourcing)
* [Note for implementers](#note-for-implementers)
<!-- /TOC -->
@ -42,30 +46,29 @@ Test formats:
- [`ssz_static`](./ssz_static/README.md)
- More formats are planned, see tracking issues for CI/testing
## Glossary
- `generator`: a program that outputs one or more `suite` files.
- A generator should only output one `type` of test.
- A generator is free to output multiple `suite` files, optionally with different `handler`s.
- `type`: the specialization of one single `generator`.
- `suite`: a YAML file with:
- a header: describes the `suite`, and defines what the `suite` is for
- a list of test cases
- `generator`: a program that outputs one or more test-cases, each organized into a `config > runner > handler > suite` hierarchy.
- `config`: tests are grouped by configuration used for spec presets. In addition to the standard configurations,
`general` may be used as a catch-all for tests not restricted to one configuration. (E.g. BLS).
- `type`: the specialization of one single `generator`. E.g. epoch processing.
- `runner`: where a generator is a *"producer"*, this is the *"consumer"*.
- A `runner` focuses on *only one* `type`, and each type has *only one* `runner`.
- `handler`: a `runner` may be too limited sometimes, you may have a `suite` with a specific focus that requires a different format.
- `handler`: a `runner` may be too limited sometimes, you may have a set of tests with a specific focus that requires a different format.
To facilitate this, you specify a `handler`: the runner can deal with the format by using the specified handler.
Using a `handler` in a `runner` is optional.
- `case`: a test case, an entry in the `test_cases` list of a `suite`. A case can be anything in general,
but its format should be well-defined in the documentation corresponding to the `type` (and `handler`).\
A test has the same exact configuration and fork context as the other entries in the `case` list of its `suite`.
- `forks_timeline`: a fork timeline definition, a YAML file containing a key for each fork-name, and an epoch number as value.
- `suite`: a directory containing test cases that are coherent. Each `suite` under the same `handler` shares the same format.
This is an organizational/cosmetic hierarchy layer.
- `case`: a test case, a directory in a `suite`. A case can be anything in general,
but its format should be well-defined in the documentation corresponding to the `type` (and `handler`).
- `case part`: a test case consists of different files, possibly in different formats, to facilitate the specific test case format better.
Optionally, a `meta.yaml` is included to declare meta-data for the test, e.g. BLS requirements.
## Test format philosophy
### Config design
After long discussion, the following types of configured constants were identified:
The configuration constant types are:
- Never changing: genesis data.
- Changing, but reliant on old value: e.g. an epoch time may change, but if you want to do the conversion
`(genesis data, timestamp) -> epoch number`, you end up needing both constants.
@ -75,26 +78,12 @@ After long discussion, the following types of configured constants were identifi
- Changing: there is a very small chance some constant may really be *replaced*.
In this off-chance, it is likely better to include it as an additional variable,
and some clients may simply stop supporting the old one if they do not want to sync from genesis.
The change of functionality goes through a phase of deprecation of the old constant, and eventually only the new constant is kept around in the config (when old state is not supported anymore).
Based on these types of changes, we model the config as a list of key value pairs,
that only grows with every fork (they may change in development versions of forks, however; git manages this).
With this approach, configurations are backwards compatible (older clients ignore unknown variables) and easy to maintain.
### Fork config design
There are two types of fork-data:
1) Timeline: When does a fork take place?
2) Coverage: What forks are covered by a test?
The first is neat to have as a separate form: we prevent duplication, and can run with different presets
(e.g. fork timeline for a minimal local test, for a public testnet, or for mainnet).
The second does not affect the result of the tests, it just states what is covered by the tests,
so that the right suites can be executed to see coverage for a certain fork.
For some types of tests, it may be beneficial to ensure it runs exactly the same, with any given fork "active".
Test-formats can be explicit on the need to repeat a test with different forks being "active",
but generally tests run only once.
### Test completeness
Tests should be independent of any sync-data. If one wants to run a test, the input data should be available from the YAML.
@ -104,93 +93,68 @@ The aim is to provide clients with a well-defined scope of work to run a particu
- Clients that are not complete in functionality can choose to ignore suites that use certain test-runners, or specific handlers of these test-runners.
- Clients that are on older versions can test their work based on older releases of the generated tests, and catch up with newer releases when possible.
## Test suite
```
title: <string, short, one line> -- Display name for the test suite
summary: <string, average, 1-3 lines> -- Summarizes the test suite
forks_timeline: <string, reference to a fork definition file, without extension> -- Used to determine the forking timeline
forks: <list of strings> -- Defines the coverage. Test-runner code may decide to re-run with the different forks "activated", when applicable.
config: <string, reference to a config file, without extension> -- Used to determine which set of constants to run (possibly compile time) with
runner: <string, no spaces, python-like naming format> *MUST be consistent with folder structure*
handler: <string, no spaces, python-like naming format> *MUST be consistent with folder structure*
test_cases: <list, values being maps defining a test case each>
...
```
## Config
A configuration is a separate YAML file.
Separation of configuration and tests aims to:
- Prevent duplication of configuration
- Make all tests easy to upgrade (e.g. when a new config constant is introduced)
- Clearly define which constants to use
- Shareable between clients, for cross-client short- or long-lived testnets
- Minimize the amounts of different constants permutations to compile as a client.
*Note*: Some clients prefer compile-time constants and optimizations.
They should compile for each configuration once, and run the corresponding tests per build target.
The format is described in [`configs/constant_presets`](../../configs/constant_presets/README.md#format).
## Fork-timeline
A fork timeline is (preferably) loaded in as a configuration object into a client, as opposed to the constants configuration:
- We do not allocate or optimize any code based on epoch numbers.
- When we transition from one fork to the other, it is preferred to stay online.
- We may decide on an epoch number for a fork based on external events (e.g. Eth1 log event);
a client should be able to activate a fork dynamically.
The format is described in [`configs/fork_timelines`](../../configs/fork_timelines/README.md#format).
## Config sourcing
The constants configurations are located in:
```
<specs repo root>/configs/constant_presets/<config name>.yaml
```
And copied by CI for testing purposes to:
```
<tests repo root>/configs/constant_presets/<config name>.yaml
```
The fork timelines are located in:
```
<specs repo root>/configs/fork_timelines/<timeline name>.yaml
```
And copied by CI for testing purposes to:
```
<tests repo root>/configs/fork_timelines/<timeline name>.yaml
```
## Test structure
To prevent parsing of hundreds of different YAML files to test a specific test type —
or, even more specifically, just a single handler — tests should be structured in the following nested form:
```
. <--- root of eth2.0 tests repository
├── bls <--- collection of handler for a specific test-runner, example runner: "bls"
│   ├── verify_msg <--- collection of test suites for a specific handler, example handler: "verify_msg". If no multiple handlers, use a dummy folder (e.g. "core"), and specify that in the yaml.
│   │   ├── verify_valid.yml .
│   │   ├── special_cases.yml . a list of test suites
│   │   ├── domains.yml .
│   │   ├── invalid.yml .
│   │   ... <--- more suite files (optional)
│   ... <--- more handlers
... <--- more test types
File path structure:
tests/<config name>/<fork or phase name>/<test runner name>/<test handler name>/<test suite name>/<test case>/<output part>
```
## Common test-case properties
### `<config name>/`
Configs are at the top level. Some clients want to run the minimal config first, which is also useful for sanity checks during development.
As a top-level directory it is not duplicated, and the used config can be copied right into this directory as a reference.
### `<fork or phase name>/`
This would be: "phase0", "transferparty", "phase1", etc. Each introduces new tests, but does not copy tests that do not change.
If you like to test phase 1, you run phase 0 tests, with the configuration that includes phase 1 changes. Out of scope for now however.
### `<test runner name>/`
The well known bls/shuffling/ssz_static/operations/epoch_processing/etc. Handlers can change the format, but there is a general target to test.
### `<test handler name>/`
Specialization within category. All suites in here will have the same test case format.
Using a `handler` in a `runner` is optional. A `core` (or other generic) handler may be used if the `runner` does not have different formats.
### `<test suite name>/`
Suites are split up. Suite size (i.e. the amount of tests) does not change the maximum memory requirement, as test cases can be loaded one by one.
This also makes filtered sets of tests fast and easy to load.
### `<test case>/`
Cases are split up too. This enables diffing of parts of the test case, tracking changes per part, while still using LFS. Also enables different formats for some parts.
### `<output part>`
E.g. `pre.yaml`, `deposit.yaml`, `post.yaml`.
Diffing a `pre.yaml` and `post.yaml` provides all the information for testing, good for readability of the change.
Then the difference between pre and post can be compared to anything that changes the pre state, e.g. `deposit.yaml`
These files allow for custom formats for some parts of the test. E.g. something encoded in SSZ.
Some yaml files have copies, but formatted as raw SSZ bytes: `pre.ssz`, `deposit.ssz`, `post.ssz`.
The yaml files are intended to be deprecated, and clients should shift to ssz inputs for efficiency.
Deprecation will start once a viewer of SSZ test-cases is in place, to maintain a standard of readable test cases.
This also means that some clients can drop legacy YAML -> JSON/other -> SSZ work-arounds.
(These were implemented to support the uint64 YAML, hex strings, etc. Things that were not idiomatic to their language.)
Yaml will not be deprecated for tests that do not use SSZ: e.g. shuffling and BLS tests.
In this case, there is no work around for loading necessary anyway, and the size and efficiency of yaml is acceptable.
#### Special output parts
##### `meta.yaml`
If present (it is optional), the test is enhanced with extra data to describe usage. Specialized data is described in the documentation of the specific test format.
Common data is documented here:
Some test-case formats share some common key-value pair patterns, and these are documented here:
@ -203,22 +167,52 @@ bls_setting: int -- optional, can have 3 different values:
2: known as "BLS ignored" - if the test validity is strictly dependent on BLS being OFF
```
## Config
A configuration is a separate YAML file.
Separation of configuration and tests aims to:
- Prevent duplication of configuration
- Make all tests easy to upgrade (e.g. when a new config constant is introduced)
- Clearly define which constants to use
- Be easily shareable between clients, for cross-client short- or long-lived testnets
- Minimize the amount of different constants permutations to compile as a client.
*Note*: Some clients prefer compile-time constants and optimizations.
They should compile for each configuration once, and run the corresponding tests per build target.
- Include constants to coordinate forking with
The format is described in [`/configs`](../../configs/README.md#format).
## Config sourcing
The constants configurations are located in:
```
<specs repo root>/configs/<config name>.yaml
```
And copied by CI for testing purposes to:
```
<tests repo root>/tests/<config name>/<config name>.yaml
```
The first `<config name>` is a directory, which contains exactly all tests that make use of the given config.
## Note for implementers
The basic pattern for test-suite loading and running is:
Iterate suites for given test-type, or sub-type (e.g. `operations > deposits`):
1. Filter test-suite, options:
- Config: Load first few lines, load into YAML, and check `config`, either:
- Pass the suite to the correct compiled target
- Ignore the suite if running tests as part of a compiled target with different configuration
- Load the correct configuration for the suite dynamically before running the suite
- Select by file name
- Filter for specific suites (e.g. for a specific fork)
2. Load the YAML
- Optionally translate the data into applicable naming, e.g. `snake_case` to `PascalCase`
3. Iterate through the `test_cases`
4. Ask test-runner to allocate a new test-case (i.e. objectify the test-case, generalize it with a `TestCase` interface)
Optionally pass raw test-case data to enable dynamic test-case allocation.
1. Load test-case data into it.
2. Make the test-case run.
1. For a specific config, load it first (and only need to do so once),
then continue with the tests defined in the config folder.
2. Select a fork. Repeat for each fork if running tests for multiple forks.
3. Select the category and specialization of interest (e.g. `operations > deposits`). Again, repeat for each if running all.
4. Select a test suite. Or repeat for each.
5. Select a test case. Or repeat for each.
6. Load the parts of the case. And `meta.yaml` if present.
7. Run the test, as defined by the test format.
Step 1 may be a step with compile time selection of a configuration, if desired for optimization.
The base requirement is just to use the same set of constants, independent of the loading process.

View File

@ -4,6 +4,8 @@ A BLS pubkey aggregation combines a series of pubkeys into a single pubkey.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input: List[BLS Pubkey] -- list of input BLS pubkeys
output: BLS Pubkey -- expected output, single BLS pubkey

View File

@ -4,6 +4,8 @@ A BLS signature aggregation combines a series of signatures into a single signat
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input: List[BLS Signature] -- list of input BLS signatures
output: BLS Signature -- expected output, single BLS signature

View File

@ -4,6 +4,8 @@ A BLS compressed-hash to G2.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
message: bytes32

View File

@ -4,6 +4,8 @@ A BLS uncompressed-hash to G2.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
message: bytes32

View File

@ -4,6 +4,8 @@ A BLS private key to public key conversion.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input: bytes32 -- the private key
output: bytes48 -- the public key

View File

@ -4,6 +4,8 @@ Message signing with BLS should produce a signature.
## Test case format
The test data is declared in a `data.yaml` file:
```yaml
input:
privkey: bytes32 -- the private key used for signing

View File

@ -7,13 +7,27 @@ Hence, the format is shared between each test-handler. (See test condition docum
## Test case format
### `meta.yaml`
```yaml
description: string -- description of test case, purely for debugging purposes
description: string -- Optional description of test case, purely for debugging purposes.
Tests should use the directory name of the test case as identifier, not the description.
bls_setting: int -- see general test-format spec.
pre: BeaconState -- state before running the sub-transition
post: BeaconState -- state after applying the epoch sub-transition.
```
### `pre.yaml`
A YAML-encoded `BeaconState`, the state before running the epoch sub-transition.
Also available as `pre.ssz`.
### `post.yaml`
A YAML-encoded `BeaconState`, the state after applying the epoch sub-transition.
Also available as `post.ssz`.
## Condition
A handler of the `epoch_processing` test-runner should process these cases,

View File

@ -4,15 +4,39 @@ Tests the initialization of a genesis state based on Eth1 data.
## Test case format
### `eth1_block_hash.yaml`
A `Bytes32` hex encoded, with prefix 0x. The root of the Eth-1 block.
Also available as `eth1_block_hash.ssz`.
### `eth1_timestamp.yaml`
An integer. The timestamp of the block, in seconds.
### `meta.yaml`
A yaml file to help read the deposit count:
```yaml
description: string -- description of test case, purely for debugging purposes
bls_setting: int -- see general test-format spec.
eth1_block_hash: Bytes32 -- the root of the Eth-1 block, hex encoded, with prefix 0x
eth1_timestamp: int -- the timestamp of the block, in seconds.
deposits: [Deposit] -- list of deposits to build the genesis state with
state: BeaconState -- the expected genesis state.
deposits_count: int -- Amount of deposits.
```
### `deposits_<index>.yaml`
A series of files, with `<index>` in range `[0, deposits_count)`. Deposits need to be processed in order.
Each file is a YAML-encoded `Deposit` object.
Each deposit is also available as `deposits_<index>.ssz`.
### `state.yaml`
The expected genesis state. A YAML-encoded `BeaconState` object.
Also available as `state.ssz`.
## Processing
To process this test, build a genesis state with the provided `eth1_block_hash`, `eth1_timestamp` and `deposits`:
`initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits)`,
as described in the Beacon Chain specification.

View File

@ -4,12 +4,18 @@ Tests if a genesis state is valid, i.e. if it counts as trigger to launch.
## Test case format
```yaml
description: string -- description of test case, purely for debugging purposes
bls_setting: int -- see general test-format spec.
genesis: BeaconState -- state to validate.
is_valid: bool -- true if the genesis state is deemed valid as to launch with, false otherwise.
```
### `genesis.yaml`
A `BeaconState`, the state to validate as genesis candidate.
Also available as `genesis.ssz`.
### `is_valid.yaml`
A boolean, true if the genesis state is deemed valid as to launch with, false otherwise.
## Processing
To process the data, call `is_valid_genesis_state(genesis)`.

View File

@ -4,14 +4,33 @@ The different kinds of operations ("transactions") are tested individually with
## Test case format
### `meta.yaml`
```yaml
description: string -- description of test case, purely for debugging purposes
bls_setting: int -- see general test-format spec.
pre: BeaconState -- state before applying the operation
<operation-name>: <operation-object> -- the YAML encoded operation, e.g. a "ProposerSlashing", or "Deposit".
post: BeaconState -- state after applying the operation. No value if operation processing is aborted.
description: string -- Optional description of test case, purely for debugging purposes.
Tests should use the directory name of the test case as identifier, not the description.
bls_setting: int -- see general test-format spec.
```
### `pre.yaml`
A YAML-encoded `BeaconState`, the state before applying the operation.
Also available as `pre.ssz`.
### `<operation-name>.yaml`
A YAML-encoded operation object, e.g. a `ProposerSlashing`, or `Deposit`.
Also available as `<operation-name>.ssz`.
### `post.yaml`
A YAML-encoded `BeaconState`, the state after applying the operation. No value if operation processing is aborted.
Also available as `post.ssz`.
## Condition
A handler of the `operations` test-runner should process these cases,
@ -24,7 +43,7 @@ Operations:
|-------------------------|----------------------|----------------------|--------------------------------------------------------|
| `attestation` | `Attestation` | `attestation` | `process_attestation(state, attestation)` |
| `attester_slashing` | `AttesterSlashing` | `attester_slashing` | `process_attester_slashing(state, attester_slashing)` |
| `block_header` | `Block` | `block` | `process_block_header(state, block)` |
| `block_header` | `Block` | **`block`** | `process_block_header(state, block)` |
| `deposit` | `Deposit` | `deposit` | `process_deposit(state, deposit)` |
| `proposer_slashing` | `ProposerSlashing` | `proposer_slashing` | `process_proposer_slashing(state, proposer_slashing)` |
| `transfer` | `Transfer` | `transfer` | `process_transfer(state, transfer)` |

View File

@ -4,14 +4,38 @@ Sanity tests to cover a series of one or more blocks being processed, aiming to
## Test case format
### `meta.yaml`
```yaml
description: string -- description of test case, purely for debugging purposes
description: string -- Optional. Description of test case, purely for debugging purposes.
bls_setting: int -- see general test-format spec.
pre: BeaconState -- state before running through the transitions triggered by the blocks.
blocks: [BeaconBlock] -- blocks to process, in given order, following the main transition function (i.e. process slot and epoch transitions in between blocks as normal)
post: BeaconState -- state after applying all the transitions triggered by the blocks.
blocks_count: int -- the number of blocks processed in this test.
```
### `pre.yaml`
A YAML-encoded `BeaconState`, the state before running the block transitions.
Also available as `pre.ssz`.
### `blocks_<index>.yaml`
A series of files, with `<index>` in range `[0, blocks_count)`. Blocks need to be processed in order,
following the main transition function (i.e. process slot and epoch transitions in between blocks as normal)
Each file is a YAML-encoded `BeaconBlock`.
Each block is also available as `blocks_<index>.ssz`
### `post.yaml`
A YAML-encoded `BeaconState`, the state after applying the block transitions.
Also available as `post.ssz`.
## Condition
The resulting state should match the expected `post` state, or if the `post` state is left blank,

View File

@ -4,14 +4,34 @@ Sanity tests to cover a series of one or more empty-slot transitions being proce
## Test case format
### `meta.yaml`
```yaml
description: string -- description of test case, purely for debugging purposes
description: string -- Optional. Description of test case, purely for debugging purposes.
bls_setting: int -- see general test-format spec.
pre: BeaconState -- state before running through the transitions.
slots: N -- amount of slots to process, N being a positive number.
post: BeaconState -- state after applying all the transitions.
```
### `pre.yaml`
A YAML-encoded `BeaconState`, the state before running the transitions.
Also available as `pre.ssz`.
### `slots.yaml`
An integer. The amount of slots to process (i.e. the difference in slots between pre and post), always a positive number.
### `post.yaml`
A YAML-encoded `BeaconState`, the state after applying the transitions.
Also available as `post.ssz`.
### Processing
The transition with pure time, no blocks, is known as `process_slots(state, slot)` in the spec.
This runs state-caching (pure slot transition) and epoch processing (every E slots).

View File

@ -7,26 +7,32 @@ Clients may take different approaches to shuffling, for optimizing,
and supporting advanced lookup behavior back in older history.
For implementers, possible test runners implementing testing can include:
1) Just test permute-index, run it for each index `i` in `range(count)`, and check against expected `output[i]` (default spec implementation).
1) Just test permute-index, run it for each index `i` in `range(count)`, and check against expected `mapping[i]` (default spec implementation).
2) Test un-permute-index (the reverse lookup; implemented by running the shuffling rounds in reverse, from `round_count-1` to `0`).
3) Test the optimized complete shuffle, where all indices are shuffled at once; test output in one go.
4) Test complete shuffle in reverse (reverse rounds, same as #2).
## Test case format
### `mapping.yaml`
```yaml
seed: bytes32
count: int
shuffled: List[int]
mapping: List[int]
```
- The `bytes32` is encoded a string, hexadecimal encoding, prefixed with `0x`.
- The `bytes32` is encoded as a string, hexadecimal encoding, prefixed with `0x`.
- Integers are validator indices. These are `uint64`, but realistically they are not as big.
The `count` specifies the validator registry size. One should compute the shuffling for indices `0, 1, 2, 3, ..., count (exclusive)`.
Seed is the raw shuffling seed, passed to permute-index (or optimized shuffling approach).
The `seed` is the raw shuffling seed, passed to permute-index (or optimized shuffling approach).
The `mapping` is a look up array, constructed as `[spec.compute_shuffled_index(i, count, seed) for i in range(count)]`
I.e. `mapping[i]` is the shuffled location of `i`.
## Condition
The resulting list should match the expected output `shuffled` after shuffling the implied input, using the given `seed`.
The resulting list should match the expected output after shuffling the implied input, using the given `seed`.
The output is checked using the `mapping`, based on the shuffling test type (e.g. can be backwards shuffling).

View File

@ -1,20 +1,197 @@
# SSZ, generic tests
This set of test-suites provides general testing for SSZ:
to instantiate any container/list/vector/other type from binary data.
to decode any container/list/vector/other type from binary data, encode it back, and compute the hash-tree-root.
Since SSZ is in a development-phase, the full suite of features is not covered yet.
Note that these tests are based on the older SSZ package.
The tests are still relevant, but limited in scope:
more complex object encodings have changed since the original SSZ testing.
This test collection for general-purpose SSZ is experimental.
The `ssz_static` suite is the required minimal support for SSZ, and should be prioritized.
A minimal but useful series of tests covering `uint` encoding and decoding is provided.
This is a direct port of the older SSZ `uint` tests (minus outdated test cases).
The `ssz_generic` tests are split up into different handlers, each specialized in an SSZ type:
Test format documentation can be found here: [uint test format](./uint.md).
- Vectors
- `basic_vector`
- `complex_vector` *not supported yet*
- List
- `basic_list` *not supported yet*
- `complex_list` *not supported yet*
- Bitfields
- `bitvector`
- `bitlist`
- Basic types
- `boolean`
- `uints`
- Containers
- `containers`
*Note*: The current Phase 0 spec does not use larger uints, and uses byte vectors (fixed length) instead to represent roots etc.
The exact uint lengths to support may be redefined in the future.
Extension of the SSZ tests collection is planned, with an update to the new spec-maintained `minimal_ssz.py`;
see CI/testing issues for progress tracking.
## Format
For each type, a `valid` and an `invalid` suite is implemented.
The cases have the same format, but those in the `invalid` suite only declare a subset of the data that a test in the `valid` suite declares.
Each of the handlers encodes the SSZ type declaration in the file-name. See [Type Declarations](#type-declarations).
### `valid`
Valid has 3 parts: `meta.yaml`, `serialized.ssz`, `value.yaml`
### `meta.yaml`
Valid ssz objects can have a hash-tree-root, and for some types also a signing-root.
The expected roots are encoded into the metadata yaml:
```yaml
root: Bytes32 -- Hash-tree-root of the object
signing_root: Bytes32 -- Signing-root of the object
```
The `Bytes32` is encoded as a string, hexadecimal encoding, prefixed with `0x`.
### `serialized.ssz`
The serialized form of the object, as raw SSZ bytes.
### `value.yaml`
The object, encoded as a YAML structure. Using the same familiar encoding as YAML data in the other test suites.
### Conditions
The conditions are the same for each type:
- Encoding: After encoding the given `value` object, the output should match `serialized`.
- Decoding: After decoding the given `serialized` bytes, it should match the `value` object.
- Hash-tree-root: the root should match the root declared in the metadata.
- Signing-root: if present in metadata, the signing root of the object should match the container.
## `invalid`
Test cases in the `invalid` suite only include the `serialized.ssz`
#### Condition
Unlike the `valid` suite, invalid encodings do not have any `value` or hash tree root.
The `serialized` data should simply not be decoded without raising an error.
Note that for some type declarations in the invalid suite, the type itself may technically be invalid.
This is a valid way of detecting `invalid` data too. E.g. a 0-length basic vector.
## Type declarations
Most types are not as static, and can reasonably be constructed during test runtime from the test case name.
Formats are listed below.
For each test case, an additional `_{extra...}` may be appended to the name,
where `{extra...}` contains a human readable indication of the test case contents for debugging purposes.
### `basic_vector`
```
Template:
vec_{element type}_{length}
Data:
{element type}: bool, uint8, uint16, uint32, uint64, uint128, uint256
{length}: an unsigned integer
```
### `bitlist`
```
Template:
bitlist_{limit}
Data:
{limit}: the list limit, in bits, of the bitlist. Does not include the length-delimiting bit in the serialized form.
```
### `bitvector`
```
Template:
bitvec_{length}
Data:
{length}: the length, in bits, of the bitvector.
```
### `boolean`
A boolean has no type variations. Instead, file names just plainly describe the contents for debugging.
### `uints`
```
Template:
uint_{size}
Data:
{size}: the uint size: 8, 16, 32, 64, 128 or 256.
```
### `containers`
Containers are more complicated than the other types. Instead, a set of pre-defined container structures is referenced:
```
Template:
{container name}
Data:
{container name}: Any of the container names listed below (excluding the `(Container)` python super type)
```
```python
class SingleFieldTestStruct(Container):
A: byte
class SmallTestStruct(Container):
A: uint16
B: uint16
class FixedTestStruct(Container):
A: uint8
B: uint64
C: uint32
class VarTestStruct(Container):
A: uint16
B: List[uint16, 1024]
C: uint8
class ComplexTestStruct(Container):
A: uint16
B: List[uint16, 128]
C: uint8
D: Bytes[256]
E: VarTestStruct
F: Vector[FixedTestStruct, 4]
G: Vector[VarTestStruct, 2]
class BitsStruct(Container):
A: Bitlist[5]
B: Bitvector[2]
C: Bitvector[1]
D: Bitlist[6]
E: Bitvector[8]
```

View File

@ -1,19 +0,0 @@
# Test format: SSZ uints
SSZ supports encoding of uints up to 32 bytes. These are considered to be basic types.
## Test case format
```yaml
type: "uintN" -- string, where N is one of [8, 16, 32, 64, 128, 256]
valid: bool -- expected validity of the input data
value: string -- string, decimal encoding, to support up to 256 bit integers
ssz: bytes -- string, input data, hex encoded, with prefix 0x
tags: List[string] -- description of test case, in the form of a list of labels
```
## Condition
Two-way testing can be implemented in the test-runner:
- Encoding: After encoding the given input number `value`, the output should match `ssz`
- Decoding: After decoding the given `ssz` bytes, it should match the input number `value`

View File

@ -3,6 +3,6 @@
This set of test-suites provides static testing for SSZ:
to instantiate just the known Eth 2.0 SSZ types from binary data.
This series of tests is based on the spec-maintained `minimal_ssz.py`, i.e. fully consistent with the SSZ spec.
This series of tests is based on the spec-maintained `eth2spec/utils/ssz/ssz_impl.py`, i.e. fully consistent with the SSZ spec.
Test format documentation can be found here: [core test format](./core.md).

View File

@ -4,29 +4,54 @@ The goal of this type is to provide clients with a solid reference for how the k
Each object described in the Phase 0 spec is covered.
This is important, as many of the clients aiming to serialize/deserialize objects directly into structs/classes
do not support (or have alternatives for) generic SSZ encoding/decoding.
This test-format ensures these direct serializations are covered.
Note that this test suite does not cover the invalid-encoding case:
SSZ implementations should be hardened against invalid inputs with the other SSZ tests as guide, along with fuzzing.
## Test case format
Each SSZ type is a `handler`, since the format is semantically different: the type of the data is different.
One can iterate over the handlers, and select the type based on the handler name.
Suites are then the same format, but each specialized in one randomization mode.
Some randomization modes may only produce a single test case (e.g. the all-zeroes case).
The output parts are: `roots.yaml`, `serialized.ssz`, `value.yaml`
### `roots.yaml`
```yaml
SomeObjectName: -- key, object name, formatted as in spec. E.g. "BeaconBlock".
value: dynamic -- the YAML-encoded value, of the type specified by type_name.
serialized: bytes -- string, SSZ-serialized data, hex encoded, with prefix 0x
root: bytes32 -- string, hash-tree-root of the value, hex encoded, with prefix 0x
signing_root: bytes32 -- string, signing-root of the value, hex encoded, with prefix 0x. Optional, present if type contains ``signature`` field
root: bytes32 -- string, hash-tree-root of the value, hex encoded, with prefix 0x
signing_root: bytes32 -- string, signing-root of the value, hex encoded, with prefix 0x.
*Optional*, present if type is a container and ends with a ``signature`` field.
```
### `serialized.ssz`
The raw encoded bytes.
### `value.yaml`
The same value as `serialized.ssz`, represented as YAML.
## Condition
A test-runner can implement the following assertions:
- Serialization: After parsing the `value`, SSZ-serialize it: the output should match `serialized`
- Hash-tree-root: After parsing the `value`, Hash-tree-root it: the output should match `root`
- Optionally also check signing-root, if present.
- Deserialization: SSZ-deserialize the `serialized` value, and see if it matches the parsed `value`
- If YAML decoding of SSZ objects is supported by the implementation:
- Serialization: After parsing the `value`, SSZ-serialize it: the output should match `serialized`
- Deserialization: SSZ-deserialize the `serialized` value, and see if it matches the parsed `value`
- If YAML decoding of SSZ objects is not supported by the implementation:
- Serialization in 2 steps: deserialize `serialized`, then serialize the result,
and verify if the bytes match the original `serialized`.
- Hash-tree-root: After parsing the `value` (or deserializing `serialized`), Hash-tree-root it: the output should match `root`
- Optionally also check `signing_root`, if present.
## References
**`serialized`**—[SSZ serialization](../../simple-serialize.md#serialization)
**`root`**—[hash_tree_root](../../simple-serialize.md#merkleization) function
**`signing_root`**—[signing_root](../../simple-serialize.md#self-signed-containers) function

View File

@ -322,13 +322,13 @@ Set `attestation.data = attestation_data` where `attestation_data` is the `Attes
##### Aggregation bits
- Let `attestation.aggregation_bits` be a `Bitlist[MAX_INDICES_PER_ATTESTATION]` where the bits at the index in the aggregated validator's `committee` is set to `0b1`.
- Let `attestation.aggregation_bits` be a `Bitlist[MAX_VALIDATORS_PER_COMMITTEE]` where the bits at the index in the aggregated validator's `committee` is set to `0b1`.
*Note*: Calling `get_attesting_indices(state, attestation.data, attestation.aggregation_bits)` should return a list of length equal to 1, containing `validator_index`.
##### Custody bits
- Let `attestation.custody_bits` be a `Bitlist[MAX_INDICES_PER_ATTESTATION]` filled with zeros of length `len(committee)`.
- Let `attestation.custody_bits` be a `Bitlist[MAX_VALIDATORS_PER_COMMITTEE]` filled with zeros of length `len(committee)`.
*Note*: This is a stub for Phase 0.

View File

@ -1,11 +1,13 @@
# Eth 2.0 Test Generators
This directory contains all the generators for YAML tests, consumed by Eth 2.0 client implementations.
This directory contains all the generators for tests, consumed by Eth 2.0 client implementations.
Any issues with the generators and/or generated tests should be filed in the repository that hosts the generator outputs, here: [ethereum/eth2.0-spec-tests](https://github.com/ethereum/eth2.0-spec-tests).
Any issues with the generators and/or generated tests should be filed in the repository that hosts the generator outputs,
here: [ethereum/eth2.0-spec-tests](https://github.com/ethereum/eth2.0-spec-tests).
Whenever a release is made, the new tests are automatically built, and
[eth2TestGenBot](https://github.com/eth2TestGenBot) commits the changes to the test repository.
On releases, test generators are run by the release manager. Test-generation of mainnet tests can take a significant amount of time, and is better left out of a CI setup.
An automated nightly tests release system, with a config filter applied, is being considered as implementation needs mature.
## How to run generators
@ -58,11 +60,11 @@ It's recommended to extend the base-generator.
Create a `requirements.txt` in the root of your generator directory:
```
eth-utils==1.6.0
../../test_libs/gen_helpers
../../test_libs/config_helpers
../../test_libs/pyspec
```
The config helper and pyspec is optional, but preferred. We encourage generators to derive tests from the spec itself in order to prevent code duplication and outdated tests.
Applying configurations to the spec is simple and enables you to create test suites with different contexts.
@ -73,72 +75,115 @@ Install all the necessary requirements (re-run when you add more):
pip3 install -r requirements.txt
```
Note that you may need `PYTHONPATH` to include the pyspec directory, as with running normal tests,
to run test generators manually. The makefile handles this for you already.
And write your initial test generator, extending the base generator:
Write a `main.py` file. See example:
Write a `main.py` file. The shuffling test generator is a good minimal starting point:
```python
from gen_base import gen_runner, gen_suite, gen_typing
from eth_utils import (
to_dict, to_tuple
)
from eth2spec.phase0 import spec as spec
from eth_utils import to_tuple
from gen_base import gen_runner, gen_typing
from preset_loader import loader
from eth2spec.phase0 import spec
from typing import Iterable
@to_dict
def example_test_case(v: int):
yield "spec_SHARD_COUNT", spec.SHARD_COUNT
yield "example", v
def shuffling_case_fn(seed, count):
yield 'mapping', 'data', {
'seed': '0x' + seed.hex(),
'count': count,
'mapping': [int(spec.compute_shuffled_index(i, count, seed)) for i in range(count)]
}
def shuffling_case(seed, count):
return f'shuffle_0x{seed.hex()}_{count}', lambda: shuffling_case_fn(seed, count)
@to_tuple
def generate_example_test_cases():
for i in range(10):
yield example_test_case(i)
def shuffling_test_cases():
for seed in [spec.hash(seed_init_value.to_bytes(length=4, byteorder='little')) for seed_init_value in range(30)]:
for count in [0, 1, 2, 3, 5, 10, 33, 100, 1000, 9999]:
yield shuffling_case(seed, count)
def example_minimal_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
presets = loader.load_presets(configs_path, 'minimal')
spec.apply_constants_preset(presets)
def create_provider(config_name: str) -> gen_typing.TestProvider:
return ("mini", "core", gen_suite.render_suite(
title="example_minimal",
summary="Minimal example suite, testing bar.",
forks_timeline="testing",
forks=["phase0"],
config="minimal",
handler="main",
test_cases=generate_example_test_cases()))
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec.apply_constants_preset(presets)
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
for (case_name, case_fn) in shuffling_test_cases():
yield gen_typing.TestCase(
fork_name='phase0',
runner_name='shuffling',
handler_name='core',
suite_name='shuffle',
case_name=case_name,
case_fn=case_fn
)
def example_mainnet_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
presets = loader.load_presets(configs_path, 'mainnet')
spec.apply_constants_preset(presets)
return ("full", "core", gen_suite.render_suite(
title="example_main_net",
summary="Main net based example suite.",
forks_timeline= "mainnet",
forks=["phase0"],
config="testing",
handler="main",
test_cases=generate_example_test_cases()))
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("example", [example_minimal_suite, example_mainnet_suite])
gen_runner.run_generator("shuffling", [create_provider("minimal"), create_provider("mainnet")])
```
This generator:
- builds off of `gen_runner.run_generator` to handle configuration / filter / output logic.
- parametrized the creation of a test-provider to support multiple configs.
- Iterates through tests cases.
- Each test case provides a `case_fn`, to be executed by the `gen_runner.run_generator` if the case needs to be generated. But skipped otherwise.
To extend this, one could decide to parametrize the `shuffling_test_cases` function, and create test provider for any test-yielding function.
Another example, to generate tests from pytests:
```python
def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec_phase0.apply_constants_preset(presets)
spec_phase1.apply_constants_preset(presets)
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='epoch_processing',
handler_name=handler_name,
src=tests_src,
fork_name='phase0'
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("epoch_processing", [
create_provider('crosslinks', test_process_crosslinks, 'minimal'),
...
])
```
Here multiple phases load the configuration, and the stream of test cases is derived from a pytest file using the `generate_from_tests` utility.
Recommendations:
- You can have more than just one suite creator, e.g. ` gen_runner.run_generator("foo", [bar_test_suite, abc_test_suite, example_test_suite])`.
- You can concatenate lists of test cases if you don't want to split it up in suites, however, make sure they can be run with one handler.
- You can split your suite creators into different Python files/packages; this is good for code organization.
- Use config "minimal" for performance, but also implement a suite with the default config where necessary.
- You may be able to write your test suite creator in a way where it does not make assumptions on constants.
If so, you can generate test suites with different configurations for the same scenario (see example).
- The test-generator accepts `--output` and `--force` (overwrite output).
- You can have more than just one test provider.
- Your test provider is free to output any configuration and combination of runner/handler/fork/case name.
- You can split your test case generators into different Python files/packages; this is good for code organization.
- Use config `minimal` for performance and simplicity, but also implement a suite with the `mainnet` config where necessary.
- You may be able to write your test case provider in a way where it does not make assumptions on constants.
If so, you can generate test cases with different configurations for the same scenario (see example).
- See [`test_libs/gen_helpers/README.md`](../test_libs/gen_helpers/README.md) for command line options for generators.
## How to add a new test generator
@ -151,11 +196,10 @@ To add a new test generator that builds `New Tests`:
3. Your generator is assumed to have a `main.py` file in its root.
By adding the base generator to your requirements, you can make a generator really easily. See docs below.
4. Your generator is called with `-o some/file/path/for_testing/can/be_anything -c some/other/path/to_configs/`.
The base generator helps you handle this; you only have to define suite headers
and a list of tests for each suite you generate.
The base generator helps you handle this; you only have to define test case providers.
5. Finally, add any linting or testing commands to the
[circleci config file](https://github.com/ethereum/eth2.0-test-generators/blob/master/.circleci/config.yml)
if desired to increase code quality.
[circleci config file](../.circleci/config.yml) if desired to increase code quality.
Or add it to the [`Makefile`](../Makefile), if it can be run locally.
*Note*: You do not have to change the makefile.
However, if necessary (e.g. not using Python, or mixing in other languages), submit an issue, and it can be a special case.

View File

@ -2,25 +2,25 @@
BLS test vectors generator
"""
from typing import Tuple
from typing import Tuple, Iterable, Any, Callable, Dict
from eth_utils import (
to_tuple, int_to_big_endian
encode_hex,
int_to_big_endian,
)
from gen_base import gen_runner, gen_suite, gen_typing
from gen_base import gen_runner, gen_typing
from py_ecc import bls
F2Q_COEFF_LEN = 48
G2_COMPRESSED_Z_LEN = 48
def int_to_hex(n: int, byte_length: int=None) -> str:
def int_to_hex(n: int, byte_length: int = None) -> str:
byte_value = int_to_big_endian(n)
if byte_length:
byte_value = byte_value.rjust(byte_length, b'\x00')
return '0x' + byte_value.hex()
return encode_hex(byte_value)
def hex_to_int(x: str) -> int:
@ -28,11 +28,12 @@ def hex_to_int(x: str) -> int:
DOMAINS = [
0,
1,
1234,
2**32-1,
2**64-1
b'\x00\x00\x00\x00\x00\x00\x00\x00',
b'\x00\x00\x00\x00\x00\x00\x00\x01',
b'\x01\x00\x00\x00\x00\x00\x00\x00',
b'\x80\x00\x00\x00\x00\x00\x00\x00',
b'\x01\x23\x45\x67\x89\xab\xcd\xef',
b'\xff\xff\xff\xff\xff\xff\xff\xff'
]
MESSAGES = [
@ -51,12 +52,12 @@ PRIVKEYS = [
def hash_message(msg: bytes,
domain: int) ->Tuple[Tuple[str, str], Tuple[str, str], Tuple[str, str]]:
domain: bytes) -> Tuple[Tuple[str, str], Tuple[str, str], Tuple[str, str]]:
"""
Hash message
Input:
- Message as bytes
- domain as uint64
- Message as bytes32
- domain as bytes8
Output:
- Message hash as a G2 point
"""
@ -69,12 +70,12 @@ def hash_message(msg: bytes,
]
def hash_message_compressed(msg: bytes, domain: int) -> Tuple[str, str]:
def hash_message_compressed(msg: bytes, domain: bytes) -> Tuple[str, str]:
"""
Hash message
Input:
- Message as bytes
- domain as uint64
- Message as bytes32
- domain as bytes8
Output:
- Message hash as a compressed G2 point
"""
@ -82,76 +83,75 @@ def hash_message_compressed(msg: bytes, domain: int) -> Tuple[str, str]:
return [int_to_hex(z1, G2_COMPRESSED_Z_LEN), int_to_hex(z2, G2_COMPRESSED_Z_LEN)]
@to_tuple
def case01_message_hash_G2_uncompressed():
for msg in MESSAGES:
for domain in DOMAINS:
yield {
yield f'uncom_g2_hash_{encode_hex(msg)}_{encode_hex(domain)}', {
'input': {
'message': '0x' + msg.hex(),
'domain': int_to_hex(domain, byte_length=8)
'message': encode_hex(msg),
'domain': encode_hex(domain),
},
'output': hash_message(msg, domain)
}
@to_tuple
def case02_message_hash_G2_compressed():
for msg in MESSAGES:
for domain in DOMAINS:
yield {
yield f'com_g2_hash_{encode_hex(msg)}_{encode_hex(domain)}', {
'input': {
'message': '0x' + msg.hex(),
'domain': int_to_hex(domain, byte_length=8)
'message': encode_hex(msg),
'domain': encode_hex(domain),
},
'output': hash_message_compressed(msg, domain)
}
@to_tuple
def case03_private_to_public_key():
pubkeys = [bls.privtopub(privkey) for privkey in PRIVKEYS]
pubkeys_serial = ['0x' + pubkey.hex() for pubkey in pubkeys]
for privkey, pubkey_serial in zip(PRIVKEYS, pubkeys_serial):
yield {
yield f'priv_to_pub_{int_to_hex(privkey)}', {
'input': int_to_hex(privkey),
'output': pubkey_serial,
}
@to_tuple
def case04_sign_messages():
for privkey in PRIVKEYS:
for message in MESSAGES:
for domain in DOMAINS:
sig = bls.sign(message, privkey, domain)
yield {
yield f'sign_msg_{int_to_hex(privkey)}_{encode_hex(message)}_{encode_hex(domain)}', {
'input': {
'privkey': int_to_hex(privkey),
'message': '0x' + message.hex(),
'domain': int_to_hex(domain, byte_length=8)
'message': encode_hex(message),
'domain': encode_hex(domain),
},
'output': '0x' + sig.hex()
'output': encode_hex(sig)
}
# TODO: case05_verify_messages: Verify messages signed in case04
# It takes too long, empty for now
@to_tuple
def case06_aggregate_sigs():
for domain in DOMAINS:
for message in MESSAGES:
sigs = [bls.sign(message, privkey, domain) for privkey in PRIVKEYS]
yield {
'input': ['0x' + sig.hex() for sig in sigs],
'output': '0x' + bls.aggregate_signatures(sigs).hex(),
yield f'agg_sigs_{encode_hex(message)}_{encode_hex(domain)}', {
'input': [encode_hex(sig) for sig in sigs],
'output': encode_hex(bls.aggregate_signatures(sigs)),
}
@to_tuple
def case07_aggregate_pubkeys():
pubkeys = [bls.privtopub(privkey) for privkey in PRIVKEYS]
pubkeys_serial = ['0x' + pubkey.hex() for pubkey in pubkeys]
yield {
pubkeys_serial = [encode_hex(pubkey) for pubkey in pubkeys]
yield f'agg_pub_keys', {
'input': pubkeys_serial,
'output': '0x' + bls.aggregate_pubkeys(pubkeys).hex(),
'output': encode_hex(bls.aggregate_pubkeys(pubkeys)),
}
@ -162,85 +162,36 @@ def case07_aggregate_pubkeys():
# Proof-of-possession
def bls_msg_hash_uncompressed_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("g2_uncompressed", "msg_hash_g2_uncompressed", gen_suite.render_suite(
title="BLS G2 Uncompressed msg hash",
summary="BLS G2 Uncompressed msg hash",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="bls",
handler="msg_hash_uncompressed",
test_cases=case01_message_hash_G2_uncompressed()))
def create_provider(handler_name: str,
test_case_fn: Callable[[], Iterable[Tuple[str, Dict[str, Any]]]]) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
# Nothing to load / change in spec. Maybe in future forks.
# Put the tests into the general config category, to not require any particular configuration.
return 'general'
def bls_msg_hash_compressed_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("g2_compressed", "msg_hash_g2_compressed", gen_suite.render_suite(
title="BLS G2 Compressed msg hash",
summary="BLS G2 Compressed msg hash",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="bls",
handler="msg_hash_compressed",
test_cases=case02_message_hash_G2_compressed()))
def cases_fn() -> Iterable[gen_typing.TestCase]:
for data in test_case_fn():
print(data)
(case_name, case_content) = data
yield gen_typing.TestCase(
fork_name='phase0',
runner_name='bls',
handler_name=handler_name,
suite_name='small',
case_name=case_name,
case_fn=lambda: [('data', 'data', case_content)]
)
def bls_priv_to_pub_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("priv_to_pub", "priv_to_pub", gen_suite.render_suite(
title="BLS private key to pubkey",
summary="BLS Convert private key to public key",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="bls",
handler="priv_to_pub",
test_cases=case03_private_to_public_key()))
def bls_sign_msg_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("sign_msg", "sign_msg", gen_suite.render_suite(
title="BLS sign msg",
summary="BLS Sign a message",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="bls",
handler="sign_msg",
test_cases=case04_sign_messages()))
def bls_aggregate_sigs_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("aggregate_sigs", "aggregate_sigs", gen_suite.render_suite(
title="BLS aggregate sigs",
summary="BLS Aggregate signatures",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="bls",
handler="aggregate_sigs",
test_cases=case06_aggregate_sigs()))
def bls_aggregate_pubkeys_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("aggregate_pubkeys", "aggregate_pubkeys", gen_suite.render_suite(
title="BLS aggregate pubkeys",
summary="BLS Aggregate public keys",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="bls",
handler="aggregate_pubkeys",
test_cases=case07_aggregate_pubkeys()))
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("bls", [
bls_msg_hash_compressed_suite,
bls_msg_hash_uncompressed_suite,
bls_priv_to_pub_suite,
bls_sign_msg_suite,
bls_aggregate_sigs_suite,
bls_aggregate_pubkeys_suite
create_provider('msg_hash_uncompressed', case01_message_hash_G2_uncompressed),
create_provider('msg_hash_compressed', case02_message_hash_G2_compressed),
create_provider('priv_to_pub', case03_private_to_public_key),
create_provider('sign_msg', case04_sign_messages),
create_provider('aggregate_sigs', case06_aggregate_sigs),
create_provider('aggregate_pubkeys', case07_aggregate_pubkeys),
])

View File

@ -1,3 +1,3 @@
py-ecc==1.7.0
py_ecc==1.7.1
eth-utils==1.6.0
../../test_libs/gen_helpers

View File

@ -1,4 +1,4 @@
from typing import Callable, Iterable
from typing import Iterable
from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1
@ -9,45 +9,40 @@ from eth2spec.test.phase_0.epoch_processing import (
test_process_registry_updates,
test_process_slashings
)
from gen_base import gen_runner, gen_suite, gen_typing
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from preset_loader import loader
def create_suite(transition_name: str, config_name: str, get_cases: Callable[[], Iterable[gen_typing.TestCase]]) \
-> Callable[[str], gen_typing.TestSuiteOutput]:
def suite_definition(configs_path: str) -> gen_typing.TestSuiteOutput:
def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec_phase0.apply_constants_preset(presets)
spec_phase1.apply_constants_preset(presets)
return config_name
return ("%s_%s" % (transition_name, config_name), transition_name, gen_suite.render_suite(
title="%s epoch processing" % transition_name,
summary="Test suite for %s type epoch processing" % transition_name,
forks_timeline="testing",
forks=["phase0"],
config=config_name,
runner="epoch_processing",
handler=transition_name,
test_cases=get_cases()))
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='epoch_processing',
handler_name=handler_name,
src=tests_src,
fork_name='phase0'
)
return suite_definition
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("epoch_processing", [
create_suite('crosslinks', 'minimal', lambda: generate_from_tests(test_process_crosslinks, 'phase0')),
create_suite('crosslinks', 'mainnet', lambda: generate_from_tests(test_process_crosslinks, 'phase0')),
create_suite('final_updates', 'minimal', lambda: generate_from_tests(test_process_final_updates, 'phase0')),
create_suite('final_updates', 'mainnet', lambda: generate_from_tests(test_process_final_updates, 'phase0')),
create_suite('justification_and_finalization', 'minimal',
lambda: generate_from_tests(test_process_justification_and_finalization, 'phase0')),
create_suite('justification_and_finalization', 'mainnet',
lambda: generate_from_tests(test_process_justification_and_finalization, 'phase0')),
create_suite('registry_updates', 'minimal',
lambda: generate_from_tests(test_process_registry_updates, 'phase0')),
create_suite('registry_updates', 'mainnet',
lambda: generate_from_tests(test_process_registry_updates, 'phase0')),
create_suite('slashings', 'minimal', lambda: generate_from_tests(test_process_slashings, 'phase0')),
create_suite('slashings', 'mainnet', lambda: generate_from_tests(test_process_slashings, 'phase0')),
create_provider('crosslinks', test_process_crosslinks, 'minimal'),
create_provider('crosslinks', test_process_crosslinks, 'mainnet'),
create_provider('final_updates', test_process_final_updates, 'minimal'),
create_provider('final_updates', test_process_final_updates, 'mainnet'),
create_provider('justification_and_finalization', test_process_justification_and_finalization, 'minimal'),
create_provider('justification_and_finalization', test_process_justification_and_finalization, 'mainnet'),
create_provider('registry_updates', test_process_registry_updates, 'minimal'),
create_provider('registry_updates', test_process_registry_updates, 'mainnet'),
create_provider('slashings', test_process_slashings, 'minimal'),
create_provider('slashings', test_process_slashings, 'mainnet'),
])

View File

@ -1,4 +1,3 @@
eth-utils==1.6.0
../../test_libs/gen_helpers
../../test_libs/config_helpers
../../test_libs/pyspec

View File

@ -1,33 +1,33 @@
from typing import Callable, Iterable
from typing import Iterable
from eth2spec.test.genesis import test_initialization, test_validity
from gen_base import gen_runner, gen_suite, gen_typing
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from preset_loader import loader
from eth2spec.phase0 import spec as spec
def create_suite(handler_name: str, config_name: str, get_cases: Callable[[], Iterable[gen_typing.TestCase]]) \
-> Callable[[str], gen_typing.TestSuiteOutput]:
def suite_definition(configs_path: str) -> gen_typing.TestSuiteOutput:
def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec.apply_constants_preset(presets)
return config_name
return ("genesis_%s_%s" % (handler_name, config_name), handler_name, gen_suite.render_suite(
title="genesis testing",
summary="Genesis test suite, %s type, generated from pytests" % handler_name,
forks_timeline="testing",
forks=["phase0"],
config=config_name,
runner="genesis",
handler=handler_name,
test_cases=get_cases()))
return suite_definition
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='genesis',
handler_name=handler_name,
src=tests_src,
fork_name='phase0'
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("genesis", [
create_suite('initialization', 'minimal', lambda: generate_from_tests(test_initialization, 'phase0')),
create_suite('validity', 'minimal', lambda: generate_from_tests(test_validity, 'phase0')),
create_provider('initialization', test_initialization, 'minimal'),
create_provider('validity', test_validity, 'minimal'),
])

View File

@ -1,4 +1,3 @@
eth-utils==1.6.0
../../test_libs/gen_helpers
../../test_libs/config_helpers
../../test_libs/pyspec

View File

@ -1,4 +1,4 @@
from typing import Callable, Iterable
from typing import Iterable
from eth2spec.test.phase_0.block_processing import (
test_process_attestation,
@ -10,48 +10,48 @@ from eth2spec.test.phase_0.block_processing import (
test_process_voluntary_exit,
)
from gen_base import gen_runner, gen_suite, gen_typing
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from preset_loader import loader
from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1
def create_suite(operation_name: str, config_name: str, get_cases: Callable[[], Iterable[gen_typing.TestCase]]) \
-> Callable[[str], gen_typing.TestSuiteOutput]:
def suite_definition(configs_path: str) -> gen_typing.TestSuiteOutput:
def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec_phase0.apply_constants_preset(presets)
spec_phase1.apply_constants_preset(presets)
return config_name
return ("%s_%s" % (operation_name, config_name), operation_name, gen_suite.render_suite(
title="%s operation" % operation_name,
summary="Test suite for %s type operation processing" % operation_name,
forks_timeline="testing",
forks=["phase0"],
config=config_name,
runner="operations",
handler=operation_name,
test_cases=get_cases()))
return suite_definition
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='operations',
handler_name=handler_name,
src=tests_src,
fork_name='phase0'
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("operations", [
create_suite('attestation', 'minimal', lambda: generate_from_tests(test_process_attestation, 'phase0')),
create_suite('attestation', 'mainnet', lambda: generate_from_tests(test_process_attestation, 'phase0')),
create_suite('attester_slashing', 'minimal', lambda: generate_from_tests(test_process_attester_slashing, 'phase0')),
create_suite('attester_slashing', 'mainnet', lambda: generate_from_tests(test_process_attester_slashing, 'phase0')),
create_suite('block_header', 'minimal', lambda: generate_from_tests(test_process_block_header, 'phase0')),
create_suite('block_header', 'mainnet', lambda: generate_from_tests(test_process_block_header, 'phase0')),
create_suite('deposit', 'minimal', lambda: generate_from_tests(test_process_deposit, 'phase0')),
create_suite('deposit', 'mainnet', lambda: generate_from_tests(test_process_deposit, 'phase0')),
create_suite('proposer_slashing', 'minimal', lambda: generate_from_tests(test_process_proposer_slashing, 'phase0')),
create_suite('proposer_slashing', 'mainnet', lambda: generate_from_tests(test_process_proposer_slashing, 'phase0')),
create_suite('transfer', 'minimal', lambda: generate_from_tests(test_process_transfer, 'phase0')),
create_provider('attestation', test_process_attestation, 'minimal'),
create_provider('attestation', test_process_attestation, 'mainnet'),
create_provider('attester_slashing', test_process_attester_slashing, 'minimal'),
create_provider('attester_slashing', test_process_attester_slashing, 'mainnet'),
create_provider('block_header', test_process_block_header, 'minimal'),
create_provider('block_header', test_process_block_header, 'mainnet'),
create_provider('deposit', test_process_deposit, 'minimal'),
create_provider('deposit', test_process_deposit, 'mainnet'),
create_provider('proposer_slashing', test_process_proposer_slashing, 'minimal'),
create_provider('proposer_slashing', test_process_proposer_slashing, 'mainnet'),
create_provider('transfer', test_process_transfer, 'minimal'),
# Disabled, due to the high amount of different transfer tests, this produces a shocking size of tests.
    # Unnecessarily so, as transfers are currently disabled, so this is not a priority.
# create_suite('transfer', 'mainnet', lambda: generate_from_tests(test_process_transfer, 'phase0')),
create_suite('voluntary_exit', 'minimal', lambda: generate_from_tests(test_process_voluntary_exit, 'phase0')),
create_suite('voluntary_exit', 'mainnet', lambda: generate_from_tests(test_process_voluntary_exit, 'phase0')),
# create_provider('transfer', test_process_transfer, 'mainnet'),
create_provider('voluntary_exit', test_process_voluntary_exit, 'minimal'),
create_provider('voluntary_exit', test_process_voluntary_exit, 'mainnet'),
])

View File

@ -1,37 +1,37 @@
from typing import Callable, Iterable
from typing import Iterable
from eth2spec.test.sanity import test_blocks, test_slots
from gen_base import gen_runner, gen_suite, gen_typing
from gen_base import gen_runner, gen_typing
from gen_from_tests.gen import generate_from_tests
from preset_loader import loader
from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1
def create_suite(handler_name: str, config_name: str, get_cases: Callable[[], Iterable[gen_typing.TestCase]]) \
-> Callable[[str], gen_typing.TestSuiteOutput]:
def suite_definition(configs_path: str) -> gen_typing.TestSuiteOutput:
def create_provider(handler_name: str, tests_src, config_name: str) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec_phase0.apply_constants_preset(presets)
spec_phase1.apply_constants_preset(presets)
return config_name
return ("sanity_%s_%s" % (handler_name, config_name), handler_name, gen_suite.render_suite(
title="sanity testing",
summary="Sanity test suite, %s type, generated from pytests" % handler_name,
forks_timeline="testing",
forks=["phase0"],
config=config_name,
runner="sanity",
handler=handler_name,
test_cases=get_cases()))
return suite_definition
def cases_fn() -> Iterable[gen_typing.TestCase]:
return generate_from_tests(
runner_name='sanity',
handler_name=handler_name,
src=tests_src,
fork_name='phase0'
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("sanity", [
create_suite('blocks', 'minimal', lambda: generate_from_tests(test_blocks, 'phase0')),
create_suite('blocks', 'mainnet', lambda: generate_from_tests(test_blocks, 'phase0')),
create_suite('slots', 'minimal', lambda: generate_from_tests(test_slots, 'phase0')),
create_suite('slots', 'mainnet', lambda: generate_from_tests(test_slots, 'phase0')),
create_provider('blocks', test_blocks, 'minimal'),
create_provider('blocks', test_blocks, 'mainnet'),
create_provider('slots', test_slots, 'minimal'),
create_provider('slots', test_slots, 'mainnet'),
])

View File

@ -1,4 +1,3 @@
eth-utils==1.6.0
../../test_libs/gen_helpers
../../test_libs/config_helpers
../../test_libs/pyspec

View File

@ -1,54 +1,49 @@
from eth2spec.phase0 import spec as spec
from eth_utils import (
to_dict, to_tuple
)
from gen_base import gen_runner, gen_suite, gen_typing
from eth_utils import to_tuple
from gen_base import gen_runner, gen_typing
from preset_loader import loader
from typing import Iterable
def shuffling_case_fn(seed, count):
    # Yields a single output part for the test case: (part name, output format, data).
    # 'mapping' holds the shuffled index for every position 0..count-1, computed
    # with the spec's swap-or-not compute_shuffled_index (count == 0 yields an
    # empty mapping list).
    yield 'mapping', 'data', {
        'seed': '0x' + seed.hex(),
        'count': count,
        'mapping': [int(spec.compute_shuffled_index(i, count, seed)) for i in range(count)]
    }
@to_dict
def shuffling_case(seed, count):
yield 'seed', '0x' + seed.hex()
yield 'count', count
yield 'shuffled', [int(spec.compute_shuffled_index(i, count, seed)) for i in range(count)]
return f'shuffle_0x{seed.hex()}_{count}', lambda: shuffling_case_fn(seed, count)
@to_tuple
def shuffling_test_cases():
for seed in [spec.hash(spec.int_to_bytes(seed_init_value, length=4)) for seed_init_value in range(30)]:
for count in [0, 1, 2, 3, 5, 10, 33, 100, 1000]:
for seed in [spec.hash(seed_init_value.to_bytes(length=4, byteorder='little')) for seed_init_value in range(30)]:
for count in [0, 1, 2, 3, 5, 10, 33, 100, 1000, 9999]:
yield shuffling_case(seed, count)
def mini_shuffling_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
presets = loader.load_presets(configs_path, 'minimal')
spec.apply_constants_preset(presets)
def create_provider(config_name: str) -> gen_typing.TestProvider:
return ("shuffling_minimal", "core", gen_suite.render_suite(
title="Swap-or-Not Shuffling tests with minimal config",
summary="Swap or not shuffling, with minimally configured testing round-count",
forks_timeline="testing",
forks=["phase0"],
config="minimal",
runner="shuffling",
handler="core",
test_cases=shuffling_test_cases()))
def prepare_fn(configs_path: str) -> str:
presets = loader.load_presets(configs_path, config_name)
spec.apply_constants_preset(presets)
return config_name
def cases_fn() -> Iterable[gen_typing.TestCase]:
for (case_name, case_fn) in shuffling_test_cases():
yield gen_typing.TestCase(
fork_name='phase0',
runner_name='shuffling',
handler_name='core',
suite_name='shuffle',
case_name=case_name,
case_fn=case_fn
)
def full_shuffling_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
presets = loader.load_presets(configs_path, 'mainnet')
spec.apply_constants_preset(presets)
return ("shuffling_full", "core", gen_suite.render_suite(
title="Swap-or-Not Shuffling tests with mainnet config",
summary="Swap or not shuffling, with normal configured (secure) mainnet round-count",
forks_timeline="mainnet",
forks=["phase0"],
config="mainnet",
runner="shuffling",
handler="core",
test_cases=shuffling_test_cases()))
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("shuffling", [mini_shuffling_suite, full_shuffling_suite])
gen_runner.run_generator("shuffling", [create_provider("minimal"), create_provider("mainnet")])

View File

@ -1,47 +1,44 @@
from uint_test_cases import (
generate_random_uint_test_cases,
generate_uint_wrong_length_test_cases,
generate_uint_bounds_test_cases,
generate_uint_out_of_bounds_test_cases
)
from gen_base import gen_runner, gen_suite, gen_typing
def ssz_random_uint_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("uint_random", "uint", gen_suite.render_suite(
title="UInt Random",
summary="Random integers chosen uniformly over the allowed value range",
forks_timeline= "mainnet",
forks=["phase0"],
config="mainnet",
runner="ssz",
handler="uint",
test_cases=generate_random_uint_test_cases()))
from typing import Iterable
from gen_base import gen_runner, gen_typing
import ssz_basic_vector
import ssz_bitlist
import ssz_bitvector
import ssz_boolean
import ssz_uints
import ssz_container
def ssz_wrong_uint_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("uint_wrong_length", "uint", gen_suite.render_suite(
title="UInt Wrong Length",
summary="Serialized integers that are too short or too long",
forks_timeline= "mainnet",
forks=["phase0"],
config="mainnet",
runner="ssz",
handler="uint",
test_cases=generate_uint_wrong_length_test_cases()))
def create_provider(handler_name: str, suite_name: str, case_maker) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
return "general"
def ssz_uint_bounds_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
return ("uint_bounds", "uint", gen_suite.render_suite(
title="UInt Bounds",
summary="Integers right at or beyond the bounds of the allowed value range",
forks_timeline= "mainnet",
forks=["phase0"],
config="mainnet",
runner="ssz",
handler="uint",
test_cases=generate_uint_bounds_test_cases() + generate_uint_out_of_bounds_test_cases()))
def cases_fn() -> Iterable[gen_typing.TestCase]:
for (case_name, case_fn) in case_maker():
yield gen_typing.TestCase(
fork_name='phase0',
runner_name='ssz_generic',
handler_name=handler_name,
suite_name=suite_name,
case_name=case_name,
case_fn=case_fn
)
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
gen_runner.run_generator("ssz_generic", [ssz_random_uint_suite, ssz_wrong_uint_suite, ssz_uint_bounds_suite])
gen_runner.run_generator("ssz_generic", [
create_provider("basic_vector", "valid", ssz_basic_vector.valid_cases),
create_provider("basic_vector", "invalid", ssz_basic_vector.invalid_cases),
create_provider("bitlist", "valid", ssz_bitlist.valid_cases),
create_provider("bitlist", "invalid", ssz_bitlist.invalid_cases),
create_provider("bitvector", "valid", ssz_bitvector.valid_cases),
create_provider("bitvector", "invalid", ssz_bitvector.invalid_cases),
create_provider("boolean", "valid", ssz_boolean.valid_cases),
create_provider("boolean", "invalid", ssz_boolean.invalid_cases),
create_provider("uints", "valid", ssz_uints.valid_cases),
create_provider("uints", "invalid", ssz_uints.invalid_cases),
create_provider("containers", "valid", ssz_container.valid_cases),
create_provider("containers", "invalid", ssz_container.invalid_cases),
])

View File

@ -1,93 +0,0 @@
from collections.abc import (
Mapping,
Sequence,
)
from eth_utils import (
encode_hex,
to_dict,
)
from ssz.sedes import (
BaseSedes,
Boolean,
Bytes,
BytesN,
Container,
List,
UInt,
)
def render_value(value):
    """Recursively render a Python value into its YAML-test representation.

    The type checks are deliberately ordered: ``bool`` is tested before ``int``
    (``bool`` is an ``int`` subclass) and ``bytes`` before ``Sequence`` for the
    same reason. Integers become decimal strings, bytes become hex strings,
    sequences become tuples of rendered elements, and mappings are delegated
    to ``render_dict_value``.
    """
    if isinstance(value, bool):
        return value
    if isinstance(value, int):
        return str(value)
    if isinstance(value, bytes):
        return encode_hex(value)
    if isinstance(value, Sequence):
        return tuple(render_value(item) for item in value)
    if isinstance(value, Mapping):
        return render_dict_value(value)
    raise ValueError(f"Cannot render value {value}")
@to_dict
def render_dict_value(value):
    """Yield (key, rendered value) pairs for a mapping; ``to_dict`` collects them.

    The loop variable is named ``item`` (not ``value``) to avoid shadowing the
    mapping argument while iterating.
    """
    for key, item in value.items():
        yield key, render_value(item)
def render_type_definition(sedes):
    """Translate an ssz sedes object into the YAML type-definition notation.

    Basic types render to short strings ("bool", "uintN", "bytesN", "bytes"),
    a List renders to a one-element Python list of its element type, and a
    Container renders to a dict of field name -> field type. The ``BaseSedes``
    check is a guard: any concrete sedes type should already have matched.
    """
    if isinstance(sedes, Boolean):
        return "bool"
    if isinstance(sedes, UInt):
        return f"uint{sedes.length * 8}"
    if isinstance(sedes, BytesN):
        return f"bytes{sedes.length}"
    if isinstance(sedes, Bytes):
        return "bytes"
    if isinstance(sedes, List):
        return [render_type_definition(sedes.element_sedes)]
    if isinstance(sedes, Container):
        return {
            field_name: render_type_definition(field_sedes)
            for field_name, field_sedes in sedes.fields
        }
    if isinstance(sedes, BaseSedes):
        raise Exception("Unreachable: All sedes types have been checked")
    raise TypeError("Expected BaseSedes")
@to_dict
def render_test_case(*, sedes, valid, value=None, serial=None, description=None, tags=None):
    """Render a single ssz test case as a dict (collected by ``to_dict``).

    Keyword-only parameters:
    - sedes: the ssz type definition of the case.
    - valid: whether the case is expected to deserialize successfully.
    - value: the deserialized value (required for valid cases).
    - serial: the serialized bytes (required for valid cases).
    - description: optional human-readable note.
    - tags: optional list of tag strings (defaults to an empty list).

    Raises ValueError when the value/serial combination does not match the
    validity of the case.
    """
    value_and_serial_given = value is not None and serial is not None
    if valid:
        if not value_and_serial_given:
            raise ValueError("For valid test cases, both value and ssz must be present")
    else:
        if value_and_serial_given:
            raise ValueError("For invalid test cases, one of either value or ssz must not be present")

    if tags is None:
        tags = []

    yield "type", render_type_definition(sedes)
    yield "valid", valid
    if value is not None:
        yield "value", render_value(value)
    if serial is not None:
        yield "ssz", encode_hex(serial)
    if description is not None:
        # Bug fix: must yield a (key, value) pair — a bare string would break
        # the @to_dict collection (it expects 2-tuples).
        yield "description", description
    yield "tags", tags

View File

@ -1,4 +1,4 @@
eth-utils==1.6.0
../../test_libs/gen_helpers
../../test_libs/config_helpers
ssz==0.1.0a2
../../test_libs/pyspec

View File

@ -0,0 +1,60 @@
from ssz_test_case import invalid_test_case, valid_test_case
from eth2spec.utils.ssz.ssz_typing import boolean, uint8, uint16, uint32, uint64, uint128, uint256, Vector, BasicType
from eth2spec.utils.ssz.ssz_impl import serialize
from random import Random
from typing import Dict
from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
def basic_vector_case_fn(rng: Random, mode: RandomizationMode, elem_type: BasicType, length: int):
    """Produce a random ``Vector[elem_type, length]`` value for a test case.

    Byte/list bounds are derived from the vector length; chaos mode is off so
    the randomization stays within the requested mode.
    """
    vector_type = Vector[elem_type, length]
    return get_random_ssz_object(
        rng,
        vector_type,
        max_bytes_length=length * 8,
        max_list_length=length,
        mode=mode,
        chaos=False,
    )
# Mapping from the canonical type name (used in generated test-case names)
# to the corresponding pyspec ssz basic type. 'bool' is handled specially by
# the case generators below (no mode_random, since a bool only has two values).
BASIC_TYPES: Dict[str, BasicType] = {
    'bool': boolean,
    'uint8': uint8,
    'uint16': uint16,
    'uint32': uint32,
    'uint64': uint64,
    'uint128': uint128,
    'uint256': uint256
}
def valid_cases():
    """Yield (case name, case constructor) pairs for valid basic-vector cases.

    Covers every basic element type at a range of lengths and randomization
    modes; bools skip mode_random since zero/max already cover both values.
    """
    rng = Random(1234)
    for (name, typ) in BASIC_TYPES.items():
        random_modes = [RandomizationMode.mode_zero, RandomizationMode.mode_max]
        if name != 'bool':
            random_modes.append(RandomizationMode.mode_random)
        for length in [1, 2, 3, 4, 5, 8, 16, 31, 512, 513]:
            for mode in random_modes:
                # Bind loop variables as lambda defaults so each case closure
                # keeps its own iteration's values even if it is invoked after
                # the loop has advanced (avoids late-binding surprises).
                yield f'vec_{name}_{length}_{mode.to_name()}', \
                    valid_test_case(lambda mode=mode, typ=typ, length=length:
                                    basic_vector_case_fn(rng, mode, typ, length))
def invalid_cases():
    """Yield (case name, case constructor) pairs for invalid basic-vector encodings.

    Invalid encodings include: zero-length vectors (illegal by the ssz spec),
    empty serializations, and serializations that are one element or one byte
    too short/long for the declared length.
    """
    # zero length vectors are illegal
    for (name, typ) in BASIC_TYPES.items():
        yield f'vec_{name}_0', invalid_test_case(lambda: b'')

    rng = Random(1234)
    for (name, typ) in BASIC_TYPES.items():
        random_modes = [RandomizationMode.mode_zero, RandomizationMode.mode_max]
        if name != 'bool':
            random_modes.append(RandomizationMode.mode_random)
        for length in [1, 2, 3, 4, 5, 8, 16, 31, 512, 513]:
            yield f'vec_{name}_{length}_nil', invalid_test_case(lambda: b'')
            for mode in random_modes:
                # Bind loop variables as lambda defaults so each deferred case
                # keeps its own iteration's values (avoids late-binding bugs).
                yield f'vec_{name}_{length}_{mode.to_name()}_one_less', \
                    invalid_test_case(lambda mode=mode, typ=typ, length=length:
                                      serialize(basic_vector_case_fn(rng, mode, typ, length - 1)))
                yield f'vec_{name}_{length}_{mode.to_name()}_one_more', \
                    invalid_test_case(lambda mode=mode, typ=typ, length=length:
                                      serialize(basic_vector_case_fn(rng, mode, typ, length + 1)))
                yield f'vec_{name}_{length}_{mode.to_name()}_one_byte_less', \
                    invalid_test_case(lambda mode=mode, typ=typ, length=length:
                                      serialize(basic_vector_case_fn(rng, mode, typ, length))[:-1])
                yield f'vec_{name}_{length}_{mode.to_name()}_one_byte_more', \
                    invalid_test_case(lambda mode=mode, typ=typ, length=length:
                                      serialize(basic_vector_case_fn(rng, mode, typ, length))
                                      + serialize(basic_vector_case_fn(rng, mode, uint8, 1)))

View File

@ -0,0 +1,37 @@
from ssz_test_case import invalid_test_case, valid_test_case
from eth2spec.utils.ssz.ssz_typing import Bitlist
from eth2spec.utils.ssz.ssz_impl import serialize
from random import Random
from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
def bitlist_case_fn(rng: Random, mode: RandomizationMode, limit: int):
    """
    Produce a random ``Bitlist[limit]`` value.

    :param rng: randomness source (seed it for reproducible cases).
    :param mode: randomization strategy.
    :param limit: maximum number of bits in the bitlist.
    :return: a randomly-populated SSZ Bitlist object.
    """
    return get_random_ssz_object(rng, Bitlist[limit],
                                 max_bytes_length=(limit // 8) + 1,  # up to `limit` bits plus room for the delimiter bit
                                 max_list_length=limit,
                                 mode=mode, chaos=False)
def valid_cases():
    """
    Yield (case-name, case-fn) pairs of valid bitlist serializations, over a
    spread of bit limits, randomization modes, and repeated variations.
    """
    rng = Random(1234)
    for size in [1, 2, 3, 4, 5, 8, 16, 31, 512, 513]:
        for variation in range(5):
            for mode in [RandomizationMode.mode_nil_count,
                         RandomizationMode.mode_max_count,
                         RandomizationMode.mode_random,
                         RandomizationMode.mode_zero,
                         RandomizationMode.mode_max]:
                # Bind loop variables as lambda defaults: the case-fn runs later,
                # and closure late-binding would otherwise make eagerly-collected
                # cases all use the final (mode, size) of the loops.
                yield f'bitlist_{size}_{mode.to_name()}_{variation}', \
                    valid_test_case(lambda mode=mode, size=size: bitlist_case_fn(rng, mode, size))
def invalid_cases():
    """
    Yield (case-name, case-fn) pairs of invalid bitlist serializations:
    encodings missing the delimiter bit, and encodings whose bit count
    exceeds the declared type limit.
    """
    yield 'bitlist_no_delimiter_empty', invalid_test_case(lambda: b'')
    yield 'bitlist_no_delimiter_zero_byte', invalid_test_case(lambda: b'\x00')
    yield 'bitlist_no_delimiter_zeroes', invalid_test_case(lambda: b'\x00\x00\x00')
    rng = Random(1234)
    for (typ_limit, test_limit) in [(1, 2), (1, 8), (1, 9), (2, 3), (3, 4), (4, 5),
                                    (5, 6), (8, 9), (32, 64), (32, 33), (512, 513)]:
        # Serialize a max-count bitlist of `test_limit` bits; a decoder expecting
        # Bitlist[typ_limit] (with typ_limit < test_limit) must reject it.
        # `test_limit` is bound as a default to avoid closure late-binding.
        yield f'bitlist_{typ_limit}_but_{test_limit}', \
            invalid_test_case(lambda test_limit=test_limit: serialize(
                bitlist_case_fn(rng, RandomizationMode.mode_max_count, test_limit)))

View File

@ -0,0 +1,30 @@
from ssz_test_case import invalid_test_case, valid_test_case
from eth2spec.utils.ssz.ssz_typing import Bitvector
from eth2spec.utils.ssz.ssz_impl import serialize
from random import Random
from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
def bitvector_case_fn(rng: Random, mode: RandomizationMode, size: int):
    """
    Produce a random ``Bitvector[size]`` value.

    :param rng: randomness source (seed it for reproducible cases).
    :param mode: randomization strategy.
    :param size: exact number of bits in the bitvector.
    :return: a randomly-populated SSZ Bitvector object.
    """
    return get_random_ssz_object(rng, Bitvector[size],
                                 max_bytes_length=(size + 7) // 8,  # ceil(size / 8) bytes
                                 max_list_length=size,
                                 mode=mode, chaos=False)
def valid_cases():
    """
    Yield (case-name, case-fn) pairs of valid bitvector serializations, over
    a spread of sizes and randomization modes.
    """
    rng = Random(1234)
    for size in [1, 2, 3, 4, 5, 8, 16, 31, 512, 513]:
        for mode in [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]:
            # Bind loop variables as lambda defaults to avoid closure late-binding
            # if the consumer collects all cases before executing them.
            yield f'bitvec_{size}_{mode.to_name()}', \
                valid_test_case(lambda mode=mode, size=size: bitvector_case_fn(rng, mode, size))
def invalid_cases():
    """
    Yield (case-name, case-fn) pairs of invalid bitvector serializations:
    the zero-length encoding, and encodings whose bit count differs from
    the declared type size.
    """
    # zero length bitvectors are illegal
    yield 'bitvec_0', invalid_test_case(lambda: b'')
    rng = Random(1234)
    for (typ_size, test_size) in [(1, 2), (2, 3), (3, 4), (4, 5),
                                  (5, 6), (8, 9), (9, 8), (16, 8), (32, 33), (512, 513)]:
        for mode in [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]:
            # Bind loop variables as lambda defaults to avoid closure late-binding
            # if the consumer collects all cases before executing them.
            yield f'bitvec_{typ_size}_{mode.to_name()}_{test_size}', \
                invalid_test_case(lambda mode=mode, test_size=test_size:
                                  serialize(bitvector_case_fn(rng, mode, test_size)))

View File

@ -0,0 +1,15 @@
from ssz_test_case import valid_test_case, invalid_test_case
from eth2spec.utils.ssz.ssz_typing import boolean
def valid_cases():
    """Yield the only two valid boolean values: true and false."""
    for label, flag in (("true", True), ("false", False)):
        yield label, valid_test_case(lambda flag=flag: boolean(flag))
def invalid_cases():
    """Yield byte encodings that must be rejected as booleans (only 0x00 and 0x01 are valid)."""
    bad_encodings = (
        ("byte_2", b'\x02'),
        ("byte_rev_nibble", b'\x10'),
        ("byte_0x80", b'\x80'),
        ("byte_full", b'\xff'),
    )
    for label, data in bad_encodings:
        yield label, invalid_test_case(lambda data=data: data)

View File

@ -0,0 +1,120 @@
from ssz_test_case import invalid_test_case, valid_test_case
from eth2spec.utils.ssz.ssz_typing import SSZType, Container, byte, uint8, uint16, \
uint32, uint64, List, Bytes, Vector, Bitvector, Bitlist
from eth2spec.utils.ssz.ssz_impl import serialize
from random import Random
from typing import Dict, Tuple, Sequence, Callable
from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
# Fixed-size container with a single one-byte field.
class SingleFieldTestStruct(Container):
    A: byte


# Fixed-size container of two uint16 fields.
class SmallTestStruct(Container):
    A: uint16
    B: uint16


# Fixed-size container mixing uint widths.
class FixedTestStruct(Container):
    A: uint8
    B: uint64
    C: uint32


# Variable-size container: field B is a list, so serialization uses an offset.
class VarTestStruct(Container):
    A: uint16
    B: List[uint16, 1024]
    C: uint8


# Variable-size container nesting lists, bytes, and fixed/variable sub-containers.
class ComplexTestStruct(Container):
    A: uint16
    B: List[uint16, 128]
    C: uint8
    D: Bytes[256]
    E: VarTestStruct
    F: Vector[FixedTestStruct, 4]
    G: Vector[VarTestStruct, 2]


# Container exercising bitlist (variable-size) and bitvector (fixed-size) fields.
class BitsStruct(Container):
    A: Bitlist[5]
    B: Bitvector[2]
    C: Bitvector[1]
    D: Bitlist[6]
    E: Bitvector[8]
def container_case_fn(rng: Random, mode: RandomizationMode, typ: SSZType, chaos: bool=False):
    """
    Produce a random value of the given container type.

    :param rng: randomness source (seed it for reproducible cases).
    :param mode: randomization strategy.
    :param typ: the SSZ container type to instantiate.
    :param chaos: forwarded to the randomizer; defaults to False, keeping the
                  original behavior for existing callers.
    :return: a randomly-populated SSZ container object.
    """
    return get_random_ssz_object(rng, typ,
                                 max_bytes_length=2000,
                                 max_list_length=2000,
                                 mode=mode, chaos=chaos)


# Container types to generate cases for, mapped to the byte positions of the
# offsets in their serialization (empty for fixed-size, offset-free types).
PRESET_CONTAINERS: Dict[str, Tuple[SSZType, Sequence[int]]] = {
    'SingleFieldTestStruct': (SingleFieldTestStruct, []),
    'SmallTestStruct': (SmallTestStruct, []),
    'FixedTestStruct': (FixedTestStruct, []),
    'VarTestStruct': (VarTestStruct, [2]),
    'ComplexTestStruct': (ComplexTestStruct, [2, 2 + 4 + 1, 2 + 4 + 1 + 4]),
    'BitsStruct': (BitsStruct, [0, 4 + 1 + 1, 4 + 1 + 1 + 4]),
}


def valid_cases():
    """
    Yield (case-name, case-fn) pairs of valid container serializations for
    every preset container, over zero/max modes, randomized modes (with
    count-based modes added for variable-size types), and chaos variations.
    """
    rng = Random(1234)
    for (name, (typ, offsets)) in PRESET_CONTAINERS.items():
        for mode in [RandomizationMode.mode_zero, RandomizationMode.mode_max]:
            # Loop variables are bound as lambda defaults throughout, to avoid
            # closure late-binding if cases are collected before being run.
            yield f'{name}_{mode.to_name()}', \
                valid_test_case(lambda mode=mode, typ=typ: container_case_fn(rng, mode, typ))
        random_modes = [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]
        if len(offsets) != 0:
            random_modes.extend([RandomizationMode.mode_nil_count,
                                 RandomizationMode.mode_one_count,
                                 RandomizationMode.mode_max_count])
        for mode in random_modes:
            for variation in range(10):
                yield f'{name}_{mode.to_name()}_{variation}', \
                    valid_test_case(lambda mode=mode, typ=typ: container_case_fn(rng, mode, typ))
            for variation in range(3):
                # Bug fix: the "chaos" cases previously called container_case_fn
                # without enabling chaos, making them duplicates of the non-chaos
                # cases. Pass chaos=True so the names match the behavior.
                yield f'{name}_{mode.to_name()}_chaos_{variation}', \
                    valid_test_case(lambda mode=mode, typ=typ: container_case_fn(rng, mode, typ, chaos=True))
def mod_offset(b: bytes, offset_index: int, change: Callable[[int], int]):
    """
    Return a copy of ``b`` in which the 4-byte little-endian offset located at
    ``offset_index`` is replaced by ``change(old_offset)``, truncated to 32 bits.
    """
    old_offset = int.from_bytes(b[offset_index:offset_index + 4], byteorder='little')
    new_offset = change(old_offset) & 0xffffffff
    prefix = b[:offset_index]
    suffix = b[offset_index + 4:]
    return prefix + new_offset.to_bytes(length=4, byteorder='little') + suffix
def invalid_cases():
    """
    Yield (case-name, case-fn) pairs of invalid container serializations:
    an extra trailing byte for every type, and corrupted offsets for
    variable-size types.
    """
    rng = Random(1234)
    for (name, (typ, offsets)) in PRESET_CONTAINERS.items():
        # using mode_max_count, so that the extra byte cannot be picked up as normal list content
        yield f'{name}_extra_byte', \
            invalid_test_case(lambda typ=typ: serialize(
                container_case_fn(rng, RandomizationMode.mode_max_count, typ)) + b'\xff')
        if len(offsets) != 0:
            # Note: there are many more ways to have invalid offsets,
            # these are just examples to get clients started looking into hardening ssz.
            # (The previous duplicated `if len(offsets) != 0` check inside this
            # branch was redundant and has been removed.)
            for mode in [RandomizationMode.mode_random,
                         RandomizationMode.mode_nil_count,
                         RandomizationMode.mode_one_count,
                         RandomizationMode.mode_max_count]:
                # Loop variables are bound as lambda defaults to avoid closure
                # late-binding if cases are collected before being run.
                for offset_index in offsets:
                    yield f'{name}_offset_{offset_index}_plus_one', \
                        invalid_test_case(lambda mode=mode, typ=typ, offset_index=offset_index: mod_offset(
                            b=serialize(container_case_fn(rng, mode, typ)),
                            offset_index=offset_index,
                            change=lambda x: x + 1
                        ))
                    yield f'{name}_offset_{offset_index}_zeroed', \
                        invalid_test_case(lambda mode=mode, typ=typ, offset_index=offset_index: mod_offset(
                            b=serialize(container_case_fn(rng, mode, typ)),
                            offset_index=offset_index,
                            change=lambda x: 0
                        ))

View File

@ -0,0 +1,21 @@
from eth2spec.utils.ssz.ssz_impl import serialize, hash_tree_root, signing_root
from eth2spec.debug.encode import encode
from eth2spec.utils.ssz.ssz_typing import SSZValue, Container
from typing import Callable
def valid_test_case(value_fn: Callable[[], SSZValue]):
    """
    Wrap a value factory into a test-case function yielding the standard parts
    for a valid SSZ object: its YAML-encodable value, raw serialization,
    hash-tree-root, and — for containers — signing-root.
    """
    def case_fn():
        obj = value_fn()
        yield "value", "data", encode(obj)
        yield "serialized", "ssz", serialize(obj)
        root = hash_tree_root(obj)
        yield "root", "meta", '0x' + root.hex()
        if isinstance(obj, Container):
            sig_root = signing_root(obj)
            yield "signing_root", "meta", '0x' + sig_root.hex()
    return case_fn
def invalid_test_case(bytez_fn: Callable[[], bytes]):
    """
    Wrap a raw-bytes factory into a test-case function that yields a single
    "serialized" ssz part; consumers are expected to fail to deserialize it.
    """
    def case_fn():
        data = bytez_fn()
        yield "serialized", "ssz", data
    return case_fn

View File

@ -0,0 +1,37 @@
from ssz_test_case import invalid_test_case, valid_test_case
from eth2spec.utils.ssz.ssz_typing import BasicType, uint8, uint16, uint32, uint64, uint128, uint256
from random import Random
from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
def uint_case_fn(rng: Random, mode: RandomizationMode, typ: BasicType):
    """
    Produce a random value of the given uint type.

    :param rng: randomness source (seed it for reproducible cases).
    :param mode: randomization strategy.
    :param typ: the uint type (uint8 ... uint256) to instantiate.
    :return: a randomly-populated SSZ uint object.
    """
    return get_random_ssz_object(rng, typ,
                                 max_bytes_length=typ.byte_len,
                                 max_list_length=1,
                                 mode=mode, chaos=False)


# All supported unsigned-integer widths, shared by the valid and invalid case generators.
UINT_TYPES = [uint8, uint16, uint32, uint64, uint128, uint256]
def valid_cases():
    """
    Yield (case-name, case-fn) pairs of valid uint serializations: a boundary
    value with an empty top byte, plus randomized values in zero/max/random
    modes, for every uint width.
    """
    rng = Random(1234)
    for uint_type in UINT_TYPES:
        # Loop variables are bound as lambda defaults: the case-fn runs later,
        # and closure late-binding would otherwise point every case at the last
        # uint type if cases were collected before being executed.
        yield f'uint_{uint_type.byte_len * 8}_last_byte_empty', \
            valid_test_case(lambda uint_type=uint_type:
                            uint_type((2 ** ((uint_type.byte_len - 1) * 8)) - 1))
        for variation in range(5):
            for mode in [RandomizationMode.mode_random, RandomizationMode.mode_zero, RandomizationMode.mode_max]:
                yield f'uint_{uint_type.byte_len * 8}_{mode.to_name()}_{variation}', \
                    valid_test_case(lambda mode=mode, uint_type=uint_type:
                                    uint_case_fn(rng, mode, uint_type))
def invalid_cases():
    """
    Yield (case-name, case-fn) pairs of invalid uint serializations: a value
    that overflows the type, and encodings one byte too long or too short.

    Uses the shared UINT_TYPES list throughout (the original repeated the
    literal type list in two of the loops). Loop variables are bound as
    lambda defaults to avoid closure late-binding.
    """
    for uint_type in UINT_TYPES:
        yield f'uint_{uint_type.byte_len * 8}_one_too_high', \
            invalid_test_case(lambda uint_type=uint_type:
                              (2 ** (uint_type.byte_len * 8)).to_bytes(uint_type.byte_len + 1, 'little'))
    for uint_type in UINT_TYPES:
        yield f'uint_{uint_type.byte_len * 8}_one_byte_longer', \
            invalid_test_case(lambda uint_type=uint_type:
                              (2 ** (uint_type.byte_len * 8) - 1).to_bytes(uint_type.byte_len + 1, 'little'))
    for uint_type in UINT_TYPES:
        yield f'uint_{uint_type.byte_len * 8}_one_byte_shorter', \
            invalid_test_case(lambda uint_type=uint_type:
                              (2 ** ((uint_type.byte_len - 1) * 8) - 1).to_bytes(uint_type.byte_len - 1, 'little'))

View File

@ -1,5 +1,5 @@
from random import Random
from typing import Iterable
from inspect import getmembers, isclass
from eth2spec.debug import random_value, encode
@ -10,29 +10,23 @@ from eth2spec.utils.ssz.ssz_impl import (
signing_root,
serialize,
)
from eth_utils import (
to_tuple, to_dict
)
from gen_base import gen_runner, gen_suite, gen_typing
from gen_base import gen_runner, gen_typing
from preset_loader import loader
MAX_BYTES_LENGTH = 100
MAX_LIST_LENGTH = 10
@to_dict
def create_test_case_contents(value):
yield "value", encode.encode(value)
yield "serialized", '0x' + serialize(value).hex()
yield "root", '0x' + hash_tree_root(value).hex()
if hasattr(value, "signature"):
yield "signing_root", '0x' + signing_root(value).hex()
@to_dict
def create_test_case(rng: Random, name: str, typ, mode: random_value.RandomizationMode, chaos: bool):
def create_test_case(rng: Random, typ, mode: random_value.RandomizationMode, chaos: bool) -> Iterable[gen_typing.TestCasePart]:
value = random_value.get_random_ssz_object(rng, typ, MAX_BYTES_LENGTH, MAX_LIST_LENGTH, mode, chaos)
yield name, create_test_case_contents(value)
yield "value", "data", encode.encode(value)
yield "serialized", "ssz", serialize(value)
roots_data = {
"root": '0x' + hash_tree_root(value).hex()
}
if isinstance(value, Container) and hasattr(value, "signature"):
roots_data["signing_root"] = '0x' + signing_root(value).hex()
yield "roots", "data", roots_data
def get_spec_ssz_types():
@ -42,40 +36,38 @@ def get_spec_ssz_types():
]
@to_tuple
def ssz_static_cases(rng: Random, mode: random_value.RandomizationMode, chaos: bool, count: int):
for (name, ssz_type) in get_spec_ssz_types():
for i in range(count):
yield create_test_case(rng, name, ssz_type, mode, chaos)
def ssz_static_cases(seed: int, name, ssz_type, mode: random_value.RandomizationMode, chaos: bool, count: int):
random_mode_name = mode.to_name()
# Reproducible RNG
rng = Random(seed)
for i in range(count):
yield gen_typing.TestCase(
fork_name='phase0',
runner_name='ssz_static',
handler_name=name,
suite_name=f"ssz_{random_mode_name}{'_chaos' if chaos else ''}",
case_name=f"case_{i}",
case_fn=lambda: create_test_case(rng, ssz_type, mode, chaos)
)
def get_ssz_suite(seed: int, config_name: str, mode: random_value.RandomizationMode, chaos: bool, cases_if_random: int):
def ssz_suite(configs_path: str) -> gen_typing.TestSuiteOutput:
def create_provider(config_name: str, seed: int, mode: random_value.RandomizationMode, chaos: bool,
cases_if_random: int) -> gen_typing.TestProvider:
def prepare_fn(configs_path: str) -> str:
# Apply changes to presets, this affects some of the vector types.
presets = loader.load_presets(configs_path, config_name)
spec.apply_constants_preset(presets)
return config_name
# Reproducible RNG
rng = Random(seed)
random_mode_name = mode.to_name()
suite_name = f"ssz_{config_name}_{random_mode_name}{'_chaos' if chaos else ''}"
def cases_fn() -> Iterable[gen_typing.TestCase]:
count = cases_if_random if chaos or mode.is_changing() else 1
print(f"generating SSZ-static suite ({count} cases per ssz type): {suite_name}")
return (suite_name, "core", gen_suite.render_suite(
title=f"ssz testing, with {config_name} config, randomized with mode {random_mode_name}{' and with chaos applied' if chaos else ''}",
summary="Test suite for ssz serialization and hash-tree-root",
forks_timeline="testing",
forks=["phase0"],
config=config_name,
runner="ssz",
handler="static",
test_cases=ssz_static_cases(rng, mode, chaos, count)))
for (i, (name, ssz_type)) in enumerate(get_spec_ssz_types()):
yield from ssz_static_cases(seed * 1000 + i, name, ssz_type, mode, chaos, count)
return ssz_suite
return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
if __name__ == "__main__":
@ -91,6 +83,6 @@ if __name__ == "__main__":
seed += 1
gen_runner.run_generator("ssz_static", [
get_ssz_suite(seed, config_name, mode, chaos, cases_if_random)
for (seed, config_name, mode, chaos, cases_if_random) in settings
create_provider(config_name, seed, mode, chaos, cases_if_random)
for (seed, config_name, mode, chaos, cases_if_random) in settings
])

View File

@ -1,4 +1,3 @@
eth-utils==1.6.0
../../test_libs/gen_helpers
../../test_libs/config_helpers
../../test_libs/pyspec

View File

@ -10,10 +10,10 @@ from os.path import join
def load_presets(configs_dir, presets_name) -> Dict[str, Any]:
"""
Loads the given preset
:param presets_name: The name of the generator. (lowercase snake_case)
:param presets_name: The name of the presets. (lowercase snake_case)
:return: Dictionary, mapping of constant-name -> constant-value
"""
path = Path(join(configs_dir, 'constant_presets', presets_name+'.yaml'))
path = Path(join(configs_dir, presets_name+'.yaml'))
yaml = YAML(typ='base')
loaded = yaml.load(path)
out = dict()

View File

@ -1,5 +1,54 @@
# ETH 2.0 test generator helpers
`gen_base`: A util to quickly write new test suite generators with.
See [Generators documentation](../../test_generators/README.md).
## `gen_base`
A util to quickly write new test suite generators with.
See [Generators documentation](../../test_generators/README.md) for integration details.
Options:
```
-o OUTPUT_DIR -- Output directory to write tests to. The directory must exist.
This directory will hold the top-level test directories (per-config directories).
[-f] -- Optional. Force-run the generator: if false, existing test case folder will be detected,
and the test generator will not run the function to generate the test case with.
If true, all cases will run regardless, and files will be overwritten.
Other existing files are not deleted.
-c CONFIGS_PATH -- The directory to load configs for pyspec from. A config is a simple key-value yaml file.
Use `../../configs/` when running from the root dir of a generator, and requiring the standard spec configs.
[-l [CONFIG_LIST [CONFIG_LIST ...]]] -- Optional. Define which configs to run.
Test providers loading other configs will be ignored. If none are specified, no config will be ignored.
```
## `gen_from_tests`
This is an util to derive tests from a tests source file.
This requires the tests to yield test-case-part outputs. These outputs are then written to the test case directory.
Yielding data is illegal in normal pytests, so it is only done when in "generator mode".
This functionality can be attached to any function by using the `vector_test()` decorator found in `eth2spec/tests/utils.py`.
## Test-case parts
Test cases consist of parts, which are yielded to the base generator one by one.
The yielding pattern is:
2 value style: `yield <key name>, <value>`. The kind of output will be inferred from the value by the `vector_test()` decorator.
3 value style: `yield <key name>, <kind name>, <value>`.
Test part output kinds:
- `ssz`: value is expected to be a `bytes`, and the raw data is written to a `<key name>.ssz` file.
- `data`: value is expected to be any python object that can be dumped as YAML. Output is written to `<key name>.yaml`
- `meta`: these key-value pairs are collected into a dict, and then collectively written to a metadata
   file named `meta.yaml`, if any parts were yielded with the `meta` kind (otherwise no `meta.yaml` is written).
The `vector_test()` decorator can detect pyspec SSZ types, and output them both as `data` and `ssz`, for the test consumer to choose.
Note that the yielded outputs are processed before the test continues. It is safe to yield information that later mutates,
as the output will already be encoded to yaml or ssz bytes. This avoids the need to deep-copy the whole object.

View File

@ -1,13 +1,13 @@
import argparse
from pathlib import Path
import sys
from typing import List
from typing import Iterable, AnyStr, Any, Callable
from ruamel.yaml import (
YAML,
)
from gen_base.gen_typing import TestSuiteCreator
from gen_base.gen_typing import TestProvider
def validate_output_dir(path_str):
@ -31,26 +31,17 @@ def validate_configs_dir(path_str):
if not path.is_dir():
raise argparse.ArgumentTypeError("Config path must lead to a directory")
if not Path(path, "constant_presets").exists():
raise argparse.ArgumentTypeError("Constant Presets directory must exist")
if not Path(path, "constant_presets").is_dir():
raise argparse.ArgumentTypeError("Constant Presets path must lead to a directory")
if not Path(path, "fork_timelines").exists():
raise argparse.ArgumentTypeError("Fork Timelines directory must exist")
if not Path(path, "fork_timelines").is_dir():
raise argparse.ArgumentTypeError("Fork Timelines path must lead to a directory")
return path
def run_generator(generator_name, suite_creators: List[TestSuiteCreator]):
def run_generator(generator_name, test_providers: Iterable[TestProvider]):
"""
Implementation for a general test generator.
:param generator_name: The name of the generator. (lowercase snake_case)
:param suite_creators: A list of suite creators, each of these builds a list of test cases.
:param test_providers: A list of test provider,
each of these returns a callable that returns an iterable of test cases.
The call to get the iterable may set global configuration,
and the iterable should not be resumed after a pause with a change of that configuration.
:return:
"""
@ -71,7 +62,7 @@ def run_generator(generator_name, suite_creators: List[TestSuiteCreator]):
"--force",
action="store_true",
default=False,
help="if set overwrite test files if they exist",
help="if set re-generate and overwrite test files if they already exist",
)
parser.add_argument(
"-c",
@ -79,7 +70,16 @@ def run_generator(generator_name, suite_creators: List[TestSuiteCreator]):
dest="configs_path",
required=True,
type=validate_configs_dir,
help="specify the path of the configs directory (containing constants_presets and fork_timelines)",
help="specify the path of the configs directory",
)
parser.add_argument(
"-l",
"--config-list",
dest="config_list",
nargs='*',
type=str,
required=False,
help="specify configs to run with. Allows all if no config names are specified.",
)
args = parser.parse_args()
@ -92,24 +92,80 @@ def run_generator(generator_name, suite_creators: List[TestSuiteCreator]):
yaml = YAML(pure=True)
yaml.default_flow_style = None
print(f"Generating tests for {generator_name}, creating {len(suite_creators)} test suite files...")
print(f"Reading config presets and fork timelines from {args.configs_path}")
for suite_creator in suite_creators:
(output_name, handler, suite) = suite_creator(args.configs_path)
print(f"Generating tests into {output_dir}")
print(f"Reading configs from {args.configs_path}")
handler_output_dir = Path(output_dir) / Path(handler)
try:
if not handler_output_dir.exists():
handler_output_dir.mkdir()
except FileNotFoundError as e:
sys.exit(f'Error when creating handler dir {handler} for test "{suite["title"]}" ({e})')
configs = args.config_list
if configs is None:
configs = []
out_path = handler_output_dir / Path(output_name + '.yaml')
if len(configs) != 0:
print(f"Filtering test-generator runs to only include configs: {', '.join(configs)}")
try:
with out_path.open(file_mode) as f:
yaml.dump(suite, f)
except IOError as e:
sys.exit(f'Error when dumping test "{suite["title"]}" ({e})')
for tprov in test_providers:
# loads configuration etc.
config_name = tprov.prepare(args.configs_path)
if len(configs) != 0 and config_name not in configs:
print(f"skipping tests with config '{config_name}' since it is filtered out")
continue
print("done.")
print(f"generating tests with config '{config_name}' ...")
for test_case in tprov.make_cases():
case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \
/ Path(test_case.runner_name) / Path(test_case.handler_name) \
/ Path(test_case.suite_name) / Path(test_case.case_name)
if case_dir.exists():
if not args.force:
print(f'Skipping already existing test: {case_dir}')
continue
print(f'Warning, output directory {case_dir} already exist,'
f' old files are not deleted but will be overwritten when a new version is produced')
print(f'Generating test: {case_dir}')
try:
def output_part(out_kind: str, name: str, fn: Callable[[Path, ], None]):
# make sure the test case directory is created before any test part is written.
case_dir.mkdir(parents=True, exist_ok=True)
try:
fn(case_dir)
except IOError as e:
sys.exit(f'Error when dumping test "{case_dir}", part "{name}", kind "{out_kind}": {e}')
written_part = False
meta = dict()
for (name, out_kind, data) in test_case.case_fn():
written_part = True
if out_kind == "meta":
meta[name] = data
if out_kind == "data":
output_part("data", name, dump_yaml_fn(data, name, file_mode, yaml))
if out_kind == "ssz":
output_part("ssz", name, dump_ssz_fn(data, name, file_mode))
# Once all meta data is collected (if any), write it to a meta data file.
if len(meta) != 0:
written_part = True
output_part("data", "meta", dump_yaml_fn(meta, "meta", file_mode, yaml))
if not written_part:
print(f"test case {case_dir} did not produce any test case parts")
except Exception as e:
print(f"ERROR: failed to generate vector(s) for test {case_dir}: {e}")
print(f"completed {generator_name}")
def dump_yaml_fn(data: Any, name: str, file_mode: str, yaml_encoder: YAML):
    # Build a deferred writer: when called with a case directory, dumps `data`
    # as YAML into `<name>.yaml` inside that directory using `yaml_encoder`.
    def dump(case_path: Path):
        out_path = case_path / Path(name + '.yaml')
        with out_path.open(file_mode) as f:
            yaml_encoder.dump(data, f)
    return dump
def dump_ssz_fn(data: AnyStr, name: str, file_mode: str):
    # Build a deferred writer: when called with a case directory, writes the raw
    # SSZ bytes into `<name>.ssz` inside that directory.
    def dump(case_path: Path):
        target = case_path / Path(name + '.ssz')
        # append 'b': SSZ output is raw binary, not text
        with target.open(file_mode + 'b') as out:
            out.write(data)
    return dump

View File

@ -1,22 +0,0 @@
from typing import Iterable
from eth_utils import to_dict
from gen_base.gen_typing import TestCase
@to_dict
def render_suite(*,
title: str, summary: str,
forks_timeline: str, forks: Iterable[str],
config: str,
runner: str,
handler: str,
test_cases: Iterable[TestCase]):
yield "title", title
yield "summary", summary
yield "forks_timeline", forks_timeline,
yield "forks", forks
yield "config", config
yield "runner", runner
yield "handler", handler
yield "test_cases", test_cases

View File

@ -1,14 +1,35 @@
from typing import (
Any,
Callable,
Dict,
Iterable,
NewType,
Tuple,
)
from dataclasses import dataclass
# Elements: name, out_kind, data
#
# out_kind is the type of data:
# - "data" for generic
# - "ssz" for SSZ encoded bytes
# - "meta" for generic data to collect into a meta data dict.
TestCasePart = NewType("TestCasePart", Tuple[str, str, Any])
TestCase = Dict[str, Any]
TestSuite = Dict[str, Any]
# Tuple: (output name, handler name, suite) -- output name excl. ".yaml"
TestSuiteOutput = Tuple[str, str, TestSuite]
# Args: <presets path>
TestSuiteCreator = Callable[[str], TestSuiteOutput]
@dataclass
class TestCase(object):
    """A single generated test case: its position in the output tree and how to build its parts."""
    fork_name: str  # e.g. 'phase0'
    runner_name: str  # top-level test category
    handler_name: str  # specialization within the runner
    suite_name: str  # grouping of cases within the handler
    case_name: str  # unique name of this case within the suite
    case_fn: Callable[[], Iterable[TestCasePart]]  # lazily produces the case's output parts
@dataclass
class TestProvider(object):
    """Pairs a configuration-loading step with a factory of test cases."""
    # Prepares the context with a configuration, loaded from the given config path.
    # fn(config path) => chosen config name
    prepare: Callable[[str], str]
    # Retrieves an iterable of cases, called after prepare()
    make_cases: Callable[[], Iterable[TestCase]]

View File

@ -1,26 +1,40 @@
from inspect import getmembers, isfunction
from typing import Any, Iterable
def generate_from_tests(src, phase, bls_active=True):
from gen_base.gen_typing import TestCase
def generate_from_tests(runner_name: str, handler_name: str, src: Any,
fork_name: str, bls_active: bool = True) -> Iterable[TestCase]:
"""
Generate a list of test cases by running tests from the given src in generator-mode.
:param runner_name: to categorize the test in general as.
:param handler_name: to categorize the test specialization as.
:param src: to retrieve tests from (discovered using inspect.getmembers).
:param phase: to run tests against particular phase.
:param fork_name: to run tests against particular phase and/or fork.
(if multiple forks are applicable, indicate the last fork)
:param bls_active: optional, to override BLS switch preference. Defaults to True.
:return: the list of test cases.
:return: an iterable of test cases.
"""
fn_names = [
name for (name, _) in getmembers(src, isfunction)
if name.startswith('test_')
]
out = []
print("generating test vectors from tests source: %s" % src.__name__)
for name in fn_names:
tfn = getattr(src, name)
try:
test_case = tfn(generator_mode=True, phase=phase, bls_active=bls_active)
# If no test case data is returned, the test is ignored.
if test_case is not None:
out.append(test_case)
except AssertionError:
print("ERROR: failed to generate vector from test: %s (src: %s)" % (name, src.__name__))
return out
# strip off the `test_`
case_name = name
if case_name.startswith('test_'):
case_name = case_name[5:]
yield TestCase(
fork_name=fork_name,
runner_name=runner_name,
handler_name=handler_name,
suite_name='pyspec_tests',
case_name=case_name,
# TODO: with_all_phases and other per-phase tooling, should be replaced with per-fork equivalent.
case_fn=lambda: tfn(generator_mode=True, phase=fork_name, bls_active=bls_active)
)

View File

@ -29,4 +29,4 @@ def encode(value, include_hash_tree_roots=False):
ret["hash_tree_root"] = '0x' + hash_tree_root(value).hex()
return ret
else:
raise Exception(f"Type not recognized: value={value}, typ={value.type()}")
raise Exception(f"Type not recognized: value={value}, typ={type(value)}")

View File

@ -56,15 +56,15 @@ def get_random_ssz_object(rng: Random,
if mode == RandomizationMode.mode_nil_count:
return typ(b'')
elif mode == RandomizationMode.mode_max_count:
return typ(get_random_bytes_list(rng, max_bytes_length))
return typ(get_random_bytes_list(rng, min(max_bytes_length, typ.length)))
elif mode == RandomizationMode.mode_one_count:
return typ(get_random_bytes_list(rng, 1))
return typ(get_random_bytes_list(rng, min(1, typ.length)))
elif mode == RandomizationMode.mode_zero:
return typ(b'\x00')
return typ(b'\x00' * min(1, typ.length))
elif mode == RandomizationMode.mode_max:
return typ(b'\xff')
return typ(b'\xff' * min(1, typ.length))
else:
return typ(get_random_bytes_list(rng, rng.randint(0, max_bytes_length)))
return typ(get_random_bytes_list(rng, rng.randint(0, min(max_bytes_length, typ.length))))
elif issubclass(typ, BytesN):
# Sanity, don't generate absurdly big random values
# If a client is aiming to performance-test, they should create a benchmark suite.

View File

@ -18,14 +18,11 @@ def translate_typ(typ) -> ssz.BaseSedes:
elif issubclass(typ, spec_ssz.Vector):
return ssz.Vector(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.List):
# TODO: Make py-ssz List support the new fixed length list
return ssz.List(translate_typ(typ.elem_type))
return ssz.List(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.Bitlist):
# TODO: Once Bitlist implemented in py-ssz, use appropriate type
return ssz.List(translate_typ(typ.elem_type))
return ssz.Bitlist(typ.length)
elif issubclass(typ, spec_ssz.Bitvector):
# TODO: Once Bitvector implemented in py-ssz, use appropriate type
return ssz.Vector(translate_typ(typ.elem_type), typ.length)
return ssz.Bitvector(typ.length)
elif issubclass(typ, spec_ssz.boolean):
return ssz.boolean
elif issubclass(typ, spec_ssz.uint):

View File

@ -9,9 +9,7 @@ def test_decoder():
rng = Random(123)
# check these types only, Block covers a lot of operation types already.
# TODO: Once has Bitlists and Bitvectors, add back
# spec.BeaconState and spec.BeaconBlock
for typ in [spec.IndexedAttestation, spec.AttestationDataAndCustodyBit]:
for typ in [spec.AttestationDataAndCustodyBit, spec.BeaconState, spec.BeaconBlock]:
# create a random pyspec value
original = random_value.get_random_ssz_object(rng, typ, 100, 10,
mode=random_value.RandomizationMode.mode_random,
@ -32,4 +30,6 @@ def test_decoder():
block = translate_value(raw_value, typ)
# and see if the hash-tree-root of the original matches the hash-tree-root of the decoded & translated value.
assert spec_ssz_impl.hash_tree_root(original) == spec_ssz_impl.hash_tree_root(block)
original_hash_tree_root = spec_ssz_impl.hash_tree_root(original)
assert original_hash_tree_root == spec_ssz_impl.hash_tree_root(block)
assert original_hash_tree_root == block_sedes.get_hash_tree_root(raw_value)

View File

@ -4,7 +4,7 @@ from eth2spec.utils import bls
from .helpers.genesis import create_genesis_state
from .utils import spectest, with_tags
from .utils import vector_test, with_meta_tags
def with_state(fn):
@ -12,7 +12,7 @@ def with_state(fn):
try:
kw['state'] = create_genesis_state(spec=kw['spec'], num_validators=spec_phase0.SLOTS_PER_EPOCH * 8)
except KeyError:
raise TypeError('Spec decorator must come before state decorator to inject spec into state.')
raise TypeError('Spec decorator must come within state decorator to inject spec into state.')
return fn(*args, **kw)
return entry
@ -27,13 +27,18 @@ def with_state(fn):
DEFAULT_BLS_ACTIVE = False
def spectest_with_bls_switch(fn):
return bls_switch(spectest()(fn))
def spec_test(fn):
# Bls switch must be wrapped by vector_test,
# to fully go through the yielded bls switch data, before setting back the BLS setting.
# A test may apply BLS overrides such as @always_bls,
# but if it yields data (n.b. @always_bls yields the bls setting), it should be wrapped by this decorator.
# This is why @always_bls has its own bls switch, since the override is beyond the reach of the outer switch.
return vector_test()(bls_switch(fn))
# shorthand for decorating @with_state @spectest()
# shorthand for decorating @spectest() @with_state
def spec_state_test(fn):
return with_state(spectest_with_bls_switch(fn))
return spec_test(with_state(fn))
def expect_assertion_error(fn):
@ -50,47 +55,44 @@ def expect_assertion_error(fn):
raise AssertionError('expected an assertion error, but got none.')
# Tags a test to be ignoring BLS for it to pass.
bls_ignored = with_tags({'bls_setting': 2})
def never_bls(fn):
"""
Decorator to apply on ``bls_switch`` decorator to force BLS de-activation. Useful to mark tests as BLS-ignorant.
This decorator may only be applied to yielding spec test functions, and should be wrapped by vector_test,
as the yielding needs to complete before setting back the BLS setting.
"""
def entry(*args, **kw):
# override bls setting
kw['bls_active'] = False
return fn(*args, **kw)
return bls_ignored(entry)
# Tags a test to be requiring BLS for it to pass.
bls_required = with_tags({'bls_setting': 1})
return bls_switch(fn)(*args, **kw)
return with_meta_tags({'bls_setting': 2})(entry)
def always_bls(fn):
"""
Decorator to apply on ``bls_switch`` decorator to force BLS activation. Useful to mark tests as BLS-dependent.
This decorator may only be applied to yielding spec test functions, and should be wrapped by vector_test,
as the yielding needs to complete before setting back the BLS setting.
"""
def entry(*args, **kw):
# override bls setting
kw['bls_active'] = True
return fn(*args, **kw)
return bls_required(entry)
return bls_switch(fn)(*args, **kw)
return with_meta_tags({'bls_setting': 1})(entry)
def bls_switch(fn):
"""
Decorator to make a function execute with BLS ON, or BLS off.
Based on an optional bool argument ``bls_active``, passed to the function at runtime.
This decorator may only be applied to yielding spec test functions, and should be wrapped by vector_test,
as the yielding needs to complete before setting back the BLS setting.
"""
def entry(*args, **kw):
old_state = bls.bls_active
bls.bls_active = kw.pop('bls_active', DEFAULT_BLS_ACTIVE)
out = fn(*args, **kw)
yield from fn(*args, **kw)
bls.bls_active = old_state
return out
return entry

View File

@ -1,11 +1,11 @@
from eth2spec.test.context import spectest_with_bls_switch, with_phases
from eth2spec.test.context import spec_test, with_phases
from eth2spec.test.helpers.deposits import (
prepare_genesis_deposits,
)
@with_phases(['phase0'])
@spectest_with_bls_switch
@spec_test
def test_initialize_beacon_state_from_eth1(spec):
deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT
deposits, deposit_root = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True)

View File

@ -1,4 +1,4 @@
from eth2spec.test.context import spectest_with_bls_switch, with_phases
from eth2spec.test.context import spec_test, with_phases
from eth2spec.test.helpers.deposits import (
prepare_genesis_deposits,
)
@ -26,7 +26,7 @@ def run_is_valid_genesis_state(spec, state, valid=True):
@with_phases(['phase0'])
@spectest_with_bls_switch
@spec_test
def test_is_valid_genesis_state_true(spec):
state = create_valid_beacon_state(spec)
@ -34,7 +34,7 @@ def test_is_valid_genesis_state_true(spec):
@with_phases(['phase0'])
@spectest_with_bls_switch
@spec_test
def test_is_valid_genesis_state_false_invalid_timestamp(spec):
state = create_valid_beacon_state(spec)
state.genesis_time = spec.MIN_GENESIS_TIME - 1
@ -43,7 +43,7 @@ def test_is_valid_genesis_state_false_invalid_timestamp(spec):
@with_phases(['phase0'])
@spectest_with_bls_switch
@spec_test
def test_is_valid_genesis_state_true_more_balance(spec):
state = create_valid_beacon_state(spec)
state.validators[0].effective_balance = spec.MAX_EFFECTIVE_BALANCE + 1
@ -53,7 +53,7 @@ def test_is_valid_genesis_state_true_more_balance(spec):
# TODO: not part of the genesis function yet. Erroneously merged.
# @with_phases(['phase0'])
# @spectest_with_bls_switch
# @spec_test
# def test_is_valid_genesis_state_false_not_enough_balance(spec):
# state = create_valid_beacon_state(spec)
# state.validators[0].effective_balance = spec.MAX_EFFECTIVE_BALANCE - 1
@ -62,7 +62,7 @@ def test_is_valid_genesis_state_true_more_balance(spec):
@with_phases(['phase0'])
@spectest_with_bls_switch
@spec_test
def test_is_valid_genesis_state_true_one_more_validator(spec):
deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT + 1
deposits, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True)
@ -75,7 +75,7 @@ def test_is_valid_genesis_state_true_one_more_validator(spec):
@with_phases(['phase0'])
@spectest_with_bls_switch
@spec_test
def test_is_valid_genesis_state_false_not_enough_validator(spec):
deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT - 1
deposits, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True)

View File

@ -116,8 +116,8 @@ def test_wrong_end_epoch_with_max_epochs_per_crosslink(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_attestation_signature(spec, state):
attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
@ -398,3 +398,61 @@ def test_empty_aggregation_bits(spec, state):
sign_attestation(spec, state, attestation)
yield from run_attestation_processing(spec, state, attestation)
@with_all_phases
@spec_state_test
def test_too_many_aggregation_bits(spec, state):
attestation = get_valid_attestation(spec, state, signed=True)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
# one too many bits
attestation.aggregation_bits.append(0b0)
yield from run_attestation_processing(spec, state, attestation, False)
@with_all_phases
@spec_state_test
def test_too_few_aggregation_bits(spec, state):
attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.aggregation_bits = Bitlist[spec.MAX_VALIDATORS_PER_COMMITTEE](
*([0b1] + [0b0] * (len(attestation.aggregation_bits) - 1)))
sign_attestation(spec, state, attestation)
# one too few bits
attestation.aggregation_bits = attestation.aggregation_bits[:-1]
yield from run_attestation_processing(spec, state, attestation, False)
@with_all_phases
@spec_state_test
def test_too_many_custody_bits(spec, state):
attestation = get_valid_attestation(spec, state, signed=True)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
# one too many bits
attestation.custody_bits.append(0b0)
yield from run_attestation_processing(spec, state, attestation, False)
@with_all_phases
@spec_state_test
def test_too_few_custody_bits(spec, state):
attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.custody_bits = Bitlist[spec.MAX_VALIDATORS_PER_COMMITTEE](
*([0b1] + [0b0] * (len(attestation.custody_bits) - 1)))
sign_attestation(spec, state, attestation)
# one too few bits
attestation.custody_bits = attestation.custody_bits[:-1]
yield from run_attestation_processing(spec, state, attestation, False)

View File

@ -108,8 +108,8 @@ def test_success_surround(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_success_already_exited_recent(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
slashed_indices = (
@ -123,8 +123,8 @@ def test_success_already_exited_recent(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_success_already_exited_long_ago(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
slashed_indices = (
@ -139,24 +139,24 @@ def test_success_already_exited_long_ago(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_1(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True)
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_2(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=False)
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_1_and_2(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=False)
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@ -212,9 +212,9 @@ def test_custody_bit_0_and_1_intersect(spec, state):
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@always_bls
@with_all_phases
@spec_state_test
@always_bls
def test_att1_bad_extra_index(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
@ -228,9 +228,9 @@ def test_att1_bad_extra_index(spec, state):
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@always_bls
@with_all_phases
@spec_state_test
@always_bls
def test_att1_bad_replaced_index(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
@ -244,9 +244,9 @@ def test_att1_bad_replaced_index(spec, state):
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@always_bls
@with_all_phases
@spec_state_test
@always_bls
def test_att2_bad_extra_index(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)
@ -260,9 +260,9 @@ def test_att2_bad_extra_index(spec, state):
yield from run_attester_slashing_processing(spec, state, attester_slashing, False)
@always_bls
@with_all_phases
@spec_state_test
@always_bls
def test_att2_bad_replaced_index(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=True)

View File

@ -42,8 +42,8 @@ def test_success_block_header(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_block_header(spec, state):
block = build_empty_block_for_next_slot(spec, state)
yield from run_block_header_processing(spec, state, block, valid=False)

View File

@ -94,8 +94,8 @@ def test_new_deposit_over_max(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_new_deposit(spec, state):
# fresh deposit = next validator index = validator appended to registry
validator_index = len(state.validators)
@ -115,8 +115,8 @@ def test_success_top_up(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_top_up(spec, state):
validator_index = 0
amount = spec.MAX_EFFECTIVE_BALANCE // 4

View File

@ -49,24 +49,24 @@ def test_success(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_1(spec, state):
proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=False, signed_2=True)
yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False)
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_2(spec, state):
proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=True, signed_2=False)
yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False)
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_sig_1_and_2(spec, state):
proposer_slashing = get_valid_proposer_slashing(spec, state, signed_1=False, signed_2=False)
yield from run_proposer_slashing_processing(spec, state, proposer_slashing, False)

View File

@ -81,8 +81,8 @@ def test_success_active_above_max_effective_fee(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_signature(spec, state):
transfer = get_valid_transfer(spec, state)
# un-activate so validator can transfer

View File

@ -47,8 +47,8 @@ def test_success(spec, state):
@with_all_phases
@always_bls
@spec_state_test
@always_bls
def test_invalid_signature(spec, state):
# move state forward PERSISTENT_COMMITTEE_PERIOD epochs to allow for exit
state.slot += spec.PERSISTENT_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH

View File

@ -89,3 +89,20 @@ def test_historical_root_accumulator(spec, state):
yield from run_process_final_updates(spec, state)
assert len(state.historical_roots) == history_len + 1
@with_all_phases
@spec_state_test
def test_compact_committees_root(spec, state):
assert spec.SLOTS_PER_ETH1_VOTING_PERIOD > spec.SLOTS_PER_EPOCH
# skip ahead to the end of the epoch
state.slot = spec.SLOTS_PER_EPOCH - 1
next_epoch = spec.get_current_epoch(state) + 1
# ensure that order in which items are processed in final_updates
# does not alter the expected_root
expected_root = spec.get_compact_committees_root(state, next_epoch)
yield from run_process_final_updates(spec, state)
assert state.compact_committees_roots[next_epoch % spec.EPOCHS_PER_HISTORICAL_VECTOR] == expected_root

View File

@ -42,8 +42,8 @@ def run_early_derived_secret_reveal_processing(spec, state, randao_key_reveal, v
@with_all_phases_except(['phase0'])
@always_bls
@spec_state_test
@always_bls
def test_success(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state)
@ -51,8 +51,8 @@ def test_success(spec, state):
@with_all_phases_except(['phase0'])
@never_bls
@spec_state_test
@never_bls
def test_reveal_from_current_epoch(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state, spec.get_current_epoch(state))
@ -60,8 +60,8 @@ def test_reveal_from_current_epoch(spec, state):
@with_all_phases_except(['phase0'])
@never_bls
@spec_state_test
@never_bls
def test_reveal_from_past_epoch(spec, state):
next_epoch(spec, state)
apply_empty_block(spec, state)
@ -71,8 +71,8 @@ def test_reveal_from_past_epoch(spec, state):
@with_all_phases_except(['phase0'])
@always_bls
@spec_state_test
@always_bls
def test_reveal_with_custody_padding(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(
spec,
@ -83,8 +83,8 @@ def test_reveal_with_custody_padding(spec, state):
@with_all_phases_except(['phase0'])
@always_bls
@spec_state_test
@always_bls
def test_reveal_with_custody_padding_minus_one(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(
spec,
@ -95,8 +95,8 @@ def test_reveal_with_custody_padding_minus_one(spec, state):
@with_all_phases_except(['phase0'])
@never_bls
@spec_state_test
@never_bls
def test_double_reveal(spec, state):
randao_key_reveal1 = get_valid_early_derived_secret_reveal(
spec,
@ -120,8 +120,8 @@ def test_double_reveal(spec, state):
@with_all_phases_except(['phase0'])
@never_bls
@spec_state_test
@never_bls
def test_revealer_is_slashed(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(spec, state, spec.get_current_epoch(state))
state.validators[randao_key_reveal.revealed_index].slashed = True
@ -130,8 +130,8 @@ def test_revealer_is_slashed(spec, state):
@with_all_phases_except(['phase0'])
@never_bls
@spec_state_test
@never_bls
def test_far_future_epoch(spec, state):
randao_key_reveal = get_valid_early_derived_secret_reveal(
spec,

View File

@ -29,8 +29,8 @@ def check_finality(spec,
@with_all_phases
@never_bls
@spec_state_test
@never_bls
def test_finality_no_updates_at_genesis(spec, state):
assert spec.get_current_epoch(state) == spec.GENESIS_EPOCH
@ -53,8 +53,8 @@ def test_finality_no_updates_at_genesis(spec, state):
@with_all_phases
@never_bls
@spec_state_test
@never_bls
def test_finality_rule_4(spec, state):
# get past first two epochs that finality does not run on
next_epoch(spec, state)
@ -81,8 +81,8 @@ def test_finality_rule_4(spec, state):
@with_all_phases
@never_bls
@spec_state_test
@never_bls
def test_finality_rule_1(spec, state):
# get past first two epochs that finality does not run on
next_epoch(spec, state)
@ -111,8 +111,8 @@ def test_finality_rule_1(spec, state):
@with_all_phases
@never_bls
@spec_state_test
@never_bls
def test_finality_rule_2(spec, state):
# get past first two epochs that finality does not run on
next_epoch(spec, state)
@ -143,8 +143,8 @@ def test_finality_rule_2(spec, state):
@with_all_phases
@never_bls
@spec_state_test
@never_bls
def test_finality_rule_3(spec, state):
"""
Test scenario described here

View File

@ -1,87 +1,100 @@
from typing import Dict, Any, Callable, Iterable
from typing import Dict, Any
from eth2spec.debug.encode import encode
from eth2spec.utils.ssz.ssz_typing import SSZValue
from eth2spec.utils.ssz.ssz_impl import serialize
def spectest(description: str = None):
def vector_test(description: str = None):
"""
vector_test decorator: Allow a caller to pass "generator_mode=True" to make the test yield data,
but behave like a normal test (ignoring the yield, but fully processing) a test when not in "generator_mode"
This should always be the most outer decorator around functions that yield data.
This is to deal with silent iteration through yielding function when in a pytest
context (i.e. not in generator mode).
:param description: Optional description for the test to add to the metadata.
:return: Decorator.
"""
def runner(fn):
# this wraps the function, to hide that the function actually is yielding data, instead of returning once.
# this wraps the function, to yield type-annotated entries of data.
# Valid types are:
# - "meta": all key-values with this type can be collected by the generator, to put somewhere together.
# - "ssz": raw SSZ bytes
# - "data": a python structure to be encoded by the user.
def entry(*args, **kw):
def generator_mode():
if description is not None:
# description can be explicit
yield 'description', 'meta', description
# transform the yielded data, and add type annotations
for data in fn(*args, **kw):
# if not 2 items, then it is assumed to be already formatted with a type:
# e.g. ("bls_setting", "meta", 1)
if len(data) != 2:
yield data
continue
# Try to infer the type, but keep it as-is if it's not a SSZ type or bytes.
(key, value) = data
if value is None:
continue
if isinstance(value, SSZValue):
yield key, 'data', encode(value)
yield key, 'ssz', serialize(value)
elif isinstance(value, bytes):
yield key, 'data', encode(value)
yield key, 'ssz', value
elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]):
for i, el in enumerate(value):
if isinstance(el, SSZValue):
yield f'{key}_{i}', 'data', encode(el)
yield f'{key}_{i}', 'ssz', serialize(el)
elif isinstance(el, bytes):
yield f'{key}_{i}', 'data', encode(el)
yield f'{key}_{i}', 'ssz', el
yield f'{key}_count', 'meta', len(value)
else:
# Not a ssz value.
# The data will now just be yielded as any python data,
# something that should be encodeable by the generator runner.
yield key, 'data', value
# check generator mode, may be None/else.
# "pop" removes it, so it is not passed to the inner function.
if kw.pop('generator_mode', False) is True:
out = {}
if description is None:
# fall back on function name for test description
name = fn.__name__
if name.startswith('test_'):
name = name[5:]
out['description'] = name
else:
# description can be explicit
out['description'] = description
has_contents = False
# put all generated data into a dict.
for data in fn(*args, **kw):
has_contents = True
# If there is a type argument, encode it as that type.
if len(data) == 3:
(key, value, typ) = data
out[key] = encode(value, typ)
else:
# Otherwise, try to infer the type, but keep it as-is if it's not a SSZ type or bytes.
(key, value) = data
if isinstance(value, (SSZValue, bytes)):
out[key] = encode(value)
elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]):
out[key] = [encode(el) for el in value]
else:
# not a ssz value.
# It could be vector or bytes still, but it is a rare case,
# and lists can't be inferred fully (generics lose element type).
# In such cases, explicitly state the type of the yielded value as a third yielded object.
out[key] = value
if has_contents:
return out
else:
return None
# return the yielding function as a generator object.
# Don't yield in this function itself, that would make pytest skip over it.
return generator_mode()
else:
# just complete the function, ignore all yielded data, we are not using it
# Just complete the function, ignore all yielded data,
# we are not using it (or processing it, i.e. nearly zero efficiency loss)
# Pytest does not support yielded data in the outer function, so we need to wrap it like this.
for _ in fn(*args, **kw):
continue
return None
return entry
return runner
def with_tags(tags: Dict[str, Any]):
def with_meta_tags(tags: Dict[str, Any]):
"""
Decorator factory, adds tags (key, value) pairs to the output of the function.
Decorator factory, yields meta tags (key, value) pairs to the output of the function.
Useful to build test-vector annotations with.
This decorator is applied after the ``spectest`` decorator is applied.
:param tags: dict of tags
:return: Decorator.
"""
def runner(fn):
def entry(*args, **kw):
fn_out = fn(*args, **kw)
# do not add tags if the function is not returning a dict at all (i.e. not in generator mode)
if fn_out is None:
return None
return {**tags, **fn_out}
return entry
return runner
def with_args(create_args: Callable[[], Iterable[Any]]):
"""
Decorator factory, adds given extra arguments to the decorated function.
:param create_args: function to create arguments with.
:return: Decorator.
"""
def runner(fn):
# this wraps the function, to hide that the function actually yielding data.
def entry(*args, **kw):
return fn(*(list(create_args()) + list(args)), **kw)
yielded_any = False
for part in fn(*args, **kw):
yield part
yielded_any = True
# Do not add tags if the function is not returning a dict at all (i.e. not in generator mode).
# As a pytest, we do not want to be yielding anything (unsupported by pytest)
if yielded_any:
for k, v in tags.items():
yield k, 'meta', v
return entry
return runner

View File

@ -24,13 +24,13 @@ def only_with_bls(alt_return=None):
@only_with_bls(alt_return=True)
def bls_verify(pubkey, message_hash, signature, domain):
return bls.verify(message_hash=message_hash, pubkey=pubkey,
signature=signature, domain=int.from_bytes(domain, byteorder='little'))
signature=signature, domain=domain)
@only_with_bls(alt_return=True)
def bls_verify_multiple(pubkeys, message_hashes, signature, domain):
return bls.verify_multiple(pubkeys=pubkeys, message_hashes=message_hashes,
signature=signature, domain=int.from_bytes(domain, byteorder='little'))
signature=signature, domain=domain)
@only_with_bls(alt_return=STUB_PUBKEY)
@ -46,4 +46,4 @@ def bls_aggregate_signatures(signatures):
@only_with_bls(alt_return=STUB_SIGNATURE)
def bls_sign(message_hash, privkey, domain):
return bls.sign(message_hash=message_hash, privkey=privkey,
domain=int.from_bytes(domain, byteorder='little'))
domain=domain)

View File

@ -33,7 +33,7 @@ def deserialize_basic(value, typ: BasicType):
raise Exception(f"Type not supported: {typ}")
def is_empty(obj: SSZValue):
def is_zero(obj: SSZValue):
return type(obj).default() == obj

View File

@ -1,6 +1,6 @@
eth-utils>=1.3.0,<2
eth-typing>=2.1.0,<3.0.0
pycryptodome==3.7.3
py_ecc>=1.6.0
py_ecc==1.7.1
dataclasses==0.6
ssz==0.1.0a10
ssz==0.1.3

View File

@ -8,8 +8,8 @@ setup(
"eth-utils>=1.3.0,<2",
"eth-typing>=2.1.0,<3.0.0",
"pycryptodome==3.7.3",
"py_ecc>=1.6.0",
"ssz==0.1.0a10",
"py_ecc==1.7.1",
"ssz==0.1.3",
"dataclasses==0.6",
]
)