Merge pull request #1552 from ethereum/tree-based-ssz

Tree based spec SSZ implementation - remerkleable
Diederik Loerakker 2020-01-25 01:00:58 +01:00 committed by GitHub
commit 247a4eeab1
27 changed files with 199 additions and 1465 deletions

View File

@@ -35,39 +35,39 @@ commands:
description: "Restore the cache with pyspec keys"
steps:
- restore_cached_venv:
venv_name: v9-pyspec
venv_name: v17-pyspec
reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }}
save_pyspec_cached_venv:
description: "Save a venv into a cache with pyspec keys"
steps:
- save_cached_venv:
venv_name: v9-pyspec
venv_name: v17-pyspec
reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "tests/core/pyspec/requirements-testing.txt" }}
venv_path: ./tests/core/pyspec/venv
restore_deposit_contract_compiler_cached_venv:
description: "Restore the venv from cache for the deposit contract compiler"
steps:
- restore_cached_venv:
venv_name: v15-deposit-contract-compiler
venv_name: v16-deposit-contract-compiler
reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }}
save_deposit_contract_compiler_cached_venv:
description: "Save the venv to cache for later use of the deposit contract compiler"
steps:
- save_cached_venv:
venv_name: v15-deposit-contract-compiler
venv_name: v16-deposit-contract-compiler
reqs_checksum: cache-{{ checksum "deposit_contract/compiler/requirements.txt" }}
venv_path: ./deposit_contract/compiler/venv
restore_deposit_contract_tester_cached_venv:
description: "Restore the venv from cache for the deposit contract tester"
steps:
- restore_cached_venv:
venv_name: v15-deposit-contract-tester
venv_name: v17-deposit-contract-tester
reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }}
save_deposit_contract_tester_cached_venv:
description: "Save the venv to cache for later use of the deposit contract tester"
steps:
- save_cached_venv:
venv_name: v15-deposit-contract-tester
venv_name: v17-deposit-contract-tester
reqs_checksum: cache-{{ checksum "tests/core/pyspec/requirements.txt" }}-{{ checksum "deposit_contract/tester/requirements.txt" }}
venv_path: ./deposit_contract/tester/venv
jobs:

.gitignore vendored (3 changes)
View File

@@ -20,6 +20,9 @@ tests/core/pyspec/eth2spec/phase1/spec.py
# coverage reports
.htmlcov
.coverage
.coverage.*
# local CI testing output
tests/core/pyspec/test-reports
*.egg-info

View File

@@ -2,3 +2,4 @@ eth-tester[py-evm]>=0.3.0b1,<0.4
web3==5.4.0
pytest==3.6.1
../../tests/core/pyspec
../../tests/core/config_helpers

View File

@@ -25,14 +25,14 @@ from dataclasses import (
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
boolean, Container, List, Vector, uint64, SSZType,
View, boolean, Container, List, Vector, uint64,
Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
)
from eth2spec.utils import bls
from eth2spec.utils.hash_function import hash
SSZObject = TypeVar('SSZObject', bound=SSZType)
SSZObject = TypeVar('SSZObject', bound=View)
'''
PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0
from eth2spec.config.apply_config import apply_constants_preset
@@ -47,9 +47,8 @@ from dataclasses import (
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
SSZType, Container, List, Vector, ByteList, ByteVector, Bitlist, Bitvector,
Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96,
uint64, uint8, bit, boolean,
View, boolean, Container, List, Vector, uint64, uint8, bit,
ByteVector, ByteList, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
)
from eth2spec.utils import bls
@@ -58,7 +57,7 @@ from eth2spec.utils.hash_function import hash
SSZVariableName = str
GeneralizedIndex = NewType('GeneralizedIndex', int)
SSZObject = TypeVar('SSZObject', bound=SSZType)
SSZObject = TypeVar('SSZObject', bound=View)
'''
SUNDRY_CONSTANTS_FUNCTIONS = '''
def ceillog2(x: uint64) -> int:
@@ -80,27 +79,40 @@ def hash(x: bytes) -> Bytes32: # type: ignore
return hash_cache[x]
# Monkey patch validator compute committee code
_compute_committee = compute_committee
committee_cache: Dict[Tuple[Bytes32, Bytes32, int, int], Sequence[ValidatorIndex]] = {}
def cache_this(key_fn, value_fn): # type: ignore
cache_dict = {} # type: ignore
def wrapper(*args, **kw): # type: ignore
key = key_fn(*args, **kw)
nonlocal cache_dict
if key not in cache_dict:
cache_dict[key] = value_fn(*args, **kw)
return cache_dict[key]
return wrapper
def compute_committee(indices: Sequence[ValidatorIndex], # type: ignore
seed: Bytes32,
index: int,
count: int) -> Sequence[ValidatorIndex]:
param_hash = (hash(b''.join(index.to_bytes(length=4, byteorder='little') for index in indices)), seed, index, count)
get_base_reward = cache_this(
lambda state, index: (state.validators.hash_tree_root(), state.slot),
get_base_reward)
if param_hash not in committee_cache:
committee_cache[param_hash] = _compute_committee(indices, seed, index, count)
return committee_cache[param_hash]'''
get_committee_count_at_slot = cache_this(
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
get_committee_count_at_slot)
get_active_validator_indices = cache_this(
lambda state, epoch: (state.validators.hash_tree_root(), epoch),
get_active_validator_indices)
get_beacon_committee = cache_this(
lambda state, slot, index: (state.validators.hash_tree_root(), state.randao_mixes.hash_tree_root(), slot, index),
get_beacon_committee)'''
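The injected sundry functions replace the bespoke `compute_committee` cache with a generic `cache_this` memoization wrapper, applied to several state accessors and keyed on cheap fingerprints such as `state.validators.hash_tree_root()`. A minimal standalone sketch of the pattern; `slow_square` is a made-up stand-in for an expensive spec accessor:

```python
# Illustrative only: mirrors the cache_this wrapper injected above.
def cache_this(key_fn, value_fn):
    cache_dict = {}

    def wrapper(*args, **kw):
        key = key_fn(*args, **kw)
        if key not in cache_dict:
            cache_dict[key] = value_fn(*args, **kw)  # compute once per key
        return cache_dict[key]
    return wrapper


def slow_square(x: int) -> int:
    return x * x  # stand-in for an expensive accessor like get_beacon_committee


# Re-bind the name to its cached version, as the spec does for get_base_reward etc.
slow_square = cache_this(lambda x: x, slow_square)
assert slow_square(3) == 9  # computed
assert slow_square(3) == 9  # served from the cache
```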
def objects_to_spec(functions: Dict[str, str],
custom_types: Dict[str, str],
constants: Dict[str, str],
ssz_objects: Dict[str, str],
imports: Dict[str, str],
imports: str,
version: str,
) -> str:
"""
@@ -201,7 +213,7 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
custom_types = combine_constants(custom_types0, custom_types1)
constants = combine_constants(constants0, constants1)
ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
return functions, custom_types, constants, ssz_objects
return SpecObject((functions, custom_types, constants, ssz_objects))
def dependency_order_spec(objs: SpecObject):

View File

@@ -1,81 +1,73 @@
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Ethereum 2.0 Phase 1 -- The Beacon Chain for Shards](#ethereum-20-phase-1----the-beacon-chain-for-shards)
- [Table of contents](#table-of-contents)
- [Introduction](#introduction)
- [Custom types](#custom-types)
- [Configuration](#configuration)
- [Misc](#misc)
- [Updated containers](#updated-containers)
- [Extended `AttestationData`](#extended-attestationdata)
- [Extended `Attestation`](#extended-attestation)
- [Extended `PendingAttestation`](#extended-pendingattestation)
- [`IndexedAttestation`](#indexedattestation)
- [Extended `AttesterSlashing`](#extended-attesterslashing)
- [Extended `Validator`](#extended-validator)
- [Extended `BeaconBlockBody`](#extended-beaconblockbody)
- [Extended `BeaconBlock`](#extended-beaconblock)
- [Extended `SignedBeaconBlock`](#extended-signedbeaconblock)
- [Extended `BeaconState`](#extended-beaconstate)
- [New containers](#new-containers)
- [`ShardBlockWrapper`](#shardblockwrapper)
- [`ShardSignableHeader`](#shardsignableheader)
- [`ShardState`](#shardstate)
- [`ShardTransition`](#shardtransition)
- [`CompactCommittee`](#compactcommittee)
- [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper)
- [Helper functions](#helper-functions)
- [Misc](#misc-1)
- [`get_previous_slot`](#get_previous_slot)
- [`pack_compact_validator`](#pack_compact_validator)
- [`committee_to_compact_committee`](#committee_to_compact_committee)
- [`chunks_to_body_root`](#chunks_to_body_root)
- [`compute_shard_from_committee_index`](#compute_shard_from_committee_index)
- [Beacon state accessors](#beacon-state-accessors)
- [`get_active_shard_count`](#get_active_shard_count)
- [`get_online_validator_indices`](#get_online_validator_indices)
- [`get_shard_committee`](#get_shard_committee)
- [`get_shard_proposer_index`](#get_shard_proposer_index)
- [`get_light_client_committee`](#get_light_client_committee)
- [`get_indexed_attestation`](#get_indexed_attestation)
- [`get_updated_gasprice`](#get_updated_gasprice)
- [`get_start_shard`](#get_start_shard)
- [`get_shard`](#get_shard)
- [`get_next_slot_for_shard`](#get_next_slot_for_shard)
- [`get_offset_slots`](#get_offset_slots)
- [Predicates](#predicates)
- [Updated `is_valid_indexed_attestation`](#updated-is_valid_indexed_attestation)
- [Block processing](#block-processing)
- [Operations](#operations)
- [New Attestation processing](#new-attestation-processing)
- [`validate_attestation`](#validate_attestation)
- [`apply_shard_transition`](#apply_shard_transition)
- [`process_crosslink_for_shard`](#process_crosslink_for_shard)
- [`process_crosslinks`](#process_crosslinks)
- [`process_attestations`](#process_attestations)
- [New Attester slashing processing](#new-attester-slashing-processing)
- [Shard transition false positives](#shard-transition-false-positives)
- [Light client processing](#light-client-processing)
- [Epoch transition](#epoch-transition)
- [Custody game updates](#custody-game-updates)
- [Online-tracking](#online-tracking)
- [Light client committee updates](#light-client-committee-updates)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
# Ethereum 2.0 Phase 1 -- The Beacon Chain for Shards
**Notice**: This document is a work-in-progress for researchers and implementers.
## Table of contents
<!-- TOC -->
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents**
TODO
- [Introduction](#introduction)
- [Custom types](#custom-types)
- [Configuration](#configuration)
- [Misc](#misc)
- [Updated containers](#updated-containers)
- [Extended `AttestationData`](#extended-attestationdata)
- [Extended `Attestation`](#extended-attestation)
- [Extended `PendingAttestation`](#extended-pendingattestation)
- [`IndexedAttestation`](#indexedattestation)
- [Extended `AttesterSlashing`](#extended-attesterslashing)
- [Extended `Validator`](#extended-validator)
- [Extended `BeaconBlockBody`](#extended-beaconblockbody)
- [Extended `BeaconBlock`](#extended-beaconblock)
- [Extended `SignedBeaconBlock`](#extended-signedbeaconblock)
- [Extended `BeaconState`](#extended-beaconstate)
- [New containers](#new-containers)
- [`ShardBlockWrapper`](#shardblockwrapper)
- [`ShardSignableHeader`](#shardsignableheader)
- [`ShardState`](#shardstate)
- [`ShardTransition`](#shardtransition)
- [`CompactCommittee`](#compactcommittee)
- [`AttestationCustodyBitWrapper`](#attestationcustodybitwrapper)
- [Helper functions](#helper-functions)
- [Misc](#misc-1)
- [`get_previous_slot`](#get_previous_slot)
- [`pack_compact_validator`](#pack_compact_validator)
- [`committee_to_compact_committee`](#committee_to_compact_committee)
- [`chunks_to_body_root`](#chunks_to_body_root)
- [`compute_shard_from_committee_index`](#compute_shard_from_committee_index)
- [Beacon state accessors](#beacon-state-accessors)
- [`get_active_shard_count`](#get_active_shard_count)
- [`get_online_validator_indices`](#get_online_validator_indices)
- [`get_shard_committee`](#get_shard_committee)
- [`get_shard_proposer_index`](#get_shard_proposer_index)
- [`get_light_client_committee`](#get_light_client_committee)
- [`get_indexed_attestation`](#get_indexed_attestation)
- [`get_updated_gasprice`](#get_updated_gasprice)
- [`get_start_shard`](#get_start_shard)
- [`get_shard`](#get_shard)
- [`get_next_slot_for_shard`](#get_next_slot_for_shard)
- [`get_offset_slots`](#get_offset_slots)
- [Predicates](#predicates)
- [Updated `is_valid_indexed_attestation`](#updated-is_valid_indexed_attestation)
- [Block processing](#block-processing)
- [Operations](#operations)
- [New Attestation processing](#new-attestation-processing)
- [`validate_attestation`](#validate_attestation)
- [`apply_shard_transition`](#apply_shard_transition)
- [`process_crosslink_for_shard`](#process_crosslink_for_shard)
- [`process_crosslinks`](#process_crosslinks)
- [`process_attestations`](#process_attestations)
- [New Attester slashing processing](#new-attester-slashing-processing)
- [Shard transition false positives](#shard-transition-false-positives)
- [Light client processing](#light-client-processing)
- [Epoch transition](#epoch-transition)
- [Custody game updates](#custody-game-updates)
- [Online-tracking](#online-tracking)
- [Light client committee updates](#light-client-committee-updates)
<!-- /TOC -->
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Introduction

View File

@@ -1,21 +1,23 @@
from typing import Any
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
SSZType, SSZValue, uint, Container, ByteList, List, boolean,
Vector, ByteVector
uint, Container, List, boolean,
Vector, ByteVector, ByteList
)
def decode(data: Any, typ: SSZType) -> SSZValue:
def decode(data: Any, typ):
if issubclass(typ, (uint, boolean)):
return typ(data)
elif issubclass(typ, (List, Vector)):
return typ(decode(element, typ.elem_type) for element in data)
elif issubclass(typ, (ByteList, ByteVector)):
return typ(decode(element, typ.element_cls()) for element in data)
elif issubclass(typ, ByteVector):
return typ(bytes.fromhex(data[2:]))
elif issubclass(typ, ByteList):
return typ(bytes.fromhex(data[2:]))
elif issubclass(typ, Container):
temp = {}
for field_name, field_type in typ.get_fields().items():
for field_name, field_type in typ.fields().items():
temp[field_name] = decode(data[field_name], field_type)
if field_name + "_hash_tree_root" in data:
assert (data[field_name + "_hash_tree_root"][2:] ==

View File

@@ -1,27 +1,30 @@
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, serialize
from eth2spec.utils.ssz.ssz_typing import (
uint, boolean,
Bitlist, Bitvector, Container
Bitlist, Bitvector, Container, Vector, List
)
def encode(value, include_hash_tree_roots=False):
if isinstance(value, uint):
# Larger uints are boxed and the class declares their byte length
if value.type().byte_len > 8:
if value.__class__.type_byte_length() > 8:
return str(int(value))
return int(value)
elif isinstance(value, boolean):
return value == 1
elif isinstance(value, (Bitlist, Bitvector)):
return '0x' + serialize(value).hex()
elif isinstance(value, list): # normal python lists, ssz-List, Vector
elif isinstance(value, list): # normal python lists
return [encode(element, include_hash_tree_roots) for element in value]
elif isinstance(value, bytes): # both bytes and ByteVector
elif isinstance(value, (List, Vector)):
return [encode(element, include_hash_tree_roots) for element in value]
elif isinstance(value, bytes): # bytes, ByteList, ByteVector
return '0x' + value.hex()
elif isinstance(value, Container):
ret = {}
for field_value, field_name in zip(value, value.get_fields().keys()):
for field_name in value.fields().keys():
field_value = getattr(value, field_name)
ret[field_name] = encode(field_value, include_hash_tree_roots)
if include_hash_tree_roots:
ret[field_name + "_hash_tree_root"] = '0x' + hash_tree_root(field_value).hex()
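With remerkleable, containers are no longer iterated by value; the encoder walks the class-level `fields()` map and reads each field with `getattr`, as shown above. A hedged sketch of that pattern, assuming remerkleable is installed; `Foo` is a made-up container:

```python
from remerkleable.basic import uint64
from remerkleable.complex import Container


class Foo(Container):  # illustrative container, not part of the spec
    a: uint64
    b: uint64


foo = Foo(a=1, b=2)
# Old API: value.get_fields() on the instance; containers were directly iterable.
# New API: class-level fields() plus getattr, as encode() now does.
for field_name, field_type in Foo.fields().items():
    print(field_name, field_type, getattr(foo, field_name))
```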

View File

@@ -1,9 +1,11 @@
from random import Random
from enum import Enum
from typing import Type
from eth2spec.utils.ssz.ssz_typing import (
SSZType, SSZValue, BasicValue, BasicType, uint, Container, ByteList, List, boolean,
Vector, ByteVector, Bitlist, Bitvector
View, BasicView, uint, Container, List, boolean,
Vector, ByteVector, ByteList, Bitlist, Bitvector
)
# in bytes
@@ -34,11 +36,11 @@ class RandomizationMode(Enum):
def get_random_ssz_object(rng: Random,
typ: SSZType,
typ: Type[View],
max_bytes_length: int,
max_list_length: int,
mode: RandomizationMode,
chaos: bool) -> SSZValue:
chaos: bool) -> View:
"""
Create an object for a given type, filled with random data.
:param rng: The random number generator to use.
@@ -56,26 +58,26 @@ def get_random_ssz_object(rng: Random,
if mode == RandomizationMode.mode_nil_count:
return typ(b'')
elif mode == RandomizationMode.mode_max_count:
return typ(get_random_bytes_list(rng, min(max_bytes_length, typ.length)))
return typ(get_random_bytes_list(rng, min(max_bytes_length, typ.limit())))
elif mode == RandomizationMode.mode_one_count:
return typ(get_random_bytes_list(rng, min(1, typ.length)))
return typ(get_random_bytes_list(rng, min(1, typ.limit())))
elif mode == RandomizationMode.mode_zero:
return typ(b'\x00' * min(1, typ.length))
return typ(b'\x00' * min(1, typ.limit()))
elif mode == RandomizationMode.mode_max:
return typ(b'\xff' * min(1, typ.length))
return typ(b'\xff' * min(1, typ.limit()))
else:
return typ(get_random_bytes_list(rng, rng.randint(0, min(max_bytes_length, typ.length))))
elif issubclass(typ, ByteVector):
return typ(get_random_bytes_list(rng, rng.randint(0, min(max_bytes_length, typ.limit()))))
if issubclass(typ, ByteVector):
# Sanity, don't generate absurdly big random values
# If a client is aiming to performance-test, they should create a benchmark suite.
assert typ.length <= max_bytes_length
assert typ.type_byte_length() <= max_bytes_length
if mode == RandomizationMode.mode_zero:
return typ(b'\x00' * typ.length)
return typ(b'\x00' * typ.type_byte_length())
elif mode == RandomizationMode.mode_max:
return typ(b'\xff' * typ.length)
return typ(b'\xff' * typ.type_byte_length())
else:
return typ(get_random_bytes_list(rng, typ.length))
elif issubclass(typ, BasicValue):
return typ(get_random_bytes_list(rng, typ.type_byte_length()))
elif issubclass(typ, (boolean, uint)):
# Basic types
if mode == RandomizationMode.mode_zero:
return get_min_basic_value(typ)
@@ -83,13 +85,14 @@ def get_random_ssz_object(rng: Random,
return get_max_basic_value(typ)
else:
return get_random_basic_value(rng, typ)
elif issubclass(typ, Vector) or issubclass(typ, Bitvector):
elif issubclass(typ, (Vector, Bitvector)):
elem_type = typ.element_cls() if issubclass(typ, Vector) else boolean
return typ(
get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, chaos)
for _ in range(typ.length)
get_random_ssz_object(rng, elem_type, max_bytes_length, max_list_length, mode, chaos)
for _ in range(typ.vector_length())
)
elif issubclass(typ, List) or issubclass(typ, Bitlist):
length = rng.randint(0, min(typ.length, max_list_length))
length = rng.randint(0, min(typ.limit(), max_list_length))
if mode == RandomizationMode.mode_one_count:
length = 1
elif mode == RandomizationMode.mode_max_count:
@@ -97,19 +100,21 @@ def get_random_ssz_object(rng: Random,
elif mode == RandomizationMode.mode_nil_count:
length = 0
if typ.length < length: # SSZ imposes a hard limit on lists, we can't put in more than that
length = typ.length
if typ.limit() < length: # SSZ imposes a hard limit on lists, we can't put in more than that
length = typ.limit()
elem_type = typ.element_cls() if issubclass(typ, List) else boolean
return typ(
get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, chaos)
get_random_ssz_object(rng, elem_type, max_bytes_length, max_list_length, mode, chaos)
for _ in range(length)
)
elif issubclass(typ, Container):
fields = typ.fields()
# Container
return typ(**{
field_name:
get_random_ssz_object(rng, field_type, max_bytes_length, max_list_length, mode, chaos)
for field_name, field_type in typ.get_fields().items()
for field_name, field_type in fields.items()
})
else:
raise Exception(f"Type not recognized: typ={typ}")
@@ -119,31 +124,31 @@ def get_random_bytes_list(rng: Random, length: int) -> bytes:
return bytes(rng.getrandbits(8) for _ in range(length))
def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue:
def get_random_basic_value(rng: Random, typ) -> BasicView:
if issubclass(typ, boolean):
return typ(rng.choice((True, False)))
elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES
return typ(rng.randint(0, 256 ** typ.byte_len - 1))
assert typ.type_byte_length() in UINT_BYTE_SIZES
return typ(rng.randint(0, 256 ** typ.type_byte_length() - 1))
else:
raise ValueError(f"Not a basic type: typ={typ}")
def get_min_basic_value(typ: BasicType) -> BasicValue:
def get_min_basic_value(typ) -> BasicView:
if issubclass(typ, boolean):
return typ(False)
elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES
assert typ.type_byte_length() in UINT_BYTE_SIZES
return typ(0)
else:
raise ValueError(f"Not a basic type: typ={typ}")
def get_max_basic_value(typ: BasicType) -> BasicValue:
def get_max_basic_value(typ) -> BasicView:
if issubclass(typ, boolean):
return typ(True)
elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES
return typ(256 ** typ.byte_len - 1)
assert typ.type_byte_length() in UINT_BYTE_SIZES
return typ(256 ** typ.type_byte_length() - 1)
else:
raise ValueError(f"Not a basic type: typ={typ}")
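Calling the updated generator is unchanged apart from the type argument now being any remerkleable `View` class. A hedged usage sketch, assuming the pyspec package (and remerkleable) are installed; the lengths and seed are arbitrary:

```python
from random import Random

from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
from eth2spec.utils.ssz.ssz_typing import List, uint16

rng = Random(42)
typ = List[uint16, 32]  # any remerkleable View type works with the new signature
value = get_random_ssz_object(
    rng,
    typ,
    max_bytes_length=100,
    max_list_length=10,
    mode=RandomizationMode.mode_random,
    chaos=False,
)
assert isinstance(value, typ)
```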

View File

@@ -1,87 +0,0 @@
from eth2spec.utils.ssz import ssz_typing as spec_ssz
import ssz
def translate_typ(typ) -> ssz.BaseSedes:
"""
Translates a spec type to a Py-SSZ type description (sedes).
:param typ: The spec type, a class.
:return: The Py-SSZ equivalent.
"""
if issubclass(typ, spec_ssz.Container):
return ssz.Container(
[translate_typ(field_typ) for field_name, field_typ in typ.get_fields().items()])
elif issubclass(typ, spec_ssz.ByteVector):
return ssz.ByteVector(typ.length)
elif issubclass(typ, spec_ssz.ByteList):
return ssz.ByteList()
elif issubclass(typ, spec_ssz.Vector):
return ssz.Vector(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.List):
return ssz.List(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.Bitlist):
return ssz.Bitlist(typ.length)
elif issubclass(typ, spec_ssz.Bitvector):
return ssz.Bitvector(typ.length)
elif issubclass(typ, spec_ssz.boolean):
return ssz.boolean
elif issubclass(typ, spec_ssz.uint):
if typ.byte_len == 1:
return ssz.uint8
elif typ.byte_len == 2:
return ssz.uint16
elif typ.byte_len == 4:
return ssz.uint32
elif typ.byte_len == 8:
return ssz.uint64
elif typ.byte_len == 16:
return ssz.uint128
elif typ.byte_len == 32:
return ssz.uint256
else:
raise TypeError("invalid uint size")
else:
raise TypeError("Type not supported: {}".format(typ))
def translate_value(value, typ):
"""
Translate a value output from Py-SSZ deserialization into the given spec type.
:param value: The PySSZ value
:param typ: The type from the spec to translate into
:return: the translated value
"""
if issubclass(typ, spec_ssz.uint):
if typ.byte_len == 1:
return spec_ssz.uint8(value)
elif typ.byte_len == 2:
return spec_ssz.uint16(value)
elif typ.byte_len == 4:
return spec_ssz.uint32(value)
elif typ.byte_len == 8:
return spec_ssz.uint64(value)
elif typ.byte_len == 16:
return spec_ssz.uint128(value)
elif typ.byte_len == 32:
return spec_ssz.uint256(value)
else:
raise TypeError("invalid uint size")
elif issubclass(typ, spec_ssz.List):
return [translate_value(elem, typ.elem_type) for elem in value]
elif issubclass(typ, spec_ssz.boolean):
return value
elif issubclass(typ, spec_ssz.Vector):
return typ(*(translate_value(elem, typ.elem_type) for elem in value))
elif issubclass(typ, spec_ssz.Bitlist):
return typ(value)
elif issubclass(typ, spec_ssz.Bitvector):
return typ(value)
elif issubclass(typ, spec_ssz.ByteVector):
return typ(value)
elif issubclass(typ, spec_ssz.ByteList):
return value
if issubclass(typ, spec_ssz.Container):
return typ(**{f_name: translate_value(f_val, f_typ) for (f_val, (f_name, f_typ))
in zip(value, typ.get_fields().items())})
else:
raise TypeError("Type not supported: {}".format(typ))

View File

@@ -1,35 +0,0 @@
from eth2spec.fuzzing.decoder import translate_typ, translate_value
from eth2spec.phase0 import spec
from eth2spec.utils.ssz import ssz_impl as spec_ssz_impl
from random import Random
from eth2spec.debug import random_value
def test_decoder():
rng = Random(123)
# check these types only, Block covers a lot of operation types already.
for typ in [spec.Attestation, spec.BeaconState, spec.BeaconBlock]:
# create a random pyspec value
original = random_value.get_random_ssz_object(rng, typ, 100, 10,
mode=random_value.RandomizationMode.mode_random,
chaos=True)
# serialize it, using pyspec
pyspec_data = spec_ssz_impl.serialize(original)
# get the py-ssz type for it
block_sedes = translate_typ(typ)
# try decoding using the py-ssz type
raw_value = block_sedes.deserialize(pyspec_data)
# serialize it using py-ssz
pyssz_data = block_sedes.serialize(raw_value)
# now check if the serialized form is equal. If so, we confirmed decoding and encoding to work.
assert pyspec_data == pyssz_data
# now translate the py-ssz value in a pyspec-value
block = translate_value(raw_value, typ)
# and see if the hash-tree-root of the original matches the hash-tree-root of the decoded & translated value.
original_hash_tree_root = spec_ssz_impl.hash_tree_root(original)
assert original_hash_tree_root == spec_ssz_impl.hash_tree_root(block)
assert original_hash_tree_root == block_sedes.get_hash_tree_root(raw_value)

View File

@@ -1,6 +1,7 @@
from eth2spec.config import apply_config
from eth2spec.test.context import reload_specs
# We import pytest only when it's present, i.e. when we are running tests.
# The test-cases themselves can be generated without installing pytest.

View File

@@ -1,12 +1,10 @@
from copy import deepcopy
from eth2spec.test.helpers.attestations import get_valid_attestation, sign_attestation
def get_valid_attester_slashing(spec, state, signed_1=False, signed_2=False):
attestation_1 = get_valid_attestation(spec, state, signed=signed_1)
attestation_2 = deepcopy(attestation_1)
attestation_2 = attestation_1.copy()
attestation_2.data.target.root = b'\x01' * 32
if signed_2:

View File

@@ -1,5 +1,3 @@
from copy import deepcopy
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils import bls
from eth2spec.utils.bls import only_with_bls
@@ -16,7 +14,7 @@ def get_proposer_index_maybe(spec, state, slot, proposer_index=None):
print("warning: block slot far away, and no proposer index manually given."
" Signing block is slow due to transition for proposer index calculation.")
# use stub state to get proposer index of future slot
stub_state = deepcopy(state)
stub_state = state.copy()
spec.process_slots(stub_state, slot)
proposer_index = spec.get_beacon_proposer_index(stub_state)
return proposer_index
@@ -72,9 +70,9 @@ def build_empty_block(spec, state, slot=None):
empty_block = spec.BeaconBlock()
empty_block.slot = slot
empty_block.body.eth1_data.deposit_count = state.eth1_deposit_index
previous_block_header = deepcopy(state.latest_block_header)
previous_block_header = state.latest_block_header.copy()
if previous_block_header.state_root == spec.Root():
previous_block_header.state_root = state.hash_tree_root()
previous_block_header.state_root = hash_tree_root(state)
empty_block.parent_root = hash_tree_root(previous_block_header)
apply_randao_reveal(spec, state, empty_block)
return empty_block
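Remerkleable views carry their own `copy()`, which clones the backing tree, so the helpers drop `copy.deepcopy`. A hedged sketch of the behaviour on a made-up container (the real code copies `state.latest_block_header`):

```python
from remerkleable.basic import uint64
from remerkleable.byte_arrays import Bytes32
from remerkleable.complex import Container


class Header(Container):  # illustrative stand-in for BeaconBlockHeader
    slot: uint64
    state_root: Bytes32


original = Header(slot=1)
duplicate = original.copy()  # tree-backed copy, replaces copy.deepcopy(original)
duplicate.state_root = Bytes32(b'\x44' * 32)

assert original.state_root == Bytes32()  # the original view is untouched
assert original.hash_tree_root() != duplicate.hash_tree_root()
```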

View File

@@ -1,8 +1,10 @@
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils import bls
from eth2spec.utils.ssz.ssz_typing import Bitlist, ByteVector, Bitvector
from eth2spec.utils.ssz.ssz_impl import chunkify, pack, hash_tree_root
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.merkle_minimal import get_merkle_tree, get_merkle_proof
from remerkleable.core import pack_bits_to_chunks
from remerkleable.tree import subtree_fill_to_contents, get_depth
BYTES_PER_CHUNK = 32
@@ -119,10 +121,11 @@ def get_valid_custody_response(spec, state, bit_challenge, custody_data, challen
data_branch = get_merkle_proof(data_tree, chunk_index)
bitlist_chunk_index = chunk_index // BYTES_PER_CHUNK
bitlist_chunks = chunkify(pack(bit_challenge.chunk_bits))
bitlist_tree = get_merkle_tree(bitlist_chunks, pad_to=spec.MAX_CUSTODY_CHUNKS // 256)
bitlist_chunk_branch = get_merkle_proof(bitlist_tree, chunk_index // 256) + \
[len(bit_challenge.chunk_bits).to_bytes(32, "little")]
print(bitlist_chunk_index)
bitlist_chunk_nodes = pack_bits_to_chunks(bit_challenge.chunk_bits)
bitlist_tree = subtree_fill_to_contents(bitlist_chunk_nodes, get_depth(spec.MAX_CUSTODY_CHUNKS))
print(bitlist_tree)
bitlist_chunk_branch = None # TODO; extract proof from merkle tree
bitlist_chunk_index = chunk_index // 256
@@ -145,4 +148,4 @@ def get_custody_test_vector(bytelength):
def get_custody_merkle_root(data):
return get_merkle_tree(chunkify(data))[-1][0]
return None # get_merkle_tree(chunkify(data))[-1][0]
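The custody helper now builds the bitlist contents tree with remerkleable's low-level tree helpers instead of the removed `chunkify`/`pack` utilities. A hedged sketch of those helpers in isolation; the bit pattern and the 64-chunk limit are arbitrary illustration values:

```python
from remerkleable.core import pack_bits_to_chunks
from remerkleable.tree import get_depth, subtree_fill_to_contents

# Assumption: a plain Python list of bools is accepted here,
# as the Bitlist view is in the helper above.
bits = [True, False, True, True]
chunk_nodes = pack_bits_to_chunks(bits)  # bits packed little-endian into 32-byte chunk nodes
tree = subtree_fill_to_contents(chunk_nodes, get_depth(64))  # pad up to a 64-chunk subtree
print(tree.merkle_root().hex())  # root of the (length-mix-free) bit contents
```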

View File

@@ -1,4 +1,3 @@
import copy
from eth2spec.test.helpers.keys import pubkeys
@@ -35,7 +34,7 @@ def create_genesis_state(spec, validator_balances, activation_threshold):
# We "hack" in the initial validators,
# as it is much faster than creating and processing genesis deposits for every single test case.
state.balances = copy.deepcopy(validator_balances)
state.balances = validator_balances
state.validators = [build_mock_validator(spec, i, state.balances[i]) for i in range(len(validator_balances))]
# Process genesis activations

View File

@@ -1,5 +1,3 @@
from copy import deepcopy
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils import bls
from eth2spec.utils.bls import only_with_bls
@@ -32,12 +30,12 @@ def build_empty_shard_block(spec,
if slot is None:
slot = shard_state.slot
previous_beacon_header = deepcopy(beacon_state.latest_block_header)
previous_beacon_header = beacon_state.latest_block_header.copy()
if previous_beacon_header.state_root == spec.Bytes32():
previous_beacon_header.state_root = beacon_state.hash_tree_root()
beacon_block_root = hash_tree_root(previous_beacon_header)
previous_block_header = deepcopy(shard_state.latest_block_header)
previous_block_header = shard_state.latest_block_header.copy()
if previous_block_header.state_root == spec.Bytes32():
previous_block_header.state_root = shard_state.hash_tree_root()
parent_root = hash_tree_root(previous_block_header)

View File

@@ -1,5 +1,3 @@
from copy import deepcopy
from eth2spec.test.helpers.block_header import sign_block_header
from eth2spec.test.helpers.keys import pubkey_to_privkey
@@ -14,9 +12,9 @@ def get_valid_proposer_slashing(spec, state, signed_1=False, signed_2=False):
slot=slot,
parent_root=b'\x33' * 32,
state_root=b'\x44' * 32,
block_body_root=b'\x55' * 32,
body_root=b'\x55' * 32,
)
header_2 = deepcopy(header_1)
header_2 = header_1.copy()
header_2.parent_root = b'\x99' * 32
if signed_1:

View File

@@ -1,5 +1,3 @@
from copy import deepcopy
from eth2spec.test.context import expect_assertion_error
from eth2spec.test.helpers.attestations import get_valid_attestation
from eth2spec.test.helpers.block import sign_block, build_empty_block_for_next_slot, transition_unsigned_block
@@ -61,7 +59,7 @@ def next_epoch_with_attestations(spec,
fill_prev_epoch):
assert state.slot % spec.SLOTS_PER_EPOCH == 0
post_state = deepcopy(state)
post_state = state.copy()
signed_blocks = []
for _ in range(spec.SLOTS_PER_EPOCH):
block = build_empty_block_for_next_slot(spec, post_state)

View File

@@ -269,7 +269,7 @@ def test_att2_bad_replaced_index(spec, state):
def test_att1_duplicate_index_normal_signed(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True)
indices = attester_slashing.attestation_1.attesting_indices
indices = list(attester_slashing.attestation_1.attesting_indices)
indices.pop(1) # remove an index, make room for the additional duplicate index.
attester_slashing.attestation_1.attesting_indices = sorted(indices)
@@ -289,7 +289,7 @@ def test_att1_duplicate_index_normal_signed(spec, state):
def test_att2_duplicate_index_normal_signed(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=False)
indices = attester_slashing.attestation_2.attesting_indices
indices = list(attester_slashing.attestation_2.attesting_indices)
indices.pop(2) # remove an index, make room for the additional duplicate index.
attester_slashing.attestation_2.attesting_indices = sorted(indices)
@@ -309,7 +309,7 @@ def test_att2_duplicate_index_normal_signed(spec, state):
def test_att1_duplicate_index_double_signed(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=False, signed_2=True)
indices = attester_slashing.attestation_1.attesting_indices
indices = list(attester_slashing.attestation_1.attesting_indices)
indices.pop(1) # remove an index, make room for the additional duplicate index.
indices.append(indices[2]) # add one of the indices a second time
attester_slashing.attestation_1.attesting_indices = sorted(indices)
@@ -324,7 +324,7 @@ def test_att1_duplicate_index_double_signed(spec, state):
def test_att2_duplicate_index_double_signed(spec, state):
attester_slashing = get_valid_attester_slashing(spec, state, signed_1=True, signed_2=False)
indices = attester_slashing.attestation_2.attesting_indices
indices = list(attester_slashing.attestation_2.attesting_indices)
indices.pop(1) # remove an index, make room for the additional duplicate index.
indices.append(indices[2]) # add one of the indices a second time
attester_slashing.attestation_2.attesting_indices = sorted(indices)
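Remerkleable list views are not Python lists, so these tests now materialize the indices, edit them, and assign the result back (the container setter coerces the plain list into an SSZ list again). A hedged standalone sketch of the same pattern, with made-up values:

```python
from eth2spec.utils.ssz.ssz_typing import List, uint64

indices_view = List[uint64, 16](uint64(3), uint64(5), uint64(8))
indices = list(indices_view)   # materialize as a mutable Python list
indices.pop(1)                 # remove an index
indices.append(indices[0])     # add a duplicate
indices_view = List[uint64, 16](sorted(indices))  # rebuild the SSZ view
```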

View File

@@ -1,6 +1,6 @@
from typing import Dict, Any
from eth2spec.debug.encode import encode
from eth2spec.utils.ssz.ssz_typing import SSZValue
from eth2spec.utils.ssz.ssz_typing import View
from eth2spec.utils.ssz.ssz_impl import serialize
@@ -38,15 +38,15 @@ def vector_test(description: str = None):
(key, value) = data
if value is None:
continue
if isinstance(value, SSZValue):
if isinstance(value, View):
yield key, 'data', encode(value)
yield key, 'ssz', serialize(value)
elif isinstance(value, bytes):
yield key, 'data', encode(value)
yield key, 'ssz', value
elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]):
elif isinstance(value, list) and all([isinstance(el, (View, bytes)) for el in value]):
for i, el in enumerate(value):
if isinstance(el, SSZValue):
if isinstance(el, View):
yield f'{key}_{i}', 'data', encode(el)
yield f'{key}_{i}', 'ssz', serialize(el)
elif isinstance(el, bytes):

View File

@@ -1,157 +1,10 @@
from ..merkle_minimal import merkleize_chunks
from ..hash_function import hash
from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bits, boolean, Container, List, ByteList,
Bitlist, Bitvector, uint, Bytes32
)
# SSZ Serialization
# -----------------------------
BYTES_PER_LENGTH_OFFSET = 4
from remerkleable.core import View
from remerkleable.byte_arrays import Bytes32
def serialize_basic(value: SSZValue):
if isinstance(value, uint):
return value.to_bytes(value.type().byte_len, 'little')
elif isinstance(value, boolean):
if value:
return b'\x01'
else:
return b'\x00'
else:
raise Exception(f"Type not supported: {type(value)}")
def serialize(obj: View) -> bytes:
return obj.encode_bytes()
def deserialize_basic(value, typ: BasicType):
if issubclass(typ, uint):
return typ(int.from_bytes(value, 'little'))
elif issubclass(typ, boolean):
assert value in (b'\x00', b'\x01')
return typ(value == b'\x01')
else:
raise Exception(f"Type not supported: {typ}")
def is_zero(obj: SSZValue):
return type(obj).default() == obj
def serialize(obj: SSZValue):
if isinstance(obj, BasicValue):
return serialize_basic(obj)
elif isinstance(obj, Bitvector):
return obj.as_bytes()
elif isinstance(obj, Bitlist):
as_bytearray = list(obj.as_bytes())
if len(obj) % 8 == 0:
as_bytearray.append(1)
else:
as_bytearray[len(obj) // 8] |= 1 << (len(obj) % 8)
return bytes(as_bytearray)
elif isinstance(obj, Series):
return encode_series(obj)
else:
raise Exception(f"Type not supported: {type(obj)}")
def encode_series(values: Series):
if isinstance(values, bytes): # ByteList and ByteVector are already like serialized output
return values
# Recursively serialize
parts = [(v.type().is_fixed_size(), serialize(v)) for v in values]
# Compute and check lengths
fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
for (constant_size, serialized) in parts]
variable_lengths = [len(serialized) if not constant_size else 0
for (constant_size, serialized) in parts]
# Check if integer is not out of bounds (Python)
assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)
# Interleave offsets of variable-size parts with fixed-size parts.
# Avoid quadratic complexity in calculation of offsets.
offset = sum(fixed_lengths)
variable_parts = []
fixed_parts = []
for (constant_size, serialized) in parts:
if constant_size:
fixed_parts.append(serialized)
else:
fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
variable_parts.append(serialized)
offset += len(serialized)
# Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
return b''.join(fixed_parts + variable_parts)
# SSZ Hash-tree-root
# -----------------------------
def pack(values: Series):
if isinstance(values, bytes): # ByteList and ByteVector are already packed
return values
elif isinstance(values, Bits):
# packs the bits in bytes, left-aligned.
# Exclusive length delimiting bits for bitlists.
return values.as_bytes()
return b''.join([serialize_basic(value) for value in values])
def chunkify(bytez):
# pad `bytez` to nearest 32-byte multiple
bytez += b'\x00' * (-len(bytez) % 32)
return [bytez[i:i + 32] for i in range(0, len(bytez), 32)]
def mix_in_length(root, length):
return hash(root + length.to_bytes(32, 'little'))
def is_bottom_layer_kind(typ: SSZType):
return (
isinstance(typ, BasicType) or
(issubclass(typ, Elements) and isinstance(typ.elem_type, BasicType))
)
def item_length(typ: SSZType) -> int:
if issubclass(typ, BasicValue):
return typ.byte_len
else:
return 32
def chunk_count(typ: SSZType) -> int:
# note that for lists, .length *on the type* describes the list limit.
if isinstance(typ, BasicType):
return 1
elif issubclass(typ, Bits):
return (typ.length + 255) // 256
elif issubclass(typ, Elements):
return (typ.length * item_length(typ.elem_type) + 31) // 32
elif issubclass(typ, Container):
return len(typ.get_fields())
else:
raise Exception(f"Type not supported: {typ}")
def hash_tree_root(obj: SSZValue) -> Bytes32:
if isinstance(obj, Series):
if is_bottom_layer_kind(obj.type()):
leaves = chunkify(pack(obj))
else:
leaves = [hash_tree_root(value) for value in obj]
elif isinstance(obj, BasicValue):
leaves = chunkify(serialize_basic(obj))
else:
raise Exception(f"Type not supported: {type(obj)}")
if isinstance(obj, (List, ByteList, Bitlist)):
return Bytes32(mix_in_length(merkleize_chunks(leaves, limit=chunk_count(obj.type())), len(obj)))
else:
return Bytes32(merkleize_chunks(leaves))
def hash_tree_root(obj: View) -> Bytes32:
return Bytes32(obj.get_backing().merkle_root())
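`serialize` and `hash_tree_root` are now thin wrappers over remerkleable's own encoding and merkleization. A hedged usage sketch on a made-up container, assuming the pyspec package is installed:

```python
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, serialize
from eth2spec.utils.ssz.ssz_typing import Container, uint64


class Pair(Container):  # illustrative container, not part of the spec
    a: uint64
    b: uint64


p = Pair(a=1, b=2)
assert serialize(p) == p.encode_bytes()         # serialization delegates to remerkleable
assert hash_tree_root(p) == p.hash_tree_root()  # and so does merkleization
```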

View File

@@ -1,518 +1,8 @@
from typing import Dict, Iterator, Iterable
import copy
from types import GeneratorType
class DefaultingTypeMeta(type):
def default(cls):
raise Exception("Not implemented")
class SSZType(DefaultingTypeMeta):
def is_fixed_size(cls):
raise Exception("Not implemented")
class SSZValue(object, metaclass=SSZType):
def type(self):
return self.__class__
class BasicType(SSZType):
byte_len = 0
def is_fixed_size(cls):
return True
class BasicValue(int, SSZValue, metaclass=BasicType):
pass
class boolean(BasicValue): # can't subclass bool.
byte_len = 1
def __new__(cls, value: int): # int value, but can be any subclass of int (bool, Bit, Bool, etc...)
if value < 0 or value > 1:
raise ValueError(f"value {value} out of bounds for bit")
return super().__new__(cls, value)
@classmethod
def default(cls):
return cls(0)
def __bool__(self):
return self > 0
# Alias for Bool
class bit(boolean):
pass
class uint(BasicValue, metaclass=BasicType):
def __new__(cls, value: int):
if value < 0:
raise ValueError("unsigned types must not be negative")
if cls.byte_len and value.bit_length() > (cls.byte_len << 3):
raise ValueError("value out of bounds for uint{}".format(cls.byte_len * 8))
return super().__new__(cls, value)
def __add__(self, other):
return self.__class__(super().__add__(coerce_type_maybe(other, self.__class__, strict=True)))
def __sub__(self, other):
return self.__class__(super().__sub__(coerce_type_maybe(other, self.__class__, strict=True)))
@classmethod
def default(cls):
return cls(0)
class uint8(uint):
byte_len = 1
# Alias for uint8
class byte(uint8):
pass
class uint16(uint):
byte_len = 2
class uint32(uint):
byte_len = 4
class uint64(uint):
byte_len = 8
class uint128(uint):
byte_len = 16
class uint256(uint):
byte_len = 32
def coerce_type_maybe(v, typ: SSZType, strict: bool = False):
v_typ = type(v)
# shortcut if it's already the type we are looking for
if v_typ == typ:
return v
elif isinstance(v, int):
if isinstance(v, uint): # do not coerce from one uintX to another uintY
if issubclass(typ, uint) and v.type().byte_len == typ.byte_len:
return typ(v)
# revert to default behavior below if-else. (ValueError/bare)
else:
return typ(v)
elif isinstance(v, (list, tuple)):
return typ(*v)
elif isinstance(v, (bytes, ByteVector, ByteList)):
return typ(v)
elif isinstance(v, GeneratorType):
return typ(v)
elif issubclass(typ, Container) and not isinstance(v, typ):
return typ(**{field_name: getattr(v, field_name) for field_name in typ.get_field_names()})
# just return as-is, Value-checkers will take care of it not being coerced, if we are not strict.
if strict and not isinstance(v, typ):
raise ValueError("Type coercion of {} to {} failed".format(v, typ))
return v
class Series(SSZValue):
def __iter__(self) -> Iterator[SSZValue]:
raise Exception("Not implemented")
# Note: importing ssz functionality locally, to avoid import loop
class Container(Series, metaclass=SSZType):
def __init__(self, **kwargs):
cls = self.__class__
for f, t in cls.get_fields().items():
if f not in kwargs:
setattr(self, f, t.default())
else:
value = coerce_type_maybe(kwargs[f], t)
if not isinstance(value, t):
raise ValueError(f"Bad input for class {self.__class__}:"
f" field: {f} type: {t} value: {value} value type: {type(value)}")
setattr(self, f, value)
def serialize(self):
from .ssz_impl import serialize
return serialize(self)
def hash_tree_root(self):
from .ssz_impl import hash_tree_root
return hash_tree_root(self)
def __setattr__(self, name, value):
if name not in self.__class__.__annotations__:
raise AttributeError("Cannot change non-existing SSZ-container attribute")
field_typ = self.__class__.__annotations__[name]
value = coerce_type_maybe(value, field_typ)
if not isinstance(value, field_typ):
raise ValueError(f"Cannot set field of {self.__class__}:"
f" field: {name} type: {field_typ} value: {value} value type: {type(value)}")
super().__setattr__(name, value)
def __repr__(self):
return repr({field: (getattr(self, field) if hasattr(self, field) else 'unset')
for field in self.get_fields().keys()})
def __str__(self):
output = [f'{self.__class__.__name__}']
for field in self.get_fields().keys():
output.append(f' {field}: {getattr(self, field)}')
return "\n".join(output)
def __eq__(self, other):
return self.hash_tree_root() == other.hash_tree_root()
def __hash__(self):
return hash(self.hash_tree_root())
def copy(self):
return copy.deepcopy(self)
@classmethod
def get_fields(cls) -> Dict[str, SSZType]:
if not hasattr(cls, '__annotations__'): # no container fields
return {}
return dict(cls.__annotations__)
@classmethod
def get_field_names(cls) -> Iterable[str]:
if not hasattr(cls, '__annotations__'): # no container fields
return ()
return list(cls.__annotations__.keys())
@classmethod
def default(cls):
return cls(**{f: t.default() for f, t in cls.get_fields().items()})
@classmethod
def is_fixed_size(cls):
return all(t.is_fixed_size() for t in cls.get_fields().values())
def __iter__(self) -> Iterator[SSZValue]:
return iter([getattr(self, field) for field in self.get_fields().keys()])
class ParamsBase(Series):
_has_params = False
def __new__(cls, *args, **kwargs):
if not cls._has_params:
raise Exception("cannot init bare type without params")
return super().__new__(cls, **kwargs)
class ParamsMeta(SSZType):
def __new__(cls, class_name, parents, attrs):
out = type.__new__(cls, class_name, parents, attrs)
if hasattr(out, "_has_params") and getattr(out, "_has_params"):
for k, v in attrs.items():
setattr(out, k, v)
return out
def __getitem__(self, params):
o = self.__class__(self.__name__, (self,), self.attr_from_params(params))
return o
def __str__(self):
return f"{self.__name__}~{self.__class__.__name__}"
def __repr__(self):
return f"{self.__name__}~{self.__class__.__name__}"
def attr_from_params(self, p):
# single key params are valid too. Wrap them in a tuple.
params = p if isinstance(p, tuple) else (p,)
res = {'_has_params': True}
i = 0
for (name, typ) in self.__annotations__.items():
if hasattr(self.__class__, name):
res[name] = getattr(self.__class__, name)
else:
if i >= len(params):
i += 1
continue
param = params[i]
if not isinstance(param, typ):
raise TypeError(
"cannot create parametrized class with param {} as {} of type {}".format(param, name, typ))
res[name] = param
i += 1
if len(params) != i:
raise TypeError("provided parameters {} mismatch required parameter count {}".format(params, i))
return res
def __subclasscheck__(self, subclass):
# check regular class system if we can, solves a lot of the normal cases.
if super().__subclasscheck__(subclass):
return True
# if they are not normal subclasses, they are of the same class.
# then they should have the same name
if subclass.__name__ != self.__name__:
return False
# If they do have the same name, they should also have the same params.
for name, typ in self.__annotations__.items():
if hasattr(self, name) and hasattr(subclass, name) \
and getattr(subclass, name) != getattr(self, name):
return False
return True
def __instancecheck__(self, obj):
return self.__subclasscheck__(obj.__class__)
class ElementsType(ParamsMeta):
elem_type: SSZType
length: int
class Elements(ParamsBase, metaclass=ElementsType):
pass
class BaseList(list, Elements):
def __init__(self, *args):
items = self.extract_args(*args)
if not self.value_check(items):
raise ValueError(f"Bad input for class {self.__class__}: {items}")
super().__init__(items)
@classmethod
def value_check(cls, value):
return all(isinstance(v, cls.elem_type) for v in value) and len(value) <= cls.length
@classmethod
def extract_args(cls, *args):
x = list(args)
if len(x) == 1 and isinstance(x[0], (GeneratorType, list, tuple)):
x = list(x[0])
x = [coerce_type_maybe(v, cls.elem_type) for v in x]
return x
def __str__(self):
cls = self.__class__
return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})"
def __repr__(self):
cls = self.__class__
return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})"
def __getitem__(self, k) -> SSZValue:
if isinstance(k, int): # check if we are just doing a lookup, and not slicing
if k < 0:
raise IndexError(f"cannot get item in type {self.__class__} at negative index {k}")
if k > len(self):
raise IndexError(f"cannot get item in type {self.__class__}"
f" at out of bounds index {k}")
return super().__getitem__(k)
def __setitem__(self, k, v):
if type(k) == slice:
if (k.start is not None and k.start < 0) or (k.stop is not None and k.stop > len(self)):
raise IndexError(f"cannot set item in type {self.__class__}"
f" at out of bounds slice {k} (to {v}, bound: {len(self)})")
super().__setitem__(k, [coerce_type_maybe(x, self.__class__.elem_type) for x in v])
else:
if k < 0:
raise IndexError(f"cannot set item in type {self.__class__} at negative index {k} (to {v})")
if k > len(self):
raise IndexError(f"cannot set item in type {self.__class__}"
f" at out of bounds index {k} (to {v}, bound: {len(self)})")
super().__setitem__(k, coerce_type_maybe(v, self.__class__.elem_type, strict=True))
def append(self, v):
super().append(coerce_type_maybe(v, self.__class__.elem_type, strict=True))
def __iter__(self) -> Iterator[SSZValue]:
return super().__iter__()
def last(self):
# be explicit about getting the last item, for the non-python readers, and negative-index safety
return self[len(self) - 1]
class BitElementsType(ElementsType):
elem_type: SSZType = boolean
length: int
class Bits(BaseList, metaclass=BitElementsType):
def as_bytes(self):
as_bytearray = [0] * ((len(self) + 7) // 8)
for i in range(len(self)):
as_bytearray[i // 8] |= int(self[i]) << (i % 8)
return bytes(as_bytearray)
class Bitlist(Bits):
@classmethod
def is_fixed_size(cls):
return False
@classmethod
def default(cls):
return cls()
class Bitvector(Bits):
@classmethod
def extract_args(cls, *args):
if len(args) == 0:
return cls.default()
else:
return super().extract_args(*args)
@classmethod
def value_check(cls, value):
# check length limit strictly
return len(value) == cls.length and super().value_check(value)
@classmethod
def is_fixed_size(cls):
return True
@classmethod
def default(cls):
return cls(0 for _ in range(cls.length))
class List(BaseList):
@classmethod
def default(cls):
return cls()
@classmethod
def is_fixed_size(cls):
return False
class Vector(BaseList):
@classmethod
def value_check(cls, value):
# check length limit strictly
return len(value) == cls.length and super().value_check(value)
@classmethod
def default(cls):
return cls(cls.elem_type.default() for _ in range(cls.length))
@classmethod
def is_fixed_size(cls):
return cls.elem_type.is_fixed_size()
def append(self, v):
# Deep-copy and other utils like to change the internals during work.
# Only complain if we had the right size.
if len(self) == self.__class__.length:
raise Exception("cannot modify vector length")
else:
super().append(v)
def pop(self, *args):
raise Exception("cannot modify vector length")
class BytesType(ElementsType):
elem_type: SSZType = byte
length: int
class BaseBytes(bytes, Elements, metaclass=BytesType):
def __new__(cls, *args) -> "BaseBytes":
extracted_val = cls.extract_args(*args)
if not cls.value_check(extracted_val):
raise ValueError(f"Bad input for class {cls}: {extracted_val}")
return super().__new__(cls, extracted_val)
@classmethod
def extract_args(cls, *args):
x = args
if len(x) == 1 and isinstance(x[0], (GeneratorType, bytes, str)):
x = x[0]
if isinstance(x, bytes): # Includes BytesLike
return x
if isinstance(x, str):
if x[:2] == '0x':
return bytes.fromhex(x[2:])
else:
return bytes.fromhex(x)
else:
return bytes(x) # E.g. GeneratorType put into bytes.
@classmethod
def value_check(cls, value):
# check type and virtual length limit
return isinstance(value, bytes) and len(value) <= cls.length
def __str__(self):
cls = self.__class__
return f"{cls.__name__}[{cls.length}]: {self.hex()}"
class ByteList(BaseBytes):
@classmethod
def default(cls):
return b''
@classmethod
def is_fixed_size(cls):
return False
class ByteVector(BaseBytes):
@classmethod
def extract_args(cls, *args):
if len(args) == 0:
return cls.default()
else:
return super().extract_args(*args)
@classmethod
def default(cls):
return b'\x00' * cls.length
@classmethod
def value_check(cls, value):
# check length limit strictly
return len(value) == cls.length and super().value_check(value)
@classmethod
def is_fixed_size(cls):
return True
# Helpers for common ByteVector types
Bytes1: BytesType = ByteVector[1]
Bytes4: BytesType = ByteVector[4]
Bytes8: BytesType = ByteVector[8]
Bytes32: BytesType = ByteVector[32]
Bytes48: BytesType = ByteVector[48]
Bytes96: BytesType = ByteVector[96]
# flake8: noqa
# Ignore linter: This module makes importing SSZ types easy, and hides away the underlying library from the spec.
from remerkleable.complex import Container, Vector, List
from remerkleable.basic import boolean, bit, uint, byte, uint8, uint16, uint32, uint64, uint128, uint256
from remerkleable.bitfields import Bitvector, Bitlist
from remerkleable.byte_arrays import ByteVector, Bytes1, Bytes4, Bytes8, Bytes32, Bytes48, Bytes96, ByteList
from remerkleable.core import BasicView, View, TypeDef
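The class-level introspection API changes along with the import source: `typ.length`, `typ.byte_len`, `typ.elem_type` and `typ.get_fields()` become `limit()`, `type_byte_length()`, `element_cls()` and `fields()`. A hedged sketch of the mapping, assuming remerkleable is installed; `Example` is a made-up container:

```python
from eth2spec.utils.ssz.ssz_typing import Bytes32, Container, List, uint16, uint64


class Example(Container):  # illustrative only
    count: uint64
    items: List[uint16, 1024]


assert uint64.type_byte_length() == 8               # was: uint64.byte_len
assert Bytes32.type_byte_length() == 32             # was: Bytes32.length
assert List[uint16, 1024].limit() == 1024           # was: typ.length (the list limit)
assert List[uint16, 1024].element_cls() == uint16   # was: typ.elem_type
assert set(Example.fields().keys()) == {"count", "items"}  # was: typ.get_fields()
```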

View File

@@ -1,264 +0,0 @@
from typing import Iterable
from .ssz_impl import serialize, hash_tree_root
from .ssz_typing import (
bit, boolean, Container, List, Vector, ByteList, ByteVector,
Bitlist, Bitvector,
uint8, uint16, uint32, uint64, uint256, byte
)
from ..hash_function import hash as bytes_hash
import pytest
class EmptyTestStruct(Container):
pass
class SingleFieldTestStruct(Container):
A: byte
class SmallTestStruct(Container):
A: uint16
B: uint16
class FixedTestStruct(Container):
A: uint8
B: uint64
C: uint32
class VarTestStruct(Container):
A: uint16
B: List[uint16, 1024]
C: uint8
class ComplexTestStruct(Container):
A: uint16
B: List[uint16, 128]
C: uint8
D: ByteList[256]
E: VarTestStruct
F: Vector[FixedTestStruct, 4]
G: Vector[VarTestStruct, 2]
sig_test_data = [0 for i in range(96)]
for k, v in {0: 1, 32: 2, 64: 3, 95: 0xff}.items():
sig_test_data[k] = v
def chunk(hex: str) -> str:
return (hex + ("00" * 32))[:64] # just pad on the right, to 32 bytes (64 hex chars)
def h(a: str, b: str) -> str:
return bytes_hash(bytes.fromhex(a) + bytes.fromhex(b)).hex()
# zero hashes, as strings, for
zero_hashes = [chunk("")]
for layer in range(1, 32):
zero_hashes.append(h(zero_hashes[layer - 1], zero_hashes[layer - 1]))
def merge(a: str, branch: Iterable[str]) -> str:
"""
Merge (out on left, branch on right) leaf a with branch items, branch is from bottom to top.
"""
out = a
for b in branch:
out = h(out, b)
return out
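For reference, the expected roots in the test table that follows were built by hand from these helpers (the whole file is removed in this commit in favour of remerkleable's merkleization). A hedged worked example for the `bitlist TTFTFTFF` row, assuming `bytes_hash` is the spec's SHA-256 wrapper from `eth2spec.utils.hash_function`:

```python
from eth2spec.utils.hash_function import hash as bytes_hash


def chunk(hex: str) -> str:
    return (hex + ("00" * 32))[:64]  # right-pad to one 32-byte chunk (64 hex chars)


def h(a: str, b: str) -> str:
    return bytes_hash(bytes.fromhex(a) + bytes.fromhex(b)).hex()


# Bitlist[8](1, 1, 0, 1, 0, 1, 0, 0): mix the packed-bits chunk "2b"
# with the length chunk "08" to get the expected hash-tree-root.
print(h(chunk("2b"), chunk("08")))
```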
test_data = [
("bit F", bit(False), "00", chunk("00")),
("bit T", bit(True), "01", chunk("01")),
("boolean F", boolean(False), "00", chunk("00")),
("boolean T", boolean(True), "01", chunk("01")),
("bitvector TTFTFTFF", Bitvector[8](1, 1, 0, 1, 0, 1, 0, 0), "2b", chunk("2b")),
("bitlist TTFTFTFF", Bitlist[8](1, 1, 0, 1, 0, 1, 0, 0), "2b01", h(chunk("2b"), chunk("08"))),
("bitvector FTFT", Bitvector[4](0, 1, 0, 1), "0a", chunk("0a")),
("bitlist FTFT", Bitlist[4](0, 1, 0, 1), "1a", h(chunk("0a"), chunk("04"))),
("bitvector FTF", Bitvector[3](0, 1, 0), "02", chunk("02")),
("bitlist FTF", Bitlist[3](0, 1, 0), "0a", h(chunk("02"), chunk("03"))),
("bitvector TFTFFFTTFT", Bitvector[10](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c502", chunk("c502")),
("bitlist TFTFFFTTFT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c506", h(chunk("c502"), chunk("0A"))),
("bitvector TFTFFFTTFTFFFFTT", Bitvector[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1),
"c5c2", chunk("c5c2")),
("bitlist TFTFFFTTFTFFFFTT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1),
"c5c201", h(chunk("c5c2"), chunk("10"))),
("long bitvector", Bitvector[512](1 for i in range(512)),
"ff" * 64, h("ff" * 32, "ff" * 32)),
("long bitlist", Bitlist[512](1),
"03", h(h(chunk("01"), chunk("")), chunk("01"))),
("long bitlist", Bitlist[512](1 for i in range(512)),
"ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), chunk("0002"))),
("odd bitvector", Bitvector[513](1 for i in range(513)),
"ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk("")))),
("odd bitlist", Bitlist[513](1 for i in range(513)),
"ff" * 64 + "03", h(h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk(""))), chunk("0102"))),
("uint8 00", uint8(0x00), "00", chunk("00")),
("uint8 01", uint8(0x01), "01", chunk("01")),
("uint8 ab", uint8(0xab), "ab", chunk("ab")),
("byte 00", byte(0x00), "00", chunk("00")),
("byte 01", byte(0x01), "01", chunk("01")),
("byte ab", byte(0xab), "ab", chunk("ab")),
("uint16 0000", uint16(0x0000), "0000", chunk("0000")),
("uint16 abcd", uint16(0xabcd), "cdab", chunk("cdab")),
("uint32 00000000", uint32(0x00000000), "00000000", chunk("00000000")),
("uint32 01234567", uint32(0x01234567), "67452301", chunk("67452301")),
("small (4567, 0123)", SmallTestStruct(A=0x4567, B=0x0123), "67452301", h(chunk("6745"), chunk("2301"))),
("small [4567, 0123]::2", Vector[uint16, 2](uint16(0x4567), uint16(0x0123)), "67452301", chunk("67452301")),
("uint32 01234567", uint32(0x01234567), "67452301", chunk("67452301")),
("uint64 0000000000000000", uint64(0x00000000), "0000000000000000", chunk("0000000000000000")),
("uint64 0123456789abcdef", uint64(0x0123456789abcdef), "efcdab8967452301", chunk("efcdab8967452301")),
("sig", ByteVector[96](*sig_test_data),
"0100000000000000000000000000000000000000000000000000000000000000"
"0200000000000000000000000000000000000000000000000000000000000000"
"03000000000000000000000000000000000000000000000000000000000000ff",
h(h(chunk("01"), chunk("02")),
h("03000000000000000000000000000000000000000000000000000000000000ff", chunk("")))),
("emptyTestStruct", EmptyTestStruct(), "", chunk("")),
("singleFieldTestStruct", SingleFieldTestStruct(A=0xab), "ab", chunk("ab")),
("uint16 list", List[uint16, 32](uint16(0xaabb), uint16(0xc0ad), uint16(0xeeff)), "bbaaadc0ffee",
h(h(chunk("bbaaadc0ffee"), chunk("")), chunk("03000000")) # max length: 32 * 2 = 64 bytes = 2 chunks
),
("uint32 list", List[uint32, 128](uint32(0xaabb), uint32(0xc0ad), uint32(0xeeff)), "bbaa0000adc00000ffee0000",
# max length: 128 * 4 = 512 bytes = 16 chunks
h(merge(chunk("bbaa0000adc00000ffee0000"), zero_hashes[0:4]), chunk("03000000"))
),
("uint256 list", List[uint256, 32](uint256(0xaabb), uint256(0xc0ad), uint256(0xeeff)),
"bbaa000000000000000000000000000000000000000000000000000000000000"
"adc0000000000000000000000000000000000000000000000000000000000000"
"ffee000000000000000000000000000000000000000000000000000000000000",
h(merge(h(h(chunk("bbaa"), chunk("adc0")), h(chunk("ffee"), chunk(""))), zero_hashes[2:5]), chunk("03000000"))
),
("uint256 list long", List[uint256, 128](i for i in range(1, 20)),
"".join([i.to_bytes(length=32, byteorder='little').hex() for i in range(1, 20)]),
h(merge(
h(
h(
h(
h(h(chunk("01"), chunk("02")), h(chunk("03"), chunk("04"))),
h(h(chunk("05"), chunk("06")), h(chunk("07"), chunk("08"))),
),
h(
h(h(chunk("09"), chunk("0a")), h(chunk("0b"), chunk("0c"))),
h(h(chunk("0d"), chunk("0e")), h(chunk("0f"), chunk("10"))),
)
),
h(
h(
h(h(chunk("11"), chunk("12")), h(chunk("13"), chunk(""))),
zero_hashes[2]
),
zero_hashes[3]
)
),
zero_hashes[5:7]), chunk("13000000")) # 128 chunks = 7 deep
),
("fixedTestStruct", FixedTestStruct(A=0xab, B=0xaabbccdd00112233, C=0x12345678), "ab33221100ddccbbaa78563412",
h(h(chunk("ab"), chunk("33221100ddccbbaa")), h(chunk("78563412"), chunk("")))),
("varTestStruct nil", VarTestStruct(A=0xabcd, C=0xff), "cdab07000000ff",
h(h(chunk("cdab"), h(zero_hashes[6], chunk("00000000"))), h(chunk("ff"), chunk("")))),
("varTestStruct empty", VarTestStruct(A=0xabcd, B=List[uint16, 1024](), C=0xff), "cdab07000000ff",
h(h(chunk("cdab"), h(zero_hashes[6], chunk("00000000"))), h(chunk("ff"), chunk("")))), # log2(1024*2/32)= 6 deep
("varTestStruct some", VarTestStruct(A=0xabcd, B=List[uint16, 1024](1, 2, 3), C=0xff),
"cdab07000000ff010002000300",
h(
h(
chunk("cdab"),
h(
merge(
chunk("010002000300"),
zero_hashes[0:6]
),
chunk("03000000") # length mix in
)
),
h(chunk("ff"), chunk(""))
)),
("complexTestStruct",
ComplexTestStruct(
A=0xaabb,
B=List[uint16, 128](0x1122, 0x3344),
C=0xff,
D=ByteList[256](b"foobar"),
E=VarTestStruct(A=0xabcd, B=List[uint16, 1024](1, 2, 3), C=0xff),
F=Vector[FixedTestStruct, 4](
FixedTestStruct(A=0xcc, B=0x4242424242424242, C=0x13371337),
FixedTestStruct(A=0xdd, B=0x3333333333333333, C=0xabcdabcd),
FixedTestStruct(A=0xee, B=0x4444444444444444, C=0x00112233),
FixedTestStruct(A=0xff, B=0x5555555555555555, C=0x44556677)),
G=Vector[VarTestStruct, 2](
VarTestStruct(A=0xdead, B=List[uint16, 1024](1, 2, 3), C=0x11),
VarTestStruct(A=0xbeef, B=List[uint16, 1024](4, 5, 6), C=0x22)),
),
"bbaa"
"47000000" # offset of B, []uint16
"ff"
"4b000000" # offset of foobar
"51000000" # offset of E
"cc424242424242424237133713"
"dd3333333333333333cdabcdab"
"ee444444444444444433221100"
"ff555555555555555577665544"
"5e000000" # pointer to G
"22114433" # contents of B
"666f6f626172" # foobar
"cdab07000000ff010002000300" # contents of E
"08000000" "15000000" # [start G]: local offsets of [2]varTestStruct
"adde0700000011010002000300"
"efbe0700000022040005000600",
h(
h(
h( # A and B
chunk("bbaa"),
h(merge(chunk("22114433"), zero_hashes[0:3]), chunk("02000000")) # 2*128/32 = 8 chunks
),
h( # C and D
chunk("ff"),
h(merge(chunk("666f6f626172"), zero_hashes[0:3]), chunk("06000000")) # 256/32 = 8 chunks
)
),
h(
h( # E and F
h(h(chunk("cdab"), h(merge(chunk("010002000300"), zero_hashes[0:6]), chunk("03000000"))),
h(chunk("ff"), chunk(""))),
h(
h(
h(h(chunk("cc"), chunk("4242424242424242")), h(chunk("37133713"), chunk(""))),
h(h(chunk("dd"), chunk("3333333333333333")), h(chunk("cdabcdab"), chunk(""))),
),
h(
h(h(chunk("ee"), chunk("4444444444444444")), h(chunk("33221100"), chunk(""))),
h(h(chunk("ff"), chunk("5555555555555555")), h(chunk("77665544"), chunk(""))),
),
)
),
h( # G and padding
h(
h(h(chunk("adde"), h(merge(chunk("010002000300"), zero_hashes[0:6]), chunk("03000000"))),
h(chunk("11"), chunk(""))),
h(h(chunk("efbe"), h(merge(chunk("040005000600"), zero_hashes[0:6]), chunk("03000000"))),
h(chunk("22"), chunk(""))),
),
chunk("")
)
)
))
]
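# Note on the expected roots above (illustrative summary): List and Bitlist values
# are packed into 32-byte chunks, merkleized, padded with zero_hashes up to the
# type's chunk limit, and finally mixed with their length (as a little-endian
# chunk). E.g. for the "uint16 list" row, the limit is 32 * 2 bytes = 2 chunks, so:
#
#   h(h(chunk("bbaaadc0ffee"), chunk("")), chunk("03000000"))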
@pytest.mark.parametrize("name, value, serialized, _", test_data)
def test_serialize(name, value, serialized, _):
assert serialize(value) == bytes.fromhex(serialized)
@pytest.mark.parametrize("name, value, _, root", test_data)
def test_hash_tree_root(name, value, _, root):
assert hash_tree_root(value) == bytes.fromhex(root)
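# Illustrative standalone check, mirroring the "bitlist TTFTFTFF" row above with
# the same helpers (example_value is just a local name for this sketch):
example_value = Bitlist[8](1, 1, 0, 1, 0, 1, 0, 0)
assert serialize(example_value) == bytes.fromhex("2b01")
assert hash_tree_root(example_value) == bytes.fromhex(h(chunk("2b"), chunk("08")))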
View File
@ -1,233 +0,0 @@
from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, ElementsType,
Elements, bit, boolean, Container, List, Vector, ByteList, ByteVector,
byte, uint, uint8, uint16, uint32, uint64, uint128, uint256,
Bytes32, Bytes48
)
def expect_value_error(fn, msg):
try:
fn()
raise AssertionError(msg)
except ValueError:
pass
def test_subclasses():
for u in [uint, uint8, uint16, uint32, uint64, uint128, uint256]:
assert issubclass(u, uint)
assert issubclass(u, int)
assert issubclass(u, BasicValue)
assert issubclass(u, SSZValue)
assert isinstance(u, SSZType)
assert isinstance(u, BasicType)
assert issubclass(boolean, BasicValue)
assert isinstance(boolean, BasicType)
for c in [Container, List, Vector, ByteList, ByteVector]:
assert issubclass(c, Series)
assert issubclass(c, SSZValue)
assert isinstance(c, SSZType)
assert not issubclass(c, BasicValue)
assert not isinstance(c, BasicType)
for c in [List, Vector, ByteList, ByteVector]:
assert issubclass(c, Elements)
assert isinstance(c, ElementsType)
def test_basic_instances():
for u in [uint, uint8, byte, uint16, uint32, uint64, uint128, uint256]:
v = u(123)
assert isinstance(v, uint)
assert isinstance(v, int)
assert isinstance(v, BasicValue)
assert isinstance(v, SSZValue)
assert isinstance(boolean(True), BasicValue)
assert isinstance(boolean(False), BasicValue)
assert isinstance(bit(True), boolean)
assert isinstance(bit(False), boolean)
def test_basic_value_bounds():
max = {
boolean: 2 ** 1,
bit: 2 ** 1,
uint8: 2 ** (8 * 1),
byte: 2 ** (8 * 1),
uint16: 2 ** (8 * 2),
uint32: 2 ** (8 * 4),
uint64: 2 ** (8 * 8),
uint128: 2 ** (8 * 16),
uint256: 2 ** (8 * 32),
}
for k, v in max.items():
# this should work
assert k(v - 1) == v - 1
# but we do not allow overflows
expect_value_error(lambda: k(v), "no overflows allowed")
for k, _ in max.items():
# this should work
assert k(0) == 0
# but we do not allow underflows
expect_value_error(lambda: k(-1), "no underflows allowed")
def test_container():
class Foo(Container):
a: uint8
b: uint32
empty = Foo()
assert empty.a == uint8(0)
assert empty.b == uint32(0)
assert issubclass(Foo, Container)
assert issubclass(Foo, SSZValue)
assert issubclass(Foo, Series)
assert Foo.is_fixed_size()
x = Foo(a=uint8(123), b=uint32(45))
assert x.a == 123
assert x.b == 45
assert isinstance(x.a, uint8)
assert isinstance(x.b, uint32)
assert x.type().is_fixed_size()
class Bar(Container):
a: uint8
b: List[uint8, 1024]
assert not Bar.is_fixed_size()
y = Bar(a=123, b=List[uint8, 1024](uint8(1), uint8(2)))
assert y.a == 123
assert isinstance(y.a, uint8)
assert len(y.b) == 2
assert isinstance(y.a, uint8)
assert isinstance(y.b, List[uint8, 1024])
assert not y.type().is_fixed_size()
assert y.b[0] == 1
v: List = y.b
assert v.type().elem_type == uint8
assert v.type().length == 1024
y.a = 42
try:
y.a = 256 # out of bounds
assert False
except ValueError:
pass
try:
y.a = uint16(255) # within bounds, wrong type
assert False
except ValueError:
pass
try:
y.not_here = 5
assert False
except AttributeError:
pass
def test_list():
typ = List[uint64, 128]
assert issubclass(typ, List)
assert issubclass(typ, SSZValue)
assert issubclass(typ, Series)
assert issubclass(typ, Elements)
assert isinstance(typ, ElementsType)
assert not typ.is_fixed_size()
assert len(typ()) == 0 # empty
assert len(typ(uint64(0))) == 1 # single arg
assert len(typ(uint64(i) for i in range(10))) == 10 # generator
assert len(typ(uint64(0), uint64(1), uint64(2))) == 3 # args
assert isinstance(typ(1, 2, 3, 4, 5)[4], uint64) # coercion
assert isinstance(typ(i for i in range(10))[9], uint64) # coercion in generator
v = typ(uint64(0))
v[0] = uint64(123)
assert v[0] == 123
assert isinstance(v[0], uint64)
assert isinstance(v, List)
assert isinstance(v, List[uint64, 128])
assert isinstance(v, typ)
assert isinstance(v, SSZValue)
assert isinstance(v, Series)
assert issubclass(v.type(), Elements)
assert isinstance(v.type(), ElementsType)
assert len(typ([i for i in range(10)])) == 10 # cast py list to SSZ list
foo = List[uint32, 128](0 for i in range(128))
foo[0] = 123
foo[1] = 654
foo[127] = 222
assert sum(foo) == 999
try:
foo[3] = 2 ** 32  # out of bounds
assert False
except ValueError:
pass
try:
foo[3] = uint64(2 ** 32 - 1) # within bounds, wrong type
assert False
except ValueError:
pass
try:
foo[128] = 100
assert False
except IndexError:
pass
try:
foo[-1] = 100 # valid in normal python lists
assert False
except IndexError:
pass
try:
foo[128] = 100 # out of bounds
assert False
except IndexError:
pass
def test_bytesn_subclass():
assert isinstance(ByteVector[32](b'\xab' * 32), Bytes32)
assert not isinstance(ByteVector[32](b'\xab' * 32), Bytes48)
assert issubclass(ByteVector[32](b'\xab' * 32).type(), Bytes32)
assert issubclass(ByteVector[32], Bytes32)
class Root(Bytes32):
pass
assert isinstance(Root(b'\xab' * 32), Bytes32)
assert not isinstance(Root(b'\xab' * 32), Bytes48)
assert issubclass(Root(b'\xab' * 32).type(), Bytes32)
assert issubclass(Root, Bytes32)
assert not issubclass(Bytes48, Bytes32)
assert len(Bytes32() + Bytes48()) == 80
def test_uint_math():
assert uint8(0) + uint8(uint32(16)) == uint8(16) # allow explicit casting to make invalid addition valid
expect_value_error(lambda: uint8(0) - uint8(1), "no underflows allowed")
expect_value_error(lambda: uint8(1) + uint8(255), "no overflows allowed")
expect_value_error(lambda: uint8(0) + 256, "no overflows allowed")
expect_value_error(lambda: uint8(42) + uint32(123), "no mixed types")
expect_value_error(lambda: uint32(42) + uint8(123), "no mixed types")
assert type(uint32(1234) + 56) == uint32
View File
@ -3,4 +3,4 @@ eth-typing>=2.1.0,<3.0.0
pycryptodome==3.9.4
py_ecc==2.0.0
dataclasses==0.6
ssz==0.1.3
remerkleable==0.1.10
View File
@ -10,8 +10,7 @@ setup(
"eth-typing>=2.1.0,<3.0.0",
"pycryptodome==3.9.4",
"py_ecc==2.0.0",
"ssz==0.1.3",
"dataclasses==0.6",
"pytest"
"remerkleable==0.1.10",
]
)