Merge pull request #1224 from ethereum/dankrad-patch-8

Add Bitlist and Bitvector
This commit is contained in:
Diederik Loerakker 2019-06-29 01:40:56 +02:00 committed by GitHub
commit b21c9cc71b
21 changed files with 313 additions and 195 deletions

View File

@ -25,8 +25,8 @@ from eth2spec.utils.ssz.ssz_impl import (
signing_root,
)
from eth2spec.utils.ssz.ssz_typing import (
Bit, Bool, Container, List, Vector, Bytes, uint64,
Bytes4, Bytes32, Bytes48, Bytes96,
bit, boolean, Container, List, Vector, uint64,
Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
)
from eth2spec.utils.bls import (
bls_aggregate_pubkeys,
@ -52,8 +52,8 @@ from eth2spec.utils.ssz.ssz_impl import (
is_empty,
)
from eth2spec.utils.ssz.ssz_typing import (
Bit, Bool, Container, List, Vector, Bytes, uint64,
Bytes4, Bytes32, Bytes48, Bytes96,
bit, boolean, Container, List, Vector, Bytes, uint64,
Bytes4, Bytes32, Bytes48, Bytes96, Bitlist, Bitvector,
)
from eth2spec.utils.bls import (
bls_aggregate_pubkeys,
@ -174,8 +174,8 @@ def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, st
ignored_dependencies = [
'Bit', 'Bool', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', 'Bytes', 'BytesN'
'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96',
'bit', 'boolean', 'Vector', 'List', 'Container', 'Hash', 'BLSPubkey', 'BLSSignature', 'Bytes', 'BytesN',
'Bytes4', 'Bytes32', 'Bytes48', 'Bytes96', 'Bitlist', 'Bitvector',
'uint8', 'uint16', 'uint32', 'uint64', 'uint128', 'uint256',
'bytes' # to be removed after updating spec doc
]

View File

@ -81,8 +81,6 @@
- [`bytes_to_int`](#bytes_to_int)
- [`get_total_balance`](#get_total_balance)
- [`get_domain`](#get_domain)
- [`get_bitfield_bit`](#get_bitfield_bit)
- [`verify_bitfield`](#verify_bitfield)
- [`convert_to_indexed`](#convert_to_indexed)
- [`validate_indexed_attestation`](#validate_indexed_attestation)
- [`is_slashable_attestation_data`](#is_slashable_attestation_data)
@ -192,6 +190,7 @@ The following values are (non-configurable) constants used throughout the specif
| `MIN_PER_EPOCH_CHURN_LIMIT` | `2**2` (= 4) |
| `CHURN_LIMIT_QUOTIENT` | `2**16` (= 65,536) |
| `SHUFFLE_ROUND_COUNT` | `90` |
| `JUSTIFICATION_BITS_LENGTH` | `4` |
* For the safety of crosslinks, `TARGET_COMMITTEE_SIZE` exceeds [the recommended minimum committee size of 111](https://vitalik.ca/files/Ithaca201807_Sharding.pdf); with sufficient active validators (at least `SLOTS_PER_EPOCH * TARGET_COMMITTEE_SIZE`), the shuffling algorithm ensures committee sizes of at least `TARGET_COMMITTEE_SIZE`. (Unbiasable randomness with a Verifiable Delay Function (VDF) will improve committee robustness and lower the safe minimum committee size.)
@ -306,7 +305,7 @@ class Validator(Container):
pubkey: BLSPubkey
withdrawal_credentials: Hash # Commitment to pubkey for withdrawals and transfers
effective_balance: Gwei # Balance at stake
slashed: Bool
slashed: boolean
# Status epochs
activation_eligibility_epoch: Epoch # When criteria for activation were met
activation_epoch: Epoch
@ -344,7 +343,7 @@ class AttestationData(Container):
```python
class AttestationDataAndCustodyBit(Container):
data: AttestationData
custody_bit: Bit # Challengeable bit (SSZ-bool, 1 byte) for the custody of crosslink data
custody_bit: bit # Challengeable bit (SSZ-bool, 1 byte) for the custody of crosslink data
```
#### `IndexedAttestation`
@ -361,7 +360,7 @@ class IndexedAttestation(Container):
```python
class PendingAttestation(Container):
aggregation_bitfield: Bytes[MAX_INDICES_PER_ATTESTATION // 8]
aggregation_bits: Bitlist[MAX_INDICES_PER_ATTESTATION]
data: AttestationData
inclusion_delay: Slot
proposer_index: ValidatorIndex
@ -428,9 +427,9 @@ class AttesterSlashing(Container):
```python
class Attestation(Container):
aggregation_bitfield: Bytes[MAX_INDICES_PER_ATTESTATION // 8]
aggregation_bits: Bitlist[MAX_INDICES_PER_ATTESTATION]
data: AttestationData
custody_bitfield: Bytes[MAX_INDICES_PER_ATTESTATION // 8]
custody_bits: Bitlist[MAX_INDICES_PER_ATTESTATION]
signature: BLSSignature
```
@ -528,7 +527,7 @@ class BeaconState(Container):
previous_crosslinks: Vector[Crosslink, SHARD_COUNT] # Previous epoch snapshot
current_crosslinks: Vector[Crosslink, SHARD_COUNT]
# Finality
justification_bitfield: uint64 # Bit set for every recent justified epoch
justification_bits: Bitvector[JUSTIFICATION_BITS_LENGTH] # Bit set for every recent justified epoch
previous_justified_checkpoint: Checkpoint # Previous epoch snapshot
current_justified_checkpoint: Checkpoint
finalized_checkpoint: Checkpoint
@ -866,13 +865,14 @@ def get_crosslink_committee(state: BeaconState, epoch: Epoch, shard: Shard) -> S
### `get_attesting_indices`
```python
def get_attesting_indices(state: BeaconState, data: AttestationData, bitfield: bytes) -> Set[ValidatorIndex]:
def get_attesting_indices(state: BeaconState,
data: AttestationData,
bits: Bitlist[MAX_INDICES_PER_ATTESTATION]) -> Set[ValidatorIndex]:
"""
Return the set of attesting indices corresponding to ``data`` and ``bits``.
"""
committee = get_crosslink_committee(state, data.target.epoch, data.crosslink.shard)
assert verify_bitfield(bitfield, len(committee))
return set(index for i, index in enumerate(committee) if get_bitfield_bit(bitfield, i) == 0b1)
return set(index for i, index in enumerate(committee) if bits[i])
```
### `int_to_bytes`
@ -913,34 +913,6 @@ def get_domain(state: BeaconState,
return bls_domain(domain_type, fork_version)
```
### `get_bitfield_bit`
```python
def get_bitfield_bit(bitfield: bytes, i: int) -> int:
"""
Extract the bit in ``bitfield`` at position ``i``.
"""
return (bitfield[i // 8] >> (i % 8)) % 2
```
### `verify_bitfield`
```python
def verify_bitfield(bitfield: bytes, committee_size: int) -> bool:
"""
Verify ``bitfield`` against the ``committee_size``.
"""
if len(bitfield) != (committee_size + 7) // 8:
return False
# Check `bitfield` is padded with zero bits only
for i in range(committee_size, len(bitfield) * 8):
if get_bitfield_bit(bitfield, i) == 0b1:
return False
return True
```
### `convert_to_indexed`
```python
@ -948,8 +920,8 @@ def convert_to_indexed(state: BeaconState, attestation: Attestation) -> IndexedA
"""
Convert ``attestation`` to (almost) indexed-verifiable form.
"""
attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bitfield)
custody_bit_1_indices = get_attesting_indices(state, attestation.data, attestation.custody_bitfield)
attesting_indices = get_attesting_indices(state, attestation.data, attestation.aggregation_bits)
custody_bit_1_indices = get_attesting_indices(state, attestation.data, attestation.custody_bits)
assert custody_bit_1_indices.issubset(attesting_indices)
custody_bit_0_indices = attesting_indices.difference(custody_bit_1_indices)
@ -1283,7 +1255,7 @@ def get_unslashed_attesting_indices(state: BeaconState,
attestations: Sequence[PendingAttestation]) -> Set[ValidatorIndex]:
output = set() # type: Set[ValidatorIndex]
for a in attestations:
output = output.union(get_attesting_indices(state, a.data, a.aggregation_bitfield))
output = output.union(get_attesting_indices(state, a.data, a.aggregation_bits))
return set(filter(lambda index: not state.validators[index].slashed, list(output)))
```
@ -1323,34 +1295,32 @@ def process_justification_and_finalization(state: BeaconState) -> None:
# Process justifications
state.previous_justified_checkpoint = state.current_justified_checkpoint
state.justification_bitfield = (state.justification_bitfield << 1) % 2**64
previous_epoch_matching_target_balance = get_attesting_balance(
state, get_matching_target_attestations(state, previous_epoch)
)
if previous_epoch_matching_target_balance * 3 >= get_total_active_balance(state) * 2:
state.justification_bits[1:] = state.justification_bits[:-1]
state.justification_bits[0] = 0b0
matching_target_attestations = get_matching_target_attestations(state, previous_epoch) # Previous epoch
if get_attesting_balance(state, matching_target_attestations) * 3 >= get_total_active_balance(state) * 2:
state.current_justified_checkpoint = Checkpoint(epoch=previous_epoch,
root=get_block_root(state, previous_epoch))
state.justification_bitfield |= (1 << 1)
current_epoch_matching_target_balance = get_attesting_balance(
state, get_matching_target_attestations(state, current_epoch)
)
if current_epoch_matching_target_balance * 3 >= get_total_active_balance(state) * 2:
state.current_justified_checkpoint = Checkpoint(epoch=current_epoch, root=get_block_root(state, current_epoch))
state.justification_bitfield |= (1 << 0)
state.justification_bits[1] = 0b1
matching_target_attestations = get_matching_target_attestations(state, current_epoch) # Current epoch
if get_attesting_balance(state, matching_target_attestations) * 3 >= get_total_active_balance(state) * 2:
state.current_justified_checkpoint = Checkpoint(epoch=current_epoch,
root=get_block_root(state, current_epoch))
state.justification_bits[0] = 0b1
# Process finalizations
bitfield = state.justification_bitfield
bits = state.justification_bits
# The 2nd/3rd/4th most recent epochs are justified, the 2nd using the 4th as source
if (bitfield >> 1) % 8 == 0b111 and old_previous_justified_checkpoint.epoch + 3 == current_epoch:
if all(bits[1:4]) and old_previous_justified_checkpoint.epoch + 3 == current_epoch:
state.finalized_checkpoint = old_previous_justified_checkpoint
# The 2nd/3rd most recent epochs are justified, the 2nd using the 3rd as source
if (bitfield >> 1) % 4 == 0b11 and old_previous_justified_checkpoint.epoch + 2 == current_epoch:
if all(bits[1:3]) and old_previous_justified_checkpoint.epoch + 2 == current_epoch:
state.finalized_checkpoint = old_previous_justified_checkpoint
# The 1st/2nd/3rd most recent epochs are justified, the 1st using the 3rd as source
if (bitfield >> 0) % 8 == 0b111 and old_current_justified_checkpoint.epoch + 2 == current_epoch:
if all(bits[0:3]) and old_current_justified_checkpoint.epoch + 2 == current_epoch:
state.finalized_checkpoint = old_current_justified_checkpoint
# The 1st/2nd most recent epochs are justified, the 1st using the 2nd as source
if (bitfield >> 0) % 4 == 0b11 and old_current_justified_checkpoint.epoch + 1 == current_epoch:
if all(bits[0:2]) and old_current_justified_checkpoint.epoch + 1 == current_epoch:
state.finalized_checkpoint = old_current_justified_checkpoint
```
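For readers comparing the two representations, here is a minimal sketch (plain Python lists standing in for `Bitvector[JUSTIFICATION_BITS_LENGTH]`; illustrative only, not spec code) showing that the slice-based shift above has the same effect on the four tracked bits as the old `uint64` bitfield shift:
```python
JUSTIFICATION_BITS_LENGTH = 4

def bits_to_int(bits):
    # Old encoding: bit i of the integer bitfield corresponds to bits[i].
    return sum(b << i for i, b in enumerate(bits))

bits = [1, 1, 0, 1]                # some recent justification history
shifted = [0] + bits[:-1]          # same effect as: bits[1:] = bits[:-1]; bits[0] = 0b0
assert bits_to_int(shifted) == (bits_to_int(bits) << 1) % 2**JUSTIFICATION_BITS_LENGTH

# The finality checks translate the same way, e.g. the old (bitfield >> 1) % 4 == 0b11
# test becomes all(bits[1:3]) on the new Bitvector.
assert all(shifted[1:3]) == ((bits_to_int(shifted) >> 1) % 4 == 0b11)
```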
@ -1406,7 +1376,7 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[Sequence[Gwei], Sequence
index = ValidatorIndex(index)
attestation = min([
a for a in matching_source_attestations
if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
if index in get_attesting_indices(state, a.data, a.aggregation_bits)
], key=lambda a: a.inclusion_delay)
proposer_reward = Gwei(get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT)
rewards[attestation.proposer_index] += proposer_reward
@ -1677,7 +1647,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None:
pending_attestation = PendingAttestation(
data=data,
aggregation_bitfield=attestation.aggregation_bitfield,
aggregation_bits=attestation.aggregation_bits,
inclusion_delay=state.slot - attestation_slot,
proposer_index=get_beacon_proposer_index(state),
)
@ -1696,7 +1666,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None:
assert data.crosslink.start_epoch == parent_crosslink.end_epoch
assert data.crosslink.end_epoch == min(data.target.epoch, parent_crosslink.end_epoch + MAX_EPOCHS_PER_CROSSLINK)
assert data.crosslink.data_root == ZERO_HASH # [to be removed in phase 1]
# Check signature
validate_indexed_attestation(state, convert_to_indexed(state, attestation))
```

View File

@ -272,22 +272,32 @@ def get_custody_chunk_count(crosslink: Crosslink) -> int:
return crosslink_length * chunks_per_epoch
```
### `get_bit`
```python
def get_bit(serialization: bytes, i: int) -> int:
"""
Extract the bit in ``serialization`` at position ``i``.
"""
return (serialization[i // 8] >> (i % 8)) % 2
```
### `get_custody_chunk_bit`
```python
def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool:
# TODO: Replace with something MPC-friendly, e.g. the Legendre symbol
return bool(get_bitfield_bit(hash(key + chunk), 0))
return bool(get_bit(hash(key + chunk), 0))
```
### `get_chunk_bits_root`
```python
def get_chunk_bits_root(chunk_bitfield: bytes) -> Bytes32:
def get_chunk_bits_root(chunk_bits: bytes) -> Bytes32:
aggregated_bits = bytearray([0] * 32)
for i in range(0, len(chunk_bitfield), 32):
for i in range(0, len(chunk_bits), 32):
for j in range(32):
aggregated_bits[j] ^= chunk_bitfield[i + j]
aggregated_bits[j] ^= chunk_bits[i + j]
return hash(aggregated_bits)
```
@ -469,7 +479,7 @@ def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge
responder = state.validators[challenge.responder_index]
assert responder.exit_epoch >= get_current_epoch(state) - MAX_CHUNK_CHALLENGE_DELAY
# Verify the responder participated in the attestation
attesters = get_attesting_indices(state, challenge.attestation.data, challenge.attestation.aggregation_bitfield)
attesters = get_attesting_indices(state, challenge.attestation.data, challenge.attestation.aggregation_bits)
assert challenge.responder_index in attesters
# Verify the challenge is not a duplicate
for record in state.custody_chunk_challenge_records:
@ -520,8 +530,9 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) ->
# Verify attestation is eligible for challenging
responder = state.validators[challenge.responder_index]
assert epoch + responder.max_reveal_lateness <= get_reveal_period(state, challenge.responder_index)
# Verify responder participated in the attestation
attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bitfield)
# Verify the responder participated in the attestation
attesters = get_attesting_indices(state, attestation.data, attestation.aggregation_bits)
assert challenge.responder_index in attesters
# Verify the challenger is not already challenging
for record in state.custody_bit_challenge_records:
@ -535,11 +546,10 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) ->
assert bls_verify(responder.pubkey, hash_tree_root(epoch_to_sign), challenge.responder_key, domain)
# Verify the chunk count
chunk_count = get_custody_chunk_count(attestation.data.crosslink)
assert verify_bitfield(challenge.chunk_bits, chunk_count)
# Verify the first bit of the hash of the chunk bits does not equal the custody bit
committee = get_crosslink_committee(state, epoch, shard)
custody_bit = get_bitfield_bit(attestation.custody_bitfield, committee.index(challenge.responder_index))
assert custody_bit != get_bitfield_bit(get_chunk_bits_root(challenge.chunk_bits), 0)
custody_bit = attestation.custody_bits[committee.index(challenge.responder_index)]
assert custody_bit != get_bit(get_chunk_bits_root(challenge.chunk_bits), 0)
# Add new bit challenge record
new_record = CustodyBitChallengeRecord(
challenge_index=state.custody_challenge_index,
@ -631,7 +641,7 @@ def process_bit_challenge_response(state: BeaconState,
)
# Verify the chunk bit does not match the challenge chunk bit
assert (get_custody_chunk_bit(challenge.responder_key, response.chunk)
!= get_bitfield_bit(challenge.chunk_bits_leaf, response.chunk_index % 256))
!= get_bit(challenge.chunk_bits_leaf, response.chunk_index % 256))
# Clear the challenge
records = state.custody_bit_challenge_records
records[records.index(challenge)] = CustodyBitChallengeRecord()

View File

@ -92,7 +92,7 @@ class ShardAttestation(Container):
slot: Slot
shard: Shard
shard_block_root: Bytes32
aggregation_bitfield: Bytes[PLACEHOLDER]
aggregation_bits: Bitlist[PLACEHOLDER]
aggregate_signature: BLSSignature
```
@ -230,10 +230,9 @@ def verify_shard_attestation_signature(state: BeaconState,
attestation: ShardAttestation) -> None:
data = attestation.data
persistent_committee = get_persistent_committee(state, data.shard, data.slot)
assert verify_bitfield(attestation.aggregation_bitfield, len(persistent_committee))
pubkeys = []
for i, index in enumerate(persistent_committee):
if get_bitfield_bit(attestation.aggregation_bitfield, i) == 0b1:
if attestation.aggregation_bits[i]:
validator = state.validators[index]
assert is_active_validator(validator, get_current_epoch(state))
pubkeys.append(validator.pubkey)

View File

@ -168,7 +168,7 @@ If a client wants to update its `finalized_header` it asks the network for a `Bl
{
'header': BeaconBlockHeader,
'shard_aggregate_signature': BLSSignature,
'shard_bitfield': 'bytes',
'shard_bits': Bitlist[PLACEHOLDER],
'shard_parent_block': ShardBlock,
}
```
@ -180,13 +180,13 @@ def verify_block_validity_proof(proof: BlockValidityProof, validator_memory: Val
assert proof.shard_parent_block.beacon_chain_root == hash_tree_root(proof.header)
committee = compute_committee(proof.header, validator_memory)
# Verify that we have >=50% support
support_balance = sum([v.effective_balance for i, v in enumerate(committee) if get_bitfield_bit(proof.shard_bitfield, i) is True])
support_balance = sum([v.effective_balance for i, v in enumerate(committee) if proof.shard_bits[i]])
total_balance = sum([v.effective_balance for i, v in enumerate(committee)])
assert support_balance * 2 > total_balance
# Verify shard attestations
group_public_key = bls_aggregate_pubkeys([
v.pubkey for index, v in enumerate(committee)
if get_bitfield_bit(proof.shard_bitfield, index) is True
if proof.shard_bits[index]
])
assert bls_verify(
pubkey=group_public_key,
@ -196,4 +196,4 @@ def verify_block_validity_proof(proof: BlockValidityProof, validator_memory: Val
)
```
The size of this proof is only 200 (header) + 96 (signature) + 16 (bitfield) + 352 (shard block) = 664 bytes. It can be reduced further by replacing `ShardBlock` with `MerklePartial(lambda x: x.beacon_chain_root, ShardBlock)`, which would cut off ~220 bytes.
The size of this proof is only 200 (header) + 96 (signature) + 16 (bits) + 352 (shard block) = 664 bytes. It can be reduced further by replacing `ShardBlock` with `MerklePartial(lambda x: x.beacon_chain_root, ShardBlock)`, which would cut off ~220 bytes.

View File

@ -15,9 +15,9 @@
- [Default values](#default-values)
- [Illegal types](#illegal-types)
- [Serialization](#serialization)
- [`"uintN"`](#uintn)
- [`"bool"`](#bool)
- [`"null`](#null)
- [`uintN`](#uintn)
- [`boolean`](#boolean)
- [`null`](#null)
- [Vectors, containers, lists, unions](#vectors-containers-lists-unions)
- [Deserialization](#deserialization)
- [Merkleization](#merkleization)
@ -37,36 +37,45 @@
## Typing
### Basic types
* `"uintN"`: `N`-bit unsigned integer (where `N in [8, 16, 32, 64, 128, 256]`)
* `"bool"`: `True` or `False`
* `uintN`: `N`-bit unsigned integer (where `N in [8, 16, 32, 64, 128, 256]`)
* `boolean`: `True` or `False`
### Composite types
* **container**: ordered heterogeneous collection of values
* key-pair curly bracket notation `{}`, e.g. `{"foo": "uint64", "bar": "bool"}`
* **vector**: ordered fixed-length homogeneous collection of values
* angle bracket notation `[type, N]`, e.g. `["uint64", N]`
* **list**: ordered variable-length homogeneous collection of values
* angle bracket notation `[type]`, e.g. `["uint64"]`
* python dataclass notation with key-type pairs, e.g.
```python
class ContainerExample(Container):
foo: uint64
bar: boolean
```
* **vector**: ordered fixed-length homogeneous collection, with `N` values
* notation `Vector[type, N]`, e.g. `Vector[uint64, N]`
* **list**: ordered variable-length homogeneous collection, limited to `N` values
* notation `List[type, N]`, e.g. `List[uint64, N]`
* **bitvector**: ordered fixed-length collection of `boolean` values, with `N` bits
* notation `Bitvector[N]`
* **bitlist**: ordered variable-length collection of `boolean` values, limited to `N` bits
* notation `Bitlist[N]`
* **union**: union type containing one of the given subtypes
* round bracket notation `(type_1, type_2, ...)`, e.g. `("null", "uint64")`
* notation `Union[type_1, type_2, ...]`, e.g. `union[null, uint64]`
### Variable-size and fixed-size
We recursively define "variable-size" types to be lists and unions and all types that contain a variable-size type. All other types are said to be "fixed-size".
We recursively define "variable-size" types to be lists, unions, `Bitlist` and all types that contain a variable-size type. All other types are said to be "fixed-size".
### Aliases
For convenience we alias:
* `"byte"` to `"uint8"` (this is a basic type)
* `"bytes"` to `["byte"]` (this is *not* a basic type)
* `"bytesN"` to `["byte", N]` (this is *not* a basic type)
* `"null"`: `{}`, i.e. the empty container
* `bit` to `boolean`
* `byte` to `uint8` (this is a basic type)
* `BytesN` to `Vector[byte, N]` (this is *not* a basic type)
* `null`: `{}`, i.e. the empty container
### Default values
The default value of a type upon initialization is recursively defined using `0` for `"uintN"`, `False` for `"bool"`, and `[]` for lists. Unions default to the first type in the union (with type index zero), which is `"null"` if present in the union.
The default value of a type upon initialization is recursively defined using `0` for `uintN`, `False` for `boolean` and the elements of `Bitvector`, and `[]` for lists and `Bitlist`. Unions default to the first type in the union (with type index zero), which is `null` if present in the union.
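A quick illustration of these defaults for the new bit types, using the pyspec classes added in this PR (illustrative, not normative):
```python
from eth2spec.utils.ssz.ssz_typing import Bitlist, Bitvector

assert list(Bitvector[4].default()) == [0, 0, 0, 0]  # fixed length, every bit defaults to False
assert list(Bitlist[4].default()) == []              # variable length, defaults to empty like any list
```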
#### `is_empty`
@ -74,34 +83,50 @@ An SSZ object is called empty (and thus, `is_empty(object)` returns true) if it
### Illegal types
Empty vector types (i.e. `[subtype, 0]` for some `subtype`) are not legal. The `"null"` type is only legal as the first type in a union subtype (i.e. with type index zero).
Empty vector types (i.e. `Vector[subtype, 0]` for some `subtype`) are not legal. The `null` type is only legal as the first type in a union subtype (i.e. with type index zero).
## Serialization
We recursively define the `serialize` function which consumes an object `value` (of the type specified) and returns a bytestring of type `"bytes"`.
We recursively define the `serialize` function which consumes an object `value` (of the type specified) and returns a bytestring of type `bytes`.
*Note*: In the function definitions below (`serialize`, `hash_tree_root`, `signing_root`, `is_variable_size`, etc.) objects implicitly carry their type.
### `"uintN"`
### `uintN`
```python
assert N in [8, 16, 32, 64, 128, 256]
return value.to_bytes(N // 8, "little")
```
### `"bool"`
### `boolean`
```python
assert value in (True, False)
return b"\x01" if value is True else b"\x00"
```
### `"null"`
### `null`
```python
return b""
```
### `Bitvector[N]`
```python
as_integer = sum([value[i] << i for i in range(len(value))])
return as_integer.to_bytes((N + 7) // 8, "little")
```
### `Bitlist[N]`
Note that from the offset coding, the length (in bytes) of the bitlist is known. An additional `1` delimiter bit is added at position `len(value)` (the most significant bit of the resulting integer), so that the length in bits will also be known.
```python
as_integer = (1 << len(value)) + sum([value[i] << i for i in range(len(value))])
return as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little")
```
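As a quick worked example with the pyspec types (the same values appear in the test vectors added later in this commit; illustrative, not normative):
```python
from eth2spec.utils.ssz.ssz_impl import serialize
from eth2spec.utils.ssz.ssz_typing import Bitlist, Bitvector

# Bitvector[4](0, 1, 0, 1): bits packed little-endian -> integer 0b1010, i.e. one byte 0x0a.
assert serialize(Bitvector[4](0, 1, 0, 1)) == bytes.fromhex("0a")

# Bitlist[4](0, 1, 0, 1): same bits plus the delimiter bit at position len(value) = 4,
# giving integer 0b11010, i.e. 0x1a; the delimiter makes the bit-length recoverable.
assert serialize(Bitlist[4](0, 1, 0, 1)) == bytes.fromhex("1a")
```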
### Vectors, containers, lists, unions
```python
@ -136,23 +161,47 @@ return serialized_type_index + serialized_bytes
Because serialization is an injective function (i.e. two distinct objects of the same type will serialize to different values) any bytestring has at most one object it could deserialize to. Efficient algorithms for computing this object can be found in [the implementations](#implementations).
Note that deserialization requires hardening against invalid inputs. A non-exhaustive list:
- Offsets: out of order, out of range, mismatching minimum element size
- Scope: Extra unused bytes, not aligned with element size.
- More elements than a list limit allows. Part of enforcing consensus.
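To make the hardening notes concrete for the new type, a minimal `Bitlist` decoder has to recover the length from the delimiter bit and reject over-long inputs (a sketch under the serialization rule above, not part of the spec):
```python
def deserialize_bitlist(data: bytes, limit: int) -> list:
    # The serialized form is a little-endian integer whose highest set bit is the delimiter.
    if len(data) == 0 or data[-1] == 0:
        raise ValueError("invalid scope: the delimiter bit must sit in the last byte")
    as_integer = int.from_bytes(data, "little")
    length = as_integer.bit_length() - 1  # strip the delimiter bit
    if length > limit:
        raise ValueError("more elements than the list limit allows")
    return [(as_integer >> i) & 1 for i in range(length)]

assert deserialize_bitlist(bytes.fromhex("1a"), limit=4) == [0, 1, 0, 1]
```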
## Merkleization
We first define helper functions:
* `pack`: Given ordered objects of the same basic type, serialize them, pack them into `BYTES_PER_CHUNK`-byte chunks, right-pad the last chunk with zero bytes, and return the chunks.
* `merkleize`: Given ordered `BYTES_PER_CHUNK`-byte chunks, if necessary append zero chunks so that the number of chunks is a power of two, Merkleize the chunks, and return the root. Note that `merkleize` on a single chunk is simply that chunk, i.e. the identity when the number of chunks is one.
* `next_pow_of_two(i)`: get the next power of 2 of `i`, if not already a power of 2, with 0 mapping to 1. Examples: `0->1, 1->1, 2->2, 3->4, 4->4, 6->8, 9->16`
* `merkleize(data, pad_for)`: Given ordered `BYTES_PER_CHUNK`-byte chunks, if necessary append zero chunks so that the number of chunks is a power of two, Merkleize the chunks, and return the root.
The merkleization depends on the effective input, which can be padded: if `pad_for=L`, then pad the `data` with zeroed chunks to `next_pow_of_two(L)` (virtually for memory efficiency).
Then, merkleize the chunks (empty input is padded to 1 zero chunk; a sketch of this padding behaviour follows the list below):
- If `1` chunk: A single chunk is simply that chunk, i.e. the identity when the number of chunks is one.
- If `> 1` chunks: pad to `next_pow_of_two(len(chunks))`, merkleize as binary tree.
* `mix_in_length`: Given a Merkle root `root` and a length `length` (`"uint256"` little-endian serialization) return `hash(root + length)`.
* `mix_in_type`: Given a Merkle root `root` and a type_index `type_index` (`"uint256"` little-endian serialization) return `hash(root + type_index)`.
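A minimal sketch of `next_pow_of_two` and the padding behaviour described in the list above (hashing and chunking kept simple; illustrative, not the normative algorithm):
```python
from hashlib import sha256

BYTES_PER_CHUNK = 32
ZERO_CHUNK = b"\x00" * BYTES_PER_CHUNK

def next_pow_of_two(i: int) -> int:
    # 0 -> 1, 1 -> 1, 2 -> 2, 3 -> 4, 6 -> 8, 9 -> 16, ...
    return 1 if i == 0 else 1 << (i - 1).bit_length()

def merkleize(chunks: list, pad_for: int = 1) -> bytes:
    # Pad with zero chunks up to next_pow_of_two(pad_for); the spec allows doing this
    # padding virtually for memory efficiency, whereas this sketch materializes it.
    count = next_pow_of_two(max(len(chunks), pad_for, 1))
    layer = list(chunks) + [ZERO_CHUNK] * (count - len(chunks))
    while len(layer) > 1:
        layer = [sha256(layer[i] + layer[i + 1]).digest() for i in range(0, len(layer), 2)]
    return layer[0]  # a single chunk is its own root
```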
We now define Merkleization `hash_tree_root(value)` of an object `value` recursively:
* `merkleize(pack(value))` if `value` is a basic object or a vector of basic objects
* `mix_in_length(merkleize(pack(value)), len(value))` if `value` is a list of basic objects
* `mix_in_length(merkleize(pack(value), pad_for=(N * elem_size / BYTES_PER_CHUNK)), len(value))` if `value` is a list of basic objects.
* `merkleize([hash_tree_root(element) for element in value])` if `value` is a vector of composite objects or a container
* `mix_in_length(merkleize([hash_tree_root(element) for element in value]), len(value))` if `value` is a list of composite objects
* `mix_in_length(merkleize([hash_tree_root(element) for element in value], pad_for=N), len(value))` if `value` is a list of composite objects.
* `mix_in_type(merkleize(value.value), value.type_index)` if `value` is of union type
### Merkleization of `Bitvector[N]`
```python
as_integer = sum([value[i] << i for i in range(len(value))])
return merkleize(as_integer.to_bytes((N + 7) // 8, "little"))
```
### Merkleization of `Bitlist[N]`
```python
as_integer = sum([value[i] << i for i in range(len(value))])
return mix_in_length(merkleize(as_integer.to_bytes((N + 7) // 8, "little")), len(value))
```
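Tying this to the test vectors added later in this commit, a worked example (assuming the eth2 `hash` is SHA-256; illustrative, not normative):
```python
from hashlib import sha256

from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import Bitlist, Bitvector

def chunk(hex_bytes: str) -> bytes:
    # Right-pad to a full 32-byte chunk.
    return bytes.fromhex(hex_bytes).ljust(32, b"\x00")

# Bitvector[4](0, 1, 0, 1): the packed bits fit in a single chunk, which is the root.
assert hash_tree_root(Bitvector[4](0, 1, 0, 1)) == chunk("0a")

# Bitlist[4](0, 1, 0, 1): same packed chunk, then mix in the length 4 as a uint256 chunk.
assert hash_tree_root(Bitlist[4](0, 1, 0, 1)) == sha256(chunk("0a") + chunk("04")).digest()
```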
## Self-signed containers
Let `value` be a self-signed container object. The convention is that the signature (e.g. a `"bytes96"` BLS12-381 signature) be the last field of `value`. Further, the signed message for `value` is `signing_root(value) = hash_tree_root(truncate_last(value))` where `truncate_last` truncates the last element of `value`.

View File

@ -44,8 +44,8 @@
- [Crosslink vote](#crosslink-vote)
- [Construct attestation](#construct-attestation)
- [Data](#data)
- [Aggregation bitfield](#aggregation-bitfield)
- [Custody bitfield](#custody-bitfield)
- [Aggregation bits](#aggregation-bits)
- [Custody bits](#custody-bits)
- [Aggregate signature](#aggregate-signature)
- [How to avoid slashing](#how-to-avoid-slashing)
- [Proposer slashing](#proposer-slashing)
@ -329,19 +329,15 @@ Next, the validator creates `attestation`, an [`Attestation`](../core/0_beacon-c
Set `attestation.data = attestation_data` where `attestation_data` is the `AttestationData` object defined in the previous section, [attestation data](#attestation-data).
##### Aggregation bitfield
##### Aggregation bits
* Let `aggregation_bitfield` be a byte array filled with zeros of length `(len(committee) + 7) // 8`.
* Let `index_into_committee` be the index into the validator's `committee` at which `validator_index` is located.
* Set `aggregation_bitfield[index_into_committee // 8] |= 2 ** (index_into_committee % 8)`.
* Set `attestation.aggregation_bitfield = aggregation_bitfield`.
* Let `attestation.aggregation_bits` be a `Bitlist[MAX_INDICES_PER_ATTESTATION]` of length `len(committee)`, where the bit at the position of `validator_index` in `committee` is set to `0b1` and all other bits are `0b0` (see the sketch below).
*Note*: Calling `get_attesting_indices(state, attestation.data, attestation.aggregation_bitfield)` should return a list of length equal to 1, containing `validator_index`.
*Note*: Calling `get_attesting_indices(state, attestation.data, attestation.aggregation_bits)` should return a list of length equal to 1, containing `validator_index`.
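For illustration, a sketch of the construction above (`committee` is the validator's crosslink committee and `validator_index` its own index, as in the surrounding context; the helper name is hypothetical):
```python
from eth2spec.utils.ssz.ssz_typing import Bitlist

def single_attester_bits(spec, committee, validator_index):
    # One bit per committee member; only the attesting validator's position is set.
    bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](*([0b0] * len(committee)))
    bits[committee.index(validator_index)] = 0b1
    return bits
```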
##### Custody bitfield
##### Custody bits
* Let `custody_bitfield` be a byte array filled with zeros of length `(len(committee) + 7) // 8`.
* Set `attestation.custody_bitfield = custody_bitfield`.
* Let `attestation.custody_bits` be a `Bitlist[MAX_INDICES_PER_ATTESTATION]` of length `len(committee)`, filled with zeros.
*Note*: This is a stub for Phase 0.

View File

@ -415,16 +415,16 @@ components:
type: object
description: "The [`Attestation`](https://github.com/ethereum/eth2.0-specs/blob/master/specs/core/0_beacon-chain.md#attestation) object from the Eth2.0 spec."
properties:
aggregation_bitfield:
aggregation_bits:
type: string
format: byte
pattern: "^0x[a-fA-F0-9]+$"
description: "Attester aggregation bitfield."
custody_bitfield:
description: "Attester aggregation bits."
custody_bits:
type: string
format: byte
pattern: "^0x[a-fA-F0-9]+$"
description: "Custody bitfield."
description: "Custody bits."
signature:
type: string
format: byte

View File

@ -1,13 +1,13 @@
from typing import Any
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
SSZType, SSZValue, uint, Container, Bytes, List, Bool,
SSZType, SSZValue, uint, Container, Bytes, List, boolean,
Vector, BytesN
)
def decode(data: Any, typ: SSZType) -> SSZValue:
if issubclass(typ, (uint, Bool)):
if issubclass(typ, (uint, boolean)):
return typ(data)
elif issubclass(typ, (List, Vector)):
return typ(decode(element, typ.elem_type) for element in data)

View File

@ -1,6 +1,6 @@
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
SSZValue, uint, Container, Bool
SSZValue, uint, Container, boolean
)
@ -10,7 +10,7 @@ def encode(value: SSZValue, include_hash_tree_roots=False):
if value.type().byte_len > 8:
return str(int(value))
return int(value)
elif isinstance(value, Bool):
elif isinstance(value, boolean):
return value == 1
elif isinstance(value, list): # normal python lists, ssz-List, Vector
return [encode(element, include_hash_tree_roots) for element in value]

View File

@ -2,8 +2,8 @@ from random import Random
from enum import Enum
from eth2spec.utils.ssz.ssz_typing import (
SSZType, SSZValue, BasicValue, BasicType, uint, Container, Bytes, List, Bool,
Vector, BytesN
SSZType, SSZValue, BasicValue, BasicType, uint, Container, Bytes, List, boolean,
Vector, BytesN, Bitlist, Bitvector
)
# in bytes
@ -83,12 +83,12 @@ def get_random_ssz_object(rng: Random,
return get_max_basic_value(typ)
else:
return get_random_basic_value(rng, typ)
elif issubclass(typ, Vector):
elif issubclass(typ, Vector) or issubclass(typ, Bitvector):
return typ(
get_random_ssz_object(rng, typ.elem_type, max_bytes_length, max_list_length, mode, chaos)
for _ in range(typ.length)
)
elif issubclass(typ, List):
elif issubclass(typ, List) or issubclass(typ, Bitlist):
length = rng.randint(0, min(typ.length, max_list_length))
if mode == RandomizationMode.mode_one_count:
length = 1
@ -118,7 +118,7 @@ def get_random_bytes_list(rng: Random, length: int) -> bytes:
def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue:
if issubclass(typ, Bool):
if issubclass(typ, boolean):
return typ(rng.choice((True, False)))
elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES
@ -128,7 +128,7 @@ def get_random_basic_value(rng: Random, typ: BasicType) -> BasicValue:
def get_min_basic_value(typ: BasicType) -> BasicValue:
if issubclass(typ, Bool):
if issubclass(typ, boolean):
return typ(False)
elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES
@ -138,7 +138,7 @@ def get_min_basic_value(typ: BasicType) -> BasicValue:
def get_max_basic_value(typ: BasicType) -> BasicValue:
if issubclass(typ, Bool):
if issubclass(typ, boolean):
return typ(True)
elif issubclass(typ, uint):
assert typ.byte_len in UINT_BYTE_SIZES

View File

@ -18,8 +18,15 @@ def translate_typ(typ) -> ssz.BaseSedes:
elif issubclass(typ, spec_ssz.Vector):
return ssz.Vector(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.List):
# TODO: Make py-ssz List support the new fixed length list
return ssz.List(translate_typ(typ.elem_type))
elif issubclass(typ, spec_ssz.Bool):
elif issubclass(typ, spec_ssz.Bitlist):
# TODO: Once Bitlist implemented in py-ssz, use appropriate type
return ssz.List(translate_typ(typ.elem_type))
elif issubclass(typ, spec_ssz.Bitvector):
# TODO: Once Bitvector implemented in py-ssz, use appropriate type
return ssz.Vector(translate_typ(typ.elem_type), typ.length)
elif issubclass(typ, spec_ssz.boolean):
return ssz.boolean
elif issubclass(typ, spec_ssz.uint):
if typ.byte_len == 1:
@ -64,10 +71,14 @@ def translate_value(value, typ):
raise TypeError("invalid uint size")
elif issubclass(typ, spec_ssz.List):
return [translate_value(elem, typ.elem_type) for elem in value]
elif issubclass(typ, spec_ssz.Bool):
elif issubclass(typ, spec_ssz.boolean):
return value
elif issubclass(typ, spec_ssz.Vector):
return typ(*(translate_value(elem, typ.elem_type) for elem in value))
elif issubclass(typ, spec_ssz.Bitlist):
return typ(value)
elif issubclass(typ, spec_ssz.Bitvector):
return typ(value)
elif issubclass(typ, spec_ssz.BytesN):
return typ(value)
elif issubclass(typ, spec_ssz.Bytes):

View File

@ -9,7 +9,9 @@ def test_decoder():
rng = Random(123)
# check these types only, Block covers a lot of operation types already.
for typ in [spec.BeaconBlock, spec.BeaconState, spec.IndexedAttestation, spec.AttestationDataAndCustodyBit]:
# TODO: Once has Bitlists and Bitvectors, add back
# spec.BeaconState and spec.BeaconBlock
for typ in [spec.IndexedAttestation, spec.AttestationDataAndCustodyBit]:
# create a random pyspec value
original = random_value.get_random_ssz_object(rng, typ, 100, 10,
mode=random_value.RandomizationMode.mode_random,

View File

@ -118,5 +118,5 @@ def test_on_attestation_invalid_attestation(spec, state):
attestation = get_valid_attestation(spec, state, slot=block.slot)
# make attestation invalid
attestation.custody_bitfield = b'\xf0' + attestation.custody_bitfield[1:]
attestation.custody_bits[0:8] = [0, 0, 0, 0, 1, 1, 1, 1]
run_on_attestation(spec, state, store, attestation, False)

View File

@ -1,10 +1,10 @@
from typing import List
from eth2spec.test.helpers.bitfields import set_bitfield_bit
from eth2spec.test.helpers.block import build_empty_block_for_next_slot, sign_block
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils.bls import bls_sign, bls_aggregate_signatures
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import Bitlist
def build_attestation_data(spec, state, slot, shard):
@ -67,13 +67,12 @@ def get_valid_attestation(spec, state, slot=None, signed=False):
)
committee_size = len(crosslink_committee)
bitfield_length = (committee_size + 7) // 8
aggregation_bitfield = b'\x00' * bitfield_length
custody_bitfield = b'\x00' * bitfield_length
aggregation_bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](*([0] * committee_size))
custody_bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](*([0] * committee_size))
attestation = spec.Attestation(
aggregation_bitfield=aggregation_bitfield,
aggregation_bits=aggregation_bits,
data=attestation_data,
custody_bitfield=custody_bitfield,
custody_bits=custody_bits,
)
fill_aggregate_attestation(spec, state, attestation)
if signed:
@ -106,7 +105,7 @@ def sign_attestation(spec, state, attestation):
participants = spec.get_attesting_indices(
state,
attestation.data,
attestation.aggregation_bitfield,
attestation.aggregation_bits,
)
attestation.signature = sign_aggregate_attestation(spec, state, attestation.data, participants)
@ -136,7 +135,7 @@ def fill_aggregate_attestation(spec, state, attestation):
attestation.data.crosslink.shard,
)
for i in range(len(crosslink_committee)):
attestation.aggregation_bitfield = set_bitfield_bit(attestation.aggregation_bitfield, i)
attestation.aggregation_bits[i] = True
def add_attestation_to_state(spec, state, attestation, slot):

View File

@ -1,11 +0,0 @@
def set_bitfield_bit(bitfield, i):
"""
Set the bit in ``bitfield`` at position ``i`` to ``1``.
"""
byte_index = i // 8
bit_index = i % 8
return (
bitfield[:byte_index] +
bytes([bitfield[byte_index] | (1 << bit_index)]) +
bitfield[byte_index + 1:]
)

View File

@ -1,5 +1,3 @@
from copy import deepcopy
from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases, with_phases
from eth2spec.test.helpers.attestations import (
get_valid_attestation,
@ -10,6 +8,7 @@ from eth2spec.test.helpers.state import (
next_slot,
)
from eth2spec.test.helpers.block import apply_empty_block
from eth2spec.utils.ssz.ssz_typing import Bitlist
def run_attestation_processing(spec, state, attestation, valid=True):
@ -274,11 +273,14 @@ def test_bad_crosslink_end_epoch(spec, state):
@with_all_phases
@spec_state_test
def test_inconsistent_bitfields(spec, state):
def test_inconsistent_bits(spec, state):
attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.custody_bitfield = deepcopy(attestation.aggregation_bitfield) + b'\x00'
custody_bits = attestation.aggregation_bits[:]
custody_bits.append(False)
attestation.custody_bits = custody_bits
sign_attestation(spec, state, attestation)
@ -287,11 +289,11 @@ def test_inconsistent_bitfields(spec, state):
@with_phases(['phase0'])
@spec_state_test
def test_non_empty_custody_bitfield(spec, state):
def test_non_empty_custody_bits(spec, state):
attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.custody_bitfield = deepcopy(attestation.aggregation_bitfield)
attestation.custody_bits = attestation.aggregation_bits[:]
sign_attestation(spec, state, attestation)
@ -300,11 +302,12 @@ def test_non_empty_custody_bitfield(spec, state):
@with_all_phases
@spec_state_test
def test_empty_aggregation_bitfield(spec, state):
def test_empty_aggregation_bits(spec, state):
attestation = get_valid_attestation(spec, state)
state.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
attestation.aggregation_bitfield = b'\x00' * len(attestation.aggregation_bitfield)
attestation.aggregation_bits = Bitlist[spec.MAX_INDICES_PER_ATTESTATION](
*([0b0] * len(attestation.aggregation_bits)))
sign_attestation(spec, state, attestation)

View File

@ -1,7 +1,8 @@
from ..merkle_minimal import merkleize_chunks
from ..hash_function import hash
from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bool, Container, List, Bytes, uint,
SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bits, boolean, Container, List, Bytes,
Bitlist, Bitvector, uint,
)
# SSZ Serialization
@ -13,7 +14,7 @@ BYTES_PER_LENGTH_OFFSET = 4
def serialize_basic(value: SSZValue):
if isinstance(value, uint):
return value.to_bytes(value.type().byte_len, 'little')
elif isinstance(value, Bool):
elif isinstance(value, boolean):
if value:
return b'\x01'
else:
@ -25,7 +26,7 @@ def serialize_basic(value: SSZValue):
def deserialize_basic(value, typ: BasicType):
if issubclass(typ, uint):
return typ(int.from_bytes(value, 'little'))
elif issubclass(typ, Bool):
elif issubclass(typ, boolean):
assert value in (b'\x00', b'\x01')
return typ(value == b'\x01')
else:
@ -39,6 +40,12 @@ def is_empty(obj: SSZValue):
def serialize(obj: SSZValue):
if isinstance(obj, BasicValue):
return serialize_basic(obj)
elif isinstance(obj, Bitvector):
as_integer = sum([obj[i] << i for i in range(len(obj))])
return as_integer.to_bytes((len(obj) + 7) // 8, "little")
elif isinstance(obj, Bitlist):
as_integer = (1 << len(obj)) + sum([obj[i] << i for i in range(len(obj))])
return as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little")
elif isinstance(obj, Series):
return encode_series(obj)
else:
@ -85,6 +92,12 @@ def encode_series(values: Series):
def pack(values: Series):
if isinstance(values, bytes): # Bytes and BytesN are already packed
return values
elif isinstance(values, Bitvector):
as_integer = sum([values[i] << i for i in range(len(values))])
return as_integer.to_bytes((values.length + 7) // 8, "little")
elif isinstance(values, Bitlist):
as_integer = sum([values[i] << i for i in range(len(values))])
return as_integer.to_bytes((values.length + 7) // 8, "little")
return b''.join([serialize_basic(value) for value in values])
@ -115,6 +128,8 @@ def item_length(typ: SSZType) -> int:
def chunk_count(typ: SSZType) -> int:
if isinstance(typ, BasicType):
return 1
elif issubclass(typ, Bits):
return (typ.length + 255) // 256
elif issubclass(typ, Elements):
return (typ.length * item_length(typ.elem_type) + 31) // 32
elif issubclass(typ, Container):
@ -134,7 +149,7 @@ def hash_tree_root(obj: SSZValue):
else:
raise Exception(f"Type not supported: {type(obj)}")
if isinstance(obj, (List, Bytes)):
if isinstance(obj, (List, Bytes, Bitlist)):
return mix_in_length(merkleize_chunks(leaves, pad_to=chunk_count(obj.type())), len(obj))
else:
return merkleize_chunks(leaves)

View File

@ -31,7 +31,7 @@ class BasicValue(int, SSZValue, metaclass=BasicType):
pass
class Bool(BasicValue): # can't subclass bool.
class boolean(BasicValue): # can't subclass bool.
byte_len = 1
def __new__(cls, value: int): # int value, but can be any subclass of int (bool, Bit, Bool, etc...)
@ -48,7 +48,7 @@ class Bool(BasicValue): # can't subclass bool.
# Alias for boolean
class Bit(Bool):
class bit(boolean):
pass
@ -233,7 +233,7 @@ class ParamsMeta(SSZType):
return f"{self.__name__}~{self.__class__.__name__}"
def __repr__(self):
return self, self.__class__
return f"{self.__name__}~{self.__class__.__name__}"
def attr_from_params(self, p):
# single key params are valid too. Wrap them in a tuple.
@ -310,6 +310,10 @@ class BaseList(list, Elements):
cls = self.__class__
return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})"
def __repr__(self):
cls = self.__class__
return f"{cls.__name__}[{cls.elem_type.__name__}, {cls.length}]({', '.join(str(v) for v in self)})"
def __getitem__(self, k) -> SSZValue:
if isinstance(k, int): # check if we are just doing a lookup, and not slicing
if k < 0:
@ -320,12 +324,18 @@ class BaseList(list, Elements):
return super().__getitem__(k)
def __setitem__(self, k, v):
if k < 0:
raise IndexError(f"cannot set item in type {self.__class__} at negative index {k} (to {v})")
if k > len(self):
raise IndexError(f"cannot set item in type {self.__class__}"
f" at out of bounds index {k} (to {v}, bound: {len(self)})")
super().__setitem__(k, coerce_type_maybe(v, self.__class__.elem_type, strict=True))
if type(k) == slice:
if (k.start is not None and k.start < 0) or (k.stop is not None and k.stop > len(self)):
raise IndexError(f"cannot set item in type {self.__class__}"
f" at out of bounds slice {k} (to {v}, bound: {len(self)})")
super().__setitem__(k, [coerce_type_maybe(x, self.__class__.elem_type) for x in v])
else:
if k < 0:
raise IndexError(f"cannot set item in type {self.__class__} at negative index {k} (to {v})")
if k > len(self):
raise IndexError(f"cannot set item in type {self.__class__}"
f" at out of bounds index {k} (to {v}, bound: {len(self)})")
super().__setitem__(k, coerce_type_maybe(v, self.__class__.elem_type, strict=True))
def append(self, v):
super().append(coerce_type_maybe(v, self.__class__.elem_type, strict=True))
@ -338,6 +348,48 @@ class BaseList(list, Elements):
return self[len(self) - 1]
class BitElementsType(ElementsType):
elem_type: SSZType = boolean
length: int
class Bits(BaseList, metaclass=BitElementsType):
pass
class Bitlist(Bits):
@classmethod
def is_fixed_size(cls):
return False
@classmethod
def default(cls):
return cls()
class Bitvector(Bits):
@classmethod
def extract_args(cls, *args):
if len(args) == 0:
return cls.default()
else:
return super().extract_args(*args)
@classmethod
def value_check(cls, value):
# check length limit strictly
return len(value) == cls.length and super().value_check(value)
@classmethod
def is_fixed_size(cls):
return True
@classmethod
def default(cls):
return cls(0 for _ in range(cls.length))
class List(BaseList):
@classmethod

View File

@ -1,7 +1,8 @@
from typing import Iterable
from .ssz_impl import serialize, hash_tree_root
from .ssz_typing import (
Bit, Bool, Container, List, Vector, Bytes, BytesN,
bit, boolean, Container, List, Vector, Bytes, BytesN,
Bitlist, Bitvector,
uint8, uint16, uint32, uint64, uint256, byte
)
from ..hash_function import hash as bytes_hash
@ -74,10 +75,32 @@ def merge(a: str, branch: Iterable[str]) -> str:
test_data = [
("bit F", Bit(False), "00", chunk("00")),
("bit T", Bit(True), "01", chunk("01")),
("bool F", Bool(False), "00", chunk("00")),
("bool T", Bool(True), "01", chunk("01")),
("bit F", bit(False), "00", chunk("00")),
("bit T", bit(True), "01", chunk("01")),
("boolean F", boolean(False), "00", chunk("00")),
("boolean T", boolean(True), "01", chunk("01")),
("bitvector TTFTFTFF", Bitvector[8](1, 1, 0, 1, 0, 1, 0, 0), "2b", chunk("2b")),
("bitlist TTFTFTFF", Bitlist[8](1, 1, 0, 1, 0, 1, 0, 0), "2b01", h(chunk("2b"), chunk("08"))),
("bitvector FTFT", Bitvector[4](0, 1, 0, 1), "0a", chunk("0a")),
("bitlist FTFT", Bitlist[4](0, 1, 0, 1), "1a", h(chunk("0a"), chunk("04"))),
("bitvector FTF", Bitvector[3](0, 1, 0), "02", chunk("02")),
("bitlist FTF", Bitlist[3](0, 1, 0), "0a", h(chunk("02"), chunk("03"))),
("bitvector TFTFFFTTFT", Bitvector[10](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c502", chunk("c502")),
("bitlist TFTFFFTTFT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1), "c506", h(chunk("c502"), chunk("0A"))),
("bitvector TFTFFFTTFTFFFFTT", Bitvector[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1),
"c5c2", chunk("c5c2")),
("bitlist TFTFFFTTFTFFFFTT", Bitlist[16](1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1),
"c5c201", h(chunk("c5c2"), chunk("10"))),
("long bitvector", Bitvector[512](1 for i in range(512)),
"ff" * 64, h("ff" * 32, "ff" * 32)),
("long bitlist", Bitlist[512](1),
"03", h(h(chunk("01"), chunk("")), chunk("01"))),
("long bitlist", Bitlist[512](1 for i in range(512)),
"ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), chunk("0002"))),
("odd bitvector", Bitvector[513](1 for i in range(513)),
"ff" * 64 + "01", h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk("")))),
("odd bitlist", Bitlist[513](1 for i in range(513)),
"ff" * 64 + "03", h(h(h("ff" * 32, "ff" * 32), h(chunk("01"), chunk(""))), chunk("0102"))),
("uint8 00", uint8(0x00), "00", chunk("00")),
("uint8 01", uint8(0x01), "01", chunk("01")),
("uint8 ab", uint8(0xab), "ab", chunk("ab")),

View File

@ -1,6 +1,6 @@
from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, ElementsType,
Elements, Bit, Bool, Container, List, Vector, Bytes, BytesN,
Elements, bit, boolean, Container, List, Vector, Bytes, BytesN,
byte, uint, uint8, uint16, uint32, uint64, uint128, uint256,
Bytes32, Bytes48
)
@ -22,8 +22,8 @@ def test_subclasses():
assert issubclass(u, SSZValue)
assert isinstance(u, SSZType)
assert isinstance(u, BasicType)
assert issubclass(Bool, BasicValue)
assert isinstance(Bool, BasicType)
assert issubclass(boolean, BasicValue)
assert isinstance(boolean, BasicType)
for c in [Container, List, Vector, Bytes, BytesN]:
assert issubclass(c, Series)
@ -45,16 +45,16 @@ def test_basic_instances():
assert isinstance(v, BasicValue)
assert isinstance(v, SSZValue)
assert isinstance(Bool(True), BasicValue)
assert isinstance(Bool(False), BasicValue)
assert isinstance(Bit(True), Bool)
assert isinstance(Bit(False), Bool)
assert isinstance(boolean(True), BasicValue)
assert isinstance(boolean(False), BasicValue)
assert isinstance(bit(True), boolean)
assert isinstance(bit(False), boolean)
def test_basic_value_bounds():
max = {
Bool: 2 ** 1,
Bit: 2 ** 1,
boolean: 2 ** 1,
bit: 2 ** 1,
uint8: 2 ** (8 * 1),
byte: 2 ** (8 * 1),
uint16: 2 ** (8 * 2),