commit 866d3f30cc

@@ -49,9 +49,9 @@
         - [`BeaconState`](#beaconstate)
     - [Helper functions](#helper-functions)
         - [Math](#math)
-            - [`int_to_bytes`](#int_to_bytes)
             - [`integer_squareroot`](#integer_squareroot)
             - [`xor`](#xor)
+            - [`int_to_bytes`](#int_to_bytes)
             - [`bytes_to_int`](#bytes_to_int)
         - [Crypto](#crypto)
             - [`hash`](#hash)

@@ -390,7 +390,7 @@ class DepositData(Container):

 ```python
 class CompactCommittee(Container):
-    pubkeys: List[Bytes48, MAX_VALIDATORS_PER_COMMITTEE]
+    pubkeys: List[BLSPubkey, MAX_VALIDATORS_PER_COMMITTEE]
     compact_validators: List[uint64, MAX_VALIDATORS_PER_COMMITTEE]
 ```

@@ -541,8 +541,6 @@ class BeaconState(Container):

 ### Math

-#### `int_to_bytes`
-
 #### `integer_squareroot`

 ```python

@@ -561,13 +559,15 @@ def integer_squareroot(n: uint64) -> uint64:
 #### `xor`

 ```python
-def xor(bytes1: Bytes32, bytes2: Bytes32) -> Bytes32:
+def xor(bytes_1: Bytes32, bytes_2: Bytes32) -> Bytes32:
     """
     Return the exclusive-or of two 32-byte strings.
     """
-    return Bytes32(a ^ b for a, b in zip(bytes1, bytes2))
+    return Bytes32(a ^ b for a, b in zip(bytes_1, bytes_2))
 ```

+#### `int_to_bytes`
+
 ```python
 def int_to_bytes(n: uint64, length: uint64) -> bytes:
     """

@@ -654,7 +654,7 @@ def is_slashable_attestation_data(data_1: AttestationData, data_2: AttestationDa
 ```python
 def is_valid_indexed_attestation(state: BeaconState, indexed_attestation: IndexedAttestation) -> bool:
     """
-    Verify validity of ``indexed_attestation``.
+    Check if ``indexed_attestation`` has valid indices and signature.
     """
     bit_0_indices = indexed_attestation.custody_bit_0_indices
     bit_1_indices = indexed_attestation.custody_bit_1_indices

@@ -866,7 +866,7 @@ def get_seed(state: BeaconState, epoch: Epoch) -> Hash:
     """
     Return the seed at ``epoch``.
     """
-    mix = get_randao_mix(state, Epoch(epoch + EPOCHS_PER_HISTORICAL_VECTOR - MIN_SEED_LOOKAHEAD))  # Avoid underflow
+    mix = get_randao_mix(state, Epoch(epoch + EPOCHS_PER_HISTORICAL_VECTOR - MIN_SEED_LOOKAHEAD - 1))  # Avoid underflow
     active_index_root = state.active_index_roots[epoch % EPOCHS_PER_HISTORICAL_VECTOR]
     return hash(mix + active_index_root + int_to_bytes(epoch, length=32))
 ```

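The added `- 1` shifts the seed source one epoch further back; the `EPOCHS_PER_HISTORICAL_VECTOR` term exists only so the uint64 subtraction cannot go negative before `get_randao_mix` reduces the epoch modulo the vector length. A quick check of the index arithmetic (a sketch, assuming the mainnet constants `EPOCHS_PER_HISTORICAL_VECTOR = 65536` and `MIN_SEED_LOOKAHEAD = 1`):

```python
EPOCHS_PER_HISTORICAL_VECTOR = 65536  # assumed mainnet value
MIN_SEED_LOOKAHEAD = 1                # assumed mainnet value

def mix_index(epoch: int) -> int:
    # The additive constant keeps the uint64 subtraction non-negative for small
    # epochs; the modulo (applied inside get_randao_mix) still selects the mix
    # of epoch - MIN_SEED_LOOKAHEAD - 1.
    return (epoch + EPOCHS_PER_HISTORICAL_VECTOR - MIN_SEED_LOOKAHEAD - 1) % EPOCHS_PER_HISTORICAL_VECTOR

assert mix_index(0) == 65534   # wraps to the vector's tail instead of underflowing
assert mix_index(100) == 98    # equals epoch - 2 once the epoch is large enough
```
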
@@ -990,7 +990,7 @@ def get_total_balance(state: BeaconState, indices: Set[ValidatorIndex]) -> Gwei:
     """
     Return the combined effective balance of the ``indices``. (1 Gwei minimum to avoid divisions by zero.)
     """
-    return Gwei(max(sum([state.validators[index].effective_balance for index in indices]), 1))
+    return Gwei(max(1, sum([state.validators[index].effective_balance for index in indices])))
 ```

 #### `get_total_active_balance`

@@ -1217,7 +1217,7 @@ def process_slot(state: BeaconState) -> None:
     previous_state_root = hash_tree_root(state)
     state.state_roots[state.slot % SLOTS_PER_HISTORICAL_ROOT] = previous_state_root
     # Cache latest block header state root
-    if state.latest_block_header.state_root == Hash():
+    if state.latest_block_header.state_root == Bytes32():
         state.latest_block_header.state_root = previous_state_root
     # Cache block root
     previous_block_root = signing_root(state.latest_block_header)

@@ -1483,7 +1483,9 @@ def process_slashings(state: BeaconState) -> None:
     total_balance = get_total_active_balance(state)
     for index, validator in enumerate(state.validators):
         if validator.slashed and epoch + EPOCHS_PER_SLASHINGS_VECTOR // 2 == validator.withdrawable_epoch:
-            penalty = validator.effective_balance * min(sum(state.slashings) * 3, total_balance) // total_balance
+            increment = EFFECTIVE_BALANCE_INCREMENT  # Factored out from penalty numerator to avoid uint64 overflow
+            penalty_numerator = validator.effective_balance // increment * min(sum(state.slashings) * 3, total_balance)
+            penalty = penalty_numerator // total_balance * increment
             decrease_balance(state, ValidatorIndex(index), penalty)
 ```

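The factoring matters because the naive numerator `effective_balance * min(sum(state.slashings) * 3, total_balance)` can exceed the uint64 range. A back-of-the-envelope check (a sketch, assuming mainnet-like values: 32 ETH effective balance, an `EFFECTIVE_BALANCE_INCREMENT` of 10**9 Gwei, and roughly a million validators at stake):

```python
UINT64_MAX = 2**64 - 1
EFFECTIVE_BALANCE_INCREMENT = 10**9       # assumed mainnet value, in Gwei
effective_balance = 32 * 10**9            # 32 ETH in Gwei
total_balance = 1_000_000 * 32 * 10**9    # ~3.2e16 Gwei at stake (assumed scale)
slashed = total_balance                   # worst case for min(..., total_balance)

naive_numerator = effective_balance * slashed                          # ~1e27
assert naive_numerator > UINT64_MAX       # would overflow a uint64

factored = effective_balance // EFFECTIVE_BALANCE_INCREMENT * slashed  # ~1e18
assert factored <= UINT64_MAX             # fits, at the cost of increment granularity
```
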
@@ -1547,8 +1549,9 @@ def process_block_header(state: BeaconState, block: BeaconBlock) -> None:
     state.latest_block_header = BeaconBlockHeader(
         slot=block.slot,
         parent_root=block.parent_root,
-        state_root=Hash(),  # Overwritten in the next `process_slot` call
+        # state_root: zeroed, overwritten in the next `process_slot` call
         body_root=hash_tree_root(block.body),
+        # signature is always zeroed
     )
     # Verify proposer is not slashed
     proposer = state.validators[get_beacon_proposer_index(state)]

@@ -1671,7 +1674,7 @@ def process_attestation(state: BeaconState, attestation: Attestation) -> None:
     assert data.crosslink.parent_root == hash_tree_root(parent_crosslink)
     assert data.crosslink.start_epoch == parent_crosslink.end_epoch
     assert data.crosslink.end_epoch == min(data.target.epoch, parent_crosslink.end_epoch + MAX_EPOCHS_PER_CROSSLINK)
-    assert data.crosslink.data_root == Hash()  # [to be removed in phase 1]
+    assert data.crosslink.data_root == Bytes32()  # [to be removed in phase 1]

     # Check signature
     assert is_valid_indexed_attestation(state, get_indexed_attestation(state, attestation))

@@ -25,8 +25,6 @@

 - [Vectors, containers, lists, unions](#vectors-containers-lists-unions)
 - [Deserialization](#deserialization)
 - [Merkleization](#merkleization)
-  - [Merkleization of `Bitvector[N]`](#merkleization-of-bitvectorn)
-  - [`Bitlist[N]`](#bitlistn-1)
 - [Self-signed containers](#self-signed-containers)
 - [Implementations](#implementations)

@@ -50,11 +48,11 @@

 * **container**: ordered heterogeneous collection of values
     * python dataclass notation with key-type pairs, e.g.
-```python
-class ContainerExample(Container):
-    foo: uint64
-    bar: boolean
-```
+    ```python
+    class ContainerExample(Container):
+        foo: uint64
+        bar: boolean
+    ```
 * **vector**: ordered fixed-length homogeneous collection, with `N` values
     * notation `Vector[type, N]`, e.g. `Vector[uint64, N]`
 * **list**: ordered variable-length homogeneous collection, limited to `N` values

@@ -101,7 +99,7 @@ We recursively define the `serialize` function which consumes an object `value`

 ```python
 assert N in [8, 16, 32, 64, 128, 256]
-return value.to_bytes(N // 8, "little")
+return value.to_bytes(N // BITS_PER_BYTE, "little")
 ```

 ### `boolean`

@@ -120,8 +118,10 @@ return b""
 ### `Bitvector[N]`

 ```python
-as_integer = sum([value[i] << i for i in range(len(value))])
-return as_integer.to_bytes((N + 7) // 8, "little")
+array = [0] * ((N + 7) // 8)
+for i in range(N):
+    array[i // 8] |= value[i] << (i % 8)
+return bytes(array)
 ```

 ### `Bitlist[N]`

@@ -129,8 +129,11 @@ return as_integer.to_bytes((N + 7) // 8, "little")
 Note that from the offset coding, the length (in bytes) of the bitlist is known. An additional leading `1` bit is added so that the length in bits will also be known.

 ```python
-as_integer = (1 << len(value)) + sum([value[i] << i for i in range(len(value))])
-return as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little")
+array = [0] * ((len(value) // 8) + 1)
+for i in range(len(value)):
+    array[i // 8] |= value[i] << (i % 8)
+array[len(value) // 8] |= 1 << (len(value) % 8)
+return bytes(array)
 ```

 ### Vectors, containers, lists, unions

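A worked example of the packing above (a sketch; a plain list of bits stands in for the bitlist): the bits `[1, 0, 1]` occupy bit positions 0 and 2, the delimiter lands at position 3, and the serialization is the single byte `0b1101 = 0x0d`.

```python
value = [1, 0, 1]                     # bitlist contents, lowest index = least significant bit
array = [0] * ((len(value) // 8) + 1)
for i in range(len(value)):
    array[i // 8] |= value[i] << (i % 8)
array[len(value) // 8] |= 1 << (len(value) % 8)   # leading delimiter bit
assert bytes(array) == b'\x0d'        # 0b1101: payload bits 1,0,1 plus the delimiter
```
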
@@ -168,7 +171,8 @@ return serialized_type_index + serialized_bytes
 Because serialization is an injective function (i.e. two distinct objects of the same type will serialize to different values) any bytestring has at most one object it could deserialize to. Efficient algorithms for computing this object can be found in [the implementations](#implementations).

 Note that deserialization requires hardening against invalid inputs. A non-exhaustive list:
-- Offsets: out of order, out of range, mismatching minimum element size
+
+- Offsets: out of order, out of range, mismatching minimum element size.
 - Scope: Extra unused bytes, not aligned with element size.
 - More elements than a list limit allows. Part of enforcing consensus.

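A minimal sketch of the offset checks (a hypothetical helper, not part of the spec): each offset read from the fixed-size part must stay within the serialized scope and be non-decreasing.

```python
def check_offsets(offsets: list, fixed_size: int, scope: int) -> None:
    # offsets: variable-part offsets read from the fixed-size prelude.
    # fixed_size: byte length of the fixed-size prelude; scope: total byte length.
    previous = fixed_size
    for offset in offsets:
        assert fixed_size <= offset <= scope, "offset out of range"
        assert offset >= previous, "offsets out of order"
        previous = offset
```
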
@@ -176,37 +180,36 @@ Note that deserialization requires hardening against invalid inputs. A non-exhau

 We first define helper functions:

+* `size_of(B)`, where `B` is a basic type: the length, in bytes, of the serialized form of the basic type.
+* `chunk_count(type)`: calculate the number of leaves for merkleization of the type.
+    * all basic types: `1`
+    * `Bitlist[N]` and `Bitvector[N]`: `(N + 255) // 256` (dividing by chunk size, rounding up)
+    * `List[B, N]` and `Vector[B, N]`, where `B` is a basic type: `(N * size_of(B) + 31) // 32` (dividing by chunk size, rounding up)
+    * `List[C, N]` and `Vector[C, N]`, where `C` is a composite type: `N`
+    * containers: `len(fields)`
+* `bitfield_bytes(bits)`: return the bits of the bitlist or bitvector, packed in bytes, aligned to the start. Exclusive length-delimiting bit for bitlists.
 * `pack`: Given ordered objects of the same basic type, serialize them, pack them into `BYTES_PER_CHUNK`-byte chunks, right-pad the last chunk with zero bytes, and return the chunks.
 * `next_pow_of_two(i)`: get the next power of 2 of `i`, if not already a power of 2, with 0 mapping to 1. Examples: `0->1, 1->1, 2->2, 3->4, 4->4, 6->8, 9->16`
-* `merkleize(data, pad_for)`: Given ordered `BYTES_PER_CHUNK`-byte chunks, if necessary append zero chunks so that the number of chunks is a power of two, Merkleize the chunks, and return the root.
-  The merkleization depends on the effective input, which can be padded: if `pad_for=L`, then pad the `data` with zeroed chunks to `next_pow_of_two(L)` (virtually for memory efficiency).
-  Then, merkleize the chunks (empty input is padded to 1 zero chunk):
-    - If `1` chunk: A single chunk is simply that chunk, i.e. the identity when the number of chunks is one.
-    - If `> 1` chunks: pad to `next_pow_of_two(len(chunks))`, merkleize as binary tree.
+* `merkleize(chunks, limit=None)`: Given ordered `BYTES_PER_CHUNK`-byte chunks, merkleize the chunks, and return the root:
+    * The merkleization depends on the effective input, which can be padded/limited:
+        - if no limit: pad the `chunks` with zeroed chunks to `next_pow_of_two(len(chunks))` (virtually for memory efficiency).
+        - if `limit > len(chunks)`, pad the `chunks` with zeroed chunks to `next_pow_of_two(limit)` (virtually for memory efficiency).
+        - if `limit < len(chunks)`: do not merkleize, input exceeds limit. Raise an error instead.
+    * Then, merkleize the chunks (empty input is padded to 1 zero chunk):
+        - If `1` chunk: the root is the chunk itself.
+        - If `> 1` chunks: merkleize as binary tree.
 * `mix_in_length`: Given a Merkle root `root` and a length `length` (`"uint256"` little-endian serialization) return `hash(root + length)`.
 * `mix_in_type`: Given a Merkle root `root` and a type_index `type_index` (`"uint256"` little-endian serialization) return `hash(root + type_index)`.

 We now define Merkleization `hash_tree_root(value)` of an object `value` recursively:

-* `merkleize(pack(value))` if `value` is a basic object or a vector of basic objects
-* `mix_in_length(merkleize(pack(value), pad_for=(N * elem_size / BYTES_PER_CHUNK)), len(value))` if `value` is a list of basic objects.
-* `merkleize([hash_tree_root(element) for element in value])` if `value` is a vector of composite objects or a container
-* `mix_in_length(merkleize([hash_tree_root(element) for element in value], pad_for=N), len(value))` if `value` is a list of composite objects.
-* `mix_in_type(merkleize(value.value), value.type_index)` if `value` is of union type
-
-### Merkleization of `Bitvector[N]`
-
-```python
-as_integer = sum([value[i] << i for i in range(len(value))])
-return merkleize(as_integer.to_bytes((N + 7) // 8, "little"))
-```
-
-### `Bitlist[N]`
-
-```python
-as_integer = sum([value[i] << i for i in range(len(value))])
-return mix_in_length(merkleize(as_integer.to_bytes((N + 7) // 8, "little")), len(value))
-```
+* `merkleize(pack(value))` if `value` is a basic object or a vector of basic objects.
+* `merkleize(bitfield_bytes(value), limit=chunk_count(type))` if `value` is a bitvector.
+* `mix_in_length(merkleize(pack(value), limit=chunk_count(type)), len(value))` if `value` is a list of basic objects.
+* `mix_in_length(merkleize(bitfield_bytes(value), limit=chunk_count(type)), len(value))` if `value` is a bitlist.
+* `merkleize([hash_tree_root(element) for element in value])` if `value` is a vector of composite objects or a container.
+* `mix_in_length(merkleize([hash_tree_root(element) for element in value], limit=chunk_count(type)), len(value))` if `value` is a list of composite objects.
+* `mix_in_type(merkleize(value.value), value.type_index)` if `value` is of union type.
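The rules above transcribe almost directly into code. A non-normative sketch, assuming SHA-256 as the pair hash and eager rather than virtual zero-padding:

```python
from hashlib import sha256

BYTES_PER_CHUNK = 32
ZERO_CHUNK = b'\x00' * BYTES_PER_CHUNK

def next_pow_of_two(i: int) -> int:
    return 1 if i == 0 else 1 << (i - 1).bit_length()

def merkleize(chunks, limit=None):
    if limit is not None and limit < len(chunks):
        raise ValueError("input exceeds limit")
    # Pad to next_pow_of_two(limit or len(chunks)); an empty input becomes one zero chunk.
    size = next_pow_of_two(len(chunks) if limit is None else limit)
    layer = list(chunks) + [ZERO_CHUNK] * (size - len(chunks))
    # Merkleize as a binary tree; a single chunk is its own root.
    while len(layer) > 1:
        layer = [sha256(layer[i] + layer[i + 1]).digest() for i in range(0, len(layer), 2)]
    return layer[0]
```

A production implementation would pad virtually with precomputed zero-subtree hashes, as the bullets note ("virtually for memory efficiency"), instead of materializing the zero chunks.
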

 ## Self-signed containers

@@ -33,7 +33,10 @@ The particular formats of specific types of tests (test suites) are defined in s

 Test formats:
 - [`bls`](./bls/README.md)
+- [`epoch_processing`](./epoch_processing/README.md)
+- [`genesis`](./genesis/README.md)
 - [`operations`](./operations/README.md)
+- [`sanity`](./sanity/README.md)
 - [`shuffling`](./shuffling/README.md)
 - [`ssz_generic`](./ssz_generic/README.md)
 - [`ssz_static`](./ssz_static/README.md)

@@ -38,7 +38,7 @@ The `-j N` flag makes the generators run in parallel, with `N` being the amount
 The makefile auto-detects generators in the `test_generators` directory and provides a tests-gen target for each generator. See example:

 ```bash
-make ./yaml_tests/shuffling/
+make ./eth2.0-spec-tests/tests/shuffling/
 ```

 ## Developing a generator

@@ -66,8 +66,13 @@ def test_small_penalty(spec, state):
     spec.process_slashings(state)
     yield 'post', state

-    assert state.balances[0] == pre_slash_balances[0] - (state.validators[0].effective_balance
-                                                         * 3 * total_penalties // total_balance)
+    expected_penalty = (
+        state.validators[0].effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT
+        * (3 * total_penalties)
+        // total_balance
+        * spec.EFFECTIVE_BALANCE_INCREMENT
+    )
+    assert state.balances[0] == pre_slash_balances[0] - expected_penalty


 @with_all_phases

@@ -121,5 +126,10 @@ def test_scaled_penalties(spec, state):

     for i in slashed_indices:
         v = state.validators[i]
-        penalty = v.effective_balance * total_penalties * 3 // total_balance
-        assert state.balances[i] == pre_slash_balances[i] - penalty
+        expected_penalty = (
+            v.effective_balance // spec.EFFECTIVE_BALANCE_INCREMENT
+            * (3 * total_penalties)
+            // (total_balance)
+            * spec.EFFECTIVE_BALANCE_INCREMENT
+        )
+        assert state.balances[i] == pre_slash_balances[i] - expected_penalty

@@ -1,4 +1,4 @@
-from .hash_function import hash
+from eth2spec.utils.hash_function import hash
 from math import log2


@@ -21,6 +21,8 @@ def calc_merkle_tree_from_leaves(values, layer_count=32):


 def get_merkle_root(values, pad_to=1):
+    if pad_to == 0:
+        return zerohashes[0]
     layer_count = int(log2(pad_to))
     if len(values) == 0:
         return zerohashes[layer_count]

@@ -35,10 +37,21 @@ def get_merkle_proof(tree, item_index):
     return proof


-def merkleize_chunks(chunks, pad_to: int=1):
+def merkleize_chunks(chunks, limit=None):
+    # If no limit is defined, we are just merkleizing chunks (e.g. SSZ container).
+    if limit is None:
+        limit = len(chunks)
+
     count = len(chunks)
+    # See if the input is within expected size.
+    # If not, a list-limit is set incorrectly, or a value is unexpectedly large.
+    assert count <= limit
+
+    if limit == 0:
+        return zerohashes[0]
+
     depth = max(count - 1, 0).bit_length()
-    max_depth = max(depth, (pad_to - 1).bit_length())
+    max_depth = (limit - 1).bit_length()
     tmp = [None for _ in range(max_depth + 1)]

     def merge(h, i):

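For illustration, the new `limit` semantics in use (a sketch, assuming the pyspec utils are importable as `eth2spec.utils.merkle_minimal`):

```python
from eth2spec.utils.merkle_minimal import merkleize_chunks

chunk = b'\x01' * 32
assert merkleize_chunks([chunk]) == chunk     # one chunk, no limit: the chunk is the root
root = merkleize_chunks([chunk], limit=4)     # virtually padded to a 4-leaf tree
assert root != chunk                          # deeper tree, different root

failed = False
try:
    merkleize_chunks([chunk, chunk], limit=1)  # count exceeds limit
except AssertionError:
    failed = True
assert failed
```
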
@@ -41,11 +41,14 @@ def serialize(obj: SSZValue):
     if isinstance(obj, BasicValue):
         return serialize_basic(obj)
     elif isinstance(obj, Bitvector):
-        as_integer = sum([obj[i] << i for i in range(len(obj))])
-        return as_integer.to_bytes((len(obj) + 7) // 8, "little")
+        return obj.as_bytes()
     elif isinstance(obj, Bitlist):
-        as_integer = (1 << len(obj)) + sum([obj[i] << i for i in range(len(obj))])
-        return as_integer.to_bytes((as_integer.bit_length() + 7) // 8, "little")
+        as_bytearray = list(obj.as_bytes())
+        if len(obj) % 8 == 0:
+            as_bytearray.append(1)
+        else:
+            as_bytearray[len(obj) // 8] |= 1 << (len(obj) % 8)
+        return bytes(as_bytearray)
     elif isinstance(obj, Series):
         return encode_series(obj)
     else:

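The two `Bitlist` branches exist because the delimiter bit may or may not fit in the last payload byte: when the bit length is a multiple of 8 the payload bytes are full, so the delimiter needs a byte of its own. A sketch with plain lists of bits:

```python
def bitlist_bytes(bits):
    # Same packing as Bits.as_bytes plus the delimiter, for illustration only.
    array = [0] * ((len(bits) // 8) + 1)
    for i, bit in enumerate(bits):
        array[i // 8] |= bit << (i % 8)
    array[len(bits) // 8] |= 1 << (len(bits) % 8)
    return bytes(array)

assert bitlist_bytes([1, 1, 0, 0, 1]) == b'\x33'   # delimiter shares the byte (bit 5)
assert bitlist_bytes([1] * 8) == b'\xff\x01'       # full byte: delimiter moves to a new byte
```
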
@@ -92,12 +95,10 @@ def encode_series(values: Series):
 def pack(values: Series):
     if isinstance(values, bytes):  # Bytes and BytesN are already packed
         return values
-    elif isinstance(values, Bitvector):
-        as_integer = sum([values[i] << i for i in range(len(values))])
-        return as_integer.to_bytes((values.length + 7) // 8, "little")
-    elif isinstance(values, Bitlist):
-        as_integer = sum([values[i] << i for i in range(len(values))])
-        return as_integer.to_bytes((values.length + 7) // 8, "little")
+    elif isinstance(values, Bits):
+        # packs the bits in bytes, left-aligned.
+        # Exclusive length delimiting bits for bitlists.
+        return values.as_bytes()
     return b''.join([serialize_basic(value) for value in values])

@@ -126,6 +127,7 @@ def item_length(typ: SSZType) -> int:


 def chunk_count(typ: SSZType) -> int:
+    # note that for lists, .length *on the type* describes the list limit.
     if isinstance(typ, BasicType):
         return 1
     elif issubclass(typ, Bits):

@@ -150,7 +152,7 @@ def hash_tree_root(obj: SSZValue):
         raise Exception(f"Type not supported: {type(obj)}")

     if isinstance(obj, (List, Bytes, Bitlist)):
-        return mix_in_length(merkleize_chunks(leaves, pad_to=chunk_count(obj.type())), len(obj))
+        return mix_in_length(merkleize_chunks(leaves, limit=chunk_count(obj.type())), len(obj))
     else:
         return merkleize_chunks(leaves)

|
@ -354,7 +354,12 @@ class BitElementsType(ElementsType):
|
|||
|
||||
|
||||
class Bits(BaseList, metaclass=BitElementsType):
|
||||
pass
|
||||
|
||||
def as_bytes(self):
|
||||
as_bytearray = [0] * ((len(self) + 7) // 8)
|
||||
for i in range(len(self)):
|
||||
as_bytearray[i // 8] |= int(self[i]) << (i % 8)
|
||||
return bytes(as_bytearray)
|
||||
|
||||
|
||||
class Bitlist(Bits):
|
||||
|
|
|
@@ -8,7 +8,8 @@ def h(a: bytes, b: bytes) -> bytes:


 def e(v: int) -> bytes:
-    return v.to_bytes(length=32, byteorder='little')
+    # prefix with 0xfff... to make it non-zero
+    return b'\xff' * 28 + v.to_bytes(length=4, byteorder='little')


 def z(i: int) -> bytes:

@@ -16,44 +17,64 @@ def z(i: int) -> bytes:


 cases = [
-    (0, 0, 1, z(0)),
-    (0, 1, 1, e(0)),
-    (1, 0, 2, h(z(0), z(0))),
-    (1, 1, 2, h(e(0), z(0))),
-    (1, 2, 2, h(e(0), e(1))),
-    (2, 0, 4, h(h(z(0), z(0)), z(1))),
-    (2, 1, 4, h(h(e(0), z(0)), z(1))),
-    (2, 2, 4, h(h(e(0), e(1)), z(1))),
-    (2, 3, 4, h(h(e(0), e(1)), h(e(2), z(0)))),
-    (2, 4, 4, h(h(e(0), e(1)), h(e(2), e(3)))),
-    (3, 0, 8, h(h(h(z(0), z(0)), z(1)), z(2))),
-    (3, 1, 8, h(h(h(e(0), z(0)), z(1)), z(2))),
-    (3, 2, 8, h(h(h(e(0), e(1)), z(1)), z(2))),
-    (3, 3, 8, h(h(h(e(0), e(1)), h(e(2), z(0))), z(2))),
-    (3, 4, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), z(2))),
-    (3, 5, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1)))),
-    (3, 6, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0))))),
-    (3, 7, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0))))),
-    (3, 8, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7))))),
-    (4, 0, 16, h(h(h(h(z(0), z(0)), z(1)), z(2)), z(3))),
-    (4, 1, 16, h(h(h(h(e(0), z(0)), z(1)), z(2)), z(3))),
-    (4, 2, 16, h(h(h(h(e(0), e(1)), z(1)), z(2)), z(3))),
-    (4, 3, 16, h(h(h(h(e(0), e(1)), h(e(2), z(0))), z(2)), z(3))),
-    (4, 4, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), z(2)), z(3))),
-    (4, 5, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1))), z(3))),
-    (4, 6, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0)))), z(3))),
-    (4, 7, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0)))), z(3))),
-    (4, 8, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), z(3))),
-    (4, 9, 16,
-     h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), h(h(h(e(8), z(0)), z(1)), z(2)))),
+    # limit 0: always zero hash
+    (0, 0, z(0)),
+    (1, 0, None),  # cut-off due to limit
+    (2, 0, None),  # cut-off due to limit
+    # limit 1: padded to 1 element if not already. Returned (like identity func)
+    (0, 1, z(0)),
+    (1, 1, e(0)),
+    (2, 1, None),  # cut-off due to limit
+    (1, 1, e(0)),
+    (0, 2, h(z(0), z(0))),
+    (1, 2, h(e(0), z(0))),
+    (2, 2, h(e(0), e(1))),
+    (3, 2, None),  # cut-off due to limit
+    (16, 2, None),  # bigger cut-off due to limit
+    (0, 4, h(h(z(0), z(0)), z(1))),
+    (1, 4, h(h(e(0), z(0)), z(1))),
+    (2, 4, h(h(e(0), e(1)), z(1))),
+    (3, 4, h(h(e(0), e(1)), h(e(2), z(0)))),
+    (4, 4, h(h(e(0), e(1)), h(e(2), e(3)))),
+    (5, 4, None),  # cut-off due to limit
+    (0, 8, h(h(h(z(0), z(0)), z(1)), z(2))),
+    (1, 8, h(h(h(e(0), z(0)), z(1)), z(2))),
+    (2, 8, h(h(h(e(0), e(1)), z(1)), z(2))),
+    (3, 8, h(h(h(e(0), e(1)), h(e(2), z(0))), z(2))),
+    (4, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), z(2))),
+    (5, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1)))),
+    (6, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0))))),
+    (7, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0))))),
+    (8, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7))))),
+    (9, 8, None),  # cut-off due to limit
+    (0, 16, h(h(h(h(z(0), z(0)), z(1)), z(2)), z(3))),
+    (1, 16, h(h(h(h(e(0), z(0)), z(1)), z(2)), z(3))),
+    (2, 16, h(h(h(h(e(0), e(1)), z(1)), z(2)), z(3))),
+    (3, 16, h(h(h(h(e(0), e(1)), h(e(2), z(0))), z(2)), z(3))),
+    (4, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), z(2)), z(3))),
+    (5, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1))), z(3))),
+    (6, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0)))), z(3))),
+    (7, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0)))), z(3))),
+    (8, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), z(3))),
+    (9, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), h(h(h(e(8), z(0)), z(1)), z(2)))),
 ]


 @pytest.mark.parametrize(
-    'depth,count,pow2,value',
+    'count,limit,value',
     cases,
 )
-def test_merkleize_chunks_and_get_merkle_root(depth, count, pow2, value):
+def test_merkleize_chunks_and_get_merkle_root(count, limit, value):
     chunks = [e(i) for i in range(count)]
-    assert merkleize_chunks(chunks, pad_to=pow2) == value
-    assert get_merkle_root(chunks, pad_to=pow2) == value
+    if value is None:
+        bad = False
+        try:
+            merkleize_chunks(chunks, limit=limit)
+            bad = True
+        except AssertionError:
+            pass
+        if bad:
+            assert False, "expected merkleization to be invalid"
+    else:
+        assert merkleize_chunks(chunks, limit=limit) == value
+        assert get_merkle_root(chunks, pad_to=limit) == value