Mirror of https://github.com/status-im/eth2.0-specs.git (synced 2025-02-21 06:48:12 +00:00)
fix merkleization with 0-limit case, and enforce padding limit

parent 6dc306700b
commit b2c8570606
@@ -1,4 +1,4 @@
-from .hash_function import hash
+from eth2spec.utils.hash_function import hash
 from math import log2
 
@@ -21,6 +21,8 @@ def calc_merkle_tree_from_leaves(values, layer_count=32):
 
 
 def get_merkle_root(values, pad_to=1):
+    if pad_to == 0:
+        return zerohashes[0]
     layer_count = int(log2(pad_to))
     if len(values) == 0:
         return zerohashes[layer_count]
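The new guard in get_merkle_root matters because int(log2(pad_to)) raises a ValueError when pad_to is 0, so a 0 limit previously could not be merkleized at all; it now short-circuits to the zero hash. A minimal sketch of the behaviour, assuming the helpers live in eth2spec.utils.merkle_minimal (the module path is an assumption, not shown in this diff):

    # sketch only: the module path is an assumption
    from eth2spec.utils.merkle_minimal import get_merkle_root, zerohashes

    assert get_merkle_root([], pad_to=0) == zerohashes[0]  # limit 0: always the zero hash
    assert get_merkle_root([], pad_to=4) == zerohashes[2]  # empty input padded to 4 leaves: depth-2 zero hash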
@@ -36,9 +38,11 @@ def get_merkle_proof(tree, item_index):
 
 
 def merkleize_chunks(chunks, pad_to: int=1):
-    count = len(chunks)
+    if pad_to == 0:
+        return zerohashes[0]
+    count = min(len(chunks), pad_to)
     depth = max(count - 1, 0).bit_length()
-    max_depth = max(depth, (pad_to - 1).bit_length())
+    max_depth = (pad_to - 1).bit_length()
     tmp = [None for _ in range(max_depth + 1)]
 
     def merge(h, i):
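merkleize_chunks gets the same 0-limit short-circuit, and count = min(len(chunks), pad_to) now enforces the padding limit by ignoring any chunks beyond it, which is what the "cut-off due to limit" test cases below exercise. A hedged usage sketch under the same module-path assumption:

    # sketch only: the module path is an assumption
    from eth2spec.utils.merkle_minimal import merkleize_chunks, zerohashes
    from eth2spec.utils.hash_function import hash

    def chunk(v: int) -> bytes:
        # non-zero 32-byte test chunk, shaped like e() in the updated tests
        return b'\xff' * 28 + v.to_bytes(length=4, byteorder='little')

    # limit 0: always the zero hash, whatever the input
    assert merkleize_chunks([chunk(0)], pad_to=0) == zerohashes[0]
    # limit 2: the third chunk is cut off, mirroring the (3, 2, h(e(0), e(1))) case
    assert merkleize_chunks([chunk(0), chunk(1), chunk(2)], pad_to=2) == hash(chunk(0) + chunk(1))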
@@ -8,7 +8,8 @@ def h(a: bytes, b: bytes) -> bytes:
 
 
 def e(v: int) -> bytes:
-    return v.to_bytes(length=32, byteorder='little')
+    # prefix with 0xfff... to make it non-zero
+    return b'\xff' * 28 + v.to_bytes(length=4, byteorder='little')
 
 
 def z(i: int) -> bytes:
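The test chunk encoding changes because the old e(0) was 32 zero bytes, exactly the same as a zero-padding chunk, so cases built from e(0) could not tell real data apart from padding. A self-contained sketch of the difference:

    def e_old(v: int) -> bytes:
        return v.to_bytes(length=32, byteorder='little')

    def e_new(v: int) -> bytes:
        # prefix with 0xff bytes so the chunk is never all zero
        return b'\xff' * 28 + v.to_bytes(length=4, byteorder='little')

    assert e_old(0) == b'\x00' * 32  # indistinguishable from the zero chunk z(0)
    assert e_new(0) != b'\x00' * 32  # always distinguishable from padding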
@@ -16,44 +17,54 @@ def z(i: int) -> bytes:
 
 
 cases = [
-    (0, 0, 1, z(0)),
-    (0, 1, 1, e(0)),
-    (1, 0, 2, h(z(0), z(0))),
-    (1, 1, 2, h(e(0), z(0))),
-    (1, 2, 2, h(e(0), e(1))),
-    (2, 0, 4, h(h(z(0), z(0)), z(1))),
-    (2, 1, 4, h(h(e(0), z(0)), z(1))),
-    (2, 2, 4, h(h(e(0), e(1)), z(1))),
-    (2, 3, 4, h(h(e(0), e(1)), h(e(2), z(0)))),
-    (2, 4, 4, h(h(e(0), e(1)), h(e(2), e(3)))),
-    (3, 0, 8, h(h(h(z(0), z(0)), z(1)), z(2))),
-    (3, 1, 8, h(h(h(e(0), z(0)), z(1)), z(2))),
-    (3, 2, 8, h(h(h(e(0), e(1)), z(1)), z(2))),
-    (3, 3, 8, h(h(h(e(0), e(1)), h(e(2), z(0))), z(2))),
-    (3, 4, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), z(2))),
-    (3, 5, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1)))),
-    (3, 6, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0))))),
-    (3, 7, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0))))),
-    (3, 8, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7))))),
-    (4, 0, 16, h(h(h(h(z(0), z(0)), z(1)), z(2)), z(3))),
-    (4, 1, 16, h(h(h(h(e(0), z(0)), z(1)), z(2)), z(3))),
-    (4, 2, 16, h(h(h(h(e(0), e(1)), z(1)), z(2)), z(3))),
-    (4, 3, 16, h(h(h(h(e(0), e(1)), h(e(2), z(0))), z(2)), z(3))),
-    (4, 4, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), z(2)), z(3))),
-    (4, 5, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1))), z(3))),
-    (4, 6, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0)))), z(3))),
-    (4, 7, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0)))), z(3))),
-    (4, 8, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), z(3))),
-    (4, 9, 16,
-     h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), h(h(h(e(8), z(0)), z(1)), z(2)))),
+    # limit 0: always zero hash
+    (0, 0, z(0)),
+    (1, 0, z(0)),  # cut-off due to limit
+    (2, 0, z(0)),  # cut-off due to limit
+    # limit 1: padded to 1 element if not already. Returned (like identity func)
+    (0, 1, z(0)),
+    (1, 1, e(0)),
+    (2, 1, e(0)),  # cut-off due to limit
+    (1, 1, e(0)),
+    (0, 2, h(z(0), z(0))),
+    (1, 2, h(e(0), z(0))),
+    (2, 2, h(e(0), e(1))),
+    (3, 2, h(e(0), e(1))),  # cut-off due to limit
+    (16, 2, h(e(0), e(1))),  # bigger cut-off due to limit
+    (0, 4, h(h(z(0), z(0)), z(1))),
+    (1, 4, h(h(e(0), z(0)), z(1))),
+    (2, 4, h(h(e(0), e(1)), z(1))),
+    (3, 4, h(h(e(0), e(1)), h(e(2), z(0)))),
+    (4, 4, h(h(e(0), e(1)), h(e(2), e(3)))),
+    (5, 4, h(h(e(0), e(1)), h(e(2), e(3)))),  # cut-off due to limit
+    (0, 8, h(h(h(z(0), z(0)), z(1)), z(2))),
+    (1, 8, h(h(h(e(0), z(0)), z(1)), z(2))),
+    (2, 8, h(h(h(e(0), e(1)), z(1)), z(2))),
+    (3, 8, h(h(h(e(0), e(1)), h(e(2), z(0))), z(2))),
+    (4, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), z(2))),
+    (5, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1)))),
+    (6, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0))))),
+    (7, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0))))),
+    (8, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7))))),
+    (9, 8, h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7))))),  # cut-off due to limit
+    (0, 16, h(h(h(h(z(0), z(0)), z(1)), z(2)), z(3))),
+    (1, 16, h(h(h(h(e(0), z(0)), z(1)), z(2)), z(3))),
+    (2, 16, h(h(h(h(e(0), e(1)), z(1)), z(2)), z(3))),
+    (3, 16, h(h(h(h(e(0), e(1)), h(e(2), z(0))), z(2)), z(3))),
+    (4, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), z(2)), z(3))),
+    (5, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), z(0)), z(1))), z(3))),
+    (6, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(z(0), z(0)))), z(3))),
+    (7, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), z(0)))), z(3))),
+    (8, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), z(3))),
+    (9, 16, h(h(h(h(e(0), e(1)), h(e(2), e(3))), h(h(e(4), e(5)), h(e(6), e(7)))), h(h(h(e(8), z(0)), z(1)), z(2)))),
 ]
 
 
 @pytest.mark.parametrize(
-    'depth,count,pow2,value',
+    'count,pad_to,value',
     cases,
 )
-def test_merkleize_chunks_and_get_merkle_root(depth, count, pow2, value):
+def test_merkleize_chunks_and_get_merkle_root(count, pad_to, value):
     chunks = [e(i) for i in range(count)]
-    assert merkleize_chunks(chunks, pad_to=pow2) == value
-    assert get_merkle_root(chunks, pad_to=pow2) == value
+    assert merkleize_chunks(chunks, pad_to=pad_to) == value
+    assert get_merkle_root(chunks, pad_to=pad_to) == value
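With the parametrization renamed from 'depth,count,pow2,value' to 'count,pad_to,value', each row is just an input length, a padding limit, and an expected root, and both merkleize_chunks and get_merkle_root are checked against the same table. A hedged spot-check of one row, (4, 4, h(h(e(0), e(1)), h(e(2), e(3)))), again assuming the eth2spec.utils.merkle_minimal module path:

    # sketch only: the module path is an assumption
    from eth2spec.utils.merkle_minimal import merkleize_chunks, get_merkle_root

    def e(v: int) -> bytes:
        # same non-zero chunk encoding as the updated tests
        return b'\xff' * 28 + v.to_bytes(length=4, byteorder='little')

    chunks = [e(i) for i in range(4)]
    # the two implementations must agree on the same root
    assert merkleize_chunks(chunks, pad_to=4) == get_merkle_root(chunks, pad_to=4)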