eth2.0-specs/test_libs/pyspec/eth2spec/utils/ssz/ssz_impl.py

161 lines
5.2 KiB
Python
Raw Normal View History

2019-06-20 17:57:50 +00:00
from ..merkle_minimal import merkleize_chunks
from ..hash_function import hash
2019-06-20 17:51:38 +00:00
from .ssz_typing import (
SSZValue, SSZType, BasicValue, BasicType, Series, Elements, boolean, Container, List, Bytes,
Bitlist, Bitvector, uint,
)
2019-05-24 22:05:03 +00:00
# SSZ Serialization
# -----------------------------
2019-05-24 22:05:03 +00:00
BYTES_PER_LENGTH_OFFSET = 4
2019-06-20 17:57:50 +00:00
def serialize_basic(value: SSZValue):
    """Serialize a basic SSZ value (uint or boolean) to little-endian bytes."""
    if isinstance(value, uint):
        # Unsigned integers serialize to their fixed byte width, little-endian.
        return value.to_bytes(value.type().byte_len, 'little')
    if isinstance(value, boolean):
        # Booleans are a single canonical byte.
        return b'\x01' if value else b'\x00'
    raise Exception(f"Type not supported: {type(value)}")
2019-05-30 13:42:42 +00:00
2019-06-20 17:57:50 +00:00
def deserialize_basic(value, typ: BasicType):
    """Deserialize little-endian bytes into a basic SSZ value of type `typ`.

    Asserts that a boolean payload is a canonical single byte (0x00 or 0x01);
    raises for unsupported types.
    """
    if issubclass(typ, uint):
        return typ(int.from_bytes(value, 'little'))
    elif issubclass(typ, boolean):
        # Fixed: the bool type imported by this module is `boolean`; the
        # previous name `Bool` is undefined here and raised NameError.
        assert value in (b'\x00', b'\x01')
        return typ(value == b'\x01')
    else:
        raise Exception(f"Type not supported: {typ}")
2019-05-25 18:06:42 +00:00
2019-06-20 17:57:50 +00:00
def is_empty(obj: SSZValue):
return type(obj).default() == obj
2019-06-05 13:29:26 +00:00
2019-06-20 17:57:50 +00:00
def serialize(obj: SSZValue):
    """Serialize any SSZ value to bytes, dispatching on its kind."""
    if isinstance(obj, BasicValue):
        return serialize_basic(obj)
    if isinstance(obj, Bitvector):
        # Pack bits little-endian into the minimal fixed number of bytes.
        bits_as_int = sum(obj[i] << i for i in range(len(obj)))
        return bits_as_int.to_bytes((len(obj) + 7) // 8, "little")
    if isinstance(obj, Bitlist):
        # A single delimiter bit above the highest data bit encodes the
        # bit-length, so it can be recovered on deserialization.
        bits_as_int = (1 << len(obj)) + sum(obj[i] << i for i in range(len(obj)))
        return bits_as_int.to_bytes((bits_as_int.bit_length() + 7) // 8, "little")
    if isinstance(obj, Series):
        return encode_series(obj)
    raise Exception(f"Type not supported: {type(obj)}")
2019-06-20 17:57:50 +00:00
def encode_series(values: Series):
if isinstance(values, bytes): # Bytes and BytesN are already like serialized output
return values
# Recursively serialize
2019-06-20 17:57:50 +00:00
parts = [(v.type().is_fixed_size(), serialize(v)) for v in values]
# Compute and check lengths
fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
for (constant_size, serialized) in parts]
variable_lengths = [len(serialized) if not constant_size else 0
for (constant_size, serialized) in parts]
# Check if integer is not out of bounds (Python)
assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)
# Interleave offsets of variable-size parts with fixed-size parts.
# Avoid quadratic complexity in calculation of offsets.
offset = sum(fixed_lengths)
variable_parts = []
fixed_parts = []
for (constant_size, serialized) in parts:
if constant_size:
fixed_parts.append(serialized)
else:
fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
variable_parts.append(serialized)
offset += len(serialized)
# Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
return b''.join(fixed_parts + variable_parts)
2019-05-24 22:05:03 +00:00
2019-05-25 18:06:42 +00:00
# SSZ Hash-tree-root
# -----------------------------
2019-06-20 17:57:50 +00:00
def pack(values: Series):
if isinstance(values, bytes): # Bytes and BytesN are already packed
return values
elif isinstance(values, Bitvector):
as_integer = sum([values[i] << i for i in range(len(values))])
return as_integer.to_bytes((values.length + 7) // 8, "little")
elif isinstance(values, Bitlist):
as_integer = (1 << len(values)) + sum([values[i] << i for i in range(len(values))])
return as_integer.to_bytes((values.length + 7) // 8, "little")
2019-06-20 17:57:50 +00:00
return b''.join([serialize_basic(value) for value in values])
2019-05-25 18:06:42 +00:00
2019-05-25 18:06:42 +00:00
def chunkify(bytez):
    """Split `bytez` into 32-byte chunks, zero-padding the final chunk."""
    padding = -len(bytez) % 32  # bytes needed to reach the next 32-byte multiple
    padded = bytez + b'\x00' * padding
    return [padded[start:start + 32] for start in range(0, len(padded), 32)]
2019-05-25 18:06:42 +00:00
def mix_in_length(root, length):
    """Hash `root` together with the 256-bit little-endian encoding of `length`."""
    length_bytes = length.to_bytes(32, 'little')
    return hash(root + length_bytes)
2019-06-20 17:57:50 +00:00
def is_bottom_layer_kind(typ: SSZType):
    """True if values of `typ` pack directly into merkle chunks: either a
    basic type, or a sequence whose element type is basic."""
    if isinstance(typ, BasicType):
        return True
    return issubclass(typ, Elements) and isinstance(typ.elem_type, BasicType)
2019-06-20 17:57:50 +00:00
def item_length(typ: SSZType) -> int:
    """Byte length of one packed element: the basic type's own size, or 32
    bytes (one hash-tree-root) for composite elements."""
    return typ.byte_len if issubclass(typ, BasicValue) else 32
2019-05-28 18:33:12 +00:00
2019-06-20 17:57:50 +00:00
def chunk_count(typ: SSZType) -> int:
    """Number of bottom-layer chunks the merkleization of `typ` pads to."""
    if isinstance(typ, BasicType):
        # A single basic value occupies exactly one chunk.
        return 1
    if issubclass(typ, Elements):
        # Full capacity (typ.length elements) in bytes, rounded up to chunks.
        return (typ.length * item_length(typ.elem_type) + 31) // 32
    if issubclass(typ, Container):
        # One subtree root per field.
        return len(typ.get_fields())
    raise Exception(f"Type not supported: {typ}")
def hash_tree_root(obj: SSZValue):
    """Compute the SSZ hash-tree-root of any value.

    Bottom-layer kinds pack their contents straight into chunks; other
    series merkleize the roots of their elements. List-like types (List,
    Bytes, Bitlist) additionally mix their length into the root and pad
    the tree to the type's full chunk capacity.
    """
    if isinstance(obj, Series):
        if is_bottom_layer_kind(obj.type()):
            leaves = chunkify(pack(obj))
        else:
            # Composite elements: one 32-byte root per element.
            leaves = [hash_tree_root(element) for element in obj]
    elif isinstance(obj, BasicValue):
        leaves = chunkify(serialize_basic(obj))
    else:
        raise Exception(f"Type not supported: {type(obj)}")
    if isinstance(obj, (List, Bytes, Bitlist)):
        padded_root = merkleize_chunks(leaves, pad_to=chunk_count(obj.type()))
        return mix_in_length(padded_root, len(obj))
    return merkleize_chunks(leaves)
2019-05-25 18:06:42 +00:00
2019-06-20 17:57:50 +00:00
def signing_root(obj: Container):
    """Hash-tree-root of `obj` with its final field (the signature) omitted."""
    all_fields = list(obj)
    # One 32-byte root per remaining field; chunkify of the joined roots
    # therefore yields exactly one chunk per field.
    leaves = [hash_tree_root(field) for field in all_fields[:-1]]
    return merkleize_chunks(chunkify(b''.join(leaves)))