from ..merkle_minimal import merkleize_chunks
from ..hash_function import hash
from .ssz_typing import (
    SSZValue, SSZType, BasicValue, BasicType, Series, Elements, Bits, boolean, Container, List, Bytes,
    Bitlist, Bitvector, uint,
)

# SSZ Serialization
# -----------------------------

BYTES_PER_LENGTH_OFFSET = 4


def serialize_basic(value: SSZValue):
    if isinstance(value, uint):
        return value.to_bytes(value.type().byte_len, 'little')
    elif isinstance(value, boolean):
        if value:
            return b'\x01'
        else:
            return b'\x00'
    else:
        raise Exception(f"Type not supported: {type(value)}")
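
# Illustrative example (a sketch, assuming a `uint16` type is defined in
# `.ssz_typing` alongside `uint`): basic values serialize little-endian, e.g.
#     serialize_basic(uint16(0x0102)) == b'\x02\x01'
#     serialize_basic(boolean(True)) == b'\x01'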


def deserialize_basic(value, typ: BasicType):
    if issubclass(typ, uint):
        return typ(int.from_bytes(value, 'little'))
    elif issubclass(typ, boolean):
        assert value in (b'\x00', b'\x01')
        return typ(value == b'\x01')
    else:
        raise Exception(f"Type not supported: {typ}")
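
# Illustrative round-trip (a sketch, using the assumed `uint16` type from above):
#     deserialize_basic(b'\x02\x01', uint16) == uint16(0x0102)
#     deserialize_basic(serialize_basic(boolean(False)), boolean) == boolean(False)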


def is_zero(obj: SSZValue):
    return type(obj).default() == obj


def serialize(obj: SSZValue):
    if isinstance(obj, BasicValue):
        return serialize_basic(obj)
    elif isinstance(obj, Bitvector):
        return obj.as_bytes()
    elif isinstance(obj, Bitlist):
        as_bytearray = list(obj.as_bytes())
        # Set the length-delimiting bit just past the last data bit
        if len(obj) % 8 == 0:
            as_bytearray.append(1)
        else:
            as_bytearray[len(obj) // 8] |= 1 << (len(obj) % 8)
        return bytes(as_bytearray)
    elif isinstance(obj, Series):
        return encode_series(obj)
    else:
        raise Exception(f"Type not supported: {type(obj)}")
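
# Illustrative Bitlist example (a sketch, assuming `Bitlist[8]` can be constructed
# from individual bits): the bits [1, 0, 1] pack into 0b101, and the delimiting bit
# is set just past the last data bit to encode the length:
#     serialize(Bitlist[8](1, 0, 1)) == bytes([0b1101]) == b'\x0d'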


def encode_series(values: Series):
    if isinstance(values, bytes):  # Bytes and BytesN are already like serialized output
        return values

    # Recursively serialize
    parts = [(v.type().is_fixed_size(), serialize(v)) for v in values]

    # Compute and check lengths
    fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
                     for (constant_size, serialized) in parts]
    variable_lengths = [len(serialized) if not constant_size else 0
                        for (constant_size, serialized) in parts]

    # Sanity check: the total length must fit within a BYTES_PER_LENGTH_OFFSET-byte offset
    assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)

    # Interleave offsets of variable-size parts with fixed-size parts.
    # Avoid quadratic complexity in calculation of offsets.
    offset = sum(fixed_lengths)
    variable_parts = []
    fixed_parts = []
    for (constant_size, serialized) in parts:
        if constant_size:
            fixed_parts.append(serialized)
        else:
            fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
            variable_parts.append(serialized)
            offset += len(serialized)

    # Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
    return b''.join(fixed_parts + variable_parts)
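
# Illustrative offset layout (a sketch): for a container with fields
# (uint16 a=0x1122, List[uint8, N] b=[1, 2, 3], uint16 c=0x3344) the output is
#     fixed region:    22 11 | 08 00 00 00 | 44 33
#     variable region: 01 02 03
# where the offset 8 = 2 + 4 + 2 points at the first byte of b's data.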


# SSZ Hash-tree-root
# -----------------------------


def pack(values: Series):
    if isinstance(values, bytes):  # Bytes and BytesN are already packed
        return values
    elif isinstance(values, Bits):
        # Packs the bits into bytes, left-aligned.
        # Excludes the length-delimiting bit used for bitlists.
        return values.as_bytes()
    return b''.join([serialize_basic(value) for value in values])
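
# Illustrative example (a sketch, assuming `Vector` and `uint32` are defined in
# `.ssz_typing`): packing a vector of basic values concatenates their
# little-endian serializations with no offsets:
#     pack(Vector[uint32, 2](1, 2)) == b'\x01\x00\x00\x00\x02\x00\x00\x00'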


def chunkify(bytez):
    # pad `bytez` to nearest 32-byte multiple
    bytez += b'\x00' * (-len(bytez) % 32)
    return [bytez[i:i + 32] for i in range(0, len(bytez), 32)]
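
# e.g. chunkify(b'\x01' * 40) yields two 32-byte chunks: the first holds the
# first 32 bytes, the second holds the remaining 8 bytes followed by 24 zero bytes.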


def mix_in_length(root, length):
    return hash(root + length.to_bytes(32, 'little'))
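
# Used for list-like types: the Merkle root of the data is hashed together with the
# length (as a 32-byte little-endian number), so that, e.g., an empty List[uint8, N]
# and List[uint8, N]([0]) produce distinct roots even though their padded data
# chunks are identical.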


def is_bottom_layer_kind(typ: SSZType):
    return (
        isinstance(typ, BasicType) or
        (issubclass(typ, Elements) and isinstance(typ.elem_type, BasicType))
    )


def item_length(typ: SSZType) -> int:
    if issubclass(typ, BasicValue):
        return typ.byte_len
    else:
        return 32


def chunk_count(typ: SSZType) -> int:
    # note that for lists, .length *on the type* describes the list limit.
    if isinstance(typ, BasicType):
        return 1
    elif issubclass(typ, Bits):
        return (typ.length + 255) // 256
    elif issubclass(typ, Elements):
        return (typ.length * item_length(typ.elem_type) + 31) // 32
    elif issubclass(typ, Container):
        return len(typ.get_fields())
    else:
        raise Exception(f"Type not supported: {typ}")
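
# Illustrative counts (a sketch, assuming a `uint64` type is defined):
#     chunk_count(uint64) == 1
#     chunk_count(List[uint64, 1024]) == (1024 * 8 + 31) // 32 == 256
#     chunk_count(Bitvector[300]) == (300 + 255) // 256 == 2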


def hash_tree_root(obj: SSZValue):
    if isinstance(obj, Series):
        if is_bottom_layer_kind(obj.type()):
            leaves = chunkify(pack(obj))
        else:
            leaves = [hash_tree_root(value) for value in obj]
    elif isinstance(obj, BasicValue):
        leaves = chunkify(serialize_basic(obj))
    else:
        raise Exception(f"Type not supported: {type(obj)}")

    if isinstance(obj, (List, Bytes, Bitlist)):
        return mix_in_length(merkleize_chunks(leaves, limit=chunk_count(obj.type())), len(obj))
    else:
        return merkleize_chunks(leaves)
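
# Illustrative behavior (a sketch): for a vector of basic values the root is the
# plain merkleization of its packed chunks, while for a list the chunks are
# merkleized up to the limit implied by chunk_count and then mixed with the actual
# length, so the same elements under different list limits give different roots.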


def signing_root(obj: Container):
    # ignore the last field
    fields = [field for field in obj][:-1]
    leaves = [hash_tree_root(f) for f in fields]
    return merkleize_chunks(chunkify(b''.join(leaves)))
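
# Illustrative use (a sketch): for a container whose last field is its signature,
# signing_root returns the root computed over every field except that signature,
# which is the message that actually gets signed.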