Starting work on partials

Vitalik Buterin 2019-05-28 09:30:35 -04:00
parent 132d3c976a
commit 19601df572
4 changed files with 98 additions and 13 deletions

__init__.py

@@ -1 +1,2 @@
from .ssz_impl import *
+from .ssz_partials import *

ssz_impl.py

@@ -35,7 +35,7 @@ def is_fixed_size(typ):
@infer_input_type
-def serialize(obj, typ):
+def serialize(obj, typ=None):
    if is_basic_type(typ):
        return serialize_basic(obj, typ)
    elif is_list_type(typ) or is_vector_type(typ):
@@ -100,7 +100,7 @@ def mix_in_length(root, length):
@infer_input_type
-def hash_tree_root(obj, typ):
+def hash_tree_root(obj, typ=None):
    if is_basic_type(typ):
        return merkleize_chunks(chunkify(serialize_basic(obj, typ)))
    elif is_list_type(typ) or is_vector_type(typ):
@@ -119,7 +119,7 @@ def hash_tree_root(obj, typ):
@infer_input_type
-def signing_root(obj, typ):
+def signing_root(obj, typ=None):
    assert is_container_typ(typ)
    leaves = [hash_tree_root(elem, subtyp) for elem, subtyp in obj.get_fields()[:-1]]
    return merkleize_chunks(chunkify(b''.join(leaves)))
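With typ defaulting to None, the @infer_input_type decorator (see ssz_typing.py below) infers the type from the value itself, so callers may omit the type argument. A minimal usage sketch, not part of the diff; the import paths and the behaviour of type inference on a bare uint64 value are assumptions:

from eth2spec.utils.ssz.ssz_typing import uint64                   # assumed import path
from eth2spec.utils.ssz.ssz_impl import serialize, hash_tree_root  # assumed import path

value = uint64(123)
# typ must be passed as a keyword; the decorator fills it in when omitted.
assert serialize(value) == serialize(value, typ=uint64)
assert hash_tree_root(value) == hash_tree_root(value, typ=uint64)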

ssz_partials.py

@@ -0,0 +1,81 @@
from ..merkle_minimal import hash, next_power_of_two
from .ssz_typing import *
from .ssz_impl import *

ZERO_CHUNK = b'\x00' * 32

def last_power_of_two(x):
    return next_power_of_two(x+1) // 2

def concat_generalized_indices(x, y):
    return x * last_power_of_two(y) + y - last_power_of_two(y)

def rebase(objs, new_root):
    return {concat_generalized_indices(new_root, k): v for k, v in objs.items()}

def constrict_generalized_index(x, q):
    depth = last_power_of_two(x // q)
    o = depth + x - q * depth
    if concat_generalized_indices(q, o) != x:
        return None
    return o

def unrebase(objs, q):
    o = {}
    for k, v in objs.items():
        new_k = constrict_generalized_index(k, q)
        if new_k is not None:
            o[new_k] = v
    return o

def filler(starting_position, chunk_count):
    at, skip, end = chunk_count, 1, next_power_of_two(chunk_count)
    value = ZERO_CHUNK
    o = {}
    while at < end:
        while at % (skip*2) == 0:
            skip *= 2
            value = hash(value + value)
        o[starting_position + at] = value
        at += skip
    return o

def merkle_tree_of_chunks(chunks, root):
    starting_index = root * next_power_of_two(len(chunks))
    o = {starting_index + i: chunk for i, chunk in enumerate(chunks)}
    o = {**o, **filler(starting_index, len(chunks))}
    return o

def is_bottom_layer_type(typ):
    return (
        is_basic_type(typ) or
        (is_list_type(typ) or is_vector_type(typ)) and is_basic_type(read_elem_typ(typ))
    )

@infer_input_type
def get_fields(obj, typ=None):
    if is_container_typ(typ):
        return obj.get_fields()
    elif is_list_type(typ) or is_vector_type(typ):
        subtype = read_elem_typ(typ)
        return list(zip(obj, [subtype] * len(obj)))
    else:
        raise Exception("Invalid type")

@infer_input_type
def ssz_all(obj, typ=None, root=1):
    if is_list_type(typ):
        o = {root * 2 + 1: len(obj).to_bytes(32, 'little')}
        base = root * 2
    else:
        o = {}
        base = root
    if is_bottom_layer_type(typ):
        data = serialize_basic(obj, typ) if is_basic_type(typ) else pack(obj, read_elem_typ(typ))
        return {**o, **merkle_tree_of_chunks(chunkify(data), base)}
    else:
        fields = get_fields(obj, typ=typ)
        sub_base = base * next_power_of_two(len(fields))
        for i, (elem, subtype) in enumerate(fields):
            o = {**o, **ssz_all(elem, typ=subtype, root=sub_base + i)}
        return {**o, **filler(sub_base, len(fields))}
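ssz_all returns a flat {generalized index: 32-byte chunk} map for the whole Merkle tree of an object: the root is index 1 and the children of node i are 2*i and 2*i+1. concat_generalized_indices splices the index of a node inside a subtree onto the index of that subtree's root, and constrict_generalized_index inverts it. A small self-contained sanity check of that arithmetic; next_power_of_two is re-declared here with the behaviour assumed of merkle_minimal's helper, and the snippet is not part of the diff:

def next_power_of_two(x):
    return 1 if x == 0 else 2 ** (x - 1).bit_length()   # smallest power of two >= x (assumed)

def last_power_of_two(x):
    return next_power_of_two(x + 1) // 2

def concat_generalized_indices(x, y):
    return x * last_power_of_two(y) + y - last_power_of_two(y)

def constrict_generalized_index(x, q):
    depth = last_power_of_two(x // q)
    o = depth + x - q * depth
    return o if concat_generalized_indices(q, o) == x else None

# Index 3 (right child) inside the subtree rooted at node 2 is node 5 of the full tree.
assert concat_generalized_indices(2, 3) == 5
# Concatenating onto the root (index 1) is the identity.
assert concat_generalized_indices(1, 5) == 5
# constrict_generalized_index recovers the within-subtree index.
assert constrict_generalized_index(5, 2) == 3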

ssz_typing.py

@@ -1,7 +1,10 @@
-from typing import List, Iterable, Type, NewType
+from typing import List, Iterable, Type, NewType, TypeVar
from typing import Union
from inspect import isclass
+T = TypeVar('T')
+L = TypeVar('L')
# SSZ integers
# -----------------------------
@@ -64,13 +67,13 @@ class uint256(uint):

def is_uint(typ):
    # All integers are uint in the scope of the spec here.
    # We default to uint64; bounds can be checked elsewhere.
-    return issubclass(typ, int)
+    return (isinstance(typ, int.__class__) and issubclass(typ, int)) or typ == uint64

def uint_byte_size(typ):
-    if issubclass(typ, uint):
+    if isinstance(typ, int.__class__) and issubclass(typ, uint):
        return typ.byte_len
-    elif issubclass(typ, int):
+    elif typ in (int, uint64):
        # Default to uint64
        return 8
    else:
@@ -109,7 +112,7 @@ class Container(object):
        return [getattr(self, field) for field in cls.get_field_names()]

    def __repr__(self):
-        return {field: getattr(self, field) for field in self.get_field_names()}
+        return repr({field: getattr(self, field) for field in self.get_field_names()})

    @classmethod
    def get_fields_dict(cls):
@@ -221,7 +224,7 @@ class Vector(metaclass=VectorMeta):
        return hash_tree_root(self, self.__class__)

    def __repr__(self):
-        return {'length': self.__class__.length, 'items': self.items}
+        return repr({'length': self.__class__.length, 'items': self.items})

    def __getitem__(self, key):
        return self.items[key]
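Both __repr__ changes in this file fix the same bug: __repr__ must return a str, and returning a dict makes repr() raise TypeError. A minimal standalone illustration of the failure mode and the fix (hypothetical classes, not part of the diff):

class Bad:
    def __repr__(self):
        return {'x': 1}           # repr(Bad()) raises: __repr__ returned non-string

class Good:
    def __repr__(self):
        return repr({'x': 1})     # wrapping in repr() returns the string form

try:
    repr(Bad())
except TypeError:
    pass
assert repr(Good()) == "{'x': 1}"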
@@ -370,10 +373,10 @@ def infer_input_type(fn):
    """
    Decorator to run infer_type on the obj if typ argument is None
    """
-    def infer_helper(obj, typ=None):
+    def infer_helper(obj, *args, typ=None, **kwargs):
        if typ is None:
            typ = infer_type(obj)
-        return fn(obj, typ)
+        return fn(obj, *args, typ=typ, **kwargs)
    return infer_helper
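Forwarding *args/**kwargs through infer_helper is what lets decorated functions accept extra keyword arguments (e.g. root in ssz_partials.ssz_all) while typ is still inferred. A standalone sketch of the pattern with a simplified stand-in for infer_type (the names here are hypothetical, not part of the diff):

def infer_type(obj):
    return type(obj)   # simplified stand-in for the real infer_type

def infer_input_type(fn):
    def infer_helper(obj, *args, typ=None, **kwargs):
        if typ is None:
            typ = infer_type(obj)
        return fn(obj, *args, typ=typ, **kwargs)
    return infer_helper

@infer_input_type
def describe(obj, typ=None, root=1):
    return (typ.__name__, root)

assert describe(5) == ('int', 1)          # typ inferred, root defaulted
assert describe(5, root=3) == ('int', 3)  # extra keyword arguments pass straight through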
@@ -381,10 +384,10 @@ def is_list_type(typ):
    return (hasattr(typ, '_name') and typ._name == 'List') or typ == bytes

def is_vector_type(typ):
-    return issubclass(typ, Vector)
+    return isinstance(typ, int.__class__) and issubclass(typ, Vector)

def is_container_typ(typ):
-    return issubclass(typ, Container)
+    return isinstance(typ, int.__class__) and issubclass(typ, Container)

def read_list_elem_typ(list_typ: Type[List[T]]) -> T:
    if list_typ.__args__ is None or len(list_typ.__args__) != 1: