support all basic types

This commit is contained in:
Jacek Sieka 2020-06-02 11:44:51 +02:00
parent 00acf4f7b2
commit 872d7ff493
No known key found for this signature in database
GPG Key ID: A1B09461ABB656B8
5 changed files with 92 additions and 54 deletions

View File

@@ -206,10 +206,11 @@ OK: 10/10 Fail: 0/10 Skip: 0/10
OK: 1/1 Fail: 0/1 Skip: 0/1
## SSZ navigator
```diff
+ basictype OK
+ lists with max size OK
+ simple object fields OK
```
OK: 2/2 Fail: 0/2 Skip: 0/2
OK: 3/3 Fail: 0/3 Skip: 0/3
## Spec helpers
```diff
+ integer_squareroot OK

View File

@@ -212,10 +212,11 @@ OK: 10/10 Fail: 0/10 Skip: 0/10
OK: 1/1 Fail: 0/1 Skip: 0/1
## SSZ navigator
```diff
+ basictype OK
+ lists with max size OK
+ simple object fields OK
```
OK: 2/2 Fail: 0/2 Skip: 0/2
OK: 3/3 Fail: 0/3 Skip: 0/3
## Spec helpers
```diff
+ integer_squareroot OK

View File

@@ -202,25 +202,19 @@ proc writeSeq[T](w: var SszWriter, value: seq[T])
proc writeVarSizeType(w: var SszWriter, value: auto) {.raises: [Defect, IOError].} =
trs "STARTING VAR SIZE TYPE"
mixin toSszType
type T = type toSszType(value)
when T is List|HashList:
when value is HashArray|HashList:
writeVarSizeType(w, value.data)
elif value is List:
# We reduce code bloat by forwarding all `List` types to a general `seq[T]` proc.
writeSeq(w, asSeq value)
elif T is BitList:
elif value is BitList:
# ATTENTION! We can reuse `writeSeq` only as long as our BitList type is implemented
# to internally match the binary representation of SSZ BitLists in memory.
writeSeq(w, bytes value)
elif T is HashArray:
trs "WRITING HASHARRAY"
var ctx = beginRecord(w, T.T)
enumerateSubFields(value, field):
writeField w, ctx, astToStr(field), field.data
endRecord w, ctx
elif T is object|tuple|array:
elif value is object|tuple|array:
trs "WRITING OBJECT OR ARRAY"
var ctx = beginRecord(w, T)
var ctx = beginRecord(w, type value)
enumerateSubFields(value, field):
writeField w, ctx, astToStr(field), field
endRecord w, ctx
@@ -589,7 +583,7 @@ func mergedDataHash(x: HashList|HashArray, chunkIdx: int64): Eth2Digest =
# The hash of the two cached
trs "DATA HASH ", chunkIdx, " ", x.data.len
when x.T is uint64:
when x.T is BasicType:
when cpuEndian == bigEndian:
unsupported type x # No bigendian support here!

View File

@@ -9,13 +9,32 @@ import
const
offsetSize* = 4
func hashChunks(maxLen: int64, T: type): int64 =
# For simplicity of implementation, HashArray only supports a few types - this
# could/should obviously be extended
# TODO duplicated in maxChunksCount
when T is uint64:
maxLen * sizeof(T) div 32
else: maxLen
# A few index types from here onwards:
# * dataIdx - leaf index starting from 0 to maximum length of collection
# * chunkIdx - leaf data index after chunking starting from 0
# * vIdx - virtual index in merkle tree - the root is found at index 1, its
# two children at 2, 3 then 4, 5, 6, 7 etc
proc dataPerChunk(T: type): int =
# How many data items fit in a chunk
when T is bool|SomeUnsignedInt: # BasicType
32 div sizeof(T)
else:
1
template chunkIdx*(T: type, dataIdx: int64): int64 =
# Given a data index, which chunk does it belong to?
dataIdx div dataPerChunk(T)
template maxChunkIdx(T: type, maxLen: int64): int64 =
# Given a number of data items, how many chunks are needed?
chunkIdx(T, maxLen + dataPerChunk(T) - 1)
template layer*(vIdx: int64): int =
## Layer 0 = layer at which the root hash is
## We place the root hash at index 1 which simplifies the math and leaves
## index 0 for the mixed-in-length
log2trunc(vIdx.uint64).int
type
UintN* = SomeUnsignedInt # TODO: Add StUint here
@@ -41,12 +60,12 @@ type
HashArray*[maxLen: static Limit; T] = object
data*: array[maxLen, T]
hashes* {.dontSerialize.}: array[hashChunks(maxLen, T), Eth2Digest]
hashes* {.dontSerialize.}: array[maxChunkIdx(T, maxLen), Eth2Digest]
HashList*[T; maxLen: static Limit] = object
data*: List[T, maxLen]
hashes* {.dontSerialize.}: seq[Eth2Digest]
indices* {.dontSerialize.}: array[log2trunc(maxLen.uint64) + 1, int64]
indices* {.dontSerialize.}: array[layer(maxChunkIdx(T, maxLen)) + 1, int64]
template asSeq*(x: List): auto = distinctBase(x)
@@ -104,12 +123,20 @@ template isCached*(v: Eth2Digest): bool =
template clearCache*(v: var Eth2Digest) =
v.data[0..<8] = [byte 0, 0, 0, 0, 0, 0, 0, 0]
template maxChunks*(a: HashList|HashArray): int64 =
## Layer where data is
chunkIdx(a.T, a.maxLen)
template maxDepth*(a: HashList|HashArray): int =
## Layer where data is
layer(a.maxChunks)
template chunkIdx(a: HashList|HashArray, dataIdx: int64): int64 =
chunkIdx(a.T, dataIdx)
proc clearCaches*(a: var HashArray, dataIdx: auto) =
## Clear all cache entries after data at dataIdx has been modified
when a.T is uint64:
var idx = 1 shl (a.maxDepth - 1) + int(dataIdx div 8)
else:
var idx = 1 shl (a.maxDepth - 1) + int(dataIdx div 2)
var idx = 1 shl (a.maxDepth - 1) + (chunkIdx(a, dataIdx) div 2)
while idx != 0:
clearCache(a.hashes[idx])
idx = idx div 2
@@ -128,26 +155,12 @@ func cacheNodes*(depth, leaves: int): int =
res += nodesAtLayer(i, depth, leaves)
res
template layer*(vIdx: int64): int =
## Layer 0 = layer at which the root hash is
## We place the root hash at index 1 which simplifies the math and leaves
## index 0 for the mixed-in-length
log2trunc(vIdx.uint64).int
template maxChunks*(a: HashList|HashArray): int64 =
## Layer where data is
hashChunks(a.maxLen, a.T)
template maxDepth*(a: HashList|HashArray): int =
## Layer where data is
layer(a.maxChunks)
proc clearCaches*(a: var HashList, dataIdx: int64) =
if a.hashes.len == 0:
return
var
idx = 1'i64 shl (a.maxDepth - 1) + int64(dataIdx div 2)
idx = 1'i64 shl (a.maxDepth - 1) + (chunkIdx(a, dataIdx) div 2)
layer = a.maxDepth - 1
while idx > 0:
let
@@ -164,7 +177,8 @@ proc clearCaches*(a: var HashList, dataIdx: int64) =
proc growHashes*(a: var HashList) =
# Ensure that the hash cache is big enough for the data in the list
let
leaves = a.data.len()
leaves = int(
chunkIdx(a, a.data.len() + dataPerChunk(a.T) - 1))
newSize = 1 + cacheNodes(a.maxDepth, leaves)
if a.hashes.len >= newSize:

View File

@@ -9,7 +9,7 @@
import
unittest, options, json_serialization,
nimcrypto, eth/common, serialization/testing/generic_suite,
nimcrypto, serialization/testing/generic_suite,
./testutil,
../beacon_chain/spec/[datatypes, digest],
../beacon_chain/ssz, ../beacon_chain/ssz/[navigator, dynamic_navigator]
@@ -21,7 +21,8 @@ type
Simple = object
flag: bool
# ignored {.dontSerialize.}: string
# data: array[256, bool]
data: array[256, bool]
data2: HashArray[256, bool]
template reject(stmt) =
doAssert(not compiles(stmt))
@@ -43,14 +44,14 @@ type
ObjWithFields = object
f0: uint8
f1: uint32
f2: EthAddress
f2: array[20, byte]
f3: MDigest[256]
f4: seq[byte]
f5: ValidatorIndex
static:
doAssert fixedPortionSize(ObjWithFields) ==
1 + 4 + sizeof(EthAddress) + (256 div 8) + 4 + 8
1 + 4 + sizeof(array[20, byte]) + (256 div 8) + 4 + 8
executeRoundTripTests SSZ
@@ -95,8 +96,30 @@ suiteReport "SSZ navigator":
let root = hash_tree_root(leaves)
check $root == "5248085B588FAB1DD1E03F3CD62201602B12E6560665935964F46E805977E8C5"
leaves.add c
check hash_tree_root(leaves) == hash_tree_root(leaves.data)
while leaves.len < 1 shl 3:
leaves.add c
check hash_tree_root(leaves) == hash_tree_root(leaves.data)
leaves = default(type leaves)
while leaves.len < (1 shl 3) - 1:
leaves.add c
leaves.add c
check hash_tree_root(leaves) == hash_tree_root(leaves.data)
leaves = default(type leaves)
while leaves.len < (1 shl 3) - 2:
leaves.add c
leaves.add c
leaves.add c
check hash_tree_root(leaves) == hash_tree_root(leaves.data)
for i in 0 ..< leaves.data.len - 2:
leaves[i] = a
leaves[i + 1] = b
leaves[i + 2] = c
check hash_tree_root(leaves) == hash_tree_root(leaves.data)
var leaves2 = HashList[Eth2Digest, 1'i64 shl 48]() # Large number!
leaves2.add a
@@ -104,6 +127,12 @@ suiteReport "SSZ navigator":
leaves2.add c
check hash_tree_root(leaves2) == hash_tree_root(leaves2.data)
timedTest "basictype":
var leaves = HashList[uint64, 1'i64 shl 3]()
while leaves.len < leaves.maxLen:
leaves.add leaves.len.uint64
check hash_tree_root(leaves) == hash_tree_root(leaves.data)
suiteReport "SSZ dynamic navigator":
timedTest "navigating fields":
var fooOrig = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
@@ -161,9 +190,8 @@ suiteReport "hash":
both: it.li.add Eth2Digest()
var y: HashArray[32, uint64]
doAssert hash_tree_root(y) == hash_tree_root(y.data)
y[4] = 42'u64
doAssert hash_tree_root(y) == hash_tree_root(y.data)
for i in 0..<y.len:
y[i] = 42'u64
doAssert hash_tree_root(y) == hash_tree_root(y.data)