more type compiler bug workarounds

Jacek Sieka 2020-05-30 20:59:57 +02:00
parent a0b8b818f3
commit be92842944
No known key found for this signature in database
GPG Key ID: A1B09461ABB656B8
2 changed files with 33 additions and 36 deletions

View File

@@ -280,7 +280,7 @@ type
     randao_mixes*: HashArray[EPOCHS_PER_HISTORICAL_VECTOR, Eth2Digest]

     # Slashings
-    slashings*: HashArray[EPOCHS_PER_SLASHINGS_VECTOR, uint64] ##\
+    slashings*: HashArray[int64(EPOCHS_PER_SLASHINGS_VECTOR), uint64] ##\
     ## Per-epoch sums of slashed effective balances

     # Attestations
@@ -513,9 +513,7 @@ Json.useCustomSerialization(BitSeq):
     writer.writeValue "0x" & seq[byte](value).toHex

 template readValue*(reader: var JsonReader, value: var List) =
-  type T = type(value)
-  type E = ElemType(T)
-  value = T readValue(reader, seq[E])
+  value = T readValue(reader, seq[type value[0]])

 template writeValue*(writer: var JsonWriter, value: List) =
   writeValue(writer, asSeq value)
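For orientation, the reworked template above reads the payload as a plain seq and converts it to the List type in one step. A minimal round-trip sketch (not taken from this commit), assuming nim-json-serialization's Json.encode/Json.decode and a List type imported from this repo's SSZ modules:

  import json_serialization

  let original = List[uint64, int64(8)](@[1'u64, 2, 3])
  let encoded = Json.encode(original)                        # writeValue -> asSeq
  let decoded = Json.decode(encoded, List[uint64, int64(8)]) # readValue -> seq -> List
  doAssert asSeq(decoded) == asSeq(original)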

View File

@@ -10,8 +10,9 @@ const
   offsetSize* = 4

 func hashChunks(maxLen: int64, T: type): int64 =
-  # For simplificy of implementation, HashArray only supports a few types - this
+  # For simplicity of implementation, HashArray only supports a few types - this
   # could/should obviously be extended
+  # TODO duplicated in maxChunksCount
   when T is uint64:
     maxLen * sizeof(T) div 32
   else: maxLen
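To make the arithmetic in hashChunks concrete: uint64 values pack four to a 32-byte SSZ chunk, so a uint64 vector needs maxLen * sizeof(uint64) div 32 cache entries, while any other supported element type falls through to one chunk per element. For example (length picked for illustration only):

  # 8192 uint64 counters -> 8192 * 8 = 65536 bytes -> 2048 chunks to cache
  doAssert 8192 * sizeof(uint64) div 32 == 2048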
@@ -38,11 +39,11 @@ type
     actualSszSize*: int
     elementSize*: int

-  HashArray*[maxLen: static int; T] = object
+  HashArray*[maxLen: static Limit; T] = object
     data*: array[maxLen, T]
     hashes* {.dontSerialize.}: array[hashChunks(maxLen, T), Eth2Digest]

-  HashList*[T; maxLen: static int64] = object
+  HashList*[T; maxLen: static Limit] = object
     data*: List[T, maxLen]
     hashes* {.dontSerialize.}: seq[Eth2Digest]
     indices* {.dontSerialize.}: array[log2trunc(maxLen.uint64) + 1, int]
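Structurally nothing changes here: data carries the serialized payload while hashes (plus indices for HashList) are cache-only fields skipped by {.dontSerialize.}. A hedged declaration sketch, assuming Limit is the int64-flavoured length type used elsewhere in the codebase and mirroring the int64(...) conversion seen in the datatypes change above:

  var counters: HashArray[int64(1024), uint64]   # fixed-length vector + per-chunk hash cache
  var roots: HashList[Eth2Digest, int64(8192)]   # bounded list whose cache grows with the data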
@@ -103,7 +104,7 @@ template isCached*(v: Eth2Digest): bool =
 template clearCache*(v: var Eth2Digest) =
   v.data[0..<8] = [byte 0, 0, 0, 0, 0, 0, 0, 0]

-proc clearTree*(a: var HashArray, dataIdx: auto) =
+proc clearCaches*(a: var HashArray, dataIdx: auto) =
   ## Clear all cache entries after data at dataIdx has been modified
   when a.T is uint64:
     var idx = 1 shl (a.maxDepth - 1) + int(dataIdx div 8)
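The caching convention implied here: a digest whose first eight bytes are zero is treated as "not computed yet", clearCache resets one entry, and the renamed clearCaches invalidates every cached entry affected by the element at dataIdx. A small sketch of the sentinel idea, assuming a default-initialized Eth2Digest is all zeroes:

  var entry: Eth2Digest          # zero-initialized => 8-byte prefix is zero
  doAssert not isCached(entry)   # counts as an empty cache slot
  # once a real hash lands here, isCached(entry) holds until clearCache
  # zeroes the prefix again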
@@ -141,7 +142,7 @@ template maxDepth*(a: HashList|HashArray): int =
   ## Layer where data is
   layer(a.maxChunks)

-proc clearTree*(a: var HashList, dataIdx: auto) =
+proc clearCaches*(a: var HashList, dataIdx: auto) =
   if a.hashes.len == 0:
     return
@@ -185,41 +186,32 @@ proc growHashes*(a: var HashList) =
   swap(a.hashes, newHashes)
   a.indices = newIndices

-template `[]`*(a: HashArray, b: auto): auto =
-  a.data[b]
-
-proc `[]`*[maxLen: static int; T](a: var HashArray[maxLen, T], b: auto): var T =
-  clearTree(a, b.int64)
-  a.data[b]
-
-proc `[]=`*(a: var HashArray, b: auto, c: auto) =
-  clearTree(a, b.int64)
-  a.data[b] = c
-
-template fill*[N: static int; T](a: var HashArray[N, T], c: T) =
-  mixin fill
-  fill(a.data, c)
-
-template sum*[N: static int; T](a: var HashArray[N, T]): T =
-  mixin sum
-  sum(a.data)
-
-template len*[N: static int; T](a: type HashArray[N, T]): int = N
-
-template add*(x: var HashList, val: x.T) =
+template len*(a: type HashArray): auto = int(a.maxLen)
+
+template add*(x: var HashList, val: auto) =
   add(x.data, val)
   x.growHashes()
-  clearTree(x, x.data.len() - 1) # invalidate entry we just added
+  clearCaches(x, x.data.len() - 1)

 template len*(x: HashList|HashArray): auto = len(x.data)
 template low*(x: HashList|HashArray): auto = low(x.data)
 template high*(x: HashList|HashArray): auto = high(x.data)

-template `[]`*(x: HashList, idx: auto): auto = x.data[idx]
-
-proc `[]`*[T; maxLen: static int64](x: var HashList[T, maxLen], idx: auto): var T =
-  clearTree(x, idx.int64)
+template `[]`*(x: HashList|HashArray, idx: auto): auto = x.data[idx]
+
+proc `[]`*(a: var HashArray, b: auto): var a.T =
+  clearCaches(a, b.Limit)
+  a.data[b]
+
+proc `[]=`*(a: var HashArray, b: auto, c: auto) =
+  clearCaches(a, b.Limit)
+  a.data[b] = c
+
+proc `[]`*(x: var HashList, idx: auto): var x.T =
+  clearCaches(x, idx.int64)
   x.data[idx]

 proc `[]=`*(x: var HashList, idx: int64, val: auto) =
-  clearTree(x, idx.int64)
+  clearCaches(x, idx.int64)
   x.data[idx] = val

 template `==`*(a, b: HashList|HashArray): bool = a.data == b.data
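The net effect of the reshuffle above: plain indexing of either container goes through the shared read-only template, while every mutating path (add, the var [] overloads, []=) calls clearCaches first so stale hashes are recomputed on the next root calculation. A usage sketch under those assumptions, with made-up lengths and values:

  var registry: HashList[uint64, int64(64)]
  registry.add 7'u64       # grows the hash cache, then invalidates the new entry
  registry[0] = 9'u64      # `[]=` clears the affected cache entries before writing
  doAssert registry.len == 1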
@@ -229,6 +221,13 @@ template `$`*(x: HashList): auto = $(x.data)
 template items* (x: HashList|HashArray): untyped = items(x.data)
 template pairs* (x: HashList|HashArray): untyped = pairs(x.data)

+template fill*(a: var HashArray, c: auto) =
+  mixin fill
+  fill(a.data, c)
+
+template sum*[maxLen; T](a: var HashArray[maxLen, T]): T =
+  mixin sum
+  sum(a.data)
+
 macro unsupported*(T: typed): untyped =
   # TODO: {.fatal.} breaks compilation even in `compiles()` context,
   # so we use this macro instead. It's also much better at figuring
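fill and sum stay deliberately thin: they forward straight to the backing array, so bulk initialisation and summation do not route through the cache-invalidating accessors. A short sketch, assuming std/algorithm.fill and std/math.sum are what the mixins resolve to for a uint64 array:

  var balances: HashArray[int64(128), uint64]
  fill(balances, 0'u64)              # writes the raw data in one go
  doAssert sum(balances) == 0'u64    # likewise reads the raw data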
@@ -274,8 +273,8 @@ func fixedPortionSize*(T0: type): int {.compileTime.} =
   when T is BasicType: sizeof(T)
   elif T is array|HashArray:
     type E = ElemType(T)
-    when isFixedSize(E): len(T) * fixedPortionSize(E)
-    else: len(T) * offsetSize
+    when isFixedSize(E): int(len(T)) * fixedPortionSize(E)
+    else: int(len(T)) * offsetSize
   elif T is object|tuple:
     enumAllSerializedFields(T):
       when isFixedSize(FieldType):
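Reading the fixed-size branch: each fixed-size element contributes its own fixed portion, each variable-size element contributes only a 4-byte offset (offsetSize above), and the int(...) conversions make sure the expression matches the declared int return type regardless of which len overload kicks in. A worked example with illustrative types:

  # array[4, uint64]: elements are fixed-size -> 4 * sizeof(uint64) = 32 bytes
  doAssert 4 * sizeof(uint64) == 32
  # four variable-size elements -> only their offsets are fixed: 4 * offsetSize = 16 bytes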