Repository: https://github.com/status-im/nimbus-eth2.git

commit accd5fe954
parent 74e35c464f

    Don't use StackArray in ssz; Drop the support for strings
@@ -4,7 +4,7 @@ import
   options as stdOptions, net as stdNet,
 
   # Status libs
-  stew/[varints, base58, bitseqs, endians2, results],
+  stew/[varints, base58, bitseqs, endians2, results, byteutils],
   stew/shims/[macros, tables],
   faststreams/[inputs, outputs, buffers], snappy, snappy/framing,
   json_serialization, json_serialization/std/[net, options],

@@ -39,6 +39,7 @@ type
   PrivateKey* = crypto.PrivateKey
 
   Bytes = seq[byte]
+  ErrorMsg = List[byte, 256]
 
   # TODO: This is here only to eradicate a compiler
   # warning about unused import (rpc/messages).

@@ -152,7 +153,7 @@ type
     case kind*: Eth2NetworkingErrorKind
     of ReceivedErrorResponse:
       responseCode: ResponseCode
-      errorMsg: string
+      errorMsg: ErrorMsg
     else:
       discard
 
@@ -324,13 +325,16 @@ proc writeChunk*(conn: Connection,
 
   await conn.write(output.getOutput)
 
+template errorMsgLit(x: static string): ErrorMsg =
+  const val = ErrorMsg toBytes(x)
+  val
+
 proc sendErrorResponse(peer: Peer,
                        conn: Connection,
                        noSnappy: bool,
                        responseCode: ResponseCode,
-                       errMsg: string) {.async.} =
+                       errMsg: ErrorMsg) {.async.} =
   debug "Error processing request", peer, responseCode, errMsg
 
   await conn.writeChunk(some responseCode, SSZ.encode(errMsg), noSnappy)
 
 proc sendNotificationMsg(peer: Peer, protocolId: string, requestBytes: Bytes) {.async} =
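
With this change, peer-facing error messages travel as SSZ List[byte, 256] values
(ErrorMsg) instead of string. For illustration only (not part of the commit), a minimal
self-contained sketch of the pattern; the List definition below is a simplified stand-in
for the one in the ssz types module, and toBytes comes from stew/byteutils, which this
diff adds to the import list:

    import stew/byteutils            # toBytes(s: string): seq[byte]

    type
      Limit = int64
      List[T; maxLen: static Limit] = distinct seq[T]   # simplified stand-in
      ErrorMsg = List[byte, 256]

    template errorMsgLit(x: static string): ErrorMsg =
      # the string-to-bytes conversion happens at compile time
      const val = ErrorMsg toBytes(x)
      val

    let msg = errorMsgLit "Incomplete request"
    doAssert seq[byte](msg).len == 18    # raw bytes, no runtime string involved
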
@@ -493,15 +497,18 @@ proc handleIncomingStream(network: Eth2Node,
   try:
     let peer = peerFromStream(network, conn)
 
-    template returnInvalidRequest(msg: string) =
+    template returnInvalidRequest(msg: ErrorMsg) =
       await sendErrorResponse(peer, conn, noSnappy, InvalidRequest, msg)
       return
 
+    template returnInvalidRequest(msg: string) =
+      returnInvalidRequest(ErrorMsg msg.toBytes)
+
     let s = when useNativeSnappy:
       let fs = libp2pInput(conn)
 
       if fs.timeoutToNextByte(TTFB_TIMEOUT):
-        returnInvalidRequest "Request first byte not sent in time"
+        returnInvalidRequest(errorMsgLit "Request first byte not sent in time")
 
       fs
     else:

@@ -513,7 +520,7 @@ proc handleIncomingStream(network: Eth2Node,
     let msg = if sizeof(MsgRec) > 0:
       try:
         awaitWithTimeout(readChunkPayload(s, noSnappy, MsgRec), deadline):
-          returnInvalidRequest "Request full data not sent in time"
+          returnInvalidRequest(errorMsgLit "Request full data not sent in time")
 
       except SerializationError as err:
         returnInvalidRequest err.formatMsg("msg")

@@ -526,26 +533,26 @@ proc handleIncomingStream(network: Eth2Node,
     if msg.isErr:
       let (responseCode, errMsg) = case msg.error.kind
         of UnexpectedEOF, PotentiallyExpectedEOF:
-          (InvalidRequest, "Incomplete request")
+          (InvalidRequest, errorMsgLit "Incomplete request")
 
         of InvalidSnappyBytes:
-          (InvalidRequest, "Failed to decompress snappy payload")
+          (InvalidRequest, errorMsgLit "Failed to decompress snappy payload")
 
         of InvalidSszBytes:
-          (InvalidRequest, "Failed to decode SSZ payload")
+          (InvalidRequest, errorMsgLit "Failed to decode SSZ payload")
 
         of ZeroSizePrefix:
-          (InvalidRequest, "The request chunk cannot have a size of zero")
+          (InvalidRequest, errorMsgLit "The request chunk cannot have a size of zero")
 
         of SizePrefixOverflow:
-          (InvalidRequest, "The chunk size exceed the maximum allowed")
+          (InvalidRequest, errorMsgLit "The chunk size exceed the maximum allowed")
 
         of InvalidResponseCode, ReceivedErrorResponse,
            StreamOpenTimeout, ReadResponseTimeout:
           # These shouldn't be possible in a request, because
           # there are no response codes being read, no stream
           # openings and no reading of responses:
-          (ServerError, "Internal server error")
+          (ServerError, errorMsgLit "Internal server error")
 
         of BrokenConnection:
           return

@@ -557,7 +564,8 @@ proc handleIncomingStream(network: Eth2Node,
       logReceivedMsg(peer, MsgType(msg.get))
       await callUserHandler(peer, conn, noSnappy, msg.get)
     except CatchableError as err:
-      await sendErrorResponse(peer, conn, noSnappy, ServerError, err.msg)
+      await sendErrorResponse(peer, conn, noSnappy, ServerError,
+                              ErrorMsg err.msg.toBytes)
 
   except CatchableError as err:
     debug "Error processing an incoming request", err = err.msg, msgName

@@ -825,7 +833,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =
 
   proc `protocolMounterName`(`networkVar`: `Eth2Node`) =
     proc sszThunk(`streamVar`: `Connection`,
                   proto: string): Future[void] {.gcsafe.} =
       return handleIncomingStream(`networkVar`, `streamVar`, true,
                                   `MsgStrongRecName`)
 

@@ -834,7 +842,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =
       handler: sszThunk)
 
     proc snappyThunk(`streamVar`: `Connection`,
                      proto: string): Future[void] {.gcsafe.} =
       return handleIncomingStream(`networkVar`, `streamVar`, false,
                                   `MsgStrongRecName`)
 
@@ -138,7 +138,7 @@ proc readResponseChunk(conn: Connection,
     let responseCode = ResponseCode responseCodeByte
     case responseCode:
     of InvalidRequest, ServerError:
-      let errorMsgChunk = await readChunkPayload(conn, noSnappy, string)
+      let errorMsgChunk = await readChunkPayload(conn, noSnappy, ErrorMsg)
       let errorMsg = if errorMsgChunk.isOk: errorMsgChunk.value
                      else: return err(errorMsgChunk.error)
       return err Eth2NetworkingError(kind: ReceivedErrorResponse,
@@ -15,7 +15,7 @@
 import
   options, algorithm, options, strformat, typetraits,
   stew/[bitops2, bitseqs, endians2, objects, varints, ptrops],
-  stew/ranges/[ptr_arith, stackarrays], stew/shims/macros,
+  stew/ranges/ptr_arith, stew/shims/macros,
   faststreams/[inputs, outputs, buffers],
   serialization, serialization/testing/tracing,
   ./spec/[crypto, datatypes, digest],

@@ -44,11 +44,10 @@ type
   SszWriter* = object
     stream: OutputStream
 
-  BasicType = byte|char|bool|SomeUnsignedInt
-
   SszChunksMerkleizer = object
-    combinedChunks: StackArray[Eth2Digest]
+    combinedChunks: ptr UncheckedArray[Eth2Digest]
     totalChunks: uint64
+    topIndex: int
 
   SizePrefixed*[T] = distinct T
   SszMaxSizeExceeded* = object of SerializationError

@@ -110,7 +109,7 @@ proc writeFixedSized(s: var (OutputStream|WriteCursor), x: auto) {.raises: [Defe
       s.write toBytesLE(x)
     else:
       s.writeMemCopy x
-  elif x is array|string|seq|openarray:
+  elif x is array|seq|openarray:
     when x[0] is byte:
       trs "APPENDING FIXED SIZE BYTES", x
       s.write x

@@ -136,7 +135,7 @@ func init*(T: type SszWriter, stream: OutputStream): T {.raises: [Defect].} =
   result.stream = stream
 
 template enumerateSubFields(holder, fieldVar, body: untyped) =
-  when holder is array|string|seq|openarray:
+  when holder is array|seq|openarray:
     for fieldVar in holder: body
   else:
     enumInstanceSerializedFields(holder, _, fieldVar): body

@@ -184,7 +183,7 @@ proc writeVarSizeType(w: var SszWriter, value: auto) {.raises: [Defect, IOError]
   mixin toSszType
   type T = type toSszType(value)
 
-  when T is seq|string|openarray:
+  when T is seq|openarray:
     type E = ElemType(T)
     const isFixed = isFixedSize(E)
     when isFixed:

@@ -217,7 +216,7 @@ proc writeValue*(w: var SszWriter, x: auto) {.gcsafe, raises: [Defect, IOError].
 
   when isFixedSize(T):
     w.stream.writeFixedSized toSszType(x)
-  elif T is array|seq|openarray|string|object|tuple:
+  elif T is array|seq|openarray|object|tuple:
     w.writeVarSizeType toSszType(x)
   else:
     unsupported type(x)

@@ -229,7 +228,7 @@ func sszSize*(value: auto): int =
   when isFixedSize(T):
     anonConst fixedPortionSize(T)
 
-  elif T is seq|string|array|openarray:
+  elif T is seq|array|openarray:
     type E = ElemType(T)
     when isFixedSize(E):
       len(value) * anonConst(fixedPortionSize(E))

@@ -337,7 +336,7 @@ func addChunk(merkleizer: var SszChunksMerkleizer, data: openarray[byte]) =
   else:
     var hash = mergeBranches(merkleizer.combinedChunks[0], data)
 
-    for i in 1 .. high(merkleizer.combinedChunks):
+    for i in 1 .. merkleizer.topIndex:
       trs "ITERATING"
       if getBitLE(merkleizer.totalChunks, i):
         trs "CALLING MERGE BRANCHES"
@@ -349,23 +348,25 @@ func addChunk(merkleizer: var SszChunksMerkleizer, data: openarray[byte]) =
 
   inc merkleizer.totalChunks
 
-template createMerkleizer(totalElements: int64): SszChunksMerkleizer =
+template createMerkleizer(totalElements: static Limit): SszChunksMerkleizer =
   trs "CREATING A MERKLEIZER FOR ", totalElements
-  let merkleizerHeight = bitWidth nextPow2(uint64 totalElements)
+
+  const treeHeight = bitWidth nextPow2(uint64 totalElements)
+  var combinedChunks {.noInit.}: array[treeHeight, Eth2Digest]
 
   SszChunksMerkleizer(
-    combinedChunks: allocStackArrayNoInit(Eth2Digest, merkleizerHeight),
+    combinedChunks: cast[ptr UncheckedArray[Eth2Digest]](addr combinedChunks),
+    topIndex: treeHeight - 1,
     totalChunks: 0)
 
 func getFinalHash(merkleizer: var SszChunksMerkleizer): Eth2Digest =
   if merkleizer.totalChunks == 0:
-    let treeHeight = merkleizer.combinedChunks.high
-    return getZeroHashWithoutSideEffect(treeHeight)
+    return getZeroHashWithoutSideEffect(merkleizer.topIndex)
 
   let
     bottomHashIdx = firstOne(merkleizer.totalChunks) - 1
     submittedChunksHeight = bitWidth(merkleizer.totalChunks - 1)
-    topHashIdx = merkleizer.combinedChunks.high
+    topHashIdx = merkleizer.topIndex
 
   trs "BOTTOM HASH ", bottomHashIdx
   trs "SUBMITTED HEIGHT ", submittedChunksHeight
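
The hunk above is the heart of the StackArray removal: with totalElements now a
compile-time Limit, the scratch space for combined chunks can be a plain fixed-size
array declared by the template at its expansion site (so it lives in the caller's stack
frame), while the merkleizer keeps only an unchecked pointer into it plus the
precomputed topIndex. A rough self-contained sketch of that allocation pattern, for
illustration only; Digest stands in for Eth2Digest, and treeHeightFor is a hypothetical
helper standing in for the bitWidth/nextPow2 computation from stew/bitops2:

    type
      Digest = array[32, byte]               # stand-in for Eth2Digest
      ChunksMerkleizer = object
        combinedChunks: ptr UncheckedArray[Digest]
        totalChunks: uint64
        topIndex: int

    func treeHeightFor(totalChunks: int64): int =
      # levels needed for `totalChunks` leaves, rounded up to a power of two
      result = 1
      var cap = 1'i64
      while cap < totalChunks:
        cap *= 2
        inc result

    template createMerkleizer(totalElements: static int64): ChunksMerkleizer =
      # the array is declared in the template expansion, i.e. on the caller's
      # stack; the merkleizer only borrows a pointer to it
      const height = treeHeightFor(totalElements)
      var combinedChunks {.noInit.}: array[height, Digest]
      ChunksMerkleizer(
        combinedChunks: cast[ptr UncheckedArray[Digest]](addr combinedChunks),
        topIndex: height - 1,
        totalChunks: 0)

    var m = createMerkleizer(1024)
    doAssert m.topIndex == 10              # 1024 leaves -> 11 levels of chunks
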
@@ -404,7 +405,7 @@ func mixInLength(root: Eth2Digest, length: int): Eth2Digest =
 
 func hash_tree_root*(x: auto): Eth2Digest {.gcsafe, raises: [Defect].}
 
-template merkleizeFields(totalElements: int, body: untyped): Eth2Digest =
+template merkleizeFields(totalElements: static Limit, body: untyped): Eth2Digest =
   var merkleizer {.inject.} = createMerkleizer(totalElements)
 
   template addField(field) =

@@ -474,7 +475,7 @@ func chunkedHashTreeRootForBasicTypes[T](merkleizer: var SszChunksMerkleizer,
 func bitlistHashTreeRoot(merkleizer: var SszChunksMerkleizer, x: BitSeq): Eth2Digest =
   # TODO: Switch to a simpler BitList representation and
   # replace this with `chunkedHashTreeRoot`
-  trs "CHUNKIFYING BIT SEQ WITH LIMIT ", merkleizer.combinedChunks.len
+  trs "CHUNKIFYING BIT SEQ WITH TOP INDEX ", merkleizer.topIndex
 
   var
     totalBytes = ByteList(x).len

@@ -484,8 +485,7 @@ func bitlistHashTreeRoot(merkleizer: var SszChunksMerkleizer, x: BitSeq): Eth2Di
   if totalBytes == 1:
     # This is an empty bit list.
     # It should be hashed as a tree containing all zeros:
-    let treeHeight = merkleizer.combinedChunks.high
-    return mergeBranches(getZeroHashWithoutSideEffect(treeHeight),
+    return mergeBranches(getZeroHashWithoutSideEffect(merkleizer.topIndex),
                          getZeroHashWithoutSideEffect(0)) # this is the mixed length
 
   totalBytes -= 1

@@ -596,8 +596,7 @@ func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect], nbench.} =
 
   trs "HASH TREE ROOT FOR ", name(type x), " = ", "0x", $result
 
-iterator hash_tree_roots_prefix*[T](lst: openarray[T], limit: auto):
-    Eth2Digest =
+iterator hash_tree_roots_prefix*[T](lst: openarray[T], limit: static Limit): Eth2Digest =
   # This is a particular type's instantiation of a general fold, reduce,
   # accumulation, prefix sums, etc family of operations. As long as that
   # Eth1 deposit case is the only notable example -- the usual uses of a
@@ -67,7 +67,7 @@ template fromSszBytes*(T: type enum, bytes: openarray[byte]): auto =
 template fromSszBytes*(T: type BitSeq, bytes: openarray[byte]): auto =
   BitSeq @bytes
 
-func fromSszBytes*[N](T: type BitList[N], bytes: openarray[byte]): auto {.raisesssz.} =
+func fromSszBytes*(T: type BitList, bytes: openarray[byte]): auto {.raisesssz.} =
   if bytes.len == 0:
     # https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/ssz/simple-serialize.md#bitlistn
     # "An additional 1 bit is added to the end, at index e where e is the

@@ -76,7 +76,7 @@ func fromSszBytes*[N](T: type BitList[N], bytes: openarray[byte]): auto {.raises
     # It's not possible to have a literally 0-byte (raw) Bitlist.
     # https://github.com/status-im/nim-beacon-chain/issues/931
     raise newException(MalformedSszError, "SSZ input Bitlist too small")
-  BitList[N] @bytes
+  T @bytes
 
 func fromSszBytes*[N](T: type BitArray[N], bytes: openarray[byte]): T {.raisesssz.} =
   # A bit vector doesn't have a marker bit, but we'll use the helper from

@@ -101,7 +101,7 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
     type ElemType = type result[0]
     result = T readSszValue(input, seq[ElemType])
 
-  elif result is string|seq|openarray|array:
+  elif result is seq|openarray|array:
     type ElemType = type result[0]
     when ElemType is byte|char:
       result.setOutputSize input.len
@@ -9,7 +9,17 @@
   offsetSize* = 4
 
 type
-  BasicType* = char|bool|SomeUnsignedInt
+  UintN* = SomeUnsignedInt # TODO: Add StUint here
+  BasicType* = bool|UintN
+
+  Limit* = int64
+
+  List*[T; maxLen: static Limit] = distinct seq[T]
+  BitList*[maxLen: static Limit] = distinct BitSeq
+
+  # Note for readers:
+  # We use `array` for `Vector` and
+  # `BitArray` for `BitVector`
 
   SszError* = object of SerializationError
 
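
The List/BitList definitions above, now parameterised by a static Limit, are what
replaces string and bare seq in SSZ schemas throughout this diff. For illustration only
(not part of the commit), a small self-contained sketch of how such a list is declared
and used; ValidatorIndices is a made-up schema type, and the add/len templates mirror
the thin wrappers the module defines over the underlying seq:

    import std/typetraits                  # distinctBase

    type
      Limit = int64
      List[T; maxLen: static Limit] = distinct seq[T]
      ValidatorIndices = List[uint64, 1024]   # hypothetical schema type

    template add(x: List, val: x.T) = add(distinctBase x, val)
    template len(x: List): auto = len(distinctBase x)

    var indices: ValidatorIndices
    indices.add 7'u64
    indices.add 11'u64
    doAssert indices.len == 2
    doAssert seq[uint64](indices) == @[7'u64, 11'u64]
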
@@ -62,9 +72,6 @@ type
     of Field:
       discard
 
-  List*[T; maxLen: static int64] = distinct seq[T]
-  BitList*[maxLen: static int] = distinct BitSeq
-
 template add*(x: List, val: x.T) = add(distinctBase x, val)
 template len*(x: List): auto = len(distinctBase x)
 template low*(x: List): auto = low(distinctBase x)

@@ -111,7 +118,7 @@ template ElemType*(T: type[array]): untyped =
 template ElemType*[T](A: type[openarray[T]]): untyped =
   T
 
-template ElemType*(T: type[seq|string|List]): untyped =
+template ElemType*(T: type[seq|List]): untyped =
   type(default(T)[0])
 
 func isFixedSize*(T0: type): bool {.compileTime.} =

@@ -141,7 +148,7 @@ func fixedPortionSize*(T0: type): int {.compileTime.} =
     type E = ElemType(T)
     when isFixedSize(E): len(T) * fixedPortionSize(E)
     else: len(T) * offsetSize
-  elif T is seq|string|openarray: offsetSize
+  elif T is seq|openarray: offsetSize
   elif T is object|tuple:
     enumAllSerializedFields(T):
       when isFixedSize(FieldType):

@@ -165,7 +172,7 @@ func sszSchemaType*(T0: type): SszType {.compileTime.} =
     SszType(kind: sszUInt, bits: 32)
   elif T is uint64:
     SszType(kind: sszUInt, bits: 64)
-  elif T is seq|string:
+  elif T is seq:
     SszType(kind: sszList, listElemType: sszSchemaType(ElemType(T)))
   elif T is array:
     SszType(kind: sszVector, vectorElemType: sszSchemaType(ElemType(T)))
@@ -31,17 +31,15 @@
 
   doAssert fixedPortionSize(array[10, bool]) == 10
   doAssert fixedPortionSize(array[SomeEnum, uint64]) == 24
-  doAssert fixedPortionSize(array[3..5, string]) == 12
+  doAssert fixedPortionSize(array[3..5, List[byte, 256]]) == 12
 
-  doAssert fixedPortionSize(string) == 4
-  doAssert fixedPortionSize(seq[bool]) == 4
-  doAssert fixedPortionSize(seq[string]) == 4
+  doAssert fixedPortionSize(List[byte, 256]) == 4
+  doAssert fixedPortionSize(List[bool, 128]) == 4
+  doAssert fixedPortionSize(List[List[byte, 128], 256]) == 4
 
   doAssert isFixedSize(array[20, bool]) == true
   doAssert isFixedSize(Simple) == true
-  doAssert isFixedSize(string) == false
-  doAssert isFixedSize(seq[bool]) == false
-  doAssert isFixedSize(seq[string]) == false
+  doAssert isFixedSize(List[bool, 128]) == false
 
   reject fixedPortionSize(int)
 

@@ -64,8 +62,10 @@ type
   Foo = object
     bar: Bar
 
+  BarList = List[uint64, 128]
+
   Bar = object
-    b: string
+    b: BarList
     baz: Baz
 
   Baz = object

@@ -76,13 +76,13 @@ proc toDigest[N: static int](x: array[N, byte]): Eth2Digest =
 
 suiteReport "SSZ navigator":
   timedTest "simple object fields":
-    var foo = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
+    var foo = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
     let encoded = SSZ.encode(foo)
 
     check SSZ.decode(encoded, Foo) == foo
 
     let mountedFoo = sszMount(encoded, Foo)
-    check mountedFoo.bar.b == "bar"
+    check mountedFoo.bar.b[] == BarList @[1'u64, 2, 3]
 
     let mountedBar = mountedFoo.bar
     check mountedBar.baz.i == 10'u64

@@ -102,16 +102,16 @@ suiteReport "SSZ navigator":
 
 suiteReport "SSZ dynamic navigator":
   timedTest "navigating fields":
-    var fooOrig = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
+    var fooOrig = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
     let fooEncoded = SSZ.encode(fooOrig)
 
     var navFoo = DynamicSszNavigator.init(fooEncoded, Foo)
 
     var navBar = navFoo.navigate("bar")
-    check navBar.toJson(pretty = false) == """{"b":"bar","baz":{"i":10}}"""
+    check navBar.toJson(pretty = false) == """{"b":[1,2,3],"baz":{"i":10}}"""
 
     var navB = navBar.navigate("b")
-    check navB.toJson == "\"bar\""
+    check navB.toJson(pretty = false) == "[1,2,3]"
 
     var navBaz = navBar.navigate("baz")
     var navI = navBaz.navigate("i")