Don't use StackArray in ssz; Drop the support for strings
parent 74e35c464f
commit accd5fe954

@@ -4,7 +4,7 @@ import
options as stdOptions, net as stdNet,

# Status libs
-stew/[varints, base58, bitseqs, endians2, results],
+stew/[varints, base58, bitseqs, endians2, results, byteutils],
stew/shims/[macros, tables],
faststreams/[inputs, outputs, buffers], snappy, snappy/framing,
json_serialization, json_serialization/std/[net, options],
@@ -39,6 +39,7 @@ type
PrivateKey* = crypto.PrivateKey

Bytes = seq[byte]
+ErrorMsg = List[byte, 256]

# TODO: This is here only to eradicate a compiler
# warning about unused import (rpc/messages).
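
The new ErrorMsg alias bounds error text to 256 bytes, replacing the plain Nim string used before. A stand-alone sketch of the idea (not part of this commit; the type definitions and toBytes below are local stand-ins for what the module imports):

  type
    Limit = int64
    List[T; maxLen: static Limit] = distinct seq[T]
    ErrorMsg = List[byte, 256]

  func toBytes(s: string): seq[byte] =
    # stand-in for stew/byteutils.toBytes
    result = newSeq[byte](s.len)
    for i, c in s: result[i] = byte(c)

  # a bounded error message built from a plain string
  let msg = ErrorMsg toBytes("Incomplete request")
  doAssert seq[byte](msg).len == 18
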
@@ -152,7 +153,7 @@ type
case kind*: Eth2NetworkingErrorKind
of ReceivedErrorResponse:
responseCode: ResponseCode
-errorMsg: string
+errorMsg: ErrorMsg
else:
discard

@@ -324,13 +325,16 @@ proc writeChunk*(conn: Connection,

await conn.write(output.getOutput)

+template errorMsgLit(x: static string): ErrorMsg =
+const val = ErrorMsg toBytes(x)
+val

proc sendErrorResponse(peer: Peer,
conn: Connection,
noSnappy: bool,
responseCode: ResponseCode,
-errMsg: string) {.async.} =
+errMsg: ErrorMsg) {.async.} =
debug "Error processing request", peer, responseCode, errMsg

await conn.writeChunk(some responseCode, SSZ.encode(errMsg), noSnappy)

proc sendNotificationMsg(peer: Peer, protocolId: string, requestBytes: Bytes) {.async} =
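
The errorMsgLit template above converts a string literal to an ErrorMsg at compile time: the const inside it is evaluated once, so call sites pass a precomputed value instead of converting on every error. A rough self-contained sketch of the same pattern (local stand-ins, not the module's actual imports):

  type
    Limit = int64
    List[T; maxLen: static Limit] = distinct seq[T]
    ErrorMsg = List[byte, 256]

  func toBytes(s: string): seq[byte] =
    result = newSeq[byte](s.len)
    for i, c in s: result[i] = byte(c)

  template errorMsgLit(x: static string): ErrorMsg =
    const val = ErrorMsg toBytes(x)   # computed at compile time
    val

  let e = errorMsgLit "Request first byte not sent in time"
  doAssert seq[byte](e).len > 0
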
@@ -493,15 +497,18 @@ proc handleIncomingStream(network: Eth2Node,
try:
let peer = peerFromStream(network, conn)

-template returnInvalidRequest(msg: string) =
+template returnInvalidRequest(msg: ErrorMsg) =
await sendErrorResponse(peer, conn, noSnappy, InvalidRequest, msg)
return

+template returnInvalidRequest(msg: string) =
+returnInvalidRequest(ErrorMsg msg.toBytes)
+
let s = when useNativeSnappy:
let fs = libp2pInput(conn)

if fs.timeoutToNextByte(TTFB_TIMEOUT):
-returnInvalidRequest "Request first byte not sent in time"
+returnInvalidRequest(errorMsgLit "Request first byte not sent in time")

fs
else:
@@ -513,7 +520,7 @@ proc handleIncomingStream(network: Eth2Node,
let msg = if sizeof(MsgRec) > 0:
try:
awaitWithTimeout(readChunkPayload(s, noSnappy, MsgRec), deadline):
-returnInvalidRequest "Request full data not sent in time"
+returnInvalidRequest(errorMsgLit "Request full data not sent in time")

except SerializationError as err:
returnInvalidRequest err.formatMsg("msg")
@@ -526,26 +533,26 @@ proc handleIncomingStream(network: Eth2Node,
if msg.isErr:
let (responseCode, errMsg) = case msg.error.kind
of UnexpectedEOF, PotentiallyExpectedEOF:
-(InvalidRequest, "Incomplete request")
+(InvalidRequest, errorMsgLit "Incomplete request")

of InvalidSnappyBytes:
-(InvalidRequest, "Failed to decompress snappy payload")
+(InvalidRequest, errorMsgLit "Failed to decompress snappy payload")

of InvalidSszBytes:
-(InvalidRequest, "Failed to decode SSZ payload")
+(InvalidRequest, errorMsgLit "Failed to decode SSZ payload")

of ZeroSizePrefix:
-(InvalidRequest, "The request chunk cannot have a size of zero")
+(InvalidRequest, errorMsgLit "The request chunk cannot have a size of zero")

of SizePrefixOverflow:
-(InvalidRequest, "The chunk size exceed the maximum allowed")
+(InvalidRequest, errorMsgLit "The chunk size exceed the maximum allowed")

of InvalidResponseCode, ReceivedErrorResponse,
StreamOpenTimeout, ReadResponseTimeout:
# These shouldn't be possible in a request, because
# there are no response codes being read, no stream
# openings and no reading of responses:
-(ServerError, "Internal server error")
+(ServerError, errorMsgLit "Internal server error")

of BrokenConnection:
return
@@ -557,7 +564,8 @@ proc handleIncomingStream(network: Eth2Node,
logReceivedMsg(peer, MsgType(msg.get))
await callUserHandler(peer, conn, noSnappy, msg.get)
except CatchableError as err:
-await sendErrorResponse(peer, conn, noSnappy, ServerError, err.msg)
+await sendErrorResponse(peer, conn, noSnappy, ServerError,
+ErrorMsg err.msg.toBytes)

except CatchableError as err:
debug "Error processing an incoming request", err = err.msg, msgName
@@ -825,7 +833,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =

proc `protocolMounterName`(`networkVar`: `Eth2Node`) =
proc sszThunk(`streamVar`: `Connection`,
-proto: string): Future[void] {.gcsafe.} =
+proto: string): Future[void] {.gcsafe.} =
return handleIncomingStream(`networkVar`, `streamVar`, true,
`MsgStrongRecName`)

@@ -834,7 +842,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =
handler: sszThunk)

proc snappyThunk(`streamVar`: `Connection`,
-proto: string): Future[void] {.gcsafe.} =
+proto: string): Future[void] {.gcsafe.} =
return handleIncomingStream(`networkVar`, `streamVar`, false,
`MsgStrongRecName`)

@@ -138,7 +138,7 @@ proc readResponseChunk(conn: Connection,
let responseCode = ResponseCode responseCodeByte
case responseCode:
of InvalidRequest, ServerError:
-let errorMsgChunk = await readChunkPayload(conn, noSnappy, string)
+let errorMsgChunk = await readChunkPayload(conn, noSnappy, ErrorMsg)
let errorMsg = if errorMsgChunk.isOk: errorMsgChunk.value
else: return err(errorMsgChunk.error)
return err Eth2NetworkingError(kind: ReceivedErrorResponse,

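
With this change an error response is received as raw ErrorMsg bytes rather than a string. A hypothetical helper (not from this commit) showing one way the receiving side could render those bytes for a log line:

  # Assumes the error payload is mostly ASCII; anything else becomes '.'.
  func toPrintable(msg: openArray[byte]): string =
    for b in msg:
      if char(b) in {' ' .. '~'}:
        result.add char(b)
      else:
        result.add '.'

  doAssert toPrintable([byte('H'), byte('i'), byte(0)]) == "Hi."
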
@@ -15,7 +15,7 @@
import
options, algorithm, options, strformat, typetraits,
stew/[bitops2, bitseqs, endians2, objects, varints, ptrops],
-stew/ranges/[ptr_arith, stackarrays], stew/shims/macros,
+stew/ranges/ptr_arith, stew/shims/macros,
faststreams/[inputs, outputs, buffers],
serialization, serialization/testing/tracing,
./spec/[crypto, datatypes, digest],
@@ -44,11 +44,10 @@ type
SszWriter* = object
stream: OutputStream

-BasicType = byte|char|bool|SomeUnsignedInt
-
SszChunksMerkleizer = object
-combinedChunks: StackArray[Eth2Digest]
+combinedChunks: ptr UncheckedArray[Eth2Digest]
totalChunks: uint64
+topIndex: int

SizePrefixed*[T] = distinct T
SszMaxSizeExceeded* = object of SerializationError
@@ -110,7 +109,7 @@ proc writeFixedSized(s: var (OutputStream|WriteCursor), x: auto) {.raises: [Defe
s.write toBytesLE(x)
else:
s.writeMemCopy x
-elif x is array|string|seq|openarray:
+elif x is array|seq|openarray:
when x[0] is byte:
trs "APPENDING FIXED SIZE BYTES", x
s.write x
@@ -136,7 +135,7 @@ func init*(T: type SszWriter, stream: OutputStream): T {.raises: [Defect].} =
result.stream = stream

template enumerateSubFields(holder, fieldVar, body: untyped) =
-when holder is array|string|seq|openarray:
+when holder is array|seq|openarray:
for fieldVar in holder: body
else:
enumInstanceSerializedFields(holder, _, fieldVar): body
@@ -184,7 +183,7 @@ proc writeVarSizeType(w: var SszWriter, value: auto) {.raises: [Defect, IOError]
mixin toSszType
type T = type toSszType(value)

-when T is seq|string|openarray:
+when T is seq|openarray:
type E = ElemType(T)
const isFixed = isFixedSize(E)
when isFixed:
@@ -217,7 +216,7 @@ proc writeValue*(w: var SszWriter, x: auto) {.gcsafe, raises: [Defect, IOError].

when isFixedSize(T):
w.stream.writeFixedSized toSszType(x)
-elif T is array|seq|openarray|string|object|tuple:
+elif T is array|seq|openarray|object|tuple:
w.writeVarSizeType toSszType(x)
else:
unsupported type(x)
@@ -229,7 +228,7 @@ func sszSize*(value: auto): int =
when isFixedSize(T):
anonConst fixedPortionSize(T)

-elif T is seq|string|array|openarray:
+elif T is seq|array|openarray:
type E = ElemType(T)
when isFixedSize(E):
len(value) * anonConst(fixedPortionSize(E))
@@ -337,7 +336,7 @@ func addChunk(merkleizer: var SszChunksMerkleizer, data: openarray[byte]) =
else:
var hash = mergeBranches(merkleizer.combinedChunks[0], data)

-for i in 1 .. high(merkleizer.combinedChunks):
+for i in 1 .. merkleizer.topIndex:
trs "ITERATING"
if getBitLE(merkleizer.totalChunks, i):
trs "CALLING MERGE BRANCHES"
@@ -349,23 +348,25 @@ func addChunk(merkleizer: var SszChunksMerkleizer, data: openarray[byte]) =

inc merkleizer.totalChunks

-template createMerkleizer(totalElements: int64): SszChunksMerkleizer =
+template createMerkleizer(totalElements: static Limit): SszChunksMerkleizer =
trs "CREATING A MERKLEIZER FOR ", totalElements
-let merkleizerHeight = bitWidth nextPow2(uint64 totalElements)
+
+const treeHeight = bitWidth nextPow2(uint64 totalElements)
+var combinedChunks {.noInit.}: array[treeHeight, Eth2Digest]

SszChunksMerkleizer(
-combinedChunks: allocStackArrayNoInit(Eth2Digest, merkleizerHeight),
+combinedChunks: cast[ptr UncheckedArray[Eth2Digest]](addr combinedChunks),
+topIndex: treeHeight - 1,
totalChunks: 0)

func getFinalHash(merkleizer: var SszChunksMerkleizer): Eth2Digest =
if merkleizer.totalChunks == 0:
-let treeHeight = merkleizer.combinedChunks.high
-return getZeroHashWithoutSideEffect(treeHeight)
+return getZeroHashWithoutSideEffect(merkleizer.topIndex)

let
bottomHashIdx = firstOne(merkleizer.totalChunks) - 1
submittedChunksHeight = bitWidth(merkleizer.totalChunks - 1)
-topHashIdx = merkleizer.combinedChunks.high
+topHashIdx = merkleizer.topIndex

trs "BOTTOM HASH ", bottomHashIdx
trs "SUBMITTED HEIGHT ", submittedChunksHeight
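
createMerkleizer now allocates the per-level chunk array on the caller's stack and hands the merkleizer a ptr UncheckedArray view of it, with topIndex standing in for the old combinedChunks.high (an UncheckedArray has no length of its own). The accumulation is a binary-counter walk over totalChunks. A toy, self-contained sketch of that indexing with a fake hash (names other than topIndex and totalChunks are invented for illustration):

  type
    Digest = uint64                         # toy stand-in for Eth2Digest
    ToyMerkleizer = object
      combined: ptr UncheckedArray[Digest]  # partial subtree root per level
      topIndex: int                         # replaces combinedChunks.high
      totalChunks: uint64

  func merge(a, b: Digest): Digest = a * 31 + b         # toy mix, not a hash
  func getBitLE(x: uint64, i: int): bool = ((x shr i) and 1) != 0

  func addChunk(m: var ToyMerkleizer, chunk: Digest) =
    # The set bits of totalChunks mark which levels hold a pending root.
    if not getBitLE(m.totalChunks, 0):
      m.combined[0] = chunk
    else:
      var h = merge(m.combined[0], chunk)
      for i in 1 .. m.topIndex:             # was: 1 .. high(combinedChunks)
        if getBitLE(m.totalChunks, i):
          h = merge(m.combined[i], h)       # keep carrying upward
        else:
          m.combined[i] = h
          break
    inc m.totalChunks

  const treeHeight = 4
  var chunkStore {.noInit.}: array[treeHeight, Digest]  # caller-owned storage
  var m = ToyMerkleizer(
    combined: cast[ptr UncheckedArray[Digest]](addr chunkStore),
    topIndex: treeHeight - 1,
    totalChunks: 0)

  for c in [1'u64, 2, 3, 4]:
    m.addChunk c
  doAssert m.totalChunks == 4
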
@@ -404,7 +405,7 @@ func mixInLength(root: Eth2Digest, length: int): Eth2Digest =

func hash_tree_root*(x: auto): Eth2Digest {.gcsafe, raises: [Defect].}

-template merkleizeFields(totalElements: int, body: untyped): Eth2Digest =
+template merkleizeFields(totalElements: static Limit, body: untyped): Eth2Digest =
var merkleizer {.inject.} = createMerkleizer(totalElements)

template addField(field) =
@@ -474,7 +475,7 @@ func chunkedHashTreeRootForBasicTypes[T](merkleizer: var SszChunksMerkleizer,
func bitlistHashTreeRoot(merkleizer: var SszChunksMerkleizer, x: BitSeq): Eth2Digest =
# TODO: Switch to a simpler BitList representation and
# replace this with `chunkedHashTreeRoot`
-trs "CHUNKIFYING BIT SEQ WITH LIMIT ", merkleizer.combinedChunks.len
+trs "CHUNKIFYING BIT SEQ WITH TOP INDEX ", merkleizer.topIndex

var
totalBytes = ByteList(x).len
@@ -484,8 +485,7 @@ func bitlistHashTreeRoot(merkleizer: var SszChunksMerkleizer, x: BitSeq): Eth2Di
if totalBytes == 1:
# This is an empty bit list.
# It should be hashed as a tree containing all zeros:
-let treeHeight = merkleizer.combinedChunks.high
-return mergeBranches(getZeroHashWithoutSideEffect(treeHeight),
+return mergeBranches(getZeroHashWithoutSideEffect(merkleizer.topIndex),
getZeroHashWithoutSideEffect(0)) # this is the mixed length

totalBytes -= 1
@@ -596,8 +596,7 @@ func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect], nbench.} =

trs "HASH TREE ROOT FOR ", name(type x), " = ", "0x", $result

-iterator hash_tree_roots_prefix*[T](lst: openarray[T], limit: auto):
-Eth2Digest =
+iterator hash_tree_roots_prefix*[T](lst: openarray[T], limit: static Limit): Eth2Digest =
# This is a particular type's instantiation of a general fold, reduce,
# accumulation, prefix sums, etc family of operations. As long as that
# Eth1 deposit case is the only notable example -- the usual uses of a

@@ -67,7 +67,7 @@ template fromSszBytes*(T: type enum, bytes: openarray[byte]): auto =
template fromSszBytes*(T: type BitSeq, bytes: openarray[byte]): auto =
BitSeq @bytes

-func fromSszBytes*[N](T: type BitList[N], bytes: openarray[byte]): auto {.raisesssz.} =
+func fromSszBytes*(T: type BitList, bytes: openarray[byte]): auto {.raisesssz.} =
if bytes.len == 0:
# https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/ssz/simple-serialize.md#bitlistn
# "An additional 1 bit is added to the end, at index e where e is the
@@ -76,7 +76,7 @@ func fromSszBytes*[N](T: type BitList[N], bytes: openarray[byte]): auto {.raises
# It's not possible to have a literally 0-byte (raw) Bitlist.
# https://github.com/status-im/nim-beacon-chain/issues/931
raise newException(MalformedSszError, "SSZ input Bitlist too small")
-BitList[N] @bytes
+T @bytes

func fromSszBytes*[N](T: type BitArray[N], bytes: openarray[byte]): T {.raisesssz.} =
# A bit vector doesn't have a marker bit, but we'll use the helper from
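
The comments above describe the SSZ BitList wire form: data bits in little-endian bit order followed by a single sentinel 1-bit that marks the length, which is why a zero-byte payload is malformed. A small stand-alone sketch (hypothetical helper, not from this commit) recovering the bit length from that sentinel:

  func bitListLen(data: openArray[byte]): int =
    # Full bytes before the last one, plus the position of the highest
    # set bit (the sentinel) in the last byte.
    doAssert data.len > 0 and data[data.len - 1] != 0'u8
    var top = 0
    var b = data[data.len - 1]
    while b > 1'u8:
      b = b shr 1
      inc top
    result = (data.len - 1) * 8 + top

  doAssert bitListLen([0x01'u8]) == 0   # empty bitlist: sentinel only
  doAssert bitListLen([0x0D'u8]) == 3   # bits [1, 0, 1] -> 0b0000_1101
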
@@ -101,7 +101,7 @@ func readSszValue*(input: openarray[byte], T: type): T {.raisesssz.} =
type ElemType = type result[0]
result = T readSszValue(input, seq[ElemType])

-elif result is string|seq|openarray|array:
+elif result is seq|openarray|array:
type ElemType = type result[0]
when ElemType is byte|char:
result.setOutputSize input.len

@@ -9,7 +9,17 @@ const
offsetSize* = 4

type
-BasicType* = char|bool|SomeUnsignedInt
+UintN* = SomeUnsignedInt # TODO: Add StUint here
+BasicType* = bool|UintN
+
+Limit* = int64
+
+List*[T; maxLen: static Limit] = distinct seq[T]
+BitList*[maxLen: static Limit] = distinct BitSeq
+
+# Note for readers:
+# We use `array` for `Vector` and
+# `BitArray` for `BitVector`

SszError* = object of SerializationError

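
List and BitList now carry their limit as a static Limit (int64) parameter of the type itself. A minimal stand-alone sketch of declaring and using such a bounded list (mirroring the distinct-seq definition above; the instantiation and the add/len helpers are reduced, hypothetical versions of what this file provides):

  import std/typetraits   # distinctBase

  type
    Limit = int64
    List[T; maxLen: static Limit] = distinct seq[T]
    ValidatorIndices = List[uint64, 1024]   # hypothetical instantiation

  template add(x: List, val: untyped) = add(distinctBase(x), val)
  template len(x: List): int = len(distinctBase(x))

  var v: ValidatorIndices
  v.add 42'u64
  v.add 7'u64
  doAssert v.len == 2
  doAssert distinctBase(v)[0] == 42'u64
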
@@ -62,9 +72,6 @@ type
of Field:
discard

-List*[T; maxLen: static int64] = distinct seq[T]
-BitList*[maxLen: static int] = distinct BitSeq
-
template add*(x: List, val: x.T) = add(distinctBase x, val)
template len*(x: List): auto = len(distinctBase x)
template low*(x: List): auto = low(distinctBase x)
@@ -111,7 +118,7 @@ template ElemType*(T: type[array]): untyped =
template ElemType*[T](A: type[openarray[T]]): untyped =
T

-template ElemType*(T: type[seq|string|List]): untyped =
+template ElemType*(T: type[seq|List]): untyped =
type(default(T)[0])

func isFixedSize*(T0: type): bool {.compileTime.} =
@@ -141,7 +148,7 @@ func fixedPortionSize*(T0: type): int {.compileTime.} =
type E = ElemType(T)
when isFixedSize(E): len(T) * fixedPortionSize(E)
else: len(T) * offsetSize
-elif T is seq|string|openarray: offsetSize
+elif T is seq|openarray: offsetSize
elif T is object|tuple:
enumAllSerializedFields(T):
when isFixedSize(FieldType):
@@ -165,7 +172,7 @@ func sszSchemaType*(T0: type): SszType {.compileTime.} =
SszType(kind: sszUInt, bits: 32)
elif T is uint64:
SszType(kind: sszUInt, bits: 64)
-elif T is seq|string:
+elif T is seq:
SszType(kind: sszList, listElemType: sszSchemaType(ElemType(T)))
elif T is array:
SszType(kind: sszVector, vectorElemType: sszSchemaType(ElemType(T)))

@@ -31,17 +31,15 @@ static:

doAssert fixedPortionSize(array[10, bool]) == 10
doAssert fixedPortionSize(array[SomeEnum, uint64]) == 24
-doAssert fixedPortionSize(array[3..5, string]) == 12
+doAssert fixedPortionSize(array[3..5, List[byte, 256]]) == 12

-doAssert fixedPortionSize(string) == 4
-doAssert fixedPortionSize(seq[bool]) == 4
-doAssert fixedPortionSize(seq[string]) == 4
+doAssert fixedPortionSize(List[byte, 256]) == 4
+doAssert fixedPortionSize(List[bool, 128]) == 4
+doAssert fixedPortionSize(List[List[byte, 128], 256]) == 4

doAssert isFixedSize(array[20, bool]) == true
doAssert isFixedSize(Simple) == true
-doAssert isFixedSize(string) == false
-doAssert isFixedSize(seq[bool]) == false
-doAssert isFixedSize(seq[string]) == false
+doAssert isFixedSize(List[bool, 128]) == false

reject fixedPortionSize(int)

@@ -64,8 +62,10 @@ type
Foo = object
bar: Bar

+BarList = List[uint64, 128]
+
Bar = object
-b: string
+b: BarList
baz: Baz

Baz = object
@@ -76,13 +76,13 @@ proc toDigest[N: static int](x: array[N, byte]): Eth2Digest =

suiteReport "SSZ navigator":
timedTest "simple object fields":
-var foo = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
+var foo = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
let encoded = SSZ.encode(foo)

check SSZ.decode(encoded, Foo) == foo

let mountedFoo = sszMount(encoded, Foo)
-check mountedFoo.bar.b == "bar"
+check mountedFoo.bar.b[] == BarList @[1'u64, 2, 3]

let mountedBar = mountedFoo.bar
check mountedBar.baz.i == 10'u64
@ -102,16 +102,16 @@ suiteReport "SSZ navigator":
|
|||
|
||||
suiteReport "SSZ dynamic navigator":
|
||||
timedTest "navigating fields":
|
||||
var fooOrig = Foo(bar: Bar(b: "bar", baz: Baz(i: 10'u64)))
|
||||
var fooOrig = Foo(bar: Bar(b: BarList @[1'u64, 2, 3], baz: Baz(i: 10'u64)))
|
||||
let fooEncoded = SSZ.encode(fooOrig)
|
||||
|
||||
var navFoo = DynamicSszNavigator.init(fooEncoded, Foo)
|
||||
|
||||
var navBar = navFoo.navigate("bar")
|
||||
check navBar.toJson(pretty = false) == """{"b":"bar","baz":{"i":10}}"""
|
||||
check navBar.toJson(pretty = false) == """{"b":[1,2,3],"baz":{"i":10}}"""
|
||||
|
||||
var navB = navBar.navigate("b")
|
||||
check navB.toJson == "\"bar\""
|
||||
check navB.toJson(pretty = false) == "[1,2,3]"
|
||||
|
||||
var navBaz = navBar.navigate("baz")
|
||||
var navI = navBaz.navigate("i")
|
||||
|
|