Mirror of https://github.com/waku-org/nwaku.git, synced 2025-02-06 03:54:54 +00:00

extra changes to serdes, fixing test compilation errors

parent e3f3177b5a, commit c897139c49
@@ -283,7 +283,7 @@ procSuite "Waku Rest API - Store v3":
     var pages = newSeq[seq[WakuMessage]](2)

-    var reqHash = none(WakuMessageHash)
+    var reqHash = none(string)

     for i in 0 ..< 2:
       let response = await client.getStoreMessagesV3(
@@ -295,7 +295,7 @@ procSuite "Waku Rest API - Store v3":
         "", # end time. Empty ignores the field.
         "", # hashes
         if reqHash.isSome():
-          reqHash.get().toRestStringWakuMessageHash()
+          reqHash.get()
         else:
           "", # base64-encoded digest. Empty ignores the field.
         "true", # ascending
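The test-side change above (and repeated in the hunks that follow) keeps the store cursor as a plain string instead of a WakuMessageHash, so the value can be passed to the REST client directly without the toRestStringWakuMessageHash() conversion. A minimal, self-contained sketch of that pattern, using only std/options; the cursorArg helper and the sample hash value are illustrative, not part of the diff:

import std/options

# Illustrative helper (not in the diff): builds the cursor argument the way
# the test does above. The hash is already a string, so it is passed through
# as-is; an empty string tells the server to ignore the field.
proc cursorArg(reqHash: Option[string]): string =
  if reqHash.isSome():
    reqHash.get()
  else:
    ""

when isMainModule:
  var reqHash = none(string)
  doAssert cursorArg(reqHash) == ""
  reqHash = some("0x1234abcd") # hypothetical hex-encoded message hash
  doAssert cursorArg(reqHash) == "0x1234abcd"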
@@ -775,7 +775,7 @@ procSuite "Waku Rest API - Store v3":
     var pages = newSeq[seq[WakuMessage]](2)

     var reqPubsubTopic = DefaultPubsubTopic
-    var reqHash = none(WakuMessageHash)
+    var reqHash = none(string)

     for i in 0 ..< 2:
       let response = await client.getStoreMessagesV3(
@@ -787,7 +787,7 @@ procSuite "Waku Rest API - Store v3":
         "", # end time. Empty ignores the field.
         "", # hashes
         if reqHash.isSome():
-          reqHash.get().toRestStringWakuMessageHash()
+          reqHash.get()
         else:
           "", # base64-encoded digest. Empty ignores the field.
         "true", # ascending
@@ -823,7 +823,7 @@ procSuite "Waku Rest API - Store v3":
         "", # end time. Empty ignores the field.
         "", # hashes
         if reqHash.isSome():
-          reqHash.get().toRestStringWakuMessageHash()
+          reqHash.get()
         else:
           "", # base64-encoded digest. Empty ignores the field.
       )
@@ -845,7 +845,7 @@ procSuite "Waku Rest API - Store v3":
         "", # end time. Empty ignores the field.
         "", # hashes
         if reqHash.isSome():
-          reqHash.get().toRestStringWakuMessageHash()
+          reqHash.get()
         else:
           "", # base64-encoded digest. Empty ignores the field.
         "true", # ascending

@@ -15,12 +15,12 @@ logScope:
   topics = "waku node rest store_api"

 proc decodeBytes*(
-    t: typedesc[StoreQueryResponse],
+    t: typedesc[StoreQueryResponseHex],
     data: openArray[byte],
     contentType: Opt[ContentTypeData],
-): RestResult[StoreQueryResponse] =
+): RestResult[StoreQueryResponseHex] =
   if MediaType.init($contentType) == MIMETYPE_JSON:
-    let decoded = ?decodeFromJsonBytes(StoreQueryResponse, data)
+    let decoded = ?decodeFromJsonBytes(StoreQueryResponseHex, data)
     return ok(decoded)

   if MediaType.init($contentType) == MIMETYPE_TEXT:
@@ -30,11 +30,11 @@ proc decodeBytes*(
     copyMem(addr res[0], unsafeAddr data[0], len(data))

     return ok(
-      StoreQueryResponse(
+      StoreQueryResponseHex(
         statusCode: uint32(ErrorCode.BAD_RESPONSE),
         statusDesc: res,
-        messages: newSeq[WakuMessageKeyValue](0),
-        paginationCursor: none(WakuMessageHash),
+        messages: newSeq[WakuMessageKeyValueHex](0),
+        paginationCursor: none(string),
       )
     )

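decodeBytes now produces the hex-string flavour of the store response: JSON bodies are decoded into StoreQueryResponseHex, and any plain-text body is wrapped in a BAD_RESPONSE placeholder whose cursor is none(string). A rough, self-contained sketch of that content-type dispatch using only std/json; the SketchResponse type, field selection, and the 400 code are stand-ins, not the nwaku types or the real RestResult plumbing:

import std/[json, options]

type SketchResponse = object # illustrative stand-in for StoreQueryResponseHex
  statusCode: uint32
  statusDesc: string
  paginationCursor: Option[string] # hex string instead of WakuMessageHash

proc decodeSketch(body, contentType: string): SketchResponse =
  if contentType == "application/json":
    # JSON bodies are decoded field by field into the hex-string type.
    let node = parseJson(body)
    result.statusCode = uint32(node["statusCode"].getInt())
    result.statusDesc = node{"statusDesc"}.getStr("")
    if node.hasKey("paginationCursor"):
      result.paginationCursor = some(node["paginationCursor"].getStr())
  else:
    # Mirrors the text/plain branch above: surface the raw body as the error text.
    result.statusCode = 400
    result.statusDesc = body
    result.paginationCursor = none(string)

when isMainModule:
  let resp = decodeSketch("""{"statusCode": 200, "statusDesc": "OK", "paginationCursor": "0xabcd"}""",
                          "application/json")
  doAssert resp.paginationCursor.get() == "0xabcd"
  doAssert decodeSketch("oops", "text/plain").statusCode == 400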
@@ -58,6 +58,6 @@ proc getStoreMessagesV3*(
     cursor: string = "", # base64-encoded hash
     ascending: string = "",
     pageSize: string = "",
-): RestResponse[StoreQueryResponse] {.
+): RestResponse[StoreQueryResponseHex] {.
   rest, endpoint: "/store/v3/messages", meth: HttpMethod.MethodGet
 .}
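With the endpoint typed as RestResponse[StoreQueryResponseHex], the pagination cursor travels as a plain string: the value returned in one response is passed back verbatim as the cursor parameter of the next request. A hedged sketch of that paging loop under stated assumptions; fetchPage and the Page type are hypothetical stand-ins for the generated REST client, not nwaku APIs:

import std/options

type Page = object
  messages: seq[string]            # stand-ins for WakuMessageKeyValueHex entries
  paginationCursor: Option[string] # string cursor; absent on the last page

# Hypothetical fetch: returns two pages, then signals the end by omitting the cursor.
proc fetchPage(cursor: string): Page =
  if cursor == "":
    Page(messages: @["msg-1"], paginationCursor: some("0xcursor1"))
  else:
    Page(messages: @["msg-2"], paginationCursor: none(string))

when isMainModule:
  var cursor = "" # empty string means "start from the beginning"
  var collected: seq[string]
  while true:
    let page = fetchPage(cursor)
    collected.add(page.messages)
    if page.paginationCursor.isNone():
      break
    cursor = page.paginationCursor.get() # pass the string cursor straight back
  doAssert collected == @["msg-1", "msg-2"]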

@@ -171,7 +171,7 @@ proc readValue*(
     reader: var JsonReader, value: var WakuMessageKeyValueHex
 ) {.gcsafe, raises: [SerializationError, IOError].} =
   var
-    messageHash = none(WakuMessageHash)
+    messageHash = none(string)
     message = none(WakuMessage)
     pubsubTopic = none(PubsubTopic)

@@ -180,19 +180,19 @@ proc readValue*(
     of "messageHash":
       if messageHash.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `messageHash` fields found", "WakuMessageKeyValue"
+          "Multiple `messageHash` fields found", "WakuMessageKeyValueHex"
         )
       messageHash = some(reader.readValue(string))
     of "message":
       if message.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `message` fields found", "WakuMessageKeyValue"
+          "Multiple `message` fields found", "WakuMessageKeyValueHex"
         )
       message = some(reader.readValue(WakuMessage))
     of "pubsubTopic":
       if pubsubTopic.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `pubsubTopic` fields found", "WakuMessageKeyValue"
+          "Multiple `pubsubTopic` fields found", "WakuMessageKeyValueHex"
         )
       pubsubTopic = some(reader.readValue(string))
     else:
@@ -201,7 +201,7 @@ proc readValue*(
   if messageHash.isNone():
     reader.raiseUnexpectedValue("Field `messageHash` is missing")

-  value = WakuMessageKeyValue(
+  value = WakuMessageKeyValueHex(
     messageHash: messageHash.get(), message: message, pubsubTopic: pubsubTopic
   )

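The custom readValue above follows the usual pattern for hand-written JSON deserializers: each field is collected into an Option, a second occurrence of a key is rejected, and messageHash (now a plain string) is the only required field before the Hex object is assembled. A stripped-down illustration of that pattern over a list of key/value pairs; the KV type and parseKV proc are illustrative only and stand in for the serialization-framework machinery:

import std/options

type KV = object # stand-in for WakuMessageKeyValueHex
  messageHash: string          # hex string instead of WakuMessageHash
  pubsubTopic: Option[string]

proc parseKV(fields: seq[(string, string)]): KV =
  var
    messageHash = none(string)
    pubsubTopic = none(string)
  for (key, val) in fields:
    case key
    of "messageHash":
      if messageHash.isSome():
        raise newException(ValueError, "Multiple `messageHash` fields found")
      messageHash = some(val)
    of "pubsubTopic":
      if pubsubTopic.isSome():
        raise newException(ValueError, "Multiple `pubsubTopic` fields found")
      pubsubTopic = some(val)
    else:
      discard # unknown fields are skipped
  # The hash is the only mandatory field, mirroring the check above.
  if messageHash.isNone():
    raise newException(ValueError, "Field `messageHash` is missing")
  KV(messageHash: messageHash.get(), pubsubTopic: pubsubTopic)

when isMainModule:
  let kv = parseKV(@[("messageHash", "0xdeadbeef"), ("pubsubTopic", "/waku/2/rs/0/1")])
  doAssert kv.messageHash == "0xdeadbeef"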
@@ -237,31 +237,31 @@ proc readValue*(
     of "requestId":
       if requestId.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `requestId` fields found", "StoreQueryResponse"
+          "Multiple `requestId` fields found", "StoreQueryResponseHex"
         )
       requestId = some(reader.readValue(string))
     of "statusCode":
       if code.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `statusCode` fields found", "StoreQueryResponse"
+          "Multiple `statusCode` fields found", "StoreQueryResponseHex"
         )
       code = some(reader.readValue(uint32))
     of "statusDesc":
       if desc.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `statusDesc` fields found", "StoreQueryResponse"
+          "Multiple `statusDesc` fields found", "StoreQueryResponseHex"
         )
       desc = some(reader.readValue(string))
     of "messages":
       if messages.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `messages` fields found", "StoreQueryResponse"
+          "Multiple `messages` fields found", "StoreQueryResponseHex"
         )
       messages = some(reader.readValue(seq[WakuMessageKeyValueHex]))
     of "paginationCursor":
       if cursor.isSome():
         reader.raiseUnexpectedField(
-          "Multiple `paginationCursor` fields found", "StoreQueryResponse"
+          "Multiple `paginationCursor` fields found", "StoreQueryResponseHex"
         )
       cursor = some(reader.readValue(string))
     else:
@@ -279,7 +279,7 @@ proc readValue*(
   if messages.isNone():
     reader.raiseUnexpectedValue("Field `messages` is missing")

-  value = StoreQueryResponse(
+  value = StoreQueryResponseHex(
     requestId: requestId.get(),
     statusCode: code.get(),
     statusDesc: desc.get(),
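Taken together, the Hex types mean the JSON exposed over the REST store API carries hashes and cursors as strings end to end. A small sanity-check sketch; the sample body below is illustrative, not captured from a real node, and only the string-typed shape of the hash and cursor fields is asserted:

import std/json

when isMainModule:
  # Illustrative response body in the shape the Hex serdes above expects.
  let body = """{
    "requestId": "1",
    "statusCode": 200,
    "statusDesc": "OK",
    "messages": [
      {"messageHash": "0x6d657373616765", "pubsubTopic": "/waku/2/rs/0/1"}
    ],
    "paginationCursor": "0x6d657373616765"
  }"""
  let node = parseJson(body)
  # Both the per-message hash and the pagination cursor are plain JSON strings.
  doAssert node["messages"][0]["messageHash"].kind == JString
  doAssert node["paginationCursor"].kind == JString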
|
Loading…
x
Reference in New Issue
Block a user