Mirror of https://github.com/waku-org/nwaku.git (synced 2025-01-14 17:04:53 +00:00)
refactor(waku-store): reorganise pagination test cases
parent 9e152cd975, commit b0d4e25984
@@ -5,9 +5,10 @@ import
   ./v2/test_wakunode,
   ./v2/test_waku_store,
   ./v2/test_waku_filter,
-  ./v2/test_waku_pagination,
   ./v2/test_waku_payload,
   ./v2/test_waku_swap,
+  ./v2/test_utils_pagination,
+  ./v2/test_message_store_queue_pagination,
   ./v2/test_message_store,
   ./v2/test_jsonrpc_waku,
   ./v2/test_rest_serdes,
@@ -27,7 +28,6 @@ import
   ./v2/test_waku_discv5,
   ./v2/test_enr_utils,
   ./v2/test_waku_store_queue,
-  ./v2/test_pagination_utils,
   ./v2/test_peer_exchange,
   ./v2/test_waku_noise

@@ -1,7 +1,7 @@
 {.used.}

 import
-  std/[options, sequtils],
+  std/[options, sequtils, times],
   testutils/unittests,
   nimcrypto/sha2,
   libp2p/protobuf/minprotobuf
@@ -13,48 +13,39 @@ import
   ../../waku/v2/utils/pagination


-proc createSampleStoreQueue(s: int): StoreQueueRef =
-  ## takes s as input and outputs a StoreQueue with s amount of IndexedWakuMessage
-
-  let testStoreQueue = StoreQueueRef.new(s)
+const
+  DEFAULT_PUBSUB_TOPIC = "/waku/2/default-waku/proto"
+  DEFAULT_CONTENT_TOPIC = ContentTopic("/waku/2/default-content/proto")
+
+
+proc getTestStoreQueue(numMessages: int): StoreQueueRef =
+  let testStoreQueue = StoreQueueRef.new(numMessages)

   var data {.noinit.}: array[32, byte]
   for x in data.mitems: x = 1

-  for i in 0..<s:
-    discard testStoreQueue.add(IndexedWakuMessage(msg: WakuMessage(payload: @[byte i]),
-                                                  index: Index(receiverTime: Timestamp(i),
-                                                               senderTime: Timestamp(i),
-                                                               digest: MDigest[256](data: data)) ))
+  for i in 0..<numMessages:
+    let msg = IndexedWakuMessage(
+      msg: WakuMessage(payload: @[byte i]),
+      index: Index(
+        receiverTime: Timestamp(i),
+        senderTime: Timestamp(i),
+        digest: MDigest[256](data: data)
+      )
+    )
+    discard testStoreQueue.add(msg)

   return testStoreQueue

-procSuite "pagination":
-  test "Index computation test":
-    let
-      wm = WakuMessage(payload: @[byte 1, 2, 3], timestamp: 2)
-      index = wm.computeIndex()
-    check:
-      # the fields of the index should be non-empty
-      len(index.digest.data) != 0
-      len(index.digest.data) == 32 # sha2 output length in bytes
-      index.receiverTime != 0 # the receiver timestamp should be a non-zero value
-      index.senderTime == 2
-      index.pubsubTopic == DefaultTopic
-
-    let
-      wm1 = WakuMessage(payload: @[byte 1, 2, 3], contentTopic: ContentTopic("/waku/2/default-content/proto"))
-      index1 = wm1.computeIndex()
-      wm2 = WakuMessage(payload: @[byte 1, 2, 3], contentTopic: ContentTopic("/waku/2/default-content/proto"))
-      index2 = wm2.computeIndex()
-
-    check:
-      # the digests of two identical WakuMessages must be the same
-      index1.digest == index2.digest
-
+proc getTestTimestamp(): Timestamp =
+  let now = getNanosecondTime(epochTime())
+  Timestamp(now)
+
+
+suite "Queue store - pagination":
   test "Forward pagination test":
     var
-      stQ = createSampleStoreQueue(10)
+      stQ = getTestStoreQueue(10)
       indexList = toSeq(stQ.fwdIterator()).mapIt(it[0]) # Seq copy of the store queue indices for verification
       msgList = toSeq(stQ.fwdIterator()).mapIt(it[1].msg) # Seq copy of the store queue messages for verification
       pagingInfo = PagingInfo(pageSize: 2, cursor: indexList[3], direction: PagingDirection.FORWARD)
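For context, a minimal sketch (not part of the commit) of how the new getTestStoreQueue helper and getPage are exercised by the tests in this file; it assumes the same imports as the test module above, and all names are taken from the diff:

  # Sketch only. Assumes the imports of the test file above are in scope.
  let
    queue = getTestStoreQueue(10)                      # queue holding 10 IndexedWakuMessages, payloads 0..9
    indexes = toSeq(queue.fwdIterator()).mapIt(it[0])  # indices in forward order, usable as cursors
    pagingInfo = PagingInfo(pageSize: 2, cursor: indexes[3], direction: PagingDirection.FORWARD)
    # getPage returns the page contents, the updated paging info and an error code
    (data, newPagingInfo, error) = getPage(queue, pagingInfo)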
@@ -93,7 +84,7 @@ procSuite "pagination":

     # test for an empty msgList
     pagingInfo = PagingInfo(pageSize: 2, direction: PagingDirection.FORWARD)
-    (data, newPagingInfo, error) = getPage(createSampleStoreQueue(0), pagingInfo)
+    (data, newPagingInfo, error) = getPage(getTestStoreQueue(0), pagingInfo)
     check:
       data.len == 0
       newPagingInfo.pageSize == 0
@@ -132,7 +123,8 @@ procSuite "pagination":
       error == HistoryResponseError.NONE

     # test for an invalid cursor
-    pagingInfo = PagingInfo(pageSize: 10, cursor: computeIndex(WakuMessage(payload: @[byte 10])), direction: PagingDirection.FORWARD)
+    let index = Index.compute(WakuMessage(payload: @[byte 10]), getTestTimestamp(), DEFAULT_PUBSUB_TOPIC)
+    pagingInfo = PagingInfo(pageSize: 10, cursor: index, direction: PagingDirection.FORWARD)
     (data, newPagingInfo, error) = getPage(stQ, pagingInfo)
     check:
       data.len == 0
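For reference, a small sketch (not from the commit) contrasting how the invalid cursor is built before and after this hunk; the Index.compute arguments follow the calls shown in the diff:

  # Sketch only. Old helper, removed by this commit:
  #   let cursor = computeIndex(WakuMessage(payload: @[byte 10]))
  # New helper, as used above: receiver timestamp and pubsub topic are passed explicitly.
  let cursor = Index.compute(
    WakuMessage(payload: @[byte 10]),  # message being indexed
    getTestTimestamp(),                # receiver timestamp
    DEFAULT_PUBSUB_TOPIC)              # pubsub topic carried in the index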
@@ -142,7 +134,7 @@ procSuite "pagination":
       error == HistoryResponseError.INVALID_CURSOR

     # test initial paging query over a message list with one message
-    var singleItemMsgList = createSampleStoreQueue(1)
+    var singleItemMsgList = getTestStoreQueue(1)
     pagingInfo = PagingInfo(pageSize: 10, direction: PagingDirection.FORWARD)
     (data, newPagingInfo, error) = getPage(singleItemMsgList, pagingInfo)
     check:
@@ -153,7 +145,7 @@ procSuite "pagination":
       error == HistoryResponseError.NONE

     # test pagination over a message list with one message
-    singleItemMsgList = createSampleStoreQueue(1)
+    singleItemMsgList = getTestStoreQueue(1)
     pagingInfo = PagingInfo(pageSize: 10, cursor: indexList[0], direction: PagingDirection.FORWARD)
     (data, newPagingInfo, error) = getPage(singleItemMsgList, pagingInfo)
     check:
@@ -165,7 +157,7 @@ procSuite "pagination":

   test "Backward pagination test":
     var
-      stQ = createSampleStoreQueue(10)
+      stQ = getTestStoreQueue(10)
       indexList = toSeq(stQ.fwdIterator()).mapIt(it[0]) # Seq copy of the store queue indices for verification
       msgList = toSeq(stQ.fwdIterator()).mapIt(it[1].msg) # Seq copy of the store queue messages for verification
       pagingInfo = PagingInfo(pageSize: 2, cursor: indexList[3], direction: PagingDirection.BACKWARD)
@@ -181,7 +173,7 @@ procSuite "pagination":

     # test for an empty msgList
     pagingInfo = PagingInfo(pageSize: 2, direction: PagingDirection.BACKWARD)
-    (data, newPagingInfo, error) = getPage(createSampleStoreQueue(0), pagingInfo)
+    (data, newPagingInfo, error) = getPage(getTestStoreQueue(0), pagingInfo)
     check:
       data.len == 0
       newPagingInfo.pageSize == 0
@@ -242,7 +234,8 @@ procSuite "pagination":
       error == HistoryResponseError.NONE

     # test for an invalid cursor
-    pagingInfo = PagingInfo(pageSize: 5, cursor: computeIndex(WakuMessage(payload: @[byte 10])), direction: PagingDirection.BACKWARD)
+    let index = Index.compute(WakuMessage(payload: @[byte 10]), getTestTimestamp(), DEFAULT_PUBSUB_TOPIC)
+    pagingInfo = PagingInfo(pageSize: 5, cursor: index, direction: PagingDirection.BACKWARD)
     (data, newPagingInfo, error) = getPage(stQ, pagingInfo)
     check:
       data.len == 0
@@ -252,7 +245,7 @@ procSuite "pagination":
       error == HistoryResponseError.INVALID_CURSOR

     # test initial paging query over a message list with one message
-    var singleItemMsgList = createSampleStoreQueue(1)
+    var singleItemMsgList = getTestStoreQueue(1)
     pagingInfo = PagingInfo(pageSize: 10, direction: PagingDirection.BACKWARD)
     (data, newPagingInfo, error) = getPage(singleItemMsgList, pagingInfo)
     check:
@@ -263,7 +256,7 @@ procSuite "pagination":
       error == HistoryResponseError.NONE

     # test paging query over a message list with one message
-    singleItemMsgList = createSampleStoreQueue(1)
+    singleItemMsgList = getTestStoreQueue(1)
     pagingInfo = PagingInfo(pageSize: 10, cursor: indexList[0], direction: PagingDirection.BACKWARD)
     (data, newPagingInfo, error) = getPage(singleItemMsgList, pagingInfo)
     check:
@@ -272,46 +265,3 @@ procSuite "pagination":
       newPagingInfo.direction == pagingInfo.direction
       newPagingInfo.pageSize == 0
       error == HistoryResponseError.NONE
-
-  suite "time-window history query":
-    test "Encode/Decode waku message with timestamp":
-      # test encoding and decoding of the timestamp field of a WakuMessage
-
-      # Encoding
-      let
-        version = 0'u32
-        payload = @[byte 0, 1, 2]
-        timestamp = Timestamp(10)
-        msg = WakuMessage(payload: payload, version: version, timestamp: timestamp)
-        pb = msg.encode()
-
-      # Decoding
-      let
-        msgDecoded = WakuMessage.init(pb.buffer)
-      check:
-        msgDecoded.isOk()
-
-      let
-        timestampDecoded = msgDecoded.value.timestamp
-      check:
-        timestampDecoded == timestamp
-
-    test "Encode/Decode waku message without timestamp":
-      # test the encoding and decoding of a WakuMessage with an empty timestamp field
-
-      # Encoding
-      let
-        version = 0'u32
-        payload = @[byte 0, 1, 2]
-        msg = WakuMessage(payload: payload, version: version)
-        pb = msg.encode()
-
-      # Decoding
-      let
-        msgDecoded = WakuMessage.init(pb.buffer)
-      doAssert:
-        msgDecoded.isOk()
-
-      let
-        timestampDecoded = msgDecoded.value.timestamp
-      check:
-        timestampDecoded == Timestamp(0)
@@ -1,26 +1,39 @@
 {.used.}

 import
-  testutils/unittests,
-  chronos,
+  std/times,
   stew/byteutils,
-  libp2p/crypto/crypto,
-  ../../waku/v2/utils/pagination,
-  ../../waku/v2/utils/time
+  testutils/unittests,
+  nimcrypto
+import
+  ../../waku/v2/protocol/waku_message,
+  ../../waku/v2/utils/time,
+  ../../waku/v2/utils/pagination

-procSuite "Pagination utils":
+const
+  DEFAULT_PUBSUB_TOPIC = "/waku/2/default-waku/proto"
+  DEFAULT_CONTENT_TOPIC = ContentTopic("/waku/2/default-content/proto")

-  ## Helpers
-  proc hashFromStr(input: string): MDigest[256] =
-    var ctx: sha256
-
-    ctx.init()
-    ctx.update(input.toBytes()) # converts the input to bytes
-
-    let hashed = ctx.finish() # computes the hash
-    ctx.clear()
-
-    return hashed
+
+## Helpers
+
+proc getTestTimestamp(offset=0): Timestamp =
+  let now = getNanosecondTime(epochTime() + float(offset))
+  Timestamp(now)
+
+proc hashFromStr(input: string): MDigest[256] =
+  var ctx: sha256
+
+  ctx.init()
+  ctx.update(input.toBytes())
+  let hashed = ctx.finish()
+  ctx.clear()
+
+  return hashed
+
+
+suite "Pagination - Index":

   ## Test vars
   let
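As an aside, a hedged sketch of an Index literal built with the helpers above; the field values are illustrative only and are not the test vars used in the next hunk:

  # Sketch only; values are made up for illustration.
  let exampleIndex = Index(
    digest: hashFromStr("example payload"),      # 32-byte SHA-256 digest of the payload string
    receiverTime: getTestTimestamp(),            # when the store saw the message
    senderTime: getNanosecondTime(epochTime()),  # timestamp claimed by the sender
    pubsubTopic: DEFAULT_PUBSUB_TOPIC)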
@@ -66,62 +79,109 @@ procSuite "Pagination utils":
       senderTime: getNanosecondTime(0),
       pubsubTopic: "zzzz")

-  ## Test suite
-  asyncTest "Index comparison":
+  test "Index comparison":
+    # Index comparison with senderTime diff
     check:
-      # Index comparison with senderTime diff
       cmp(smallIndex1, largeIndex1) < 0
       cmp(smallIndex2, largeIndex1) < 0

     # Index comparison with digest diff
+    check:
       cmp(smallIndex1, smallIndex2) < 0
       cmp(smallIndex1, largeIndex2) < 0
       cmp(smallIndex2, largeIndex2) > 0
       cmp(largeIndex1, largeIndex2) > 0

     # Index comparison when equal
+    check:
       cmp(eqIndex1, eqIndex2) == 0

     # pubsubTopic difference
+    check:
       cmp(smallIndex1, diffPsTopic) < 0

     # receiverTime diff plays no role when senderTime set
+    check:
       cmp(eqIndex1, eqIndex3) == 0

     # receiverTime diff plays no role when digest/pubsubTopic equal
+    check:
       cmp(noSenderTime1, noSenderTime2) == 0

     # sort on receiverTime with no senderTimestamp and unequal pubsubTopic
+    check:
       cmp(noSenderTime1, noSenderTime3) < 0

     # sort on receiverTime with no senderTimestamp and unequal digest
+    check:
       cmp(noSenderTime1, noSenderTime4) < 0

     # sort on receiverTime if no senderTimestamp on only one side
+    check:
       cmp(smallIndex1, noSenderTime1) < 0
       cmp(noSenderTime1, smallIndex1) > 0 # Test symmetry
       cmp(noSenderTime2, eqIndex3) < 0
       cmp(eqIndex3, noSenderTime2) > 0 # Test symmetry

-  asyncTest "Index equality":
+  test "Index equality":
+    # Exactly equal
     check:
-      # Exactly equal
       eqIndex1 == eqIndex2

     # Receiver time plays no role, even without sender time
+    check:
       eqIndex1 == eqIndex3
       noSenderTime1 == noSenderTime2 # only receiver time differs, indices are equal
       noSenderTime1 != noSenderTime3 # pubsubTopics differ
       noSenderTime1 != noSenderTime4 # digests differ

     # Unequal sender time
+    check:
       smallIndex1 != largeIndex1

     # Unequal digest
+    check:
       smallIndex1 != smallIndex2

     # Unequal hash and digest
+    check:
       smallIndex1 != eqIndex1

     # Unequal pubsubTopic
+    check:
       smallIndex1 != diffPsTopic
+
+  test "Index computation should not be empty":
+    ## Given
+    let ts = getTestTimestamp()
+    let wm = WakuMessage(payload: @[byte 1, 2, 3], timestamp: ts)
+
+    ## When
+    let ts2 = getTestTimestamp() + 10
+    let index = Index.compute(wm, ts2, DEFAULT_CONTENT_TOPIC)
+
+    ## Then
+    check:
+      index.digest.data.len != 0
+      index.digest.data.len == 32 # sha2 output length in bytes
+      index.receiverTime == ts2 # the receiver timestamp should be a non-zero value
+      index.senderTime == ts
+      index.pubsubTopic == DEFAULT_CONTENT_TOPIC
+
+  test "Index digest of two identical messsage should be the same":
+    ## Given
+    let topic = ContentTopic("test-content-topic")
+    let
+      wm1 = WakuMessage(payload: @[byte 1, 2, 3], contentTopic: topic)
+      wm2 = WakuMessage(payload: @[byte 1, 2, 3], contentTopic: topic)
+
+    ## When
+    let ts = getTestTimestamp()
+    let
+      index1 = Index.compute(wm1, ts, DEFAULT_PUBSUB_TOPIC)
+      index2 = Index.compute(wm2, ts, DEFAULT_PUBSUB_TOPIC)
+
+    ## Then
+    check:
+      index1.digest == index2.digest
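Summarising the ordering the suite above asserts, as a sketch that reuses the test vars defined earlier in that file:

  # Sketch only. cmp orders indices by senderTime first, then by digest/pubsubTopic;
  # receiverTime is only consulted when a senderTime is missing on one or both sides.
  assert cmp(smallIndex1, largeIndex1) < 0      # smaller senderTime sorts first
  assert cmp(eqIndex1, eqIndex3) == 0           # receiverTime ignored when senderTime is set
  assert cmp(noSenderTime1, noSenderTime3) < 0  # falls back to receiverTime-based ordering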
@@ -3,7 +3,8 @@
 import
   testutils/unittests,
   ../../waku/v2/protocol/waku_message,
-  ../../waku/v2/node/waku_payload
+  ../../waku/v2/node/waku_payload,
+  ../../waku/v2/utils/time

 procSuite "Waku Payload":
   let rng = newRng()
@@ -109,3 +110,46 @@ procSuite "Waku Payload":

     check:
       decoded.isErr()
+
+  test "Encode/Decode waku message with timestamp":
+    ## Test encoding and decoding of the timestamp field of a WakuMessage
+
+    ## Given
+    let
+      version = 0'u32
+      payload = @[byte 0, 1, 2]
+      timestamp = Timestamp(10)
+      msg = WakuMessage(payload: payload, version: version, timestamp: timestamp)
+
+    ## When
+    let pb = msg.encode()
+    let msgDecoded = WakuMessage.init(pb.buffer)
+
+    ## Then
+    check:
+      msgDecoded.isOk()
+
+    let timestampDecoded = msgDecoded.value.timestamp
+    check:
+      timestampDecoded == timestamp
+
+  test "Encode/Decode waku message without timestamp":
+    ## Test the encoding and decoding of a WakuMessage with an empty timestamp field
+
+    ## Given
+    let
+      version = 0'u32
+      payload = @[byte 0, 1, 2]
+      msg = WakuMessage(payload: payload, version: version)
+
+    ## When
+    let pb = msg.encode()
+    let msgDecoded = WakuMessage.init(pb.buffer)
+
+    ## Then
+    check:
+      msgDecoded.isOk()
+
+    let timestampDecoded = msgDecoded.value.timestamp
+    check:
+      timestampDecoded == Timestamp(0)
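The two tests added to the payload suite above mirror the ones removed from the queue pagination file earlier in this diff. A condensed sketch (not part of the commit) of the round trip they cover, assuming the nwaku repo layout for the imports:

  # Sketch only; import paths are the ones used by the test file above.
  import ../../waku/v2/protocol/waku_message
  import ../../waku/v2/utils/time

  let msg = WakuMessage(payload: @[byte 0, 1, 2], version: 0'u32, timestamp: Timestamp(10))
  let pb = msg.encode()                      # protobuf-encode, timestamp included
  let decoded = WakuMessage.init(pb.buffer)  # decode from the raw buffer
  assert decoded.isOk()
  assert decoded.value.timestamp == Timestamp(10)  # an omitted timestamp decodes as Timestamp(0)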