From c3dea59e8fc56fd610c6169e9e04c3c470ff2420 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81lex=20Cabeza=20Romero?=
Date: Tue, 6 Feb 2024 17:37:42 +0100
Subject: [PATCH] test(lightpush): Lightpush functional tests (#2269)

* Add lightpush payload tests.
* Add end to end lightpush tests.
* updating vendor/nim-unittest2 to protect against core dump issue
* Enable "Valid Payload Sizes" test again

---------

Co-authored-by: Ivan FB <128452529+Ivansete-status@users.noreply.github.com>
---
 tests/all_tests_waku.nim | 23 +-
 tests/node/test_all.nim | 4 +
 tests/node/test_wakunode_filter.nim | 108 +--
 tests/node/test_wakunode_lightpush.nim | 93 +++
 tests/node/test_wakunode_store.nim | 836 ++++++++++++-----------
 tests/resources/content_topics.nim | 8 +
 tests/resources/pubsub_topics.nim | 14 +
 tests/test_waku_lightpush.nim | 154 -----
 tests/testlib/assertions.nim | 4 +
 tests/testlib/futures.nim | 20 +
 tests/waku_archive/archive_utils.nim | 34 +-
 tests/waku_lightpush/lightpush_utils.nim | 33 +
 tests/waku_lightpush/test_all.nim | 2 +
 tests/waku_lightpush/test_client.nim | 362 ++++++++++
 tests/waku_store/test_all.nim | 1 +
 tests/waku_store/test_client.nim | 7 +-
 vendor/nim-unittest2 | 2 +-
 waku/waku_lightpush/client.nim | 7 +-
 18 files changed, 1077 insertions(+), 635 deletions(-)
 create mode 100644 tests/node/test_all.nim
 create mode 100644 tests/node/test_wakunode_lightpush.nim
 create mode 100644 tests/resources/content_topics.nim
 create mode 100644 tests/resources/pubsub_topics.nim
 delete mode 100644 tests/test_waku_lightpush.nim
 create mode 100644 tests/testlib/assertions.nim
 create mode 100644 tests/waku_lightpush/lightpush_utils.nim
 create mode 100644 tests/waku_lightpush/test_all.nim
 create mode 100644 tests/waku_lightpush/test_client.nim
diff --git a/tests/all_tests_waku.nim b/tests/all_tests_waku.nim index 35e9f41a6..303021404 100644 --- a/tests/all_tests_waku.nim +++ b/tests/all_tests_waku.nim @@ -8,7 +8,6 @@ import ./waku_core/test_peers, ./waku_core/test_published_address - # Waku archive test suite import ./waku_archive/test_driver_queue_index, @@ -22,15 +21,15 @@ import const os* {.strdefine.} = "" when os == "Linux" and - # GitHub only supports container actions on Linux - # and we need to start a postgress database in a docker container - defined(postgres): +# GitHub only supports container actions on Linux +# and we need to start a postgress database in a docker container +defined(postgres): import - ./waku_archive/test_driver_postgres_query, - ./waku_archive/test_driver_postgres + ./waku_archive/test_driver_postgres_query, ./waku_archive/test_driver_postgres # Waku store test suite import + ./waku_store/test_client, ./waku_store/test_rpc_codec, ./waku_store/test_waku_store, ./waku_store/test_wakunode_store @@ -39,17 +38,11 @@ when defined(waku_exp_store_resume): # TODO: Review store resume test cases (#1282) import ./waku_store/test_resume - -import - ./waku_relay/test_all, - ./waku_filter_v2/test_all - +import ./waku_relay/test_all, ./waku_filter_v2/test_all, ./waku_lightpush/test_all import # Waku v2 tests ./test_wakunode, - # Waku LightPush - ./test_waku_lightpush, ./test_wakunode_lightpush, # Waku Filter ./test_waku_filter_legacy, @@ -71,9 +64,7 @@ import ./test_waku_rendezvous # Waku Keystore test suite -import - ./test_waku_keystore_keyfile, - ./test_waku_keystore +import ./test_waku_keystore_keyfile, ./test_waku_keystore ## Wakunode JSON-RPC API test suite import diff --git a/tests/node/test_all.nim b/tests/node/test_all.nim new file mode 100644 index 000000000..d8a9685de ---
/dev/null +++ b/tests/node/test_all.nim @@ -0,0 +1,4 @@ +import + ./test_wakunode_filter, + ./test_wakunode_lightpush, + ./test_wakunode_store diff --git a/tests/node/test_wakunode_filter.nim b/tests/node/test_wakunode_filter.nim index 040367093..383b3881e 100644 --- a/tests/node/test_wakunode_filter.nim +++ b/tests/node/test_wakunode_filter.nim @@ -1,20 +1,13 @@ {.used.} import - std/[ - options, - tables, - sequtils - ], + std/[options, tables, sequtils], stew/shims/net as stewNet, testutils/unittests, chronos, chronicles, os, - libp2p/[ - peerstore, - crypto/crypto - ] + libp2p/[peerstore, crypto/crypto] import ../../../waku/[ @@ -25,14 +18,7 @@ import waku_filter_v2/client, waku_filter_v2/subscriptions ], - ../testlib/[ - common, - wakucore, - wakunode, - testasync, - futures, - testutils - ] + ../testlib/[common, wakucore, wakunode, testasync, futures, testutils] suite "Waku Filter - End to End": var client {.threadvar.}: WakuNode @@ -48,10 +34,11 @@ suite "Waku Filter - End to End": asyncSetup: pushHandlerFuture = newFuture[(string, WakuMessage)]() - messagePushHandler = proc( - pubsubTopic: PubsubTopic, message: WakuMessage - ): Future[void] {.async, closure, gcsafe.} = - pushHandlerFuture.complete((pubsubTopic, message)) + messagePushHandler = + proc(pubsubTopic: PubsubTopic, message: WakuMessage): Future[void] {. + async, closure, gcsafe + .} = + pushHandlerFuture.complete((pubsubTopic, message)) pubsubTopic = DefaultPubsubTopic contentTopic = DefaultContentTopic @@ -63,7 +50,8 @@ suite "Waku Filter - End to End": server = newTestWakuNode(serverKey, parseIpAddress("0.0.0.0"), Port(23450)) client = newTestWakuNode(clientKey, parseIpAddress("0.0.0.0"), Port(23451)) - clientClone = newTestWakuNode(clientKey, parseIpAddress("0.0.0.0"), Port(23451)) # Used for testing client restarts + clientClone = newTestWakuNode(clientKey, parseIpAddress("0.0.0.0"), Port(23451)) + # Used for testing client restarts await allFutures(server.start(), client.start()) @@ -83,9 +71,11 @@ suite "Waku Filter - End to End": asyncTest "Client Node receives Push from Server Node, via Filter": # When a client node subscribes to a filter node - let subscribeResponse = await client.filterSubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + subscribeResponse = + await client.filterSubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) # Then the subscription is successful check: @@ -94,7 +84,7 @@ suite "Waku Filter - End to End": server.wakuFilter.subscriptions.isSubscribed(clientPeerId) # When sending a message to the subscribed content topic - let msg1 = fakeWakuMessage(contentTopic=contentTopic) + let msg1 = fakeWakuMessage(contentTopic = contentTopic) await server.filterHandleMessage(pubsubTopic, msg1) # Then the message is pushed to the client @@ -105,9 +95,11 @@ suite "Waku Filter - End to End": pushedMsg1 == msg1 # When unsubscribing from the subscription - let unsubscribeResponse = await client.filterUnsubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + unsubscribeResponse = + await client.filterUnsubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) # Then the unsubscription is successful check: @@ -116,7 +108,7 @@ suite "Waku Filter - End to End": # When sending a message to the previously subscribed content topic pushHandlerFuture = newPushHandlerFuture() # Clear previous future - let msg2 = fakeWakuMessage(contentTopic=contentTopic) + let msg2 = fakeWakuMessage(contentTopic = contentTopic) await 
server.filterHandleMessage(pubsubTopic, msg2) # Then the message is not pushed to the client @@ -128,16 +120,18 @@ suite "Waku Filter - End to End": await server.mountRelay() # And valid filter subscription - let subscribeResponse = await client.filterSubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + subscribeResponse = + await client.filterSubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) require: subscribeResponse.isOk() server.wakuFilter.subscriptions.subscribedPeerCount() == 1 # When a server node gets a Relay message - let msg1 = fakeWakuMessage(contentTopic=contentTopic) - await server.publish(some(pubsubTopic), msg1) + let msg1 = fakeWakuMessage(contentTopic = contentTopic) + discard await server.publish(some(pubsubTopic), msg1) # Then the message is not sent to the client's filter push handler check (not await pushHandlerFuture.withTimeout(FUTURE_TIMEOUT)) @@ -154,18 +148,22 @@ suite "Waku Filter - End to End": let serverRemotePeerInfo = server.peerInfo.toRemotePeerInfo() # When a client node subscribes to the server node - let subscribeResponse = await client.filterSubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + subscribeResponse = + await client.filterSubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) # Then the subscription is successful check (not subscribeResponse.isOk()) asyncTest "Filter Client Node can receive messages after subscribing and restarting, via Filter": # Given a valid filter subscription - let subscribeResponse = await client.filterSubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + subscribeResponse = + await client.filterSubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) require: subscribeResponse.isOk() server.wakuFilter.subscriptions.subscribedPeerCount() == 1 @@ -175,7 +173,7 @@ suite "Waku Filter - End to End": await clientClone.start() # Mimic restart by starting the clone # When a message is sent to the subscribed content topic, via Filter; without refreshing the subscription - let msg = fakeWakuMessage(contentTopic=contentTopic) + let msg = fakeWakuMessage(contentTopic = contentTopic) await server.filterHandleMessage(pubsubTopic, msg) # Then the message is pushed to the client @@ -185,13 +183,15 @@ suite "Waku Filter - End to End": pushedMsgPubsubTopic == pubsubTopic pushedMsg == msg - asyncTest "Filter Client Node can't receive messages after subscribing and restarting, via Relay": # Given the server node has Relay enabled + asyncTest "Filter Client Node can't receive messages after subscribing and restarting, via Relay": await server.mountRelay() # Given a valid filter subscription - let subscribeResponse = await client.filterSubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + subscribeResponse = + await client.filterSubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) require: subscribeResponse.isOk() server.wakuFilter.subscriptions.subscribedPeerCount() == 1 @@ -201,24 +201,26 @@ suite "Waku Filter - End to End": await clientClone.start() # Mimic restart by starting the clone # When a message is sent to the subscribed content topic, via Relay - let msg = fakeWakuMessage(contentTopic=contentTopic) - await server.publish(some(pubsubTopic), msg) + let msg = fakeWakuMessage(contentTopic = contentTopic) + discard await server.publish(some(pubsubTopic), msg) # Then the message is not sent to the client's filter push handler check (not await 
pushHandlerFuture.withTimeout(FUTURE_TIMEOUT)) # Given the client refreshes the subscription - let subscribeResponse2 = await clientClone.filterSubscribe( - some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo - ) + let + subscribeResponse2 = + await clientClone.filterSubscribe( + some(pubsubTopic), contentTopicSeq, serverRemotePeerInfo + ) check: subscribeResponse2.isOk() server.wakuFilter.subscriptions.subscribedPeerCount() == 1 # When a message is sent to the subscribed content topic, via Relay pushHandlerFuture = newPushHandlerFuture() - let msg2 = fakeWakuMessage(contentTopic=contentTopic) - await server.publish(some(pubsubTopic), msg2) + let msg2 = fakeWakuMessage(contentTopic = contentTopic) + discard await server.publish(some(pubsubTopic), msg2) # Then the message is not sent to the client's filter push handler check (not await pushHandlerFuture.withTimeout(FUTURE_TIMEOUT)) diff --git a/tests/node/test_wakunode_lightpush.nim b/tests/node/test_wakunode_lightpush.nim new file mode 100644 index 000000000..0a8141c15 --- /dev/null +++ b/tests/node/test_wakunode_lightpush.nim @@ -0,0 +1,93 @@ +{.used.} + +import + std/[options, tables, sequtils], + stew/shims/net as stewNet, + testutils/unittests, + chronos, + chronicles, + os, + libp2p/[peerstore, crypto/crypto] + +import + ../../../waku/[ + waku_core, + node/peer_manager, + node/waku_node, + waku_filter_v2, + waku_filter_v2/client, + waku_filter_v2/subscriptions, + waku_lightpush, + waku_lightpush/common, + waku_lightpush/client, + waku_lightpush/protocol_metrics, + waku_lightpush/rpc + ], + ../testlib/[assertions, common, wakucore, wakunode, testasync, futures, testutils] + +suite "Waku Lightpush - End To End": + var + handlerFuture {.threadvar.}: Future[(PubsubTopic, WakuMessage)] + handler {.threadvar.}: PushMessageHandler + + server {.threadvar.}: WakuNode + client {.threadvar.}: WakuNode + + serverRemotePeerInfo {.threadvar.}: RemotePeerInfo + pubsubTopic {.threadvar.}: PubsubTopic + contentTopic {.threadvar.}: ContentTopic + message {.threadvar.}: WakuMessage + + asyncSetup: + handlerFuture = newPushHandlerFuture() + handler = + proc( + peer: PeerId, pubsubTopic: PubsubTopic, message: WakuMessage + ): Future[WakuLightPushResult[void]] {.async.} = + handlerFuture.complete((pubsubTopic, message)) + return ok() + + let + serverKey = generateSecp256k1Key() + clientKey = generateSecp256k1Key() + + server = newTestWakuNode(serverKey, ValidIpAddress.init("0.0.0.0"), Port(0)) + client = newTestWakuNode(clientKey, ValidIpAddress.init("0.0.0.0"), Port(0)) + + await allFutures(server.start(), client.start()) + await server.start() + + waitFor server.mountRelay() + waitFor server.mountLightpush() + client.mountLightpushClient() + + serverRemotePeerInfo = server.peerInfo.toRemotePeerInfo() + pubsubTopic = DefaultPubsubTopic + contentTopic = DefaultContentTopic + message = fakeWakuMessage() + + asyncTeardown: + await server.stop() + + suite "Assessment of Message Relaying Mechanisms": + asyncTest "Via 11/WAKU2-RELAY from Relay/Full Node": + # Given a light lightpush client + let + lightpushClient = + newTestWakuNode( + generateSecp256k1Key(), ValidIpAddress.init("0.0.0.0"), Port(0) + ) + lightpushClient.mountLightpushClient() + + # When the client publishes a message + let + publishResponse = + await lightpushClient.lightpushPublish( + some(pubsubTopic), message, serverRemotePeerInfo + ) + + if not publishResponse.isOk(): + echo "Publish failed: ", publishResponse.error() + + # Then the message is relayed to the server + assertResultOk 
publishResponse diff --git a/tests/node/test_wakunode_store.nim b/tests/node/test_wakunode_store.nim index d0e44634c..22153de29 100644 --- a/tests/node/test_wakunode_store.nim +++ b/tests/node/test_wakunode_store.nim @@ -1,4 +1,4 @@ -{.used.} +{.used.} import std/options, @@ -9,6 +9,7 @@ import import ../../../waku/[ + common/paging, node/waku_node, node/peer_manager, waku_core, @@ -20,15 +21,7 @@ import ], ../waku_store/store_utils, ../waku_archive/archive_utils, - ../testlib/[ - common, - wakucore, - wakunode, - testasync, - futures, - testutils - ] - + ../testlib/[common, wakucore, wakunode, testasync, futures, testutils] suite "Waku Store - End to End - Sorted Archive": var pubsubTopic {.threadvar.}: PubsubTopic @@ -43,7 +36,7 @@ suite "Waku Store - End to End - Sorted Archive": var archiveDriver {.threadvar.}: ArchiveDriver var serverRemotePeerInfo {.threadvar.}: RemotePeerInfo - var clientPeerId {.threadvar.}: PeerId + var clientPeerId {.threadvar.}: PeerId asyncSetup: pubsubTopic = DefaultPubsubTopic @@ -51,25 +44,27 @@ suite "Waku Store - End to End - Sorted Archive": contentTopicSeq = @[contentTopic] let timeOrigin = now() - archiveMessages = @[ - fakeWakuMessage(@[byte 00], ts=ts(00, timeOrigin)), - fakeWakuMessage(@[byte 01], ts=ts(10, timeOrigin)), - fakeWakuMessage(@[byte 02], ts=ts(20, timeOrigin)), - fakeWakuMessage(@[byte 03], ts=ts(30, timeOrigin)), - fakeWakuMessage(@[byte 04], ts=ts(40, timeOrigin)), - fakeWakuMessage(@[byte 05], ts=ts(50, timeOrigin)), - fakeWakuMessage(@[byte 06], ts=ts(60, timeOrigin)), - fakeWakuMessage(@[byte 07], ts=ts(70, timeOrigin)), - fakeWakuMessage(@[byte 08], ts=ts(80, timeOrigin)), - fakeWakuMessage(@[byte 09], ts=ts(90, timeOrigin)) - ] + archiveMessages = + @[ + fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)), + fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)), + fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)), + fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)), + fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)), + fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)), + fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)), + fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)), + fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)), + fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)) + ] - historyQuery = HistoryQuery( - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 5 - ) + historyQuery = + HistoryQuery( + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.Forward, + pageSize: 5, + ) let serverKey = generateSecp256k1Key() @@ -103,16 +98,19 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse.get().messages == archiveMessages[0..<5] # Given the next query - var otherHistoryQuery = HistoryQuery( - cursor: queryResponse.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 5 - ) + var + otherHistoryQuery = + HistoryQuery( + cursor: queryResponse.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 5, + ) # When making the next history query - let otherQueryResponse = await client.query(otherHistoryQuery, serverRemotePeerInfo) + let + otherQueryResponse = await client.query(otherHistoryQuery, serverRemotePeerInfo) # Then the response contains the messages check: @@ -120,7 +118,7 @@ suite "Waku Store - End to End - Sorted Archive": asyncTest "Backward Pagination": # Given the 
history query is backward - historyQuery.direction = false + historyQuery.direction = PagingDirection.BACKWARD # When making a history query let queryResponse = await client.query(historyQuery, serverRemotePeerInfo) @@ -130,16 +128,19 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse.get().messages == archiveMessages[5..<10] # Given the next query - var nextHistoryQuery = HistoryQuery( - cursor: queryResponse.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: false, - pageSize: 5 - ) + var + nextHistoryQuery = + HistoryQuery( + cursor: queryResponse.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.BACKWARD, + pageSize: 5, + ) # When making the next history query - let otherQueryResponse = await client.query(nextHistoryQuery, serverRemotePeerInfo) + let + otherQueryResponse = await client.query(nextHistoryQuery, serverRemotePeerInfo) # Then the response contains the messages check: @@ -158,13 +159,15 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse1.get().messages == archiveMessages[0..<2] # Given the next query (2/5) - let historyQuery2 = HistoryQuery( - cursor: queryResponse1.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 2 - ) + let + historyQuery2 = + HistoryQuery( + cursor: queryResponse1.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 2, + ) # When making the next history query let queryResponse2 = await client.query(historyQuery2, serverRemotePeerInfo) @@ -174,13 +177,15 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse2.get().messages == archiveMessages[2..<4] # Given the next query (3/5) - let historyQuery3 = HistoryQuery( - cursor: queryResponse2.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 2 - ) + let + historyQuery3 = + HistoryQuery( + cursor: queryResponse2.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 2, + ) # When making the next history query let queryResponse3 = await client.query(historyQuery3, serverRemotePeerInfo) @@ -188,15 +193,17 @@ suite "Waku Store - End to End - Sorted Archive": # Then the response contains the messages check: queryResponse3.get().messages == archiveMessages[4..<6] - + # Given the next query (4/5) - let historyQuery4 = HistoryQuery( - cursor: queryResponse3.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 2 - ) + let + historyQuery4 = + HistoryQuery( + cursor: queryResponse3.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 2, + ) # When making the next history query let queryResponse4 = await client.query(historyQuery4, serverRemotePeerInfo) @@ -206,13 +213,15 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse4.get().messages == archiveMessages[6..<8] # Given the next query (5/5) - let historyQuery5 = HistoryQuery( - cursor: queryResponse4.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 2 - ) + let + historyQuery5 = + HistoryQuery( + cursor: queryResponse4.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: 
PagingDirection.FORWARD, + pageSize: 2, + ) # When making the next history query let queryResponse5 = await client.query(historyQuery5, serverRemotePeerInfo) @@ -220,7 +229,7 @@ suite "Waku Store - End to End - Sorted Archive": # Then the response contains the messages check: queryResponse5.get().messages == archiveMessages[8..<10] - + asyncTest "Pagination with Large Page Size": # Given the first query (1/2) historyQuery.pageSize = 8 @@ -231,15 +240,17 @@ suite "Waku Store - End to End - Sorted Archive": # Then the response contains the messages check: queryResponse1.get().messages == archiveMessages[0..<8] - + # Given the next query (2/2) - let historyQuery2 = HistoryQuery( - cursor: queryResponse1.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 8 - ) + let + historyQuery2 = + HistoryQuery( + cursor: queryResponse1.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 8, + ) # When making the next history query let queryResponse2 = await client.query(historyQuery2, serverRemotePeerInfo) @@ -247,7 +258,7 @@ suite "Waku Store - End to End - Sorted Archive": # Then the response contains the messages check: queryResponse2.get().messages == archiveMessages[8..<10] - + asyncTest "Pagination with Excessive Page Size": # Given the first query (1/1) historyQuery.pageSize = 100 @@ -271,13 +282,15 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse1.get().messages == archiveMessages[0..<2] # Given the next query (2/3) - let historyQuery2 = HistoryQuery( - cursor: queryResponse1.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 4 - ) + let + historyQuery2 = + HistoryQuery( + cursor: queryResponse1.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 4, + ) # When making the next history query let queryResponse2 = await client.query(historyQuery2, serverRemotePeerInfo) @@ -287,13 +300,15 @@ suite "Waku Store - End to End - Sorted Archive": queryResponse2.get().messages == archiveMessages[2..<6] # Given the next query (3/3) - let historyQuery3 = HistoryQuery( - cursor: queryResponse2.get().cursor, - pubsubTopic: some(pubsubTopic), - contentTopics: contentTopicSeq, - direction: true, - pageSize: 6 - ) + let + historyQuery3 = + HistoryQuery( + cursor: queryResponse2.get().cursor, + pubsubTopic: some(pubsubTopic), + contentTopics: contentTopicSeq, + direction: PagingDirection.FORWARD, + pageSize: 6, + ) # When making the next history query let queryResponse3 = await client.query(historyQuery3, serverRemotePeerInfo) @@ -305,15 +320,18 @@ suite "Waku Store - End to End - Sorted Archive": asyncTest "Pagination with Zero Page Size (Behaves as DefaultPageSize)": # Given a message list of size higher than the default page size let currentStoreLen = uint((await archiveDriver.getMessagesCount()).get()) - assert archive.DefaultPageSize > currentStoreLen, "This test requires a store with more than (DefaultPageSize) messages" + assert archive.DefaultPageSize > currentStoreLen, + "This test requires a store with more than (DefaultPageSize) messages" let missingMessagesAmount = archive.DefaultPageSize - currentStoreLen + 5 let lastMessageTimestamp = archiveMessages[archiveMessages.len - 1].timestamp var extraMessages: seq[WakuMessage] = @[] for i in 0.. 
currentStoreLen, "This test requires a store with more than (DefaultPageSize) messages" + assert archive.DefaultPageSize > currentStoreLen, + "This test requires a store with more than (DefaultPageSize) messages" let missingMessagesAmount = archive.DefaultPageSize - currentStoreLen + 5 let lastMessageTimestamp = archiveMessages[archiveMessages.len - 1].timestamp var extraMessages: seq[WakuMessage] = @[] for i in 0..