resolve conflicts

commit 5edda43ade
stubbsta, 2025-04-24 10:32:23 +02:00
160 changed files with 2209 additions and 1285 deletions

View File

@ -133,6 +133,9 @@ Binary will be created as `<path to your test file.nim>.bin` under the `build` d
make test/tests/common/test_enr_builder.nim
```
### Testing against `js-waku`
Refer to [js-waku repo](https://github.com/waku-org/js-waku/tree/master/packages/tests) for instructions.
## Formatting
Nim files are expected to be formatted using the [`nph`](https://github.com/arnetheduck/nph) version present in `vendor/nph`.
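
To check formatting locally before pushing, a minimal sketch (the binary location is an assumption; build `nph` from `vendor/nph` first, or have it on your `PATH`):
```
# Format a Nim module in place with the vendored nph (path illustrative).
./vendor/nph/nph path/to/your_module.nim
```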

View File

@ -12,16 +12,16 @@ MIN_MESSAGE_SIZE=15Kb
MAX_MESSAGE_SIZE=145Kb
## for wakusim
#PUBSUB=/waku/2/rs/66/0
#SHARD=0
#CONTENT_TOPIC=/tester/2/light-pubsub-test/wakusim
#CLUSTER_ID=66
## for status.prod
PUBSUB=/waku/2/rs/16/32
#SHARDS=32
CONTENT_TOPIC=/tester/2/light-pubsub-test/fleet
CLUSTER_ID=16
## for TWN
#PUBSUB=/waku/2/rs/1/4
#SHARD=4
#CONTENT_TOPIC=/tester/2/light-pubsub-test/twn
#CLUSTER_ID=1

View File

@ -55,6 +55,8 @@ RUN chmod +x /usr/bin/liteprotocoltester
FROM base_lpt AS standalone_lpt
COPY --from=nim-build /app/apps/liteprotocoltester/run_tester_node.sh /usr/bin/
COPY --from=nim-build /app/apps/liteprotocoltester/run_tester_node_on_fleet.sh /usr/bin/
RUN chmod +x /usr/bin/run_tester_node.sh
ENTRYPOINT ["/usr/bin/run_tester_node.sh", "/usr/bin/liteprotocoltester"]

View File

@ -127,7 +127,7 @@ Run a SENDER role liteprotocoltester and a RECEIVER role one on different termin
| ---: | :--- | :--- |
| NUM_MESSAGES | Number of messages to publish, 0 means infinite | 120 |
| MESSAGE_INTERVAL_MILLIS | Interval between messages in milliseconds | 1000 |
| PUBSUB | Used pubsub_topic for testing | /waku/2/rs/66/0 |
| SHARD | Used shard for testing | 0 |
| CONTENT_TOPIC | content_topic for testing | /tester/1/light-pubsub-example/proto |
| CLUSTER_ID | cluster_id of the network | 16 |
| START_PUBLISHING_AFTER_SECS | Delay in seconds before publishing starts, to let the service node connect | 5 |
@ -272,7 +272,7 @@ export NUM_MESSAGES=200
export MESSAGE_INTERVAL_MILLIS=1000
export MIN_MESSAGE_SIZE=15Kb
export MAX_MESSAGE_SIZE=145Kb
export PUBSUB=/waku/2/rs/16/32
export SHARD=32
export CONTENT_TOPIC=/tester/2/light-pubsub-test/fleet
export CLUSTER_ID=16
@ -307,7 +307,7 @@ export NUM_MESSAGES=300
export MESSAGE_INTERVAL_MILLIS=7000
export MIN_MESSAGE_SIZE=15Kb
export MAX_MESSAGE_SIZE=145Kb
export PUBSUB=/waku/2/rs/1/4
export SHARD=4
export CONTENT_TOPIC=/tester/2/light-pubsub-test/twn
export CLUSTER_ID=1

View File

@ -16,7 +16,7 @@ x-rln-environment: &rln_env
x-test-running-conditions: &test_running_conditions
NUM_MESSAGES: ${NUM_MESSAGES:-120}
MESSAGE_INTERVAL_MILLIS: "${MESSAGE_INTERVAL_MILLIS:-1000}"
PUBSUB: ${PUBSUB:-/waku/2/rs/66/0}
SHARD: ${SHARD:-0}
CONTENT_TOPIC: ${CONTENT_TOPIC:-/tester/2/light-pubsub-test/wakusim}
CLUSTER_ID: ${CLUSTER_ID:-66}
MIN_MESSAGE_SIZE: ${MIN_MESSAGE_SIZE:-1Kb}

View File

@ -16,7 +16,7 @@ x-rln-environment: &rln_env
x-test-running-conditions: &test_running_conditions
NUM_MESSAGES: ${NUM_MESSAGES:-120}
MESSAGE_INTERVAL_MILLIS: "${MESSAGE_INTERVAL_MILLIS:-1000}"
PUBSUB: ${PUBSUB:-/waku/2/rs/66/0}
SHARD: ${SHARD:-0}
CONTENT_TOPIC: ${CONTENT_TOPIC:-/tester/2/light-pubsub-test/wakusim}
CLUSTER_ID: ${CLUSTER_ID:-66}
MIN_MESSAGE_SIZE: ${MIN_MESSAGE_SIZE:-1Kb}
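
Because the compose file uses `${VAR:-default}` interpolation, the test parameters above can be overridden from the invoking shell; a sketch, assuming the default compose file name:
```
# Override shard/cluster for a TWN run (compose file name assumed).
SHARD=4 CLUSTER_ID=1 CONTENT_TOPIC=/tester/2/light-pubsub-test/twn \
  docker compose -f docker-compose.yml up
```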

View File

@ -130,7 +130,9 @@ proc setupAndSubscribe*(
var stats: PerPeerStatistics
actualFilterPeer = servicePeer
let pushHandler = proc(pubsubTopic: PubsubTopic, message: WakuMessage) {.async.} =
let pushHandler = proc(
pubsubTopic: PubsubTopic, message: WakuMessage
): Future[void] {.async, closure.} =
let payloadStr = string.fromBytes(message.payload)
let testerMessage = js.Json.decode(payloadStr, ProtocolTesterMessage)
let msgHash = computeMessageHash(pubsubTopic, message).to0xHex
@ -163,7 +165,7 @@ proc setupAndSubscribe*(
if conf.numMessages > 0 and
waitFor stats.checkIfAllMessagesReceived(maxWaitForLastMessage):
waitFor unsubscribe(wakuNode, conf.pubsubTopics[0], conf.contentTopics[0])
waitFor unsubscribe(wakuNode, conf.getPubsubTopic(), conf.contentTopics[0])
info "All messages received. Exiting."
## for graceful shutdown through signal hooks
@ -176,5 +178,5 @@ proc setupAndSubscribe*(
# Start maintaining subscription
asyncSpawn maintainSubscription(
wakuNode, conf.pubsubTopics[0], conf.contentTopics[0], conf.fixedServicePeer
wakuNode, conf.getPubsubTopic(), conf.contentTopics[0], conf.fixedServicePeer
)

View File

@ -4,7 +4,7 @@ NUM_MESSAGES=300
MESSAGE_INTERVAL_MILLIS=1000
MIN_MESSAGE_SIZE=15Kb
MAX_MESSAGE_SIZE=145Kb
PUBSUB=/waku/2/rs/16/32
SHARD=32
CONTENT_TOPIC=/tester/2/light-pubsub-test-at-infra/status-prod
CLUSTER_ID=16
LIGHTPUSH_BOOTSTRAP=enr:-QEKuED9AJm2HGgrRpVaJY2nj68ao_QiPeUT43sK-aRM7sMJ6R4G11OSDOwnvVacgN1sTw-K7soC5dzHDFZgZkHU0u-XAYJpZIJ2NIJpcISnYxMvim11bHRpYWRkcnO4WgAqNiVib290LTAxLmRvLWFtczMuc3RhdHVzLnByb2Quc3RhdHVzLmltBnZfACw2JWJvb3QtMDEuZG8tYW1zMy5zdGF0dXMucHJvZC5zdGF0dXMuaW0GAbveA4Jyc40AEAUAAQAgAEAAgAEAiXNlY3AyNTZrMaEC3rRtFQSgc24uWewzXaxTY8hDAHB8sgnxr9k8Rjb5GeSDdGNwgnZfg3VkcIIjKIV3YWt1Mg0

View File

@ -145,13 +145,20 @@ proc publishMessages(
lightpushContentTopic,
renderMsgSize,
)
let publishStartTime = Moment.now()
let wlpRes = await wakuNode.legacyLightpushPublish(
some(lightpushPubsubTopic), message, actualServicePeer
)
let publishDuration = Moment.now() - publishStartTime
let msgHash = computeMessageHash(lightpushPubsubTopic, message).to0xHex
if wlpRes.isOk():
lpt_publish_duration_seconds.observe(publishDuration.milliseconds.float / 1000)
sentMessages[messagesSent] = (hash: msgHash, relayed: true)
notice "published message using lightpush",
index = messagesSent + 1,
@ -251,7 +258,7 @@ proc setupAndPublish*(
asyncSpawn publishMessages(
wakuNode,
servicePeer,
conf.pubsubTopics[0],
conf.getPubsubTopic(),
conf.contentTopics[0],
conf.numMessages,
(min: parsedMinMsgSize, max: parsedMaxMsgSize),

View File

@ -99,7 +99,7 @@ when isMainModule:
wakuConf.dnsAddrs = true
wakuConf.dnsAddrsNameServers = @[parseIpAddress("8.8.8.8"), parseIpAddress("1.1.1.1")]
wakuConf.pubsubTopics = conf.pubsubTopics
wakuConf.shards = @[conf.shard]
wakuConf.contentTopics = conf.contentTopics
wakuConf.clusterId = conf.clusterId
## TODO: Depending on the tester needs we might extend here with shards, clusterId, etc...
@ -118,6 +118,7 @@ when isMainModule:
wakuConf.store = false
wakuConf.rest = false
wakuConf.relayServiceRatio = "40:60"
# NOTE: {.threadvar.} is used to make the global variable GC-safe for the closure that uses it
# It will always be called from the main thread anyway.
@ -202,10 +203,8 @@ when isMainModule:
var codec = WakuLightPushCodec
# mount the relevant client; for PX, the filter client must be mounted beforehand
if conf.testFunc == TesterFunctionality.SENDER:
wakuApp.node.mountLegacyLightPushClient()
codec = WakuLightPushCodec
else:
waitFor wakuApp.node.mountFilterClient()
codec = WakuFilterSubscribeCodec
var lookForServiceNode = false

View File

@ -47,3 +47,10 @@ declarePublicGauge lpt_px_peers,
declarePublicGauge lpt_dialed_peers, "Number of peers successfully dialed", ["agent"]
declarePublicGauge lpt_dial_failures, "Number of dial failures by cause", ["agent"]
declarePublicHistogram lpt_publish_duration_seconds,
"duration to lightpush messages",
buckets = [
0.005, 0.01, 0.025, 0.05, 0.075, 0.1, 0.25, 0.5, 0.75, 1.0, 2.5, 5.0, 7.5, 10.0,
15.0, 20.0, 30.0, Inf,
]

View File

@ -5,10 +5,10 @@ IP=$(ip a | grep "inet " | grep -Fv 127.0.0.1 | sed 's/.*inet \([^/]*\).*/\1/')
echo "Service node IP: ${IP}"
if [ -n "${PUBSUB}" ]; then
PUBSUB=--pubsub-topic="${PUBSUB}"
if [ -n "${SHARD}" ]; then
SHARD=--shard="${SHARD}"
else
PUBSUB=--pubsub-topic="/waku/2/rs/66/0"
SHARD=--shard="0"
fi
if [ -n "${CLUSTER_ID}" ]; then
@ -59,5 +59,5 @@ exec /usr/bin/wakunode\
--metrics-server-port=8003\
--metrics-server-address=0.0.0.0\
--nat=extip:${IP}\
${PUBSUB}\
${SHARD}\
${CLUSTER_ID}

View File

@ -93,10 +93,10 @@ else
FULL_NODE=--bootstrap-node="${SERIVCE_NODE_ADDR}"
fi
if [ -n "${PUBSUB}" ]; then
PUBSUB=--pubsub-topic="${PUBSUB}"
if [ -n "${SHARD}" ]; then
SHARD=--shard="${SHARD}"
else
PUBSUB=--pubsub-topic="/waku/2/rs/66/0"
SHARD=--shard="0"
fi
if [ -n "${CONTENT_TOPIC}" ]; then
@ -128,19 +128,25 @@ if [ -n "${MESSAGE_INTERVAL_MILLIS}" ]; then
MESSAGE_INTERVAL_MILLIS=--message-interval="${MESSAGE_INTERVAL_MILLIS}"
fi
if [ -n "${LOG_LEVEL}" ]; then
LOG_LEVEL=--log-level=${LOG_LEVEL}
else
LOG_LEVEL=--log-level=INFO
fi
echo "Running binary: ${BINARY_PATH}"
echo "Tester node: ${FUNCTION}"
echo "Using service node: ${SERIVCE_NODE_ADDR}"
echo "My external IP: ${MY_EXT_IP}"
exec "${BINARY_PATH}"\
--log-level=INFO\
--nat=extip:${MY_EXT_IP}\
--test-peers\
${LOG_LEVEL}\
${FULL_NODE}\
${MESSAGE_INTERVAL_MILLIS}\
${NUM_MESSAGES}\
${PUBSUB}\
${SHARD}\
${CONTENT_TOPIC}\
${CLUSTER_ID}\
${FUNCTION}\

View File

@ -48,10 +48,10 @@ fi
MY_EXT_IP=$(wget -qO- --no-check-certificate https://api4.ipify.org)
if [ -n "${PUBSUB}" ]; then
PUBSUB=--pubsub-topic="${PUBSUB}"
if [ -n "${SHARD}" ]; then
SHARD=--shard="${SHARD}"
else
PUBSUB=--pubsub-topic="/waku/2/rs/66/0"
SHARD=--shard="0"
fi
if [ -n "${CONTENT_TOPIC}" ]; then
@ -83,19 +83,25 @@ if [ -n "${MESSAGE_INTERVAL_MILLIS}" ]; then
MESSAGE_INTERVAL_MILLIS=--message-interval="${MESSAGE_INTERVAL_MILLIS}"
fi
if [ -n "${LOG_LEVEL}" ]; then
LOG_LEVEL=--log-level=${LOG_LEVEL}
else
LOG_LEVEL=--log-level=INFO
fi
echo "Running binary: ${BINARY_PATH}"
echo "Node function is: ${FUNCTION}"
echo "Using service/bootstrap node as: ${NODE_ARG}"
echo "My external IP: ${MY_EXT_IP}"
exec "${BINARY_PATH}"\
--log-level=INFO\
--nat=extip:${MY_EXT_IP}\
--test-peers\
${LOG_LEVEL}\
${NODE_ARG}\
${MESSAGE_INTERVAL_MILLIS}\
${NUM_MESSAGES}\
${PUBSUB}\
${SHARD}\
${CONTENT_TOPIC}\
${CLUSTER_ID}\
${FUNCTION}\

View File

@ -48,10 +48,10 @@ fi
MY_EXT_IP=$(wget -qO- --no-check-certificate https://api4.ipify.org)
if [ -n "${PUBSUB}" ]; then
PUBSUB=--pubsub-topic="${PUBSUB}"
if [ -n "${SHARD}" ]; then
SHARD=--shard=${SHARD}
else
PUBSUB=--pubsub-topic="/waku/2/rs/66/0"
SHARD=--shard=0
fi
if [ -n "${CONTENT_TOPIC}" ]; then
@ -79,8 +79,14 @@ if [ -n "${NUM_MESSAGES}" ]; then
NUM_MESSAGES=--num-messages="${NUM_MESSAGES}"
fi
if [ -n "${DELAY_MESSAGES}" ]; then
DELAY_MESSAGES=--delay-messages="${DELAY_MESSAGES}"
if [ -n "${MESSAGE_INTERVAL_MILLIS}" ]; then
MESSAGE_INTERVAL_MILLIS=--message-interval="${MESSAGE_INTERVAL_MILLIS}"
fi
if [ -n "${LOG_LEVEL}" ]; then
LOG_LEVEL=--log-level=${LOG_LEVEL}
else
LOG_LEVEL=--log-level=INFO
fi
echo "Running binary: ${BINARY_PATH}"
@ -89,12 +95,12 @@ echo "Using service/bootstrap node as: ${NODE_ARG}"
echo "My external IP: ${MY_EXT_IP}"
exec "${BINARY_PATH}"\
--log-level=INFO\
--nat=extip:${MY_EXT_IP}\
${LOG_LEVEL}\
${NODE_ARG}\
${DELAY_MESSAGES}\
${MESSAGE_INTERVAL_MILLIS}\
${NUM_MESSAGES}\
${PUBSUB}\
${SHARD}\
${CONTENT_TOPIC}\
${CLUSTER_ID}\
${FUNCTION}\

View File

@ -189,14 +189,14 @@ proc pxLookupServiceNode*(
if conf.testPeers:
let peersOpt =
await tryCallAllPxPeers(node.peerManager, codec, conf.pubsubTopics[0])
await tryCallAllPxPeers(node.peerManager, codec, conf.getPubsubTopic())
if peersOpt.isSome():
info "Found service peers for codec",
codec = codec, peer_count = peersOpt.get().len()
return ok(peersOpt.get().len > 0)
else:
let peerOpt =
await selectRandomCapablePeer(node.peerManager, codec, conf.pubsubTopics[0])
await selectRandomCapablePeer(node.peerManager, codec, conf.getPubsubTopic())
if peerOpt.isSome():
info "Found service peer for codec", codec = codec, peer = peerOpt.get()
return ok(true)

View File

@ -18,6 +18,7 @@ import
common/logging,
factory/external_config,
waku_core,
waku_core/topics/pubsub_topic,
]
export confTomlDefs, confTomlNet, confEnvvarDefs, confEnvvarNet
@ -95,18 +96,9 @@ type LiteProtocolTesterConf* = object
name: "message-interval"
.}: uint32
pubsubTopics* {.
desc: "Default pubsub topic to subscribe to. Argument may be repeated.",
defaultValue: @[LitePubsubTopic],
name: "pubsub-topic"
.}: seq[PubsubTopic]
shard* {.desc: "Shards index to subscribe to. ", defaultValue: 0, name: "shard".}:
uint16
## TODO: extend lite protocol tester configuration based on testing needs
# shards* {.
# desc: "Shards index to subscribe to [0..NUM_SHARDS_IN_NETWORK-1]. Argument may be repeated.",
# defaultValue: @[],
# name: "shard"
# .}: seq[uint16]
contentTopics* {.
desc: "Default content topic to subscribe to. Argument may be repeated.",
defaultValue: @[LiteContentTopic],
@ -195,4 +187,7 @@ proc load*(T: type LiteProtocolTesterConf, version = ""): ConfResult[T] =
except CatchableError:
err(getCurrentExceptionMsg())
proc getPubsubTopic*(conf: LiteProtocolTesterConf): PubsubTopic =
return $RelayShard(clusterId: conf.clusterId, shardId: conf.shard)
{.pop.}
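
As a quick reference for the PUBSUB-to-SHARD switch in this change, a shell sketch of the topic string `getPubsubTopic` produces (`RelayShard` stringifies to `/waku/2/rs/<cluster_id>/<shard_id>`):
```
# status.prod values used elsewhere in this change
CLUSTER_ID=16
SHARD=32
echo "/waku/2/rs/${CLUSTER_ID}/${SHARD}"   # -> /waku/2/rs/16/32
```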

View File

@ -0,0 +1,18 @@
## App description
This is a very simple example that shows how to invoke libwaku functions from a C program.
## Build
1. Open terminal
2. cd to nwaku root folder
3. make cwaku_example -j8
This will create libwaku.so and the cwaku_example binary in the build folder.
## Run
1. Open terminal
2. cd to nwaku root folder
3. export LD_LIBRARY_PATH=build
4. `./build/cwaku_example --host=0.0.0.0 --port=60001`
Use `./build/cwaku_example --help` to see some other options.
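
Putting the steps above together (run from the nwaku root):
```
make cwaku_example -j8
export LD_LIBRARY_PATH=build
./build/cwaku_example --host=0.0.0.0 --port=60001
# ./build/cwaku_example --help   # shows the remaining options
```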

View File

@ -14,7 +14,6 @@
#include "base64.h"
#include "../../library/libwaku.h"
// Shared synchronization variables
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
@ -29,7 +28,6 @@ void waitForCallback() {
pthread_mutex_unlock(&mutex);
}
#define WAKU_CALL(call) \
do { \
int ret = call; \
@ -107,6 +105,13 @@ static error_t parse_opt(int key, char *arg, struct argp_state *state) {
return 0;
}
void signal_cond() {
pthread_mutex_lock(&mutex);
callback_executed = 1;
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
}
static struct argp argp = { options, parse_opt, args_doc, doc, 0, 0, 0 };
void event_handler(int callerRet, const char* msg, size_t len, void* userData) {
@ -118,10 +123,7 @@ void event_handler(int callerRet, const char* msg, size_t len, void* userData) {
printf("Receiving event: %s\n", msg);
}
pthread_mutex_lock(&mutex);
callback_executed = 1;
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
signal_cond();
}
void on_event_received(int callerRet, const char* msg, size_t len, void* userData) {
@ -142,6 +144,7 @@ void handle_content_topic(int callerRet, const char* msg, size_t len, void* user
contentTopic = malloc(len * sizeof(char) + 1);
strcpy(contentTopic, msg);
signal_cond();
}
char* publishResponse = NULL;
@ -158,33 +161,30 @@ void handle_publish_ok(int callerRet, const char* msg, size_t len, void* userDat
#define MAX_MSG_SIZE 65535
void publish_message(char* pubsubTopic, const char* msg) {
void publish_message(const char* msg) {
char jsonWakuMsg[MAX_MSG_SIZE];
char *msgPayload = b64_encode(msg, strlen(msg));
WAKU_CALL( waku_content_topic(RET_OK,
WAKU_CALL( waku_content_topic(ctx,
"appName",
1,
"contentTopicName",
"encoding",
handle_content_topic,
userData) );
snprintf(jsonWakuMsg,
MAX_MSG_SIZE,
"{\"payload\":\"%s\",\"content_topic\":\"%s\"}",
"{\"payload\":\"%s\",\"contentTopic\":\"%s\"}",
msgPayload, contentTopic);
free(msgPayload);
WAKU_CALL( waku_relay_publish(&ctx,
pubsubTopic,
WAKU_CALL( waku_relay_publish(ctx,
"/waku/2/rs/16/32",
jsonWakuMsg,
10000 /*timeout ms*/,
event_handler,
userData) );
printf("waku relay response [%s]\n", publishResponse);
}
void show_help_and_exit() {
@ -194,20 +194,12 @@ void show_help_and_exit() {
void print_default_pubsub_topic(int callerRet, const char* msg, size_t len, void* userData) {
printf("Default pubsub topic: %s\n", msg);
pthread_mutex_lock(&mutex);
callback_executed = 1;
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
signal_cond();
}
void print_waku_version(int callerRet, const char* msg, size_t len, void* userData) {
printf("Git Version: %s\n", msg);
pthread_mutex_lock(&mutex);
callback_executed = 1;
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
signal_cond();
}
// Beginning of UI program logic
@ -236,9 +228,6 @@ void handle_user_input() {
return;
}
int c;
while ( (c = getchar()) != '\n' && c != EOF ) { }
switch (atoi(cmd))
{
case SUBSCRIBE_TOPIC_MENU:
@ -247,7 +236,7 @@ void handle_user_input() {
char pubsubTopic[128];
scanf("%127s", pubsubTopic);
WAKU_CALL( waku_relay_subscribe(&ctx,
WAKU_CALL( waku_relay_subscribe(ctx,
pubsubTopic,
event_handler,
userData) );
@ -262,21 +251,17 @@ void handle_user_input() {
printf("e.g.: /ip4/127.0.0.1/tcp/60001/p2p/16Uiu2HAmVFXtAfSj4EiR7mL2KvL4EE2wztuQgUSBoj2Jx2KeXFLN\n");
char peerAddr[512];
scanf("%511s", peerAddr);
WAKU_CALL(waku_connect(&ctx, peerAddr, 10000 /* timeoutMs */, event_handler, userData));
WAKU_CALL(waku_connect(ctx, peerAddr, 10000 /* timeoutMs */, event_handler, userData));
show_main_menu();
break;
case PUBLISH_MESSAGE_MENU:
{
printf("Indicate the Pubsubtopic:\n");
char pubsubTopic[128];
scanf("%127s", pubsubTopic);
printf("Type the message tp publish:\n");
printf("Type the message to publish:\n");
char msg[1024];
scanf("%1023s", msg);
publish_message(pubsubTopic, msg);
publish_message(msg);
show_main_menu();
}
@ -311,24 +296,24 @@ int main(int argc, char** argv) {
char jsonConfig[5000];
snprintf(jsonConfig, 5000, "{ \
\"clusterId\": 16, \
\"shards\": [ 1, 32, 64, 128, 256 ], \
\"listenAddress\": \"%s\", \
\"tcpPort\": %d, \
\"nodekey\": \"%s\", \
\"relay\": %s, \
\"store\": %s, \
\"storeMessageDbUrl\": \"%s\", \
\"storeMessageRetentionPolicy\": \"%s\", \
\"storeMaxNumDbConnections\": %d , \
\"logLevel\": \"DEBUG\", \
\"logLevel\": \"FATAL\", \
\"discv5Discovery\": true, \
\"discv5BootstrapNodes\": \
[\"enr:-QESuEB4Dchgjn7gfAvwB00CxTA-nGiyk-aALI-H4dYSZD3rUk7bZHmP8d2U6xDiQ2vZffpo45Jp7zKNdnwDUx6g4o6XAYJpZIJ2NIJpcIRA4VDAim11bHRpYWRkcnO4XAArNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwAtNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQOvD3S3jUNICsrOILlmhENiWAMmMVlAl6-Q8wRB7hidY4N0Y3CCdl-DdWRwgiMohXdha3UyDw\", \"enr:-QEkuEBIkb8q8_mrorHndoXH9t5N6ZfD-jehQCrYeoJDPHqT0l0wyaONa2-piRQsi3oVKAzDShDVeoQhy0uwN1xbZfPZAYJpZIJ2NIJpcIQiQlleim11bHRpYWRkcnO4bgA0Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwA2Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQKnGt-GSgqPSf3IAPM7bFgTlpczpMZZLF3geeoNNsxzSoN0Y3CCdl-DdWRwgiMohXdha3UyDw\"], \
\"discv5UdpPort\": 9999, \
\"dnsDiscoveryUrl\": \"enrtree://AOGYWMBYOUIMOENHXCHILPKY3ZRFEULMFI4DOM442QSZ73TT2A7VI@test.waku.nodes.status.im\", \
\"dnsDiscoveryUrl\": \"enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im\", \
\"dnsDiscoveryNameServers\": [\"8.8.8.8\", \"1.0.0.1\"] \
}", cfgNode.host,
cfgNode.port,
cfgNode.key,
cfgNode.relay ? "true":"false",
cfgNode.store ? "true":"false",
cfgNode.storeDbUrl,
@ -351,14 +336,6 @@ int main(int argc, char** argv) {
WAKU_CALL( waku_listen_addresses(ctx, event_handler, userData) );
printf("Establishing connection with: %s\n", cfgNode.peers);
WAKU_CALL( waku_connect(ctx,
cfgNode.peers,
10000 /* timeoutMs */,
event_handler,
userData) );
WAKU_CALL( waku_relay_subscribe(ctx,
"/waku/2/rs/0/0",
event_handler,

examples/cpp/README.md (new file, 18 lines)
View File

@ -0,0 +1,18 @@
## App description
This is a very simple example that shows how to invoke libwaku functions from a C++ program.
## Build
1. Open terminal
2. cd to nwaku root folder
3. make cppwaku_example -j8
This will create libwaku.so and the cppwaku_example binary in the build folder.
## Run
1. Open terminal
2. cd to nwaku root folder
3. export LD_LIBRARY_PATH=build
4. `./build/cppwaku_example --host=0.0.0.0 --port=60001`
Use `./build/cppwaku_example --help` to see some other options.
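
Putting the steps above together (run from the nwaku root):
```
make cppwaku_example -j8
export LD_LIBRARY_PATH=build
./build/cppwaku_example --host=0.0.0.0 --port=60001
# ./build/cppwaku_example --help   # shows the remaining options
```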

View File

@ -16,12 +16,34 @@
#include "base64.h"
#include "../../library/libwaku.h"
// Shared synchronization variables
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
int callback_executed = 0;
void waitForCallback() {
pthread_mutex_lock(&mutex);
while (!callback_executed) {
pthread_cond_wait(&cond, &mutex);
}
callback_executed = 0;
pthread_mutex_unlock(&mutex);
}
void signal_cond() {
pthread_mutex_lock(&mutex);
callback_executed = 1;
pthread_cond_signal(&cond);
pthread_mutex_unlock(&mutex);
}
#define WAKU_CALL(call) \
do { \
int ret = call; \
if (ret != 0) { \
std::cout << "Failed the call to: " << #call << ". Code: " << ret << "\n"; \
} \
waitForCallback(); \
} while (0)
struct ConfigNode {
@ -78,6 +100,24 @@ static error_t parse_opt(int key, char *arg, struct argp_state *state) {
return 0;
}
void event_handler(const char* msg, size_t len) {
printf("Receiving event: %s\n", msg);
}
void handle_error(const char* msg, size_t len) {
printf("handle_error: %s\n", msg);
exit(1);
}
template <class F>
auto cify(F&& f) {
static F fn = std::forward<F>(f);
return [](int callerRet, const char* msg, size_t len, void* userData) {
signal_cond();
return fn(msg, len);
};
}
static struct argp argp = { options, parse_opt, args_doc, doc, 0, 0, 0 };
// Beginning of UI program logic
@ -98,7 +138,7 @@ void show_main_menu() {
printf("\t3.) Publish a message\n");
}
void handle_user_input() {
void handle_user_input(void* ctx) {
char cmd[1024];
memset(cmd, 0, 1024);
int numRead = read(0, cmd, 1024);
@ -106,9 +146,6 @@ void handle_user_input() {
return;
}
int c;
while ( (c = getchar()) != '\n' && c != EOF ) { }
switch (atoi(cmd))
{
case SUBSCRIBE_TOPIC_MENU:
@ -116,10 +153,14 @@ void handle_user_input() {
printf("Indicate the Pubsubtopic to subscribe:\n");
char pubsubTopic[128];
scanf("%127s", pubsubTopic);
// if (!waku_relay_subscribe(pubsubTopic, &mResp)) {
// printf("Error subscribing to PubsubTopic: %s\n", mResp->data);
// }
// printf("Waku Relay subscription response: %s\n", mResp->data);
WAKU_CALL( waku_relay_subscribe(ctx,
pubsubTopic,
cify([&](const char* msg, size_t len) {
event_handler(msg, len);
}),
nullptr) );
printf("The subscription went well\n");
show_main_menu();
}
@ -130,41 +171,51 @@ void handle_user_input() {
printf("e.g.: /ip4/127.0.0.1/tcp/60001/p2p/16Uiu2HAmVFXtAfSj4EiR7mL2KvL4EE2wztuQgUSBoj2Jx2KeXFLN\n");
char peerAddr[512];
scanf("%511s", peerAddr);
// if (!waku_connect(peerAddr, 10000 /* timeoutMs */, &mResp)) {
// printf("Couldn't connect to the remote peer: %s\n", mResp->data);
// }
WAKU_CALL( waku_connect(ctx,
peerAddr,
10000 /* timeoutMs */,
cify([&](const char* msg, size_t len) {
event_handler(msg, len);
}),
nullptr));
show_main_menu();
break;
case PUBLISH_MESSAGE_MENU:
{
printf("Indicate the Pubsubtopic:\n");
char pubsubTopic[128];
scanf("%127s", pubsubTopic);
printf("Type the message tp publish:\n");
printf("Type the message to publish:\n");
char msg[1024];
scanf("%1023s", msg);
char jsonWakuMsg[1024];
char jsonWakuMsg[2048];
std::vector<char> msgPayload;
b64_encode(msg, strlen(msg), msgPayload);
// waku_content_topic("appName",
// 1,
// "contentTopicName",
// "encoding",
// &mResp);
std::string contentTopic;
waku_content_topic(ctx,
"appName",
1,
"contentTopicName",
"encoding",
cify([&contentTopic](const char* msg, size_t len) {
contentTopic = msg;
}),
nullptr);
// snprintf(jsonWakuMsg,
// 1024,
// "{\"payload\":\"%s\",\"content_topic\":\"%s\"}",
// msgPayload, mResp->data);
snprintf(jsonWakuMsg,
2048,
"{\"payload\":\"%s\",\"contentTopic\":\"%s\"}",
msgPayload.data(), contentTopic.c_str());
// free(msgPayload);
WAKU_CALL( waku_relay_publish(ctx,
"/waku/2/rs/16/32",
jsonWakuMsg,
10000 /*timeout ms*/,
cify([&](const char* msg, size_t len) {
event_handler(msg, len);
}),
nullptr) );
// waku_relay_publish(pubsubTopic, jsonWakuMsg, 10000 /*timeout ms*/, &mResp);
// printf("waku relay response [%s]\n", mResp->data);
show_main_menu();
}
break;
@ -181,23 +232,6 @@ void show_help_and_exit() {
exit(1);
}
void event_handler(const char* msg, size_t len) {
printf("Receiving message %s\n", msg);
}
void handle_error(const char* msg, size_t len) {
printf("Error: %s\n", msg);
exit(1);
}
template <class F>
auto cify(F&& f) {
static F fn = std::forward<F>(f);
return [](const char* msg, size_t len) {
return fn(msg, len);
};
}
int main(int argc, char** argv) {
struct ConfigNode cfgNode;
// default values
@ -212,60 +246,86 @@ int main(int argc, char** argv) {
show_help_and_exit();
}
char jsonConfig[1024];
snprintf(jsonConfig, 1024, "{ \
char jsonConfig[2048];
snprintf(jsonConfig, 2048, "{ \
\"host\": \"%s\", \
\"port\": %d, \
\"key\": \"%s\", \
\"relay\": %s, \
\"logLevel\": \"DEBUG\" \
\"relay\": true, \
\"clusterId\": 16, \
\"shards\": [ 1, 32, 64, 128, 256 ], \
\"logLevel\": \"FATAL\", \
\"discv5Discovery\": true, \
\"discv5BootstrapNodes\": \
[\"enr:-QESuEB4Dchgjn7gfAvwB00CxTA-nGiyk-aALI-H4dYSZD3rUk7bZHmP8d2U6xDiQ2vZffpo45Jp7zKNdnwDUx6g4o6XAYJpZIJ2NIJpcIRA4VDAim11bHRpYWRkcnO4XAArNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwAtNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQOvD3S3jUNICsrOILlmhENiWAMmMVlAl6-Q8wRB7hidY4N0Y3CCdl-DdWRwgiMohXdha3UyDw\", \"enr:-QEkuEBIkb8q8_mrorHndoXH9t5N6ZfD-jehQCrYeoJDPHqT0l0wyaONa2-piRQsi3oVKAzDShDVeoQhy0uwN1xbZfPZAYJpZIJ2NIJpcIQiQlleim11bHRpYWRkcnO4bgA0Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwA2Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQKnGt-GSgqPSf3IAPM7bFgTlpczpMZZLF3geeoNNsxzSoN0Y3CCdl-DdWRwgiMohXdha3UyDw\"], \
\"discv5UdpPort\": 9999, \
\"dnsDiscoveryUrl\": \"enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im\", \
\"dnsDiscoveryNameServers\": [\"8.8.8.8\", \"1.0.0.1\"] \
}", cfgNode.host,
cfgNode.port,
cfgNode.key,
cfgNode.relay ? "true":"false");
cfgNode.port);
WAKU_CALL(waku_new(jsonConfig, cify([](const char* msg, size_t len) {
std::cout << "Error: " << msg << std::endl;
exit(1);
})));
void* ctx =
waku_new(jsonConfig,
cify([](const char* msg, size_t len) {
std::cout << "waku_new feedback: " << msg << std::endl;
}
),
nullptr
);
waitForCallback();
// example on how to retrieve a value from the `libwaku` callback.
std::string defaultPubsubTopic;
WAKU_CALL(waku_default_pubsub_topic(cify([&defaultPubsubTopic](const char* msg, size_t len) {
defaultPubsubTopic = msg;
})));
WAKU_CALL(
waku_default_pubsub_topic(
ctx,
cify([&defaultPubsubTopic](const char* msg, size_t len) {
defaultPubsubTopic = msg;
}
),
nullptr));
std::cout << "Default pubsub topic: " << defaultPubsubTopic << std::endl;
WAKU_CALL(waku_version(cify([&](const char* msg, size_t len) {
std::cout << "Git Version: " << msg << std::endl;
})));
WAKU_CALL(waku_version(ctx,
cify([&](const char* msg, size_t len) {
std::cout << "Git Version: " << msg << std::endl;
}),
nullptr));
printf("Bind addr: %s:%u\n", cfgNode.host, cfgNode.port);
printf("Waku Relay enabled: %s\n", cfgNode.relay == 1 ? "YES": "NO");
std::string pubsubTopic;
WAKU_CALL(waku_pubsub_topic("example", cify([&](const char* msg, size_t len) {
pubsubTopic = msg;
})));
WAKU_CALL(waku_pubsub_topic(ctx,
"example",
cify([&](const char* msg, size_t len) {
pubsubTopic = msg;
}),
nullptr));
std::cout << "Custom pubsub topic: " << pubsubTopic << std::endl;
waku_set_event_callback(event_handler);
waku_start();
waku_set_event_callback(ctx,
cify([&](const char* msg, size_t len) {
event_handler(msg, len);
}),
nullptr);
WAKU_CALL( waku_connect(cfgNode.peers,
10000 /* timeoutMs */,
handle_error) );
WAKU_CALL( waku_start(ctx,
cify([&](const char* msg, size_t len) {
event_handler(msg, len);
}),
nullptr));
WAKU_CALL( waku_relay_subscribe(defaultPubsubTopic.c_str(),
handle_error) );
std::cout << "Establishing connection with: " << cfgNode.peers << std::endl;
WAKU_CALL(waku_connect(cfgNode.peers, 10000 /* timeoutMs */, handle_error));
WAKU_CALL( waku_relay_subscribe(ctx,
defaultPubsubTopic.c_str(),
cify([&](const char* msg, size_t len) {
event_handler(msg, len);
}),
nullptr) );
show_main_menu();
while(1) {
handle_user_input();
handle_user_input(ctx);
}
}

View File

@ -9,9 +9,6 @@ type JsonConnectionChangeEvent* = ref object of JsonEvent
proc new*(
T: type JsonConnectionChangeEvent, peerId: string, peerEvent: PeerEventKind
): T =
# Returns a JsonConnectionChangeEvent event as indicated in
# https://rfc.vac.dev/spec/36/#jsonmessageevent-type
return JsonConnectionChangeEvent(
eventType: "connection_change", peerId: peerId, peerEvent: peerEvent
)

View File

@ -71,7 +71,7 @@ type JsonMessageEvent* = ref object of JsonEvent
proc new*(T: type JsonMessageEvent, pubSubTopic: string, msg: WakuMessage): T =
# Returns a WakuMessage event as indicated in
# https://rfc.vac.dev/spec/36/#jsonmessageevent-type
# https://github.com/vacp2p/rfc/blob/master/content/docs/rfcs/36/README.md#jsonmessageevent-type
var payload = newSeq[byte](len(msg.payload))
if len(msg.payload) != 0:

View File

@ -10,9 +10,6 @@ type JsonTopicHealthChangeEvent* = ref object of JsonEvent
proc new*(
T: type JsonTopicHealthChangeEvent, pubsubTopic: string, topicHealth: TopicHealth
): T =
# Returns a TopicHealthChange event as indicated in
# https://rfc.vac.dev/spec/36/#jsonmessageevent-type
return JsonTopicHealthChangeEvent(
eventType: "relay_topic_health_change",
pubsubTopic: pubsubTopic,

View File

@ -42,7 +42,8 @@ import
template checkLibwakuParams*(
ctx: ptr WakuContext, callback: WakuCallBack, userData: pointer
) =
ctx[].userData = userData
if not isNil(ctx):
ctx[].userData = userData
if isNil(callback):
return RET_MISSING_CALLBACK
@ -224,19 +225,11 @@ proc waku_content_topic(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let appStr = appName.alloc()
let ctnStr = contentTopicName.alloc()
let encodingStr = encoding.alloc()
let contentTopic = fmt"/{$appStr}/{appVersion}/{$ctnStr}/{$encodingStr}"
let contentTopic = fmt"/{$appName}/{$appVersion}/{$contentTopicName}/{$encoding}"
callback(
RET_OK, unsafeAddr contentTopic[0], cast[csize_t](len(contentTopic)), userData
)
deallocShared(appStr)
deallocShared(ctnStr)
deallocShared(encodingStr)
return RET_OK
proc waku_pubsub_topic(
@ -247,15 +240,11 @@ proc waku_pubsub_topic(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let topicNameStr = topicName.alloc()
let outPubsubTopic = fmt"/waku/2/{$topicNameStr}"
let outPubsubTopic = fmt"/waku/2/{$topicName}"
callback(
RET_OK, unsafeAddr outPubsubTopic[0], cast[csize_t](len(outPubsubTopic)), userData
)
deallocShared(topicNameStr)
return RET_OK
proc waku_default_pubsub_topic(
@ -288,12 +277,9 @@ proc waku_relay_publish(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let jwm = jsonWakuMessage.alloc()
defer:
deallocShared(jwm)
var jsonMessage: JsonMessage
try:
let jsonContent = parseJson($jwm)
let jsonContent = parseJson($jsonWakuMessage)
jsonMessage = JsonMessage.fromJsonNode(jsonContent).valueOr:
raise newException(JsonParsingError, $error)
except JsonParsingError:
@ -306,14 +292,10 @@ proc waku_relay_publish(
callback(RET_ERR, unsafeAddr msg[0], cast[csize_t](len(msg)), userData)
return RET_ERR
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(RelayMsgType.PUBLISH, pst, nil, wakuMessage),
RelayRequest.createShared(RelayMsgType.PUBLISH, pubSubTopic, nil, wakuMessage),
callback,
userData,
)
@ -353,15 +335,12 @@ proc waku_relay_subscribe(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
var cb = onReceivedMessage(ctx)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(RelayMsgType.SUBSCRIBE, pst, WakuRelayHandler(cb)),
RelayRequest.createShared(RelayMsgType.SUBSCRIBE, pubSubTopic, WakuRelayHandler(cb)),
callback,
userData,
)
@ -376,9 +355,6 @@ proc waku_relay_add_protected_shard(
): cint {.dynlib, exportc, cdecl.} =
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pubk = publicKey.alloc()
defer:
deallocShared(pubk)
handleRequest(
ctx,
@ -387,7 +363,7 @@ proc waku_relay_add_protected_shard(
RelayMsgType.ADD_PROTECTED_SHARD,
clusterId = clusterId,
shardId = shardId,
publicKey = pubk,
publicKey = publicKey,
),
callback,
userData,
@ -402,15 +378,11 @@ proc waku_relay_unsubscribe(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(
RelayMsgType.UNSUBSCRIBE, pst, WakuRelayHandler(onReceivedMessage(ctx))
RelayMsgType.UNSUBSCRIBE, pubSubTopic, WakuRelayHandler(onReceivedMessage(ctx))
),
callback,
userData,
@ -425,14 +397,10 @@ proc waku_relay_get_num_connected_peers(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(RelayMsgType.NUM_CONNECTED_PEERS, pst),
RelayRequest.createShared(RelayMsgType.NUM_CONNECTED_PEERS, pubSubTopic),
callback,
userData,
)
@ -446,14 +414,10 @@ proc waku_relay_get_connected_peers(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(RelayMsgType.LIST_CONNECTED_PEERS, pst),
RelayRequest.createShared(RelayMsgType.LIST_CONNECTED_PEERS, pubSubTopic),
callback,
userData,
)
@ -467,14 +431,10 @@ proc waku_relay_get_num_peers_in_mesh(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(RelayMsgType.NUM_MESH_PEERS, pst),
RelayRequest.createShared(RelayMsgType.NUM_MESH_PEERS, pubSubTopic),
callback,
userData,
)
@ -488,14 +448,10 @@ proc waku_relay_get_peers_in_mesh(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let pst = pubSubTopic.alloc()
defer:
deallocShared(pst)
handleRequest(
ctx,
RequestType.RELAY,
RelayRequest.createShared(RelayMsgType.LIST_MESH_PEERS, pst),
RelayRequest.createShared(RelayMsgType.LIST_MESH_PEERS, pubSubTopic),
callback,
userData,
)
@ -565,15 +521,9 @@ proc waku_lightpush_publish(
initializeLibrary()
checkLibwakuParams(ctx, callback, userData)
let jwm = jsonWakuMessage.alloc()
let pst = pubSubTopic.alloc()
defer:
deallocShared(jwm)
deallocShared(pst)
var jsonMessage: JsonMessage
try:
let jsonContent = parseJson($jwm)
let jsonContent = parseJson($jsonWakuMessage)
jsonMessage = JsonMessage.fromJsonNode(jsonContent).valueOr:
raise newException(JsonParsingError, $error)
except JsonParsingError:
@ -589,7 +539,7 @@ proc waku_lightpush_publish(
handleRequest(
ctx,
RequestType.LIGHTPUSH,
LightpushRequest.createShared(LightpushMsgType.PUBLISH, pst, wakuMessage),
LightpushRequest.createShared(LightpushMsgType.PUBLISH, pubSubTopic, wakuMessage),
callback,
userData,
)

View File

@ -24,50 +24,49 @@ func fromJsonNode(
T: type StoreRequest, jsonContent: JsonNode
): Result[StoreQueryRequest, string] =
var contentTopics: seq[string]
if jsonContent.contains("content_topics"):
if jsonContent.contains("contentTopics"):
contentTopics = collect(newSeq):
for cTopic in jsonContent["content_topics"].getElems():
for cTopic in jsonContent["contentTopics"].getElems():
cTopic.getStr()
var msgHashes: seq[WakuMessageHash]
if jsonContent.contains("message_hashes"):
for hashJsonObj in jsonContent["message_hashes"].getElems():
if jsonContent.contains("messageHashes"):
for hashJsonObj in jsonContent["messageHashes"].getElems():
let hash = hashJsonObj.getStr().hexToHash().valueOr:
return err("Failed converting message hash hex string to bytes: " & error)
msgHashes.add(hash)
let pubsubTopic =
if jsonContent.contains("pubsub_topic"):
some(jsonContent["pubsub_topic"].getStr())
if jsonContent.contains("pubsubTopic"):
some(jsonContent["pubsubTopic"].getStr())
else:
none(string)
let paginationCursor =
if jsonContent.contains("pagination_cursor"):
let hash = jsonContent["pagination_cursor"].getStr().hexToHash().valueOr:
return
err("Failed converting pagination_cursor hex string to bytes: " & error)
if jsonContent.contains("paginationCursor"):
let hash = jsonContent["paginationCursor"].getStr().hexToHash().valueOr:
return err("Failed converting paginationCursor hex string to bytes: " & error)
some(hash)
else:
none(WakuMessageHash)
let paginationForwardBool = jsonContent["pagination_forward"].getBool()
let paginationForwardBool = jsonContent["paginationForward"].getBool()
let paginationForward =
if paginationForwardBool: PagingDirection.FORWARD else: PagingDirection.BACKWARD
let paginationLimit =
if jsonContent.contains("pagination_limit"):
some(uint64(jsonContent["pagination_limit"].getInt()))
if jsonContent.contains("paginationLimit"):
some(uint64(jsonContent["paginationLimit"].getInt()))
else:
none(uint64)
let startTime = ?jsonContent.getProtoInt64("time_start")
let endTime = ?jsonContent.getProtoInt64("time_end")
let startTime = ?jsonContent.getProtoInt64("timeStart")
let endTime = ?jsonContent.getProtoInt64("timeEnd")
return ok(
StoreQueryRequest(
requestId: jsonContent["request_id"].getStr(),
includeData: jsonContent["include_data"].getBool(),
requestId: jsonContent["requestId"].getStr(),
includeData: jsonContent["includeData"].getBool(),
pubsubTopic: pubsubTopic,
contentTopics: contentTopics,
startTime: startTime,

File diff suppressed because it is too large.

View File

@ -99,6 +99,7 @@ import
./wakunode_rest/test_rest_relay_serdes,
./wakunode_rest/test_rest_serdes,
./wakunode_rest/test_rest_filter,
./wakunode_rest/test_rest_lightpush,
./wakunode_rest/test_rest_lightpush_legacy,
./wakunode_rest/test_rest_admin,
./wakunode_rest/test_rest_cors,

View File

@ -1,6 +1,6 @@
{.used.}
import std/strutils, stew/[results, byteutils], testutils/unittests
import std/strutils, results, stew/byteutils, testutils/unittests
import waku/common/base64
suite "Waku Common - stew base64 wrapper":

View File

@ -2,7 +2,7 @@
import
std/[os, options],
stew/results,
results,
stew/shims/net as stewNet,
testutils/unittests,
confutils,

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, stew/results, stew/shims/net, testutils/unittests
import std/options, results, stew/shims/net, testutils/unittests
import waku/common/enr, ../testlib/wakucore
suite "nim-eth ENR - builder and typed record":

View File

@ -1,6 +1,6 @@
{.used.}
import testutils/unittests, stew/results
import testutils/unittests, results
import waku/common/utils/parse_size_units
suite "Size serialization test":

View File

@ -10,7 +10,7 @@
import testutils/unittests
import chronos, libp2p/stream/connection
import std/[sequtils, options, tables]
import std/[options, tables]
import ../../waku/common/rate_limit/request_limiter
import ../../waku/common/rate_limit/timed_map

View File

@ -10,7 +10,7 @@
import testutils/unittests
import chronos, libp2p/stream/connection
import std/[sequtils, options]
import std/options
import ../../waku/common/rate_limit/request_limiter
import ../../waku/common/rate_limit/timed_map

View File

@ -1,6 +1,6 @@
{.used.}
import std/[strutils, os], stew/results, testutils/unittests
import std/[strutils, os], results, testutils/unittests
import waku/common/databases/db_sqlite {.all.}, ../waku_archive/archive_utils
template sourceDir(): string =

View File

@ -1,17 +1,9 @@
{.used.}
import
std/options,
testutils/unittests,
chronos,
web3,
stew/byteutils,
stint,
strutils,
tests/testlib/testasync
import std/options, testutils/unittests, chronos, web3, stint, tests/testlib/testasync
import
waku/[node/peer_manager, waku_core],
waku/node/peer_manager,
waku/incentivization/[rpc, eligibility_manager],
../waku_rln_relay/[utils_onchain, utils]

View File

@ -1,17 +1,6 @@
import
std/options,
testutils/unittests,
chronos,
web3,
stew/byteutils,
stint,
strutils,
tests/testlib/testasync
import std/options, testutils/unittests, chronos, web3
import
waku/[node/peer_manager, waku_core],
waku/incentivization/[rpc, reputation_manager],
waku/waku_lightpush_legacy/rpc
import waku/incentivization/reputation_manager, waku/waku_lightpush_legacy/rpc
suite "Waku Incentivization PoC Reputation":
var manager {.threadvar.}: ReputationManager

View File

@ -1,4 +1,4 @@
import std/options, stew/results, libp2p/peerstore
import std/options, results
import
waku/node/peer_manager/[waku_peer_store, peer_store/waku_peer_storage],

View File

@ -6,7 +6,6 @@ import
testutils/unittests,
chronos,
chronicles,
os,
libp2p/[peerstore, crypto/crypto]
import

View File

@ -1,31 +1,24 @@
{.used.}
import
std/[options, tables, sequtils, tempfiles, strutils],
std/[options, tempfiles],
stew/shims/net as stewNet,
testutils/unittests,
chronos,
chronicles,
std/strformat,
os,
libp2p/[peerstore, crypto/crypto]
libp2p/crypto/crypto
import
waku/[
waku_core,
node/peer_manager,
node/waku_node,
waku_filter_v2,
waku_filter_v2/client,
waku_filter_v2/subscriptions,
waku_lightpush_legacy,
waku_lightpush_legacy/common,
waku_lightpush_legacy/client,
waku_lightpush_legacy/protocol_metrics,
waku_lightpush_legacy/rpc,
waku_rln_relay,
],
../testlib/[assertions, common, wakucore, wakunode, testasync, futures, testutils],
../testlib/[wakucore, wakunode, testasync, futures],
../resources/payloads
suite "Waku Legacy Lightpush - End To End":

View File

@ -14,14 +14,11 @@ import
node/peer_manager,
waku_core,
waku_store_legacy,
waku_store_legacy/client,
waku_archive_legacy,
waku_archive_legacy/driver/sqlite_driver,
common/databases/db_sqlite,
],
../waku_store_legacy/store_utils,
../waku_archive_legacy/archive_utils,
../testlib/[common, wakucore, wakunode, testasync, futures, testutils]
../testlib/[wakucore, wakunode, testasync, testutils]
suite "Waku Store - End to End - Sorted Archive":
var pubsubTopic {.threadvar.}: PubsubTopic

View File

@ -1,27 +1,16 @@
{.used.}
import
std/[options, tables, sequtils, tempfiles, strutils],
std/[options, tempfiles],
stew/shims/net as stewNet,
testutils/unittests,
chronos,
chronicles,
std/strformat,
os,
libp2p/[peerstore, crypto/crypto]
libp2p/crypto/crypto
import
waku/[
waku_core,
node/peer_manager,
node/waku_node,
waku_filter_v2,
waku_filter_v2/client,
waku_filter_v2/subscriptions,
waku_lightpush,
waku_rln_relay,
],
../testlib/[assertions, common, wakucore, wakunode, testasync, futures, testutils],
waku/[waku_core, node/peer_manager, node/waku_node, waku_lightpush, waku_rln_relay],
../testlib/[wakucore, wakunode, testasync, futures],
../resources/payloads
const PublishedToOnePeer = 1

View File

@ -13,14 +13,8 @@ import
eth/p2p/discoveryv5/enr
import
waku/[
waku_node,
discovery/waku_discv5,
waku_peer_exchange,
node/peer_manager,
waku_relay/protocol,
waku_core,
],
waku/
[waku_node, discovery/waku_discv5, waku_peer_exchange, node/peer_manager, waku_core],
../waku_peer_exchange/utils,
../testlib/[wakucore, wakunode, testasync]

View File

@ -18,18 +18,15 @@ import
waku_core,
node/peer_manager,
node/waku_node,
waku_enr/sharding,
discovery/waku_discv5,
waku_filter_v2/common,
waku_relay/protocol,
],
../testlib/
[wakucore, wakunode, testasync, testutils, assertions, comparisons, futures],
../testlib/[wakucore, wakunode, testasync, testutils, comparisons],
../waku_enr/utils,
../waku_archive/archive_utils,
../waku_discv5/utils,
./peer_manager/peer_store/utils,
./utils
./peer_manager/peer_store/utils
const DEFAULT_PROTOCOLS: seq[string] =
@["/ipfs/id/1.0.0", "/libp2p/autonat/1.0.0", "/libp2p/circuit/relay/0.2.0/hop"]

View File

@ -1,7 +1,7 @@
{.used.}
import
std/[options, sequtils, algorithm, sets],
std/[options, sequtils, sets],
stew/shims/net as stewNet,
testutils/unittests,
chronos,
@ -15,14 +15,11 @@ import
waku_core,
waku_core/message/digest,
waku_store,
waku_store/client,
waku_archive,
waku_archive/driver/sqlite_driver,
common/databases/db_sqlite,
],
../waku_store/store_utils,
../waku_archive/archive_utils,
../testlib/[common, wakucore, wakunode, testasync, futures, testutils]
../testlib/[wakucore, wakunode, testasync, testutils]
suite "Waku Store - End to End - Sorted Archive":
var pubsubTopic {.threadvar.}: PubsubTopic

View File

@ -1,6 +1,6 @@
{.used.}
import std/[sets, random], stew/[results, byteutils], testutils/unittests
import std/[sets, random], results, stew/byteutils, testutils/unittests
import waku/waku_core, waku/waku_api/message_cache, ./testlib/wakucore
randomize()

View File

@ -1,7 +1,7 @@
{.used.}
import
std/[options, sequtils, times, sugar, net],
std/[sequtils, times, sugar, net],
stew/shims/net as stewNet,
testutils/unittests,
chronos,
@ -27,7 +27,6 @@ import
waku_relay/protocol,
waku_filter_v2/common,
waku_store/common,
waku_lightpush/common,
waku_peer_exchange,
waku_metadata,
],

View File

@ -9,12 +9,8 @@ import
libp2p/multiaddress,
testutils/unittests
import
waku/[
node/peer_manager/peer_manager,
node/peer_manager/waku_peer_store,
waku_node,
waku_core/peers,
],
waku/
[node/peer_manager/peer_manager, node/peer_manager/waku_peer_store, waku_core/peers],
./testlib/wakucore
suite "Extended nim-libp2p Peer Store":

View File

@ -4,10 +4,8 @@ import
std/[sequtils, options],
stew/shims/net,
testutils/unittests,
chronicles,
chronos,
libp2p/peerid,
libp2p/crypto/crypto,
libp2p/protocols/pubsub/gossipsub
import waku/waku_core, waku/waku_node, ./testlib/wakucore, ./testlib/wakunode

View File

@ -3,7 +3,8 @@
import
std/[sequtils, tables],
stew/shims/net,
stew/[base32, results],
results,
stew/base32,
testutils/unittests,
chronicles,
chronos,

View File

@ -1,6 +1,6 @@
{.used.}
import std/[options, sequtils], stew/results, testutils/unittests
import std/[options, sequtils], results, testutils/unittests
import waku/waku_core, waku/waku_enr, ./testlib/wakucore
suite "Waku ENR - Capabilities bitfield":

View File

@ -1,7 +1,6 @@
{.used.}
import
std/options,
stew/shims/net as stewNet,
testutils/unittests,
chronos,

View File

@ -1,6 +1,6 @@
{.used.}
import std/tables, stew/[results, byteutils], testutils/unittests
import std/tables, results, stew/byteutils, testutils/unittests
import
waku/[
common/protobuf,

View File

@ -17,9 +17,7 @@ import
libp2p/nameresolving/mockresolver,
eth/p2p/discoveryv5/enr
import
waku/[waku_core, waku_node, node/peer_manager, waku_relay, waku_peer_exchange],
./testlib/wakucore,
./testlib/wakunode
waku/[waku_core, waku_node, node/peer_manager], ./testlib/wakucore, ./testlib/wakunode
suite "WakuNode":
asyncTest "Protocol matcher works as expected":

View File

@ -9,9 +9,9 @@ type Instr {.union.} = object
proc mockImpl*(target, replacement: pointer) =
# YOLO who needs alignment
#doAssert (cast[ByteAddress](target) and ByteAddress(0x07)) == 0
var page = cast[pointer](cast[ByteAddress](target) and (not 0xfff))
var page = cast[pointer](cast[uint](target) and (not 0xfff))
doAssert mprotect(page, 4096, PROT_WRITE or PROT_EXEC) == 0
let rel = cast[ByteAddress](replacement) - cast[ByteAddress](target) - 5
let rel = cast[uint](replacement) - cast[uint](target) - 5
var instr = Instr(
bytes: [
0xe9.byte,

View File

@ -1,6 +1,7 @@
import
std/[options, times],
stew/[results, byteutils],
results,
stew/byteutils,
stew/shims/net,
chronos,
libp2p/switch,

View File

@ -1,6 +1,6 @@
import
std/options,
stew/results,
results,
stew/shims/net,
chronos,
libp2p/switch,

View File

@ -1,13 +1,12 @@
{.used.}
import std/options, stew/results, chronos, libp2p/crypto/crypto
import std/options, results, chronos, libp2p/crypto/crypto
import
waku/[
node/peer_manager,
waku_core,
waku_archive,
waku_archive/common,
waku_archive/driver/sqlite_driver,
waku_archive/driver/sqlite_driver/migrations,
common/databases/db_sqlite,

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, stew/results, testutils/unittests
import std/options, results, testutils/unittests
import
waku/[
waku_archive,

View File

@ -1,6 +1,6 @@
{.used.}
import std/[times, random], stew/byteutils, testutils/unittests, nimcrypto
import std/random, testutils/unittests
import waku/waku_core, waku/waku_archive/driver/queue_driver/index
var rng = initRand()

View File

@ -9,7 +9,6 @@ import
waku_archive/driver/queue_driver/index,
waku_core,
],
../testlib/common,
../testlib/wakucore
proc getTestQueueDriver(numMessages: int): QueueDriver =

View File

@ -2,12 +2,7 @@
import std/sequtils, testutils/unittests, chronos
import
waku/[
common/databases/db_sqlite,
waku_archive,
waku_archive/driver/sqlite_driver,
waku_core,
],
waku/[waku_archive, waku_archive/driver/sqlite_driver, waku_core],
../waku_archive/archive_utils,
../testlib/wakucore

View File

@ -4,13 +4,7 @@ import
std/[options, sequtils, random, algorithm], testutils/unittests, chronos, chronicles
import
waku/[
common/databases/db_sqlite,
waku_archive,
waku_archive/driver/sqlite_driver,
waku_core,
waku_core/message/digest,
],
waku/[waku_archive, waku_core, waku_core/message/digest],
../testlib/common,
../testlib/wakucore,
../waku_archive/archive_utils

View File

@ -1,13 +1,11 @@
{.used.}
import std/[sequtils, times], stew/results, testutils/unittests, chronos
import std/[sequtils, times], results, testutils/unittests, chronos
import
waku/[
common/databases/db_sqlite,
waku_core,
waku_core/message/digest,
waku_archive,
waku_archive/driver/sqlite_driver,
waku_archive/retention_policy,
waku_archive/retention_policy/retention_policy_capacity,
waku_archive/retention_policy/retention_policy_size,

View File

@ -4,12 +4,10 @@ import std/[options, sequtils], testutils/unittests, chronos, libp2p/crypto/cryp
import
waku/[
common/databases/db_sqlite,
common/databases/db_postgres/dbconn,
common/paging,
waku_core,
waku_core/message/digest,
waku_archive/driver/sqlite_driver,
waku_archive,
],
../waku_archive/archive_utils,

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, stew/results, testutils/unittests
import std/options, results, testutils/unittests
import
waku/waku_archive_legacy,
waku/waku_archive_legacy/driver/queue_driver/queue_driver {.all.},

View File

@ -2,12 +2,10 @@
import std/sequtils, testutils/unittests, chronos
import
waku/common/databases/db_sqlite,
waku/waku_archive_legacy,
waku/waku_archive_legacy/driver/sqlite_driver,
waku/waku_core,
../waku_archive_legacy/archive_utils,
../testlib/common,
../testlib/wakucore
suite "SQLite driver":

View File

@ -4,9 +4,7 @@ import
std/[options, sequtils, random, algorithm], testutils/unittests, chronos, chronicles
import
waku/common/databases/db_sqlite,
waku/waku_archive_legacy,
waku/waku_archive_legacy/driver/sqlite_driver,
waku/waku_core,
waku/waku_core/message/digest,
../testlib/common,

View File

@ -1,21 +1,13 @@
{.used.}
import
std/[options, sequtils],
testutils/unittests,
chronicles,
chronos,
libp2p/crypto/crypto
import std/[options, sequtils], testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/common/databases/db_sqlite,
waku/common/paging,
waku/waku_core,
waku/waku_core/message/digest,
waku/waku_archive_legacy/driver/sqlite_driver,
waku/waku_archive_legacy,
../waku_archive_legacy/archive_utils,
../testlib/common,
../testlib/wakucore
suite "Waku Archive - message handling":

View File

@ -1,7 +1,7 @@
{.used.}
import
stew/results,
results,
testutils/unittests,
libp2p/multiaddress,
libp2p/peerid,

View File

@ -2,7 +2,7 @@
import
std/[sequtils, algorithm],
stew/results,
results,
stew/shims/net,
chronos,
chronicles,

View File

@ -1,6 +1,5 @@
import
std/options,
stew/results,
stew/shims/net,
chronos,
libp2p/crypto/crypto as libp2p_keys,

View File

@ -1,15 +1,13 @@
import
std/options,
sequtils,
stew/results,
results,
stew/shims/net,
chronos,
libp2p/crypto/crypto as libp2p_keys,
eth/keys as eth_keys
import
waku/[waku_core/topics, waku_enr, discovery/waku_discv5, waku_enr/sharding],
../testlib/[common, wakucore]
import waku/[waku_enr, discovery/waku_discv5, waku_enr/sharding], ../testlib/wakucore
proc newTestEnrRecord*(
privKey: libp2p_keys.PrivateKey,

View File

@ -1,13 +1,6 @@
{.used.}
import
std/[options, tables, sequtils, strutils, json],
testutils/unittests,
stew/[results, byteutils],
chronos,
chronicles,
os,
libp2p/peerstore
import std/[options, sequtils, json], testutils/unittests, results, chronos
import
waku/node/[peer_manager, waku_node],

View File

@ -1,25 +1,18 @@
{.used.}
import
std/[options, tables, sequtils, strutils, json],
std/[options, tables, json],
testutils/unittests,
stew/[results, byteutils],
results,
chronos,
chronicles,
os,
libp2p/peerstore
import
waku/[
node/peer_manager,
waku_core,
common/rate_limit/setting,
common/rate_limit/token_bucket,
],
waku/waku_filter_v2/[common, client, subscriptions, protocol, rpc_codec],
../testlib/[wakucore, testasync, testutils, futures, sequtils],
./waku_filter_utils,
../resources/payloads
waku/[node/peer_manager, waku_core],
waku/waku_filter_v2/[common, client, subscriptions, protocol],
../testlib/[wakucore, testasync, futures],
./waku_filter_utils
type AFilterClient = ref object of RootObj
clientSwitch*: Switch

View File

@ -1,4 +1,4 @@
import std/[options, tables, sets, sequtils, algorithm], chronos, chronicles, os
import std/[options, tables, sets, algorithm], chronos, chronicles, os
import
waku/[

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, chronicles, chronos, libp2p/crypto/crypto
import std/options, chronos, chronicles, libp2p/crypto/crypto
import
waku/node/peer_manager,

View File

@ -3,8 +3,8 @@
import
std/[options, strscans],
testutils/unittests,
chronicles,
chronos,
chronicles,
libp2p/crypto/crypto
import
@ -15,7 +15,7 @@ import
waku_lightpush/client,
waku_lightpush/protocol_metrics,
],
../testlib/[assertions, wakucore, testasync, futures, testutils],
../testlib/[assertions, wakucore, testasync, futures],
./lightpush_utils,
../resources/[pubsub_topics, content_topics, payloads]
@ -312,6 +312,39 @@ suite "Waku Lightpush Client":
# Cleanup
await serverSwitch2.stop()
asyncTest "Check timestamp is not zero":
## This test validates that, even if the generated message has a timestamp of 0,
## the node will eventually set a timestamp when publishing the message.
let
zeroTimestamp = 0
meta = "TEST-META"
message = fakeWakuMessage(
payloads.ALPHABETIC, content_topics.CURRENT, meta, zeroTimestamp
)
# When publishing a valid payload
let publishResponse =
await client.publish(some(pubsubTopic), message, serverRemotePeerInfo)
# Then the message is received by the server
discard await handlerFuture.withTimeout(FUTURE_TIMEOUT)
assertResultOk publishResponse
check handlerFuture.finished()
# And the message is received with the correct topic and payload
let (readPubsubTopic, readMessage) = handlerFuture.read()
check:
pubsubTopic == readPubsubTopic
message.payload == readMessage.payload
message.contentTopic == readMessage.contentTopic
message.meta == readMessage.meta
message.timestamp != readMessage.timestamp
message.ephemeral == readMessage.ephemeral
message.proof == readMessage.proof
message.version == readMessage.version
readMessage.timestamp > 0
suite "Verification of PushResponse Payload":
asyncTest "Positive Responses":
# When sending a valid PushRequest

View File

@ -1,24 +1,11 @@
{.used.}
import
std/[options, strscans],
testutils/unittests,
chronicles,
chronos,
libp2p/crypto/crypto
import std/options, testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/[
node/peer_manager,
common/rate_limit/setting,
waku_core,
waku_lightpush,
waku_lightpush/client,
waku_lightpush/protocol_metrics,
],
../testlib/[assertions, wakucore, testasync, futures, testutils],
./lightpush_utils,
../resources/[pubsub_topics, content_topics, payloads]
waku/[node/peer_manager, waku_core, waku_lightpush, waku_lightpush/client],
../testlib/wakucore,
./lightpush_utils
suite "Rate limited push service":
asyncTest "push message with rate limit not violated":

View File

@ -1,10 +1,9 @@
{.used.}
import std/options, chronicles, chronos, libp2p/crypto/crypto
import std/options, chronos, libp2p/crypto/crypto
import
waku/node/peer_manager,
waku/waku_core,
waku/waku_lightpush_legacy,
waku/waku_lightpush_legacy/[client, common],
waku/common/rate_limit/setting,

View File

@ -1,11 +1,6 @@
{.used.}
import
std/[options, strscans],
testutils/unittests,
chronicles,
chronos,
libp2p/crypto/crypto
import std/[options, strscans], testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/[
@ -16,9 +11,8 @@ import
waku_lightpush_legacy/common,
waku_lightpush_legacy/protocol_metrics,
waku_lightpush_legacy/rpc,
waku_lightpush_legacy/rpc_codec,
],
../testlib/[assertions, wakucore, testasync, futures, testutils],
../testlib/[assertions, wakucore, testasync, futures],
./lightpush_utils,
../resources/[pubsub_topics, content_topics, payloads]

View File

@ -1,27 +1,17 @@
{.used.}
import
std/[options, strscans],
testutils/unittests,
chronicles,
chronos,
libp2p/crypto/crypto
import std/options, testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/[
node/peer_manager,
common/rate_limit/setting,
waku_core,
waku_lightpush_legacy,
waku_lightpush_legacy/client,
waku_lightpush_legacy/common,
waku_lightpush_legacy/protocol_metrics,
waku_lightpush_legacy/rpc,
waku_lightpush_legacy/rpc_codec,
],
../testlib/[assertions, wakucore, testasync, futures, testutils],
./lightpush_utils,
../resources/[pubsub_topics, content_topics, payloads]
../testlib/wakucore,
./lightpush_utils
suite "Rate limited push service":
asyncTest "push message with rate limit not violated":

View File

@ -1,11 +1,10 @@
{.used.}
import
std/[options, sequtils, tables, net],
std/[options, sequtils, net],
testutils/unittests,
chronos,
chronicles,
libp2p/[switch, peerId, crypto/crypto, multistream, muxers/muxer],
libp2p/[switch, peerId, crypto/crypto],
eth/[keys, p2p/discoveryv5/enr]
import
@ -18,14 +17,11 @@ import
waku_peer_exchange/rpc_codec,
waku_peer_exchange/protocol,
node/peer_manager,
waku_relay/protocol,
waku_relay,
waku_core,
waku_core/message/codec,
common/enr/builder,
waku_enr/sharding,
],
../testlib/[wakucore, wakunode, simple_mock, assertions],
../testlib/[wakucore, wakunode, assertions],
./utils.nim
suite "Waku Peer Exchange":

View File

@ -1,10 +1,11 @@
import
unittest,
stew/[shims/net, results, byteutils],
results,
stew/[shims/net, byteutils],
nimcrypto/sha2,
libp2p/protocols/pubsub/rpc/messages
import waku/waku_relay/message_id, ../testlib/sequtils
import waku/waku_relay/message_id
suite "Message ID Provider":
test "Non-empty string":

View File

@ -1,13 +1,12 @@
{.used.}
import
std/[options, sequtils, strutils, strformat],
std/[options, strformat],
stew/shims/net as stewNet,
testutils/unittests,
chronicles,
chronos,
libp2p/protocols/pubsub/[pubsub, gossipsub],
libp2p/[multihash, stream/connection, switch],
libp2p/[stream/connection, switch],
./crypto_utils,
std/json
@ -19,7 +18,7 @@ import
waku_core,
waku_core/message/codec,
],
../testlib/[wakucore, testasync, testutils, futures, sequtils],
../testlib/[wakucore, testasync, futures, sequtils],
./utils,
../resources/payloads

View File

@ -90,6 +90,7 @@ suite "WakuNode - Relay":
topic == $shard
msg.contentTopic == contentTopic
msg.payload == payload
msg.timestamp > 0
completionFut.complete(true)
node3.subscribe((kind: PubsubSub, topic: $shard), some(relayHandler))
@ -279,6 +280,7 @@ suite "WakuNode - Relay":
topic == $shard
msg.contentTopic == contentTopic
msg.payload == payload
msg.timestamp > 0
completionFut.complete(true)
node1.subscribe((kind: PubsubSub, topic: $shard), some(relayHandler))
@ -327,6 +329,7 @@ suite "WakuNode - Relay":
topic == $shard
msg.contentTopic == contentTopic
msg.payload == payload
msg.timestamp > 0
completionFut.complete(true)
node1.subscribe((kind: PubsubSub, topic: $shard), some(relayHandler))
@ -379,6 +382,7 @@ suite "WakuNode - Relay":
topic == $shard
msg.contentTopic == contentTopic
msg.payload == payload
msg.timestamp > 0
completionFut.complete(true)
node1.subscribe((kind: PubsubSub, topic: $shard), some(relayHandler))
@ -429,6 +433,7 @@ suite "WakuNode - Relay":
topic == $shard
msg.contentTopic == contentTopic
msg.payload == payload
msg.timestamp > 0
completionFut.complete(true)
node1.subscribe((kind: PubsubSub, topic: $shard), some(relayHandler))
@ -487,6 +492,7 @@ suite "WakuNode - Relay":
topic == $shard
msg.contentTopic == contentTopic
msg.payload == payload
msg.timestamp > 0
completionFut.complete(true)
node1.subscribe((kind: PubsubSub, topic: $shard), some(relayHandler))

View File

@ -4,7 +4,6 @@ import
std/[strutils, sequtils, tempfiles],
stew/byteutils,
stew/shims/net as stewNet,
testutils/unittests,
chronos,
libp2p/switch,
libp2p/protocols/pubsub/pubsub
@ -23,8 +22,7 @@ import
],
../waku_store/store_utils,
../waku_archive/archive_utils,
../testlib/[wakucore, wakunode, testasync, futures],
../resources/payloads
../testlib/[wakucore, futures]
proc noopRawHandler*(): WakuRelayHandler =
var handler: WakuRelayHandler

View File

@ -3,8 +3,9 @@
{.push raises: [].}
import
std/[options, os, osproc, sequtils, deques, streams, strutils, tempfiles, strformat],
stew/[results, byteutils],
std/[options, sequtils, deques],
results,
stew/byteutils,
testutils/unittests,
chronos,
chronicles,
@ -17,19 +18,15 @@ import
import
waku/[
waku_node,
node/waku_node,
waku_rln_relay,
waku_rln_relay/protocol_types,
waku_rln_relay/constants,
waku_rln_relay/contract,
waku_rln_relay/rln,
waku_rln_relay/conversion_utils,
waku_rln_relay/group_manager/on_chain/group_manager,
],
../testlib/[wakucore, wakunode, common],
./utils_onchain,
./utils
../testlib/wakucore,
./utils_onchain
suite "Onchain group manager":
# We run Anvil

View File

@ -4,7 +4,7 @@
import
testutils/unittests,
stew/results,
results,
options,
waku/[
waku_rln_relay/protocol_types,

View File

@ -2,7 +2,7 @@
{.push raises: [].}
import stew/results, stint
import results
import
./rln/waku_rln_relay_utils,

View File

@ -17,7 +17,6 @@ import
waku_rln_relay/protocol_metrics,
waku_keystore,
],
../testlib/common,
./rln/waku_rln_relay_utils
suite "Waku rln relay":

View File

@ -451,9 +451,9 @@ procSuite "WakuNode - RLN relay":
completionFut1.complete(true)
if msg == wm2:
completionFut2.complete(true)
if msg == wm3:
if msg.payload == wm3.payload:
completionFut3.complete(true)
if msg == wm4:
if msg.payload == wm4.payload:
completionFut4.complete(true)
# mount the relay handler for node3
@ -490,6 +490,9 @@ procSuite "WakuNode - RLN relay":
## This is skipped because it is flaky and made CI fail randomly, but it is useful to run manually
# Given two nodes
let

View File

@ -1,4 +1,4 @@
import web3, chronos, options, stint, stew/byteutils
import web3, chronos, stew/byteutils
proc deployContract*(
web3: Web3, code: string, gasPrice = 0, contractInput = ""

View File

@ -3,8 +3,9 @@
{.push raises: [].}
import
std/[options, os, osproc, sequtils, deques, streams, strutils, tempfiles, strformat],
stew/[results, byteutils],
std/[options, os, osproc, deques, streams, strutils, tempfiles, strformat],
results,
stew/byteutils,
testutils/unittests,
chronos,
chronicles,

View File

@ -1,10 +1,9 @@
{.used.}
import std/options, chronos, chronicles, libp2p/crypto/crypto
import std/options, chronos
import
waku/[node/peer_manager, waku_core, waku_store, waku_store/client],
../testlib/[common, wakucore]
waku/[node/peer_manager, waku_store, waku_store/client], ../testlib/[common, wakucore]
proc newTestWakuStore*(
switch: Switch, handler: StoreQueryRequestHandler

View File

@ -1,10 +1,10 @@
{.used.}
import std/options, testutils/unittests, chronos, chronicles, libp2p/crypto/crypto
import std/options, testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/[node/peer_manager, waku_core, waku_store, waku_store/client, common/paging],
../testlib/[common, wakucore, testasync, futures],
../testlib/[wakucore, testasync, futures],
./store_utils
suite "Store Client":

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, testutils/unittests, chronos, chronicles, libp2p/crypto/crypto
import std/options, testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/[
@ -12,7 +12,7 @@ import
waku_store/client,
waku_store/common,
],
../testlib/[common, wakucore],
../testlib/wakucore,
./store_utils
suite "Waku Store - query handler":

View File

@ -18,10 +18,8 @@ import
common/paging,
waku_core,
waku_core/message/digest,
waku_core/subscription,
node/peer_manager,
waku_archive,
waku_archive/driver/sqlite_driver,
waku_filter_v2,
waku_filter_v2/client,
waku_store,

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, chronos, chronicles, libp2p/crypto/crypto
import std/options, chronos
import
waku/[node/peer_manager, waku_core, waku_store_legacy, waku_store_legacy/client],

View File

@ -1,6 +1,6 @@
{.used.}
import std/options, testutils/unittests, chronos, chronicles, libp2p/crypto/crypto
import std/options, testutils/unittests, chronos, libp2p/crypto/crypto
import
waku/[
@ -10,7 +10,7 @@ import
waku_store_legacy/client,
common/paging,
],
../testlib/[common, wakucore, testasync, futures],
../testlib/[wakucore, testasync, futures],
./store_utils
suite "Store Client":

Some files were not shown because too many files have changed in this diff.