Mirror of https://github.com/logos-messaging/logos-messaging-nim.git (synced 2026-02-24 07:43:08 +00:00)

format with nph 0.7.0

Commit 5fd7c82a1a, parent 72c24ec556.
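The change is mechanical: nph 0.7.0 keeps the opener of a collection literal or triple-quoted string on the declaration line instead of wrapping it onto its own continuation line. Below is a minimal Nim sketch of the pattern, assuming only what the hunks themselves show; the values are illustrative, not taken from any one file. The hunks that follow apply this same transformation across the repository.

import std/strutils

# Old style (pre-nph 0.7.0): the opener sat on a continuation line.
# let shards =
#   @[
#     uint16(0),
#     uint16(1),
#   ]

# New style (nph 0.7.0): the opener stays on the declaration line.
let shards = @[
  uint16(0),
  uint16(1),
]

# The same rule applies to triple-quoted string constants.
const Help = """
Commands: /[?|help|connect|nick|exit]
"""

echo shards.len # 2
echo Help.contains("connect") # true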
@@ -51,8 +51,7 @@ import
import libp2p/protocols/pubsub/rpc/messages, libp2p/protocols/pubsub/pubsub
import ../../waku/waku_rln_relay

-const Help =
-  """
+const Help = """
Commands: /[?|help|connect|nick|exit]
help: Prints this help
connect: dials a remote peer

@@ -338,16 +337,16 @@ proc processInput(rfd: AsyncFD, rng: ref HmacDrbgContext) {.async.} =
  builder.withRecord(record)

  builder
    .withNetworkConfigurationDetails(
      conf.listenAddress,
      Port(uint16(conf.tcpPort) + conf.portsShift),
      extIp,
      extTcpPort,
      wsBindPort = Port(uint16(conf.websocketPort) + conf.portsShift),
      wsEnabled = conf.websocketSupport,
      wssEnabled = conf.websocketSecureSupport,
    )
    .tryGet()
  builder.build().tryGet()

  await node.start()

@@ -127,8 +127,10 @@ proc toMatterbridge(
  assert chat2Msg.isOk

  if not cmb.mbClient
-    .postMessage(text = string.fromBytes(chat2Msg[].payload), username = chat2Msg[].nick)
+    .postMessage(
+      text = string.fromBytes(chat2Msg[].payload), username = chat2Msg[].nick
+    )
    .containsValue(true):
    chat2_mb_dropped.inc(labelValues = ["duplicate"])
    error "Matterbridge host unreachable. Dropping message."

@@ -175,10 +177,10 @@ proc new*(
  builder.withNodeKey(nodev2Key)

  builder
    .withNetworkConfigurationDetails(
      nodev2BindIp, nodev2BindPort, nodev2ExtIp, nodev2ExtPort
    )
    .tryGet()
  builder.build().tryGet()

  return Chat2MatterBridge(

@@ -57,8 +57,7 @@ import ../../waku/waku_rln_relay
logScope:
  topics = "chat2 mix"

-const Help =
-  """
+const Help = """
Commands: /[?|help|connect|nick|exit]
help: Prints this help
connect: dials a remote peer

@@ -429,16 +428,16 @@ proc processInput(rfd: AsyncFD, rng: ref HmacDrbgContext) {.async.} =
  builder.withRecord(record)

  builder
    .withNetworkConfigurationDetails(
      conf.listenAddress,
      Port(uint16(conf.tcpPort) + conf.portsShift),
      extIp,
      extTcpPort,
      wsBindPort = Port(uint16(conf.websocketPort) + conf.portsShift),
      wsEnabled = conf.websocketSupport,
      wssEnabled = conf.websocketSecureSupport,
    )
    .tryGet()
  builder.build().tryGet()

  node.mountAutoSharding(conf.clusterId, conf.numShardsInNetwork).isOkOr:
@@ -113,17 +113,16 @@ type
  shards* {.
    desc:
      "Shards index to subscribe to [0..NUM_SHARDS_IN_NETWORK-1]. Argument may be repeated.",
-    defaultValue:
-      @[
+    defaultValue: @[
      uint16(0),
      uint16(1),
      uint16(2),
      uint16(3),
      uint16(4),
      uint16(5),
      uint16(6),
      uint16(7),
    ],
    name: "shard"
  .}: seq[uint16]

@@ -161,11 +161,10 @@ proc main(rng: ref HmacDrbgContext): Future[int] {.async.} =

  # create dns resolver
  let
-    nameServers =
-      @[
+    nameServers = @[
      initTAddress(parseIpAddress("1.1.1.1"), Port(53)),
      initTAddress(parseIpAddress("1.0.0.1"), Port(53)),
    ]
    resolver: DnsResolver = DnsResolver.new(nameServers)

  if conf.logLevel != LogLevel.NONE:
@@ -7,13 +7,9 @@ import ../../waku/api/types
type JsonConnectionStatusChangeEvent* = ref object of JsonEvent
  status*: ConnectionStatus

-proc new*(
-    T: type JsonConnectionStatusChangeEvent, status: ConnectionStatus
-): T =
-  return JsonConnectionStatusChangeEvent(
-    eventType: "node_health_change",
-    status: status
-  )
+proc new*(T: type JsonConnectionStatusChangeEvent, status: ConnectionStatus): T =
+  return
+    JsonConnectionStatusChangeEvent(eventType: "node_health_change", status: status)

method `$`*(event: JsonConnectionStatusChangeEvent): string =
  $(%*event)

@@ -72,7 +72,7 @@ proc waku_new(
      relayHandler: onReceivedMessage(ctx),
      topicHealthChangeHandler: onTopicHealthChange(ctx),
      connectionChangeHandler: onConnectionChange(ctx),
-      connectionStatusChangeHandler: onConnectionStatusChange(ctx)
+      connectionStatusChangeHandler: onConnectionStatusChange(ctx),
    )

  ffi.sendRequestToFFIThread(
@@ -1,5 +1,4 @@
-const ContentScriptVersion_1* =
-  """
+const ContentScriptVersion_1* = """
CREATE TABLE IF NOT EXISTS messages (
pubsubTopic VARCHAR NOT NULL,
contentTopic VARCHAR NOT NULL,

@@ -1,5 +1,4 @@
-const ContentScriptVersion_2* =
-  """
+const ContentScriptVersion_2* = """
ALTER TABLE IF EXISTS messages_backup RENAME TO messages;
ALTER TABLE messages RENAME TO messages_backup;
ALTER TABLE messages_backup DROP CONSTRAINT messageIndex;

@@ -1,5 +1,4 @@
-const ContentScriptVersion_3* =
-  """
+const ContentScriptVersion_3* = """
CREATE INDEX IF NOT EXISTS i_query ON messages
(contentTopic, pubsubTopic, storedAt, id);

@@ -1,5 +1,4 @@
-const ContentScriptVersion_4* =
-  """
+const ContentScriptVersion_4* = """
ALTER TABLE messages ADD meta VARCHAR default null;

CREATE INDEX IF NOT EXISTS i_query ON messages (contentTopic, pubsubTopic, storedAt, id);

@@ -1,5 +1,4 @@
-const ContentScriptVersion_5* =
-  """
+const ContentScriptVersion_5* = """
CREATE INDEX IF NOT EXISTS i_query_storedAt ON messages (storedAt, id);

UPDATE version SET version = 5 WHERE version = 4;

@@ -1,5 +1,4 @@
-const ContentScriptVersion_6* =
-  """
+const ContentScriptVersion_6* = """
-- we can drop the timestamp column because this data is also kept in the storedAt column
ALTER TABLE messages DROP COLUMN timestamp;

@@ -1,5 +1,4 @@
-const ContentScriptVersion_7* =
-  """
+const ContentScriptVersion_7* = """

-- Create lookup table
CREATE TABLE IF NOT EXISTS messages_lookup (
@@ -10,16 +10,15 @@ type MigrationScript* = object
proc init*(T: type MigrationScript, targetVersion: int, scriptContent: string): T =
  return MigrationScript(targetVersion: targetVersion, scriptContent: scriptContent)

-const PgMigrationScripts* =
-  @[
+const PgMigrationScripts* = @[
  MigrationScript(version: 1, scriptContent: ContentScriptVersion_1),
  MigrationScript(version: 2, scriptContent: ContentScriptVersion_2),
  MigrationScript(version: 3, scriptContent: ContentScriptVersion_3),
  MigrationScript(version: 4, scriptContent: ContentScriptVersion_4),
  MigrationScript(version: 5, scriptContent: ContentScriptVersion_5),
  MigrationScript(version: 6, scriptContent: ContentScriptVersion_6),
  MigrationScript(version: 7, scriptContent: ContentScriptVersion_7),
]

proc getMigrationScripts*(currentVersion: int64, targetVersion: int64): seq[string] =
  var ret = newSeq[string]()
@@ -87,7 +87,7 @@ proc waitForEvents(
  return await allFutures(
    manager.sentFuture, manager.propagatedFuture, manager.errorFuture
  )
  .withTimeout(timeout)

proc outcomes(manager: SendEventListenerManager): set[SendEventOutcome] =
  if manager.sentFuture.completed():
@@ -126,12 +126,11 @@ suite "Entry Nodes Classification":

suite "Entry Nodes Processing":
  test "Process mixed entry nodes":
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
      "enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
    ]

    let result = processEntryNodes(entryNodes)
    check:

@@ -147,11 +146,10 @@ suite "Entry Nodes Processing":
      staticNodes[0] == entryNodes[1] # multiaddr added to static

  test "Process only ENRTree nodes":
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
      "enrtree://ANOTHER_TREE@example.com",
    ]

    let result = processEntryNodes(entryNodes)
    check:

@@ -165,11 +163,10 @@ suite "Entry Nodes Processing":
      enrTreeUrls == entryNodes

  test "Process only multiaddresses":
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
      "/ip4/192.168.1.1/tcp/60001/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
    ]

    let result = processEntryNodes(entryNodes)
    check:

@@ -183,11 +180,10 @@ suite "Entry Nodes Processing":
      staticNodes == entryNodes

  test "Process only ENR nodes":
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
      "enr:-QEkuECnZ3IbVAgkOzv-QLnKC4dRKAPRY80m1-R7G8jZ7yfT3ipEfBrhKN7ARcQgQ-vg-h40AQzyvAkPYlHPaFKk6u9MBgmlkgnY0iXNlY3AyNTZrMaEDk49D8JjMSns4p1XVNBvJquOUzT4PENSJknkROspfAFGg3RjcIJ2X4N1ZHCCd2g",
    ]

    let result = processEntryNodes(entryNodes)
    check:

@@ -224,13 +220,12 @@ suite "Entry Nodes Processing":
      "Entry node error: Unrecognized entry node format. Must start with 'enrtree:', 'enr:', or '/'"

  test "Process different multiaddr formats":
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
      "/ip6/::1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
      "/dns4/example.com/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYe",
      "/dns/node.example.org/tcp/443/wss/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYf",
    ]

    let result = processEntryNodes(entryNodes)
    check:

@@ -244,13 +239,12 @@ suite "Entry Nodes Processing":
      staticNodes == entryNodes

  test "Process with duplicate entries":
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
      "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
      "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
    ]

    let result = processEntryNodes(entryNodes)
    check:
@@ -88,11 +88,10 @@ suite "LibWaku Conf - toWakuConf":

  test "Bootstrap nodes configuration":
    ## Given
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
      "enr:-QEkuECnZ3IbVAgkOzv-QLnKC4dRKAPRY80m1-R7G8jZ7yfT3ipEfBrhKN7ARcQgQ-vg-h40AQzyvAkPYlHPaFKk6u9MBgmlkgnY0iXNlY3AyNTZrMaEDk49D8JjMSns4p1XVNBvJquOUzT4PENSJknkROspfAFGg3RjcIJ2X4N1ZHCCd2g",
    ]
    let libConf = NodeConfig.init(
      mode = Core,
      protocolsConfig = ProtocolsConfig.init(

@@ -113,11 +112,10 @@ suite "LibWaku Conf - toWakuConf":

  test "Static store nodes configuration":
    ## Given
-    let staticStoreNodes =
-      @[
+    let staticStoreNodes = @[
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
      "/ip4/192.168.1.1/tcp/60001/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
    ]
    let nodeConf = NodeConfig.init(
      protocolsConfig = ProtocolsConfig.init(
        entryNodes = @[], staticStoreNodes = staticStoreNodes, clusterId = 1

@@ -199,14 +197,12 @@ suite "LibWaku Conf - toWakuConf":
    let nodeConfig = NodeConfig.init(
      mode = Core,
      protocolsConfig = ProtocolsConfig.init(
-        entryNodes =
-          @[
+        entryNodes = @[
          "enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
        ],
-        staticStoreNodes =
-          @[
+        staticStoreNodes = @[
          "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
        ],
        clusterId = 99,
        autoShardingConfig = AutoShardingConfig(numShardsInCluster: 12),
        messageValidation = MessageValidation(

@@ -270,11 +266,10 @@ suite "LibWaku Conf - toWakuConf":

  test "NodeConfig with mixed entry nodes (integration test)":
    ## Given
-    let entryNodes =
-      @[
+    let entryNodes = @[
      "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
      "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
    ]

    let nodeConfig = NodeConfig.init(
      mode = Core,
@@ -304,8 +299,7 @@ suite "LibWaku Conf - toWakuConf":
suite "NodeConfig JSON - complete format":
  test "Full NodeConfig from complete JSON with field validation":
    ## Given
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "mode": "Core",
  "protocolsConfig": {

@@ -362,8 +356,7 @@ suite "NodeConfig JSON - complete format":

  test "Full NodeConfig with RlnConfig present":
    ## Given
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "mode": "Edge",
  "protocolsConfig": {

@@ -408,10 +401,9 @@ suite "NodeConfig JSON - complete format":
      mode = Edge,
      protocolsConfig = ProtocolsConfig.init(
        entryNodes = @["enrtree://TREE@example.com"],
-        staticStoreNodes =
-          @[
+        staticStoreNodes = @[
          "/ip4/1.2.3.4/tcp/80/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
        ],
        clusterId = 42,
        autoShardingConfig = AutoShardingConfig(numShardsInCluster: 16),
        messageValidation = MessageValidation(

@@ -515,8 +507,7 @@ suite "NodeConfig JSON - partial format with defaults":

  test "ProtocolsConfig partial - optional fields get defaults":
    ## Given — only entryNodes and clusterId provided
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": ["enrtree://X@y.com"],

@@ -546,8 +537,7 @@ suite "NodeConfig JSON - partial format with defaults":

  test "MessageValidation partial - rlnConfig omitted defaults to none":
    ## Given
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -574,8 +564,7 @@ suite "NodeConfig JSON - partial format with defaults":

  test "logLevel and logFormat omitted use defaults":
    ## Given
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "mode": "Core",
  "protocolsConfig": {

@@ -600,8 +589,7 @@ suite "NodeConfig JSON - partial format with defaults":

suite "NodeConfig JSON - unsupported fields raise errors":
  test "Unknown field at NodeConfig level raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "mode": "Core",
  "unknownTopLevel": true

@@ -616,8 +604,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
    check raised

  test "Typo in NodeConfig field name raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "modes": "Core"
}

@@ -631,8 +618,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
    check raised

  test "Unknown field in ProtocolsConfig raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -655,8 +641,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
    check raised

  test "Unknown field in NetworkingConfig raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -679,8 +664,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
    check raised

  test "Unknown field in MessageValidation raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -706,8 +690,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
    check raised

  test "Unknown field in RlnConfig raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -738,8 +721,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
    check raised

  test "Unknown field in AutoShardingConfig raises":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -766,8 +748,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":

suite "NodeConfig JSON - missing required fields":
  test "Missing 'entryNodes' in ProtocolsConfig":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "clusterId": 1

@@ -788,8 +769,7 @@ suite "NodeConfig JSON - missing required fields":
    check raised

  test "Missing 'clusterId' in ProtocolsConfig":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": []

@@ -810,8 +790,7 @@ suite "NodeConfig JSON - missing required fields":
    check raised

  test "Missing required fields in NetworkingConfig":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -831,8 +810,7 @@ suite "NodeConfig JSON - missing required fields":
    check raised

  test "Missing 'numShardsInCluster' in AutoShardingConfig":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -855,8 +833,7 @@ suite "NodeConfig JSON - missing required fields":
    check raised

  test "Missing required fields in RlnConfig":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -884,8 +861,7 @@ suite "NodeConfig JSON - missing required fields":
    check raised

  test "Missing 'maxMessageSize' in MessageValidation":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -927,8 +903,7 @@ suite "NodeConfig JSON - invalid values":
    check raised

  test "Wrong type for clusterId (string instead of number)":
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "protocolsConfig": {
    "entryNodes": [],

@@ -960,8 +935,7 @@ suite "NodeConfig JSON - invalid values":
suite "NodeConfig JSON -> WakuConf integration":
  test "Decoded config translates to valid WakuConf":
    ## Given
-    let jsonStr =
-      """
+    let jsonStr = """
{
  "mode": "Core",
  "protocolsConfig": {
@@ -4,23 +4,22 @@ import std/strutils, results, stew/byteutils, testutils/unittests
import waku/common/base64

suite "Waku Common - stew base64 wrapper":
-  const TestData =
-    @[
+  const TestData = @[
    # Test vectors from RFC 4648
    # See: https://datatracker.ietf.org/doc/html/rfc4648#section-10
    ("", Base64String("")),
    ("f", Base64String("Zg==")),
    ("fo", Base64String("Zm8=")),
    ("foo", Base64String("Zm9v")),
    ("foob", Base64String("Zm9vYg==")),
    ("fooba", Base64String("Zm9vYmE=")),
    ("foobar", Base64String("Zm9vYmFy")),

    # Custom test vectors
    ("\x01", Base64String("AQ==")),
    ("\x13", Base64String("Ew==")),
    ("\x01\x02\x03\x04", Base64String("AQIDBA==")),
  ]

  for (plaintext, encoded) in TestData:
    test "encode into base64 (" & escape(plaintext) & " -> \"" & string(encoded) & "\")":
@@ -45,11 +45,11 @@ static:
suite "RequestBroker macro (async mode)":
  test "serves zero-argument providers":
    check SimpleResponse
      .setProvider(
        proc(): Future[Result[SimpleResponse, string]] {.async.} =
          ok(SimpleResponse(value: "hi"))
      )
      .isOk()

    let res = waitFor SimpleResponse.request()
    check res.isOk()

@@ -65,12 +65,14 @@ suite "RequestBroker macro (async mode)":
  test "serves input-based providers":
    var seen: seq[string] = @[]
    check KeyedResponse
      .setProvider(
-        proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
+        proc(
+            key: string, subKey: int
+        ): Future[Result[KeyedResponse, string]] {.async.} =
          seen.add(key)
          ok(KeyedResponse(key: key, payload: key & "-payload+" & $subKey))
      )
      .isOk()

    let res = waitFor KeyedResponse.request("topic", 1)
    check res.isOk()

@@ -82,11 +84,13 @@ suite "RequestBroker macro (async mode)":

  test "catches provider exception":
    check KeyedResponse
      .setProvider(
-        proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
+        proc(
+            key: string, subKey: int
+        ): Future[Result[KeyedResponse, string]] {.async.} =
          raise newException(ValueError, "simulated failure")
      )
      .isOk()

    let res = waitFor KeyedResponse.request("neglected", 11)
    check res.isErr()

@@ -101,18 +105,18 @@ suite "RequestBroker macro (async mode)":

  test "supports both provider types simultaneously":
    check DualResponse
      .setProvider(
        proc(): Future[Result[DualResponse, string]] {.async.} =
          ok(DualResponse(note: "base", count: 1))
      )
      .isOk()

    check DualResponse
      .setProvider(
        proc(suffix: string): Future[Result[DualResponse, string]] {.async.} =
          ok(DualResponse(note: "base" & suffix, count: suffix.len))
      )
      .isOk()

    let noInput = waitFor DualResponse.request()
    check noInput.isOk()

@@ -127,11 +131,11 @@ suite "RequestBroker macro (async mode)":

  test "clearProvider resets both entries":
    check DualResponse
      .setProvider(
        proc(): Future[Result[DualResponse, string]] {.async.} =
          ok(DualResponse(note: "temp", count: 0))
      )
      .isOk()
    DualResponse.clearProvider()

    let res = waitFor DualResponse.request()

@@ -139,11 +143,11 @@ suite "RequestBroker macro (async mode)":

  test "implicit zero-argument provider works by default":
    check ImplicitResponse
      .setProvider(
        proc(): Future[Result[ImplicitResponse, string]] {.async.} =
          ok(ImplicitResponse(note: "auto"))
      )
      .isOk()

    let res = waitFor ImplicitResponse.request()
    check res.isOk()

@@ -158,18 +162,18 @@ suite "RequestBroker macro (async mode)":

  test "no provider override":
    check DualResponse
      .setProvider(
        proc(): Future[Result[DualResponse, string]] {.async.} =
          ok(DualResponse(note: "base", count: 1))
      )
      .isOk()

    check DualResponse
      .setProvider(
        proc(suffix: string): Future[Result[DualResponse, string]] {.async.} =
          ok(DualResponse(note: "base" & suffix, count: suffix.len))
      )
      .isOk()

    let overrideProc = proc(): Future[Result[DualResponse, string]] {.async.} =
      ok(DualResponse(note: "something else", count: 1))

@@ -207,27 +211,27 @@ suite "RequestBroker macro (async mode)":
    SimpleResponse.clearProvider()

    check SimpleResponse
      .setProvider(
        proc(): Future[Result[SimpleResponse, string]] {.async.} =
          ok(SimpleResponse(value: "default"))
      )
      .isOk()

    check SimpleResponse
      .setProvider(
        BrokerContext(0x11111111'u32),
        proc(): Future[Result[SimpleResponse, string]] {.async.} =
          ok(SimpleResponse(value: "one")),
      )
      .isOk()

    check SimpleResponse
      .setProvider(
        BrokerContext(0x22222222'u32),
        proc(): Future[Result[SimpleResponse, string]] {.async.} =
          ok(SimpleResponse(value: "two")),
      )
      .isOk()

    let defaultRes = waitFor SimpleResponse.request()
    check defaultRes.isOk()

@@ -246,12 +250,12 @@ suite "RequestBroker macro (async mode)":
    check missing.error.contains("no provider registered for broker context")

    check SimpleResponse
      .setProvider(
        BrokerContext(0x11111111'u32),
        proc(): Future[Result[SimpleResponse, string]] {.async.} =
          ok(SimpleResponse(value: "dup")),
      )
      .isErr()

    SimpleResponse.clearProvider()

@@ -259,27 +263,33 @@ suite "RequestBroker macro (async mode)":
    KeyedResponse.clearProvider()

    check KeyedResponse
      .setProvider(
-        proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
+        proc(
+            key: string, subKey: int
+        ): Future[Result[KeyedResponse, string]] {.async.} =
          ok(KeyedResponse(key: "default-" & key, payload: $subKey))
      )
      .isOk()

    check KeyedResponse
      .setProvider(
        BrokerContext(0xABCDEF01'u32),
-        proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
+        proc(
+            key: string, subKey: int
+        ): Future[Result[KeyedResponse, string]] {.async.} =
          ok(KeyedResponse(key: "k1-" & key, payload: "p" & $subKey)),
      )
      .isOk()

    check KeyedResponse
      .setProvider(
        BrokerContext(0xABCDEF02'u32),
-        proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
+        proc(
+            key: string, subKey: int
+        ): Future[Result[KeyedResponse, string]] {.async.} =
          ok(KeyedResponse(key: "k2-" & key, payload: "q" & $subKey)),
      )
      .isOk()

    let d = waitFor KeyedResponse.request("topic", 7)
    check d.isOk()
@@ -343,11 +353,11 @@ static:
suite "RequestBroker macro (sync mode)":
  test "serves zero-argument providers (sync)":
    check SimpleResponseSync
      .setProvider(
        proc(): Result[SimpleResponseSync, string] =
          ok(SimpleResponseSync(value: "hi"))
      )
      .isOk()

    let res = SimpleResponseSync.request()
    check res.isOk()

@@ -363,12 +373,12 @@ suite "RequestBroker macro (sync mode)":
  test "serves input-based providers (sync)":
    var seen: seq[string] = @[]
    check KeyedResponseSync
      .setProvider(
        proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
          seen.add(key)
          ok(KeyedResponseSync(key: key, payload: key & "-payload+" & $subKey))
      )
      .isOk()

    let res = KeyedResponseSync.request("topic", 1)
    check res.isOk()

@@ -380,11 +390,11 @@ suite "RequestBroker macro (sync mode)":

  test "catches provider exception (sync)":
    check KeyedResponseSync
      .setProvider(
        proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
          raise newException(ValueError, "simulated failure")
      )
      .isOk()

    let res = KeyedResponseSync.request("neglected", 11)
    check res.isErr()

@@ -399,18 +409,18 @@ suite "RequestBroker macro (sync mode)":

  test "supports both provider types simultaneously (sync)":
    check DualResponseSync
      .setProvider(
        proc(): Result[DualResponseSync, string] =
          ok(DualResponseSync(note: "base", count: 1))
      )
      .isOk()

    check DualResponseSync
      .setProvider(
        proc(suffix: string): Result[DualResponseSync, string] =
          ok(DualResponseSync(note: "base" & suffix, count: suffix.len))
      )
      .isOk()

    let noInput = DualResponseSync.request()
    check noInput.isOk()

@@ -425,11 +435,11 @@ suite "RequestBroker macro (sync mode)":

  test "clearProvider resets both entries (sync)":
    check DualResponseSync
      .setProvider(
        proc(): Result[DualResponseSync, string] =
          ok(DualResponseSync(note: "temp", count: 0))
      )
      .isOk()
    DualResponseSync.clearProvider()

    let res = DualResponseSync.request()

@@ -437,11 +447,11 @@ suite "RequestBroker macro (sync mode)":

  test "implicit zero-argument provider works by default (sync)":
    check ImplicitResponseSync
      .setProvider(
        proc(): Result[ImplicitResponseSync, string] =
          ok(ImplicitResponseSync(note: "auto"))
      )
      .isOk()

    let res = ImplicitResponseSync.request()
    check res.isOk()

@@ -456,11 +466,11 @@ suite "RequestBroker macro (sync mode)":

  test "implicit zero-argument provider raises error (sync)":
    check ImplicitResponseSync
      .setProvider(
        proc(): Result[ImplicitResponseSync, string] =
          raise newException(ValueError, "simulated failure")
      )
      .isOk()

    let res = ImplicitResponseSync.request()
    check res.isErr()

@@ -472,19 +482,19 @@ suite "RequestBroker macro (sync mode)":
    SimpleResponseSync.clearProvider()

    check SimpleResponseSync
      .setProvider(
        proc(): Result[SimpleResponseSync, string] =
          ok(SimpleResponseSync(value: "default"))
      )
      .isOk()

    check SimpleResponseSync
      .setProvider(
        BrokerContext(0x10101010'u32),
        proc(): Result[SimpleResponseSync, string] =
          ok(SimpleResponseSync(value: "ten")),
      )
      .isOk()

    let defaultRes = SimpleResponseSync.request()
    check defaultRes.isOk()

@@ -504,19 +514,19 @@ suite "RequestBroker macro (sync mode)":
    KeyedResponseSync.clearProvider()

    check KeyedResponseSync
      .setProvider(
        proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
          ok(KeyedResponseSync(key: "default-" & key, payload: $subKey))
      )
      .isOk()

    check KeyedResponseSync
      .setProvider(
        BrokerContext(0xA0A0A0A0'u32),
        proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
          ok(KeyedResponseSync(key: "k-" & key, payload: "p" & $subKey)),
      )
      .isOk()

    let d = KeyedResponseSync.request("topic", 2)
    check d.isOk()

@@ -576,11 +586,11 @@ RequestBroker(sync):
suite "RequestBroker macro (POD/external types)":
  test "supports non-object response types (async)":
    check PodResponse
      .setProvider(
        proc(): Future[Result[PodResponse, string]] {.async.} =
          ok(PodResponse(123))
      )
      .isOk()

    let res = waitFor PodResponse.request()
    check res.isOk()

@@ -590,11 +600,11 @@ suite "RequestBroker macro (POD/external types)":

  test "supports aliased external types (async)":
    check ExternalAliasedResponse
      .setProvider(
        proc(): Future[Result[ExternalAliasedResponse, string]] {.async.} =
          ok(ExternalAliasedResponse(ExternalDefinedTypeAsync(label: "ext")))
      )
      .isOk()

    let res = waitFor ExternalAliasedResponse.request()
    check res.isOk()

@@ -604,11 +614,11 @@ suite "RequestBroker macro (POD/external types)":

  test "supports aliased external types (sync)":
    check ExternalAliasedResponseSync
      .setProvider(
        proc(): Result[ExternalAliasedResponseSync, string] =
          ok(ExternalAliasedResponseSync(ExternalDefinedTypeSync(label: "ext")))
      )
      .isOk()

    let res = ExternalAliasedResponseSync.request()
    check res.isOk()

@@ -618,32 +628,32 @@ suite "RequestBroker macro (POD/external types)":

  test "distinct response types avoid overload ambiguity (sync)":
    check DistinctStringResponseA
      .setProvider(
        proc(): Result[DistinctStringResponseA, string] =
          ok(DistinctStringResponseA("a"))
      )
      .isOk()

    check DistinctStringResponseB
      .setProvider(
        proc(): Result[DistinctStringResponseB, string] =
          ok(DistinctStringResponseB("b"))
      )
      .isOk()

    check ExternalDistinctResponseA
      .setProvider(
        proc(): Result[ExternalDistinctResponseA, string] =
          ok(ExternalDistinctResponseA(ExternalDefinedTypeShared(label: "ea")))
      )
      .isOk()

    check ExternalDistinctResponseB
      .setProvider(
        proc(): Result[ExternalDistinctResponseB, string] =
          ok(ExternalDistinctResponseB(ExternalDefinedTypeShared(label: "eb")))
      )
      .isOk()

    let resA = DistinctStringResponseA.request()
    let resB = DistinctStringResponseB.request()
@@ -29,17 +29,16 @@ suite "SQLite - migrations":

  test "filter and order migration script file paths":
    ## Given
-    let paths =
-      @[
+    let paths = @[
      sourceDir / "00001_valid.up.sql",
      sourceDir / "00002_alsoValidWithUpperCaseExtension.UP.SQL",
      sourceDir / "00007_unorderedValid.up.sql",
      sourceDir / "00003_validRepeated.up.sql",
      sourceDir / "00003_validRepeated.up.sql",
      sourceDir / "00666_noMigrationScript.bmp",
      sourceDir / "00X00_invalidVersion.down.sql",
      sourceDir / "00008_notWithinVersionRange.up.sql",
    ]

    let
      lowerVersion = 0

@@ -64,16 +63,14 @@ suite "SQLite - migrations":

  test "break migration scripts into queries":
    ## Given
-    let statement1 =
-      """CREATE TABLE contacts1 (
+    let statement1 = """CREATE TABLE contacts1 (
      contact_id INTEGER PRIMARY KEY,
      first_name TEXT NOT NULL,
      last_name TEXT NOT NULL,
      email TEXT NOT NULL UNIQUE,
      phone TEXT NOT NULL UNIQUE
    );"""
-    let statement2 =
-      """CREATE TABLE contacts2 (
+    let statement2 = """CREATE TABLE contacts2 (
      contact_id INTEGER PRIMARY KEY,
      first_name TEXT NOT NULL,
      last_name TEXT NOT NULL,

@@ -91,16 +88,14 @@ suite "SQLite - migrations":

  test "break statements script into queries - empty statements":
    ## Given
-    let statement1 =
-      """CREATE TABLE contacts1 (
+    let statement1 = """CREATE TABLE contacts1 (
      contact_id INTEGER PRIMARY KEY,
      first_name TEXT NOT NULL,
      last_name TEXT NOT NULL,
      email TEXT NOT NULL UNIQUE,
      phone TEXT NOT NULL UNIQUE
    );"""
-    let statement2 =
-      """CREATE TABLE contacts2 (
+    let statement2 = """CREATE TABLE contacts2 (
      contact_id INTEGER PRIMARY KEY,
      first_name TEXT NOT NULL,
      last_name TEXT NOT NULL,
@@ -10,22 +10,21 @@ import
import waku/waku_core/peers, waku/node/peer_manager/peer_store/waku_peer_storage

proc `==`(a, b: RemotePeerInfo): bool =
-  let comparisons =
-    @[
+  let comparisons = @[
    a.peerId == b.peerId,
    a.addrs == b.addrs,
    a.enr == b.enr,
    a.protocols == b.protocols,
    a.agent == b.agent,
    a.protoVersion == b.protoVersion,
    a.publicKey == b.publicKey,
    a.connectedness == b.connectedness,
    a.disconnectTime == b.disconnectTime,
    a.origin == b.origin,
    a.direction == b.direction,
    a.lastFailedConn == b.lastFailedConn,
    a.numberFailedConn == b.numberFailedConn,
  ]

  allIt(comparisons, it == true)
@@ -61,18 +60,17 @@ suite "Protobuf Serialisation":
  suite "encode":
    test "simple":
      # Given the expected bytes representation of a valid RemotePeerInfo
-      let expectedBuffer: seq[byte] =
-        @[
+      let expectedBuffer: seq[byte] = @[
        10, 39, 0, 37, 8, 2, 18, 33, 3, 43, 246, 238, 219, 109, 147, 79, 129, 40, 145,
        217, 209, 109, 105, 185, 186, 200, 180, 203, 72, 166, 220, 196, 232, 170, 74,
        141, 125, 255, 112, 238, 204, 18, 8, 4, 192, 168, 0, 1, 6, 31, 144, 34, 95, 8,
-        3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134,
-        72, 206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245,
-        213, 48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1,
-        216, 230, 236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81,
-        81, 12, 9, 142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247,
-        124, 64, 158, 98, 40, 0, 48, 0,
+        3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72,
+        206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245, 213,
+        48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1, 216, 230,
+        236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81, 81, 12, 9,
+        142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247, 124, 64, 158,
+        98, 40, 0, 48, 0,
      ]

      # When converting a valid RemotePeerInfo to a ProtoBuffer
      let encodedRemotePeerInfo = encode(remotePeerInfo).get()

@@ -87,18 +85,17 @@ suite "Protobuf Serialisation":
  suite "decode":
    test "simple":
      # Given the bytes representation of a valid RemotePeerInfo
-      let buffer: seq[byte] =
-        @[
+      let buffer: seq[byte] = @[
        10, 39, 0, 37, 8, 2, 18, 33, 3, 43, 246, 238, 219, 109, 147, 79, 129, 40, 145,
        217, 209, 109, 105, 185, 186, 200, 180, 203, 72, 166, 220, 196, 232, 170, 74,
        141, 125, 255, 112, 238, 204, 18, 8, 4, 192, 168, 0, 1, 6, 31, 144, 34, 95, 8,
-        3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134,
-        72, 206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245,
-        213, 48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1,
-        216, 230, 236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81,
-        81, 12, 9, 142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247,
-        124, 64, 158, 98, 40, 0, 48, 0,
+        3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72,
+        206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245, 213,
+        48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1, 216, 230,
+        236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81, 81, 12, 9,
+        142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247, 124, 64, 158,
+        98, 40, 0, 48, 0,
      ]

      # When converting a valid buffer to RemotePeerInfo
      let decodedRemotePeerInfo = RemotePeerInfo.decode(buffer).get()
@@ -35,13 +35,12 @@ proc protoHealthMock(kind: WakuProtocol, health: HealthStatus): ProtocolHealth =

suite "Health Monitor - health state calculation":
  test "Disconnected, zero peers":
-    let protocols =
-      @[
+    let protocols = @[
      protoHealthMock(RelayProtocol, HealthStatus.NOT_READY),
      protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
      protoHealthMock(FilterClientProtocol, HealthStatus.NOT_READY),
      protoHealthMock(LightpushClientProtocol, HealthStatus.NOT_READY),
    ]
    let strength = initTable[WakuProtocol, int]()
    let state = calculateConnectionState(protocols, strength, some(MockDLow))
    check state == ConnectionStatus.Disconnected

@@ -64,13 +63,12 @@ suite "Health Monitor - health state calculation":
    check state == ConnectionStatus.Connected

  test "Connected, robust edge":
-    let protocols =
-      @[
+    let protocols = @[
      protoHealthMock(RelayProtocol, HealthStatus.NOT_MOUNTED),
      protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
      protoHealthMock(FilterClientProtocol, HealthStatus.READY),
      protoHealthMock(StoreClientProtocol, HealthStatus.READY),
    ]
    var strength = initTable[WakuProtocol, int]()
    strength[LightpushClientProtocol] = HealthyThreshold
    strength[FilterClientProtocol] = HealthyThreshold

@@ -79,12 +77,11 @@ suite "Health Monitor - health state calculation":
    check state == ConnectionStatus.Connected

  test "Disconnected, edge missing store":
-    let protocols =
-      @[
+    let protocols = @[
      protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
      protoHealthMock(FilterClientProtocol, HealthStatus.READY),
      protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
    ]
    var strength = initTable[WakuProtocol, int]()
    strength[LightpushClientProtocol] = HealthyThreshold
    strength[FilterClientProtocol] = HealthyThreshold

@@ -94,12 +91,11 @@ suite "Health Monitor - health state calculation":

  test "PartiallyConnected, edge meets minimum failover requirement":
    let weakCount = max(1, HealthyThreshold - 1)
-    let protocols =
-      @[
+    let protocols = @[
      protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
      protoHealthMock(FilterClientProtocol, HealthStatus.READY),
      protoHealthMock(StoreClientProtocol, HealthStatus.READY),
    ]
    var strength = initTable[WakuProtocol, int]()
    strength[LightpushClientProtocol] = weakCount
    strength[FilterClientProtocol] = weakCount

@@ -108,11 +104,10 @@ suite "Health Monitor - health state calculation":
    check state == ConnectionStatus.PartiallyConnected

  test "Connected, robust relay ignores store server":
-    let protocols =
-      @[
+    let protocols = @[
      protoHealthMock(RelayProtocol, HealthStatus.READY),
      protoHealthMock(StoreProtocol, HealthStatus.READY),
    ]
    var strength = initTable[WakuProtocol, int]()
    strength[RelayProtocol] = MockDLow
    strength[StoreProtocol] = 0

@@ -120,12 +115,11 @@ suite "Health Monitor - health state calculation":
    check state == ConnectionStatus.Connected

  test "Connected, robust relay ignores store client":
-    let protocols =
-      @[
+    let protocols = @[
      protoHealthMock(RelayProtocol, HealthStatus.READY),
      protoHealthMock(StoreProtocol, HealthStatus.READY),
      protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
    ]
    var strength = initTable[WakuProtocol, int]()
    strength[RelayProtocol] = MockDLow
    strength[StoreProtocol] = 0
@ -37,19 +37,18 @@ suite "Waku Store - End to End - Sorted Archive":
    contentTopicSeq = @[contentTopic]

    let timeOrigin = now()
    archiveMessages =
      @[
        fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
        fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
        fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
        fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
        fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
        fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
        fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
        fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
        fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
        fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
      ]
    archiveMessages = @[
      fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
      fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
      fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
      fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
      fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
      fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
      fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
      fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
      fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
      fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
    ]

    historyQuery = HistoryQuery(
      pubsubTopic: some(pubsubTopic),
@ -500,19 +499,18 @@ suite "Waku Store - End to End - Unsorted Archive":
    )

    let timeOrigin = now()
    unsortedArchiveMessages =
      @[ # SortIndex (by timestamp and digest)
        fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)), # 1
        fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)), # 2
        fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)), # 0
        fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)), # 4
        fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)), # 3
        fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)), # 5
        fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)), # 6
        fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)), # 9
        fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)), # 7
        fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)), # 8
      ]
    unsortedArchiveMessages = @[ # SortIndex (by timestamp and digest)
      fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)), # 1
      fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)), # 2
      fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)), # 0
      fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)), # 4
      fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)), # 3
      fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)), # 5
      fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)), # 6
      fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)), # 9
      fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)), # 7
      fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)), # 8
    ]

    let
      serverKey = generateSecp256k1Key()
@ -654,21 +652,20 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
    originTs = proc(offset = 0): Timestamp {.gcsafe, raises: [].} =
      ts(offset, timeOrigin)

    archiveMessages =
      @[
        fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
        fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
        fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
        fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
        fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
        fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
        fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
        fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
        fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
        fakeWakuMessage(
          @[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
        ),
      ]
    archiveMessages = @[
      fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
      fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
      fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
      fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
      fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
      fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
      fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
      fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
      fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
      fakeWakuMessage(
        @[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
      ),
    ]

    let
      serverKey = generateSecp256k1Key()
@ -910,12 +907,11 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
  xasyncTest "Only ephemeral Messages:":
    # Given an archive with only ephemeral messages
    let
      ephemeralMessages =
        @[
          fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
          fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
          fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
        ]
      ephemeralMessages = @[
        fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
        fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
        fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
      ]
      ephemeralArchiveDriver =
        newSqliteArchiveDriver().put(pubsubTopic, ephemeralMessages)

@ -946,18 +942,16 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
  xasyncTest "Mixed messages":
    # Given an archive with both ephemeral and non-ephemeral messages
    let
      ephemeralMessages =
        @[
          fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
          fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
          fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
        ]
      nonEphemeralMessages =
        @[
          fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
          fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
          fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
        ]
      ephemeralMessages = @[
        fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
        fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
        fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
      ]
      nonEphemeralMessages = @[
        fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
        fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
        fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
      ]
      mixedArchiveDriver = newSqliteArchiveDriver()
        .put(pubsubTopic, ephemeralMessages)
        .put(pubsubTopic, nonEphemeralMessages)

@ -283,31 +283,31 @@ suite "Waku RlnRelay - End to End - Static":

    doAssert(
      client.wakuRlnRelay
      .appendRLNProof(
        message1b, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 0)
      )
      .isOk()
        .appendRLNProof(
          message1b, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 0)
        )
        .isOk()
    )
    doAssert(
      client.wakuRlnRelay
      .appendRLNProof(
        message1kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 1)
      )
      .isOk()
        .appendRLNProof(
          message1kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 1)
        )
        .isOk()
    )
    doAssert(
      client.wakuRlnRelay
      .appendRLNProof(
        message150kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 2)
      )
      .isOk()
        .appendRLNProof(
          message150kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 2)
        )
        .isOk()
    )
    doAssert(
      client.wakuRlnRelay
      .appendRLNProof(
        message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
      )
      .isOk()
        .appendRLNProof(
          message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
        )
        .isOk()
    )

    # When sending the 1B message
@ -372,10 +372,10 @@ suite "Waku RlnRelay - End to End - Static":

    doAssert(
      client.wakuRlnRelay
      .appendRLNProof(
        message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
      )
      .isOk()
        .appendRLNProof(
          message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
        )
        .isOk()
    )

    # When sending the 150KiB plus message
@ -496,11 +496,11 @@ suite "Waku RlnRelay - End to End - OnChain":
    # However, it doesn't reduce the retries against the blockchain that the mounting rln process attempts (until it accepts failure).
    # Note: These retries might be an unintended library issue.
    discard await server
    .setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig1)
    .withTimeout(FUTURE_TIMEOUT)
      .setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig1)
      .withTimeout(FUTURE_TIMEOUT)
    discard await client
    .setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig2)
    .withTimeout(FUTURE_TIMEOUT)
      .setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig2)
      .withTimeout(FUTURE_TIMEOUT)

    check:
      (await serverErrorFuture.waitForResult()).get() ==

@ -434,18 +434,16 @@ suite "Sharding":
      contentTopicShort = "/toychat/2/huilong/proto"
      contentTopicFull = "/0/toychat/2/huilong/proto"
      pubsubTopic = "/waku/2/rs/0/58355"
      archiveMessages1 =
        @[
          fakeWakuMessage(
            @[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopicShort
          )
        ]
      archiveMessages2 =
        @[
          fakeWakuMessage(
            @[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopicFull
          )
        ]
      archiveMessages1 = @[
        fakeWakuMessage(
          @[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopicShort
        )
      ]
      archiveMessages2 = @[
        fakeWakuMessage(
          @[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopicFull
        )
      ]
      archiveDriver = newArchiveDriverWithMessages(pubsubTopic, archiveMessages1)
    discard archiveDriver.put(pubsubTopic, archiveMessages2)
    let mountArchiveResult = server.mountArchive(archiveDriver)
@ -597,18 +595,16 @@ suite "Sharding":
      contentTopic2 = "/0/toychat2/2/huilong/proto"
      pubsubTopic2 = "/waku/2/rs/0/23286"
      # Automatically generated from the contentTopic above
      archiveMessages1 =
        @[
          fakeWakuMessage(
            @[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopic1
          )
        ]
      archiveMessages2 =
        @[
          fakeWakuMessage(
            @[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopic2
          )
        ]
      archiveMessages1 = @[
        fakeWakuMessage(
          @[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopic1
        )
      ]
      archiveMessages2 = @[
        fakeWakuMessage(
          @[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopic2
        )
      ]
      archiveDriver = newArchiveDriverWithMessages(pubsubTopic1, archiveMessages1)
    discard archiveDriver.put(pubsubTopic2, archiveMessages2)
    let mountArchiveResult = server.mountArchive(archiveDriver)

@ -38,19 +38,18 @@ suite "Waku Store - End to End - Sorted Archive":
    contentTopicSeq = @[contentTopic]

    let timeOrigin = now()
    let messages =
      @[
        fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
        fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
        fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
        fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
        fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
        fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
        fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
        fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
        fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
        fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
      ]
    let messages = @[
      fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
      fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
      fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
      fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
      fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
      fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
      fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
      fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
      fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
      fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
    ]
    archiveMessages = messages.mapIt(
      WakuMessageKeyValue(
        messageHash: computeMessageHash(pubsubTopic, it),
@ -542,19 +541,18 @@ suite "Waku Store - End to End - Unsorted Archive":
    )

    let timeOrigin = now()
    let messages =
      @[
        fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
        fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)),
        fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)),
        fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)),
        fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)),
        fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)),
        fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)),
        fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)),
        fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)),
        fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)),
      ]
    let messages = @[
      fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
      fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)),
      fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)),
      fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)),
      fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)),
      fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)),
      fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)),
      fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)),
      fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)),
      fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)),
    ]
    unsortedArchiveMessages = messages.mapIt(
      WakuMessageKeyValue(
        messageHash: computeMessageHash(pubsubTopic, it),
@ -759,19 +757,19 @@ suite "Waku Store - End to End - Unsorted Archive without provided Timestamp":
      paginationLimit: some(uint64(5)),
    )

    let messages =
      @[ # Not providing explicit timestamp means it will be set in "arrive" order
        fakeWakuMessage(@[byte 09]),
        fakeWakuMessage(@[byte 07]),
        fakeWakuMessage(@[byte 05]),
        fakeWakuMessage(@[byte 03]),
        fakeWakuMessage(@[byte 01]),
        fakeWakuMessage(@[byte 00]),
        fakeWakuMessage(@[byte 02]),
        fakeWakuMessage(@[byte 04]),
        fakeWakuMessage(@[byte 06]),
        fakeWakuMessage(@[byte 08]),
      ]
    let messages = @[
      # Not providing explicit timestamp means it will be set in "arrive" order
      fakeWakuMessage(@[byte 09]),
      fakeWakuMessage(@[byte 07]),
      fakeWakuMessage(@[byte 05]),
      fakeWakuMessage(@[byte 03]),
      fakeWakuMessage(@[byte 01]),
      fakeWakuMessage(@[byte 00]),
      fakeWakuMessage(@[byte 02]),
      fakeWakuMessage(@[byte 04]),
      fakeWakuMessage(@[byte 06]),
      fakeWakuMessage(@[byte 08]),
    ]
    unsortedArchiveMessages = messages.mapIt(
      WakuMessageKeyValue(
        messageHash: computeMessageHash(pubsubTopic, it),
@ -900,21 +898,20 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
    originTs = proc(offset = 0): Timestamp {.gcsafe, raises: [].} =
      ts(offset, timeOrigin)

    let messages =
      @[
        fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
        fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
        fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
        fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
        fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
        fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
        fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
        fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
        fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
        fakeWakuMessage(
          @[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
        ),
      ]
    let messages = @[
      fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
      fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
      fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
      fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
      fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
      fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
      fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
      fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
      fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
      fakeWakuMessage(
        @[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
      ),
    ]

    archiveMessages = messages.mapIt(
      WakuMessageKeyValue(
@ -1172,12 +1169,11 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
  xasyncTest "Only ephemeral Messages:":
    # Given an archive with only ephemeral messages
    let
      ephemeralMessages =
        @[
          fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
          fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
          fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
        ]
      ephemeralMessages = @[
        fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
        fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
        fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
      ]
      ephemeralArchiveDriver =
        newSqliteArchiveDriver().put(pubsubTopic, ephemeralMessages)

@ -1207,18 +1203,16 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
  xasyncTest "Mixed messages":
    # Given an archive with both ephemeral and non-ephemeral messages
    let
      ephemeralMessages =
        @[
          fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
          fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
          fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
        ]
      nonEphemeralMessages =
        @[
          fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
          fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
          fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
        ]
      ephemeralMessages = @[
        fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
        fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
        fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
      ]
      nonEphemeralMessages = @[
        fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
        fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
        fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
      ]
      mixedArchiveDriver = newSqliteArchiveDriver()
        .put(pubsubTopic, ephemeralMessages)
        .put(pubsubTopic, nonEphemeralMessages)

@ -8,8 +8,7 @@ const
  EMOJI* =
    "😀 😃 😄 😁 😆 😅 🤣 😂 🙂 🙃 😉 😊 😇 🥰 😍 🤩 😘 😗 😚 😙"
  CODE* = "def main():\n\tprint('Hello, world!')"
  QUERY* =
    """
  QUERY* = """
SELECT
  u.id,
  u.name,
@ -30,8 +29,7 @@ const
  u.id = 1
"""
  TEXT_SMALL* = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."
  TEXT_LARGE* =
    """
  TEXT_LARGE* = """
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras gravida vulputate semper. Proin
eleifend varius cursus. Morbi lacinia posuere quam sit amet pretium. Sed non metus fermentum,
venenatis nisl id, vestibulum eros. Quisque non lorem sit amet lectus faucibus elementum eu

@ -1216,30 +1216,29 @@ procSuite "Peer Manager":
      shardId1 = 1.uint16

    # Create 3 nodes with different shards
    let nodes =
      @[
        newTestWakuNode(
          generateSecp256k1Key(),
          parseIpAddress("0.0.0.0"),
          Port(0),
          clusterId = clusterId,
          subscribeShards = @[shardId0],
        ),
        newTestWakuNode(
          generateSecp256k1Key(),
          parseIpAddress("0.0.0.0"),
          Port(0),
          clusterId = clusterId,
          subscribeShards = @[shardId1],
        ),
        newTestWakuNode(
          generateSecp256k1Key(),
          parseIpAddress("0.0.0.0"),
          Port(0),
          clusterId = clusterId,
          subscribeShards = @[shardId0],
        ),
      ]
    let nodes = @[
      newTestWakuNode(
        generateSecp256k1Key(),
        parseIpAddress("0.0.0.0"),
        Port(0),
        clusterId = clusterId,
        subscribeShards = @[shardId0],
      ),
      newTestWakuNode(
        generateSecp256k1Key(),
        parseIpAddress("0.0.0.0"),
        Port(0),
        clusterId = clusterId,
        subscribeShards = @[shardId1],
      ),
      newTestWakuNode(
        generateSecp256k1Key(),
        parseIpAddress("0.0.0.0"),
        Port(0),
        clusterId = clusterId,
        subscribeShards = @[shardId0],
      ),
    ]

    await allFutures(nodes.mapIt(it.start()))
    for node in nodes:
@ -1364,13 +1363,12 @@ procSuite "Peer Manager":
    node.peerManager.switch.peerStore[ProtoBook][peerInfo.peerId] = @[WakuRelayCodec]

    ## When: selectPeer is called with malformed pubsub topic
    let invalidTopics =
      @[
        some(PubsubTopic("invalid-topic")),
        some(PubsubTopic("/waku/2/invalid")),
        some(PubsubTopic("/waku/2/rs/abc/0")), # non-numeric cluster
        some(PubsubTopic("")), # empty topic
      ]
    let invalidTopics = @[
      some(PubsubTopic("invalid-topic")),
      some(PubsubTopic("/waku/2/invalid")),
      some(PubsubTopic("/waku/2/rs/abc/0")), # non-numeric cluster
      some(PubsubTopic("")), # empty topic
    ]

    ## Then: Returns none(RemotePeerInfo) without crashing
    for invalidTopic in invalidTopics:

@ -29,14 +29,12 @@ suite "Waku API - Create node":
    let nodeConfig = NodeConfig.init(
      mode = Core,
      protocolsConfig = ProtocolsConfig.init(
        entryNodes =
          @[
            "enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
          ],
        staticStoreNodes =
          @[
            "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
          ],
        entryNodes = @[
          "enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
        ],
        staticStoreNodes = @[
          "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
        ],
        clusterId = 99,
        autoShardingConfig = AutoShardingConfig(numShardsInCluster: 16),
        messageValidation =
@ -65,11 +63,10 @@ suite "Waku API - Create node":
    let nodeConfig = NodeConfig.init(
      mode = Core,
      protocolsConfig = ProtocolsConfig.init(
        entryNodes =
          @[
            "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
            "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
          ],
        entryNodes = @[
          "enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
          "/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
        ],
        clusterId = 42,
      ),
    )

@ -307,30 +307,29 @@ suite "KeyFile test suite (adapted from nim-eth keyfile tests)":
    # but the last byte of mac is changed to 00.
    # While ciphertext is the correct encryption of priv under password,
    # mac verfication should fail and nothing will be decrypted
    let keyfileWrongMac =
      %*{
        "keyfile": {
          "crypto": {
            "cipher": "aes-128-ctr",
            "cipherparams": {"iv": "6087dab2f9fdbbfaddc31a909735c1e6"},
            "ciphertext":
              "5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46",
            "kdf": "pbkdf2",
            "kdfparams": {
              "c": 262144,
              "dklen": 32,
              "prf": "hmac-sha256",
              "salt": "ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd",
            },
            "mac": "517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e900",
          },
          "id": "3198bc9c-6672-5ab3-d995-4942343ae5b6",
          "version": 3,
        },
        "name": "test1",
        "password": "testpassword",
        "priv": "7a28b5ba57c53603b0b07b56bba752f7784bf506fa95edc395f5cf6c7514fe9d",
      }
    let keyfileWrongMac = %*{
      "keyfile": {
        "crypto": {
          "cipher": "aes-128-ctr",
          "cipherparams": {"iv": "6087dab2f9fdbbfaddc31a909735c1e6"},
          "ciphertext":
            "5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46",
          "kdf": "pbkdf2",
          "kdfparams": {
            "c": 262144,
            "dklen": 32,
            "prf": "hmac-sha256",
            "salt": "ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd",
          },
          "mac": "517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e900",
        },
        "id": "3198bc9c-6672-5ab3-d995-4942343ae5b6",
        "version": 3,
      },
      "name": "test1",
      "password": "testpassword",
      "priv": "7a28b5ba57c53603b0b07b56bba752f7784bf506fa95edc395f5cf6c7514fe9d",
    }

    # Decryption with correct password
    let expectedSecret = decodeHex(keyfileWrongMac.getOrDefault("priv").getStr())

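The opener-hugging rule applies beyond `@[` literals: under nph 0.7.0, `%*{ ... }` JSON literals and triple-quoted string constants also start on the binding line, as in the keyfile hunk above and the QUERY/TEXT_LARGE constants earlier in this diff. A minimal sketch (hypothetical names, std/json only):

    import std/json

    # nph 0.7.0 style: the `%*{` opener stays on the assignment line.
    let sample = %*{"name": "test1", "version": 3}

    # Triple-quoted strings hug the assignment the same way.
    const Banner = """
    multi-line text starts right after the opener
    """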
@ -669,11 +669,10 @@ procSuite "Waku Noise":
    # <- s
    # ...
    # So we define accordingly the sequence of the pre-message public keys
    let preMessagePKs: seq[NoisePublicKey] =
      @[
        toNoisePublicKey(getPublicKey(aliceStaticKey)),
        toNoisePublicKey(getPublicKey(bobStaticKey)),
      ]
    let preMessagePKs: seq[NoisePublicKey] = @[
      toNoisePublicKey(getPublicKey(aliceStaticKey)),
      toNoisePublicKey(getPublicKey(bobStaticKey)),
    ]

    var aliceHS = initialize(
      hsPattern = hsPattern,

@ -117,11 +117,10 @@ procSuite "Waku Rendezvous":
    ## Given: A light client node with no relay protocol
    let
      clusterId = 10.uint16
      configuredShards =
        @[
          RelayShard(clusterId: clusterId, shardId: 0),
          RelayShard(clusterId: clusterId, shardId: 1),
        ]
      configuredShards = @[
        RelayShard(clusterId: clusterId, shardId: 0),
        RelayShard(clusterId: clusterId, shardId: 1),
      ]

    let lightClient = newTestWakuNode(
      generateSecp256k1Key(), parseIpAddress("0.0.0.0"), Port(0), clusterId = clusterId

@ -12,14 +12,14 @@ import waku/node/waku_switch, ./testlib/common, ./testlib/wakucore

proc newCircuitRelayClientSwitch(relayClient: RelayClient): Switch =
  SwitchBuilder
  .new()
  .withRng(rng())
  .withAddresses(@[MultiAddress.init("/ip4/0.0.0.0/tcp/0").tryGet()])
  .withTcpTransport()
  .withMplex()
  .withNoise()
  .withCircuitRelay(relayClient)
  .build()
    .new()
    .withRng(rng())
    .withAddresses(@[MultiAddress.init("/ip4/0.0.0.0/tcp/0").tryGet()])
    .withTcpTransport()
    .withMplex()
    .withNoise()
    .withCircuitRelay(relayClient)
    .build()

suite "Waku Switch":
  asyncTest "Waku Switch works with AutoNat":

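The other recurring change in this commit is dot-chain indentation: nph 0.7.0 indents a continued method chain one level under its receiver, as in the SwitchBuilder hunk above. A minimal runnable sketch with a hypothetical Builder type (not from this repo):

    type Builder = object
    proc new(T: type Builder): Builder = Builder()
    proc withNoise(b: Builder): Builder = b
    proc build(b: Builder): Builder = b

    # nph 0.7.0 output: the chain is indented under `Builder`.
    let sw = Builder
      .new()
      .withNoise()
      .build()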
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -121,16 +121,15 @@ suite "Waku Archive - Retention policy":
      retentionPolicy: RetentionPolicy =
        CapacityRetentionPolicy.new(capacity = capacity)

    let messages =
      @[
        fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(0)),
        fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(1)),
        fakeWakuMessage(contentTopic = contentTopic, ts = ts(2)),
        fakeWakuMessage(contentTopic = contentTopic, ts = ts(3)),
        fakeWakuMessage(contentTopic = contentTopic, ts = ts(4)),
        fakeWakuMessage(contentTopic = contentTopic, ts = ts(5)),
        fakeWakuMessage(contentTopic = contentTopic, ts = ts(6)),
      ]
    let messages = @[
      fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(0)),
      fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(1)),
      fakeWakuMessage(contentTopic = contentTopic, ts = ts(2)),
      fakeWakuMessage(contentTopic = contentTopic, ts = ts(3)),
      fakeWakuMessage(contentTopic = contentTopic, ts = ts(4)),
      fakeWakuMessage(contentTopic = contentTopic, ts = ts(5)),
      fakeWakuMessage(contentTopic = contentTopic, ts = ts(6)),
    ]

    ## When
    for msg in messages:

@ -36,14 +36,13 @@ suite "Waku Archive - message handling":
    let archive = newWakuArchive(driver)

    ## Given
    let msgList =
      @[
        fakeWakuMessage(ephemeral = false, payload = "1"),
        fakeWakuMessage(ephemeral = true, payload = "2"),
        fakeWakuMessage(ephemeral = true, payload = "3"),
        fakeWakuMessage(ephemeral = true, payload = "4"),
        fakeWakuMessage(ephemeral = false, payload = "5"),
      ]
    let msgList = @[
      fakeWakuMessage(ephemeral = false, payload = "1"),
      fakeWakuMessage(ephemeral = true, payload = "2"),
      fakeWakuMessage(ephemeral = true, payload = "3"),
      fakeWakuMessage(ephemeral = true, payload = "4"),
      fakeWakuMessage(ephemeral = false, payload = "5"),
    ]

    ## When
    for msg in msgList:
@ -127,39 +126,38 @@ suite "Waku Archive - message handling":
procSuite "Waku Archive - find messages":
  ## Fixtures
  let timeOrigin = now()
  let msgListA =
    @[
      fakeWakuMessage(
        @[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
      ),
    ]
  let msgListA = @[
    fakeWakuMessage(
      @[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
    ),
  ]

  let archiveA = block:
    let
@ -446,19 +444,18 @@ procSuite "Waku Archive - find messages":
      driver = newSqliteArchiveDriver()
      archive = newWakuArchive(driver)

    let msgList =
      @[
        fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
        fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
      ]
    let msgList = @[
      fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
      fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
    ]

    for msg in msgList:
      require (

File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -33,14 +33,13 @@ suite "Waku Archive - message handling":
    let archive = newWakuArchive(driver)

    ## Given
    let msgList =
      @[
        fakeWakuMessage(ephemeral = false, payload = "1"),
        fakeWakuMessage(ephemeral = true, payload = "2"),
        fakeWakuMessage(ephemeral = true, payload = "3"),
        fakeWakuMessage(ephemeral = true, payload = "4"),
        fakeWakuMessage(ephemeral = false, payload = "5"),
      ]
    let msgList = @[
      fakeWakuMessage(ephemeral = false, payload = "1"),
      fakeWakuMessage(ephemeral = true, payload = "2"),
      fakeWakuMessage(ephemeral = true, payload = "3"),
      fakeWakuMessage(ephemeral = true, payload = "4"),
      fakeWakuMessage(ephemeral = false, payload = "5"),
    ]

    ## When
    for msg in msgList:
@ -108,39 +107,38 @@ suite "Waku Archive - message handling":
procSuite "Waku Archive - find messages":
  ## Fixtures
  let timeOrigin = now()
  let msgListA =
    @[
      fakeWakuMessage(
        @[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
      ),
      fakeWakuMessage(
        @[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
      ),
    ]
  let msgListA = @[
    fakeWakuMessage(
      @[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
    ),
    fakeWakuMessage(
      @[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
    ),
  ]

  let archiveA = block:
    let
@ -433,19 +431,18 @@ procSuite "Waku Archive - find messages":
      driver = newSqliteArchiveDriver()
      archive = newWakuArchive(driver)

    let msgList =
      @[
        fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
        fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
        fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
      ]
    let msgList = @[
      fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
      fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
      fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
    ]

    for msg in msgList:
      require (

@ -140,14 +140,13 @@ suite "Discovery Mechanisms for Shards":
  test "Bit Vector Representation":
    # Given a valid bit vector and its representation
    let
      bitVector: seq[byte] =
        @[
          0, 73, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
          0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
          0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
          0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
          0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        ]
      bitVector: seq[byte] = @[
        0, 73, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
      ]
      clusterId: uint16 = 73 # bitVector's clusterId
      shardIds: seq[uint16] = @[1u16, 10u16] # bitVector's shardIds

@ -79,11 +79,10 @@ suite "Waku rln relay":
    let rln = rlnInstance.get()

    # prepare the input
    let msg =
      @[
        "126f4c026cd731979365f79bd345a46d673c5a3f6f588bdc718e6356d02b6fdc".toBytes(),
        "1f0e5db2b69d599166ab16219a97b82b662085c93220382b39f9f911d3b943b1".toBytes(),
      ]
    let msg = @[
      "126f4c026cd731979365f79bd345a46d673c5a3f6f588bdc718e6356d02b6fdc".toBytes(),
      "1f0e5db2b69d599166ab16219a97b82b662085c93220382b39f9f911d3b943b1".toBytes(),
    ]

    let hashRes = poseidon(msg)

@ -457,7 +456,7 @@ suite "Waku rln relay":
        password = password,
        appInfo = RLNAppInfo,
      )
      .isOk()
        .isOk()

    let readKeystoreRes = getMembershipCredentials(
      path = filepath,

@ -138,8 +138,10 @@ procSuite "WakuNode - RLN relay":
      WakuMessage(payload: @payload, contentTopic: contentTopic, timestamp: now())
    doAssert(
      node1.wakuRlnRelay
      .unsafeAppendRLNProof(message, node1.wakuRlnRelay.getCurrentEpoch(), MessageId(0))
      .isOk()
        .unsafeAppendRLNProof(
          message, node1.wakuRlnRelay.getCurrentEpoch(), MessageId(0)
        )
        .isOk()
    )

    info " Nodes participating in the test",
@ -223,11 +225,10 @@ procSuite "WakuNode - RLN relay":

    let shards =
      @[RelayShard(clusterId: 0, shardId: 0), RelayShard(clusterId: 0, shardId: 1)]
    let contentTopics =
      @[
        ContentTopic("/waku/2/content-topic-a/proto"),
        ContentTopic("/waku/2/content-topic-b/proto"),
      ]
    let contentTopics = @[
      ContentTopic("/waku/2/content-topic-a/proto"),
      ContentTopic("/waku/2/content-topic-b/proto"),
    ]

    # connect them together
    await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])

@ -535,22 +535,21 @@ proc runAnvil*(
  let anvilPath = getAnvilPath()
  info "Anvil path", anvilPath

  var args =
    @[
      "--port",
      $port,
      "--gas-limit",
      "30000000",
      "--gas-price",
      "7",
      "--base-fee",
      "7",
      "--balance",
      "10000000000",
      "--chain-id",
      $chainId,
      "--disable-min-priority-fee",
    ]
  var args = @[
    "--port",
    $port,
    "--gas-limit",
    "30000000",
    "--gas-price",
    "7",
    "--base-fee",
    "7",
    "--balance",
    "10000000000",
    "--chain-id",
    $chainId,
    "--disable-min-priority-fee",
  ]

  # Add state file argument if provided
  if stateFile.isSome():

@ -35,24 +35,23 @@ suite "Store Client":
    hash1 = computeMessageHash(DefaultPubsubTopic, message1)
    hash2 = computeMessageHash(DefaultPubsubTopic, message2)
    hash3 = computeMessageHash(DefaultPubsubTopic, message3)
    messageSeq =
      @[
        WakuMessageKeyValue(
          messageHash: hash1,
          message: some(message1),
          pubsubTopic: some(DefaultPubsubTopic),
        ),
        WakuMessageKeyValue(
          messageHash: hash2,
          message: some(message2),
          pubsubTopic: some(DefaultPubsubTopic),
        ),
        WakuMessageKeyValue(
          messageHash: hash3,
          message: some(message3),
          pubsubTopic: some(DefaultPubsubTopic),
        ),
      ]
    messageSeq = @[
      WakuMessageKeyValue(
        messageHash: hash1,
        message: some(message1),
        pubsubTopic: some(DefaultPubsubTopic),
      ),
      WakuMessageKeyValue(
        messageHash: hash2,
        message: some(message2),
        pubsubTopic: some(DefaultPubsubTopic),
      ),
      WakuMessageKeyValue(
        messageHash: hash3,
        message: some(message3),
        pubsubTopic: some(DefaultPubsubTopic),
      ),
    ]
    handlerFuture = newHistoryFuture()
    handler = proc(req: StoreQueryRequest): Future[StoreQueryResult] {.async, gcsafe.} =
      var request = req

@ -50,19 +50,18 @@ suite "Store Resume - End to End":
  var clientDriver {.threadvar.}: ArchiveDriver

  asyncSetup:
    let messages =
      @[
        fakeWakuMessage(@[byte 00]),
        fakeWakuMessage(@[byte 01]),
        fakeWakuMessage(@[byte 02]),
        fakeWakuMessage(@[byte 03]),
        fakeWakuMessage(@[byte 04]),
        fakeWakuMessage(@[byte 05]),
        fakeWakuMessage(@[byte 06]),
        fakeWakuMessage(@[byte 07]),
        fakeWakuMessage(@[byte 08]),
        fakeWakuMessage(@[byte 09]),
      ]
    let messages = @[
      fakeWakuMessage(@[byte 00]),
      fakeWakuMessage(@[byte 01]),
      fakeWakuMessage(@[byte 02]),
      fakeWakuMessage(@[byte 03]),
      fakeWakuMessage(@[byte 04]),
      fakeWakuMessage(@[byte 05]),
      fakeWakuMessage(@[byte 06]),
      fakeWakuMessage(@[byte 07]),
      fakeWakuMessage(@[byte 08]),
      fakeWakuMessage(@[byte 09]),
    ]

    let
      serverKey = generateSecp256k1Key()

@ -32,19 +32,18 @@ import
procSuite "WakuNode - Store":
  ## Fixtures
  let timeOrigin = now()
  let msgListA =
    @[
      fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
      fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
      fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
      fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
      fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
      fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
      fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
      fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
      fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
      fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
    ]
  let msgListA = @[
    fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
    fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
    fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
    fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
    fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
    fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
    fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
    fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
    fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
    fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
  ]

  let hashes = msgListA.mapIt(computeMessageHash(DefaultPubsubTopic, it))

@ -29,93 +29,91 @@ when defined(waku_exp_store_resume):
  ## Fixtures
  let storeA = block:
    let store = newTestMessageStore()
    let msgList =
      @[
        fakeWakuMessage(
          payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
        ),
        fakeWakuMessage(
          payload = @[byte 1], contentTopic = ContentTopic("1"), ts = ts(1)
        ),
        fakeWakuMessage(
          payload = @[byte 2], contentTopic = ContentTopic("2"), ts = ts(2)
        ),
        fakeWakuMessage(
          payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
        ),
        fakeWakuMessage(
          payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
        ),
        fakeWakuMessage(
          payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
        ),
        fakeWakuMessage(
          payload = @[byte 6], contentTopic = ContentTopic("2"), ts = ts(6)
        ),
        fakeWakuMessage(
          payload = @[byte 7], contentTopic = ContentTopic("1"), ts = ts(7)
        ),
        fakeWakuMessage(
          payload = @[byte 8], contentTopic = ContentTopic("2"), ts = ts(8)
        ),
        fakeWakuMessage(
          payload = @[byte 9], contentTopic = ContentTopic("1"), ts = ts(9)
        ),
      ]
    let msgList = @[
      fakeWakuMessage(
        payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
      ),
      fakeWakuMessage(
        payload = @[byte 1], contentTopic = ContentTopic("1"), ts = ts(1)
      ),
      fakeWakuMessage(
        payload = @[byte 2], contentTopic = ContentTopic("2"), ts = ts(2)
      ),
      fakeWakuMessage(
        payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
      ),
      fakeWakuMessage(
        payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
      ),
      fakeWakuMessage(
        payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
      ),
      fakeWakuMessage(
        payload = @[byte 6], contentTopic = ContentTopic("2"), ts = ts(6)
      ),
      fakeWakuMessage(
        payload = @[byte 7], contentTopic = ContentTopic("1"), ts = ts(7)
      ),
      fakeWakuMessage(
        payload = @[byte 8], contentTopic = ContentTopic("2"), ts = ts(8)
      ),
      fakeWakuMessage(
        payload = @[byte 9], contentTopic = ContentTopic("1"), ts = ts(9)
      ),
    ]

    for msg in msgList:
      require store
      .put(
        DefaultPubsubTopic,
        msg,
        computeDigest(msg),
        computeMessageHash(DefaultPubsubTopic, msg),
        msg.timestamp,
      )
      .isOk()
        .put(
          DefaultPubsubTopic,
          msg,
          computeDigest(msg),
          computeMessageHash(DefaultPubsubTopic, msg),
          msg.timestamp,
        )
        .isOk()

    store

  let storeB = block:
    let store = newTestMessageStore()
    let msgList2 =
      @[
        fakeWakuMessage(
          payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
        ),
        fakeWakuMessage(
          payload = @[byte 11], contentTopic = ContentTopic("1"), ts = ts(1)
        ),
        fakeWakuMessage(
          payload = @[byte 12], contentTopic = ContentTopic("2"), ts = ts(2)
        ),
        fakeWakuMessage(
          payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
        ),
        fakeWakuMessage(
          payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
        ),
        fakeWakuMessage(
          payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
        ),
        fakeWakuMessage(
          payload = @[byte 13], contentTopic = ContentTopic("2"), ts = ts(6)
        ),
        fakeWakuMessage(
          payload = @[byte 14], contentTopic = ContentTopic("1"), ts = ts(7)
        ),
      ]
    let msgList2 = @[
      fakeWakuMessage(
        payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
      ),
      fakeWakuMessage(
        payload = @[byte 11], contentTopic = ContentTopic("1"), ts = ts(1)
      ),
      fakeWakuMessage(
        payload = @[byte 12], contentTopic = ContentTopic("2"), ts = ts(2)
      ),
      fakeWakuMessage(
        payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
      ),
      fakeWakuMessage(
        payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
      ),
      fakeWakuMessage(
        payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
      ),
      fakeWakuMessage(
        payload = @[byte 13], contentTopic = ContentTopic("2"), ts = ts(6)
      ),
      fakeWakuMessage(
        payload = @[byte 14], contentTopic = ContentTopic("1"), ts = ts(7)
      ),
    ]

    for msg in msgList2:
      require store
      .put(
        DefaultPubsubTopic,
        msg,
        computeDigest(msg),
        computeMessageHash(DefaultPubsubTopic, msg),
        msg.timestamp,
      )
      .isOk()
        .put(
          DefaultPubsubTopic,
          msg,
          computeDigest(msg),
          computeMessageHash(DefaultPubsubTopic, msg),
          msg.timestamp,
        )
        .isOk()

    store

@ -136,11 +134,10 @@ when defined(waku_exp_store_resume):
      client = newTestWakuStoreClient(clientSwitch)

    ## Given
    let peers =
      @[
        serverSwitchA.peerInfo.toRemotePeerInfo(),
        serverSwitchB.peerInfo.toRemotePeerInfo(),
      ]
    let peers = @[
      serverSwitchA.peerInfo.toRemotePeerInfo(),
      serverSwitchB.peerInfo.toRemotePeerInfo(),
    ]
    let req = HistoryQuery(contentTopics: @[DefaultContentTopic], pageSize: 5)

    ## When
@ -226,12 +223,11 @@ when defined(waku_exp_store_resume):
      client = await newTestWakuStore(clientSwitch)

    ## Given
    let peers =
      @[
        offlineSwitch.peerInfo.toRemotePeerInfo(),
        serverASwitch.peerInfo.toRemotePeerInfo(),
        serverBSwitch.peerInfo.toRemotePeerInfo(),
      ]
    let peers = @[
      offlineSwitch.peerInfo.toRemotePeerInfo(),
      serverASwitch.peerInfo.toRemotePeerInfo(),
      serverBSwitch.peerInfo.toRemotePeerInfo(),
    ]

    ## When
    let res = await client.resume(some(peers))
@ -323,11 +319,11 @@ when defined(waku_exp_store_resume):
      receivedTime3 = now() + getNanosecondTime(10)
      digest3 = computeDigest(msg3)
    require server.wakuStore.store
    .put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
    .isOk()
      .put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
      .isOk()
    require client.wakuStore.store
    .put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
    .isOk()
      .put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
      .isOk()

    let serverPeer = server.peerInfo.toRemotePeerInfo()

@ -100,11 +100,10 @@ procSuite "Waku Store - RPC codec":
        direction: some(PagingDirection.BACKWARD),
      )
      query = HistoryQueryRPC(
        contentFilters:
          @[
            HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
            HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
          ],
        contentFilters: @[
          HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
          HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
        ],
        pagingInfo: some(pagingInfo),
        startTime: some(Timestamp(10)),
        endTime: some(Timestamp(11)),

@ -30,19 +30,18 @@ import
|
||||
procSuite "WakuNode - Store Legacy":
|
||||
## Fixtures
|
||||
let timeOrigin = now()
|
||||
let msgListA =
|
||||
@[
|
||||
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
|
||||
]
|
||||
let msgListA = @[
|
||||
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
|
||||
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
|
||||
]
|
||||
|
||||
let archiveA = block:
|
||||
let driver = newSqliteArchiveDriver()
|
||||
|
||||
@ -119,12 +119,11 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(wholeRange, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
wholeRange, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(
wholeRange, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
itemSets: @[],
)

@ -180,12 +179,11 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(sliceWhole, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
sliceWhole, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(
sliceWhole, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
itemSets: @[],
)

@ -207,12 +205,11 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(subSlice, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
subSlice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(
subSlice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
itemSets: @[],
)

@ -272,12 +269,9 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(slice, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
slice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(slice, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
)


@ -44,12 +44,9 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(whole, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
whole, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(whole, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
)
let rep1 = local.processPayload(p1, s1, r1)
@ -131,15 +128,10 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(sliceA, RangeType.Fingerprint), (sliceB, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
sliceA, @[DefaultPubsubTopic], @[DefaultContentTopic]
),
remote.computeFingerprint(
sliceB, @[DefaultPubsubTopic], @[DefaultContentTopic]
),
],
fingerprints: @[
remote.computeFingerprint(sliceA, @[DefaultPubsubTopic], @[DefaultContentTopic]),
remote.computeFingerprint(sliceB, @[DefaultPubsubTopic], @[DefaultContentTopic]),
],
itemSets: @[],
)
let reply = local.processPayload(payload, s, r)
@ -180,12 +172,9 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(slice, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
slice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(slice, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
)
let reply = local.processPayload(p, toS, toR)
@ -236,12 +225,9 @@ suite "Waku Sync – reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(s, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
s, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(s, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
),
sendQ,

@ -176,12 +176,11 @@ suite "Waku v2 Rest API - Filter V2":
)
discard await restFilterTest.client.filterPostSubscriptions(requestBody)

let contentFilters =
@[
ContentTopic("1"),
ContentTopic("2"),
ContentTopic("3"), # ,ContentTopic("4") # Keep this subscription for check
]
let contentFilters = @[
ContentTopic("1"),
ContentTopic("2"),
ContentTopic("3"), # ,ContentTopic("4") # Keep this subscription for check
]

let requestBodyUnsub = FilterUnsubscribeRequest(
requestId: "4321",

@ -193,15 +193,14 @@ suite "Waku v2 Rest API - Relay":

let pubSubTopic = "/waku/2/rs/0/0"

var messages =
@[
fakeWakuMessage(
contentTopic = "content-topic-x",
payload = toBytes("TEST-1"),
meta = toBytes("test-meta"),
ephemeral = true,
)
]
var messages = @[
fakeWakuMessage(
contentTopic = "content-topic-x",
payload = toBytes("TEST-1"),
meta = toBytes("test-meta"),
ephemeral = true,
)
]

# Prevent duplicate messages
for i in 0 ..< 2:
@ -348,12 +347,11 @@ suite "Waku v2 Rest API - Relay":
installRelayApiHandlers(restServer.router, node, cache)
restServer.start()

let contentTopics =
@[
ContentTopic("/app-1/2/default-content/proto"),
ContentTopic("/app-2/2/default-content/proto"),
ContentTopic("/app-3/2/default-content/proto"),
]
let contentTopics = @[
ContentTopic("/app-1/2/default-content/proto"),
ContentTopic("/app-2/2/default-content/proto"),
ContentTopic("/app-3/2/default-content/proto"),
]

# When
let client = newRestHttpClient(initTAddress(restAddress, restPort))
@ -394,13 +392,12 @@ suite "Waku v2 Rest API - Relay":

restPort = restServer.httpServer.address.port # update with bound port for client use

let contentTopics =
@[
ContentTopic("/waku/2/default-content1/proto"),
ContentTopic("/waku/2/default-content2/proto"),
ContentTopic("/waku/2/default-content3/proto"),
ContentTopic("/waku/2/default-contentX/proto"),
]
let contentTopics = @[
ContentTopic("/waku/2/default-content1/proto"),
ContentTopic("/waku/2/default-content2/proto"),
ContentTopic("/waku/2/default-content3/proto"),
ContentTopic("/waku/2/default-contentX/proto"),
]

let cache = MessageCache.init()
cache.contentSubscribe(contentTopics[0])
@ -454,10 +451,9 @@ suite "Waku v2 Rest API - Relay":

let contentTopic = DefaultContentTopic

var messages =
@[
fakeWakuMessage(contentTopic = DefaultContentTopic, payload = toBytes("TEST-1"))
]
var messages = @[
fakeWakuMessage(contentTopic = DefaultContentTopic, payload = toBytes("TEST-1"))
]

# Prevent duplicate messages
for i in 0 ..< 2:

@ -115,17 +115,16 @@ procSuite "Waku Rest API - Store v3":
await sleepAsync(1.seconds())

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 1, byte 2], ts = 2),
fakeWakuMessage(@[byte 1], ts = 3),
fakeWakuMessage(@[byte 1], ts = 4),
fakeWakuMessage(@[byte 1], ts = 5),
fakeWakuMessage(@[byte 1], ts = 6),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("c2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 1, byte 2], ts = 2),
fakeWakuMessage(@[byte 1], ts = 3),
fakeWakuMessage(@[byte 1], ts = 4),
fakeWakuMessage(@[byte 1], ts = 5),
fakeWakuMessage(@[byte 1], ts = 6),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("c2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -191,17 +190,16 @@ procSuite "Waku Rest API - Store v3":
peerSwitch.mount(node.wakuStore)

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 1, byte 2], ts = 2),
fakeWakuMessage(@[byte 1], ts = 3),
fakeWakuMessage(@[byte 1], ts = 4),
fakeWakuMessage(@[byte 1], ts = 5),
fakeWakuMessage(@[byte 1], ts = 6),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("c2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 1, byte 2], ts = 2),
fakeWakuMessage(@[byte 1], ts = 3),
fakeWakuMessage(@[byte 1], ts = 4),
fakeWakuMessage(@[byte 1], ts = 5),
fakeWakuMessage(@[byte 1], ts = 6),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("c2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -262,19 +260,18 @@ procSuite "Waku Rest API - Store v3":

# Now prime it with some history before tests
let timeOrigin = wakucore.now()
let msgList =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let msgList = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -357,12 +354,11 @@ procSuite "Waku Rest API - Store v3":
peerSwitch.mount(node.wakuStore)

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -431,12 +427,11 @@ procSuite "Waku Rest API - Store v3":
peerSwitch.mount(node.wakuStore)

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -521,12 +516,11 @@ procSuite "Waku Rest API - Store v3":
peerSwitch.mount(node.wakuStore)

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -594,12 +588,11 @@ procSuite "Waku Rest API - Store v3":
await node.mountStore()

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -640,14 +633,13 @@ procSuite "Waku Rest API - Store v3":
await node.mountStore()

# Now prime it with some history before tests
let msgList =
@[
fakeWakuMessage(
@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0, meta = (@[byte 8])
),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
let msgList = @[
fakeWakuMessage(
@[byte 0], contentTopic = ContentTopic("ct1"), ts = 0, meta = (@[byte 8])
),
fakeWakuMessage(@[byte 1], ts = 1),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("ct2"), ts = 9),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()

@ -757,19 +749,18 @@ procSuite "Waku Rest API - Store v3":

# Now prime it with some history before tests
let timeOrigin = wakucore.now()
let msgList =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let msgList = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
for msg in msgList:
require (await driver.put(DefaultPubsubTopic, msg)).isOk()


@ -250,8 +250,7 @@ type WakuNodeConf* = object

## Circuit-relay config
isRelayClient* {.
desc:
"""Set the node as a relay-client.
desc: """Set the node as a relay-client.
Set it to true for nodes that run behind a NAT or firewall and
hence would have reachability issues.""",
defaultValue: false,

@ -62,10 +62,9 @@ proc init*(
)

const TheWakuNetworkPreset* = ProtocolsConfig(
entryNodes:
@[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im"
],
entryNodes: @[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im"
],
staticStoreNodes: @[],
clusterId: 1,
autoShardingConfig: AutoShardingConfig(numShardsInCluster: 8),

@ -151,14 +151,13 @@ macro EventBroker*(body: untyped): untyped =
proc `accessProcIdent`(): `brokerTypeIdent` =
if `globalVarIdent`.isNil():
new(`globalVarIdent`)
`globalVarIdent`.buckets =
@[
`bucketTypeIdent`(
brokerCtx: DefaultBrokerContext,
listeners: initTable[uint64, `handlerProcIdent`](),
nextId: 1'u64,
)
]
`globalVarIdent`.buckets = @[
`bucketTypeIdent`(
brokerCtx: DefaultBrokerContext,
listeners: initTable[uint64, `handlerProcIdent`](),
nextId: 1'u64,
)
]
`globalVarIdent`

)

@ -211,11 +211,10 @@ proc waitQueryToFinish(
pqclear(pqResult)

proc containsRiskyPatterns(input: string): bool =
let riskyPatterns =
@[
" OR ", " AND ", " UNION ", " SELECT ", "INSERT ", "DELETE ", "UPDATE ", "DROP ",
"EXEC ", "--", "/*", "*/",
]
let riskyPatterns = @[
" OR ", " AND ", " UNION ", " SELECT ", "INSERT ", "DELETE ", "UPDATE ", "DROP ",
"EXEC ", "--", "/*", "*/",
]

for pattern in riskyPatterns:
if pattern.toLowerAscii() in input.toLowerAscii():

@ -106,16 +106,8 @@ proc mgetOrPut*[K, V](t: var TimedMap[K, V], k: K, v: V, now = Moment.now()): va

let
previous = t.del(k) # Refresh existing item
addedAt =
if previous.isSome():
previous[].addedAt
else:
now
value =
if previous.isSome():
previous[].value
else:
v
addedAt = if previous.isSome(): previous[].addedAt else: now
value = if previous.isSome(): previous[].value else: v

let node =
TimedEntry[K, V](key: k, value: value, addedAt: addedAt, expiresAt: now + t.timeout)

@ -84,20 +84,19 @@ proc withNetworkConfigurationDetails*(
): WakuNodeBuilderResult {.
deprecated: "use 'builder.withNetworkConfiguration()' instead"
.} =
let netConfig =
?NetConfig.init(
bindIp = bindIp,
bindPort = bindPort,
extIp = extIp,
extPort = extPort,
extMultiAddrs = extMultiAddrs,
wsBindPort = some(wsBindPort),
wsEnabled = wsEnabled,
wssEnabled = wssEnabled,
wakuFlags = wakuFlags,
dns4DomainName = dns4DomainName,
dnsNameServers = dnsNameServers,
)
let netConfig = ?NetConfig.init(
bindIp = bindIp,
bindPort = bindPort,
extIp = extIp,
extPort = extPort,
extMultiAddrs = extMultiAddrs,
wsBindPort = some(wsBindPort),
wsEnabled = wsEnabled,
wssEnabled = wssEnabled,
wakuFlags = wakuFlags,
dns4DomainName = dns4DomainName,
dnsNameServers = dnsNameServers,
)
builder.withNetworkConfiguration(netConfig)
ok()


@ -46,12 +46,11 @@ proc TheWakuNetworkConf*(T: type NetworkConf): NetworkConf =
rlnRelayUserMessageLimit: 100,
shardingConf: ShardingConf(kind: AutoSharding, numShardsInCluster: 8),
discv5Discovery: true,
discv5BootstrapNodes:
@[
"enr:-QESuED0qW1BCmF-oH_ARGPr97Nv767bl_43uoy70vrbah3EaCAdK3Q0iRQ6wkSTTpdrg_dU_NC2ydO8leSlRpBX4pxiAYJpZIJ2NIJpcIRA4VDAim11bHRpYWRkcnO4XAArNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwAtNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQOTd-h5owwj-cx7xrmbvQKU8CV3Fomfdvcv1MBc-67T5oN0Y3CCdl-DdWRwgiMohXdha3UyDw",
"enr:-QEkuED9X80QF_jcN9gA2ZRhhmwVEeJnsg_Hyg7IFCTYnZD0BDI7a8HArE61NhJZFwygpHCWkgwSt2vqiABXkBxzIqZBAYJpZIJ2NIJpcIQiQlleim11bHRpYWRkcnO4bgA0Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwA2Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQPFAS8zz2cg1QQhxMaK8CzkGQ5wdHvPJcrgLzJGOiHpwYN0Y3CCdl-DdWRwgiMohXdha3UyDw",
"enr:-QEkuEBfEzJm_kigJ2HoSS_RBFJYhKHocGdkhhBr6jSUAWjLdFPp6Pj1l4yiTQp7TGHyu1kC6FyaU573VN8klLsEm-XuAYJpZIJ2NIJpcIQI2SVcim11bHRpYWRkcnO4bgA0Ni9ub2RlLTAxLmFjLWNuLWhvbmdrb25nLWMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwA2Ni9ub2RlLTAxLmFjLWNuLWhvbmdrb25nLWMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQOwsS69tgD7u1K50r5-qG5hweuTwa0W26aYPnvivpNlrYN0Y3CCdl-DdWRwgiMohXdha3UyDw",
],
discv5BootstrapNodes: @[
"enr:-QESuED0qW1BCmF-oH_ARGPr97Nv767bl_43uoy70vrbah3EaCAdK3Q0iRQ6wkSTTpdrg_dU_NC2ydO8leSlRpBX4pxiAYJpZIJ2NIJpcIRA4VDAim11bHRpYWRkcnO4XAArNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwAtNiZub2RlLTAxLmRvLWFtczMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQOTd-h5owwj-cx7xrmbvQKU8CV3Fomfdvcv1MBc-67T5oN0Y3CCdl-DdWRwgiMohXdha3UyDw",
"enr:-QEkuED9X80QF_jcN9gA2ZRhhmwVEeJnsg_Hyg7IFCTYnZD0BDI7a8HArE61NhJZFwygpHCWkgwSt2vqiABXkBxzIqZBAYJpZIJ2NIJpcIQiQlleim11bHRpYWRkcnO4bgA0Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwA2Ni9ub2RlLTAxLmdjLXVzLWNlbnRyYWwxLWEud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQPFAS8zz2cg1QQhxMaK8CzkGQ5wdHvPJcrgLzJGOiHpwYN0Y3CCdl-DdWRwgiMohXdha3UyDw",
"enr:-QEkuEBfEzJm_kigJ2HoSS_RBFJYhKHocGdkhhBr6jSUAWjLdFPp6Pj1l4yiTQp7TGHyu1kC6FyaU573VN8klLsEm-XuAYJpZIJ2NIJpcIQI2SVcim11bHRpYWRkcnO4bgA0Ni9ub2RlLTAxLmFjLWNuLWhvbmdrb25nLWMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQZ2XwA2Ni9ub2RlLTAxLmFjLWNuLWhvbmdrb25nLWMud2FrdS5zYW5kYm94LnN0YXR1cy5pbQYfQN4DgnJzkwABCAAAAAEAAgADAAQABQAGAAeJc2VjcDI1NmsxoQOwsS69tgD7u1K50r5-qG5hweuTwa0W26aYPnvivpNlrYN0Y3CCdl-DdWRwgiMohXdha3UyDw",
],
)

proc validateShards*(

@ -126,11 +126,10 @@ proc initNode(
builder.withRateLimit(conf.rateLimit)
builder.withCircuitRelay(relay)

let node =
?builder.build().mapErr(
proc(err: string): string =
"failed to create waku node instance: " & err
)
let node = ?builder.build().mapErr(
proc(err: string): string =
"failed to create waku node instance: " & err
)

ok(node)


@ -1,11 +1,9 @@
{.push raises: [].}

import
std/
[
options, sets, sequtils, times, strformat, strutils, math, random, tables,
algorithm,
],
std/[
options, sets, sequtils, times, strformat, strutils, math, random, tables, algorithm
],
chronos,
chronicles,
metrics,

@ -354,12 +354,11 @@ proc mountStoreSync*(

let pubsubTopics = shards.mapIt($RelayShard(clusterId: cluster, shardId: it))

let recon =
?await SyncReconciliation.new(
pubsubTopics, contentTopics, node.peerManager, node.wakuArchive,
storeSyncRange.seconds, storeSyncInterval.seconds, storeSyncRelayJitter.seconds,
idsChannel, wantsChannel, needsChannel,
)
let recon = ?await SyncReconciliation.new(
pubsubTopics, contentTopics, node.peerManager, node.wakuArchive,
storeSyncRange.seconds, storeSyncInterval.seconds, storeSyncRelayJitter.seconds,
idsChannel, wantsChannel, needsChannel,
)

node.wakuStoreReconciliation = recon


@ -84,13 +84,12 @@ proc startRestServerEssentials*(

let address = conf.listenAddress
let port = Port(conf.port.uint16 + portsShift)
let server =
?newRestHttpServer(
address,
port,
allowedOrigin = allowedOrigin,
requestErrorHandler = requestErrorHandler,
)
let server = ?newRestHttpServer(
address,
port,
allowedOrigin = allowedOrigin,
requestErrorHandler = requestErrorHandler,
)

## Health REST API
installHealthApiHandler(server.router, nodeHealthMonitor)

@ -91,23 +91,22 @@ proc new*(
): Future[HttpResponseRef] {.async: (raises: [CancelledError]).} =
discard

server.httpServer =
?HttpServerRef.new(
address,
defaultProcessCallback,
serverFlags,
socketFlags,
serverUri,
serverIdent,
maxConnections,
bufferSize,
backlogSize,
httpHeadersTimeout,
maxHeadersSize,
maxRequestBodySize,
dualstack = dualstack,
middlewares = middlewares,
)
server.httpServer = ?HttpServerRef.new(
address,
defaultProcessCallback,
serverFlags,
socketFlags,
serverUri,
serverIdent,
maxConnections,
bufferSize,
backlogSize,
httpHeadersTimeout,
maxHeadersSize,
maxRequestBodySize,
dualstack = dualstack,
middlewares = middlewares,
)
return ok(server)

proc getRouter(): RestRouter =

@ -45,8 +45,7 @@ const SelectClause =

const SelectNoCursorAscStmtName = "SelectWithoutCursorAsc"
const SelectNoCursorAscStmtDef =
SelectClause &
"""WHERE contentTopic IN ($1) AND
SelectClause & """WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
timestamp >= $4 AND
@ -54,8 +53,7 @@ const SelectNoCursorAscStmtDef =
ORDER BY timestamp ASC, messageHash ASC LIMIT $6;"""

const SelectNoCursorNoDataAscStmtName = "SelectWithoutCursorAndDataAsc"
const SelectNoCursorNoDataAscStmtDef =
"""SELECT messageHash FROM messages
const SelectNoCursorNoDataAscStmtDef = """SELECT messageHash FROM messages
WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
@ -65,8 +63,7 @@ const SelectNoCursorNoDataAscStmtDef =

const SelectNoCursorDescStmtName = "SelectWithoutCursorDesc"
const SelectNoCursorDescStmtDef =
SelectClause &
"""WHERE contentTopic IN ($1) AND
SelectClause & """WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
timestamp >= $4 AND
@ -74,8 +71,7 @@ const SelectNoCursorDescStmtDef =
ORDER BY timestamp DESC, messageHash DESC LIMIT $6;"""

const SelectNoCursorNoDataDescStmtName = "SelectWithoutCursorAndDataDesc"
const SelectNoCursorNoDataDescStmtDef =
"""SELECT messageHash FROM messages
const SelectNoCursorNoDataDescStmtDef = """SELECT messageHash FROM messages
WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
@ -85,8 +81,7 @@ const SelectNoCursorNoDataDescStmtDef =

const SelectWithCursorDescStmtName = "SelectWithCursorDesc"
const SelectWithCursorDescStmtDef =
SelectClause &
"""WHERE contentTopic IN ($1) AND
SelectClause & """WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
(timestamp, messageHash) < ($4,$5) AND
@ -95,8 +90,7 @@ const SelectWithCursorDescStmtDef =
ORDER BY timestamp DESC, messageHash DESC LIMIT $8;"""

const SelectWithCursorNoDataDescStmtName = "SelectWithCursorNoDataDesc"
const SelectWithCursorNoDataDescStmtDef =
"""SELECT messageHash FROM messages
const SelectWithCursorNoDataDescStmtDef = """SELECT messageHash FROM messages
WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
@ -107,8 +101,7 @@ const SelectWithCursorNoDataDescStmtDef =

const SelectWithCursorAscStmtName = "SelectWithCursorAsc"
const SelectWithCursorAscStmtDef =
SelectClause &
"""WHERE contentTopic IN ($1) AND
SelectClause & """WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
(timestamp, messageHash) > ($4,$5) AND
@ -117,8 +110,7 @@ const SelectWithCursorAscStmtDef =
ORDER BY timestamp ASC, messageHash ASC LIMIT $8;"""

const SelectWithCursorNoDataAscStmtName = "SelectWithCursorNoDataAsc"
const SelectWithCursorNoDataAscStmtDef =
"""SELECT messageHash FROM messages
const SelectWithCursorNoDataAscStmtDef = """SELECT messageHash FROM messages
WHERE contentTopic IN ($1) AND
messageHash IN ($2) AND
pubsubTopic = $3 AND
@ -128,8 +120,7 @@ const SelectWithCursorNoDataAscStmtDef =
ORDER BY timestamp ASC, messageHash ASC LIMIT $8;"""

const SelectCursorByHashName = "SelectMessageByHashInMessagesLookup"
const SelectCursorByHashDef =
"""SELECT timestamp FROM messages_lookup
const SelectCursorByHashDef = """SELECT timestamp FROM messages_lookup
WHERE messageHash = $1"""

const
@ -896,11 +887,10 @@ method getMessages*(

let splittedHashes = hashes[i ..< stop]

let subRows =
?await s.getMessagesWithinLimits(
includeData, contentTopics, pubsubTopic, cursor, startTime, endTime,
splittedHashes, maxPageSize, ascendingOrder, requestId,
)
let subRows = ?await s.getMessagesWithinLimits(
includeData, contentTopics, pubsubTopic, cursor, startTime, endTime,
splittedHashes, maxPageSize, ascendingOrder, requestId,
)

for row in subRows:
row

@ -78,12 +78,11 @@ proc createTableQuery(table: string): SqlQueryStr =

proc createTable*(db: SqliteDatabase): DatabaseResult[void] =
let query = createTableQuery(DbTable)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Create indices
@ -93,12 +92,11 @@ proc createOldestMessageTimestampIndexQuery(table: string): SqlQueryStr =

proc createOldestMessageTimestampIndex*(db: SqliteDatabase): DatabaseResult[void] =
let query = createOldestMessageTimestampIndexQuery(DbTable)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Insert message
@ -175,12 +173,11 @@ proc deleteMessagesOlderThanTimestamp*(
db: SqliteDatabase, ts: int64
): DatabaseResult[void] =
let query = deleteMessagesOlderThanTimestampQuery(DbTable, ts)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Delete oldest messages not within limit
@ -196,12 +193,11 @@ proc deleteOldestMessagesNotWithinLimit*(
): DatabaseResult[void] =
# NOTE: The word `limit` here refers the store capacity/maximum number-of-messages allowed limit
let query = deleteOldestMessagesNotWithinLimitQuery(DbTable, limit = limit)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Select all messages

@ -92,12 +92,11 @@ proc createTableQuery(table: string): SqlQueryStr =

proc createTable*(db: SqliteDatabase): DatabaseResult[void] =
let query = createTableQuery(DbTable)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Create indices
@ -107,12 +106,11 @@ proc createOldestMessageTimestampIndexQuery(table: string): SqlQueryStr =

proc createOldestMessageTimestampIndex*(db: SqliteDatabase): DatabaseResult[void] =
let query = createOldestMessageTimestampIndexQuery(DbTable)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

proc createHistoryQueryIndexQuery(table: string): SqlQueryStr =
@ -121,12 +119,11 @@ proc createHistoryQueryIndexQuery(table: string): SqlQueryStr =

proc createHistoryQueryIndex*(db: SqliteDatabase): DatabaseResult[void] =
let query = createHistoryQueryIndexQuery(DbTable)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Insert message
@ -216,12 +213,11 @@ proc deleteMessagesOlderThanTimestamp*(
db: SqliteDatabase, ts: int64
): DatabaseResult[void] =
let query = deleteMessagesOlderThanTimestampQuery(DbTable, ts)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Delete oldest messages not within limit
@ -237,12 +233,11 @@ proc deleteOldestMessagesNotWithinLimit*(
): DatabaseResult[void] =
# NOTE: The word `limit` here refers the store capacity/maximum number-of-messages allowed limit
let query = deleteOldestMessagesNotWithinLimitQuery(DbTable, limit = limit)
discard
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
discard ?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard,
)
return ok()

## Select all messages

@ -176,17 +176,15 @@ proc parsePeerInfoFromRegularAddr(peer: MultiAddress): Result[RemotePeerInfo, st
case addrPart[].protoName()[]
# All protocols listed here: https://github.com/multiformats/multiaddr/blob/b746a7d014e825221cc3aea6e57a92d78419990f/protocols.csv
of "p2p":
p2pPart =
?addrPart.mapErr(
proc(err: string): string =
"Error getting p2pPart [" & err & "]"
)
p2pPart = ?addrPart.mapErr(
proc(err: string): string =
"Error getting p2pPart [" & err & "]"
)
of "ip4", "ip6", "dns", "dnsaddr", "dns4", "dns6", "tcp", "ws", "wss":
let val =
?addrPart.mapErr(
proc(err: string): string =
"Error getting addrPart [" & err & "]"
)
let val = ?addrPart.mapErr(
proc(err: string): string =
"Error getting addrPart [" & err & "]"
)
?wireAddr.append(val).mapErr(
proc(err: string): string =
"Error appending addrPart [" & err & "]"
@ -199,11 +197,10 @@ proc parsePeerInfoFromRegularAddr(peer: MultiAddress): Result[RemotePeerInfo, st
"] [peer:" & $peer & "]"
return err(msg)

let peerId =
?PeerID.init(p2pPartStr.split("/")[^1]).mapErr(
proc(e: cstring): string =
$e
)
let peerId = ?PeerID.init(p2pPartStr.split("/")[^1]).mapErr(
proc(e: cstring): string =
$e
)

if not wireAddr.validWireAddr():
return err("invalid multiaddress: no supported transport found")
@ -233,11 +230,10 @@ proc parsePeerInfo*(maddrs: varargs[string]): Result[RemotePeerInfo, string] =
## format `(ip4|ip6)/tcp/p2p`, into dialable PeerInfo
var multiAddresses = newSeq[MultiAddress]()
for maddr in maddrs:
let multiAddr =
?MultiAddress.init(maddr).mapErr(
proc(err: string): string =
"MultiAddress.init [" & err & "]"
)
let multiAddr = ?MultiAddress.init(maddr).mapErr(
proc(err: string): string =
"MultiAddress.init [" & err & "]"
)
multiAddresses.add(multiAddr)

parsePeerInfo(multiAddresses)

@ -54,20 +54,18 @@ proc parseStaticSharding*(
let clusterPart = parts[0]
if clusterPart.len == 0:
return err(ParsingError.missingPart("cluster_id"))
let clusterId =
?Base10.decode(uint16, clusterPart).mapErr(
proc(err: auto): auto =
ParsingError.invalidFormat($err)
)
let clusterId = ?Base10.decode(uint16, clusterPart).mapErr(
proc(err: auto): auto =
ParsingError.invalidFormat($err)
)

let shardPart = parts[1]
if shardPart.len == 0:
return err(ParsingError.missingPart("shard_number"))
let shardId =
?Base10.decode(uint16, shardPart).mapErr(
proc(err: auto): auto =
ParsingError.invalidFormat($err)
)
let shardId = ?Base10.decode(uint16, shardPart).mapErr(
proc(err: auto): auto =
ParsingError.invalidFormat($err)
)

ok(RelayShard(clusterId: clusterId, shardId: shardId))


@ -70,10 +70,9 @@ func topicsToRelayShards*(topics: seq[string]): Result[Option[RelayShards], stri
if parsedTopicsRes.anyIt(it.get().clusterId != parsedTopicsRes[0].get().clusterId):
return err("use shards with the same cluster Id.")

let relayShard =
?RelayShards.init(
parsedTopicsRes[0].get().clusterId, parsedTopicsRes.mapIt(it.get().shardId)
)
let relayShard = ?RelayShards.init(
parsedTopicsRes[0].get().clusterId, parsedTopicsRes.mapIt(it.get().shardId)
)

return ok(some(relayShard))


@ -119,10 +119,9 @@ proc `==`*(x, y: KeystoreMembership): bool =

proc hash*(m: KeystoreMembership): string =
# hash together the chainId, address and treeIndex
return
$sha256.digest(
m.membershipContract.chainId & m.membershipContract.address & $m.treeIndex
)
return $sha256.digest(
m.membershipContract.chainId & m.membershipContract.address & $m.treeIndex
)

type MembershipTable* = Table[string, KeystoreMembership]


@ -59,15 +59,14 @@ proc isValid(msg: seq[PreMessagePattern]): bool =
var isValid: bool = true

# Non-empty pre-messages can only have patterns "e", "s", "e,s" in each direction
let allowedPatterns: seq[PreMessagePattern] =
@[
PreMessagePattern(direction: D_r, tokens: @[T_s]),
PreMessagePattern(direction: D_r, tokens: @[T_e]),
PreMessagePattern(direction: D_r, tokens: @[T_e, T_s]),
PreMessagePattern(direction: D_l, tokens: @[T_s]),
PreMessagePattern(direction: D_l, tokens: @[T_e]),
PreMessagePattern(direction: D_l, tokens: @[T_e, T_s]),
]
let allowedPatterns: seq[PreMessagePattern] = @[
PreMessagePattern(direction: D_r, tokens: @[T_s]),
PreMessagePattern(direction: D_r, tokens: @[T_e]),
PreMessagePattern(direction: D_r, tokens: @[T_e, T_s]),
PreMessagePattern(direction: D_l, tokens: @[T_s]),
PreMessagePattern(direction: D_l, tokens: @[T_e]),
PreMessagePattern(direction: D_l, tokens: @[T_e, T_s]),
]

# We check if pre message patterns are allowed
for pattern in msg:

@ -223,57 +223,51 @@ const
NoiseHandshakePatterns* = {
"K1K1": HandshakePattern(
name: "Noise_K1K1_25519_ChaChaPoly_SHA256",
preMessagePatterns:
@[
PreMessagePattern(direction: D_r, tokens: @[T_s]),
PreMessagePattern(direction: D_l, tokens: @[T_s]),
],
messagePatterns:
@[
MessagePattern(direction: D_r, tokens: @[T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_es]),
MessagePattern(direction: D_r, tokens: @[T_se]),
],
preMessagePatterns: @[
PreMessagePattern(direction: D_r, tokens: @[T_s]),
PreMessagePattern(direction: D_l, tokens: @[T_s]),
],
messagePatterns: @[
MessagePattern(direction: D_r, tokens: @[T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_es]),
MessagePattern(direction: D_r, tokens: @[T_se]),
],
),
"XK1": HandshakePattern(
name: "Noise_XK1_25519_ChaChaPoly_SHA256",
preMessagePatterns: @[PreMessagePattern(direction: D_l, tokens: @[T_s])],
messagePatterns:
@[
MessagePattern(direction: D_r, tokens: @[T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se]),
],
messagePatterns: @[
MessagePattern(direction: D_r, tokens: @[T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se]),
],
),
"XX": HandshakePattern(
name: "Noise_XX_25519_ChaChaPoly_SHA256",
preMessagePatterns: EmptyPreMessage,
messagePatterns:
@[
MessagePattern(direction: D_r, tokens: @[T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_s, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se]),
],
messagePatterns: @[
MessagePattern(direction: D_r, tokens: @[T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_s, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se]),
],
),
"XXpsk0": HandshakePattern(
name: "Noise_XXpsk0_25519_ChaChaPoly_SHA256",
preMessagePatterns: EmptyPreMessage,
messagePatterns:
@[
MessagePattern(direction: D_r, tokens: @[T_psk, T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_s, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se]),
],
messagePatterns: @[
MessagePattern(direction: D_r, tokens: @[T_psk, T_e]),
MessagePattern(direction: D_l, tokens: @[T_e, T_ee, T_s, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se]),
],
),
"WakuPairing": HandshakePattern(
name: "Noise_WakuPairing_25519_ChaChaPoly_SHA256",
preMessagePatterns: @[PreMessagePattern(direction: D_l, tokens: @[T_e])],
messagePatterns:
@[
MessagePattern(direction: D_r, tokens: @[T_e, T_ee]),
MessagePattern(direction: D_l, tokens: @[T_s, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se, T_ss]),
],
messagePatterns: @[
MessagePattern(direction: D_r, tokens: @[T_e, T_ee]),
MessagePattern(direction: D_l, tokens: @[T_s, T_es]),
MessagePattern(direction: D_r, tokens: @[T_s, T_se, T_ss]),
],
),
}.toTable()


@ -252,8 +252,8 @@ method register*(
var txHash: TxHash
g.retryWrapper(txHash, "Failed to register the member"):
await wakuRlnContract
.register(idCommitment, userMessageLimit.stuint(32), idCommitmentsToErase)
.send(gasPrice = gasPrice)
.register(idCommitment, userMessageLimit.stuint(32), idCommitmentsToErase)
.send(gasPrice = gasPrice)

# wait for the transaction to be mined
var tsReceipt: ReceiptObject

@ -72,13 +72,12 @@ type RlnConfig = ref object of RootObj
proc `%`(c: RlnConfig): JsonNode =
## wrapper around the generic JObject constructor.
## We don't need to have a separate proc for the tree_config field
let tree_config =
%{
"cache_capacity": %c.tree_config.cache_capacity,
"mode": %c.tree_config.mode,
"compression": %c.tree_config.compression,
"flush_every_ms": %c.tree_config.flush_every_ms,
}
let tree_config = %{
"cache_capacity": %c.tree_config.cache_capacity,
"mode": %c.tree_config.mode,
"compression": %c.tree_config.compression,
"flush_every_ms": %c.tree_config.flush_every_ms,
}
return %[("resources_folder", %c.resources_folder), ("tree_config", %tree_config)]

proc createRLNInstanceLocal(): RLNResult =