# Nim-Libp2p
# Copyright (c) 2023 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
# * MIT license ([LICENSE-MIT](LICENSE-MIT))
# at your option.
# This file may not be copied, modified, or distributed except according to
# those terms.

{.used.}

import sequtils, options, tables, sets, sugar
import chronos, stew/byteutils
import chronicles
import utils, ../../libp2p/[errors,
                            peerid,
                            peerinfo,
                            stream/connection,
                            stream/bufferstream,
                            crypto/crypto,
                            protocols/pubsub/pubsub,
                            protocols/pubsub/gossipsub,
                            protocols/pubsub/pubsubpeer,
                            protocols/pubsub/peertable,
                            protocols/pubsub/timedcache,
                            protocols/pubsub/rpc/messages]
import ../../libp2p/protocols/pubsub/errors as pubsub_errors
import ../helpers

proc `$`(peer: PubSubPeer): string = shortLog(peer)
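
# Retry helper: `call` is a `publish` expression that returns the number of
# peers the message was sent to. The template re-evaluates it every `wait`,
# accumulating the count, until at least `require` peers were reached or
# `timeout` expires, and then asserts that the requirement was met.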
template tryPublish(call: untyped, require: int, wait = 10.milliseconds, timeout = 5.seconds): untyped =
  var
    expiration = Moment.now() + timeout
    pubs = 0
  while pubs < require and Moment.now() < expiration:
    pubs = pubs + call
    await sleepAsync(wait)

  doAssert pubs >= require, "Failed to publish!"

suite "GossipSub":
  teardown:
    checkTrackers()
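
  # The tests below rely on the local test helpers imported above from
  # `utils` and `../helpers` (e.g. `generateNodes`, `subscribeNodes`,
  # `waitSub`, `waitSubGraph`, `checkExpiring`).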
  asyncTest "GossipSub validation should succeed":
    var handlerFut = newFuture[bool]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foobar"
      handlerFut.complete(true)

    let
      nodes = generateNodes(2, gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)

    var subs: seq[Future[void]]
    subs &= waitSub(nodes[1], nodes[0], "foobar")
    subs &= waitSub(nodes[0], nodes[1], "foobar")

    await allFuturesThrowing(subs)

    var validatorFut = newFuture[bool]()
    proc validator(topic: string,
                   message: Message):
                   Future[ValidationResult] {.async.} =
      check topic == "foobar"
      validatorFut.complete(true)
      result = ValidationResult.Accept

    nodes[1].addValidator("foobar", validator)
    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check (await validatorFut) and (await handlerFut)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
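
  # A validator returning Reject (or Ignore, in the test after this one) must
  # stop delivery: the handler registered below is expected to never run.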
  asyncTest "GossipSub validation should fail (reject)":
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check false # if we get here, it should fail

    let
      nodes = generateNodes(2, gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)

    await waitSubGraph(nodes, "foobar")

    let gossip1 = GossipSub(nodes[0])
    let gossip2 = GossipSub(nodes[1])

    check:
      gossip1.mesh["foobar"].len == 1 and "foobar" notin gossip1.fanout
      gossip2.mesh["foobar"].len == 1 and "foobar" notin gossip2.fanout

    var validatorFut = newFuture[bool]()
    proc validator(topic: string,
                   message: Message):
                   Future[ValidationResult] {.async.} =
      result = ValidationResult.Reject
      validatorFut.complete(true)

    nodes[1].addValidator("foobar", validator)
    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check (await validatorFut) == true

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())

  asyncTest "GossipSub validation should fail (ignore)":
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check false # if we get here, it should fail

    let
      nodes = generateNodes(2, gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)

    await waitSubGraph(nodes, "foobar")

    let gossip1 = GossipSub(nodes[0])
    let gossip2 = GossipSub(nodes[1])

    check:
      gossip1.mesh["foobar"].len == 1 and "foobar" notin gossip1.fanout
      gossip2.mesh["foobar"].len == 1 and "foobar" notin gossip2.fanout

    var validatorFut = newFuture[bool]()
    proc validator(topic: string,
                   message: Message):
                   Future[ValidationResult] {.async.} =
      result = ValidationResult.Ignore
      validatorFut.complete(true)

    nodes[1].addValidator("foobar", validator)
    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check (await validatorFut) == true

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
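
  # A single validator registered for both topics accepts "foo" and rejects
  # "bar", so only the "foo" message should reach the handler.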
  asyncTest "GossipSub validation one fails and one succeeds":
    var handlerFut = newFuture[bool]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foo"
      handlerFut.complete(true)

    let
      nodes = generateNodes(2, gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[1].subscribe("foo", handler)
    nodes[1].subscribe("bar", handler)

    var passed, failed: Future[bool] = newFuture[bool]()
    proc validator(topic: string,
                   message: Message):
                   Future[ValidationResult] {.async.} =
      result = if topic == "foo":
        passed.complete(true)
        ValidationResult.Accept
      else:
        failed.complete(true)
        ValidationResult.Reject

    nodes[1].addValidator("foo", "bar", validator)
    tryPublish await nodes[0].publish("foo", "Hello!".toBytes()), 1
    tryPublish await nodes[0].publish("bar", "Hello!".toBytes()), 1

    check ((await passed) and (await failed) and (await handlerFut))

    let gossip1 = GossipSub(nodes[0])
    let gossip2 = GossipSub(nodes[1])

    check:
      "foo" notin gossip1.mesh and gossip1.fanout["foo"].len == 1
      "foo" notin gossip2.mesh and "foo" notin gossip2.fanout
      "bar" notin gossip1.mesh and gossip1.fanout["bar"].len == 1
      "bar" notin gossip2.mesh and "bar" notin gossip2.fanout

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
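
  # Unsubscribe immediately followed by re-subscribe: the peers must see each
  # other's subscription again well within the regular 60-second backoff
  # mentioned in the test body.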
  asyncTest "GossipSub unsub - resub faster than backoff":
    var handlerFut = newFuture[bool]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foobar"
      handlerFut.complete(true)

    let
      nodes = generateNodes(2, gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)

    var subs: seq[Future[void]]
    subs &= waitSub(nodes[1], nodes[0], "foobar")
    subs &= waitSub(nodes[0], nodes[1], "foobar")

    await allFuturesThrowing(subs)

    nodes[0].unsubscribe("foobar", handler)
    nodes[0].subscribe("foobar", handler)

    # regular backoff is 60 seconds, so we must not wait that long
    await (waitSub(nodes[0], nodes[1], "foobar") and waitSub(nodes[1], nodes[0], "foobar")).wait(30.seconds)

    var validatorFut = newFuture[bool]()
    proc validator(topic: string,
                   message: Message):
                   Future[ValidationResult] {.async.} =
      check topic == "foobar"
      validatorFut.complete(true)
      result = ValidationResult.Accept

    nodes[1].addValidator("foobar", validator)
    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check (await validatorFut) and (await handlerFut)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())

  asyncTest "e2e - GossipSub should add remote peer topic subscriptions":
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      discard

    let
      nodes = generateNodes(
        2,
        gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[1].subscribe("foobar", handler)

    let gossip1 = GossipSub(nodes[0])
    let gossip2 = GossipSub(nodes[1])

    checkExpiring:
      "foobar" in gossip2.topics and
      "foobar" in gossip1.gossipsub and
      gossip1.gossipsub.hasPeerId("foobar", gossip2.peerInfo.peerId)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())

  asyncTest "e2e - GossipSub should add remote peer topic subscriptions if both peers are subscribed":
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      discard

    let
      nodes = generateNodes(
        2,
        gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)

    var subs: seq[Future[void]]
    subs &= waitSub(nodes[1], nodes[0], "foobar")
    subs &= waitSub(nodes[0], nodes[1], "foobar")

    await allFuturesThrowing(subs)

    let
      gossip1 = GossipSub(nodes[0])
      gossip2 = GossipSub(nodes[1])

    check:
      "foobar" in gossip1.topics
      "foobar" in gossip2.topics

      "foobar" in gossip1.gossipsub
      "foobar" in gossip2.gossipsub

      gossip1.gossipsub.hasPeerId("foobar", gossip2.peerInfo.peerId) or
      gossip1.mesh.hasPeerId("foobar", gossip2.peerInfo.peerId)

      gossip2.gossipsub.hasPeerId("foobar", gossip1.peerInfo.peerId) or
      gossip2.mesh.hasPeerId("foobar", gossip1.peerInfo.peerId)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
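
  # Node 0 publishes to a topic it is not subscribed to, so delivery is
  # expected to go through its fanout set rather than its mesh.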
  asyncTest "e2e - GossipSub send over fanout A -> B":
    var passed = newFuture[void]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foobar"
      passed.complete()

    let
      nodes = generateNodes(
        2,
        gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[1].subscribe("foobar", handler)
    await waitSub(nodes[0], nodes[1], "foobar")

    var observed = 0
    let
      obs1 = PubSubObserver(onRecv: proc(peer: PubSubPeer; msgs: var RPCMsg) =
        inc observed
      )
      obs2 = PubSubObserver(onSend: proc(peer: PubSubPeer; msgs: var RPCMsg) =
        inc observed
      )

    nodes[1].addObserver(obs1)
    nodes[0].addObserver(obs2)

    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    var gossip1: GossipSub = GossipSub(nodes[0])
    var gossip2: GossipSub = GossipSub(nodes[1])

    check:
      "foobar" in gossip1.gossipsub
      gossip1.fanout.hasPeerId("foobar", gossip2.peerInfo.peerId)
      not gossip1.mesh.hasPeerId("foobar", gossip2.peerInfo.peerId)

    await passed.wait(2.seconds)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())

    check observed == 2
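
  # Node 1's mesh degree parameters (d, dLow, dHigh) are forced to 0 below so
  # it never grafts a mesh; node 0 is then expected to deliver via fanout even
  # though both sides are subscribed to the topic.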
  asyncTest "e2e - GossipSub send over fanout A -> B for subscribed topic":
    var passed = newFuture[void]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foobar"
      passed.complete()

    let
      nodes = generateNodes(
        2,
        gossip = true,
        unsubscribeBackoff = 10.minutes)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    GossipSub(nodes[1]).parameters.d = 0
    GossipSub(nodes[1]).parameters.dHigh = 0
    GossipSub(nodes[1]).parameters.dLow = 0

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)

    let gsNode = GossipSub(nodes[1])
    checkExpiring:
      gsNode.mesh.getOrDefault("foobar").len == 0 and
      GossipSub(nodes[0]).mesh.getOrDefault("foobar").len == 0 and
      (
        GossipSub(nodes[0]).gossipsub.getOrDefault("foobar").len == 1 or
        GossipSub(nodes[0]).fanout.getOrDefault("foobar").len == 1
      )

    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check:
      GossipSub(nodes[0]).fanout.getOrDefault("foobar").len > 0
      GossipSub(nodes[0]).mesh.getOrDefault("foobar").len == 0

    await passed.wait(2.seconds)

    trace "test done, stopping..."

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
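
  # With both peers subscribed and grafted, delivery should happen over the
  # mesh, and neither peer should appear in the other's fanout.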
  asyncTest "e2e - GossipSub send over mesh A -> B":
    var passed: Future[bool] = newFuture[bool]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foobar"
      passed.complete(true)

    let
      nodes = generateNodes(
        2,
        gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)
    await waitSub(nodes[0], nodes[1], "foobar")

    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check await passed

    var gossip1: GossipSub = GossipSub(nodes[0])
    var gossip2: GossipSub = GossipSub(nodes[1])

    check:
      "foobar" in gossip1.gossipsub
      "foobar" in gossip2.gossipsub
      gossip1.mesh.hasPeerId("foobar", gossip2.peerInfo.peerId)
      not gossip1.fanout.hasPeerId("foobar", gossip2.peerInfo.peerId)
      gossip2.mesh.hasPeerId("foobar", gossip1.peerInfo.peerId)
      not gossip2.fanout.hasPeerId("foobar", gossip1.peerInfo.peerId)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())

  asyncTest "e2e - GossipSub should not send to source & peers who already seen":
    # 3 nodes: A, B, C
    # A publishes, C relays, B has a long-running validation,
    # so B should not send to anyone
    let
      nodes = generateNodes(
        3,
        gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
        nodes[2].switch.start(),
      )

    await subscribeNodes(nodes)

    var cRelayed: Future[void] = newFuture[void]()
    var bFinished: Future[void] = newFuture[void]()
    var
      aReceived = 0
      cReceived = 0
    proc handlerA(topic: string, data: seq[byte]) {.async, gcsafe.} =
      inc aReceived
      check aReceived < 2
    proc handlerB(topic: string, data: seq[byte]) {.async, gcsafe.} = discard
    proc handlerC(topic: string, data: seq[byte]) {.async, gcsafe.} =
      inc cReceived
      check cReceived < 2
      cRelayed.complete()

    nodes[0].subscribe("foobar", handlerA)
    nodes[1].subscribe("foobar", handlerB)
    nodes[2].subscribe("foobar", handlerC)
    await waitSubGraph(nodes, "foobar")

    var gossip1: GossipSub = GossipSub(nodes[0])
    var gossip2: GossipSub = GossipSub(nodes[1])
    var gossip3: GossipSub = GossipSub(nodes[2])

    proc slowValidator(topic: string, message: Message): Future[ValidationResult] {.async.} =
      await cRelayed
      # Empty A & C caches to detect duplicates
      gossip1.seen = TimedCache[MessageId].init()
      gossip3.seen = TimedCache[MessageId].init()
      let msgId = toSeq(gossip2.validationSeen.keys)[0]
      checkExpiring(try: gossip2.validationSeen[msgId].len > 0 except: false)
      result = ValidationResult.Accept
      bFinished.complete()

    nodes[1].addValidator("foobar", slowValidator)

    checkExpiring(
      gossip1.mesh.getOrDefault("foobar").len == 2 and
      gossip2.mesh.getOrDefault("foobar").len == 2 and
      gossip3.mesh.getOrDefault("foobar").len == 2)
    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 2

    await bFinished

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop(),
      nodes[2].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
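
  # With floodPublish enabled, the publisher is expected to send directly to
  # the subscribed peer even though it never adds that peer to a mesh or
  # fanout for the topic (see the checks at the end of the test).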
  asyncTest "e2e - GossipSub send over floodPublish A -> B":
    var passed: Future[bool] = newFuture[bool]()
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      check topic == "foobar"
      passed.complete(true)

    let
      nodes = generateNodes(
        2,
        gossip = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
      )

    var gossip1: GossipSub = GossipSub(nodes[0])
    gossip1.parameters.floodPublish = true
    var gossip2: GossipSub = GossipSub(nodes[1])
    gossip2.parameters.floodPublish = true

    await subscribeNodes(nodes)

    # nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)
    await waitSub(nodes[0], nodes[1], "foobar")

    tryPublish await nodes[0].publish("foobar", "Hello!".toBytes()), 1

    check await passed.wait(10.seconds)

    check:
      "foobar" in gossip1.gossipsub
      "foobar" notin gossip2.gossipsub
      not gossip1.mesh.hasPeerId("foobar", gossip2.peerInfo.peerId)
      not gossip1.fanout.hasPeerId("foobar", gossip2.peerInfo.peerId)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())
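
  # Ten nodes, all subscribed to "foobar"; with triggerSelf enabled every
  # node, including the publisher, is expected to observe the message.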
  asyncTest "e2e - GossipSub with multiple peers":
    var runs = 10

    let
      nodes = generateNodes(runs, gossip = true, triggerSelf = true)
      nodesFut = nodes.mapIt(it.switch.start())

    await subscribeNodes(nodes)

    var seen: Table[string, int]
    var seenFut = newFuture[void]()
    for i in 0..<nodes.len:
      let dialer = nodes[i]
      var handler: TopicHandler
      closureScope:
        var peerName = $dialer.peerInfo.peerId
        handler = proc(topic: string, data: seq[byte]) {.async, gcsafe, closure.} =
          if peerName notin seen:
            seen[peerName] = 0
          seen[peerName].inc
          check topic == "foobar"
          if not seenFut.finished() and seen.len >= runs:
            seenFut.complete()

      dialer.subscribe("foobar", handler)
    await waitSubGraph(nodes, "foobar")

    tryPublish await wait(nodes[0].publish("foobar",
                                  toBytes("from node " &
                                  $nodes[0].peerInfo.peerId)),
                                  1.minutes), 1

    await wait(seenFut, 1.minutes)
    check: seen.len >= runs
    for k, v in seen.pairs:
      check: v >= 1

    for node in nodes:
      var gossip = GossipSub(node)

      check:
        "foobar" in gossip.gossipsub

    await allFuturesThrowing(
      nodes.mapIt(
        allFutures(
          it.switch.stop())))

    await allFuturesThrowing(nodesFut)
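
  # Same scenario as above, but with a sparse connection graph
  # (subscribeSparseNodes), so messages typically have to be relayed
  # through the mesh rather than delivered directly.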
  asyncTest "e2e - GossipSub with multiple peers (sparse)":
    var runs = 10

    let
      nodes = generateNodes(runs, gossip = true, triggerSelf = true)
      nodesFut = nodes.mapIt(it.switch.start())

    await subscribeSparseNodes(nodes)

    var seen: Table[string, int]
    var seenFut = newFuture[void]()

    for i in 0..<nodes.len:
      let dialer = nodes[i]
      var handler: TopicHandler
      capture dialer, i:
        var peerName = $dialer.peerInfo.peerId
        handler = proc(topic: string, data: seq[byte]) {.async, gcsafe, closure.} =
          if peerName notin seen:
            seen[peerName] = 0
          seen[peerName].inc
          check topic == "foobar"
          if not seenFut.finished() and seen.len >= runs:
            seenFut.complete()

      dialer.subscribe("foobar", handler)

    await waitSubGraph(nodes, "foobar")
    tryPublish await wait(nodes[0].publish("foobar",
                                  toBytes("from node " &
                                  $nodes[0].peerInfo.peerId)),
                                  1.minutes), 1

    await wait(seenFut, 60.seconds)
    check: seen.len >= runs
    for k, v in seen.pairs:
      check: v >= 1

    for node in nodes:
      var gossip = GossipSub(node)
      check:
        "foobar" in gossip.gossipsub
        gossip.fanout.len == 0
        gossip.mesh["foobar"].len > 0

    await allFuturesThrowing(
      nodes.mapIt(
        allFutures(
          it.switch.stop())))

    await allFuturesThrowing(nodesFut)

  asyncTest "e2e - GossipSub peer exchange":
    # A, B & C are subscribed to something.
    # B unsubscribes from it, so it should send
    # PX to A & C.
    #
    # C shares its signed peer record (SPR), A does not.
    proc handler(topic: string, data: seq[byte]) {.async, gcsafe.} =
      discard # not used in this test

    let
      nodes = generateNodes(
        2,
        gossip = true,
        enablePX = true) &
        generateNodes(1, gossip = true, sendSignedPeerRecord = true)

      # start switches
      nodesFut = await allFinished(
        nodes[0].switch.start(),
        nodes[1].switch.start(),
        nodes[2].switch.start(),
      )

    var
      gossip0 = GossipSub(nodes[0])
      gossip1 = GossipSub(nodes[1])
      gossip2 = GossipSub(nodes[2])

    await subscribeNodes(nodes)

    nodes[0].subscribe("foobar", handler)
    nodes[1].subscribe("foobar", handler)
    nodes[2].subscribe("foobar", handler)
    for x in 0..<3:
      for y in 0..<3:
        if x != y:
          await waitSub(nodes[x], nodes[y], "foobar")

    var passed: Future[void] = newFuture[void]()
    gossip0.routingRecordsHandler.add(proc(peer: PeerId, tag: string, peers: seq[RoutingRecordsPair]) =
      check:
        tag == "foobar"
        peers.len == 2
        peers[0].record.isSome() xor peers[1].record.isSome()
      passed.complete()
    )
    nodes[1].unsubscribe("foobar", handler)

    await passed.wait(5.seconds)

    await allFuturesThrowing(
      nodes[0].switch.stop(),
      nodes[1].switch.stop(),
      nodes[2].switch.stop()
    )

    await allFuturesThrowing(nodesFut.concat())