2020-05-06 09:26:08 +00:00
|
|
|
include ../../libp2p/protocols/pubsub/gossipsub
|
|
|
|
|
2020-05-08 20:58:23 +00:00
|
|
|
{.used.}
|
|
|
|
|
2020-09-23 15:56:33 +00:00
|
|
|
import options
|
2021-05-21 16:27:01 +00:00
|
|
|
import bearssl
|
2020-05-21 20:24:20 +00:00
|
|
|
import stew/byteutils
|
2021-04-02 01:20:51 +00:00
|
|
|
import ../../libp2p/builders
|
2020-05-06 09:26:08 +00:00
|
|
|
import ../../libp2p/errors
|
2020-06-28 15:56:38 +00:00
|
|
|
import ../../libp2p/crypto/crypto
|
2020-05-06 09:26:08 +00:00
|
|
|
import ../../libp2p/stream/bufferstream
|
2021-01-15 04:48:03 +00:00
|
|
|
import ../../libp2p/switch
|
2020-05-06 09:26:08 +00:00
|
|
|
|
2020-05-08 20:10:06 +00:00
|
|
|
import ../helpers
|
|
|
|
|
2020-05-06 09:26:08 +00:00
|
|
|
type
  # Concrete GossipSub subclass so tests can poke at internal state
  # (mesh, fanout, gossipsub tables, scoring) directly.
  TestGossipSub = ref object of GossipSub
|
|
|
|
|
2020-05-21 20:24:20 +00:00
|
|
|
proc noop(data: seq[byte]) {.async, gcsafe.} = discard # test write sink: outgoing bytes are dropped
|
|
|
|
|
2021-03-09 12:22:52 +00:00
|
|
|
proc getPubSubPeer(p: TestGossipSub, peerId: PeerID): PubSubPeer =
  ## Create a PubSubPeer for `peerId`, register it in `p.peers` and notify
  ## the gossipsub scoring machinery via `onNewPeer`.
  ## NOTE(review): an existing entry for `peerId` is silently overwritten —
  ## tests in this file only create each peerId once, so this is benign here.
  proc getConn(): Future[Connection] =
    # lazily dial the peer when the pubsub layer wants a send connection
    p.switch.dial(peerId, GossipSubCodec)

  proc dropConn(peer: PubSubPeer) =
    discard # we don't care about it here yet

  let pubSubPeer = PubSubPeer.new(peerId, getConn, dropConn, nil, GossipSubCodec)
  debug "created new pubsub peer", peerId

  # register before onNewPeer so scoring sees a fully-tracked peer
  p.peers[peerId] = pubSubPeer

  onNewPeer(p, pubSubPeer)
  pubSubPeer
|
2020-09-01 07:33:03 +00:00
|
|
|
|
2021-09-08 09:07:46 +00:00
|
|
|
proc randomPeerId(): PeerId =
  ## Build a throwaway PeerId from a freshly generated ECDSA key.
  ## Key generation failures are programmer errors in a test → Defect.
  try:
    let key = PrivateKey.random(ECDSA, rng[]).get()
    PeerId.init(key).tryGet()
  except CatchableError as exc:
    raise newException(Defect, exc.msg)
|
2020-07-07 11:14:11 +00:00
|
|
|
|
2020-05-06 09:26:08 +00:00
|
|
|
suite "GossipSub internal":
  teardown:
    # verify every test released its tracked resources (streams, conns)
    checkTrackers()
|
2020-05-06 09:26:08 +00:00
|
|
|
|
2020-12-19 15:45:34 +00:00
|
|
|
  asyncTest "subscribe/unsubscribeAll":
    # Subscribing must populate topics/mesh; unsubscribeAll must clear them
    # while keeping the peer-tracking gossipsub table intact.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(topic: string, data: seq[byte]): Future[void] {.gcsafe.} =
      discard

    let topic = "foobar"
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.topicParams[topic] = TopicParams.init()

    # seed 15 known peers for the topic
    var conns = newSeq[Connection]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    for i in 0..<15:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.sendConn = conn
      gossipSub.gossipsub[topic].incl(peer)

    # test via dynamic dispatch
    gossipSub.PubSub.subscribe(topic, handler)

    check:
      gossipSub.topics.contains(topic)
      gossipSub.gossipsub[topic].len() > 0
      gossipSub.mesh[topic].len() > 0

    # test via dynamic dispatch
    gossipSub.PubSub.unsubscribeAll(topic)

    check:
      topic notin gossipSub.topics # not in local topics
      topic notin gossipSub.mesh # not in mesh
      topic in gossipSub.gossipsub # but still in gossipsub table (for fanning out)

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
2020-11-13 03:44:02 +00:00
|
|
|
asyncTest "topic params":
|
|
|
|
let params = TopicParams.init()
|
|
|
|
params.validateParameters().tryGet()
|
|
|
|
|
|
|
|
  asyncTest "`rebalanceMesh` Degree Lo":
    # With 15 known peers and an empty mesh, rebalancing must graft
    # exactly `parameters.d` of them into the mesh.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    let topic = "foobar"
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.topicParams[topic] = TopicParams.init()

    var conns = newSeq[Connection]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    for i in 0..<15:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.sendConn = conn
      gossipSub.gossipsub[topic].incl(peer)

    check gossipSub.peers.len == 15
    gossipSub.rebalanceMesh(topic)
    check gossipSub.mesh[topic].len == gossipSub.parameters.d

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
  asyncTest "rebalanceMesh - bad peers":
    # Peers get scores -11..3 (one per iteration); only the peers with a
    # non-negative score (0, 1, 2, 3) may be grafted into the mesh.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    let topic = "foobar"
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.topicParams[topic] = TopicParams.init()

    var conns = newSeq[Connection]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    var scoreLow = -11'f64
    for i in 0..<15:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.sendConn = conn
      peer.score = scoreLow
      gossipSub.gossipsub[topic].incl(peer)
      scoreLow += 1.0

    check gossipSub.peers.len == 15
    gossipSub.rebalanceMesh(topic)
    # low score peers should not be in mesh, that's why the count must be 4
    check gossipSub.mesh[topic].len == 4
    for peer in gossipSub.mesh[topic]:
      check peer.score >= 0.0

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
asyncTest "`rebalanceMesh` Degree Hi":
|
|
|
|
let gossipSub = TestGossipSub.init(newStandardSwitch())
|
|
|
|
|
|
|
|
let topic = "foobar"
|
|
|
|
gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
|
|
|
|
gossipSub.topicParams[topic] = TopicParams.init()
|
|
|
|
|
|
|
|
var conns = newSeq[Connection]()
|
|
|
|
gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
|
|
|
|
for i in 0..<15:
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2020-11-13 03:44:02 +00:00
|
|
|
conns &= conn
|
2021-09-08 09:07:46 +00:00
|
|
|
let peerId = PeerId.init(PrivateKey.random(ECDSA, rng[]).get()).tryGet()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2020-11-13 03:44:02 +00:00
|
|
|
gossipSub.grafted(peer, topic)
|
|
|
|
gossipSub.mesh[topic].incl(peer)
|
|
|
|
|
|
|
|
check gossipSub.mesh[topic].len == 15
|
2020-12-15 01:25:22 +00:00
|
|
|
gossipSub.rebalanceMesh(topic)
|
2020-11-19 07:48:17 +00:00
|
|
|
check gossipSub.mesh[topic].len == gossipSub.parameters.d + gossipSub.parameters.dScore
|
2020-11-13 03:44:02 +00:00
|
|
|
|
|
|
|
await allFuturesThrowing(conns.mapIt(it.close()))
|
|
|
|
await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
asyncTest "`replenishFanout` Degree Lo":
|
|
|
|
let gossipSub = TestGossipSub.init(newStandardSwitch())
|
|
|
|
|
|
|
|
proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
|
|
|
|
discard
|
|
|
|
|
|
|
|
let topic = "foobar"
|
|
|
|
gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
|
|
|
|
gossipSub.topicParams[topic] = TopicParams.init()
|
|
|
|
|
|
|
|
var conns = newSeq[Connection]()
|
|
|
|
for i in 0..<15:
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2020-11-13 03:44:02 +00:00
|
|
|
conns &= conn
|
2021-09-08 09:07:46 +00:00
|
|
|
var peerId = randomPeerId()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2020-11-13 03:44:02 +00:00
|
|
|
peer.handler = handler
|
|
|
|
gossipSub.gossipsub[topic].incl(peer)
|
|
|
|
|
|
|
|
check gossipSub.gossipsub[topic].len == 15
|
|
|
|
gossipSub.replenishFanout(topic)
|
2020-11-19 07:48:17 +00:00
|
|
|
check gossipSub.fanout[topic].len == gossipSub.parameters.d
|
2020-11-13 03:44:02 +00:00
|
|
|
|
|
|
|
await allFuturesThrowing(conns.mapIt(it.close()))
|
|
|
|
await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
asyncTest "`dropFanoutPeers` drop expired fanout topics":
|
|
|
|
let gossipSub = TestGossipSub.init(newStandardSwitch())
|
|
|
|
|
|
|
|
proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
|
|
|
|
discard
|
|
|
|
|
|
|
|
let topic = "foobar"
|
|
|
|
gossipSub.topicParams[topic] = TopicParams.init()
|
|
|
|
gossipSub.fanout[topic] = initHashSet[PubSubPeer]()
|
|
|
|
gossipSub.lastFanoutPubSub[topic] = Moment.fromNow(1.millis)
|
|
|
|
await sleepAsync(5.millis) # allow the topic to expire
|
|
|
|
|
|
|
|
var conns = newSeq[Connection]()
|
|
|
|
for i in 0..<6:
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2020-11-13 03:44:02 +00:00
|
|
|
conns &= conn
|
2021-09-08 09:07:46 +00:00
|
|
|
let peerId = PeerId.init(PrivateKey.random(ECDSA, rng[]).get()).tryGet()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2020-11-13 03:44:02 +00:00
|
|
|
peer.handler = handler
|
|
|
|
gossipSub.fanout[topic].incl(peer)
|
|
|
|
|
2020-11-19 07:48:17 +00:00
|
|
|
check gossipSub.fanout[topic].len == gossipSub.parameters.d
|
2020-11-13 03:44:02 +00:00
|
|
|
|
|
|
|
gossipSub.dropFanoutPeers()
|
|
|
|
check topic notin gossipSub.fanout
|
|
|
|
|
|
|
|
await allFuturesThrowing(conns.mapIt(it.close()))
|
|
|
|
await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
  asyncTest "`dropFanoutPeers` leave unexpired fanout topics":
    # Only the expired fanout topic (topic1) may be dropped; topic2's
    # TTL is a minute away and must survive.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      discard

    let topic1 = "foobar1"
    let topic2 = "foobar2"
    gossipSub.topicParams[topic1] = TopicParams.init()
    gossipSub.topicParams[topic2] = TopicParams.init()
    gossipSub.fanout[topic1] = initHashSet[PubSubPeer]()
    gossipSub.fanout[topic2] = initHashSet[PubSubPeer]()
    gossipSub.lastFanoutPubSub[topic1] = Moment.fromNow(1.millis)
    gossipSub.lastFanoutPubSub[topic2] = Moment.fromNow(1.minutes)
    await sleepAsync(5.millis) # allow the topic to expire

    var conns = newSeq[Connection]()
    for i in 0..<6:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      gossipSub.fanout[topic1].incl(peer)
      gossipSub.fanout[topic2].incl(peer)

    check gossipSub.fanout[topic1].len == gossipSub.parameters.d
    check gossipSub.fanout[topic2].len == gossipSub.parameters.d

    gossipSub.dropFanoutPeers()
    check topic1 notin gossipSub.fanout
    check topic2 in gossipSub.fanout

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
  asyncTest "`getGossipPeers` - should gather up to degree D non intersecting peers":
    # Gossip targets must come from the free-standing gossipsub peers
    # only — never from current mesh or fanout members.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      discard

    let topic = "foobar"
    gossipSub.topicParams[topic] = TopicParams.init()
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.fanout[topic] = initHashSet[PubSubPeer]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    var conns = newSeq[Connection]()

    # generate mesh and fanout peers
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      if i mod 2 == 0:
        gossipSub.fanout[topic].incl(peer)
      else:
        gossipSub.grafted(peer, topic)
        gossipSub.mesh[topic].incl(peer)

    # generate gossipsub (free standing) peers
    for i in 0..<15:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      gossipSub.gossipsub[topic].incl(peer)

    # generate messages so there is something to gossip about
    var seqno = 0'u64
    for i in 0..5:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      inc seqno
      let msg = Message.init(peerId, ("HELLO" & $i).toBytes(), topic, some(seqno))
      gossipSub.mcache.put(gossipSub.msgIdProvider(msg), msg)

    check gossipSub.fanout[topic].len == 15
    check gossipSub.mesh[topic].len == 15
    check gossipSub.gossipsub[topic].len == 15

    let peers = gossipSub.getGossipPeers()
    check peers.len == gossipSub.parameters.d
    for p in peers.keys:
      # selected gossip targets must not overlap mesh/fanout
      check not gossipSub.fanout.hasPeerID(topic, p.peerId)
      check not gossipSub.mesh.hasPeerID(topic, p.peerId)

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
  asyncTest "`getGossipPeers` - should not crash on missing topics in mesh":
    # No mesh entry exists for the topic; getGossipPeers must cope and
    # still select `d` gossip targets.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      discard

    let topic = "foobar"
    gossipSub.topicParams[topic] = TopicParams.init()
    gossipSub.fanout[topic] = initHashSet[PubSubPeer]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    var conns = newSeq[Connection]()
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      if i mod 2 == 0:
        gossipSub.fanout[topic].incl(peer)
      else:
        gossipSub.gossipsub[topic].incl(peer)

    # generate messages
    var seqno = 0'u64
    for i in 0..5:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      inc seqno
      let msg = Message.init(peerId, ("HELLO" & $i).toBytes(), topic, some(seqno))
      gossipSub.mcache.put(gossipSub.msgIdProvider(msg), msg)

    let peers = gossipSub.getGossipPeers()
    check peers.len == gossipSub.parameters.d

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
  asyncTest "`getGossipPeers` - should not crash on missing topics in fanout":
    # No fanout entry exists for the topic; getGossipPeers must cope and
    # still select `d` gossip targets.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      discard

    let topic = "foobar"
    gossipSub.topicParams[topic] = TopicParams.init()
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    var conns = newSeq[Connection]()
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      if i mod 2 == 0:
        gossipSub.mesh[topic].incl(peer)
        gossipSub.grafted(peer, topic)
      else:
        gossipSub.gossipsub[topic].incl(peer)

    # generate messages
    var seqno = 0'u64
    for i in 0..5:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      inc seqno
      let msg = Message.init(peerId, ("HELLO" & $i).toBytes(), topic, some(seqno))
      gossipSub.mcache.put(gossipSub.msgIdProvider(msg), msg)

    let peers = gossipSub.getGossipPeers()
    check peers.len == gossipSub.parameters.d

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
|
|
|
  asyncTest "`getGossipPeers` - should not crash on missing topics in gossip":
    # No gossipsub entry exists for the topic: all peers are in mesh or
    # fanout, so there is nobody left to gossip to — expect zero targets.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      discard

    let topic = "foobar"
    gossipSub.topicParams[topic] = TopicParams.init()
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.fanout[topic] = initHashSet[PubSubPeer]()
    var conns = newSeq[Connection]()
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      if i mod 2 == 0:
        gossipSub.mesh[topic].incl(peer)
        gossipSub.grafted(peer, topic)
      else:
        gossipSub.fanout[topic].incl(peer)

    # generate messages
    var seqno = 0'u64
    for i in 0..5:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      inc seqno
      let msg = Message.init(peerId, ("bar" & $i).toBytes(), topic, some(seqno))
      gossipSub.mcache.put(gossipSub.msgIdProvider(msg), msg)

    let peers = gossipSub.getGossipPeers()
    check peers.len == 0

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
2021-01-13 14:49:44 +00:00
|
|
|
|
|
|
|
  asyncTest "Drop messages of topics without subscription":
    # Incoming messages for topics we are not subscribed to must not be
    # cached (and the handler must never fire — hence `check false`).
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      check false

    let topic = "foobar"
    var conns = newSeq[Connection]()
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler

    # generate messages and feed them straight into the RPC handler
    var seqno = 0'u64
    for i in 0..5:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      inc seqno
      let msg = Message.init(peerId, ("bar" & $i).toBytes(), topic, some(seqno))
      await gossipSub.rpcHandler(peer, RPCMsg(messages: @[msg]))

    # nothing should have been admitted into the message cache
    check gossipSub.mcache.msgs.len == 0

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
2021-01-15 04:48:03 +00:00
|
|
|
|
|
|
|
  asyncTest "Disconnect bad peers":
    # With disconnectBadPeers enabled, peers whose score falls below the
    # graylist threshold must be disconnected and fully cleaned up.
    let gossipSub = TestGossipSub.init(newStandardSwitch())
    gossipSub.parameters.disconnectBadPeers = true
    gossipSub.parameters.appSpecificWeight = 1.0

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      check false

    let topic = "foobar"
    var conns = newSeq[Connection]()
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.sendConn = conn
      peer.handler = handler
      # force the app score below the graylist threshold
      peer.appScore = gossipSub.parameters.graylistThreshold - 1
      gossipSub.gossipsub.mgetOrPut(topic, initHashSet[PubSubPeer]()).incl(peer)
      gossipSub.switch.connManager.storeConn(conn)

    gossipSub.updateScores()

    # give the async disconnects a chance to run
    await sleepAsync(100.millis)

    check:
      # test our disconnect mechanics
      gossipSub.gossipsub.peers(topic) == 0
      # also ensure we cleanup properly the peersInIP table
      gossipSub.peersInIP.len == 0

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
2021-02-12 03:27:26 +00:00
|
|
|
|
|
|
|
asyncTest "subscription limits":
|
|
|
|
let gossipSub = TestGossipSub.init(newStandardSwitch())
|
|
|
|
gossipSub.topicsHigh = 10
|
|
|
|
|
|
|
|
var tooManyTopics: seq[string]
|
|
|
|
for i in 0..gossipSub.topicsHigh + 10:
|
|
|
|
tooManyTopics &= "topic" & $i
|
|
|
|
let lotOfSubs = RPCMsg.withSubs(tooManyTopics, true)
|
|
|
|
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2021-09-08 09:07:46 +00:00
|
|
|
let peerId = randomPeerId()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2021-02-12 03:27:26 +00:00
|
|
|
|
|
|
|
await gossipSub.rpcHandler(peer, lotOfSubs)
|
|
|
|
|
|
|
|
check:
|
|
|
|
gossipSub.gossipSub.len == gossipSub.topicsHigh
|
|
|
|
peer.behaviourPenalty > 0.0
|
|
|
|
|
|
|
|
await conn.close()
|
|
|
|
await gossipSub.switch.stop()
|
2021-02-13 04:39:32 +00:00
|
|
|
|
2021-02-22 03:04:20 +00:00
|
|
|
asyncTest "rebalanceMesh fail due to backoff":
|
|
|
|
let gossipSub = TestGossipSub.init(newStandardSwitch())
|
|
|
|
let topic = "foobar"
|
|
|
|
gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
|
|
|
|
gossipSub.topicParams[topic] = TopicParams.init()
|
|
|
|
|
|
|
|
var conns = newSeq[Connection]()
|
|
|
|
gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
|
|
|
|
for i in 0..<15:
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2021-02-22 03:04:20 +00:00
|
|
|
conns &= conn
|
2021-09-08 09:07:46 +00:00
|
|
|
let peerId = randomPeerId()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2021-02-22 03:04:20 +00:00
|
|
|
peer.sendConn = conn
|
|
|
|
gossipSub.gossipsub[topic].incl(peer)
|
|
|
|
gossipSub.backingOff
|
|
|
|
.mgetOrPut(topic, initTable[PeerID, Moment]())
|
2021-09-08 09:07:46 +00:00
|
|
|
.add(peerId, Moment.now() + 1.hours)
|
2021-02-22 03:04:20 +00:00
|
|
|
let prunes = gossipSub.handleGraft(peer, @[ControlGraft(topicID: topic)])
|
|
|
|
# there must be a control prune due to violation of backoff
|
|
|
|
check prunes.len != 0
|
|
|
|
|
|
|
|
check gossipSub.peers.len == 15
|
|
|
|
gossipSub.rebalanceMesh(topic)
|
|
|
|
# expect 0 since they are all backing off
|
|
|
|
check gossipSub.mesh[topic].len == 0
|
|
|
|
|
|
|
|
await allFuturesThrowing(conns.mapIt(it.close()))
|
|
|
|
await gossipSub.switch.stop()
|
|
|
|
|
2021-04-22 09:51:22 +00:00
|
|
|
  asyncTest "rebalanceMesh fail due to backoff - remote":
    # When every mesh member sends us a prune (with backoff), the mesh
    # entry for the topic must be cleaned up entirely.
    let gossipSub = TestGossipSub.init(newStandardSwitch())
    let topic = "foobar"
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    gossipSub.topicParams[topic] = TopicParams.init()

    var conns = newSeq[Connection]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    for i in 0..<15:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.sendConn = conn
      gossipSub.gossipsub[topic].incl(peer)
      gossipSub.mesh[topic].incl(peer)

    check gossipSub.peers.len == 15
    gossipSub.rebalanceMesh(topic)
    check gossipSub.mesh[topic].len != 0

    # every peer prunes us with a backoff period
    for i in 0..<15:
      let peerId = conns[i].peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      gossipSub.handlePrune(peer, @[ControlPrune(
        topicID: topic,
        peers: @[],
        backoff: gossipSub.parameters.pruneBackoff.seconds.uint64
      )])

    # expect topic cleaned up since they are all pruned
    check topic notin gossipSub.mesh

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|
|
|
|
|
2021-02-13 04:39:32 +00:00
|
|
|
asyncTest "rebalanceMesh Degree Hi - audit scenario":
|
|
|
|
let gossipSub = TestGossipSub.init(newStandardSwitch())
|
2021-02-22 03:04:20 +00:00
|
|
|
let topic = "foobar"
|
|
|
|
gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
|
|
|
|
gossipSub.topicParams[topic] = TopicParams.init()
|
2021-02-13 04:39:32 +00:00
|
|
|
gossipSub.parameters.dScore = 4
|
|
|
|
gossipSub.parameters.d = 6
|
|
|
|
gossipSub.parameters.dOut = 3
|
|
|
|
gossipSub.parameters.dHigh = 12
|
|
|
|
gossipSub.parameters.dLow = 4
|
|
|
|
|
|
|
|
var conns = newSeq[Connection]()
|
|
|
|
gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
|
|
|
|
for i in 0..<6:
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2021-03-02 23:23:40 +00:00
|
|
|
conn.transportDir = Direction.In
|
2021-02-13 04:39:32 +00:00
|
|
|
conns &= conn
|
2021-09-08 09:07:46 +00:00
|
|
|
let peerId = PeerId.init(PrivateKey.random(ECDSA, rng[]).get()).tryGet()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2021-02-13 04:39:32 +00:00
|
|
|
peer.score = 40.0
|
|
|
|
peer.sendConn = conn
|
|
|
|
gossipSub.grafted(peer, topic)
|
|
|
|
gossipSub.mesh[topic].incl(peer)
|
|
|
|
|
|
|
|
for i in 0..<7:
|
2021-06-07 07:32:08 +00:00
|
|
|
let conn = TestBufferStream.new(noop)
|
2021-03-02 23:23:40 +00:00
|
|
|
conn.transportDir = Direction.Out
|
2021-02-13 04:39:32 +00:00
|
|
|
conns &= conn
|
2021-09-08 09:07:46 +00:00
|
|
|
let peerId = PeerId.init(PrivateKey.random(ECDSA, rng[]).get()).tryGet()
|
|
|
|
conn.peerId = peerId
|
|
|
|
let peer = gossipSub.getPubSubPeer(peerId)
|
2021-02-13 04:39:32 +00:00
|
|
|
peer.score = 10.0
|
|
|
|
peer.sendConn = conn
|
|
|
|
gossipSub.grafted(peer, topic)
|
|
|
|
gossipSub.mesh[topic].incl(peer)
|
|
|
|
|
|
|
|
check gossipSub.mesh[topic].len == 13
|
|
|
|
gossipSub.rebalanceMesh(topic)
|
|
|
|
# ensure we are above dlow
|
|
|
|
check gossipSub.mesh[topic].len > gossipSub.parameters.dLow
|
|
|
|
var outbound = 0
|
|
|
|
for peer in gossipSub.mesh[topic]:
|
2021-03-02 23:23:40 +00:00
|
|
|
if peer.sendConn.transportDir == Direction.Out:
|
2021-02-13 04:39:32 +00:00
|
|
|
inc outbound
|
|
|
|
# ensure we give priority and keep at least dOut outbound peers
|
|
|
|
check outbound >= gossipSub.parameters.dOut
|
|
|
|
|
|
|
|
await allFuturesThrowing(conns.mapIt(it.close()))
|
|
|
|
await gossipSub.switch.stop()
|
2021-02-26 05:27:42 +00:00
|
|
|
|
|
|
|
  asyncTest "handleIHave/Iwant tests":
    # IHAVE/IWANT control handling: budget enforcement and dedup of
    # repeated message IDs in both directions.
    let gossipSub = TestGossipSub.init(newStandardSwitch())

    proc handler(peer: PubSubPeer, msg: RPCMsg) {.async.} =
      check false

    let topic = "foobar"
    var conns = newSeq[Connection]()
    gossipSub.gossipsub[topic] = initHashSet[PubSubPeer]()
    gossipSub.mesh[topic] = initHashSet[PubSubPeer]()
    for i in 0..<30:
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      peer.handler = handler
      gossipSub.grafted(peer, topic)
      gossipSub.mesh[topic].incl(peer)

    block:
      # should ignore no budget peer
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      let id = @[0'u8, 1, 2, 3]
      let msg = ControlIHave(
        topicID: topic,
        messageIDs: @[id, id, id]
      )
      peer.iHaveBudget = 0
      let iwants = gossipSub.handleIHave(peer, @[msg])
      check: iwants.messageIDs.len == 0

    block:
      # given duplicate ihave should generate only one iwant
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      let id = @[0'u8, 1, 2, 3]
      let msg = ControlIHave(
        topicID: topic,
        messageIDs: @[id, id, id]
      )
      let iwants = gossipSub.handleIHave(peer, @[msg])
      check: iwants.messageIDs.len == 1

    block:
      # given duplicate iwant should generate only one message
      let conn = TestBufferStream.new(noop)
      conns &= conn
      let peerId = randomPeerId()
      conn.peerId = peerId
      let peer = gossipSub.getPubSubPeer(peerId)
      let id = @[0'u8, 1, 2, 3]
      gossipSub.mcache.put(id, Message())
      let msg = ControlIWant(
        messageIDs: @[id, id, id]
      )
      let genmsg = gossipSub.handleIWant(peer, @[msg])
      check: genmsg.len == 1

    # only the single message put into mcache above should remain
    check gossipSub.mcache.msgs.len == 1

    await allFuturesThrowing(conns.mapIt(it.close()))
    await gossipSub.switch.stop()
|