2023-02-27 14:38:24 +00:00
|
|
|
|
## Waku Relay module. Thin layer on top of GossipSub.
|
|
|
|
|
##
|
|
|
|
|
## See https://github.com/vacp2p/specs/blob/master/specs/waku/v2/waku-relay.md
|
|
|
|
|
## for spec.
|
|
|
|
|
# Enforce strict exception tracking module-wide. Nim < 1.4 spells the empty
# effect list as [Defect]; newer compilers use the bare [] form.
when (NimMajor, NimMinor) < (1, 4):
  {.push raises: [Defect].}
else:
  {.push raises: [].}
|
|
|
|
|
|
|
|
|
|
import
|
2024-02-01 17:16:10 +00:00
|
|
|
|
std/strformat,
|
2023-02-27 14:38:24 +00:00
|
|
|
|
stew/results,
|
2023-09-05 09:05:07 +00:00
|
|
|
|
sequtils,
|
2023-02-27 14:38:24 +00:00
|
|
|
|
chronos,
|
|
|
|
|
chronicles,
|
|
|
|
|
metrics,
|
|
|
|
|
libp2p/multihash,
|
|
|
|
|
libp2p/protocols/pubsub/pubsub,
|
|
|
|
|
libp2p/protocols/pubsub/gossipsub,
|
2023-06-06 17:28:47 +00:00
|
|
|
|
libp2p/protocols/pubsub/rpc/messages,
|
2023-02-27 14:38:24 +00:00
|
|
|
|
libp2p/stream/connection,
|
|
|
|
|
libp2p/switch
|
2024-03-15 23:08:47 +00:00
|
|
|
|
import ../waku_core, ./message_id
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
|
|
|
|
# Tag all chronicles log output from this module with the "waku relay" topic.
logScope:
  topics = "waku relay"

# libp2p protocol identifier negotiated for Waku Relay streams.
const WakuRelayCodec* = "/vac/waku/relay/2.0.0"
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
2023-06-06 17:28:47 +00:00
|
|
|
|
# Per-topic peer-scoring parameters applied to every subscribed pubsub topic
# (see `subscribe` below, which assigns these to `w.topicParams`).
# see: https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md#overview-of-new-parameters
const TopicParameters = TopicParams(
  topicWeight: 1,

  # p1: favours peers already in the mesh
  timeInMeshWeight: 0.01,
  timeInMeshQuantum: 1.seconds,
  timeInMeshCap: 10.0,

  # p2: rewards fast peers
  firstMessageDeliveriesWeight: 1.0,
  firstMessageDeliveriesDecay: 0.5,
  firstMessageDeliveriesCap: 10.0,

  # p3: penalizes lazy peers. safe low value
  # (all zeros: the mesh-delivery penalty is effectively disabled)
  meshMessageDeliveriesWeight: 0.0,
  meshMessageDeliveriesDecay: 0.0,
  meshMessageDeliveriesCap: 0,
  meshMessageDeliveriesThreshold: 0,
  meshMessageDeliveriesWindow: 0.milliseconds,
  meshMessageDeliveriesActivation: 0.seconds,

  # p3b: tracks history of prunes (disabled)
  meshFailurePenaltyWeight: 0.0,
  meshFailurePenaltyDecay: 0.0,

  # p4: penalizes invalid messages. highly penalize
  # peers sending wrong messages
  invalidMessageDeliveriesWeight: -100.0,
  invalidMessageDeliveriesDecay: 0.5,
)
|
2023-06-06 17:28:47 +00:00
|
|
|
|
|
|
|
|
|
# Global gossipsub configuration and peer-scoring thresholds for the relay.
# see: https://rfc.vac.dev/spec/29/#gossipsub-v10-parameters
const GossipsubParameters = GossipSubParams.init(
  explicit = true,
  pruneBackoff = chronos.minutes(1),
  unsubscribeBackoff = chronos.seconds(5),
  floodPublish = true,
  gossipFactor = 0.25,
  # mesh degree targets: keep between dLow and dHigh peers, aim for d
  d = 6,
  dLow = 4,
  dHigh = 8,
  dScore = 6,
  dOut = 3,
  dLazy = 6,
  heartbeatInterval = chronos.seconds(1),
  historyLength = 6,
  historyGossip = 3,
  fanoutTTL = chronos.minutes(1),
  seenTTL = chronos.minutes(2),

  # no gossip is sent to peers below this score
  gossipThreshold = -100,

  # no self-published msgs are sent to peers below this score
  publishThreshold = -1000,

  # used to trigger disconnections + ignore peer if below this score
  graylistThreshold = -10000,

  # grafts better peers if the mesh median score drops below this. unset.
  opportunisticGraftThreshold = 0,

  # how often peer scoring is updated
  decayInterval = chronos.seconds(12),

  # below this we consider the parameter to be zero
  decayToZero = 0.01,

  # remember peer score during x after it disconnects
  retainScore = chronos.minutes(10),

  # p5: application specific, unset
  appSpecificWeight = 0.0,

  # p6: penalizes peers sharing more than threshold ips
  ipColocationFactorWeight = -50.0,
  ipColocationFactorThreshold = 5.0,

  # p7: penalizes bad behaviour (weight and decay)
  behaviourPenaltyWeight = -10.0,
  behaviourPenaltyDecay = 0.986,

  # triggers disconnections of bad peers aka score <graylistThreshold
  disconnectBadPeers = true,
)
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
|
|
|
|
type
  # Generic result type used throughout this module; errors are plain strings.
  WakuRelayResult*[T] = Result[T, string]

  # Application callback invoked for every decoded WakuMessage received on a
  # subscribed pubsub topic.
  WakuRelayHandler* = proc(pubsubTopic: PubsubTopic, message: WakuMessage): Future[void] {.
    gcsafe, raises: [Defect]
  .}

  # Application-supplied validator; returning anything other than
  # ValidationResult.Accept rejects the message.
  WakuValidatorHandler* = proc(
    pubsubTopic: PubsubTopic, message: WakuMessage
  ): Future[ValidationResult] {.gcsafe, raises: [Defect].}

  WakuRelay* = ref object of GossipSub
    # seq of tuples: the first entry in each tuple is a validator that is
    # called for every topic; the second entry is the error message to be
    # returned when that validator fails
    wakuValidators: seq[tuple[handler: WakuValidatorHandler, errorMessage: string]]
    # tracks which topics already have the ordered validator registered with
    # gossipsub, so it is only inserted once per topic
    validatorInserted: Table[PubsubTopic, bool]
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
|
|
|
|
proc initProtocolHandler(w: WakuRelay) =
  ## Installs the stream handler and codec on this WakuRelay instance,
  ## delegating every incoming relay stream to gossipsub's handleConn.
  proc handler(conn: Connection, proto: string) {.async.} =
    ## main protocol handler that gets triggered on every
    ## connection for a protocol string
    ## e.g. ``/wakusub/0.0.1``, etc...
    debug "Incoming WakuRelay connection", connection = conn, protocol = proto

    try:
      await w.handleConn(conn, proto)
    except CancelledError:
      # This is top-level procedure which will work as separate task, so it
      # do not need to propagate CancelledError.
      error "Unexpected cancellation in relay handler",
        conn = conn, error = getCurrentExceptionMsg()
    except CatchableError:
      # swallow and log: an exception escaping this handler would leak out of
      # the per-connection task
      error "WakuRelay handler leaks an error",
        conn = conn, error = getCurrentExceptionMsg()

  # XXX: Handler hijack GossipSub here?
  w.handler = handler
  w.codec = WakuRelayCodec
|
|
|
|
|
|
2024-03-15 23:08:47 +00:00
|
|
|
|
proc new*(
    T: type WakuRelay, switch: Switch, maxMessageSize = int(MaxWakuMessageSize)
): WakuRelayResult[T] =
  ## Creates a WakuRelay mounted on `switch`, configured with the module's
  ## gossipsub parameters, or err(...) if gossipsub initialization fails.
  ## maxMessageSize: max num bytes that are allowed for the WakuMessage

  var w: WakuRelay
  try:
    w = WakuRelay.init(
      switch = switch,
      # strip author/signature fields from outgoing messages
      anonymize = true,
      verifySignature = false,
      sign = false,
      # deliver self-published messages to local subscription handlers
      triggerSelf = true,
      msgIdProvider = defaultMessageIdProvider,
      maxMessageSize = maxMessageSize,
      parameters = GossipsubParameters,
    )

    procCall GossipSub(w).initPubSub()
    w.initProtocolHandler()
  except InitializationError:
    return err("initialization error: " & getCurrentExceptionMsg())

  return ok(w)
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
2024-03-15 23:08:47 +00:00
|
|
|
|
proc addValidator*(
    w: WakuRelay, handler: WakuValidatorHandler, errorMessage: string = ""
) {.gcsafe.} =
  ## Registers `handler` to run, in insertion order, against every message
  ## received on any subscribed topic. `errorMessage` is the string surfaced
  ## when this validator rejects a message (empty means a generic error).
  let validatorEntry = (handler: handler, errorMessage: errorMessage)
  w.wakuValidators.add(validatorEntry)
|
2024-01-29 15:11:26 +00:00
|
|
|
|
|
2023-02-27 14:38:24 +00:00
|
|
|
|
method start*(w: WakuRelay) {.async.} =
  ## Starts the underlying gossipsub machinery (heartbeat, scoring).
  debug "start"
  await procCall GossipSub(w).start()
|
|
|
|
|
|
|
|
|
|
method stop*(w: WakuRelay) {.async.} =
  ## Stops the underlying gossipsub machinery.
  debug "stop"
  await procCall GossipSub(w).stop()
|
|
|
|
|
|
|
|
|
|
proc isSubscribed*(w: WakuRelay, topic: PubsubTopic): bool =
  ## True when this node currently holds a gossipsub subscription for `topic`.
  topic in GossipSub(w).topics
|
|
|
|
|
|
2023-09-01 13:03:59 +00:00
|
|
|
|
proc subscribedTopics*(w: WakuRelay): seq[PubsubTopic] =
  ## Returns all pubsub topics this node is currently subscribed to.
  for subscribedTopic in GossipSub(w).topics.keys():
    result.add(subscribedTopic)
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
2024-03-04 14:31:37 +00:00
|
|
|
|
proc generateOrderedValidator(w: WakuRelay): auto {.gcsafe.} =
  ## Builds the single gossipsub validator registered per topic: it rejects
  ## payloads that do not decode as WakuMessage, then runs every registered
  ## waku validator sequentially, short-circuiting on the first non-Accept.
  # rejects messages that are not WakuMessage
  let wrappedValidator = proc(
      pubsubTopic: string, message: messages.Message
  ): Future[ValidationResult] {.async.} =
    # can be optimized by checking if the message is a WakuMessage without allocating memory
    # see nim-libp2p protobuf library
    let msgRes = WakuMessage.decode(message.data)
    if msgRes.isErr():
      trace "protocol generateOrderedValidator reject decode error",
        error = msgRes.error
      return ValidationResult.Reject
    let msg = msgRes.get()

    # now sequentially validate the message
    for (validator, _) in w.wakuValidators:
      let validatorRes = await validator(pubsubTopic, msg)
      if validatorRes != ValidationResult.Accept:
        return validatorRes
    return ValidationResult.Accept
  return wrappedValidator
|
|
|
|
|
|
2024-03-15 23:08:47 +00:00
|
|
|
|
proc validateMessage*(
    w: WakuRelay, pubsubTopic: string, msg: WakuMessage
): Future[Result[void, string]] {.async.} =
  ## Validates `msg` without publishing it: checks the encoded size against
  ## the configured maximum, then runs every registered waku validator in
  ## order. Returns err(<validator's error message>) on the first rejection,
  ## ok() when all validators accept.
  let messageSizeBytes = msg.encode().buffer.len

  if messageSizeBytes > w.maxMessageSize:
    let message = fmt"Message size exceeded maximum of {w.maxMessageSize} bytes"
    debug "Invalid Waku Message", error = message
    return err(message)

  for (validator, message) in w.wakuValidators:
    let validatorRes = await validator(pubsubTopic, msg)
    if validatorRes != ValidationResult.Accept:
      # fall back to a generic error when this validator was registered
      # without a custom error message
      if message.len > 0:
        return err(message)
      else:
        return err("Validator failed")
  return ok()
|
2024-02-01 17:16:10 +00:00
|
|
|
|
|
2024-03-15 23:08:47 +00:00
|
|
|
|
proc subscribe*(
    w: WakuRelay, pubsubTopic: PubsubTopic, handler: WakuRelayHandler
): TopicHandler =
  ## Subscribes `handler` to `pubsubTopic`, registering the ordered validator
  ## and the topic's scoring parameters on first subscription. Returns the
  ## wrapped TopicHandler, which callers need to later `unsubscribe`.
  debug "subscribe", pubsubTopic = pubsubTopic

  # We need to wrap the handler since gossipsub doesnt understand WakuMessage
  let wrappedHandler = proc(
      pubsubTopic: string, data: seq[byte]
  ): Future[void] {.gcsafe, raises: [].} =
    let decMsg = WakuMessage.decode(data)
    if decMsg.isErr():
      # fine if triggerSelf enabled, since validators are bypassed
      error "failed to decode WakuMessage, validator passed a wrong message",
        error = decMsg.error
      # return an already-completed future so the caller has nothing to await
      let fut = newFuture[void]()
      fut.complete()
      return fut
    else:
      return handler(pubsubTopic, decMsg.get())

  # Add the ordered validator to the topic
  # This assumes that if `w.validatorInserted.hasKey(pubSubTopic) is true`, it contains the ordered validator.
  # Otherwise this might lead to unintended behaviour.
  if not w.validatorInserted.hasKey(pubSubTopic):
    procCall GossipSub(w).addValidator(pubSubTopic, w.generateOrderedValidator())
    w.validatorInserted[pubSubTopic] = true

  # set this topic parameters for scoring
  w.topicParams[pubsubTopic] = TopicParameters

  # subscribe to the topic with our wrapped handler
  procCall GossipSub(w).subscribe(pubsubTopic, wrappedHandler)

  return wrappedHandler
|
|
|
|
|
|
|
|
|
|
proc unsubscribeAll*(w: WakuRelay, pubsubTopic: PubsubTopic) =
  ## Unsubscribe all handlers on this pubsub topic

  debug "unsubscribe all", pubsubTopic = pubsubTopic

  procCall GossipSub(w).unsubscribeAll(pubsubTopic)
  # forget the validator bookkeeping so a future subscribe re-registers it
  w.validatorInserted.del(pubsubTopic)
|
2023-02-27 14:38:24 +00:00
|
|
|
|
|
2023-09-26 11:33:52 +00:00
|
|
|
|
proc unsubscribe*(w: WakuRelay, pubsubTopic: PubsubTopic, handler: TopicHandler) =
  ## Unsubscribe this handler on this pubsub topic
  ## `handler` must be the wrapped TopicHandler returned by `subscribe`.

  debug "unsubscribe", pubsubTopic = pubsubTopic

  procCall GossipSub(w).unsubscribe(pubsubTopic, handler)
|
|
|
|
|
|
2024-03-15 23:08:47 +00:00
|
|
|
|
proc publish*(
    w: WakuRelay, pubsubTopic: PubsubTopic, message: WakuMessage
): Future[int] {.async.} =
  ## Encodes `message` and publishes it on `pubsubTopic` via gossipsub.
  ## Returns the number of peers the message was sent to.
  trace "publish", pubsubTopic = pubsubTopic
  let encodedMessage = message.encode().buffer

  return await procCall GossipSub(w).publish(pubsubTopic, encodedMessage)
|