From 3842d84b55eb96728f6b05b9307ff823fac58a54 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Fri, 11 Jul 2025 13:33:45 +1000 Subject: [PATCH 01/23] feat!: Introduce routing info concept Concepts are being mixed up between the global network config (static vs auto sharding), that needs to be the same of all nodes in the network, individual node configuration (eg relay node subscribing to a given shard), and the routing characteristic of a specific message (eg pubsub topic, shard). This stops proper configuration of nwaku post 0.36.0 because we know need to be deliberate on whether nwaku nodes are running with auto or static sharding. It also included various back and forth conversions between shards, pubsub topics, etc. With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded. We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration. Routing info abstract pubsub topic, shard, and autosharding needs. Which should lead to easier tidy up of the pubsub concept at a later stage. # Conflicts: # packages/core/src/lib/connection_manager/connection_manager.ts # packages/core/src/lib/metadata/metadata.ts # packages/interfaces/src/metadata.ts # packages/interfaces/src/sharding.ts # packages/relay/src/create.ts # packages/sdk/src/filter/filter.ts # packages/sdk/src/filter/types.ts # packages/sdk/src/light_push/light_push.spec.ts # packages/tests/tests/sharding/auto_sharding.spec.ts # packages/tests/tests/sharding/static_sharding.spec.ts # Conflicts: # packages/sdk/src/store/store.ts --- .../connection_manager.spec.ts | 2 +- .../connection_manager/connection_manager.ts | 13 +- .../src/lib/connection_manager/dialer.spec.ts | 20 +- .../core/src/lib/connection_manager/dialer.ts | 6 +- .../keep_alive_manager.spec.ts | 22 + .../connection_manager/keep_alive_manager.ts | 15 +- .../connection_manager/shard_reader.spec.ts | 119 +++-- .../lib/connection_manager/shard_reader.ts | 79 ++-- .../core/src/lib/light_push/light_push.ts | 8 +- .../core/src/lib/message/version_0.spec.ts | 91 ++-- packages/core/src/lib/message/version_0.ts | 59 ++- packages/core/src/lib/metadata/metadata.ts | 14 +- packages/core/src/lib/store/rpc.spec.ts | 31 +- packages/core/src/lib/store/rpc.ts | 4 +- packages/core/src/lib/store/store.spec.ts | 11 +- packages/core/src/lib/store/store.ts | 2 +- .../waku_peer_exchange_discovery.ts | 4 +- packages/enr/src/enr.ts | 6 +- packages/enr/src/raw_enr.ts | 6 +- packages/interfaces/src/constants.ts | 10 +- packages/interfaces/src/enr.ts | 4 +- packages/interfaces/src/message.ts | 35 +- packages/interfaces/src/metadata.ts | 4 +- packages/interfaces/src/protocols.ts | 4 +- packages/interfaces/src/sharding.ts | 49 +- packages/interfaces/src/store.ts | 7 +- packages/interfaces/src/waku.ts | 13 +- packages/message-encryption/src/ecies.spec.ts | 77 ++- packages/message-encryption/src/ecies.ts | 47 +- .../message-encryption/src/symmetric.spec.ts | 77 ++- packages/message-encryption/src/symmetric.ts | 47 +- packages/relay/src/create.ts | 12 +- packages/relay/src/message_validator.spec.ts | 21 +- packages/relay/src/relay.ts | 30 +- packages/relay/src/topic_only_message.ts | 26 +- .../tests/high-throughput.spec.ts | 44 +- .../reliability-tests/tests/longevity.spec.ts | 40 +- .../tests/throughput-sizes.spec.ts | 40 +- packages/rln/src/codec.spec.ts | 120 +++-- packages/rln/src/codec.test-utils.ts | 18 +- packages/rln/src/codec.ts | 14 +- packages/rln/src/rln.ts | 31 +- packages/sdk/src/filter/filter.spec.ts | 26 +- packages/sdk/src/filter/filter.ts | 16 +- packages/sdk/src/filter/subscription.spec.ts | 24 +- packages/sdk/src/filter/subscription.ts | 25 +- packages/sdk/src/filter/types.ts | 8 +- .../sdk/src/light_push/light_push.spec.ts | 15 +- packages/sdk/src/light_push/light_push.ts | 6 +- .../sdk/src/light_push/retry_manager.spec.ts | 26 +- packages/sdk/src/light_push/retry_manager.ts | 12 +- .../sdk/src/peer_manager/peer_manager.spec.ts | 19 +- packages/sdk/src/peer_manager/peer_manager.ts | 24 +- packages/sdk/src/store/store.spec.ts | 31 +- packages/sdk/src/store/store.ts | 35 +- packages/sdk/src/waku/utils.spec.ts | 119 ----- packages/sdk/src/waku/utils.ts | 47 -- packages/sdk/src/waku/waku.ts | 46 +- packages/tests/src/constants.ts | 47 +- packages/tests/src/lib/index.ts | 55 ++- packages/tests/src/lib/message_collector.ts | 26 +- packages/tests/src/lib/runNodes.ts | 95 +++- packages/tests/src/lib/service_node.ts | 102 +++- packages/tests/src/types.ts | 7 +- .../tests/src/utils/generate_test_data.ts | 22 +- packages/tests/src/utils/nodes.ts | 28 +- .../connection_limiter.spec.ts | 8 +- .../tests/connection-mananger/dialing.spec.ts | 6 +- .../discovery_dialer.spec.ts | 6 +- .../network_monitor.spec.ts | 17 +- .../tests/tests/connection-mananger/utils.ts | 13 +- packages/tests/tests/enr.node.spec.ts | 30 +- packages/tests/tests/ephemeral.node.spec.ts | 64 ++- packages/tests/tests/filter/push.node.spec.ts | 45 +- .../tests/tests/filter/subscribe.node.spec.ts | 232 ++++++---- .../tests/filter/unsubscribe.node.spec.ts | 48 +- packages/tests/tests/filter/utils.ts | 137 +----- .../tests/tests/light-push/index.node.spec.ts | 104 ++--- .../light-push/multiple_pubsub.node.spec.ts | 283 +++++------ packages/tests/tests/light-push/utils.ts | 41 +- packages/tests/tests/metadata.spec.ts | 372 +++++++++------ packages/tests/tests/nwaku.node.spec.ts | 2 - .../tests/peer-exchange/compliance.spec.ts | 6 +- .../continuous_discovery.spec.ts | 20 +- .../tests/tests/peer-exchange/index.spec.ts | 18 +- .../tests/peer-exchange/pe.optional.spec.ts | 6 +- packages/tests/tests/relay/index.node.spec.ts | 18 +- .../tests/tests/relay/interop.node.spec.ts | 22 +- .../tests/relay/multiple_pubsub.node.spec.ts | 192 ++++---- .../tests/tests/relay/publish.node.spec.ts | 67 +-- .../tests/tests/relay/subscribe.node.spec.ts | 92 ++-- packages/tests/tests/relay/utils.ts | 42 +- .../tests/sharding/auto_sharding.spec.ts | 86 ++-- .../tests/sharding/peer_management.spec.ts | 128 ++--- .../tests/sharding/static_sharding.spec.ts | 128 +---- .../tests/tests/store/cursor.node.spec.ts | 33 +- .../store/different_static_shards.spec.ts | 190 ++++++++ .../tests/store/error_handling.node.spec.ts | 8 +- packages/tests/tests/store/index.node.spec.ts | 97 ++-- .../tests/tests/store/message_hash.spec.ts | 13 +- .../tests/tests/store/multiple_pubsub.spec.ts | 438 ------------------ packages/tests/tests/store/order.node.spec.ts | 11 +- .../tests/tests/store/page_size.node.spec.ts | 14 +- .../tests/tests/store/sorting.node.spec.ts | 9 +- .../tests/store/time_filter.node.spec.ts | 11 +- packages/tests/tests/store/utils.ts | 88 ++-- .../tests/wait_for_remote_peer.node.spec.ts | 49 +- packages/tests/tests/waku.node.spec.ts | 42 +- .../utils/src/common/relay_shard_codec.ts | 8 +- .../utils/src/common/sharding/index.spec.ts | 299 ++++-------- packages/utils/src/common/sharding/index.ts | 247 ++-------- .../utils/src/common/sharding/routing_info.ts | 183 ++++++++ .../utils/src/common/sharding/type_guards.ts | 10 +- 113 files changed, 2896 insertions(+), 3230 deletions(-) delete mode 100644 packages/sdk/src/waku/utils.spec.ts delete mode 100644 packages/sdk/src/waku/utils.ts create mode 100644 packages/tests/tests/store/different_static_shards.spec.ts delete mode 100644 packages/tests/tests/store/multiple_pubsub.spec.ts create mode 100644 packages/utils/src/common/sharding/routing_info.ts diff --git a/packages/core/src/lib/connection_manager/connection_manager.spec.ts b/packages/core/src/lib/connection_manager/connection_manager.spec.ts index 25235cba12..ec1d53921f 100644 --- a/packages/core/src/lib/connection_manager/connection_manager.spec.ts +++ b/packages/core/src/lib/connection_manager/connection_manager.spec.ts @@ -63,7 +63,7 @@ describe("ConnectionManager", () => { } as unknown as IWakuEventEmitter; networkConfig = { - clusterId: 1, + clusterId: 2, shards: [0, 1] } as NetworkConfig; diff --git a/packages/core/src/lib/connection_manager/connection_manager.ts b/packages/core/src/lib/connection_manager/connection_manager.ts index 952ab32d53..0f3f83e159 100644 --- a/packages/core/src/lib/connection_manager/connection_manager.ts +++ b/packages/core/src/lib/connection_manager/connection_manager.ts @@ -1,11 +1,13 @@ import { type Peer, type PeerId, type Stream } from "@libp2p/interface"; import { MultiaddrInput } from "@multiformats/multiaddr"; import { + ClusterId, ConnectionManagerOptions, IConnectionManager, IRelay, IWakuEventEmitter, - NetworkConfig + NetworkConfig, + ShardId } from "@waku/interfaces"; import { Libp2p } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -66,6 +68,7 @@ export class ConnectionManager implements IConnectionManager { this.keepAliveManager = new KeepAliveManager({ relay: options.relay, libp2p: options.libp2p, + networkConfig: options.networkConfig, options: { pingKeepAlive: this.options.pingKeepAlive, relayKeepAlive: this.options.relayKeepAlive @@ -194,4 +197,12 @@ export class ConnectionManager implements IConnectionManager { ): Promise { return this.shardReader.isPeerOnTopic(peerId, pubsubTopic); } + + public async isPeerOnShard( + peerId: PeerId, + clusterId: ClusterId, + shardId: ShardId + ): Promise { + return this.shardReader.isPeerOnShard(peerId, clusterId, shardId); + } } diff --git a/packages/core/src/lib/connection_manager/dialer.spec.ts b/packages/core/src/lib/connection_manager/dialer.spec.ts index 74690a79c9..d7112a5b02 100644 --- a/packages/core/src/lib/connection_manager/dialer.spec.ts +++ b/packages/core/src/lib/connection_manager/dialer.spec.ts @@ -29,7 +29,7 @@ describe("Dialer", () => { mockShardReader = { hasShardInfo: sinon.stub().resolves(false), - isPeerOnNetwork: sinon.stub().resolves(true) + isPeerOnCluster: sinon.stub().resolves(true) } as unknown as sinon.SinonStubbedInstance; mockOptions = { @@ -280,9 +280,9 @@ describe("Dialer", () => { expect(dialStub.calledTwice).to.be.true; }); - it("should skip peer when not on same shard", async () => { + it("should skip peer when not on same cluster", async () => { mockShardReader.hasShardInfo.resolves(true); - mockShardReader.isPeerOnNetwork.resolves(false); + mockShardReader.isPeerOnCluster.resolves(false); const dialStub = libp2p.dial as sinon.SinonStub; @@ -290,12 +290,12 @@ describe("Dialer", () => { expect(dialStub.called).to.be.false; expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true; - expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true; + expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true; }); it("should dial peer when on same shard", async () => { mockShardReader.hasShardInfo.resolves(true); - mockShardReader.isPeerOnNetwork.resolves(true); + mockShardReader.isPeerOnCluster.resolves(true); const dialStub = libp2p.dial as sinon.SinonStub; dialStub.resolves(); @@ -305,7 +305,7 @@ describe("Dialer", () => { expect(dialStub.calledOnce).to.be.true; expect(dialStub.calledWith(mockPeerId)).to.be.true; expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true; - expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true; + expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true; }); it("should dial peer when no shard info available", async () => { @@ -319,7 +319,7 @@ describe("Dialer", () => { expect(dialStub.calledOnce).to.be.true; expect(dialStub.calledWith(mockPeerId)).to.be.true; expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true; - expect(mockShardReader.isPeerOnNetwork.called).to.be.false; + expect(mockShardReader.isPeerOnCluster.called).to.be.false; }); it("should handle dial errors gracefully", async () => { @@ -468,7 +468,7 @@ describe("Dialer", () => { it("should handle network check errors gracefully", async () => { mockShardReader.hasShardInfo.resolves(true); - mockShardReader.isPeerOnNetwork.rejects(new Error("Network check error")); + mockShardReader.isPeerOnCluster.rejects(new Error("Network check error")); const dialStub = libp2p.dial as sinon.SinonStub; @@ -476,7 +476,7 @@ describe("Dialer", () => { expect(dialStub.called).to.be.false; expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true; - expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true; + expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true; }); }); @@ -512,7 +512,7 @@ describe("Dialer", () => { dialStub.resolves(); mockShardReader.hasShardInfo.withArgs(mockPeerId).resolves(true); - mockShardReader.isPeerOnNetwork.withArgs(mockPeerId).resolves(true); + mockShardReader.isPeerOnCluster.withArgs(mockPeerId).resolves(true); mockShardReader.hasShardInfo.withArgs(mockPeerId2).resolves(false); diff --git a/packages/core/src/lib/connection_manager/dialer.ts b/packages/core/src/lib/connection_manager/dialer.ts index 21989c12aa..fbe317d3d2 100644 --- a/packages/core/src/lib/connection_manager/dialer.ts +++ b/packages/core/src/lib/connection_manager/dialer.ts @@ -153,9 +153,9 @@ export class Dialer implements IDialer { return false; } - const isOnSameShard = await this.shardReader.isPeerOnNetwork(peerId); - if (!isOnSameShard) { - log.info(`Skipping peer ${peerId} - not on same shard`); + const isOnSameCluster = await this.shardReader.isPeerOnCluster(peerId); + if (!isOnSameCluster) { + log.info(`Skipping peer ${peerId} - not on same cluster`); return true; } diff --git a/packages/core/src/lib/connection_manager/keep_alive_manager.spec.ts b/packages/core/src/lib/connection_manager/keep_alive_manager.spec.ts index 3699be7967..9df2050c1e 100644 --- a/packages/core/src/lib/connection_manager/keep_alive_manager.spec.ts +++ b/packages/core/src/lib/connection_manager/keep_alive_manager.spec.ts @@ -1,4 +1,5 @@ import type { PeerId } from "@libp2p/interface"; +import { AutoSharding } from "@waku/interfaces"; import { expect } from "chai"; import sinon from "sinon"; @@ -23,6 +24,11 @@ describe("KeepAliveManager", () => { relayKeepAlive: 60 }; + const defaultNetworkConfig: AutoSharding = { + clusterId: 0, + numShardsInCluster: 1 + }; + beforeEach(() => { clock = sinon.useFakeTimers(); @@ -61,6 +67,7 @@ describe("KeepAliveManager", () => { it("should create KeepAliveManager with required options", () => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p }); @@ -70,6 +77,7 @@ describe("KeepAliveManager", () => { it("should create KeepAliveManager with relay", () => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -82,6 +90,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p }); }); @@ -110,6 +119,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -158,6 +168,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -194,6 +205,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -225,6 +237,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p }); keepAliveManager.start(); @@ -244,6 +257,7 @@ describe("KeepAliveManager", () => { keepAliveManager.stop(); keepAliveManager = new KeepAliveManager({ options: { pingKeepAlive: 0, relayKeepAlive: 0 }, + networkConfig: defaultNetworkConfig, libp2p }); keepAliveManager.start(); @@ -317,6 +331,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -337,6 +352,7 @@ describe("KeepAliveManager", () => { keepAliveManager.stop(); keepAliveManager = new KeepAliveManager({ options: { pingKeepAlive: 30, relayKeepAlive: 0 }, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -355,6 +371,7 @@ describe("KeepAliveManager", () => { keepAliveManager.stop(); keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p }); keepAliveManager.start(); @@ -423,6 +440,7 @@ describe("KeepAliveManager", () => { beforeEach(() => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -489,6 +507,7 @@ describe("KeepAliveManager", () => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay: emptyRelay }); @@ -506,6 +525,7 @@ describe("KeepAliveManager", () => { it("should handle all zero keep alive options", () => { keepAliveManager = new KeepAliveManager({ options: { pingKeepAlive: 0, relayKeepAlive: 0 }, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -525,6 +545,7 @@ describe("KeepAliveManager", () => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); @@ -544,6 +565,7 @@ describe("KeepAliveManager", () => { it("should handle complete peer lifecycle", async () => { keepAliveManager = new KeepAliveManager({ options: defaultOptions, + networkConfig: defaultNetworkConfig, libp2p, relay }); diff --git a/packages/core/src/lib/connection_manager/keep_alive_manager.ts b/packages/core/src/lib/connection_manager/keep_alive_manager.ts index 35c0800e5e..19ad6070d2 100644 --- a/packages/core/src/lib/connection_manager/keep_alive_manager.ts +++ b/packages/core/src/lib/connection_manager/keep_alive_manager.ts @@ -1,6 +1,6 @@ import type { PeerId } from "@libp2p/interface"; -import type { IEncoder, IRelay, Libp2p } from "@waku/interfaces"; -import { Logger, pubsubTopicToSingleShardInfo } from "@waku/utils"; +import type { IEncoder, IRelay, Libp2p, NetworkConfig } from "@waku/interfaces"; +import { createRoutingInfo, Logger } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; import { createEncoder } from "../message/version_0.js"; @@ -15,6 +15,7 @@ type KeepAliveOptions = { type CreateKeepAliveManagerOptions = { options: KeepAliveOptions; + networkConfig: NetworkConfig; libp2p: Libp2p; relay?: IRelay; }; @@ -26,6 +27,7 @@ interface IKeepAliveManager { export class KeepAliveManager implements IKeepAliveManager { private readonly relay?: IRelay; + private readonly networkConfig: NetworkConfig; private readonly libp2p: Libp2p; private readonly options: KeepAliveOptions; @@ -38,10 +40,12 @@ export class KeepAliveManager implements IKeepAliveManager { public constructor({ options, relay, + networkConfig, libp2p }: CreateKeepAliveManagerOptions) { this.options = options; this.relay = relay; + this.networkConfig = networkConfig; this.libp2p = libp2p; this.onPeerConnect = this.onPeerConnect.bind(this); @@ -163,8 +167,13 @@ export class KeepAliveManager implements IKeepAliveManager { continue; } + const routingInfo = createRoutingInfo(this.networkConfig, { + contentTopic: RelayPingContentTopic, + pubsubTopic: topic + }); + const encoder = createEncoder({ - pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(topic), + routingInfo: routingInfo, contentTopic: RelayPingContentTopic, ephemeral: true }); diff --git a/packages/core/src/lib/connection_manager/shard_reader.spec.ts b/packages/core/src/lib/connection_manager/shard_reader.spec.ts index 843966f705..7f38c83190 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.spec.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.spec.ts @@ -1,9 +1,10 @@ import { PeerId } from "@libp2p/interface"; import { + AutoSharding, + DEFAULT_NUM_SHARDS, NetworkConfig, PubsubTopic, - ShardInfo, - SingleShardInfo + RelayShards } from "@waku/interfaces"; import { contentTopicToShardIndex, encodeRelayShard } from "@waku/utils"; import { expect } from "chai"; @@ -30,12 +31,12 @@ describe("ShardReader", function () { const testClusterId = 3; const testShardIndex = contentTopicToShardIndex(testContentTopic); - const testNetworkConfig: NetworkConfig = { - contentTopics: [testContentTopic], - clusterId: testClusterId + const testNetworkConfig: AutoSharding = { + clusterId: testClusterId, + numShardsInCluster: DEFAULT_NUM_SHARDS }; - const testShardInfo: ShardInfo = { + const testRelayShards: RelayShards = { clusterId: testClusterId, shards: [testShardIndex] }; @@ -64,10 +65,10 @@ describe("ShardReader", function () { }); describe("constructor", function () { - it("should create ShardReader with contentTopics network config", function () { - const config: NetworkConfig = { - contentTopics: ["/test/1/waku-light-push/utf8"], - clusterId: 3 + it("should create ShardReader with auto sharding network config", function () { + const config: AutoSharding = { + clusterId: 3, + numShardsInCluster: 10 }; const reader = new ShardReader({ @@ -78,10 +79,9 @@ describe("ShardReader", function () { expect(reader).to.be.instanceOf(ShardReader); }); - it("should create ShardReader with shards network config", function () { + it("should create ShardReader with static shards network config", function () { const config: NetworkConfig = { - clusterId: 3, - shards: [1, 2, 3] + clusterId: 3 }; const reader = new ShardReader({ @@ -94,22 +94,22 @@ describe("ShardReader", function () { }); describe("isPeerOnNetwork", function () { - it("should return true when peer is on the same network", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + it("should return true when peer is on the same cluster", async function () { + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; mockPeerStore.get.resolves(mockPeer); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); expect(result).to.be.true; sinon.assert.calledWith(mockPeerStore.get, testPeerId); }); it("should return false when peer is on different cluster", async function () { - const differentClusterShardInfo: ShardInfo = { + const differentClusterShardInfo: RelayShards = { clusterId: 5, shards: [1, 2] }; @@ -120,13 +120,13 @@ describe("ShardReader", function () { mockPeerStore.get.resolves(mockPeer); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); expect(result).to.be.false; }); - it("should return false when peer has no overlapping shards", async function () { - const noOverlapShardInfo: ShardInfo = { + it("should return true even if peer has no overlapping shards", async function () { + const noOverlapShardInfo: RelayShards = { clusterId: testClusterId, shards: [testShardIndex + 100, testShardIndex + 200] // Use different shards }; @@ -137,9 +137,9 @@ describe("ShardReader", function () { mockPeerStore.get.resolves(mockPeer); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); - expect(result).to.be.false; + expect(result).to.be.true; }); it("should return false when peer has no shard info", async function () { @@ -149,7 +149,7 @@ describe("ShardReader", function () { mockPeerStore.get.resolves(mockPeer); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); expect(result).to.be.false; }); @@ -157,7 +157,7 @@ describe("ShardReader", function () { it("should return false when peer is not found", async function () { mockPeerStore.get.rejects(new Error("Peer not found")); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); expect(result).to.be.false; }); @@ -165,66 +165,52 @@ describe("ShardReader", function () { describe("isPeerOnShard", function () { it("should return true when peer is on the specified shard", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; mockPeerStore.get.resolves(mockPeer); - const shard: SingleShardInfo = { - clusterId: testClusterId, - shard: testShardIndex - }; - - const result = await shardReader.isPeerOnShard(testPeerId, shard); + const result = await shardReader.isPeerOnShard( + testPeerId, + testClusterId, + testShardIndex + ); expect(result).to.be.true; }); it("should return false when peer is on different cluster", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; mockPeerStore.get.resolves(mockPeer); - const shard: SingleShardInfo = { - clusterId: 5, - shard: testShardIndex - }; - - const result = await shardReader.isPeerOnShard(testPeerId, shard); + const result = await shardReader.isPeerOnShard( + testPeerId, + 5, + testShardIndex + ); expect(result).to.be.false; }); it("should return false when peer is not on the specified shard", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; mockPeerStore.get.resolves(mockPeer); - const shard: SingleShardInfo = { - clusterId: testClusterId, - shard: testShardIndex + 100 - }; - - const result = await shardReader.isPeerOnShard(testPeerId, shard); - - expect(result).to.be.false; - }); - - it("should return false when shard info is undefined", async function () { - const shard: SingleShardInfo = { - clusterId: testClusterId, - shard: undefined - }; - - const result = await shardReader.isPeerOnShard(testPeerId, shard); + const result = await shardReader.isPeerOnShard( + testPeerId, + testClusterId, + testShardIndex + 100 + ); expect(result).to.be.false; }); @@ -232,12 +218,11 @@ describe("ShardReader", function () { it("should return false when peer shard info is not found", async function () { mockPeerStore.get.rejects(new Error("Peer not found")); - const shard: SingleShardInfo = { - clusterId: testClusterId, - shard: testShardIndex - }; - - const result = await shardReader.isPeerOnShard(testPeerId, shard); + const result = await shardReader.isPeerOnShard( + testPeerId, + testClusterId, + testShardIndex + ); expect(result).to.be.false; }); @@ -245,7 +230,7 @@ describe("ShardReader", function () { describe("isPeerOnTopic", function () { it("should return true when peer is on the pubsub topic shard", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -260,7 +245,7 @@ describe("ShardReader", function () { }); it("should return false when peer is not on the pubsub topic shard", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -275,7 +260,7 @@ describe("ShardReader", function () { }); it("should return false when pubsub topic parsing fails", async function () { - const shardInfoBytes = encodeRelayShard(testShardInfo); + const shardInfoBytes = encodeRelayShard(testRelayShards); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -307,7 +292,7 @@ describe("ShardReader", function () { it("should handle errors gracefully when getting peer info", async function () { mockPeerStore.get.rejects(new Error("Network error")); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); expect(result).to.be.false; }); @@ -319,7 +304,7 @@ describe("ShardReader", function () { mockPeerStore.get.resolves(mockPeer); - const result = await shardReader.isPeerOnNetwork(testPeerId); + const result = await shardReader.isPeerOnCluster(testPeerId); expect(result).to.be.false; }); diff --git a/packages/core/src/lib/connection_manager/shard_reader.ts b/packages/core/src/lib/connection_manager/shard_reader.ts index b7b5a735b0..b5ae35a778 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.ts @@ -1,13 +1,12 @@ import type { PeerId } from "@libp2p/interface"; import type { + ClusterId, NetworkConfig, PubsubTopic, - ShardInfo, - SingleShardInfo, - StaticSharding + RelayShards, + ShardId } from "@waku/interfaces"; import { - contentTopicToShardIndex, decodeRelayShard, Logger, pubsubTopicToSingleShardInfo @@ -23,8 +22,12 @@ type ShardReaderConstructorOptions = { interface IShardReader { hasShardInfo(id: PeerId): Promise; - isPeerOnNetwork(id: PeerId): Promise; - isPeerOnShard(id: PeerId, shard: SingleShardInfo): Promise; + isPeerOnCluster(id: PeerId): Promise; + isPeerOnShard( + id: PeerId, + clusterId: ClusterId, + shard: ShardId + ): Promise; isPeerOnTopic(id: PeerId, pubsubTopic: PubsubTopic): Promise; } @@ -34,33 +37,26 @@ interface IShardReader { export class ShardReader implements IShardReader { private readonly libp2p: Libp2p; - private readonly staticShard: StaticSharding; + private readonly clusterId: ClusterId; public constructor(options: ShardReaderConstructorOptions) { this.libp2p = options.libp2p; - this.staticShard = this.getStaticShardFromNetworkConfig( - options.networkConfig - ); + this.clusterId = options.networkConfig.clusterId; } - public async isPeerOnNetwork(id: PeerId): Promise { - const shardInfo = await this.getShardInfo(id); + public async isPeerOnCluster(id: PeerId): Promise { + const peerRelayShards = await this.getRelayShards(id); - if (!shardInfo) { + if (!peerRelayShards) { return false; } - const clusterMatch = shardInfo.clusterId === this.staticShard.clusterId; - const shardOverlap = this.staticShard.shards.some((s) => - shardInfo.shards.includes(s) - ); - - return clusterMatch && shardOverlap; + return peerRelayShards.clusterId === this.clusterId; } public async hasShardInfo(id: PeerId): Promise { - const shardInfo = await this.getShardInfo(id); + const shardInfo = await this.getRelayShards(id); return !!shardInfo; } @@ -69,8 +65,8 @@ export class ShardReader implements IShardReader { pubsubTopic: PubsubTopic ): Promise { try { - const shardInfo = pubsubTopicToSingleShardInfo(pubsubTopic); - return await this.isPeerOnShard(id, shardInfo); + const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic); + return await this.isPeerOnShard(id, clusterId, shard); } catch (error) { log.error( `Error comparing pubsub topic ${pubsubTopic} with shard info for ${id}`, @@ -82,21 +78,25 @@ export class ShardReader implements IShardReader { public async isPeerOnShard( id: PeerId, - shard: SingleShardInfo + clusterId: ClusterId, + shard: ShardId ): Promise { - const peerShardInfo = await this.getShardInfo(id); - - if (!peerShardInfo || shard.shard === undefined) { + const peerShardInfo = await this.getRelayShards(id); + log.info( + `Checking if peer on same shard: this { clusterId: ${clusterId}, shardId: ${shard} },` + + `${id} { clusterId: ${peerShardInfo?.clusterId}, shards: ${peerShardInfo?.shards} }` + ); + if (!peerShardInfo) { return false; } return ( - peerShardInfo.clusterId === shard.clusterId && - peerShardInfo.shards.includes(shard.shard) + peerShardInfo.clusterId === clusterId && + peerShardInfo.shards.includes(shard) ); } - private async getShardInfo(id: PeerId): Promise { + private async getRelayShards(id: PeerId): Promise { try { const peer = await this.libp2p.peerStore.get(id); @@ -106,29 +106,10 @@ export class ShardReader implements IShardReader { return undefined; } - const decodedShardInfo = decodeRelayShard(shardInfoBytes); - - return decodedShardInfo; + return decodeRelayShard(shardInfoBytes); } catch (error) { log.error(`Error getting shard info for ${id}`, error); return undefined; } } - - private getStaticShardFromNetworkConfig( - networkConfig: NetworkConfig - ): StaticSharding { - if ("shards" in networkConfig) { - return networkConfig; - } - - const shards = networkConfig.contentTopics.map((topic) => - contentTopicToShardIndex(topic) - ); - - return { - clusterId: networkConfig.clusterId!, - shards - }; - } } diff --git a/packages/core/src/lib/light_push/light_push.ts b/packages/core/src/lib/light_push/light_push.ts index 6c2430e5a5..68f17d4a71 100644 --- a/packages/core/src/lib/light_push/light_push.ts +++ b/packages/core/src/lib/light_push/light_push.ts @@ -8,8 +8,7 @@ import { type ThisOrThat } from "@waku/interfaces"; import { PushResponse } from "@waku/proto"; -import { isMessageSizeUnderCap } from "@waku/utils"; -import { Logger } from "@waku/utils"; +import { isMessageSizeUnderCap, Logger } from "@waku/utils"; import all from "it-all"; import * as lp from "it-length-prefixed"; import { pipe } from "it-pipe"; @@ -63,7 +62,10 @@ export class LightPushCore { }; } - const query = PushRpc.createRequest(protoMessage, encoder.pubsubTopic); + const query = PushRpc.createRequest( + protoMessage, + encoder.routingInfo.pubsubTopic + ); return { query, error: null }; } catch (error) { log.error("Failed to prepare push message", error); diff --git a/packages/core/src/lib/message/version_0.spec.ts b/packages/core/src/lib/message/version_0.spec.ts index 4cf6856154..4c9f02ef67 100644 --- a/packages/core/src/lib/message/version_0.spec.ts +++ b/packages/core/src/lib/message/version_0.spec.ts @@ -1,30 +1,38 @@ -import type { IProtoMessage } from "@waku/interfaces"; -import { contentTopicToPubsubTopic } from "@waku/utils"; +import type { AutoSharding, IProtoMessage } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import fc from "fast-check"; import { createDecoder, createEncoder, DecodedMessage } from "./version_0.js"; -const contentTopic = "/js-waku/1/tests/bytes"; -const pubsubTopic = contentTopicToPubsubTopic(contentTopic); +const testContentTopic = "/js-waku/1/tests/bytes"; + +const testNetworkConfig: AutoSharding = { + clusterId: 0, + numShardsInCluster: 8 +}; +const testRoutingInfo = createRoutingInfo(testNetworkConfig, { + contentTopic: testContentTopic +}); describe("Waku Message version 0", function () { it("Round trip binary serialization", async function () { await fc.assert( fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => { const encoder = createEncoder({ - contentTopic + contentTopic: testContentTopic, + routingInfo: testRoutingInfo }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic); + const decoder = createDecoder(testContentTopic, testRoutingInfo); const protoResult = await decoder.fromWireToProtoObj(bytes); const result = (await decoder.fromProtoObj( - pubsubTopic, + testRoutingInfo.pubsubTopic, protoResult! )) as DecodedMessage; - expect(result.contentTopic).to.eq(contentTopic); - expect(result.pubsubTopic).to.eq(pubsubTopic); + expect(result.contentTopic).to.eq(testContentTopic); + expect(result.pubsubTopic).to.eq(testRoutingInfo.pubsubTopic); expect(result.version).to.eq(0); expect(result.ephemeral).to.be.false; expect(result.payload).to.deep.eq(payload); @@ -37,14 +45,15 @@ describe("Waku Message version 0", function () { await fc.assert( fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => { const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, ephemeral: true }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic); + const decoder = createDecoder(testContentTopic, testRoutingInfo); const protoResult = await decoder.fromWireToProtoObj(bytes); const result = (await decoder.fromProtoObj( - pubsubTopic, + testRoutingInfo.pubsubTopic, protoResult! )) as DecodedMessage; @@ -68,15 +77,16 @@ describe("Waku Message version 0", function () { }; const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, ephemeral: true, metaSetter }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic); + const decoder = createDecoder(testContentTopic, testRoutingInfo); const protoResult = await decoder.fromWireToProtoObj(bytes); const result = (await decoder.fromProtoObj( - pubsubTopic, + testRoutingInfo.pubsubTopic, protoResult! )) as DecodedMessage; @@ -99,54 +109,73 @@ describe("Waku Message version 0", function () { describe("Ensures content topic is defined", () => { it("Encoder throws on undefined content topic", () => { const wrapper = function (): void { - createEncoder({ contentTopic: undefined as unknown as string }); + createEncoder({ + contentTopic: undefined as unknown as string, + routingInfo: testRoutingInfo + }); }; - expect(wrapper).to.throw("Content topic must be specified"); + expect(wrapper).to.throw( + "Routing Info must have the same content topic as the encoder" + ); }); it("Encoder throws on empty string content topic", () => { const wrapper = function (): void { - createEncoder({ contentTopic: "" }); + createEncoder({ + contentTopic: "", + routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic: "" }) + }); }; - expect(wrapper).to.throw("Content topic must be specified"); + expect(wrapper).to.throw("AutoSharding requires contentTopic"); }); it("Decoder throws on undefined content topic", () => { const wrapper = function (): void { - createDecoder(undefined as unknown as string); + createDecoder( + undefined as unknown as string, + createRoutingInfo(testNetworkConfig, { + contentTopic: undefined as unknown as string + }) + ); }; - expect(wrapper).to.throw("Content topic must be specified"); + expect(wrapper).to.throw("AutoSharding requires contentTopic"); }); it("Decoder throws on empty string content topic", () => { const wrapper = function (): void { - createDecoder(""); + createDecoder( + "", + createRoutingInfo(testNetworkConfig, { contentTopic: "" }) + ); }; - expect(wrapper).to.throw("Content topic must be specified"); + expect(wrapper).to.throw("AutoSharding requires contentTopic"); }); }); describe("Sets sharding configuration correctly", () => { it("uses static shard pubsub topic instead of autosharding when set", async () => { // Create an encoder setup to use autosharding - const ContentTopic = "/waku/2/content/test.js"; + const contentTopic = "/myapp/1/test/proto"; const autoshardingEncoder = createEncoder({ - pubsubTopicShardInfo: { clusterId: 0 }, - contentTopic: ContentTopic + contentTopic: contentTopic, + routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic }) }); // When autosharding is enabled, we expect the shard index to be 1 - expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/1"); + expect(autoshardingEncoder.routingInfo.pubsubTopic).to.be.eq( + "/waku/2/rs/0/0" + ); // Create an encoder setup to use static sharding with the same content topic - const singleShardInfo = { clusterId: 0, shard: 0 }; const staticshardingEncoder = createEncoder({ - contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo + contentTopic: contentTopic, + routingInfo: createRoutingInfo({ clusterId: 0 }, { shardId: 3 }) }); // When static sharding is enabled, we expect the shard index to be 0 - expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0"); + expect(staticshardingEncoder.routingInfo.pubsubTopic).to.be.eq( + "/waku/2/rs/0/3" + ); }); }); diff --git a/packages/core/src/lib/message/version_0.ts b/packages/core/src/lib/message/version_0.ts index d1777b3c2d..53f337ccde 100644 --- a/packages/core/src/lib/message/version_0.ts +++ b/packages/core/src/lib/message/version_0.ts @@ -1,17 +1,14 @@ import type { - EncoderOptions, IDecodedMessage, IDecoder, IEncoder, IMessage, IMetaSetter, IProtoMessage, - IRateLimitProof, - PubsubTopic, - SingleShardInfo + IRateLimitProof } from "@waku/interfaces"; import { proto_message as proto } from "@waku/proto"; -import { determinePubsubTopic, Logger } from "@waku/utils"; +import { isAutoShardingRoutingInfo, Logger, RoutingInfo } from "@waku/utils"; const log = new Logger("message:version-0"); const OneMillion = BigInt(1_000_000); @@ -67,11 +64,31 @@ export class DecodedMessage implements IDecodedMessage { } } +export type EncoderOptions = { + /** + * The routing information for messages to encode. + */ + routingInfo: RoutingInfo; + /** The content topic to set on outgoing messages. */ + contentTopic: string; + /** + * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. + * @defaultValue `false` + */ + ephemeral?: boolean; + /** + * A function called when encoding messages to set the meta field. + * @param IProtoMessage The message encoded for wire, without the meta field. + * If encryption is used, `metaSetter` only accesses _encrypted_ payload. + */ + metaSetter?: IMetaSetter; +}; + export class Encoder implements IEncoder { public constructor( public contentTopic: string, public ephemeral: boolean = false, - public pubsubTopic: PubsubTopic, + public routingInfo: RoutingInfo, public metaSetter?: IMetaSetter ) { if (!contentTopic || contentTopic === "") { @@ -114,24 +131,22 @@ export class Encoder implements IEncoder { * messages. */ export function createEncoder({ - pubsubTopic, - pubsubTopicShardInfo, contentTopic, + routingInfo, ephemeral, metaSetter }: EncoderOptions): Encoder { - return new Encoder( - contentTopic, - ephemeral, - determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo), - metaSetter - ); + if (isAutoShardingRoutingInfo(routingInfo)) { + if (routingInfo.contentTopic !== contentTopic) + throw "Routing Info must have the same content topic as the encoder"; + } + return new Encoder(contentTopic, ephemeral, routingInfo, metaSetter); } export class Decoder implements IDecoder { public constructor( - public pubsubTopic: PubsubTopic, - public contentTopic: string + public contentTopic: string, + public routingInfo: RoutingInfo ) { if (!contentTopic || contentTopic === "") { throw new Error("Content topic must be specified"); @@ -182,13 +197,15 @@ export class Decoder implements IDecoder { * messages. * * @param contentTopic The resulting decoder will only decode messages with this content topic. + * @param routingInfo */ export function createDecoder( contentTopic: string, - pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic + routingInfo: RoutingInfo ): Decoder { - return new Decoder( - determinePubsubTopic(contentTopic, pubsubTopicShardInfo), - contentTopic - ); + if (isAutoShardingRoutingInfo(routingInfo)) { + if (routingInfo.contentTopic !== contentTopic) + throw "Routing Info must have the same content topic as the encoder"; + } + return new Decoder(contentTopic, routingInfo); } diff --git a/packages/core/src/lib/metadata/metadata.ts b/packages/core/src/lib/metadata/metadata.ts index ac4707e575..18d59b5790 100644 --- a/packages/core/src/lib/metadata/metadata.ts +++ b/packages/core/src/lib/metadata/metadata.ts @@ -7,7 +7,7 @@ import { type MetadataQueryResult, type PeerIdStr, ProtocolError, - type ShardInfo + type RelayShards } from "@waku/interfaces"; import { proto_metadata } from "@waku/proto"; import { encodeRelayShard, Logger } from "@waku/utils"; @@ -25,7 +25,7 @@ export const MetadataCodec = "/vac/waku/metadata/1.0.0"; class Metadata implements IMetadata { private readonly streamManager: StreamManager; private readonly libp2pComponents: Libp2pComponents; - protected handshakesConfirmed: Map = new Map(); + protected handshakesConfirmed: Map = new Map(); public readonly multicodec = MetadataCodec; @@ -148,7 +148,7 @@ class Metadata implements IMetadata { }); const response = proto_metadata.WakuMetadataResponse.decode( bytes - ) as ShardInfo; + ) as RelayShards; if (!response) { log.error("Error decoding metadata response"); @@ -166,16 +166,16 @@ class Metadata implements IMetadata { private async savePeerShardInfo( peerId: PeerId, - shardInfo: ShardInfo + relayShards: RelayShards ): Promise { - // add or update the shardInfo to peer store + // add or update the relayShards to peer store await this.libp2pComponents.peerStore.merge(peerId, { metadata: { - shardInfo: encodeRelayShard(shardInfo) + shardInfo: encodeRelayShard(relayShards) } }); - this.handshakesConfirmed.set(peerId.toString(), shardInfo); + this.handshakesConfirmed.set(peerId.toString(), relayShards); } } diff --git a/packages/core/src/lib/store/rpc.spec.ts b/packages/core/src/lib/store/rpc.spec.ts index 6e38449c2f..ecea28e3c0 100644 --- a/packages/core/src/lib/store/rpc.spec.ts +++ b/packages/core/src/lib/store/rpc.spec.ts @@ -1,11 +1,17 @@ +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { StoreQueryRequest } from "./rpc.js"; +const routingInfo = createRoutingInfo( + { clusterId: 0 }, + { pubsubTopic: "/waku/2/rs/0/0" } +); + describe("StoreQueryRequest validation", () => { it("accepts valid content-filtered query", () => { const request = StoreQueryRequest.create({ - pubsubTopic: "/waku/2/default-waku/proto", + routingInfo, contentTopics: ["/test/1/content/proto"], includeData: true, paginationForward: true @@ -16,7 +22,7 @@ describe("StoreQueryRequest validation", () => { it("rejects content-filtered query with only pubsubTopic", () => { expect(() => StoreQueryRequest.create({ - pubsubTopic: "/waku/2/default-waku/proto", + routingInfo, contentTopics: [], includeData: true, paginationForward: true @@ -26,22 +32,9 @@ describe("StoreQueryRequest validation", () => { ); }); - it("rejects content-filtered query with only contentTopics", () => { - expect(() => - StoreQueryRequest.create({ - pubsubTopic: "", - contentTopics: ["/test/1/content/proto"], - includeData: true, - paginationForward: true - }) - ).to.throw( - "Both pubsubTopic and contentTopics must be set together for content-filtered queries" - ); - }); - it("accepts valid message hash query", () => { const request = StoreQueryRequest.create({ - pubsubTopic: "", + routingInfo, contentTopics: [], messageHashes: [new Uint8Array([1, 2, 3, 4])], includeData: true, @@ -54,7 +47,7 @@ describe("StoreQueryRequest validation", () => { expect(() => StoreQueryRequest.create({ messageHashes: [new Uint8Array([1, 2, 3, 4])], - pubsubTopic: "/waku/2/default-waku/proto", + routingInfo, contentTopics: ["/test/1/content/proto"], includeData: true, paginationForward: true @@ -67,7 +60,7 @@ describe("StoreQueryRequest validation", () => { it("rejects hash query with time filter", () => { expect(() => StoreQueryRequest.create({ - pubsubTopic: "", + routingInfo, contentTopics: [], messageHashes: [new Uint8Array([1, 2, 3, 4])], timeStart: new Date(), @@ -81,7 +74,7 @@ describe("StoreQueryRequest validation", () => { it("accepts time-filtered query with content filter", () => { const request = StoreQueryRequest.create({ - pubsubTopic: "/waku/2/default-waku/proto", + routingInfo, contentTopics: ["/test/1/content/proto"], timeStart: new Date(Date.now() - 3600000), timeEnd: new Date(), diff --git a/packages/core/src/lib/store/rpc.ts b/packages/core/src/lib/store/rpc.ts index 0055ed96a3..3fcc00f8ab 100644 --- a/packages/core/src/lib/store/rpc.ts +++ b/packages/core/src/lib/store/rpc.ts @@ -42,9 +42,9 @@ export class StoreQueryRequest { } } else { if ( - (params.pubsubTopic && + (params.routingInfo && (!params.contentTopics || params.contentTopics.length === 0)) || - (!params.pubsubTopic && + (!params.routingInfo && params.contentTopics && params.contentTopics.length > 0) ) { diff --git a/packages/core/src/lib/store/store.spec.ts b/packages/core/src/lib/store/store.spec.ts index 1cf61eb878..5677b24b74 100644 --- a/packages/core/src/lib/store/store.spec.ts +++ b/packages/core/src/lib/store/store.spec.ts @@ -2,6 +2,7 @@ import type { PeerId } from "@libp2p/interface"; import { IDecodedMessage, IDecoder, + IRoutingInfo, Libp2p, QueryRequestParams } from "@waku/interfaces"; @@ -78,9 +79,17 @@ describe("StoreCore", () => { let mockStoreQueryRequest: any; let mockStoreQueryResponse: any; + const routingInfo: IRoutingInfo = { + pubsubTopic: "test-topic", + shardId: 1, + networkConfig: { clusterId: 0 }, + isAutoSharding: false, + isStaticSharding: false + }; + beforeEach(() => { queryOpts = { - pubsubTopic: "test-topic", + routingInfo, contentTopics: ["test-topic"], paginationLimit: 10, includeData: true, diff --git a/packages/core/src/lib/store/store.ts b/packages/core/src/lib/store/store.ts index ce61b7a553..61f6f07737 100644 --- a/packages/core/src/lib/store/store.ts +++ b/packages/core/src/lib/store/store.ts @@ -76,7 +76,7 @@ export class StoreCore { log.info("Sending store query request:", { hasMessageHashes: !!queryOpts.messageHashes?.length, messageHashCount: queryOpts.messageHashes?.length, - pubsubTopic: queryOpts.pubsubTopic, + routingInfo: queryOpts.routingInfo, contentTopics: queryOpts.contentTopics }); diff --git a/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts b/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts index 9087f12c15..a9d46740f2 100644 --- a/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts +++ b/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts @@ -10,7 +10,7 @@ import type { import { type Libp2pComponents, type PeerExchangeQueryResult, - ShardInfo, + type RelayShards, Tags } from "@waku/interfaces"; import { decodeRelayShard, encodeRelayShard, Logger } from "@waku/utils"; @@ -279,7 +279,7 @@ export class PeerExchangeDiscovery private async checkPeerInfoDiff( peerInfo: PeerInfo, - shardInfo?: ShardInfo + shardInfo?: RelayShards ): Promise<{ hasMultiaddrDiff: boolean; hasShardDiff: boolean }> { const { id: peerId } = peerInfo; const peer = await this.components.peerStore.get(peerId); diff --git a/packages/enr/src/enr.ts b/packages/enr/src/enr.ts index 77c80fc1c2..71b2bcc0fb 100644 --- a/packages/enr/src/enr.ts +++ b/packages/enr/src/enr.ts @@ -5,8 +5,8 @@ import type { ENRValue, IEnr, NodeId, - SequenceNumber, - ShardInfo + RelayShards, + SequenceNumber } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -64,7 +64,7 @@ export class ENR extends RawEnr implements IEnr { protocol: TransportProtocol | TransportProtocolPerIpVersion ) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this); - public get shardInfo(): ShardInfo | undefined { + public get shardInfo(): RelayShards | undefined { if (this.rs && this.rsv) { log.warn("ENR contains both `rs` and `rsv` fields."); } diff --git a/packages/enr/src/raw_enr.ts b/packages/enr/src/raw_enr.ts index 0629932f78..1b3ced089a 100644 --- a/packages/enr/src/raw_enr.ts +++ b/packages/enr/src/raw_enr.ts @@ -6,8 +6,8 @@ import { import type { ENRKey, ENRValue, + RelayShards, SequenceNumber, - ShardInfo, Waku2 } from "@waku/interfaces"; import { decodeRelayShard } from "@waku/utils"; @@ -52,13 +52,13 @@ export class RawEnr extends Map { } } - public get rs(): ShardInfo | undefined { + public get rs(): RelayShards | undefined { const rs = this.get("rs"); if (!rs) return undefined; return decodeRelayShard(rs); } - public get rsv(): ShardInfo | undefined { + public get rsv(): RelayShards | undefined { const rsv = this.get("rsv"); if (!rsv) return undefined; return decodeRelayShard(rsv); diff --git a/packages/interfaces/src/constants.ts b/packages/interfaces/src/constants.ts index b65f48d72c..566299501c 100644 --- a/packages/interfaces/src/constants.ts +++ b/packages/interfaces/src/constants.ts @@ -1,4 +1,4 @@ -import type { ShardInfo } from "./sharding"; +import type { AutoSharding } from "./sharding"; /** * The default cluster ID for The Waku Network @@ -11,11 +11,9 @@ export const DEFAULT_CLUSTER_ID = 1; export const DEFAULT_NUM_SHARDS = 8; /** - * DefaultShardInfo is default configuration for The Waku Network. + * DefaultNetworkConfig is default configuration for The Waku Network. */ -export const DefaultShardInfo: ShardInfo = { +export const DefaultNetworkConfig: AutoSharding = { clusterId: DEFAULT_CLUSTER_ID, - shards: [0, 1, 2, 3, 4, 5, 6, 7, 8] + numShardsInCluster: DEFAULT_NUM_SHARDS }; - -export const DefaultNetworkConfig = DefaultShardInfo; diff --git a/packages/interfaces/src/enr.ts b/packages/interfaces/src/enr.ts index ec4b4ab54c..01d4bcb751 100644 --- a/packages/interfaces/src/enr.ts +++ b/packages/interfaces/src/enr.ts @@ -2,7 +2,7 @@ import type { PeerId } from "@libp2p/interface"; import type { PeerInfo } from "@libp2p/interface"; import type { Multiaddr } from "@multiformats/multiaddr"; -import { ShardInfo } from "./sharding.js"; +import { RelayShards } from "./sharding.js"; export type ENRKey = string; export type ENRValue = Uint8Array; @@ -36,7 +36,7 @@ export interface IEnr extends Map { multiaddrs?: Multiaddr[]; waku2?: Waku2; peerInfo: PeerInfo | undefined; - shardInfo?: ShardInfo; + shardInfo?: RelayShards; /** * @deprecated: use { @link IEnr.peerInfo } instead. diff --git a/packages/interfaces/src/message.ts b/packages/interfaces/src/message.ts index 8c1ae1dd20..fda16fb160 100644 --- a/packages/interfaces/src/message.ts +++ b/packages/interfaces/src/message.ts @@ -1,13 +1,5 @@ import type { ContentTopic, PubsubTopic } from "./misc.js"; - -export interface SingleShardInfo { - clusterId: number; - /** - * TODO: make shard required - * Specifying this field indicates to the encoder/decoder that static sharding must be used. - */ - shard?: number; -} +import type { IRoutingInfo } from "./sharding.js"; export interface IRateLimitProof { proof: Uint8Array; @@ -79,38 +71,17 @@ export interface IMetaSetter { (message: IProtoMessage & { meta: undefined }): Uint8Array; } -export interface EncoderOptions { - /** - * @deprecated - */ - pubsubTopic?: PubsubTopic; - pubsubTopicShardInfo?: SingleShardInfo; - /** The content topic to set on outgoing messages. */ - contentTopic: string; - /** - * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. - * @defaultValue `false` - */ - ephemeral?: boolean; - /** - * A function called when encoding messages to set the meta field. - * @param IProtoMessage The message encoded for wire, without the meta field. - * If encryption is used, `metaSetter` only accesses _encrypted_ payload. - */ - metaSetter?: IMetaSetter; -} - export interface IEncoder { - pubsubTopic: PubsubTopic; contentTopic: string; ephemeral: boolean; + routingInfo: IRoutingInfo; toWire: (message: IMessage) => Promise; toProtoObj: (message: IMessage) => Promise; } export interface IDecoder { - pubsubTopic: PubsubTopic; contentTopic: string; + routingInfo: IRoutingInfo; fromWireToProtoObj: (bytes: Uint8Array) => Promise; fromProtoObj: ( pubsubTopic: string, diff --git a/packages/interfaces/src/metadata.ts b/packages/interfaces/src/metadata.ts index 32ce59c2e6..b9714d92f8 100644 --- a/packages/interfaces/src/metadata.ts +++ b/packages/interfaces/src/metadata.ts @@ -1,9 +1,9 @@ import type { PeerId } from "@libp2p/interface"; import { ThisOrThat } from "./misc.js"; -import type { ClusterId, ShardInfo } from "./sharding.js"; +import type { ClusterId, RelayShards } from "./sharding.js"; -export type MetadataQueryResult = ThisOrThat<"shardInfo", ShardInfo>; +export type MetadataQueryResult = ThisOrThat<"shardInfo", RelayShards>; export interface IMetadata { readonly multicodec: string; diff --git a/packages/interfaces/src/protocols.ts b/packages/interfaces/src/protocols.ts index 1086bc28d9..9b0b8fc44c 100644 --- a/packages/interfaces/src/protocols.ts +++ b/packages/interfaces/src/protocols.ts @@ -6,7 +6,7 @@ import type { CreateLibp2pOptions } from "./libp2p.js"; import type { LightPushProtocolOptions } from "./light_push.js"; import type { IDecodedMessage } from "./message.js"; import type { ThisAndThat, ThisOrThat } from "./misc.js"; -import type { AutoSharding, StaticSharding } from "./sharding.js"; +import { NetworkConfig } from "./sharding.js"; import type { StoreProtocolOptions } from "./store.js"; export enum Protocols { @@ -16,8 +16,6 @@ export enum Protocols { Filter = "filter" } -export type NetworkConfig = StaticSharding | AutoSharding; - export type CreateNodeOptions = { /** * Set the user agent string to be used in identification of the node. diff --git a/packages/interfaces/src/sharding.ts b/packages/interfaces/src/sharding.ts index c2364b6396..ee5a0f0bb4 100644 --- a/packages/interfaces/src/sharding.ts +++ b/packages/interfaces/src/sharding.ts @@ -1,13 +1,44 @@ -export type ShardInfo = { - clusterId: number; - shards: number[]; +/** + * Configuration for a Waku network. All nodes in a given network/cluster + * should have the same configuration. + */ +export type NetworkConfig = StaticSharding | AutoSharding; + +export type RelayShards = { + clusterId: ClusterId; + shards: ShardId[]; }; -export type ContentTopicInfo = { - clusterId?: number; // TODO: This should be mandatory on a network config - contentTopics: string[]; +export type StaticSharding = { + clusterId: ClusterId; +}; +export type AutoSharding = { + clusterId: ClusterId; + numShardsInCluster: number; }; - -export type StaticSharding = ShardInfo; -export type AutoSharding = ContentTopicInfo; export type ClusterId = number; +export type ShardId = number; + +/** + * Routing Information for a given message. + */ +export interface IRoutingInfoAutoSharding { + pubsubTopic: string; + shardId: ShardId; + networkConfig: AutoSharding; + contentTopic: string; + isAutoSharding: boolean; + isStaticSharding: boolean; +} + +export interface IRoutingInfoStaticSharding { + pubsubTopic: string; + shardId: ShardId; + networkConfig: StaticSharding; + isAutoSharding: boolean; + isStaticSharding: boolean; +} + +export type IRoutingInfo = + | IRoutingInfoAutoSharding + | IRoutingInfoStaticSharding; diff --git a/packages/interfaces/src/store.ts b/packages/interfaces/src/store.ts index 014842aaa6..a8feebb236 100644 --- a/packages/interfaces/src/store.ts +++ b/packages/interfaces/src/store.ts @@ -1,4 +1,5 @@ import type { IDecodedMessage, IDecoder } from "./message.js"; +import { IRoutingInfo } from "./sharding.js"; export type StoreCursor = Uint8Array; @@ -15,10 +16,10 @@ export type QueryRequestParams = { includeData: boolean; /** - * The pubsub topic to query. This field is mandatory. - * The query will only return messages that were published on this specific pubsub topic. + * The routing information to query. This field is mandatory. + * The query will only return messages that were published on this specific route (cluster and shard). */ - pubsubTopic: string; + routingInfo: IRoutingInfo; /** * The content topics to filter the messages. diff --git a/packages/interfaces/src/waku.ts b/packages/interfaces/src/waku.ts index 049588c819..5c99f716e6 100644 --- a/packages/interfaces/src/waku.ts +++ b/packages/interfaces/src/waku.ts @@ -13,21 +13,12 @@ import type { ILightPush } from "./light_push.js"; import { IDecodedMessage, IDecoder, IEncoder } from "./message.js"; import type { Protocols } from "./protocols.js"; import type { IRelay } from "./relay.js"; +import type { ShardId } from "./sharding.js"; import type { IStore } from "./store.js"; -type AutoShardSingle = { - clusterId: number; - shardsUnderCluster: number; -}; - -type StaticShardSingle = { - clusterId: number; - shard: number; -}; - export type CreateDecoderParams = { contentTopic: string; - shardInfo?: AutoShardSingle | StaticShardSingle; + shardId?: ShardId; }; export type CreateEncoderParams = CreateDecoderParams & { diff --git a/packages/message-encryption/src/ecies.spec.ts b/packages/message-encryption/src/ecies.spec.ts index 55743820a0..9020aeffb6 100644 --- a/packages/message-encryption/src/ecies.spec.ts +++ b/packages/message-encryption/src/ecies.spec.ts @@ -1,13 +1,19 @@ import { IProtoMessage } from "@waku/interfaces"; -import { contentTopicToPubsubTopic } from "@waku/utils"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import fc from "fast-check"; import { getPublicKey } from "./crypto/index.js"; import { createDecoder, createEncoder } from "./ecies.js"; -const contentTopic = "/js-waku/1/tests/bytes"; -const pubsubTopic = contentTopicToPubsubTopic(contentTopic); +const testContentTopic = "/js-waku/1/tests/bytes"; +const testRoutingInfo = createRoutingInfo( + { + clusterId: 0, + numShardsInCluster: 14 + }, + { contentTopic: testContentTopic } +); describe("Ecies Encryption", function () { this.timeout(20000); @@ -20,19 +26,27 @@ describe("Ecies Encryption", function () { const publicKey = getPublicKey(privateKey); const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, publicKey }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic, privateKey); + const decoder = createDecoder( + testContentTopic, + testRoutingInfo, + privateKey + ); const protoResult = await decoder.fromWireToProtoObj(bytes!); if (!protoResult) throw "Failed to proto decode"; - const result = await decoder.fromProtoObj(pubsubTopic, protoResult); + const result = await decoder.fromProtoObj( + testRoutingInfo.pubsubTopic, + protoResult + ); if (!result) throw "Failed to decode"; - expect(result.contentTopic).to.equal(contentTopic); - expect(result.pubsubTopic).to.equal(pubsubTopic); + expect(result.contentTopic).to.equal(testContentTopic); + expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic); expect(result.version).to.equal(1); expect(result?.payload).to.deep.equal(payload); expect(result.signature).to.be.undefined; @@ -56,20 +70,28 @@ describe("Ecies Encryption", function () { const bobPublicKey = getPublicKey(bobPrivateKey); const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, publicKey: bobPublicKey, sigPrivKey: alicePrivateKey }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic, bobPrivateKey); + const decoder = createDecoder( + testContentTopic, + testRoutingInfo, + bobPrivateKey + ); const protoResult = await decoder.fromWireToProtoObj(bytes!); if (!protoResult) throw "Failed to proto decode"; - const result = await decoder.fromProtoObj(pubsubTopic, protoResult); + const result = await decoder.fromProtoObj( + testRoutingInfo.pubsubTopic, + protoResult + ); if (!result) throw "Failed to decode"; - expect(result.contentTopic).to.equal(contentTopic); - expect(result.pubsubTopic).to.equal(pubsubTopic); + expect(result.contentTopic).to.equal(testContentTopic); + expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic); expect(result.version).to.equal(1); expect(result?.payload).to.deep.equal(payload); expect(result.signature).to.not.be.undefined; @@ -97,16 +119,24 @@ describe("Ecies Encryption", function () { }; const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, publicKey, metaSetter }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic, privateKey); + const decoder = createDecoder( + testContentTopic, + testRoutingInfo, + privateKey + ); const protoResult = await decoder.fromWireToProtoObj(bytes!); if (!protoResult) throw "Failed to proto decode"; - const result = await decoder.fromProtoObj(pubsubTopic, protoResult); + const result = await decoder.fromProtoObj( + testRoutingInfo.pubsubTopic, + protoResult + ); if (!result) throw "Failed to decode"; const expectedMeta = metaSetter({ @@ -131,6 +161,7 @@ describe("Ensures content topic is defined", () => { const wrapper = function (): void { createEncoder({ contentTopic: undefined as unknown as string, + routingInfo: testRoutingInfo, publicKey: new Uint8Array() }); }; @@ -139,21 +170,29 @@ describe("Ensures content topic is defined", () => { }); it("Encoder throws on empty string content topic", () => { const wrapper = function (): void { - createEncoder({ contentTopic: "", publicKey: new Uint8Array() }); + createEncoder({ + contentTopic: "", + routingInfo: testRoutingInfo, + publicKey: new Uint8Array() + }); }; expect(wrapper).to.throw("Content topic must be specified"); }); it("Decoder throws on undefined content topic", () => { const wrapper = function (): void { - createDecoder(undefined as unknown as string, new Uint8Array()); + createDecoder( + undefined as unknown as string, + testRoutingInfo, + new Uint8Array() + ); }; expect(wrapper).to.throw("Content topic must be specified"); }); it("Decoder throws on empty string content topic", () => { const wrapper = function (): void { - createDecoder("", new Uint8Array()); + createDecoder("", testRoutingInfo, new Uint8Array()); }; expect(wrapper).to.throw("Content topic must be specified"); diff --git a/packages/message-encryption/src/ecies.ts b/packages/message-encryption/src/ecies.ts index 83bb05ceb2..4fec13531b 100644 --- a/packages/message-encryption/src/ecies.ts +++ b/packages/message-encryption/src/ecies.ts @@ -1,17 +1,14 @@ import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0"; import { - type EncoderOptions as BaseEncoderOptions, type IDecoder, type IEncoder, type IEncryptedMessage, type IMessage, type IMetaSetter, - type IProtoMessage, - type PubsubTopic, - type SingleShardInfo + type IProtoMessage } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { determinePubsubTopic, Logger } from "@waku/utils"; +import { Logger, RoutingInfo } from "@waku/utils"; import { generatePrivateKey } from "./crypto/utils.js"; import { DecodedMessage } from "./decoded_message.js"; @@ -35,8 +32,8 @@ const log = new Logger("message-encryption:ecies"); class Encoder implements IEncoder { public constructor( - public pubsubTopic: PubsubTopic, public contentTopic: string, + public routingInfo: RoutingInfo, private publicKey: Uint8Array, private sigPrivKey?: Uint8Array, public ephemeral: boolean = false, @@ -81,11 +78,24 @@ class Encoder implements IEncoder { } } -export interface EncoderOptions extends BaseEncoderOptions { +export interface EncoderOptions { /** - * @deprecated + * The routing information for messages to encode. */ - pubsubTopic?: PubsubTopic; + routingInfo: RoutingInfo; + /** The content topic to set on outgoing messages. */ + contentTopic: string; + /** + * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. + * @defaultValue `false` + */ + ephemeral?: boolean; + /** + * A function called when encoding messages to set the meta field. + * @param IProtoMessage The message encoded for wire, without the meta field. + * If encryption is used, `metaSetter` only accesses _encrypted_ payload. + */ + metaSetter?: IMetaSetter; /** The public key to encrypt the payload for. */ publicKey: Uint8Array; /** An optional private key to be used to sign the payload before encryption. */ @@ -105,17 +115,16 @@ export interface EncoderOptions extends BaseEncoderOptions { * in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/). */ export function createEncoder({ - pubsubTopic, - pubsubTopicShardInfo, contentTopic, + routingInfo, publicKey, sigPrivKey, ephemeral = false, metaSetter }: EncoderOptions): Encoder { return new Encoder( - determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo), contentTopic, + routingInfo, publicKey, sigPrivKey, ephemeral, @@ -125,11 +134,11 @@ export function createEncoder({ class Decoder extends DecoderV0 implements IDecoder { public constructor( - pubsubTopic: PubsubTopic, contentTopic: string, + routingInfo: RoutingInfo, private privateKey: Uint8Array ) { - super(pubsubTopic, contentTopic); + super(contentTopic, routingInfo); } public async fromProtoObj( @@ -201,12 +210,8 @@ class Decoder extends DecoderV0 implements IDecoder { */ export function createDecoder( contentTopic: string, - privateKey: Uint8Array, - pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic + routingInfo: RoutingInfo, + privateKey: Uint8Array ): Decoder { - return new Decoder( - determinePubsubTopic(contentTopic, pubsubTopicShardInfo), - contentTopic, - privateKey - ); + return new Decoder(contentTopic, routingInfo, privateKey); } diff --git a/packages/message-encryption/src/symmetric.spec.ts b/packages/message-encryption/src/symmetric.spec.ts index 9016f66604..de6026456e 100644 --- a/packages/message-encryption/src/symmetric.spec.ts +++ b/packages/message-encryption/src/symmetric.spec.ts @@ -1,13 +1,19 @@ import { IProtoMessage } from "@waku/interfaces"; -import { contentTopicToPubsubTopic } from "@waku/utils"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import fc from "fast-check"; import { getPublicKey } from "./crypto/index.js"; import { createDecoder, createEncoder } from "./symmetric.js"; -const contentTopic = "/js-waku/1/tests/bytes"; -const pubsubTopic = contentTopicToPubsubTopic(contentTopic); +const testContentTopic = "/js-waku/1/tests/bytes"; +const testRoutingInfo = createRoutingInfo( + { + clusterId: 0, + numShardsInCluster: 14 + }, + { contentTopic: testContentTopic } +); describe("Symmetric Encryption", function () { it("Round trip binary encryption [symmetric, no signature]", async function () { @@ -17,19 +23,27 @@ describe("Symmetric Encryption", function () { fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }), async (payload, symKey) => { const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, symKey }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic, symKey); + const decoder = createDecoder( + testContentTopic, + testRoutingInfo, + symKey + ); const protoResult = await decoder.fromWireToProtoObj(bytes!); if (!protoResult) throw "Failed to proto decode"; - const result = await decoder.fromProtoObj(pubsubTopic, protoResult); + const result = await decoder.fromProtoObj( + testRoutingInfo.pubsubTopic, + protoResult + ); if (!result) throw "Failed to decode"; - expect(result.contentTopic).to.equal(contentTopic); - expect(result.pubsubTopic).to.equal(pubsubTopic); + expect(result.contentTopic).to.equal(testContentTopic); + expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic); expect(result.version).to.equal(1); expect(result?.payload).to.deep.equal(payload); expect(result.signature).to.be.undefined; @@ -50,20 +64,28 @@ describe("Symmetric Encryption", function () { const sigPubKey = getPublicKey(sigPrivKey); const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, symKey, sigPrivKey }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic, symKey); + const decoder = createDecoder( + testContentTopic, + testRoutingInfo, + symKey + ); const protoResult = await decoder.fromWireToProtoObj(bytes!); if (!protoResult) throw "Failed to proto decode"; - const result = await decoder.fromProtoObj(pubsubTopic, protoResult); + const result = await decoder.fromProtoObj( + testRoutingInfo.pubsubTopic, + protoResult + ); if (!result) throw "Failed to decode"; - expect(result.contentTopic).to.equal(contentTopic); - expect(result.pubsubTopic).to.equal(pubsubTopic); + expect(result.contentTopic).to.equal(testContentTopic); + expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic); expect(result.version).to.equal(1); expect(result?.payload).to.deep.equal(payload); expect(result.signature).to.not.be.undefined; @@ -90,16 +112,24 @@ describe("Symmetric Encryption", function () { }; const encoder = createEncoder({ - contentTopic, + contentTopic: testContentTopic, + routingInfo: testRoutingInfo, symKey, metaSetter }); const bytes = await encoder.toWire({ payload }); - const decoder = createDecoder(contentTopic, symKey); + const decoder = createDecoder( + testContentTopic, + testRoutingInfo, + symKey + ); const protoResult = await decoder.fromWireToProtoObj(bytes!); if (!protoResult) throw "Failed to proto decode"; - const result = await decoder.fromProtoObj(pubsubTopic, protoResult); + const result = await decoder.fromProtoObj( + testRoutingInfo.pubsubTopic, + protoResult + ); if (!result) throw "Failed to decode"; const expectedMeta = metaSetter({ @@ -124,6 +154,7 @@ describe("Ensures content topic is defined", () => { const wrapper = function (): void { createEncoder({ contentTopic: undefined as unknown as string, + routingInfo: testRoutingInfo, symKey: new Uint8Array() }); }; @@ -132,21 +163,29 @@ describe("Ensures content topic is defined", () => { }); it("Encoder throws on empty string content topic", () => { const wrapper = function (): void { - createEncoder({ contentTopic: "", symKey: new Uint8Array() }); + createEncoder({ + contentTopic: "", + routingInfo: testRoutingInfo, + symKey: new Uint8Array() + }); }; expect(wrapper).to.throw("Content topic must be specified"); }); it("Decoder throws on undefined content topic", () => { const wrapper = function (): void { - createDecoder(undefined as unknown as string, new Uint8Array()); + createDecoder( + undefined as unknown as string, + testRoutingInfo, + new Uint8Array() + ); }; expect(wrapper).to.throw("Content topic must be specified"); }); it("Decoder throws on empty string content topic", () => { const wrapper = function (): void { - createDecoder("", new Uint8Array()); + createDecoder("", testRoutingInfo, new Uint8Array()); }; expect(wrapper).to.throw("Content topic must be specified"); diff --git a/packages/message-encryption/src/symmetric.ts b/packages/message-encryption/src/symmetric.ts index 732f0755d9..80692dd834 100644 --- a/packages/message-encryption/src/symmetric.ts +++ b/packages/message-encryption/src/symmetric.ts @@ -1,17 +1,15 @@ import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0"; import type { - EncoderOptions as BaseEncoderOptions, IDecoder, IEncoder, IEncryptedMessage, IMessage, IMetaSetter, IProtoMessage, - PubsubTopic, - SingleShardInfo + IRoutingInfo } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { determinePubsubTopic, Logger } from "@waku/utils"; +import { Logger, RoutingInfo } from "@waku/utils"; import { generateSymmetricKey } from "./crypto/utils.js"; import { DecodedMessage } from "./decoded_message.js"; @@ -35,8 +33,8 @@ const log = new Logger("message-encryption:symmetric"); class Encoder implements IEncoder { public constructor( - public pubsubTopic: PubsubTopic, public contentTopic: string, + public routingInfo: IRoutingInfo, private symKey: Uint8Array, private sigPrivKey?: Uint8Array, public ephemeral: boolean = false, @@ -81,7 +79,24 @@ class Encoder implements IEncoder { } } -export interface EncoderOptions extends BaseEncoderOptions { +export interface EncoderOptions { + /** + * The routing information for messages to encode. + */ + routingInfo: RoutingInfo; + /** The content topic to set on outgoing messages. */ + contentTopic: string; + /** + * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. + * @defaultValue `false` + */ + ephemeral?: boolean; + /** + * A function called when encoding messages to set the meta field. + * @param IProtoMessage The message encoded for wire, without the meta field. + * If encryption is used, `metaSetter` only accesses _encrypted_ payload. + */ + metaSetter?: IMetaSetter; /** The symmetric key to encrypt the payload with. */ symKey: Uint8Array; /** An optional private key to be used to sign the payload before encryption. */ @@ -101,17 +116,16 @@ export interface EncoderOptions extends BaseEncoderOptions { * in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/). */ export function createEncoder({ - pubsubTopic, - pubsubTopicShardInfo, contentTopic, + routingInfo, symKey, sigPrivKey, ephemeral = false, metaSetter }: EncoderOptions): Encoder { return new Encoder( - determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo), contentTopic, + routingInfo, symKey, sigPrivKey, ephemeral, @@ -121,11 +135,11 @@ export function createEncoder({ class Decoder extends DecoderV0 implements IDecoder { public constructor( - pubsubTopic: PubsubTopic, contentTopic: string, + routingInfo: RoutingInfo, private symKey: Uint8Array ) { - super(pubsubTopic, contentTopic); + super(contentTopic, routingInfo); } public async fromProtoObj( @@ -193,16 +207,13 @@ class Decoder extends DecoderV0 implements IDecoder { * decode incoming messages. * * @param contentTopic The resulting decoder will only decode messages with this content topic. + * @param routingInfo Routing information, depends on the network config (static vs auto sharding) * @param symKey The symmetric key used to decrypt the message. */ export function createDecoder( contentTopic: string, - symKey: Uint8Array, - pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic + routingInfo: RoutingInfo, + symKey: Uint8Array ): Decoder { - return new Decoder( - determinePubsubTopic(contentTopic, pubsubTopicShardInfo), - contentTopic, - symKey - ); + return new Decoder(contentTopic, routingInfo, symKey); } diff --git a/packages/relay/src/create.ts b/packages/relay/src/create.ts index 0d330dffd6..8ce2cd1de8 100644 --- a/packages/relay/src/create.ts +++ b/packages/relay/src/create.ts @@ -1,7 +1,5 @@ -import type { CreateNodeOptions, RelayNode } from "@waku/interfaces"; -import { DefaultNetworkConfig } from "@waku/interfaces"; +import { CreateNodeOptions, RelayNode } from "@waku/interfaces"; import { createLibp2pAndUpdateOptions, WakuNode } from "@waku/sdk"; -import { derivePubsubTopicsFromNetworkConfig } from "@waku/utils"; import { Relay, RelayCreateOptions, wakuGossipSub } from "./relay.js"; @@ -16,7 +14,7 @@ import { Relay, RelayCreateOptions, wakuGossipSub } from "./relay.js"; * or use this function with caution. */ export async function createRelayNode( - options: CreateNodeOptions & Partial + options: CreateNodeOptions & RelayCreateOptions ): Promise { options = { ...options, @@ -29,9 +27,9 @@ export async function createRelayNode( }; const libp2p = await createLibp2pAndUpdateOptions(options); - const pubsubTopics = derivePubsubTopicsFromNetworkConfig( - options.networkConfig ?? DefaultNetworkConfig - ); + + const pubsubTopics = options.routingInfos.map((ri) => ri.pubsubTopic); + const relay = new Relay({ pubsubTopics, libp2p diff --git a/packages/relay/src/message_validator.spec.ts b/packages/relay/src/message_validator.spec.ts index 313898708f..5f2a4a57f6 100644 --- a/packages/relay/src/message_validator.spec.ts +++ b/packages/relay/src/message_validator.spec.ts @@ -3,14 +3,21 @@ import { TopicValidatorResult } from "@libp2p/interface"; import type { UnsignedMessage } from "@libp2p/interface"; import { peerIdFromPrivateKey } from "@libp2p/peer-id"; import { createEncoder } from "@waku/core"; -import { determinePubsubTopic } from "@waku/utils"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import fc from "fast-check"; import { messageValidator } from "./message_validator.js"; -const TestContentTopic = "/app/1/topic/utf8"; -const TestPubsubTopic = determinePubsubTopic(TestContentTopic); +const testContentTopic = "/app/1/topic/utf8"; +const testRoutingInfo = createRoutingInfo( + { + clusterId: 0, + numShardsInCluster: 8 + }, + { contentTopic: testContentTopic } +); +const testPubsubTopic = testRoutingInfo.pubsubTopic; describe("Message Validator", () => { it("Accepts a valid Waku Message", async () => { @@ -20,14 +27,14 @@ describe("Message Validator", () => { const peerId = peerIdFromPrivateKey(privateKey); const encoder = createEncoder({ - contentTopic: TestContentTopic, - pubsubTopic: TestPubsubTopic + contentTopic: testContentTopic, + routingInfo: testRoutingInfo }); const bytes = await encoder.toWire({ payload }); const message: UnsignedMessage = { type: "unsigned", - topic: TestPubsubTopic, + topic: testPubsubTopic, data: bytes }; @@ -46,7 +53,7 @@ describe("Message Validator", () => { const message: UnsignedMessage = { type: "unsigned", - topic: TestPubsubTopic, + topic: testPubsubTopic, data }; diff --git a/packages/relay/src/relay.ts b/packages/relay/src/relay.ts index 98aabde45d..571a6a78cd 100644 --- a/packages/relay/src/relay.ts +++ b/packages/relay/src/relay.ts @@ -22,14 +22,14 @@ import { PubsubTopic, SDKProtocolResult } from "@waku/interfaces"; -import { isWireSizeUnderCap, toAsyncIterator } from "@waku/utils"; +import { isWireSizeUnderCap, RoutingInfo, toAsyncIterator } from "@waku/utils"; import { pushOrInitMapSet } from "@waku/utils"; import { Logger } from "@waku/utils"; import { pEvent } from "p-event"; import { RelayCodecs } from "./constants.js"; import { messageValidator } from "./message_validator.js"; -import { TopicOnlyDecoder } from "./topic_only_message.js"; +import { ContentTopicOnlyDecoder } from "./topic_only_message.js"; const log = new Logger("relay"); @@ -38,7 +38,9 @@ export type Observer = { callback: Callback; }; -export type RelayCreateOptions = CreateNodeOptions & GossipsubOpts; +export type RelayCreateOptions = CreateNodeOptions & { + routingInfos: RoutingInfo[]; +} & Partial; export type ContentTopic = string; type ActiveSubscriptions = Map; @@ -53,7 +55,7 @@ type RelayConstructorParams = { * Throws if libp2p.pubsub does not support Waku Relay */ export class Relay implements IRelay { - public readonly pubsubTopics: Set; + public pubsubTopics: Set; private defaultDecoder: IDecoder; public static multicodec: string = RelayCodecs[0]; @@ -73,6 +75,7 @@ export class Relay implements IRelay { } this.gossipSub = params.libp2p.services.pubsub as GossipSub; + this.pubsubTopics = new Set(params.pubsubTopics); if (this.gossipSub.isStarted()) { @@ -82,7 +85,7 @@ export class Relay implements IRelay { this.observers = new Map(); // TODO: User might want to decide what decoder should be used (e.g. for RLN) - this.defaultDecoder = new TopicOnlyDecoder(params.pubsubTopics[0]); + this.defaultDecoder = new ContentTopicOnlyDecoder(); } /** @@ -124,7 +127,7 @@ export class Relay implements IRelay { encoder: IEncoder, message: IMessage ): Promise { - const { pubsubTopic } = encoder; + const { pubsubTopic } = encoder.routingInfo; if (!this.pubsubTopics.has(pubsubTopic)) { log.error("Failed to send waku relay: topic not configured"); return { @@ -176,7 +179,7 @@ export class Relay implements IRelay { const observers: Array<[PubsubTopic, Observer]> = []; for (const decoder of Array.isArray(decoders) ? decoders : [decoders]) { - const { pubsubTopic } = decoder; + const { pubsubTopic } = decoder.routingInfo; const ctObs: Map>> = this.observers.get( pubsubTopic ) ?? new Map(); @@ -240,8 +243,9 @@ export class Relay implements IRelay { pubsubTopic: string, bytes: Uint8Array ): Promise { - const topicOnlyMsg = await this.defaultDecoder.fromWireToProtoObj(bytes); - if (!topicOnlyMsg || !topicOnlyMsg.contentTopic) { + const contentTopicOnlyMsg = + await this.defaultDecoder.fromWireToProtoObj(bytes); + if (!contentTopicOnlyMsg || !contentTopicOnlyMsg.contentTopic) { log.warn("Message does not have a content topic, skipping"); return; } @@ -253,9 +257,9 @@ export class Relay implements IRelay { } // Retrieve the set of observers for the given contentTopic - const observers = contentTopicMap.get(topicOnlyMsg.contentTopic) as Set< - Observer - >; + const observers = contentTopicMap.get( + contentTopicOnlyMsg.contentTopic + ) as Set>; if (!observers) { return; } @@ -277,7 +281,7 @@ export class Relay implements IRelay { } else { log.error( "Failed to decode messages on", - topicOnlyMsg.contentTopic + contentTopicOnlyMsg.contentTopic ); } } catch (error) { diff --git a/packages/relay/src/topic_only_message.ts b/packages/relay/src/topic_only_message.ts index 9a9410cc07..d5332b9d9f 100644 --- a/packages/relay/src/topic_only_message.ts +++ b/packages/relay/src/topic_only_message.ts @@ -1,15 +1,18 @@ -import { message } from "@waku/core"; import type { IDecoder, IProtoMessage, - ITopicOnlyMessage, - PubsubTopic + IRoutingInfo, + ITopicOnlyMessage } from "@waku/interfaces"; import { TopicOnlyMessage as ProtoTopicOnlyMessage } from "@waku/proto"; export class TopicOnlyMessage implements ITopicOnlyMessage { - public version = message.version_0.Version; - public payload: Uint8Array = new Uint8Array(); + public get version(): number { + throw "Only content topic can be accessed on this message"; + } + public get payload(): Uint8Array { + throw "Only content topic can be accessed on this message"; + } public rateLimitProof: undefined; public timestamp: undefined; public meta: undefined; @@ -26,11 +29,16 @@ export class TopicOnlyMessage implements ITopicOnlyMessage { } // This decoder is used only for reading `contentTopic` from the WakuMessage -export class TopicOnlyDecoder implements IDecoder { - public contentTopic = ""; +export class ContentTopicOnlyDecoder implements IDecoder { + public constructor() {} - // pubsubTopic is ignored - public constructor(public pubsubTopic: PubsubTopic) {} + public get contentTopic(): string { + throw "ContentTopic is not available on this decoder, it is only meant to decode the content topic for any message"; + } + + public get routingInfo(): IRoutingInfo { + throw "RoutingInfo is not available on this decoder, it is only meant to decode the content topic for any message"; + } public fromWireToProtoObj( bytes: Uint8Array diff --git a/packages/reliability-tests/tests/high-throughput.spec.ts b/packages/reliability-tests/tests/high-throughput.spec.ts index 8414aab53d..357efed5f7 100644 --- a/packages/reliability-tests/tests/high-throughput.spec.ts +++ b/packages/reliability-tests/tests/high-throughput.spec.ts @@ -1,16 +1,6 @@ import { LightNode, Protocols } from "@waku/interfaces"; -import { - createDecoder, - createEncoder, - createLightNode, - utf8ToBytes -} from "@waku/sdk"; -import { - delay, - shardInfoToPubsubTopics, - singleShardInfosToShardInfo, - singleShardInfoToPubsubTopic -} from "@waku/utils"; +import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo, delay } from "@waku/utils"; import { expect } from "chai"; import { @@ -41,8 +31,7 @@ describe("High Throughput Messaging", function () { }); it("Send/Receive thousands of messages quickly", async function () { - const singleShardInfo = { clusterId: 0, shard: 0 }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); + const networkConfig = { clusterId: 0, numShardsInCluster: 8 }; const testStart = new Date(); const testEnd = Date.now() + testDurationMs; @@ -60,8 +49,8 @@ describe("High Throughput Messaging", function () { store: true, filter: true, relay: true, - clusterId: 0, - shard: [0], + clusterId: networkConfig.clusterId, + numShardsInNetwork: networkConfig.numShardsInCluster, contentTopic: [ContentTopic] }, { retries: 3 } @@ -69,29 +58,23 @@ describe("High Throughput Messaging", function () { await delay(1000); - await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); + // TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); - waku = await createLightNode({ networkConfig: shardInfo }); + waku = await createLightNode({ networkConfig }); await waku.start(); await waku.dial(await nwaku.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter]); - const decoder = createDecoder(ContentTopic, singleShardInfo); + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); + const decoder = createDecoder(ContentTopic, routingInfo); const hasSubscribed = await waku.filter.subscribe( [decoder], messageCollector.callback ); if (!hasSubscribed) throw new Error("Failed to subscribe from the start."); - const encoder = createEncoder({ - contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo - }); - - expect(encoder.pubsubTopic).to.eq( - singleShardInfoToPubsubTopic(singleShardInfo) - ); - let messageId = 0; // Send messages as fast as possible until testEnd @@ -107,7 +90,8 @@ describe("High Throughput Messaging", function () { ServiceNode.toMessageRpcQuery({ contentTopic: ContentTopic, payload: utf8ToBytes(message) - }) + }), + routingInfo ); sent = true; @@ -119,7 +103,7 @@ describe("High Throughput Messaging", function () { messageCollector.verifyReceivedMessage(0, { expectedMessageText: message, expectedContentTopic: ContentTopic, - expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0] + expectedPubsubTopic: routingInfo.pubsubTopic }); } } catch (e: any) { diff --git a/packages/reliability-tests/tests/longevity.spec.ts b/packages/reliability-tests/tests/longevity.spec.ts index 3abaddcde5..3e7848842f 100644 --- a/packages/reliability-tests/tests/longevity.spec.ts +++ b/packages/reliability-tests/tests/longevity.spec.ts @@ -1,16 +1,6 @@ import { LightNode, Protocols } from "@waku/interfaces"; -import { - createDecoder, - createEncoder, - createLightNode, - utf8ToBytes -} from "@waku/sdk"; -import { - delay, - shardInfoToPubsubTopics, - singleShardInfosToShardInfo, - singleShardInfoToPubsubTopic -} from "@waku/utils"; +import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo, delay } from "@waku/utils"; import { expect } from "chai"; import { @@ -41,8 +31,7 @@ describe("Longevity", function () { }); it("Filter - 2 hours", async function () { - const singleShardInfo = { clusterId: 0, shard: 0 }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); + const networkConfig = { clusterId: 0, numShardsInCluster: 8 }; const testStart = new Date(); @@ -68,29 +57,23 @@ describe("Longevity", function () { { retries: 3 } ); - await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); + // TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); - waku = await createLightNode({ networkConfig: shardInfo }); + waku = await createLightNode({ networkConfig }); await waku.start(); await waku.dial(await nwaku.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter]); - const decoder = createDecoder(ContentTopic, singleShardInfo); + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); + const decoder = createDecoder(ContentTopic, routingInfo); const hasSubscribed = await waku.filter.subscribe( [decoder], messageCollector.callback ); if (!hasSubscribed) throw new Error("Failed to subscribe from the start."); - const encoder = createEncoder({ - contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo - }); - - expect(encoder.pubsubTopic).to.eq( - singleShardInfoToPubsubTopic(singleShardInfo) - ); - let messageId = 0; while (Date.now() < testEnd) { @@ -105,7 +88,8 @@ describe("Longevity", function () { ServiceNode.toMessageRpcQuery({ contentTopic: ContentTopic, payload: utf8ToBytes(message) - }) + }), + routingInfo ); sent = true; @@ -117,7 +101,7 @@ describe("Longevity", function () { messageCollector.verifyReceivedMessage(0, { expectedMessageText: message, expectedContentTopic: ContentTopic, - expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0] + expectedPubsubTopic: routingInfo.pubsubTopic }); } } catch (e: any) { diff --git a/packages/reliability-tests/tests/throughput-sizes.spec.ts b/packages/reliability-tests/tests/throughput-sizes.spec.ts index 911f49bc1a..6d556adbd9 100644 --- a/packages/reliability-tests/tests/throughput-sizes.spec.ts +++ b/packages/reliability-tests/tests/throughput-sizes.spec.ts @@ -1,16 +1,6 @@ import { LightNode, Protocols } from "@waku/interfaces"; -import { - createDecoder, - createEncoder, - createLightNode, - utf8ToBytes -} from "@waku/sdk"; -import { - delay, - shardInfoToPubsubTopics, - singleShardInfosToShardInfo, - singleShardInfoToPubsubTopic -} from "@waku/utils"; +import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo, delay } from "@waku/utils"; import { expect } from "chai"; import { @@ -52,8 +42,7 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () { }); it("Send/Receive messages of varying sizes", async function () { - const singleShardInfo = { clusterId: 0, shard: 0 }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); + const networkConfig = { clusterId: 0, numShardsInCluster: 8 }; const testStart = new Date(); const testEnd = Date.now() + testDurationMs; @@ -74,29 +63,23 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () { await delay(1000); - await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); + // TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); - waku = await createLightNode({ networkConfig: shardInfo }); + waku = await createLightNode({ networkConfig }); await waku.start(); await waku.dial(await nwaku.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter]); - const decoder = createDecoder(ContentTopic, singleShardInfo); + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); + const decoder = createDecoder(ContentTopic, routingInfo); const hasSubscribed = await waku.filter.subscribe( [decoder], messageCollector.callback ); if (!hasSubscribed) throw new Error("Failed to subscribe from the start."); - const encoder = createEncoder({ - contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo - }); - - expect(encoder.pubsubTopic).to.eq( - singleShardInfoToPubsubTopic(singleShardInfo) - ); - let messageId = 0; const report: { messageId: number; @@ -121,7 +104,8 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () { ServiceNode.toMessageRpcQuery({ contentTopic: ContentTopic, payload: utf8ToBytes(message) - }) + }), + routingInfo ); sent = true; @@ -133,7 +117,7 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () { messageCollector.verifyReceivedMessage(0, { expectedMessageText: message, expectedContentTopic: ContentTopic, - expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0] + expectedPubsubTopic: routingInfo.pubsubTopic }); } } catch (e: any) { diff --git a/packages/rln/src/codec.spec.ts b/packages/rln/src/codec.spec.ts index 5182cd1e86..084ac1becf 100644 --- a/packages/rln/src/codec.spec.ts +++ b/packages/rln/src/codec.spec.ts @@ -24,8 +24,8 @@ import { import { createTestMetaSetter, createTestRLNCodecSetup, - EMPTY_PROTO_MESSAGE, - TEST_CONSTANTS, + EmptyProtoMessage, + TestConstants, verifyRLNMessage } from "./codec.test-utils.js"; import { RlnMessage } from "./message.js"; @@ -37,14 +37,20 @@ describe("RLN codec with version 0", () => { await createTestRLNCodecSetup(); const rlnEncoder = createRLNEncoder({ - encoder: createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }), + encoder: createEncoder({ + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo + }), rlnInstance, index, credential }); const rlnDecoder = createRLNDecoder({ rlnInstance, - decoder: createDecoder(TEST_CONSTANTS.contentTopic) + decoder: createDecoder( + TestConstants.contentTopic, + TestConstants.routingInfo + ) }); const bytes = await rlnEncoder.toWire({ payload }); @@ -53,11 +59,11 @@ describe("RLN codec with version 0", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, protoResult! ))!; - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); }); it("toProtoObj", async function () { @@ -65,25 +71,28 @@ describe("RLN codec with version 0", () => { await createTestRLNCodecSetup(); const rlnEncoder = new RLNEncoder( - createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }), + createEncoder({ + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo + }), rlnInstance, index, credential ); const rlnDecoder = new RLNDecoder( rlnInstance, - createDecoder(TEST_CONSTANTS.contentTopic) + createDecoder(TestConstants.contentTopic, TestConstants.routingInfo) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, proto! )) as RlnMessage; - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); }); }); @@ -95,7 +104,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createSymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo, symKey }), rlnInstance, @@ -104,7 +114,11 @@ describe("RLN codec with version 1", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createSymDecoder(TEST_CONSTANTS.contentTopic, symKey) + createSymDecoder( + TestConstants.contentTopic, + TestConstants.routingInfo, + symKey + ) ); const bytes = await rlnEncoder.toWire({ payload }); @@ -113,11 +127,11 @@ describe("RLN codec with version 1", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, protoResult! ))!; - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); }); it("Symmetric, toProtoObj", async function () { @@ -127,7 +141,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createSymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo, symKey }), rlnInstance, @@ -136,18 +151,22 @@ describe("RLN codec with version 1", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createSymDecoder(TEST_CONSTANTS.contentTopic, symKey) + createSymDecoder( + TestConstants.contentTopic, + TestConstants.routingInfo, + symKey + ) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, proto! ); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); }); it("Asymmetric, toWire", async function () { @@ -158,7 +177,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createAsymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo, publicKey }), rlnInstance, @@ -167,7 +187,11 @@ describe("RLN codec with version 1", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createAsymDecoder(TEST_CONSTANTS.contentTopic, privateKey) + createAsymDecoder( + TestConstants.contentTopic, + TestConstants.routingInfo, + privateKey + ) ); const bytes = await rlnEncoder.toWire({ payload }); @@ -176,11 +200,11 @@ describe("RLN codec with version 1", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, protoResult! ))!; - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); }); it("Asymmetric, toProtoObj", async function () { @@ -191,7 +215,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createAsymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo, publicKey }), rlnInstance, @@ -200,18 +225,22 @@ describe("RLN codec with version 1", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createAsymDecoder(TEST_CONSTANTS.contentTopic, privateKey) + createAsymDecoder( + TestConstants.contentTopic, + TestConstants.routingInfo, + privateKey + ) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, proto! ); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); }); }); @@ -221,21 +250,24 @@ describe("RLN Codec - epoch", () => { await createTestRLNCodecSetup(); const rlnEncoder = new RLNEncoder( - createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }), + createEncoder({ + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo + }), rlnInstance, index, credential ); const rlnDecoder = new RLNDecoder( rlnInstance, - createDecoder(TEST_CONSTANTS.contentTopic) + createDecoder(TestConstants.contentTopic, TestConstants.routingInfo) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, proto! )) as RlnMessage; @@ -245,7 +277,7 @@ describe("RLN Codec - epoch", () => { expect(msg.epoch!.toString(10).length).to.eq(9); expect(msg.epoch).to.eq(epoch); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); }); }); @@ -257,7 +289,8 @@ describe("RLN codec with version 0 and meta setter", () => { const rlnEncoder = createRLNEncoder({ encoder: createEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo, metaSetter }), rlnInstance, @@ -266,7 +299,10 @@ describe("RLN codec with version 0 and meta setter", () => { }); const rlnDecoder = createRLNDecoder({ rlnInstance, - decoder: createDecoder(TEST_CONSTANTS.contentTopic) + decoder: createDecoder( + TestConstants.contentTopic, + TestConstants.routingInfo + ) }); const bytes = await rlnEncoder.toWire({ payload }); @@ -275,17 +311,17 @@ describe("RLN codec with version 0 and meta setter", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, protoResult! ))!; const expectedMeta = metaSetter({ - ...EMPTY_PROTO_MESSAGE, + ...EmptyProtoMessage, payload: protoResult!.payload }); expect(msg!.meta).to.deep.eq(expectedMeta); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); }); it("toProtoObj", async function () { @@ -294,30 +330,34 @@ describe("RLN codec with version 0 and meta setter", () => { const metaSetter = createTestMetaSetter(); const rlnEncoder = new RLNEncoder( - createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic, metaSetter }), + createEncoder({ + contentTopic: TestConstants.contentTopic, + routingInfo: TestConstants.routingInfo, + metaSetter + }), rlnInstance, index, credential ); const rlnDecoder = new RLNDecoder( rlnInstance, - createDecoder(TEST_CONSTANTS.contentTopic) + createDecoder(TestConstants.contentTopic, TestConstants.routingInfo) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, + TestConstants.emptyPubsubTopic, proto! )) as RlnMessage; const expectedMeta = metaSetter({ - ...EMPTY_PROTO_MESSAGE, + ...EmptyProtoMessage, payload: msg!.payload }); expect(msg!.meta).to.deep.eq(expectedMeta); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); }); }); diff --git a/packages/rln/src/codec.test-utils.ts b/packages/rln/src/codec.test-utils.ts index b3bfc29612..140a726007 100644 --- a/packages/rln/src/codec.test-utils.ts +++ b/packages/rln/src/codec.test-utils.ts @@ -1,4 +1,5 @@ import type { IProtoMessage } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { createRLN } from "./create.js"; @@ -11,14 +12,21 @@ export interface TestRLNCodecSetup { payload: Uint8Array; } -export const TEST_CONSTANTS = { +export const TestConstants = { contentTopic: "/test/1/waku-message/utf8", emptyPubsubTopic: "", defaultIndex: 0, - defaultPayload: new Uint8Array([1, 2, 3, 4, 5]) + defaultPayload: new Uint8Array([1, 2, 3, 4, 5]), + routingInfo: createRoutingInfo( + { + clusterId: 0, + numShardsInCluster: 2 + }, + { contentTopic: "/test/1/waku-message/utf8" } + ) } as const; -export const EMPTY_PROTO_MESSAGE = { +export const EmptyProtoMessage = { timestamp: undefined, contentTopic: "", ephemeral: undefined, @@ -38,8 +46,8 @@ export async function createTestRLNCodecSetup(): Promise { return { rlnInstance, credential, - index: TEST_CONSTANTS.defaultIndex, - payload: TEST_CONSTANTS.defaultPayload + index: TestConstants.defaultIndex, + payload: TestConstants.defaultPayload }; } diff --git a/packages/rln/src/codec.ts b/packages/rln/src/codec.ts index 3a9036d4b1..21be117c1e 100644 --- a/packages/rln/src/codec.ts +++ b/packages/rln/src/codec.ts @@ -4,7 +4,8 @@ import type { IEncoder, IMessage, IProtoMessage, - IRateLimitProof + IRateLimitProof, + IRoutingInfo } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -47,17 +48,16 @@ export class RLNEncoder implements IEncoder { private async generateProof(message: IMessage): Promise { const signal = toRLNSignal(this.contentTopic, message); - const proof = await this.rlnInstance.zerokit.generateRLNProof( + return this.rlnInstance.zerokit.generateRLNProof( signal, this.index, message.timestamp, this.idSecretHash ); - return proof; } - public get pubsubTopic(): string { - return this.encoder.pubsubTopic; + public get routingInfo(): IRoutingInfo { + return this.encoder.routingInfo; } public get contentTopic(): string { @@ -93,8 +93,8 @@ export class RLNDecoder private readonly decoder: IDecoder ) {} - public get pubsubTopic(): string { - return this.decoder.pubsubTopic; + public get routingInfo(): IRoutingInfo { + return this.decoder.routingInfo; } public get contentTopic(): string { diff --git a/packages/rln/src/rln.ts b/packages/rln/src/rln.ts index ba709ed57c..8dbfc69fc0 100644 --- a/packages/rln/src/rln.ts +++ b/packages/rln/src/rln.ts @@ -2,9 +2,9 @@ import { createDecoder, createEncoder } from "@waku/core"; import type { ContentTopic, IDecodedMessage, - EncoderOptions as WakuEncoderOptions + IMetaSetter } from "@waku/interfaces"; -import { Logger } from "@waku/utils"; +import { Logger, RoutingInfo } from "@waku/utils"; import init from "@waku/zerokit-rln-wasm"; import * as zerokitRLN from "@waku/zerokit-rln-wasm"; @@ -27,7 +27,27 @@ import { Zerokit } from "./zerokit.js"; const log = new Logger("waku:rln"); -type WakuRLNEncoderOptions = WakuEncoderOptions & { +type WakuRLNEncoderOptions = { + /** + * The routing information for messages to encode. + */ + routingInfo: RoutingInfo; + /** The content topic to set on outgoing messages. */ + contentTopic: string; + /** + * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. + * @defaultValue `false` + */ + ephemeral?: boolean; + /** + * A function called when encoding messages to set the meta field. + * @param IProtoMessage The message encoded for wire, without the meta field. + * If encryption is used, `metaSetter` only accesses _encrypted_ payload. + */ + metaSetter?: IMetaSetter; + /** + * RLN Credentials + */ credentials: EncryptedCredentials | DecryptedCredentials; }; @@ -87,11 +107,12 @@ export class RLNInstance extends RLNCredentialsManager { } public createDecoder( - contentTopic: ContentTopic + contentTopic: ContentTopic, + routingInfo: RoutingInfo ): RLNDecoder { return createRLNDecoder({ rlnInstance: this, - decoder: createDecoder(contentTopic) + decoder: createDecoder(contentTopic, routingInfo) }); } diff --git a/packages/sdk/src/filter/filter.spec.ts b/packages/sdk/src/filter/filter.spec.ts index 2010ba576c..a0819b36a5 100644 --- a/packages/sdk/src/filter/filter.spec.ts +++ b/packages/sdk/src/filter/filter.spec.ts @@ -5,6 +5,7 @@ import type { IProtoMessage, Libp2p } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -13,8 +14,15 @@ import { PeerManager } from "../peer_manager/index.js"; import { Filter } from "./filter.js"; import { Subscription } from "./subscription.js"; -const PUBSUB_TOPIC = "/waku/2/rs/1/4"; -const CONTENT_TOPIC = "/test/1/waku-filter/utf8"; +const testContentTopic = "/test/1/waku-filter/utf8"; +const testNetworkconfig = { + clusterId: 0, + numShardsInCluster: 9 +}; +const testRoutingInfo = createRoutingInfo(testNetworkconfig, { + contentTopic: testContentTopic +}); +const testPubsubTopic = testRoutingInfo.pubsubTopic; describe("Filter SDK", () => { let libp2p: Libp2p; @@ -29,7 +37,7 @@ describe("Filter SDK", () => { connectionManager = mockConnectionManager(); peerManager = mockPeerManager(); filter = mockFilter({ libp2p, connectionManager, peerManager }); - decoder = createDecoder(CONTENT_TOPIC, PUBSUB_TOPIC); + decoder = createDecoder(testContentTopic, testRoutingInfo); callback = sinon.spy(); }); @@ -80,10 +88,10 @@ describe("Filter SDK", () => { await filter.subscribe(decoder, callback); - const message = createMockMessage(CONTENT_TOPIC); + const message = createMockMessage(testContentTopic); const peerId = "peer1"; - await (filter as any).onIncomingMessage(PUBSUB_TOPIC, message, peerId); + await (filter as any).onIncomingMessage(testPubsubTopic, message, peerId); expect(subscriptionInvokeStub.calledOnce).to.be.true; expect(subscriptionInvokeStub.firstCall.args[0]).to.equal(message); @@ -91,7 +99,11 @@ describe("Filter SDK", () => { }); it("should successfully stop", async () => { - const decoder2 = createDecoder("/another-content-topic", PUBSUB_TOPIC); + const contentTopic2 = "/test/1/waku-filter-2/utf8"; + const decoder2 = createDecoder( + contentTopic2, + createRoutingInfo(testNetworkconfig, { contentTopic: contentTopic2 }) + ); const stopStub = sinon.stub(Subscription.prototype, "stop"); sinon.stub(Subscription.prototype, "add").resolves(true); @@ -129,7 +141,7 @@ function mockLibp2p(): Libp2p { function mockConnectionManager(): ConnectionManager { return { isTopicConfigured: sinon.stub().callsFake((topic: string) => { - return topic === PUBSUB_TOPIC; + return topic === testPubsubTopic; }) } as unknown as ConnectionManager; } diff --git a/packages/sdk/src/filter/filter.ts b/packages/sdk/src/filter/filter.ts index 43895fab7c..b686822f30 100644 --- a/packages/sdk/src/filter/filter.ts +++ b/packages/sdk/src/filter/filter.ts @@ -63,21 +63,21 @@ export class Filter implements IFilter { throw Error("Cannot subscribe with 0 decoders."); } - const pubsubTopics = decoders.map((v) => v.pubsubTopic); - const singlePubsubTopic = pubsubTopics[0]; + const routingInfos = decoders.map((v) => v.routingInfo); + const routingInfo = routingInfos[0]; const contentTopics = decoders.map((v) => v.contentTopic); log.info( - `Subscribing to contentTopics: ${contentTopics}, pubsubTopic: ${singlePubsubTopic}` + `Subscribing to contentTopics: ${contentTopics}, pubsubTopic: ${routingInfo.pubsubTopic}` ); - this.throwIfTopicNotSame(pubsubTopics); + this.throwIfTopicNotSame(routingInfos.map((r) => r.pubsubTopic)); - let subscription = this.subscriptions.get(singlePubsubTopic); + let subscription = this.subscriptions.get(routingInfo.pubsubTopic); if (!subscription) { subscription = new Subscription({ - pubsubTopic: singlePubsubTopic, + routingInfo: routingInfo, protocol: this.protocol, config: this.config, peerManager: this.peerManager @@ -86,7 +86,7 @@ export class Filter implements IFilter { } const result = await subscription.add(decoders, callback); - this.subscriptions.set(singlePubsubTopic, subscription); + this.subscriptions.set(routingInfo.pubsubTopic, subscription); log.info( `Subscription ${result ? "successful" : "failed"} for content topic: ${contentTopics}` @@ -104,7 +104,7 @@ export class Filter implements IFilter { throw Error("Cannot unsubscribe with 0 decoders."); } - const pubsubTopics = decoders.map((v) => v.pubsubTopic); + const pubsubTopics = decoders.map((v) => v.routingInfo.pubsubTopic); const singlePubsubTopic = pubsubTopics[0]; const contentTopics = decoders.map((v) => v.contentTopic); diff --git a/packages/sdk/src/filter/subscription.spec.ts b/packages/sdk/src/filter/subscription.spec.ts index 37f3d48ed3..e65128092f 100644 --- a/packages/sdk/src/filter/subscription.spec.ts +++ b/packages/sdk/src/filter/subscription.spec.ts @@ -1,10 +1,12 @@ import { FilterCore } from "@waku/core"; import type { + AutoSharding, FilterProtocolOptions, IDecodedMessage, IDecoder } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -14,7 +16,13 @@ import { Subscription } from "./subscription.js"; const PUBSUB_TOPIC = "/waku/2/rs/1/4"; const CONTENT_TOPIC = "/test/1/waku-filter/utf8"; - +const NETWORK_CONFIG: AutoSharding = { + clusterId: 2, + numShardsInCluster: 3 +}; +const ROUTING_INFO = createRoutingInfo(NETWORK_CONFIG, { + contentTopic: CONTENT_TOPIC +}); describe("Filter Subscription", () => { let filterCore: FilterCore; let peerManager: PeerManager; @@ -32,7 +40,7 @@ describe("Filter Subscription", () => { }; subscription = new Subscription({ - pubsubTopic: PUBSUB_TOPIC, + routingInfo: ROUTING_INFO, protocol: filterCore, config, peerManager @@ -79,9 +87,11 @@ describe("Filter Subscription", () => { }); it("should invoke callbacks when receiving a message", async () => { - const testContentTopic = "/custom/content/topic"; + const testContentTopic = "/custom/0/content/proto"; const testDecoder = { - pubsubTopic: PUBSUB_TOPIC, + routingInfo: createRoutingInfo(NETWORK_CONFIG, { + contentTopic: testContentTopic + }), contentTopic: testContentTopic, fromProtoObj: sinon.stub().callsFake(() => { return Promise.resolve({ payload: new Uint8Array([1, 2, 3]) }); @@ -106,9 +116,11 @@ describe("Filter Subscription", () => { }); it("should invoke callbacks only when newly receiving message is given", async () => { - const testContentTopic = "/custom/content/topic"; + const testContentTopic = "/custom/0/content/topic"; const testDecoder = { - pubsubTopic: PUBSUB_TOPIC, + routingInfo: createRoutingInfo(NETWORK_CONFIG, { + contentTopic: testContentTopic + }), contentTopic: testContentTopic, fromProtoObj: sinon.stub().callsFake(() => { return Promise.resolve({ payload: new Uint8Array([1, 2, 3]) }); diff --git a/packages/sdk/src/filter/subscription.ts b/packages/sdk/src/filter/subscription.ts index 00804f5601..67b3f6aa33 100644 --- a/packages/sdk/src/filter/subscription.ts +++ b/packages/sdk/src/filter/subscription.ts @@ -10,11 +10,12 @@ import type { IDecodedMessage, IDecoder, IProtoMessage, - PeerIdStr + PeerIdStr, + PubsubTopic } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { Logger } from "@waku/utils"; +import { Logger, RoutingInfo } from "@waku/utils"; import { PeerManager, PeerManagerEventNames } from "../peer_manager/index.js"; @@ -35,7 +36,8 @@ type AttemptUnsubscribeParams = { type Libp2pEventHandler = (e: CustomEvent) => void; export class Subscription { - private readonly pubsubTopic: string; + private readonly routingInfo: RoutingInfo; + private readonly pubsubTopic: PubsubTopic; private readonly protocol: FilterCore; private readonly peerManager: PeerManager; @@ -73,7 +75,8 @@ export class Subscription { public constructor(params: SubscriptionParams) { this.config = params.config; - this.pubsubTopic = params.pubsubTopic; + this.routingInfo = params.routingInfo; + this.pubsubTopic = params.routingInfo.pubsubTopic; this.protocol = params.protocol; this.peerManager = params.peerManager; @@ -193,7 +196,7 @@ export class Subscription { if (this.callbacks.has(decoder)) { log.warn( - `Replacing callback associated associated with decoder with pubsubTopic:${decoder.pubsubTopic} and contentTopic:${decoder.contentTopic}` + `Replacing callback associated associated with decoder with pubsubTopic:${decoder.routingInfo.pubsubTopic} and contentTopic:${decoder.contentTopic}` ); const callback = this.callbacks.get(decoder); @@ -205,7 +208,7 @@ export class Subscription { void (async (): Promise => { try { const message = await decoder.fromProtoObj( - decoder.pubsubTopic, + decoder.routingInfo.pubsubTopic, event.detail as IProtoMessage ); void callback(message!); @@ -230,7 +233,7 @@ export class Subscription { if (!callback) { log.warn( - `No callback associated with decoder with pubsubTopic:${decoder.pubsubTopic} and contentTopic:${decoder.contentTopic}` + `No callback associated with decoder with pubsubTopic:${decoder.routingInfo.pubsubTopic} and contentTopic:${decoder.contentTopic}` ); } @@ -413,11 +416,13 @@ export class Subscription { const usablePeer = await this.peerManager.isPeerOnPubsub( event.detail, - this.pubsubTopic + this.routingInfo.pubsubTopic ); if (!usablePeer) { - log.info(`Peer ${id} doesn't support pubsubTopic:${this.pubsubTopic}`); + log.info( + `Peer ${id} doesn't support pubsubTopic:${this.routingInfo.pubsubTopic}` + ); return; } @@ -483,7 +488,7 @@ export class Subscription { const prevPeers = new Set(this.peers.keys()); const peersToAdd = await this.peerManager.getPeers({ protocol: Protocols.Filter, - pubsubTopic: this.pubsubTopic + routingInfo: this.routingInfo }); for (const peer of peersToAdd) { diff --git a/packages/sdk/src/filter/types.ts b/packages/sdk/src/filter/types.ts index 44326728d1..f010f45440 100644 --- a/packages/sdk/src/filter/types.ts +++ b/packages/sdk/src/filter/types.ts @@ -1,5 +1,9 @@ import type { FilterCore } from "@waku/core"; -import type { FilterProtocolOptions, Libp2p } from "@waku/interfaces"; +import type { + FilterProtocolOptions, + IRoutingInfo, + Libp2p +} from "@waku/interfaces"; import type { WakuMessage } from "@waku/proto"; import type { PeerManager } from "../peer_manager/index.js"; @@ -15,7 +19,7 @@ export type SubscriptionEvents = { }; export type SubscriptionParams = { - pubsubTopic: string; + routingInfo: IRoutingInfo; protocol: FilterCore; config: FilterProtocolOptions; peerManager: PeerManager; diff --git a/packages/sdk/src/light_push/light_push.spec.ts b/packages/sdk/src/light_push/light_push.spec.ts index 114f0e413c..faabc16459 100644 --- a/packages/sdk/src/light_push/light_push.spec.ts +++ b/packages/sdk/src/light_push/light_push.spec.ts @@ -1,6 +1,7 @@ import { Peer, PeerId } from "@libp2p/interface"; import { createEncoder, Encoder, LightPushCodec } from "@waku/core"; import { Libp2p, ProtocolError } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; import sinon, { SinonSpy } from "sinon"; @@ -9,7 +10,14 @@ import { PeerManager } from "../peer_manager/index.js"; import { LightPush } from "./light_push.js"; -const CONTENT_TOPIC = "/test/1/waku-light-push/utf8"; +const testContentTopic = "/test/1/waku-light-push/utf8"; +const testRoutingInfo = createRoutingInfo( + { + clusterId: 0, + numShardsInCluster: 7 + }, + { contentTopic: testContentTopic } +); describe("LightPush SDK", () => { let libp2p: Libp2p; @@ -18,7 +26,10 @@ describe("LightPush SDK", () => { beforeEach(() => { libp2p = mockLibp2p(); - encoder = createEncoder({ contentTopic: CONTENT_TOPIC }); + encoder = createEncoder({ + contentTopic: testContentTopic, + routingInfo: testRoutingInfo + }); lightPush = mockLightPush({ libp2p }); }); diff --git a/packages/sdk/src/light_push/light_push.ts b/packages/sdk/src/light_push/light_push.ts index 5789f351bd..13dc92089e 100644 --- a/packages/sdk/src/light_push/light_push.ts +++ b/packages/sdk/src/light_push/light_push.ts @@ -77,13 +77,13 @@ export class LightPush implements ILightPush { ...options }; - const { pubsubTopic } = encoder; + const { pubsubTopic } = encoder.routingInfo; log.info("send: attempting to send a message to pubsubTopic:", pubsubTopic); const peerIds = await this.peerManager.getPeers({ protocol: Protocols.LightPush, - pubsubTopic: encoder.pubsubTopic + routingInfo: encoder.routingInfo }); const coreResults: CoreProtocolResult[] = @@ -124,7 +124,7 @@ export class LightPush implements ILightPush { this.retryManager.push( sendCallback.bind(this), options.maxAttempts || DEFAULT_MAX_ATTEMPTS, - encoder.pubsubTopic + encoder.routingInfo ); } diff --git a/packages/sdk/src/light_push/retry_manager.spec.ts b/packages/sdk/src/light_push/retry_manager.spec.ts index d5f415503a..425bb5837c 100644 --- a/packages/sdk/src/light_push/retry_manager.spec.ts +++ b/packages/sdk/src/light_push/retry_manager.spec.ts @@ -4,6 +4,7 @@ import { ProtocolError, Protocols } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -11,6 +12,11 @@ import { PeerManager } from "../peer_manager/index.js"; import { RetryManager, ScheduledTask } from "./retry_manager.js"; +const TestRoutingInfo = createRoutingInfo( + { clusterId: 0 }, + { pubsubTopic: "/waku/2/rs/0/0" } +); + describe("RetryManager", () => { let retryManager: RetryManager; let peerManager: PeerManager; @@ -59,7 +65,7 @@ describe("RetryManager", () => { }) ); - retryManager.push(successCallback, 3, "test-topic"); + retryManager.push(successCallback, 3, TestRoutingInfo); retryManager.start(); await clock.tickAsync(200); @@ -74,7 +80,7 @@ describe("RetryManager", () => { (peerManager as any).getPeers = () => []; const callback = sinon.spy(); - retryManager.push(callback, 2, "test-topic"); + retryManager.push(callback, 2, TestRoutingInfo); retryManager.start(); const queue = (retryManager as any)["queue"] as ScheduledTask[]; @@ -92,7 +98,7 @@ describe("RetryManager", () => { (peerManager as any).getPeers = () => []; const callback = sinon.spy(); - retryManager.push(callback, 1, "test-topic"); + retryManager.push(callback, 1, TestRoutingInfo); retryManager.start(); const queue = (retryManager as any)["queue"] as ScheduledTask[]; expect(queue.length).to.equal(1); @@ -117,7 +123,7 @@ describe("RetryManager", () => { const task = { callback: failingCallback, maxAttempts: 2, - pubsubTopic: "test-topic" + routingInfo: TestRoutingInfo }; await (retryManager as any)["taskExecutor"](task); @@ -136,14 +142,14 @@ describe("RetryManager", () => { await (retryManager as any)["taskExecutor"]({ callback: errorCallback, maxAttempts: 1, - pubsubTopic: "test-topic" + routingInfo: TestRoutingInfo }); expect((peerManager.renewPeer as sinon.SinonSpy).calledOnce).to.be.true; expect( (peerManager.renewPeer as sinon.SinonSpy).calledWith(mockPeerId, { protocol: Protocols.LightPush, - pubsubTopic: "test-topic" + routingInfo: TestRoutingInfo }) ).to.be.true; }); @@ -157,7 +163,7 @@ describe("RetryManager", () => { const task = { callback: slowCallback, maxAttempts: 1, - pubsubTopic: "test-topic" + routingInfo: TestRoutingInfo }; const executionPromise = (retryManager as any)["taskExecutor"](task); @@ -175,7 +181,7 @@ describe("RetryManager", () => { const task = { callback: failingCallback, maxAttempts: 0, - pubsubTopic: "test-topic" + routingInfo: TestRoutingInfo }; await (retryManager as any)["taskExecutor"](task); @@ -190,7 +196,7 @@ describe("RetryManager", () => { if (called === 1) retryManager.stop(); return Promise.resolve({ success: mockPeerId, failure: null }); }); - retryManager.push(successCallback, 2, "test-topic"); + retryManager.push(successCallback, 2, TestRoutingInfo); retryManager.start(); await clock.tickAsync(500); expect(called).to.equal(1); @@ -206,7 +212,7 @@ describe("RetryManager", () => { failure: { error: ProtocolError.GENERIC_FAIL } }); }); - retryManager.push(failCallback, 2, "test-topic"); + retryManager.push(failCallback, 2, TestRoutingInfo); retryManager.start(); await clock.tickAsync(1000); retryManager.stop(); diff --git a/packages/sdk/src/light_push/retry_manager.ts b/packages/sdk/src/light_push/retry_manager.ts index 5e42dfc1ae..9fe63fc92e 100644 --- a/packages/sdk/src/light_push/retry_manager.ts +++ b/packages/sdk/src/light_push/retry_manager.ts @@ -1,6 +1,6 @@ import type { PeerId } from "@libp2p/interface"; import { type CoreProtocolResult, Protocols } from "@waku/interfaces"; -import { Logger } from "@waku/utils"; +import { Logger, RoutingInfo } from "@waku/utils"; import type { PeerManager } from "../peer_manager/index.js"; @@ -15,7 +15,7 @@ type AttemptCallback = (peerId: PeerId) => Promise; export type ScheduledTask = { maxAttempts: number; - pubsubTopic: string; + routingInfo: RoutingInfo; callback: AttemptCallback; }; @@ -54,12 +54,12 @@ export class RetryManager { public push( callback: AttemptCallback, maxAttempts: number, - pubsubTopic: string + routingInfo: RoutingInfo ): void { this.queue.push({ maxAttempts, callback, - pubsubTopic + routingInfo }); } @@ -96,7 +96,7 @@ export class RetryManager { const peerId = ( await this.peerManager.getPeers({ protocol: Protocols.LightPush, - pubsubTopic: task.pubsubTopic + routingInfo: task.routingInfo }) )[0]; @@ -142,7 +142,7 @@ export class RetryManager { if (shouldPeerBeChanged(error.message)) { await this.peerManager.renewPeer(peerId, { protocol: Protocols.LightPush, - pubsubTopic: task.pubsubTopic + routingInfo: task.routingInfo }); } diff --git a/packages/sdk/src/peer_manager/peer_manager.spec.ts b/packages/sdk/src/peer_manager/peer_manager.spec.ts index 81a5ec58d1..cac779ca0e 100644 --- a/packages/sdk/src/peer_manager/peer_manager.spec.ts +++ b/packages/sdk/src/peer_manager/peer_manager.spec.ts @@ -5,6 +5,7 @@ import { Libp2p, Protocols } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -17,8 +18,12 @@ describe("PeerManager", () => { let peers: any[]; let mockConnections: any[]; - const TEST_PUBSUB_TOPIC = "/test/1/waku-light-push/utf8"; + const TEST_PUBSUB_TOPIC = "/waku/2/rs/0/0"; const TEST_PROTOCOL = Protocols.LightPush; + const TEST_ROUTING_INFO = createRoutingInfo( + { clusterId: 0 }, + { pubsubTopic: TEST_PUBSUB_TOPIC } + ); const clearPeerState = (): void => { (peerManager as any).lockedPeers.clear(); @@ -36,7 +41,7 @@ describe("PeerManager", () => { const getPeersForTest = async (): Promise => { return await peerManager.getPeers({ protocol: TEST_PROTOCOL, - pubsubTopic: TEST_PUBSUB_TOPIC + routingInfo: TEST_ROUTING_INFO }); }; @@ -81,7 +86,7 @@ describe("PeerManager", () => { pubsubTopics: [TEST_PUBSUB_TOPIC], getConnectedPeers: async () => peers, getPeers: async () => peers, - isPeerOnTopic: async (_id: PeerId, _topic: string) => true + isPeerOnShard: async (_id: PeerId, _topic: string) => true } as unknown as IConnectionManager; peerManager = new PeerManager({ libp2p, @@ -126,7 +131,7 @@ describe("PeerManager", () => { const peerId = ids[0]; await peerManager.renewPeer(peerId, { protocol: TEST_PROTOCOL, - pubsubTopic: TEST_PUBSUB_TOPIC + routingInfo: TEST_ROUTING_INFO }); expect((peerManager as any).lockedPeers.has(peerId.toString())).to.be.false; expect((peerManager as any).unlockedPeers.has(peerId.toString())).to.be @@ -224,7 +229,7 @@ describe("PeerManager", () => { if (skipIfNoPeers(first)) return; await peerManager.renewPeer(first[0], { protocol: TEST_PROTOCOL, - pubsubTopic: TEST_PUBSUB_TOPIC + routingInfo: TEST_ROUTING_INFO }); const second = await getPeersForTest(); if (skipIfNoPeers(second)) return; @@ -238,7 +243,7 @@ describe("PeerManager", () => { } as any; await peerManager.renewPeer(fakePeerId, { protocol: TEST_PROTOCOL, - pubsubTopic: TEST_PUBSUB_TOPIC + routingInfo: TEST_ROUTING_INFO }); expect(true).to.be.true; }); @@ -263,7 +268,7 @@ describe("PeerManager", () => { const peerId = result[0]; await peerManager.renewPeer(peerId, { protocol: TEST_PROTOCOL, - pubsubTopic: TEST_PUBSUB_TOPIC + routingInfo: TEST_ROUTING_INFO }); const connection = mockConnections.find((c) => c.remotePeer.equals(peerId)); diff --git a/packages/sdk/src/peer_manager/peer_manager.ts b/packages/sdk/src/peer_manager/peer_manager.ts index a42baf7215..73ab46c72a 100644 --- a/packages/sdk/src/peer_manager/peer_manager.ts +++ b/packages/sdk/src/peer_manager/peer_manager.ts @@ -16,7 +16,7 @@ import { Libp2pEventHandler, Protocols } from "@waku/interfaces"; -import { Logger } from "@waku/utils"; +import { Logger, RoutingInfo } from "@waku/utils"; const log = new Logger("peer-manager"); @@ -34,7 +34,7 @@ type PeerManagerParams = { type GetPeersParams = { protocol: Protocols; - pubsubTopic: string; + routingInfo: RoutingInfo; }; export enum PeerManagerEventNames { @@ -107,7 +107,9 @@ export class PeerManager { public async getPeers(params: GetPeersParams): Promise { log.info( - `Getting peers for protocol: ${params.protocol}, pubsubTopic: ${params.pubsubTopic}` + `Getting peers for protocol: ${params.protocol}, ` + + `clusterId: ${params.routingInfo.networkConfig.clusterId},` + + ` shard: ${params.routingInfo.shardId}` ); const connectedPeers = await this.connectionManager.getConnectedPeers(); @@ -117,13 +119,19 @@ export class PeerManager { for (const peer of connectedPeers) { const hasProtocol = this.hasPeerProtocol(peer, params.protocol); - const hasSamePubsub = await this.connectionManager.isPeerOnTopic( + + const isOnSameShard = await this.connectionManager.isPeerOnShard( peer.id, - params.pubsubTopic + params.routingInfo.networkConfig.clusterId, + params.routingInfo.shardId ); + if (!isOnSameShard) { + continue; + } + const isPeerAvailableForUse = this.isPeerAvailableForUse(peer.id); - if (hasProtocol && hasSamePubsub && isPeerAvailableForUse) { + if (hasProtocol && isPeerAvailableForUse) { results.push(peer); log.info(`Peer ${peer.id} qualifies for protocol ${params.protocol}`); } @@ -168,7 +176,7 @@ export class PeerManager { public async renewPeer(id: PeerId, params: GetPeersParams): Promise { log.info( - `Renewing peer ${id} for protocol: ${params.protocol}, pubsubTopic: ${params.pubsubTopic}` + `Renewing peer ${id} for protocol: ${params.protocol}, routingInfo: ${params.routingInfo}` ); const connectedPeers = await this.connectionManager.getConnectedPeers(); @@ -265,7 +273,7 @@ export class PeerManager { } const wasUnlocked = new Date(value).getTime(); - return Date.now() - wasUnlocked >= 10_000 ? true : false; + return Date.now() - wasUnlocked >= 10_000; } private dispatchFilterPeerConnect(id: PeerId): void { diff --git a/packages/sdk/src/store/store.spec.ts b/packages/sdk/src/store/store.spec.ts index 025f2df425..983c1ddba1 100644 --- a/packages/sdk/src/store/store.spec.ts +++ b/packages/sdk/src/store/store.spec.ts @@ -1,6 +1,12 @@ import { StoreCore } from "@waku/core"; -import type { IDecodedMessage, IDecoder, Libp2p } from "@waku/interfaces"; +import { + IDecodedMessage, + IDecoder, + IRoutingInfo, + Libp2p +} from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -8,6 +14,15 @@ import { PeerManager } from "../peer_manager/index.js"; import { Store } from "./store.js"; +const TestNetworkingInfo = { clusterId: 0, numShardsInCluster: 8 }; +const MockRoutingInfo: IRoutingInfo = { + pubsubTopic: "/custom/topic", + shardId: 1, + networkConfig: TestNetworkingInfo, + isAutoSharding: false, + isStaticSharding: false +}; + describe("Store", () => { let store: Store; let mockLibp2p: Libp2p; @@ -61,9 +76,11 @@ describe("Store", () => { }); describe("queryGenerator", () => { + const contentTopic = "/test/1/test/proto"; + const routingInfo = createRoutingInfo(TestNetworkingInfo, { contentTopic }); const mockDecoder: IDecoder = { - pubsubTopic: "/waku/2/default-waku/proto", - contentTopic: "/test/1/test/proto", + routingInfo, + contentTopic, fromWireToProtoObj: sinon.stub(), fromProtoObj: sinon.stub() }; @@ -71,7 +88,7 @@ describe("Store", () => { const mockMessage: IDecodedMessage = { version: 1, pubsubTopic: "/waku/2/default-waku/proto", - contentTopic: "/test/1/test/proto", + contentTopic, payload: new Uint8Array([1, 2, 3]), timestamp: new Date(), rateLimitProof: undefined, @@ -98,7 +115,7 @@ describe("Store", () => { expect( mockPeerManager.getPeers.calledWith({ protocol: Protocols.Store, - pubsubTopic: "/waku/2/default-waku/proto" + routingInfo }) ).to.be.true; @@ -250,9 +267,11 @@ describe("Store", () => { mockPeerManager.getPeers.resolves([mockPeerId]); mockStoreCore.queryPerPage.returns(mockResponseGenerator); + const routingInfo: IRoutingInfo = structuredClone(MockRoutingInfo); + routingInfo.pubsubTopic = "/custom/topic"; const generator = store.queryGenerator([mockDecoder], { messageHashes: [new Uint8Array([1, 2, 3]), new Uint8Array([4, 5, 6])], - pubsubTopic: "/custom/topic" + routingInfo }); const results = []; diff --git a/packages/sdk/src/store/store.ts b/packages/sdk/src/store/store.ts index 1297060cf2..1e85274f6e 100644 --- a/packages/sdk/src/store/store.ts +++ b/packages/sdk/src/store/store.ts @@ -12,7 +12,7 @@ import { StoreCursor, StoreProtocolOptions } from "@waku/interfaces"; -import { isDefined, Logger } from "@waku/utils"; +import { isDefined, Logger, RoutingInfo } from "@waku/utils"; import { PeerManager } from "../peer_manager/index.js"; @@ -65,7 +65,7 @@ export class Store implements IStore { ); for (const queryOption of queryOptions) { - const peer = await this.getPeerToUse(queryOption.pubsubTopic); + const peer = await this.getPeerToUse(queryOption.routingInfo); if (!peer) { log.error("No peers available to query"); @@ -181,7 +181,7 @@ export class Store implements IStore { private validateDecodersAndPubsubTopic( decoders: IDecoder[] ): { - pubsubTopic: string; + routingInfo: RoutingInfo; contentTopics: string[]; decodersAsMap: Map>; } { @@ -191,7 +191,7 @@ export class Store implements IStore { } const uniquePubsubTopicsInQuery = Array.from( - new Set(decoders.map((decoder) => decoder.pubsubTopic)) + new Set(decoders.map((decoder) => decoder.routingInfo.pubsubTopic)) ); if (uniquePubsubTopicsInQuery.length > 1) { log.error("API does not support querying multiple pubsub topics at once"); @@ -214,7 +214,9 @@ export class Store implements IStore { }); const contentTopics = decoders - .filter((decoder) => decoder.pubsubTopic === pubsubTopicForQuery) + .filter( + (decoder) => decoder.routingInfo.pubsubTopic === pubsubTopicForQuery + ) .map((dec) => dec.contentTopic); if (contentTopics.length === 0) { @@ -223,16 +225,18 @@ export class Store implements IStore { } return { - pubsubTopic: pubsubTopicForQuery, + routingInfo: decoders[0].routingInfo, contentTopics, decodersAsMap }; } - private async getPeerToUse(pubsubTopic: string): Promise { + private async getPeerToUse( + routingInfo: RoutingInfo + ): Promise { const peers = await this.peerManager.getPeers({ protocol: Protocols.Store, - pubsubTopic + routingInfo }); return this.options.peers @@ -297,15 +301,16 @@ export class Store implements IStore { const isHashQuery = options?.messageHashes && options.messageHashes.length > 0; - let pubsubTopic: string; + let routingInfo: RoutingInfo; let contentTopics: string[]; let decodersAsMap: Map>; if (isHashQuery) { // For hash queries, we still need decoders to decode messages - // but we don't validate pubsubTopic consistency - // Use pubsubTopic from options if provided, otherwise from first decoder - pubsubTopic = options.pubsubTopic || decoders[0]?.pubsubTopic || ""; + // but we don't validate routing info consistency + // Use routing info from options if provided, otherwise from first decoder + // Otherwise, throw + routingInfo = options?.routingInfo || decoders[0]?.routingInfo; contentTopics = []; decodersAsMap = new Map(); decoders.forEach((dec) => { @@ -313,7 +318,7 @@ export class Store implements IStore { }); } else { const validated = this.validateDecodersAndPubsubTopic(decoders); - pubsubTopic = validated.pubsubTopic; + routingInfo = validated.routingInfo; contentTopics = validated.contentTopics; decodersAsMap = validated.decodersAsMap; } @@ -340,7 +345,7 @@ export class Store implements IStore { decodersAsMap, queryOptions: [ { - pubsubTopic, + routingInfo, contentTopics, includeData: true, paginationForward: true, @@ -355,7 +360,7 @@ export class Store implements IStore { return { decodersAsMap, queryOptions: subTimeRanges.map(([start, end]) => ({ - pubsubTopic, + routingInfo, contentTopics, includeData: true, paginationForward: true, diff --git a/packages/sdk/src/waku/utils.spec.ts b/packages/sdk/src/waku/utils.spec.ts deleted file mode 100644 index 57ed1e495c..0000000000 --- a/packages/sdk/src/waku/utils.spec.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { DEFAULT_NUM_SHARDS, DefaultNetworkConfig } from "@waku/interfaces"; -import { contentTopicToShardIndex } from "@waku/utils"; -import { expect } from "chai"; - -import { decoderParamsToShardInfo, isShardCompatible } from "./utils.js"; - -const TestContentTopic = "/test/1/waku-sdk/utf8"; - -describe("IWaku utils", () => { - describe("decoderParamsToShardInfo", () => { - it("should use provided shard info when available", () => { - const params = { - contentTopic: TestContentTopic, - shardInfo: { - clusterId: 10, - shard: 5 - } - }; - - const result = decoderParamsToShardInfo(params, DefaultNetworkConfig); - - expect(result.clusterId).to.equal(10); - expect(result.shard).to.equal(5); - }); - - it("should use network config clusterId when shard info clusterId is not provided", () => { - const params = { - contentTopic: TestContentTopic, - shardInfo: { - clusterId: 1, - shard: 5 - } - }; - - const result = decoderParamsToShardInfo(params, DefaultNetworkConfig); - - expect(result.clusterId).to.equal(1); - expect(result.shard).to.equal(5); - }); - - it("should use shardsUnderCluster when provided", () => { - const contentTopic = TestContentTopic; - const params = { - contentTopic, - shardInfo: { - clusterId: 10, - shardsUnderCluster: 64 - } - }; - - const result = decoderParamsToShardInfo(params, DefaultNetworkConfig); - const expectedShardIndex = contentTopicToShardIndex(contentTopic, 64); - - expect(result.clusterId).to.equal(10); - expect(result.shard).to.equal(expectedShardIndex); - }); - - it("should calculate shard index from content topic when shard is not provided", () => { - const contentTopic = TestContentTopic; - const params = { - contentTopic - }; - - const result = decoderParamsToShardInfo(params, DefaultNetworkConfig); - const expectedShardIndex = contentTopicToShardIndex( - contentTopic, - DEFAULT_NUM_SHARDS - ); - - expect(result.clusterId).to.equal(1); - expect(result.shard).to.equal(expectedShardIndex); - }); - }); - - describe("isShardCompatible", () => { - it("should return false when clusterId doesn't match", () => { - const shardInfo = { - clusterId: 10, - shard: 5 - }; - - const result = isShardCompatible(shardInfo, DefaultNetworkConfig); - - expect(result).to.be.false; - }); - - it("should return false when shard is not included in network shards", () => { - const shardInfo = { - clusterId: 1, - shard: 5 - }; - - const networkConfig = { - clusterId: 1, - shards: [1, 2, 3, 4] - }; - - const result = isShardCompatible(shardInfo, networkConfig); - - expect(result).to.be.false; - }); - - it("should return true when clusterId matches and shard is included in network shards", () => { - const shardInfo = { - clusterId: 1, - shard: 3 - }; - - const networkConfig = { - clusterId: 1, - shards: [1, 2, 3, 4] - }; - - const result = isShardCompatible(shardInfo, networkConfig); - - expect(result).to.be.true; - }); - }); -}); diff --git a/packages/sdk/src/waku/utils.ts b/packages/sdk/src/waku/utils.ts deleted file mode 100644 index 76c99a6eeb..0000000000 --- a/packages/sdk/src/waku/utils.ts +++ /dev/null @@ -1,47 +0,0 @@ -import type { - CreateDecoderParams, - NetworkConfig, - SingleShardInfo -} from "@waku/interfaces"; -import { DEFAULT_NUM_SHARDS } from "@waku/interfaces"; -import { contentTopicToShardIndex } from "@waku/utils"; - -export const decoderParamsToShardInfo = ( - params: CreateDecoderParams, - networkConfig: NetworkConfig -): SingleShardInfo => { - const clusterId = (params.shardInfo?.clusterId || - networkConfig.clusterId) as number; - const shardsUnderCluster = - params.shardInfo && "shardsUnderCluster" in params.shardInfo - ? params.shardInfo.shardsUnderCluster - : DEFAULT_NUM_SHARDS; - - const shardIndex = - params.shardInfo && "shard" in params.shardInfo - ? params.shardInfo.shard - : contentTopicToShardIndex(params.contentTopic, shardsUnderCluster); - - return { - clusterId, - shard: shardIndex - }; -}; - -export const isShardCompatible = ( - shardInfo: SingleShardInfo, - networkConfig: NetworkConfig -): boolean => { - if (networkConfig.clusterId !== shardInfo.clusterId) { - return false; - } - - if ( - "shards" in networkConfig && - !networkConfig.shards.includes(shardInfo.shard!) - ) { - return false; - } - - return true; -}; diff --git a/packages/sdk/src/waku/waku.ts b/packages/sdk/src/waku/waku.ts index df6e845e3a..5d44a2c609 100644 --- a/packages/sdk/src/waku/waku.ts +++ b/packages/sdk/src/waku/waku.ts @@ -27,7 +27,7 @@ import { HealthStatus, Protocols } from "@waku/interfaces"; -import { Logger } from "@waku/utils"; +import { createRoutingInfo, Logger, RoutingInfo } from "@waku/utils"; import { Filter } from "../filter/index.js"; import { HealthIndicator } from "../health_indicator/index.js"; @@ -35,7 +35,6 @@ import { LightPush } from "../light_push/index.js"; import { PeerManager } from "../peer_manager/index.js"; import { Store } from "../store/index.js"; -import { decoderParamsToShardInfo, isShardCompatible } from "./utils.js"; import { waitForRemotePeer } from "./wait_for_remote_peer.js"; const log = new Logger("waku"); @@ -260,40 +259,33 @@ export class WakuNode implements IWaku { } public createDecoder(params: CreateDecoderParams): IDecoder { - const singleShardInfo = decoderParamsToShardInfo( - params, - this.networkConfig + const routingInfo = getRoutingInfo( + this.networkConfig, + params.contentTopic, + params.shardId ); - - log.info( - `Creating Decoder with input:${JSON.stringify(params.shardInfo)}, determined:${JSON.stringify(singleShardInfo)}, expected:${JSON.stringify(this.networkConfig)}.` - ); - - if (!isShardCompatible(singleShardInfo, this.networkConfig)) { - throw Error(`Cannot create decoder: incompatible shard configuration.`); - } - - return createDecoder(params.contentTopic, singleShardInfo); + return createDecoder(params.contentTopic, routingInfo); } public createEncoder(params: CreateEncoderParams): IEncoder { - const singleShardInfo = decoderParamsToShardInfo( - params, - this.networkConfig + const routingInfo = getRoutingInfo( + this.networkConfig, + params.contentTopic, + params.shardId ); - log.info( - `Creating Encoder with input:${JSON.stringify(params.shardInfo)}, determined:${JSON.stringify(singleShardInfo)}, expected:${JSON.stringify(this.networkConfig)}.` - ); - - if (!isShardCompatible(singleShardInfo, this.networkConfig)) { - throw Error(`Cannot create encoder: incompatible shard configuration.`); - } - return createEncoder({ contentTopic: params.contentTopic, ephemeral: params.ephemeral, - pubsubTopicShardInfo: singleShardInfo + routingInfo: routingInfo }); } } + +function getRoutingInfo( + networkConfig: NetworkConfig, + contentTopic?: string, + shardId?: number +): RoutingInfo { + return createRoutingInfo(networkConfig, { contentTopic, shardId }); +} diff --git a/packages/tests/src/constants.ts b/packages/tests/src/constants.ts index 025026df39..6847f41dac 100644 --- a/packages/tests/src/constants.ts +++ b/packages/tests/src/constants.ts @@ -5,7 +5,8 @@ * @module */ -import { PubsubTopic, ShardInfo, SingleShardInfo } from "@waku/interfaces"; +import { AutoSharding, RelayShards } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; export const NOISE_KEY_1 = new Uint8Array( ((): number[] => { @@ -46,11 +47,27 @@ export const TEST_STRING = [ { description: "Arabic", value: "مرحبا" }, { description: "Russian", value: "Привет" }, { description: "SQL Injection", value: "'; DROP TABLE users; --" }, - { description: "Script", value: '' }, - { description: "XML", value: "Some content" }, - { description: "Basic HTML tag", value: "

Heading

" }, + { + description: "Script", + value: '', + invalidContentTopic: true + }, + { + description: "XML", + value: "Some content", + invalidContentTopic: true + }, + { + description: "Basic HTML tag", + value: "

Heading

", + invalidContentTopic: true + }, { description: "JSON", value: '{"user":"admin","password":"123456"}' }, - { description: "shell command", value: "`rm -rf /`" }, + { + description: "shell command", + value: "`rm -rf /`", + invalidContentTopic: true + }, { description: "escaped characters", value: "\\n\\t\\0" }, { description: "unicode special characters", value: "\u202Ereverse" }, { description: "emoji", value: "🤫 🤥 😶 😶‍🌫️ 😐 😑 😬 🫨 🫠 🙄 😯 😦 😧 😮" } @@ -68,12 +85,18 @@ export const MOCHA_HOOK_MAX_TIMEOUT = 50_000; export const SEPOLIA_RPC_URL = process.env.SEPOLIA_RPC_URL || "https://sepolia.gateway.tenderly.co"; -export const DefaultTestPubsubTopic: PubsubTopic = "/waku/2/rs/0/0"; -export const DefaultTestShardInfo: ShardInfo = { - clusterId: 0, +export const DefaultTestClusterId = 0; +export const DefaultTestNumShardsInCluster = 10; +export const DefaultTestNetworkConfig: AutoSharding = { + clusterId: DefaultTestClusterId, + numShardsInCluster: DefaultTestNumShardsInCluster +}; +export const DefaultTestRelayShards: RelayShards = { + clusterId: DefaultTestClusterId, shards: [0] }; -export const DefaultTestSingleShardInfo: SingleShardInfo = { - clusterId: 0, - shard: 0 -}; +export const DefaultTestContentTopic = "/test/1/content-topic/proto"; +export const DefaultTestRoutingInfo = createRoutingInfo( + DefaultTestNetworkConfig, + { contentTopic: DefaultTestContentTopic } +); diff --git a/packages/tests/src/lib/index.ts b/packages/tests/src/lib/index.ts index 22323ee2af..02b0b77fac 100644 --- a/packages/tests/src/lib/index.ts +++ b/packages/tests/src/lib/index.ts @@ -1,13 +1,7 @@ -import { - AutoSharding, - IDecodedMessage, - NetworkConfig, - StaticSharding -} from "@waku/interfaces"; -import { contentTopicToShardIndex, Logger } from "@waku/utils"; +import { ContentTopic, IDecodedMessage } from "@waku/interfaces"; +import { isAutoShardingRoutingInfo, Logger, RoutingInfo } from "@waku/utils"; import { expect } from "chai"; -import { DefaultTestPubsubTopic } from "../constants.js"; import { Args, MessageRpcQuery, MessageRpcResponse } from "../types.js"; import { delay, makeLogFileName } from "../utils/index.js"; @@ -29,7 +23,7 @@ export class ServiceNodesFleet { mochaContext: Mocha.Context, nodesToCreate: number = 3, strictChecking: boolean = false, - networkConfig: NetworkConfig, + routingInfo: RoutingInfo, _args?: Args, withoutFilter = false ): Promise { @@ -40,7 +34,7 @@ export class ServiceNodesFleet { makeLogFileName(mochaContext) + Math.random().toString(36).substring(7) ); - const args = getArgs(networkConfig, _args); + const args = applyDefaultArgs(routingInfo, _args); if (nodes[0]) { const addr = await nodes[0].getExternalMultiaddr(); @@ -93,15 +87,19 @@ export class ServiceNodesFleet { public async sendRelayMessage( message: MessageRpcQuery, - pubsubTopic: string = DefaultTestPubsubTopic + routingInfo: RoutingInfo ): Promise { const relayMessagePromises: Promise[] = this.nodes.map((node) => - node.sendMessage(message, pubsubTopic) + node.sendMessage(message, routingInfo) ); const relayMessages = await Promise.all(relayMessagePromises); return relayMessages.every((message) => message); } + /** + * This is a dodgy things to do as it assumes the nwaku node did not flush + * any messages from its cache. + */ public async confirmMessageLength(numMessages: number): Promise { if (this.strictChecking) { await Promise.all( @@ -203,13 +201,12 @@ class MultipleNodesMessageCollector { public async waitForMessages( numMessages: number, options?: { - pubsubTopic?: string; timeoutDuration?: number; exact?: boolean; + contentTopic?: ContentTopic; } ): Promise { const startTime = Date.now(); - const pubsubTopic = options?.pubsubTopic || DefaultTestPubsubTopic; const timeoutDuration = options?.timeoutDuration || 400; const exact = options?.exact || false; @@ -218,7 +215,7 @@ class MultipleNodesMessageCollector { if (this.strictChecking) { const results = await Promise.all( this.relayNodes.map(async (node) => { - const msgs = await node.messages(pubsubTopic); + const msgs = await node.messages(options?.contentTopic); return msgs.length >= numMessages; }) ); @@ -226,7 +223,7 @@ class MultipleNodesMessageCollector { } else { const results = await Promise.all( this.relayNodes.map(async (node) => { - const msgs = await node.messages(pubsubTopic); + const msgs = await node.messages(options?.contentTopic); return msgs.length >= numMessages; }) ); @@ -257,23 +254,25 @@ class MultipleNodesMessageCollector { } } -function getArgs(networkConfig: NetworkConfig, args?: Args): Args { - const defaultArgs = { +function applyDefaultArgs(routingInfo: RoutingInfo, args?: Args): Args { + const defaultArgs: Args = { lightpush: true, filter: true, discv5Discovery: true, peerExchange: true, - relay: true, - clusterId: networkConfig.clusterId - } as Args; + relay: true + }; - if ((networkConfig as StaticSharding).shards) { - defaultArgs.shard = (networkConfig as StaticSharding).shards; - } else if ((networkConfig as AutoSharding).contentTopics) { - defaultArgs.contentTopic = (networkConfig as AutoSharding).contentTopics; - defaultArgs.shard = (networkConfig as AutoSharding).contentTopics.map( - (topic) => contentTopicToShardIndex(topic) - ); + defaultArgs.clusterId = routingInfo.networkConfig.clusterId; + + if (isAutoShardingRoutingInfo(routingInfo)) { + defaultArgs.numShardsInNetwork = + routingInfo.networkConfig.numShardsInCluster; + + defaultArgs.contentTopic = [routingInfo.contentTopic]; + } else { + defaultArgs.numShardsInNetwork = 0; + defaultArgs.shard = [routingInfo.shardId]; } return { ...defaultArgs, ...args }; diff --git a/packages/tests/src/lib/message_collector.ts b/packages/tests/src/lib/message_collector.ts index 3f8a29ee48..456d1f881f 100644 --- a/packages/tests/src/lib/message_collector.ts +++ b/packages/tests/src/lib/message_collector.ts @@ -4,7 +4,6 @@ import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; import { AssertionError, expect } from "chai"; import { equals } from "uint8arrays/equals"; -import { DefaultTestPubsubTopic } from "../constants.js"; import { MessageRpcResponse } from "../types.js"; import { base64ToUtf8 } from "../utils/base64_utf8.js"; import { delay } from "../utils/delay.js"; @@ -67,20 +66,19 @@ export class MessageCollector { public async waitForMessages( numMessages: number, options?: { - pubsubTopic?: string; + // pubsubTopic?: string; timeoutDuration?: number; exact?: boolean; } ): Promise { const startTime = Date.now(); - const pubsubTopic = this.getPubsubTopicToUse(options?.pubsubTopic); const timeoutDuration = options?.timeoutDuration || 400; const exact = options?.exact || false; while (this.count < numMessages) { if (this.nwaku) { try { - this.list = await this.nwaku.messages(pubsubTopic); + this.list = await this.nwaku.messages(); } catch (error) { log.error(`Can't retrieve messages because of ${error}`); await delay(10); @@ -237,15 +235,13 @@ export class MessageCollector { `Message text mismatch. Expected: ${options.expectedMessageText}. Got: ${receivedMessageText}` ); } else { - const pubsubTopicToUse = this.getPubsubTopicToUse( - options.expectedPubsubTopic - ); - // js-waku message specific assertions - expect(message.pubsubTopic).to.eq( - pubsubTopicToUse, - `Message pub/sub topic mismatch. Expected: ${pubsubTopicToUse}. Got: ${message.pubsubTopic}` - ); - + if (options.expectedPubsubTopic) { + // js-waku message specific assertions + expect(message.pubsubTopic).to.eq( + options.expectedPubsubTopic, + `Message pub/sub topic mismatch. Expected: ${options.expectedPubsubTopic}. Got: ${message.pubsubTopic}` + ); + } expect(bytesToUtf8(message.payload)).to.eq( options.expectedMessageText, `Message text mismatch. Expected: ${ @@ -267,8 +263,4 @@ export class MessageCollector { ); } } - - private getPubsubTopicToUse(pubsubTopic: string | undefined): string { - return pubsubTopic || DefaultTestPubsubTopic; - } } diff --git a/packages/tests/src/lib/runNodes.ts b/packages/tests/src/lib/runNodes.ts index 09f13c9dbd..19ba198bd3 100644 --- a/packages/tests/src/lib/runNodes.ts +++ b/packages/tests/src/lib/runNodes.ts @@ -1,14 +1,23 @@ -import { CreateNodeOptions, NetworkConfig, Protocols } from "@waku/interfaces"; -import { createRelayNode } from "@waku/relay"; +import { + ContentTopic, + type CreateNodeOptions, + type NetworkConfig, + Protocols, + type ShardId +} from "@waku/interfaces"; +import { createRelayNode, RelayCreateOptions } from "@waku/relay"; import { createLightNode, WakuNode } from "@waku/sdk"; import { - derivePubsubTopicsFromNetworkConfig, + createRoutingInfo, + isAutoSharding, + isStaticSharding, Logger, - pubsubTopicsToShardInfo + RoutingInfo } from "@waku/utils"; import { Context } from "mocha"; import { NOISE_KEY_1 } from "../constants.js"; +import { Args } from "../types.js"; import { makeLogFileName } from "../utils/index.js"; import { ServiceNode } from "./service_node.js"; @@ -24,6 +33,8 @@ export const DEFAULT_DISCOVERIES_ENABLED = { type RunNodesOptions = { context: Context; networkConfig: NetworkConfig; + relayShards?: ShardId[]; // Only for static sharding + contentTopics?: ContentTopic[]; // Only for auto sharding protocols: Protocols[]; createNode: typeof createLightNode | typeof createRelayNode; }; @@ -34,32 +45,61 @@ export async function runNodes( const { context, networkConfig, createNode, protocols } = options; const nwaku = new ServiceNode(makeLogFileName(context)); - const pubsubTopics = derivePubsubTopicsFromNetworkConfig(networkConfig); - const shardInfo = pubsubTopicsToShardInfo(pubsubTopics); - await nwaku.start( - { - filter: true, - lightpush: true, - relay: true, - store: true, - shard: shardInfo.shards, - clusterId: shardInfo.clusterId - }, - { retries: 3 } - ); - const waku_options: CreateNodeOptions = { + const nwakuArgs: Args = { + filter: true, + lightpush: true, + relay: true, + store: true, + clusterId: networkConfig.clusterId + }; + + const jswakuArgs: CreateNodeOptions = { staticNoiseKey: NOISE_KEY_1, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }, - networkConfig: shardInfo, + networkConfig, lightPush: { numPeersToUse: 2 }, discovery: DEFAULT_DISCOVERIES_ENABLED }; - log.info("Starting js waku node with :", JSON.stringify(waku_options)); + const routingInfos: RoutingInfo[] = []; + if (isAutoSharding(networkConfig)) { + nwakuArgs.numShardsInNetwork = networkConfig.numShardsInCluster; + nwakuArgs.contentTopic = options.contentTopics ?? []; + + nwakuArgs.contentTopic.map((ct) => + routingInfos.push(createRoutingInfo(networkConfig, { contentTopic: ct })) + ); + + if (options.relayShards && options.relayShards.length > 0) + throw "`relayShards` cannot be set for auto-sharding"; + } else if (isStaticSharding(networkConfig) && options.relayShards) { + const shards = options.relayShards; + nwakuArgs.shard = shards; + + shards.map((shardId) => + routingInfos.push(createRoutingInfo(networkConfig, { shardId })) + ); + + if (options.contentTopics && options.contentTopics.length > 0) + throw "`contentTopics` cannot be set for static sharding"; + } else { + throw "Invalid Network Config"; + } + + const jswakuRelayCreateOptions: RelayCreateOptions = { + routingInfos + }; + + await nwaku.start(nwakuArgs, { retries: 3 }); + + log.info("Starting js waku node with :", JSON.stringify(jswakuArgs)); let waku: WakuNode | undefined; try { - waku = (await createNode(waku_options)) as unknown as WakuNode; + waku = (await createNode({ + ...jswakuArgs, + ...jswakuRelayCreateOptions + })) as unknown as WakuNode; await waku.start(); } catch (error) { log.error("jswaku node failed to start:", error); @@ -68,7 +108,18 @@ export async function runNodes( if (waku) { await waku.dial(await nwaku.getMultiaddrWithId()); await waku.waitForPeers(protocols); - await nwaku.ensureSubscriptions(pubsubTopics); + + // TODO + + // const clusterId = networkConfig.clusterId; + + // await nwaku.ensureSubscriptions( + // relayShardsToPubsubTopics({ + // clusterId, + // shards: options.relayShards ?? [] + // }) + // ); + return [nwaku, waku as T]; } else { throw new Error("Failed to initialize waku"); diff --git a/packages/tests/src/lib/service_node.ts b/packages/tests/src/lib/service_node.ts index 6f7262a006..7048c763bc 100644 --- a/packages/tests/src/lib/service_node.ts +++ b/packages/tests/src/lib/service_node.ts @@ -1,12 +1,19 @@ import type { PeerId } from "@libp2p/interface"; import { peerIdFromString } from "@libp2p/peer-id"; import { Multiaddr, multiaddr } from "@multiformats/multiaddr"; -import { isDefined, shardInfoToPubsubTopics } from "@waku/utils"; +import { ContentTopic, PubsubTopic } from "@waku/interfaces"; +import { + formatPubsubTopic, + isAutoSharding, + isDefined, + isStaticSharding, + RoutingInfo +} from "@waku/utils"; import { Logger } from "@waku/utils"; import pRetry from "p-retry"; import portfinder from "portfinder"; -import { DefaultTestPubsubTopic } from "../constants.js"; +import { DefaultTestNetworkConfig } from "../constants.js"; import { Args, LogLevel, @@ -245,9 +252,7 @@ export class ServiceNode { ); } - public async ensureSubscriptions( - pubsubTopics: string[] = [DefaultTestPubsubTopic] - ): Promise { + public async ensureSubscriptions(pubsubTopics: string[]): Promise { return this.restCall( "/relay/v1/subscriptions", "POST", @@ -256,13 +261,51 @@ export class ServiceNode { ); } - public async messages(_pubsubTopic?: string): Promise { - const pubsubTopic = - _pubsubTopic ?? - shardInfoToPubsubTopics({ - clusterId: this.args?.clusterId, - shards: this.args?.shard - })[0]; + public async messages( + contentTopic?: ContentTopic + ): Promise { + if (contentTopic) { + return this.contentTopicMessages(contentTopic); + } + + if (this.args?.contentTopic) { + if (this.args?.contentTopic.length > 1) + throw "More that one content topic passed, not supported"; + const contentTopic = this.args?.contentTopic[0]; + + return this.contentTopicMessages(contentTopic); + } + + if (this.args?.shard) { + if (this.args?.shard.length > 1) + throw "More that one shard passed, not supported"; + const pubsubTopic = formatPubsubTopic( + this.args.clusterId ?? DefaultTestNetworkConfig.clusterId, + this.args?.shard[0] + ); + return this.pubsubTopicMessages(pubsubTopic); + } + + throw "Content topic, shard or pubsubTopic must be set"; + } + + private async contentTopicMessages( + contentTopic: ContentTopic + ): Promise { + return this.restCall( + `/relay/v1/auto/messages/${encodeURIComponent(contentTopic)}`, + "GET", + null, + async (response) => { + const data = await response.json(); + return data?.length ? data : []; + } + ); + } + + private async pubsubTopicMessages( + pubsubTopic: PubsubTopic + ): Promise { return this.restCall( `/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`, "GET", @@ -289,7 +332,20 @@ export class ServiceNode { public async sendMessage( message: MessageRpcQuery, - _pubsubTopic?: string + routingInfo: RoutingInfo + ): Promise { + if (isAutoSharding(routingInfo.networkConfig)) { + return this.sendMessageAutoSharding(message); + } + if (isStaticSharding(routingInfo.networkConfig)) { + return this.sendMessageStaticSharding(message, routingInfo.pubsubTopic); + } + throw "Invalid network config"; + } + + private async sendMessageStaticSharding( + message: MessageRpcQuery, + pubsubTopic: PubsubTopic ): Promise { this.checkProcess(); @@ -297,21 +353,15 @@ export class ServiceNode { message.timestamp = BigInt(new Date().valueOf()) * OneMillion; } - const pubsubTopic = - _pubsubTopic ?? - shardInfoToPubsubTopics({ - clusterId: this.args?.clusterId, - shards: this.args?.shard - })[0]; return this.restCall( - `/relay/v1/messages/${encodeURIComponent(pubsubTopic || DefaultTestPubsubTopic)}`, + `/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`, "POST", message, async (response) => response.status === 200 ); } - public async sendMessageAutosharding( + private async sendMessageAutoSharding( message: MessageRpcQuery ): Promise { this.checkProcess(); @@ -398,7 +448,11 @@ export class ServiceNode { if (body) options.body = JSON.stringify(body); const response = await fetch(`${this.httpUrl}${endpoint}`, options); - log.info(`Received REST Response: `, response.status); + log.info( + `Received REST Response: `, + response.status, + response.statusText + ); return await processResponse(response); } catch (error) { log.error(`${this.httpUrl} failed with error:`, error); @@ -429,9 +483,7 @@ export function defaultArgs(): Args { rest: true, restAdmin: true, websocketSupport: true, - logLevel: LogLevel.Trace, - clusterId: 0, - shard: [0] + logLevel: LogLevel.Trace }; } diff --git a/packages/tests/src/types.ts b/packages/tests/src/types.ts index 872cadbe5a..d7b1903f7a 100644 --- a/packages/tests/src/types.ts +++ b/packages/tests/src/types.ts @@ -1,3 +1,5 @@ +import type { ClusterId, ShardId } from "@waku/interfaces"; + export interface Args { staticnode?: string; nat?: "none"; @@ -21,8 +23,9 @@ export interface Args { websocketPort?: number; discv5BootstrapNode?: string; discv5UdpPort?: number; - clusterId?: number; - shard?: Array; + clusterId?: ClusterId; + shard?: Array; + numShardsInNetwork?: number; rlnRelayEthClientAddress?: string; } diff --git a/packages/tests/src/utils/generate_test_data.ts b/packages/tests/src/utils/generate_test_data.ts index cd1b6ed61b..420f06b56e 100644 --- a/packages/tests/src/utils/generate_test_data.ts +++ b/packages/tests/src/utils/generate_test_data.ts @@ -1,13 +1,11 @@ import { createDecoder, createEncoder, Decoder, Encoder } from "@waku/core"; - -type TestDataOptions = { - pubsubTopic: string; -}; +import { AutoSharding } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; // Utility to generate test data for multiple topics tests. export function generateTestData( topicCount: number, - options?: TestDataOptions + networkConfig: AutoSharding ): { contentTopics: string[]; encoders: Encoder[]; @@ -15,14 +13,22 @@ export function generateTestData( } { const contentTopics = Array.from( { length: topicCount }, - (_, i) => `/test/${i + 1}/waku-multi/default` + // Remember that auto-sharding uses both app name and app version fields + (_, i) => `/test/0/waku-multi-${i + 1}/default` ); const encoders = contentTopics.map((topic) => - createEncoder({ contentTopic: topic, pubsubTopic: options?.pubsubTopic }) + createEncoder({ + contentTopic: topic, + routingInfo: createRoutingInfo(networkConfig, { contentTopic: topic }) + }) ); const decoders = contentTopics.map((topic) => - createDecoder(topic, options?.pubsubTopic) + createDecoder( + topic, + createRoutingInfo(networkConfig, { contentTopic: topic }) + ) ); + return { contentTopics, encoders, diff --git a/packages/tests/src/utils/nodes.ts b/packages/tests/src/utils/nodes.ts index ef312f5868..3490193d84 100644 --- a/packages/tests/src/utils/nodes.ts +++ b/packages/tests/src/utils/nodes.ts @@ -1,13 +1,11 @@ import { CreateNodeOptions, - DefaultNetworkConfig, IWaku, LightNode, - NetworkConfig, Protocols } from "@waku/interfaces"; import { createLightNode } from "@waku/sdk"; -import { derivePubsubTopicsFromNetworkConfig } from "@waku/utils"; +import { RoutingInfo } from "@waku/utils"; import { Context } from "mocha"; import pRetry from "p-retry"; @@ -18,9 +16,20 @@ import { Args } from "../types.js"; import { waitForConnections } from "./waitForConnections.js"; +/** + * Runs both js-waku and nwaku nodes. + * + * @param context + * @param routingInfo + * @param customArgs passed to nwaku service nodes + * @param strictChecking + * @param numServiceNodes + * @param withoutFilter + * @param jsWakuParams + */ export async function runMultipleNodes( context: Context, - networkConfig: NetworkConfig = DefaultNetworkConfig, + routingInfo: RoutingInfo, customArgs?: Args, strictChecking: boolean = false, numServiceNodes = 2, @@ -32,7 +41,7 @@ export async function runMultipleNodes( context, numServiceNodes, strictChecking, - networkConfig, + routingInfo, customArgs, withoutFilter ); @@ -42,7 +51,7 @@ export async function runMultipleNodes( libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }, - networkConfig, + networkConfig: routingInfo.networkConfig, lightPush: { numPeersToUse: numServiceNodes }, discovery: DEFAULT_DISCOVERIES_ENABLED, ...jsWakuParams @@ -57,9 +66,10 @@ export async function runMultipleNodes( for (const node of serviceNodes.nodes) { await waku.dial(await node.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); - await node.ensureSubscriptions( - derivePubsubTopicsFromNetworkConfig(networkConfig) - ); + // TODO + // await node.ensureSubscriptions( + // derivePubsubTopicsFromNetworkConfig(networkConfig) + // ); const wakuConnections = waku.libp2p.getConnections(); diff --git a/packages/tests/tests/connection-mananger/connection_limiter.spec.ts b/packages/tests/tests/connection-mananger/connection_limiter.spec.ts index 57aa374b14..ee6b9f0da7 100644 --- a/packages/tests/tests/connection-mananger/connection_limiter.spec.ts +++ b/packages/tests/tests/connection-mananger/connection_limiter.spec.ts @@ -9,7 +9,7 @@ import { teardownNodesWithRedundancy } from "../../src/index.js"; -import { TestShardInfo } from "./utils.js"; +import { TestRoutingInfo } from "./utils.js"; describe("Connection Limiter", function () { let waku: LightNode; @@ -18,7 +18,7 @@ describe("Connection Limiter", function () { beforeEachCustom(this, async () => { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, peerExchange: true }, false, 2, @@ -68,7 +68,7 @@ describe("Connection Limiter", function () { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, peerExchange: true }, false, 2, @@ -126,7 +126,7 @@ describe("Connection Limiter", function () { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, peerExchange: true }, false, 2, diff --git a/packages/tests/tests/connection-mananger/dialing.spec.ts b/packages/tests/tests/connection-mananger/dialing.spec.ts index 70c9a3e017..5f11541a26 100644 --- a/packages/tests/tests/connection-mananger/dialing.spec.ts +++ b/packages/tests/tests/connection-mananger/dialing.spec.ts @@ -10,7 +10,7 @@ import { teardownNodesWithRedundancy } from "../../src/index.js"; -import { TestShardInfo } from "./utils.js"; +import { TestRoutingInfo } from "./utils.js"; describe("Dialing", function () { const ctx: Context = this.ctx; @@ -20,7 +20,7 @@ describe("Dialing", function () { beforeEachCustom(this, async () => { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, peerExchange: true }, false, 2, @@ -33,7 +33,7 @@ describe("Dialing", function () { ctx, 2, false, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, diff --git a/packages/tests/tests/connection-mananger/discovery_dialer.spec.ts b/packages/tests/tests/connection-mananger/discovery_dialer.spec.ts index 8d33b69715..1105b16768 100644 --- a/packages/tests/tests/connection-mananger/discovery_dialer.spec.ts +++ b/packages/tests/tests/connection-mananger/discovery_dialer.spec.ts @@ -11,7 +11,7 @@ import { teardownNodesWithRedundancy } from "../../src/index.js"; -import { TestShardInfo } from "./utils.js"; +import { TestRoutingInfo } from "./utils.js"; // TODO: investigate and re-enable in https://github.com/waku-org/js-waku/issues/2453 describe.skip("DiscoveryDialer", function () { @@ -22,7 +22,7 @@ describe.skip("DiscoveryDialer", function () { beforeEachCustom(this, async () => { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, peerExchange: true }, false, 2, @@ -35,7 +35,7 @@ describe.skip("DiscoveryDialer", function () { ctx, 2, false, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, diff --git a/packages/tests/tests/connection-mananger/network_monitor.spec.ts b/packages/tests/tests/connection-mananger/network_monitor.spec.ts index 371660f378..bfc0c2c322 100644 --- a/packages/tests/tests/connection-mananger/network_monitor.spec.ts +++ b/packages/tests/tests/connection-mananger/network_monitor.spec.ts @@ -11,7 +11,8 @@ import { expect } from "chai"; import { afterEachCustom, beforeEachCustom, - DefaultTestShardInfo, + DefaultTestNetworkConfig, + DefaultTestRoutingInfo, delay, NOISE_KEY_1 } from "../../src/index.js"; @@ -36,7 +37,7 @@ describe("Connection state", function () { let originalNavigator: any; beforeEachCustom(this, async () => { - waku = await createLightNode({ networkConfig: DefaultTestShardInfo }); + waku = await createLightNode({ networkConfig: DefaultTestNetworkConfig }); nwaku1 = new ServiceNode(makeLogFileName(this.ctx) + "1"); nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2"); await nwaku1.start({ filter: true }); @@ -104,11 +105,13 @@ describe("Connection state", function () { it("`waku:online` between 2 js-waku relay nodes", async function () { const waku1 = await createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }); const waku2 = await createRelayNode({ libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }); let eventCount1 = 0; @@ -171,10 +174,12 @@ describe("Connection state", function () { it("isConnected between 2 js-waku relay nodes", async function () { const waku1 = await createRelayNode({ - staticNoiseKey: NOISE_KEY_1 + staticNoiseKey: NOISE_KEY_1, + routingInfos: [DefaultTestRoutingInfo] }); const waku2 = await createRelayNode({ - libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } + libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }, + routingInfos: [DefaultTestRoutingInfo] }); await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, { multiaddrs: waku2.libp2p.getMultiaddrs() diff --git a/packages/tests/tests/connection-mananger/utils.ts b/packages/tests/tests/connection-mananger/utils.ts index 2447e946a1..0ea69ed16f 100644 --- a/packages/tests/tests/connection-mananger/utils.ts +++ b/packages/tests/tests/connection-mananger/utils.ts @@ -1,6 +1,11 @@ +import { createRoutingInfo } from "@waku/utils"; + export const TestContentTopic = "/test/1/waku-light-push/utf8"; -export const ClusterId = 3; -export const TestShardInfo = { - contentTopics: [TestContentTopic], - clusterId: ClusterId +export const TestClusterId = 2; +export const TestNetworkConfig = { + clusterId: TestClusterId, + numShardsInCluster: 8 // Cannot be under 8 for nwaku 0.36.0 and below }; +export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic +}); diff --git a/packages/tests/tests/enr.node.spec.ts b/packages/tests/tests/enr.node.spec.ts index 83bd80fd01..cd66d8621a 100644 --- a/packages/tests/tests/enr.node.spec.ts +++ b/packages/tests/tests/enr.node.spec.ts @@ -6,12 +6,16 @@ import { expect } from "chai"; import { afterEachCustom, + DefaultTestClusterId, + DefaultTestContentTopic, + DefaultTestNetworkConfig, + DefaultTestNumShardsInCluster, + DefaultTestRoutingInfo, makeLogFileName, NOISE_KEY_1, ServiceNode, tearDownNodes } from "../src/index.js"; -import { DefaultTestShardInfo } from "../src/index.js"; describe("ENR Interop: ServiceNode", function () { let waku: RelayNode; @@ -29,14 +33,16 @@ describe("ENR Interop: ServiceNode", function () { store: false, filter: false, lightpush: false, - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards + clusterId: DefaultTestClusterId, + numShardsInNetwork: DefaultTestNumShardsInCluster, + contentTopic: [DefaultTestContentTopic] }); const multiAddrWithId = await nwaku.getMultiaddrWithId(); waku = await createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }); await waku.start(); await waku.dial(multiAddrWithId); @@ -64,14 +70,16 @@ describe("ENR Interop: ServiceNode", function () { store: true, filter: false, lightpush: false, - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards + clusterId: DefaultTestClusterId, + numShardsInNetwork: DefaultTestNumShardsInCluster, + contentTopic: [DefaultTestContentTopic] }); const multiAddrWithId = await nwaku.getMultiaddrWithId(); waku = await createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }); await waku.start(); await waku.dial(multiAddrWithId); @@ -99,14 +107,16 @@ describe("ENR Interop: ServiceNode", function () { store: true, filter: true, lightpush: true, - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards + clusterId: DefaultTestClusterId, + numShardsInNetwork: DefaultTestNumShardsInCluster, + contentTopic: [DefaultTestContentTopic] }); const multiAddrWithId = await nwaku.getMultiaddrWithId(); waku = await createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }); await waku.start(); await waku.dial(multiAddrWithId); diff --git a/packages/tests/tests/ephemeral.node.spec.ts b/packages/tests/tests/ephemeral.node.spec.ts index 8f8a3c15b6..848162ca4b 100644 --- a/packages/tests/tests/ephemeral.node.spec.ts +++ b/packages/tests/tests/ephemeral.node.spec.ts @@ -1,5 +1,5 @@ import { createDecoder, createEncoder } from "@waku/core"; -import { Protocols } from "@waku/interfaces"; +import { AutoSharding, Protocols } from "@waku/interfaces"; import type { IDecodedMessage, LightNode } from "@waku/interfaces"; import { generatePrivateKey, @@ -15,11 +15,7 @@ import { createEncoder as createSymEncoder } from "@waku/message-encryption/symmetric"; import { createLightNode } from "@waku/sdk"; -import { - contentTopicToPubsubTopic, - contentTopicToShardIndex, - Logger -} from "@waku/utils"; +import { createRoutingInfo, Logger } from "@waku/utils"; import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -36,15 +32,21 @@ import { const log = new Logger("test:ephemeral"); -const ClusterId = 2; +const TestClusterId = 2; +const TestNetworkConfig: AutoSharding = { + clusterId: TestClusterId, + numShardsInCluster: 8 +}; const TestContentTopic = "/test/1/ephemeral/utf8"; -const PubsubTopic = contentTopicToPubsubTopic(TestContentTopic, ClusterId); +const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic +}); const TestEncoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); -const TestDecoder = createDecoder(TestContentTopic, PubsubTopic); +const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo); const privateKey = generatePrivateKey(); const symKey = generateSymmetricKey(); @@ -57,26 +59,26 @@ const AsymEncoder = createEciesEncoder({ contentTopic: AsymContentTopic, publicKey, ephemeral: true, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); const SymEncoder = createSymEncoder({ contentTopic: SymContentTopic, symKey, ephemeral: true, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); const ClearEncoder = createEncoder({ contentTopic: TestContentTopic, ephemeral: true, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); const AsymDecoder = createEciesDecoder( AsymContentTopic, - privateKey, - PubsubTopic + TestRoutingInfo, + privateKey ); -const SymDecoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic); +const SymDecoder = createSymDecoder(SymContentTopic, TestRoutingInfo, symKey); describe("Waku Message Ephemeral field", function () { let waku: LightNode; @@ -95,8 +97,7 @@ describe("Waku Message Ephemeral field", function () { store: true, relay: true, contentTopic: contentTopics, - clusterId: ClusterId, - shard: contentTopics.map((t) => contentTopicToShardIndex(t)) + clusterId: TestClusterId }); await nwaku.ensureSubscriptionsAutosharding([ TestContentTopic, @@ -107,10 +108,7 @@ describe("Waku Message Ephemeral field", function () { waku = await createLightNode({ staticNoiseKey: NOISE_KEY_1, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }, - networkConfig: { - contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic], - clusterId: ClusterId - } + networkConfig: TestNetworkConfig }); await waku.start(); await waku.dial(await nwaku.getMultiaddrWithId()); @@ -138,17 +136,11 @@ describe("Waku Message Ephemeral field", function () { const [waku1, waku2, nimWakuMultiaddr] = await Promise.all([ createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: { - contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic], - clusterId: ClusterId - } + networkConfig: TestNetworkConfig }).then((waku) => waku.start().then(() => waku)), createLightNode({ staticNoiseKey: NOISE_KEY_2, - networkConfig: { - contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic], - clusterId: ClusterId - } + networkConfig: TestNetworkConfig }).then((waku) => waku.start().then(() => waku)), nwaku.getMultiaddrWithId() ]); @@ -200,7 +192,7 @@ describe("Waku Message Ephemeral field", function () { const ephemeralEncoder = createEncoder({ contentTopic: TestContentTopic, ephemeral: true, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); const messages: IDecodedMessage[] = []; @@ -246,9 +238,9 @@ describe("Waku Message Ephemeral field", function () { const encoder = createSymEncoder({ contentTopic: SymContentTopic, symKey, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); - const decoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic); + const decoder = createSymDecoder(SymContentTopic, TestRoutingInfo, symKey); const messages: IDecodedMessage[] = []; const callback = (msg: IDecodedMessage): void => { @@ -293,12 +285,12 @@ describe("Waku Message Ephemeral field", function () { const encoder = createEciesEncoder({ contentTopic: AsymContentTopic, publicKey: publicKey, - pubsubTopic: PubsubTopic + routingInfo: TestRoutingInfo }); const decoder = createEciesDecoder( AsymContentTopic, - privateKey, - PubsubTopic + TestRoutingInfo, + privateKey ); const messages: IDecodedMessage[] = []; diff --git a/packages/tests/tests/filter/push.node.spec.ts b/packages/tests/tests/filter/push.node.spec.ts index 3ecbf6a585..b183c64d93 100644 --- a/packages/tests/tests/filter/push.node.spec.ts +++ b/packages/tests/tests/filter/push.node.spec.ts @@ -1,5 +1,6 @@ import { LightNode, Protocols } from "@waku/interfaces"; import { utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -18,8 +19,7 @@ import { TestContentTopic, TestDecoder, TestEncoder, - TestPubsubTopic, - TestShardInfo + TestRoutingInfo } from "./utils.js"; const runTests = (strictCheckNodes: boolean): void => { @@ -32,7 +32,7 @@ const runTests = (strictCheckNodes: boolean): void => { beforeEachCustom(this, async () => { ctx = this.ctx; - [serviceNodes, waku] = await runMultipleNodes(this.ctx, TestShardInfo, { + [serviceNodes, waku] = await runMultipleNodes(this.ctx, TestRoutingInfo, { lightpush: true, filter: true }); @@ -59,7 +59,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: testItem.value, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); @@ -78,7 +78,7 @@ const runTests = (strictCheckNodes: boolean): void => { payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"), timestamp: testItem as any }, - TestPubsubTopic + TestRoutingInfo ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( @@ -88,7 +88,7 @@ const runTests = (strictCheckNodes: boolean): void => { expectedMessageText: messageText, checkTimestamp: false, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); // Check if the timestamp matches @@ -117,7 +117,7 @@ const runTests = (strictCheckNodes: boolean): void => { payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"), timestamp: "2023-09-06T12:05:38.609Z" as any }, - TestPubsubTopic + TestRoutingInfo ); // Verify that no message was received @@ -133,20 +133,21 @@ const runTests = (strictCheckNodes: boolean): void => { ); await delay(400); + const wrongContentTopic = "/wrong/1/ContentTopic/proto"; await serviceNodes.sendRelayMessage( { - contentTopic: TestContentTopic, + contentTopic: wrongContentTopic, payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"), timestamp: BigInt(Date.now()) * BigInt(1000000) }, - "WrongContentTopic" + createRoutingInfo(TestRoutingInfo.networkConfig, { + contentTopic: "/wrong/1/ContentTopic/proto" + }) ); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(false); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + false + ); }); it("Check message with no pubsub topic is not received", async function () { @@ -184,7 +185,7 @@ const runTests = (strictCheckNodes: boolean): void => { payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"), timestamp: BigInt(Date.now()) * BigInt(1000000) }, - TestPubsubTopic + TestRoutingInfo ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( @@ -205,7 +206,7 @@ const runTests = (strictCheckNodes: boolean): void => { timestamp: BigInt(Date.now()) * BigInt(1000000), payload: undefined as any }, - TestPubsubTopic + TestRoutingInfo ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( @@ -226,7 +227,7 @@ const runTests = (strictCheckNodes: boolean): void => { payload: 12345 as unknown as string, timestamp: BigInt(Date.now()) * BigInt(1000000) }, - TestPubsubTopic + TestRoutingInfo ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( @@ -267,12 +268,12 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: "M1", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); serviceNodes.messageCollector.verifyReceivedMessage(1, { expectedMessageText: "M2", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); @@ -289,7 +290,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: "M1", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); await teardownNodesWithRedundancy(serviceNodes, []); @@ -297,7 +298,7 @@ const runTests = (strictCheckNodes: boolean): void => { ctx, 2, false, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true, @@ -334,7 +335,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(1, { expectedMessageText: "M2", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); diff --git a/packages/tests/tests/filter/subscribe.node.spec.ts b/packages/tests/tests/filter/subscribe.node.spec.ts index d8a0c4ea0d..6fd66b9ef2 100644 --- a/packages/tests/tests/filter/subscribe.node.spec.ts +++ b/packages/tests/tests/filter/subscribe.node.spec.ts @@ -8,6 +8,7 @@ import { symmetric } from "@waku/message-encryption"; import { Protocols, utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -27,15 +28,15 @@ import { } from "../../src/index.js"; import { - ClusterId, messagePayload, messageText, - ShardIndex, + TestClusterId, TestContentTopic, TestDecoder, TestEncoder, - TestPubsubTopic, - TestShardInfo + TestNetworkConfig, + TestRoutingInfo, + TestShardIndex } from "./utils.js"; const runTests = (strictCheckNodes: boolean): void => { @@ -47,7 +48,7 @@ const runTests = (strictCheckNodes: boolean): void => { beforeEachCustom(this, async () => { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, undefined, strictCheckNodes ); @@ -84,12 +85,12 @@ const runTests = (strictCheckNodes: boolean): void => { const encoder = ecies.createEncoder({ contentTopic: TestContentTopic, publicKey, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); const decoder = ecies.createDecoder( TestContentTopic, - privateKey, - TestPubsubTopic + TestRoutingInfo, + privateKey ); await waku.filter.subscribe( @@ -106,7 +107,7 @@ const runTests = (strictCheckNodes: boolean): void => { expectedMessageText: messageText, expectedContentTopic: TestContentTopic, expectedVersion: 1, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); await serviceNodes.confirmMessageLength(2); @@ -117,12 +118,12 @@ const runTests = (strictCheckNodes: boolean): void => { const encoder = symmetric.createEncoder({ contentTopic: TestContentTopic, symKey, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); const decoder = symmetric.createDecoder( TestContentTopic, - symKey, - TestPubsubTopic + TestRoutingInfo, + symKey ); await waku.filter.subscribe( @@ -139,7 +140,7 @@ const runTests = (strictCheckNodes: boolean): void => { expectedMessageText: messageText, expectedContentTopic: TestContentTopic, expectedVersion: 1, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); await serviceNodes.confirmMessageLength(2); @@ -158,7 +159,7 @@ const runTests = (strictCheckNodes: boolean): void => { contentTopic: TestContentTopic, payload: utf8ToBytes(messageText) }); - await serviceNodes.sendRelayMessage(relayMessage, TestPubsubTopic); + await serviceNodes.sendRelayMessage(relayMessage, TestRoutingInfo); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( true @@ -166,7 +167,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); await serviceNodes.confirmMessageLength(1); @@ -219,18 +220,20 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); // Modify subscription to include a new content topic and send a message. const newMessageText = "Filtering still works!"; - const newMessagePayload = { payload: utf8ToBytes(newMessageText) }; const newContentTopic = "/test/2/waku-filter/default"; + const newRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: newContentTopic + }); const newEncoder = createEncoder({ contentTopic: newContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: newRoutingInfo }); - const newDecoder = createDecoder(newContentTopic, TestPubsubTopic); + const newDecoder = createDecoder(newContentTopic, newRoutingInfo); await waku.filter.subscribe( newDecoder, serviceNodes.messageCollector.callback @@ -244,26 +247,30 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(1, { expectedContentTopic: newContentTopic, expectedMessageText: newMessageText, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); // Send another message on the initial content topic to verify it still works. - await waku.lightPush.send(TestEncoder, newMessagePayload); + const thirdMessageText = "Filtering still works on first subscription!"; + const thirdMessagePayload = { payload: utf8ToBytes(thirdMessageText) }; + await waku.lightPush.send(TestEncoder, thirdMessagePayload); expect(await serviceNodes.messageCollector.waitForMessages(3)).to.eq( true ); serviceNodes.messageCollector.verifyReceivedMessage(2, { - expectedMessageText: newMessageText, + expectedMessageText: thirdMessageText, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); - await serviceNodes.confirmMessageLength(3); + // This relies on nwaku not emptying the relay cache + // We received the 3 messages already, what else are checking? + // await serviceNodes.confirmMessageLength(3); }); it("Subscribe and receives messages on 20 topics", async function () { const topicCount = 20; - const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic }); + const td = generateTestData(topicCount, TestNetworkConfig); // Subscribe to all 20 topics. for (let i = 0; i < topicCount; i++) { @@ -288,7 +295,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(index, { expectedContentTopic: topic, expectedMessageText: `Message for Topic ${index + 1}`, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); @@ -297,7 +304,7 @@ const runTests = (strictCheckNodes: boolean): void => { it.skip("Subscribe to 30 topics in separate streams (30 streams for Filter is limit) at once and receives messages", async function () { this.timeout(100_000); const topicCount = 30; - const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic }); + const td = generateTestData(topicCount, TestNetworkConfig); for (let i = 0; i < topicCount; i++) { await waku.filter.subscribe( @@ -321,7 +328,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(index, { expectedContentTopic: topic, expectedMessageText: `Message for Topic ${index + 1}`, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); @@ -329,7 +336,7 @@ const runTests = (strictCheckNodes: boolean): void => { it("Subscribe to 100 topics (new limit) at once and receives messages", async function () { this.timeout(100_000); const topicCount = 100; - const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic }); + const td = generateTestData(topicCount, TestNetworkConfig); await waku.filter.subscribe( td.decoders, @@ -351,14 +358,14 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(index, { expectedContentTopic: topic, expectedMessageText: `Message for Topic ${index + 1}`, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); it("Error when try to subscribe to more than 101 topics (new limit)", async function () { const topicCount = 101; - const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic }); + const td = generateTestData(topicCount, TestNetworkConfig); try { await waku.filter.subscribe( @@ -382,14 +389,10 @@ const runTests = (strictCheckNodes: boolean): void => { it("Overlapping topic subscription", async function () { // Define two sets of test data with overlapping topics. const topicCount1 = 2; - const td1 = generateTestData(topicCount1, { - pubsubTopic: TestPubsubTopic - }); + const td1 = generateTestData(topicCount1, TestNetworkConfig); const topicCount2 = 4; - const td2 = generateTestData(topicCount2, { - pubsubTopic: TestPubsubTopic - }); + const td2 = generateTestData(topicCount2, TestNetworkConfig); await waku.filter.subscribe( td1.decoders, @@ -445,31 +448,25 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: "M1", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); serviceNodes.messageCollector.verifyReceivedMessage(1, { expectedMessageText: "M2", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); TEST_STRING.forEach((testItem) => { it(`Subscribe to topic containing ${testItem.description} and receive message`, async function () { - const newContentTopic = testItem.value; + const newContentTopic = `/test/0/${testItem.description}/test`; const newEncoder = waku.createEncoder({ contentTopic: newContentTopic, - shardInfo: { - clusterId: ClusterId, - shard: ShardIndex - } + shardId: TestShardIndex }); const newDecoder = waku.createDecoder({ contentTopic: newContentTopic, - shardInfo: { - clusterId: ClusterId, - shard: ShardIndex - } + shardId: TestShardIndex }); await waku.filter.subscribe( @@ -484,7 +481,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, expectedContentTopic: newContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); @@ -497,11 +494,15 @@ const runTests = (strictCheckNodes: boolean): void => { await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") }); const newContentTopic = "/test/2/waku-filter/default"; + const newRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: newContentTopic + }); + const newEncoder = createEncoder({ contentTopic: newContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: newRoutingInfo }); - const newDecoder = createDecoder(newContentTopic, TestPubsubTopic); + const newDecoder = createDecoder(newContentTopic, newRoutingInfo); await waku.filter.subscribe( newDecoder, serviceNodes.messageCollector.callback @@ -516,12 +517,12 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: "M1", expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); serviceNodes.messageCollector.verifyReceivedMessage(1, { expectedContentTopic: newContentTopic, expectedMessageText: "M2", - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: newRoutingInfo.pubsubTopic }); }); @@ -570,96 +571,127 @@ const runTests = (strictCheckNodes: boolean): void => { expectedContentTopic: TestContentTopic }); }); + }); - it("Subscribe and receive messages from 2 nwaku nodes each with different pubsubtopics", async function () { + describe("Filter subscribe test with static sharding", function () { + this.timeout(100000); + let waku: LightNode; + let serviceNodes: ServiceNodesFleet; + const networkConfig = { clusterId: TestClusterId }; + const routingInfo = createRoutingInfo(networkConfig, { shardId: 3 }); + + beforeEachCustom(this, async () => { + [serviceNodes, waku] = await runMultipleNodes( + this.ctx, + routingInfo, + {}, + strictCheckNodes + ); + }); + + afterEachCustom(this, async () => { + await teardownNodesWithRedundancy(serviceNodes, waku); + }); + }); +}; + +[true, false].map((strictCheckNodes) => runTests(strictCheckNodes)); + +const runTestsStatic = (strictCheckNodes: boolean): void => { + describe(`Waku Filter: Subscribe: Multiple Service Nodes on Static Shard: Strict Check mode: ${strictCheckNodes}`, function () { + this.timeout(100000); + let waku: LightNode; + let serviceNodes: ServiceNodesFleet; + const staticNetworkConfig = { clusterId: 9 }; + const routingInfoShard1 = createRoutingInfo(staticNetworkConfig, { + shardId: 1 + }); + const encoderShard1 = createEncoder({ + contentTopic: TestContentTopic, + routingInfo: routingInfoShard1 + }); + const decoderShard1 = createDecoder(TestContentTopic, routingInfoShard1); + + beforeEachCustom(this, async () => { + [serviceNodes, waku] = await runMultipleNodes( + this.ctx, + routingInfoShard1, + undefined, + strictCheckNodes + ); + }); + + afterEachCustom(this, async () => { + await teardownNodesWithRedundancy(serviceNodes, waku); + }); + + it("Subscribe and receive messages from 2 nwaku nodes each with different static shards", async function () { await waku.filter.subscribe( - TestDecoder, + decoderShard1, serviceNodes.messageCollector.callback ); - // Set up and start a new nwaku node with customPubsubTopic1 + // Set up and start a new nwaku node on different shard const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); try { - const customContentTopic = "/test/4/waku-filter/default"; - const customDecoder = createDecoder(customContentTopic, { - clusterId: ClusterId, - shard: 4 + const routingInfoShard2 = createRoutingInfo(staticNetworkConfig, { + shardId: 2 }); - const customEncoder = createEncoder({ - contentTopic: customContentTopic, - pubsubTopicShardInfo: { clusterId: ClusterId, shard: 4 } + const contentTopic2 = "/test/4/waku-filter/default"; + const decoderShard2 = createDecoder(contentTopic2, routingInfoShard2); + const encoderShard2 = createEncoder({ + contentTopic: contentTopic2, + routingInfo: routingInfoShard2 }); await nwaku2.start({ filter: true, lightpush: true, relay: true, - clusterId: ClusterId, - shard: [4] + clusterId: TestClusterId, + shard: [2] }); await waku.dial(await nwaku2.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); - await nwaku2.ensureSubscriptions([customDecoder.pubsubTopic]); + // TODO + // await nwaku2.ensureSubscriptions([customDecoder.pubsubTopic]); const messageCollector2 = new MessageCollector(); - await waku.filter.subscribe(customDecoder, messageCollector2.callback); + await waku.filter.subscribe(decoderShard2, messageCollector2.callback); - // Making sure that messages are send and reveiced for both subscriptions + // Making sure that messages are send and received for both subscriptions // While loop is done because of https://github.com/waku-org/js-waku/issues/1606 while ( - !(await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestDecoder.pubsubTopic - })) || - !(await messageCollector2.waitForMessages(1, { - pubsubTopic: customDecoder.pubsubTopic - })) + !(await serviceNodes.messageCollector.waitForMessages(1)) || + !(await messageCollector2.waitForMessages(1)) ) { - await waku.lightPush.send(TestEncoder, { + await waku.lightPush.send(encoderShard1, { payload: utf8ToBytes("M1") }); - await waku.lightPush.send(customEncoder, { + await waku.lightPush.send(encoderShard2, { payload: utf8ToBytes("M2") }); } serviceNodes.messageCollector.verifyReceivedMessage(0, { - expectedContentTopic: TestDecoder.contentTopic, - expectedPubsubTopic: TestDecoder.pubsubTopic, + expectedContentTopic: encoderShard1.contentTopic, + expectedPubsubTopic: routingInfoShard1.pubsubTopic, expectedMessageText: "M1" }); messageCollector2.verifyReceivedMessage(0, { - expectedContentTopic: customDecoder.contentTopic, - expectedPubsubTopic: customDecoder.pubsubTopic, + expectedContentTopic: encoderShard2.contentTopic, + expectedPubsubTopic: routingInfoShard2.pubsubTopic, expectedMessageText: "M2" }); } catch (e) { await tearDownNodes([nwaku2], []); } }); - - it("Should fail to subscribe with decoder with wrong shard", async function () { - const wrongDecoder = createDecoder(TestDecoder.contentTopic, { - clusterId: ClusterId, - shard: 5 - }); - - // this subscription object is set up with the `customPubsubTopic1` but we're passing it a Decoder with the `customPubsubTopic2` - try { - await waku.filter.subscribe( - wrongDecoder, - serviceNodes.messageCollector.callback - ); - } catch (error) { - expect((error as Error).message).to.include( - `Pubsub topic ${wrongDecoder.pubsubTopic} has not been configured on this instance.` - ); - } - }); }); }; -[true, false].map((strictCheckNodes) => runTests(strictCheckNodes)); +[true, false].map((strictCheckNodes) => runTestsStatic(strictCheckNodes)); diff --git a/packages/tests/tests/filter/unsubscribe.node.spec.ts b/packages/tests/tests/filter/unsubscribe.node.spec.ts index 27816742cd..72b91acd79 100644 --- a/packages/tests/tests/filter/unsubscribe.node.spec.ts +++ b/packages/tests/tests/filter/unsubscribe.node.spec.ts @@ -1,6 +1,7 @@ import { createDecoder, createEncoder } from "@waku/core"; import { type LightNode } from "@waku/interfaces"; import { utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -13,13 +14,13 @@ import { } from "../../src/index.js"; import { - ClusterId, messagePayload, messageText, TestContentTopic, TestDecoder, TestEncoder, - TestPubsubTopic + TestNetworkConfig, + TestRoutingInfo } from "./utils.js"; const runTests = (strictCheckNodes: boolean): void => { @@ -30,14 +31,10 @@ const runTests = (strictCheckNodes: boolean): void => { let serviceNodes: ServiceNodesFleet; beforeEachCustom(this, async () => { - [serviceNodes, waku] = await runMultipleNodes( - this.ctx, - { - contentTopics: [TestContentTopic], - clusterId: ClusterId - }, - { filter: true, lightpush: true } - ); + [serviceNodes, waku] = await runMultipleNodes(this.ctx, TestRoutingInfo, { + filter: true, + lightpush: true + }); }); afterEachCustom(this, async () => { @@ -77,12 +74,15 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.callback ); - const newContentTopic = "/test/2/waku-filter"; + const newContentTopic = "/test/2/waku-filter/proto"; + const newRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: newContentTopic + }); const newEncoder = createEncoder({ contentTopic: newContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: newRoutingInfo }); - const newDecoder = createDecoder(newContentTopic, TestPubsubTopic); + const newDecoder = createDecoder(newContentTopic, newRoutingInfo); await waku.filter.subscribe( newDecoder, serviceNodes.messageCollector.callback @@ -103,7 +103,6 @@ const runTests = (strictCheckNodes: boolean): void => { // Check that from 4 messages send 3 were received expect(serviceNodes.messageCollector.count).to.eq(3); - await serviceNodes.confirmMessageLength(4); }); it("Unsubscribe 2 topics - node subscribed to 2 topics", async function () { @@ -112,12 +111,15 @@ const runTests = (strictCheckNodes: boolean): void => { TestDecoder, serviceNodes.messageCollector.callback ); - const newContentTopic = "/test/2/waku-filter"; + const newContentTopic = "/test/2/waku-filter/proto"; + const newRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: newContentTopic + }); const newEncoder = createEncoder({ contentTopic: newContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: newRoutingInfo }); - const newDecoder = createDecoder(newContentTopic, TestPubsubTopic); + const newDecoder = createDecoder(newContentTopic, newRoutingInfo); await waku.filter.subscribe( newDecoder, serviceNodes.messageCollector.callback @@ -140,7 +142,6 @@ const runTests = (strictCheckNodes: boolean): void => { // Check that from 4 messages send 2 were received expect(serviceNodes.messageCollector.count).to.eq(2); - await serviceNodes.confirmMessageLength(4); }); it("Unsubscribe topics the node is not subscribed to", async function () { @@ -159,7 +160,12 @@ const runTests = (strictCheckNodes: boolean): void => { // Unsubscribe from topics that the node is not not subscribed to and send again await waku.filter.unsubscribe( - createDecoder("/test/2/waku-filter", TestDecoder.pubsubTopic) + createDecoder( + "/test/2/waku-filter/proto", + createRoutingInfo(TestNetworkConfig, { + contentTopic: "/test/2/waku-filter/proto" + }) + ) ); await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M2") }); expect(await serviceNodes.messageCollector.waitForMessages(2)).to.eq( @@ -174,7 +180,7 @@ const runTests = (strictCheckNodes: boolean): void => { it("Unsubscribe from 100 topics (new limit) at once and receives messages", async function () { this.timeout(100_000); const topicCount = 100; - const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic }); + const td = generateTestData(topicCount, TestNetworkConfig); await waku.filter.subscribe( td.decoders, @@ -194,7 +200,7 @@ const runTests = (strictCheckNodes: boolean): void => { serviceNodes.messageCollector.verifyReceivedMessage(index, { expectedContentTopic: topic, expectedMessageText: `Message for Topic ${index + 1}`, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); diff --git a/packages/tests/tests/filter/utils.ts b/packages/tests/tests/filter/utils.ts index 01a5220b05..a679f5337a 100644 --- a/packages/tests/tests/filter/utils.ts +++ b/packages/tests/tests/filter/utils.ts @@ -1,142 +1,27 @@ import { createDecoder, createEncoder } from "@waku/core"; import { - CreateNodeOptions, - DefaultNetworkConfig, - IWaku, - LightNode, - NetworkConfig, - Protocols -} from "@waku/interfaces"; -import { createLightNode } from "@waku/sdk"; -import { - contentTopicToPubsubTopic, contentTopicToShardIndex, - derivePubsubTopicsFromNetworkConfig, + createRoutingInfo, Logger } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; -import { Context } from "mocha"; -import pRetry from "p-retry"; - -import { - NOISE_KEY_1, - ServiceNodesFleet, - waitForConnections -} from "../../src/index.js"; // Constants for test configuration. export const log = new Logger("test:filter"); export const TestContentTopic = "/test/1/waku-filter/default"; -export const ClusterId = 2; -export const ShardIndex = contentTopicToShardIndex(TestContentTopic); -export const TestShardInfo = { - contentTopics: [TestContentTopic], - clusterId: ClusterId +export const TestClusterId = 2; +export const TestShardIndex = contentTopicToShardIndex(TestContentTopic); +export const TestNetworkConfig = { + clusterId: TestClusterId, + numShardsInCluster: 8 }; -export const TestPubsubTopic = contentTopicToPubsubTopic( - TestContentTopic, - ClusterId -); +export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic +}); export const TestEncoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); -export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic); +export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo); export const messageText = "Filtering works!"; export const messagePayload = { payload: utf8ToBytes(messageText) }; - -export async function runMultipleNodes( - context: Context, - networkConfig: NetworkConfig = DefaultNetworkConfig, - strictChecking: boolean = false, - numServiceNodes = 3, - withoutFilter = false -): Promise<[ServiceNodesFleet, LightNode]> { - const pubsubTopics = derivePubsubTopicsFromNetworkConfig(networkConfig); - // create numServiceNodes nodes - const serviceNodes = await ServiceNodesFleet.createAndRun( - context, - numServiceNodes, - strictChecking, - networkConfig, - undefined, - withoutFilter - ); - - const wakuOptions: CreateNodeOptions = { - staticNoiseKey: NOISE_KEY_1, - libp2p: { - addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } - } - }; - - log.info("Starting js waku node with :", JSON.stringify(wakuOptions)); - let waku: LightNode | undefined; - try { - waku = await createLightNode(wakuOptions); - await waku.start(); - } catch (error) { - log.error("jswaku node failed to start:", error); - } - - if (!waku) { - throw new Error("Failed to initialize waku"); - } - - for (const node of serviceNodes.nodes) { - await waku.dial(await node.getMultiaddrWithId()); - await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); - await node.ensureSubscriptions(pubsubTopics); - - const wakuConnections = waku.libp2p.getConnections(); - - if (wakuConnections.length < 1) { - throw new Error(`Expected at least 1 connection for js-waku.`); - } - - await node.waitForLog(waku.libp2p.peerId.toString(), 100); - } - - await waitForConnections(numServiceNodes, waku); - - return [serviceNodes, waku]; -} - -export async function teardownNodesWithRedundancy( - serviceNodes: ServiceNodesFleet, - wakuNodes: IWaku | IWaku[] -): Promise { - const wNodes = Array.isArray(wakuNodes) ? wakuNodes : [wakuNodes]; - - const stopNwakuNodes = serviceNodes.nodes.map(async (node) => { - await pRetry( - async () => { - try { - await node.stop(); - } catch (error) { - log.error("Service Node failed to stop:", error); - throw error; - } - }, - { retries: 3 } - ); - }); - - const stopWakuNodes = wNodes.map(async (waku) => { - if (waku) { - await pRetry( - async () => { - try { - await waku.stop(); - } catch (error) { - log.error("Waku failed to stop:", error); - throw error; - } - }, - { retries: 3 } - ); - } - }); - - await Promise.all([...stopNwakuNodes, ...stopWakuNodes]); -} diff --git a/packages/tests/tests/light-push/index.node.spec.ts b/packages/tests/tests/light-push/index.node.spec.ts index b57429c410..d750c6c77f 100644 --- a/packages/tests/tests/light-push/index.node.spec.ts +++ b/packages/tests/tests/light-push/index.node.spec.ts @@ -14,14 +14,11 @@ import { } from "../../src/index.js"; import { - ClusterId, messagePayload, messageText, - ShardIndex, TestContentTopic, TestEncoder, - TestPubsubTopic, - TestShardInfo + TestRoutingInfo } from "./utils.js"; const runTests = (strictNodeCheck: boolean): void => { @@ -35,7 +32,7 @@ const runTests = (strictNodeCheck: boolean): void => { beforeEachCustom(this, async () => { [serviceNodes, waku] = await runMultipleNodes( this.ctx, - TestShardInfo, + TestRoutingInfo, { lightpush: true, filter: true }, strictNodeCheck, numServiceNodes, @@ -54,20 +51,18 @@ const runTests = (strictNodeCheck: boolean): void => { }); expect(pushResponse.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + true + ); serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: testItem.value, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); - // TODO: skiped till https://github.com/waku-org/nwaku/issues/3369 resolved + // TODO: skipped till https://github.com/waku-org/nwaku/issues/3369 resolved it.skip("Push 30 different messages", async function () { const generateMessageText = (index: number): string => `M${index}`; @@ -79,17 +74,15 @@ const runTests = (strictNodeCheck: boolean): void => { expect(pushResponse.successes.length).to.eq(numServiceNodes); } - expect( - await serviceNodes.messageCollector.waitForMessages(30, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(30)).to.eq( + true + ); for (let i = 0; i < 30; i++) { serviceNodes.messageCollector.verifyReceivedMessage(i, { expectedMessageText: generateMessageText(i), expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); } }); @@ -105,21 +98,16 @@ const runTests = (strictNodeCheck: boolean): void => { ProtocolError.EMPTY_PAYLOAD ); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(false); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + false + ); }); - TEST_STRING.forEach((testItem) => { + [{ description: "short", value: "hi" }].forEach((testItem) => { it(`Push message with content topic containing ${testItem.description}`, async function () { + const contentTopic = `/test/1/${testItem.value}/proto`; const customEncoder = waku.createEncoder({ - contentTopic: testItem.value, - shardInfo: { - clusterId: ClusterId, - shard: ShardIndex - } + contentTopic }); const pushResponse = await waku.lightPush.send( customEncoder, @@ -129,13 +117,13 @@ const runTests = (strictNodeCheck: boolean): void => { expect( await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic + contentTopic }) ).to.eq(true); serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, - expectedContentTopic: testItem.value, - expectedPubsubTopic: TestPubsubTopic + expectedContentTopic: contentTopic, + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); @@ -144,7 +132,7 @@ const runTests = (strictNodeCheck: boolean): void => { const customTestEncoder = createEncoder({ contentTopic: TestContentTopic, metaSetter: () => new Uint8Array(10), - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); const pushResponse = await waku.lightPush.send( @@ -153,22 +141,20 @@ const runTests = (strictNodeCheck: boolean): void => { ); expect(pushResponse.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + true + ); serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); it("Fails to push message with large meta", async function () { const customTestEncoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopic: TestPubsubTopic, + routingInfo: TestRoutingInfo, metaSetter: () => new Uint8Array(105024) // see the note below *** }); @@ -176,7 +162,7 @@ const runTests = (strictNodeCheck: boolean): void => { // `nwaku` establishes the max lightpush msg size as `const MaxRpcSize* = MaxWakuMessageSize + 64 * 1024` // see: https://github.com/waku-org/nwaku/blob/07beea02095035f4f4c234ec2dec1f365e6955b8/waku/waku_lightpush/rpc_codec.nim#L15 // In the PR https://github.com/waku-org/nwaku/pull/2298 we reduced the MaxWakuMessageSize - // from 1MiB to 150KiB. Therefore, the 105024 number comes from substracting ( 1*2^20 - 150*2^10 ) + // from 1MiB to 150KiB. Therefore, the 105024 number comes from subtracting ( 1*2^20 - 150*2^10 ) // to the original 10^6 that this test had when MaxWakuMessageSize == 1*2^20 const pushResponse = await waku.lightPush.send( @@ -188,11 +174,9 @@ const runTests = (strictNodeCheck: boolean): void => { expect(pushResponse.failures?.map((failure) => failure.error)).to.include( ProtocolError.REMOTE_PEER_REJECTED ); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(false); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + false + ); }); it("Push message with rate limit", async function () { @@ -212,15 +196,13 @@ const runTests = (strictNodeCheck: boolean): void => { }); expect(pushResponse.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + true + ); serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); @@ -236,16 +218,14 @@ const runTests = (strictNodeCheck: boolean): void => { }); expect(pushResponse.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + true + ); serviceNodes.messageCollector.verifyReceivedMessage(0, { expectedMessageText: messageText, expectedTimestamp: testItem, expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }); }); }); @@ -268,11 +248,9 @@ const runTests = (strictNodeCheck: boolean): void => { expect(pushResponse.failures?.map((failure) => failure.error)).to.include( ProtocolError.SIZE_TOO_BIG ); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestPubsubTopic - }) - ).to.eq(false); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( + false + ); }); }); }; diff --git a/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts b/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts index ac82a0cab5..a446f3b56b 100644 --- a/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts +++ b/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts @@ -1,141 +1,144 @@ -import { createEncoder } from "@waku/core"; -import { LightNode, Protocols } from "@waku/interfaces"; -import { contentTopicToPubsubTopic } from "@waku/utils"; -import { utf8ToBytes } from "@waku/utils/bytes"; -import { expect } from "chai"; +// TODO: This test is useless because the content topics all start +// with `/test/` meaning they are in the same shard -import { - afterEachCustom, - beforeEachCustom, - makeLogFileName, - MessageCollector, - runMultipleNodes, - ServiceNode, - ServiceNodesFleet, - tearDownNodes, - teardownNodesWithRedundancy -} from "../../src/index.js"; - -import { ClusterId, TestEncoder } from "./utils.js"; - -describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () { - this.timeout(30000); - const numServiceNodes = 2; - - let waku: LightNode; - let serviceNodes: ServiceNodesFleet; - - const customEncoder2 = createEncoder({ - contentTopic: "/test/2/waku-light-push/utf8", - pubsubTopic: contentTopicToPubsubTopic( - "/test/2/waku-light-push/utf8", - ClusterId - ) - }); - - beforeEachCustom(this, async () => { - [serviceNodes, waku] = await runMultipleNodes( - this.ctx, - { - clusterId: ClusterId, - contentTopics: [TestEncoder.contentTopic, customEncoder2.contentTopic] - }, - { lightpush: true, filter: true }, - false, - numServiceNodes, - false - ); - }); - - afterEachCustom(this, async () => { - await teardownNodesWithRedundancy(serviceNodes, waku); - }); - - it("Subscribe and receive messages on 2 different pubsubtopics", async function () { - const pushResponse1 = await waku.lightPush.send(TestEncoder, { - payload: utf8ToBytes("M1") - }); - const pushResponse2 = await waku.lightPush.send(customEncoder2, { - payload: utf8ToBytes("M2") - }); - - expect(pushResponse1.successes.length).to.eq(numServiceNodes); - expect(pushResponse2.successes.length).to.eq(numServiceNodes); - - const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]); - const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]); - - expect( - await messageCollector1.waitForMessages(1, { - pubsubTopic: TestEncoder.pubsubTopic - }) - ).to.eq(true); - - expect( - await messageCollector2.waitForMessages(1, { - pubsubTopic: customEncoder2.pubsubTopic - }) - ).to.eq(true); - - messageCollector1.verifyReceivedMessage(0, { - expectedMessageText: "M1", - expectedContentTopic: TestEncoder.contentTopic, - expectedPubsubTopic: TestEncoder.pubsubTopic - }); - - messageCollector2.verifyReceivedMessage(0, { - expectedMessageText: "M2", - expectedContentTopic: customEncoder2.contentTopic, - expectedPubsubTopic: customEncoder2.pubsubTopic - }); - }); - - it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () { - // Set up and start a new nwaku node with Default PubsubTopic - const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); - - try { - await nwaku2.start({ - filter: true, - lightpush: true, - relay: true, - clusterId: ClusterId, - shard: [2] - }); - await nwaku2.ensureSubscriptionsAutosharding([ - customEncoder2.pubsubTopic - ]); - await waku.dial(await nwaku2.getMultiaddrWithId()); - await waku.waitForPeers([Protocols.LightPush]); - - const messageCollector2 = new MessageCollector(nwaku2); - - await waku.lightPush.send(TestEncoder, { - payload: utf8ToBytes("M1") - }); - await waku.lightPush.send(customEncoder2, { - payload: utf8ToBytes("M2") - }); - - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: TestEncoder.pubsubTopic - }); - await messageCollector2.waitForMessagesAutosharding(1, { - contentTopic: customEncoder2.contentTopic - }); - - serviceNodes.messageCollector.verifyReceivedMessage(0, { - expectedMessageText: "M1", - expectedContentTopic: TestEncoder.contentTopic, - expectedPubsubTopic: TestEncoder.pubsubTopic - }); - messageCollector2.verifyReceivedMessage(0, { - expectedMessageText: "M2", - expectedContentTopic: customEncoder2.contentTopic, - expectedPubsubTopic: customEncoder2.pubsubTopic - }); - } catch (e) { - await tearDownNodes([nwaku2], []); - } - }); -}); +// import { createEncoder } from "@waku/core"; +// import { LightNode, Protocols } from "@waku/interfaces"; +// import { contentTopicToPubsubTopic } from "@waku/utils"; +// import { utf8ToBytes } from "@waku/utils/bytes"; +// import { expect } from "chai"; +// +// import { +// afterEachCustom, +// beforeEachCustom, +// makeLogFileName, +// MessageCollector, +// runMultipleNodes, +// ServiceNode, +// ServiceNodesFleet, +// tearDownNodes, +// teardownNodesWithRedundancy +// } from "../../src/index.js"; +// +// import { TestClusterId, TestEncoder } from "./utils.js"; +// +// describe("Waku Light Push (Autosharding): Multiple Shards", function () { +// this.timeout(30000); +// const numServiceNodes = 2; +// +// let waku: LightNode; +// let serviceNodes: ServiceNodesFleet; +// +// const customEncoder2 = createEncoder({ +// contentTopic: "/test/2/waku-light-push/utf8", +// pubsubTopic: contentTopicToPubsubTopic( +// "/test/2/waku-light-push/utf8", +// TestClusterId +// ) +// }); +// +// beforeEachCustom(this, async () => { +// [serviceNodes, waku] = await runMultipleNodes( +// this.ctx, +// { +// clusterId: TestClusterId, +// contentTopics: [TestEncoder.contentTopic, customEncoder2.contentTopic] +// }, +// { lightpush: true, filter: true }, +// false, +// numServiceNodes, +// false +// ); +// }); +// +// afterEachCustom(this, async () => { +// await teardownNodesWithRedundancy(serviceNodes, waku); +// }); +// +// it("Subscribe and receive messages on 2 different pubsubtopics", async function () { +// const pushResponse1 = await waku.lightPush.send(TestEncoder, { +// payload: utf8ToBytes("M1") +// }); +// const pushResponse2 = await waku.lightPush.send(customEncoder2, { +// payload: utf8ToBytes("M2") +// }); +// +// expect(pushResponse1.successes.length).to.eq(numServiceNodes); +// expect(pushResponse2.successes.length).to.eq(numServiceNodes); +// +// const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]); +// const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]); +// +// expect( +// await messageCollector1.waitForMessages(1, { +// pubsubTopic: TestEncoder.pubsubTopic +// }) +// ).to.eq(true); +// +// expect( +// await messageCollector2.waitForMessages(1, { +// pubsubTopic: customEncoder2.pubsubTopic +// }) +// ).to.eq(true); +// +// messageCollector1.verifyReceivedMessage(0, { +// expectedMessageText: "M1", +// expectedContentTopic: TestEncoder.contentTopic, +// expectedPubsubTopic: TestEncoder.pubsubTopic +// }); +// +// messageCollector2.verifyReceivedMessage(0, { +// expectedMessageText: "M2", +// expectedContentTopic: customEncoder2.contentTopic, +// expectedPubsubTopic: customEncoder2.pubsubTopic +// }); +// }); +// +// it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () { +// // Set up and start a new nwaku node with Default PubsubTopic +// const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); +// +// try { +// await nwaku2.start({ +// filter: true, +// lightpush: true, +// relay: true, +// clusterId: TestClusterId, +// shard: [2] +// }); +// await nwaku2.ensureSubscriptionsAutosharding([ +// customEncoder2.pubsubTopic +// ]); +// await waku.dial(await nwaku2.getMultiaddrWithId()); +// await waku.waitForPeers([Protocols.LightPush]); +// +// const messageCollector2 = new MessageCollector(nwaku2); +// +// await waku.lightPush.send(TestEncoder, { +// payload: utf8ToBytes("M1") +// }); +// await waku.lightPush.send(customEncoder2, { +// payload: utf8ToBytes("M2") +// }); +// +// await serviceNodes.messageCollector.waitForMessages(1, { +// pubsubTopic: TestEncoder.pubsubTopic +// }); +// await messageCollector2.waitForMessagesAutosharding(1, { +// contentTopic: customEncoder2.contentTopic +// }); +// +// serviceNodes.messageCollector.verifyReceivedMessage(0, { +// expectedMessageText: "M1", +// expectedContentTopic: TestEncoder.contentTopic, +// expectedPubsubTopic: TestEncoder.pubsubTopic +// }); +// messageCollector2.verifyReceivedMessage(0, { +// expectedMessageText: "M2", +// expectedContentTopic: customEncoder2.contentTopic, +// expectedPubsubTopic: customEncoder2.pubsubTopic +// }); +// } catch (e) { +// await tearDownNodes([nwaku2], []); +// } +// }); +// }); diff --git a/packages/tests/tests/light-push/utils.ts b/packages/tests/tests/light-push/utils.ts index 8538d48ffa..85db44b426 100644 --- a/packages/tests/tests/light-push/utils.ts +++ b/packages/tests/tests/light-push/utils.ts @@ -1,43 +1,22 @@ import { createEncoder } from "@waku/core"; -import { LightNode, NetworkConfig, Protocols } from "@waku/interfaces"; import { utf8ToBytes } from "@waku/sdk"; -import { createLightNode } from "@waku/sdk"; -import { - contentTopicToPubsubTopic, - contentTopicToShardIndex, - Logger -} from "@waku/utils"; -import { Context } from "mocha"; - -import { runNodes as runNodesBuilder, ServiceNode } from "../../src/index.js"; +import { createRoutingInfo, Logger } from "@waku/utils"; // Constants for test configuration. export const log = new Logger("test:lightpush"); export const TestContentTopic = "/test/1/waku-light-push/utf8"; -export const ClusterId = 3; -export const ShardIndex = contentTopicToShardIndex(TestContentTopic); -export const TestPubsubTopic = contentTopicToPubsubTopic( - TestContentTopic, - ClusterId -); -export const TestShardInfo = { - contentTopics: [TestContentTopic], - clusterId: ClusterId +export const TestClusterId = 3; +export const TestNumShardsInCluster = 8; +export const TestNetworkConfig = { + clusterId: TestClusterId, + numShardsInCluster: TestNumShardsInCluster }; +export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic +}); export const TestEncoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); export const messageText = "Light Push works!"; export const messagePayload = { payload: utf8ToBytes(messageText) }; - -export const runNodes = ( - context: Context, - shardInfo: NetworkConfig -): Promise<[ServiceNode, LightNode]> => - runNodesBuilder({ - context, - createNode: createLightNode, - protocols: [Protocols.LightPush, Protocols.Filter], - networkConfig: shardInfo - }); diff --git a/packages/tests/tests/metadata.spec.ts b/packages/tests/tests/metadata.spec.ts index ee954f09de..450621b0e6 100644 --- a/packages/tests/tests/metadata.spec.ts +++ b/packages/tests/tests/metadata.spec.ts @@ -1,5 +1,5 @@ import { MetadataCodec } from "@waku/core"; -import type { LightNode, ShardInfo } from "@waku/interfaces"; +import type { LightNode } from "@waku/interfaces"; import { createLightNode } from "@waku/sdk"; import { decodeRelayShard } from "@waku/utils"; import chai, { expect } from "chai"; @@ -29,25 +29,27 @@ describe("Metadata Protocol", function () { await tearDownNodes([nwaku1], waku); }); - describe("connections", function () { - it("same cluster, same shard: nodes connect", async function () { - const shardInfo: ShardInfo = { - clusterId: 2, - shards: [1] - }; + describe("static sharding", function () { + it("same cluster, static sharding: nodes connect", async function () { + const clusterId = 2; + const shards = [1]; + const numShardsInCluster = 8; await nwaku1.start({ relay: true, discv5Discovery: true, peerExchange: true, - clusterId: shardInfo.clusterId, - shard: shardInfo.shards + clusterId, + shard: shards, + numShardsInNetwork: numShardsInCluster }); const nwaku1Ma = await nwaku1.getMultiaddrWithId(); const nwaku1PeerId = await nwaku1.getPeerId(); - waku = await createLightNode({ networkConfig: shardInfo }); + waku = await createLightNode({ + networkConfig: { clusterId, numShardsInCluster } + }); await waku.start(); await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); @@ -65,82 +67,33 @@ describe("Metadata Protocol", function () { } expect(shardInfoRes).to.not.be.undefined; - expect(shardInfoRes.clusterId).to.equal(shardInfo.clusterId); - expect(shardInfoRes.shards).to.include.members(shardInfo.shards); + expect(shardInfoRes.clusterId).to.equal(clusterId); + expect(shardInfoRes.shards).to.include.members(shards); const activeConnections = waku.libp2p.getConnections(); expect(activeConnections.length).to.equal(1); }); - it("same cluster, different shard: nodes connect", async function () { - const shardInfo1: ShardInfo = { - clusterId: 2, - shards: [1] - }; - - const shardInfo2: ShardInfo = { - clusterId: 2, - shards: [2] - }; + it("different cluster: nodes don't connect", async function () { + const clusterIdNwaku = 2; + const custerIdJsWaku = 3; + const shards = [1]; + const numShardsInCluster = 8; await nwaku1.start({ relay: true, discv5Discovery: true, peerExchange: true, - clusterId: shardInfo1.clusterId, - shard: shardInfo1.shards - }); - - const nwaku1Ma = await nwaku1.getMultiaddrWithId(); - const nwaku1PeerId = await nwaku1.getPeerId(); - - waku = await createLightNode({ networkConfig: shardInfo2 }); - await waku.start(); - await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); - - if (!waku.libp2p.services.metadata) { - expect(waku.libp2p.services.metadata).to.not.be.undefined; - return; - } - - const { error, shardInfo: shardInfoRes } = - await waku.libp2p.services.metadata.query(nwaku1PeerId); - - if (error) { - expect(error).to.be.null; - return; - } - - expect(shardInfoRes).to.not.be.undefined; - expect(shardInfoRes.clusterId).to.equal(shardInfo1.clusterId); - expect(shardInfoRes.shards).to.include.members(shardInfo1.shards); - - const activeConnections = waku.libp2p.getConnections(); - expect(activeConnections.length).to.equal(1); - }); - - it("different cluster, same shard: nodes don't connect", async function () { - const shardInfo1: ShardInfo = { - clusterId: 2, - shards: [1] - }; - - const shardInfo2: ShardInfo = { - clusterId: 3, - shards: [1] - }; - - await nwaku1.start({ - relay: true, - discv5Discovery: true, - peerExchange: true, - clusterId: shardInfo1.clusterId, - shard: shardInfo1.shards + clusterId: clusterIdNwaku, + shard: shards, + numShardsInNetwork: numShardsInCluster }); const nwaku1Ma = await nwaku1.getMultiaddrWithId(); - waku = await createLightNode({ networkConfig: shardInfo2 }); + waku = await createLightNode({ + networkConfig: { clusterId: custerIdJsWaku, numShardsInCluster } + }); await waku.start(); await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); @@ -157,28 +110,151 @@ describe("Metadata Protocol", function () { expect(waku.libp2p.getConnections().length).to.equal(0); }); - it("different cluster, different shard: nodes don't connect", async function () { - const shardInfo1: ShardInfo = { - clusterId: 2, - shards: [1] - }; - - const shardInfo2: ShardInfo = { - clusterId: 3, - shards: [2] - }; + it("PeerStore has remote peer's shard info after successful connection", async function () { + const clusterId = 2; + const shards = [1]; + const numShardsInCluster = 8; await nwaku1.start({ relay: true, discv5Discovery: true, peerExchange: true, - clusterId: shardInfo1.clusterId, - shard: shardInfo1.shards + clusterId, + shard: shards, + numShardsInNetwork: numShardsInCluster + }); + + const nwaku1Ma = await nwaku1.getMultiaddrWithId(); + const nwaku1PeerId = await nwaku1.getPeerId(); + + waku = await createLightNode({ + networkConfig: { clusterId, numShardsInCluster } + }); + await waku.start(); + await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); + + // delay to ensure the connection is estabilished and shardInfo is updated + await delay(500); + + const encodedShardInfo = ( + await waku.libp2p.peerStore.get(nwaku1PeerId) + ).metadata.get("shardInfo"); + expect(encodedShardInfo).to.not.be.undefined; + + const metadataShardInfo = decodeRelayShard(encodedShardInfo!); + expect(metadataShardInfo).not.be.undefined; + + expect(metadataShardInfo!.clusterId).to.eq(clusterId); + expect(metadataShardInfo.shards).to.include.members(shards); + }); + + it("receiving a ping from a peer does not overwrite shard info", async function () { + const clusterId = 2; + const shards = [1]; + const numShardsInCluster = 8; + + await nwaku1.start({ + relay: true, + discv5Discovery: true, + peerExchange: true, + clusterId, + shard: shards + }); + + const nwaku1Ma = await nwaku1.getMultiaddrWithId(); + const nwaku1PeerId = await nwaku1.getPeerId(); + + waku = await createLightNode({ + networkConfig: { + clusterId, + numShardsInCluster + }, + connectionManager: { + pingKeepAlive: 1 + } + }); + await waku.start(); + await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); + + // delay to ensure the connection is estabilished, shardInfo is updated, and there is a ping + await delay(1500); + + const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata; + expect(metadata.get("shardInfo")).to.not.be.undefined; + + const pingInfo = metadata.get("ping"); + expect(pingInfo).to.not.be.undefined; + }); + }); + describe("auto sharding", function () { + it("same cluster: nodes connect", async function () { + const clusterId = 2; + const contentTopic = "/foo/1/bar/proto"; + const numShardsInCluster = 0; + + await nwaku1.start({ + relay: true, + discv5Discovery: true, + peerExchange: true, + clusterId, + contentTopic: [contentTopic], + numShardsInNetwork: numShardsInCluster + }); + + const nwaku1Ma = await nwaku1.getMultiaddrWithId(); + const nwaku1PeerId = await nwaku1.getPeerId(); + + waku = await createLightNode({ + networkConfig: { clusterId, numShardsInCluster } + }); + await waku.start(); + await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); + + if (!waku.libp2p.services.metadata) { + expect(waku.libp2p.services.metadata).to.not.be.undefined; + return; + } + + const { error, shardInfo: shardInfoRes } = + await waku.libp2p.services.metadata.query(nwaku1PeerId); + + if (error) { + expect(error).to.be.null; + return; + } + + expect(shardInfoRes).to.not.be.undefined; + expect(shardInfoRes.clusterId).to.equal(clusterId); + // TODO: calculate shards from content topics + //expect(shardInfoRes.shards).to.include.members(shards); + + const activeConnections = waku.libp2p.getConnections(); + expect(activeConnections.length).to.equal(1); + }); + + it("different cluster: nodes don't connect", async function () { + const clusterIdNwaku = 2; + const clusterIdJSWaku = 3; + const contentTopic = ["/foo/1/bar/proto"]; + const numShardsInCluster = 0; + + await nwaku1.start({ + relay: true, + discv5Discovery: true, + peerExchange: true, + clusterId: clusterIdNwaku, + contentTopic, + numShardsInNetwork: numShardsInCluster }); const nwaku1Ma = await nwaku1.getMultiaddrWithId(); - waku = await createLightNode({ networkConfig: shardInfo2 }); + waku = await createLightNode({ + networkConfig: { + clusterId: clusterIdJSWaku, + numShardsInCluster + } + }); await waku.start(); await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); @@ -195,77 +271,81 @@ describe("Metadata Protocol", function () { expect(waku.libp2p.getConnections().length).to.equal(0); }); - }); - it("PeerStore has remote peer's shard info after successful connection", async function () { - const shardInfo: ShardInfo = { - clusterId: 2, - shards: [1] - }; + it("PeerStore has remote peer's shard info after successful connection", async function () { + const clusterId = 2; + const contentTopic = ["/foo/1/bar/proto"]; + const numShardsInCluster = 0; - await nwaku1.start({ - relay: true, - discv5Discovery: true, - peerExchange: true, - clusterId: shardInfo.clusterId, - shard: shardInfo.shards + await nwaku1.start({ + relay: true, + discv5Discovery: true, + peerExchange: true, + clusterId, + contentTopic + }); + + const nwaku1Ma = await nwaku1.getMultiaddrWithId(); + const nwaku1PeerId = await nwaku1.getPeerId(); + + waku = await createLightNode({ + networkConfig: { clusterId, numShardsInCluster } + }); + await waku.start(); + await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); + + // delay to ensure the connection is estabilished and shardInfo is updated + await delay(500); + + const encodedShardInfo = ( + await waku.libp2p.peerStore.get(nwaku1PeerId) + ).metadata.get("shardInfo"); + expect(encodedShardInfo).to.not.be.undefined; + + const metadataShardInfo = decodeRelayShard(encodedShardInfo!); + expect(metadataShardInfo).not.be.undefined; + + expect(metadataShardInfo!.clusterId).to.eq(clusterId); + // TODO derive shard from content topic + // expect(metadataShardInfo.shards).to.include.members(shards); }); - const nwaku1Ma = await nwaku1.getMultiaddrWithId(); - const nwaku1PeerId = await nwaku1.getPeerId(); + it("receiving a ping from a peer does not overwrite shard info", async function () { + const clusterId = 2; + const contentTopic = ["/foo/1/bar/proto"]; + const numShardsInCluster = 0; - waku = await createLightNode({ networkConfig: shardInfo }); - await waku.start(); - await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); + await nwaku1.start({ + relay: true, + discv5Discovery: true, + peerExchange: true, + clusterId, + contentTopic + }); - // delay to ensure the connection is estabilished and shardInfo is updated - await delay(500); + const nwaku1Ma = await nwaku1.getMultiaddrWithId(); + const nwaku1PeerId = await nwaku1.getPeerId(); - const encodedShardInfo = ( - await waku.libp2p.peerStore.get(nwaku1PeerId) - ).metadata.get("shardInfo"); - expect(encodedShardInfo).to.not.be.undefined; + waku = await createLightNode({ + networkConfig: { + clusterId, + numShardsInCluster + }, + connectionManager: { + pingKeepAlive: 1 + } + }); + await waku.start(); + await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); - const metadataShardInfo = decodeRelayShard(encodedShardInfo!); - expect(metadataShardInfo).not.be.undefined; + // delay to ensure the connection is estabilished, shardInfo is updated, and there is a ping + await delay(1500); - expect(metadataShardInfo!.clusterId).to.eq(shardInfo.clusterId); - expect(metadataShardInfo.shards).to.include.members(shardInfo.shards); - }); + const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata; + expect(metadata.get("shardInfo")).to.not.be.undefined; - it("receiving a ping from a peer does not overwrite shard info", async function () { - const shardInfo: ShardInfo = { - clusterId: 2, - shards: [1] - }; - - await nwaku1.start({ - relay: true, - discv5Discovery: true, - peerExchange: true, - clusterId: shardInfo.clusterId, - shard: shardInfo.shards + const pingInfo = metadata.get("ping"); + expect(pingInfo).to.not.be.undefined; }); - - const nwaku1Ma = await nwaku1.getMultiaddrWithId(); - const nwaku1PeerId = await nwaku1.getPeerId(); - - waku = await createLightNode({ - networkConfig: shardInfo, - connectionManager: { - pingKeepAlive: 1 - } - }); - await waku.start(); - await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec); - - // delay to ensure the connection is estabilished, shardInfo is updated, and there is a ping - await delay(1500); - - const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata; - expect(metadata.get("shardInfo")).to.not.be.undefined; - - const pingInfo = metadata.get("ping"); - expect(pingInfo).to.not.be.undefined; }); }); diff --git a/packages/tests/tests/nwaku.node.spec.ts b/packages/tests/tests/nwaku.node.spec.ts index 84c9d36d85..4a72b78e4b 100644 --- a/packages/tests/tests/nwaku.node.spec.ts +++ b/packages/tests/tests/nwaku.node.spec.ts @@ -17,8 +17,6 @@ describe("nwaku", () => { "--rest-admin=true", "--websocket-support=true", "--log-level=TRACE", - "--cluster-id=0", - "--shard=0", "--ports-shift=42" ]; diff --git a/packages/tests/tests/peer-exchange/compliance.spec.ts b/packages/tests/tests/peer-exchange/compliance.spec.ts index 5ac95252c4..48a081c318 100644 --- a/packages/tests/tests/peer-exchange/compliance.spec.ts +++ b/packages/tests/tests/peer-exchange/compliance.spec.ts @@ -5,7 +5,7 @@ import { createLightNode } from "@waku/sdk"; import { beforeEachCustom, - DefaultTestShardInfo, + DefaultTestNetworkConfig, makeLogFileName, ServiceNode, tearDownNodes @@ -40,7 +40,9 @@ describe("Peer Exchange", function () { tests({ async setup() { - waku = await createLightNode({ networkConfig: DefaultTestShardInfo }); + waku = await createLightNode({ + networkConfig: DefaultTestNetworkConfig + }); await waku.start(); const nwaku2Ma = await nwaku2.getMultiaddrWithId(); diff --git a/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts b/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts index 08fa5027d3..e85ba125e0 100644 --- a/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts +++ b/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts @@ -3,8 +3,8 @@ import { type PeerId } from "@libp2p/interface"; import { peerIdFromPrivateKey } from "@libp2p/peer-id"; import { multiaddr } from "@multiformats/multiaddr"; import { PeerExchangeDiscovery } from "@waku/discovery"; -import { IEnr, LightNode } from "@waku/interfaces"; -import { createLightNode, ShardInfo } from "@waku/sdk"; +import { IEnr, LightNode, RelayShards } from "@waku/interfaces"; +import { createLightNode } from "@waku/sdk"; import { decodeRelayShard } from "@waku/utils"; import { expect } from "chai"; import Sinon from "sinon"; @@ -15,8 +15,8 @@ describe("Peer Exchange Continuous Discovery", () => { let peerId: PeerId; let randomPeerId: PeerId; let waku: LightNode; - const shardInfo: ShardInfo = { - clusterId: 1, + const relayShards: RelayShards = { + clusterId: 2, shards: [1, 2] }; const multiaddrs = [multiaddr("/ip4/127.0.0.1/udp/1234")]; @@ -38,7 +38,7 @@ describe("Peer Exchange Continuous Discovery", () => { const newPeerInfo = { ENR: { peerId, - shardInfo, + shardInfo: relayShards, peerInfo: { multiaddrs: newMultiaddrs, id: peerId @@ -59,14 +59,14 @@ describe("Peer Exchange Continuous Discovery", () => { }); it("Should update shard info", async () => { - const newShardInfo: ShardInfo = { + const newRelayShards: RelayShards = { clusterId: 2, shards: [1, 2, 3] }; const newPeerInfo = { ENR: { peerId, - shardInfo: newShardInfo, + shardInfo: newRelayShards, peerInfo: { multiaddrs: multiaddrs, id: peerId @@ -86,7 +86,7 @@ describe("Peer Exchange Continuous Discovery", () => { ); const _shardInfo = decodeRelayShard(newPeer.metadata.get("shardInfo")!); - expect(_shardInfo).to.deep.equal(newShardInfo); + expect(_shardInfo).to.deep.equal(newRelayShards); }); async function discoverPeerOnce(): Promise { @@ -95,7 +95,7 @@ describe("Peer Exchange Continuous Discovery", () => { const enr: IEnr = { peerId, - shardInfo, + shardInfo: relayShards, peerInfo: { multiaddrs: multiaddrs, id: peerId @@ -122,6 +122,6 @@ describe("Peer Exchange Continuous Discovery", () => { multiaddrs[0].toString() ); const _shardInfo = decodeRelayShard(peer.metadata.get("shardInfo")!); - expect(_shardInfo).to.deep.equal(shardInfo); + expect(_shardInfo).to.deep.equal(relayShards); } }); diff --git a/packages/tests/tests/peer-exchange/index.spec.ts b/packages/tests/tests/peer-exchange/index.spec.ts index 57638756e3..c0a3128363 100644 --- a/packages/tests/tests/peer-exchange/index.spec.ts +++ b/packages/tests/tests/peer-exchange/index.spec.ts @@ -10,7 +10,9 @@ import Sinon, { SinonSpy } from "sinon"; import { afterEachCustom, beforeEachCustom, - DefaultTestShardInfo, + DefaultTestClusterId, + DefaultTestNetworkConfig, + DefaultTestRelayShards, makeLogFileName, ServiceNode, tearDownNodes @@ -30,15 +32,15 @@ describe("Peer Exchange", function () { nwaku1 = new ServiceNode(makeLogFileName(this.ctx) + "1"); nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2"); await nwaku1.start({ - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards, + clusterId: DefaultTestClusterId, + shard: DefaultTestRelayShards.shards, discv5Discovery: true, peerExchange: true, relay: true }); await nwaku2.start({ - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards, + clusterId: DefaultTestClusterId, + shard: DefaultTestRelayShards.shards, discv5Discovery: true, peerExchange: true, discv5BootstrapNode: (await nwaku1.info()).enrUri, @@ -52,7 +54,7 @@ describe("Peer Exchange", function () { it("peer exchange sets tag", async function () { waku = await createLightNode({ - networkConfig: DefaultTestShardInfo, + networkConfig: DefaultTestNetworkConfig, libp2p: { peerDiscovery: [ bootstrap({ list: [(await nwaku2.getMultiaddrWithId()).toString()] }), @@ -117,8 +119,8 @@ describe("Peer Exchange", function () { nwaku3 = new ServiceNode(makeLogFileName(this) + "3"); await nwaku3.start({ - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards, + clusterId: DefaultTestClusterId, + shard: DefaultTestRelayShards.shards, discv5Discovery: true, peerExchange: true, discv5BootstrapNode: (await nwaku1.info()).enrUri, diff --git a/packages/tests/tests/peer-exchange/pe.optional.spec.ts b/packages/tests/tests/peer-exchange/pe.optional.spec.ts index 9b93f1d660..354c13325f 100644 --- a/packages/tests/tests/peer-exchange/pe.optional.spec.ts +++ b/packages/tests/tests/peer-exchange/pe.optional.spec.ts @@ -6,7 +6,6 @@ import { } from "@waku/discovery"; import type { LightNode } from "@waku/interfaces"; import { createLightNode } from "@waku/sdk"; -import { singleShardInfosToShardInfo } from "@waku/utils"; import { expect } from "chai"; import { afterEachCustom, tearDownNodes } from "../../src/index.js"; @@ -36,8 +35,7 @@ describe("Peer Exchange", () => { ) .filter((ma) => ma.includes("wss")); - const singleShardInfo = { clusterId: 1, shard: 1 }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); + const networkConfig = { clusterId: 2, numShardsInCluster: 0 }; waku = await createLightNode({ libp2p: { peerDiscovery: [ @@ -45,7 +43,7 @@ describe("Peer Exchange", () => { wakuPeerExchangeDiscovery() ] }, - networkConfig: shardInfo + networkConfig }); await waku.start(); diff --git a/packages/tests/tests/relay/index.node.spec.ts b/packages/tests/tests/relay/index.node.spec.ts index 91e038975f..2712715c72 100644 --- a/packages/tests/tests/relay/index.node.spec.ts +++ b/packages/tests/tests/relay/index.node.spec.ts @@ -13,6 +13,7 @@ import { createDecoder as createSymDecoder, createEncoder as createSymEncoder } from "@waku/message-encryption/symmetric"; +import { createRoutingInfo } from "@waku/utils"; import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -23,7 +24,7 @@ import { tearDownNodes } from "../../src/index.js"; -import { runJSNodes, TestPubsubTopic } from "./utils.js"; +import { runJSNodes, TestNetworkConfig, TestRoutingInfo } from "./utils.js"; describe("Waku Relay", function () { this.timeout(15000); @@ -51,20 +52,20 @@ describe("Waku Relay", function () { const eciesEncoder = createEciesEncoder({ contentTopic: asymTopic, publicKey, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); const symEncoder = createSymEncoder({ contentTopic: symTopic, symKey, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); const eciesDecoder = createEciesDecoder( asymTopic, - privateKey, - TestPubsubTopic + TestRoutingInfo, + privateKey ); - const symDecoder = createSymDecoder(symTopic, symKey, TestPubsubTopic); + const symDecoder = createSymDecoder(symTopic, TestRoutingInfo, symKey); const msgs: IDecodedMessage[] = []; void waku2.relay.subscribeWithUnsubscribe([eciesDecoder], (wakuMsg) => { @@ -93,19 +94,20 @@ describe("Waku Relay", function () { "Published on content topic with added then deleted observer"; const contentTopic = "/test/1/observer/proto"; + const routingInfo = createRoutingInfo(TestNetworkConfig, { contentTopic }); // The promise **fails** if we receive a message on this observer. const receivedMsgPromise: Promise = new Promise( (resolve, reject) => { const deleteObserver = waku2.relay.subscribeWithUnsubscribe( - [createDecoder(contentTopic)], + [createDecoder(contentTopic, routingInfo)], reject ) as () => void; deleteObserver(); setTimeout(resolve, 500); } ); - await waku1.relay.send(createEncoder({ contentTopic }), { + await waku1.relay.send(createEncoder({ contentTopic, routingInfo }), { payload: utf8ToBytes(messageText) }); diff --git a/packages/tests/tests/relay/interop.node.spec.ts b/packages/tests/tests/relay/interop.node.spec.ts index 211df51b9b..664fda4c18 100644 --- a/packages/tests/tests/relay/interop.node.spec.ts +++ b/packages/tests/tests/relay/interop.node.spec.ts @@ -19,8 +19,8 @@ import { TestContentTopic, TestDecoder, TestEncoder, - TestPubsubTopic, - TestShardInfo + TestNetworkConfig, + TestRoutingInfo } from "./utils.js"; import { runRelayNodes } from "./utils.js"; @@ -30,7 +30,12 @@ describe("Waku Relay, Interop", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runRelayNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runRelayNodes( + this.ctx, + TestNetworkConfig, + undefined, + [TestContentTopic] + ); }); afterEachCustom(this, async () => { @@ -42,8 +47,9 @@ describe("Waku Relay, Interop", function () { while (subscribers.length === 0) { await delay(200); - subscribers = - waku.libp2p.services.pubsub!.getSubscribers(TestPubsubTopic); + subscribers = waku.libp2p.services.pubsub!.getSubscribers( + TestRoutingInfo.pubsubTopic + ); } const nimPeerId = await nwaku.getPeerId(); @@ -86,7 +92,8 @@ describe("Waku Relay, Interop", function () { ServiceNode.toMessageRpcQuery({ contentTopic: TestContentTopic, payload: utf8ToBytes(messageText) - }) + }), + TestRoutingInfo ); const receivedMsg = await receivedMsgPromise; @@ -98,9 +105,10 @@ describe("Waku Relay, Interop", function () { it("Js publishes, other Js receives", async function () { const waku2 = await createRelayNode({ + routingInfos: [TestRoutingInfo], staticNoiseKey: NOISE_KEY_2, emitSelf: true, - networkConfig: TestShardInfo + networkConfig: TestNetworkConfig }); await waku2.start(); diff --git a/packages/tests/tests/relay/multiple_pubsub.node.spec.ts b/packages/tests/tests/relay/multiple_pubsub.node.spec.ts index d531cca547..4c3990fd08 100644 --- a/packages/tests/tests/relay/multiple_pubsub.node.spec.ts +++ b/packages/tests/tests/relay/multiple_pubsub.node.spec.ts @@ -1,18 +1,7 @@ import { createDecoder, createEncoder } from "@waku/core"; -import { - ContentTopicInfo, - IDecodedMessage, - Protocols, - RelayNode, - ShardInfo, - SingleShardInfo -} from "@waku/interfaces"; +import { IDecodedMessage, Protocols, RelayNode } from "@waku/interfaces"; import { createRelayNode } from "@waku/relay"; -import { - contentTopicToPubsubTopic, - pubsubTopicToSingleShardInfo, - singleShardInfoToPubsubTopic -} from "@waku/utils"; +import { createRoutingInfo } from "@waku/utils"; import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -26,43 +15,38 @@ import { } from "../../src/index.js"; import { TestDecoder } from "../filter/utils.js"; -describe("Waku Relay, multiple pubsub topics", function () { +describe("Waku Relay, static sharding, multiple pubsub topics", function () { this.timeout(15000); let waku1: RelayNode; let waku2: RelayNode; let waku3: RelayNode; - const customPubsubTopic1 = singleShardInfoToPubsubTopic({ - clusterId: 3, - shard: 1 - }); - const customPubsubTopic2 = singleShardInfoToPubsubTopic({ - clusterId: 3, - shard: 2 - }); - const shardInfo1: ShardInfo = { clusterId: 3, shards: [1] }; - const singleShardInfo1: SingleShardInfo = { - clusterId: 3, - shard: 1 - }; + const clusterId = 3; + const networkConfig = { clusterId }; + + const shardOne = 1; + const shardTwo = 2; + const customContentTopic1 = "/test/2/waku-relay/utf8"; const customContentTopic2 = "/test/3/waku-relay/utf8"; - const shardInfo2: ShardInfo = { clusterId: 3, shards: [2] }; - const singleShardInfo2: SingleShardInfo = { - clusterId: 3, - shard: 2 - }; + + const routingInfoOne = createRoutingInfo(networkConfig, { + shardId: shardOne + }); + const routingInfoTwo = createRoutingInfo(networkConfig, { + shardId: shardTwo + }); + const customEncoder1 = createEncoder({ - pubsubTopicShardInfo: singleShardInfo1, - contentTopic: customContentTopic1 + contentTopic: customContentTopic1, + routingInfo: routingInfoOne }); - const customDecoder1 = createDecoder(customContentTopic1, singleShardInfo1); + const customDecoder1 = createDecoder(customContentTopic1, routingInfoOne); const customEncoder2 = createEncoder({ - pubsubTopicShardInfo: singleShardInfo2, - contentTopic: customContentTopic2 + contentTopic: customContentTopic2, + routingInfo: routingInfoTwo }); - const customDecoder2 = createDecoder(customContentTopic2, singleShardInfo2); - const shardInfoBothShards: ShardInfo = { clusterId: 3, shards: [1, 2] }; + const customDecoder2 = createDecoder(customContentTopic2, routingInfoTwo); afterEachCustom(this, async () => { await tearDownNodes([], [waku1, waku2, waku3]); @@ -70,35 +54,36 @@ describe("Waku Relay, multiple pubsub topics", function () { [ { - pubsub: customPubsubTopic1, - shardInfo: shardInfo1, + routingInfo: routingInfoOne, encoder: customEncoder1, decoder: customDecoder1 }, { - pubsub: customPubsubTopic2, - shardInfo: shardInfo2, + routingInfo: routingInfoTwo, encoder: customEncoder2, decoder: customDecoder2 } ].forEach((testItem) => { - it(`3 nodes on ${testItem.pubsub} topic`, async function () { + it(`3 nodes on ${testItem.routingInfo.pubsubTopic} topic`, async function () { const [msgCollector1, msgCollector2, msgCollector3] = Array(3) .fill(null) .map(() => new MessageCollector()); [waku1, waku2, waku3] = await Promise.all([ createRelayNode({ - networkConfig: testItem.shardInfo, + networkConfig: networkConfig, + routingInfos: [testItem.routingInfo], staticNoiseKey: NOISE_KEY_1 }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: testItem.shardInfo, + networkConfig: networkConfig, + routingInfos: [testItem.routingInfo], staticNoiseKey: NOISE_KEY_2, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: testItem.shardInfo, + networkConfig: networkConfig, + routingInfos: [testItem.routingInfo], staticNoiseKey: NOISE_KEY_3 }).then((waku) => waku.start().then(() => waku)) ]); @@ -196,16 +181,19 @@ describe("Waku Relay, multiple pubsub topics", function () { // Waku1 and waku2 are using multiple pubsub topis [waku1, waku2, waku3] = await Promise.all([ createRelayNode({ - networkConfig: shardInfoBothShards, + networkConfig: networkConfig, + routingInfos: [routingInfoOne, routingInfoTwo], staticNoiseKey: NOISE_KEY_1 }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: shardInfoBothShards, + networkConfig: networkConfig, + routingInfos: [routingInfoOne, routingInfoTwo], staticNoiseKey: NOISE_KEY_2, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: shardInfo1, + networkConfig: networkConfig, + routingInfos: [routingInfoOne], staticNoiseKey: NOISE_KEY_3 }).then((waku) => waku.start().then(() => waku)) ]); @@ -262,18 +250,22 @@ describe("Waku Relay, multiple pubsub topics", function () { expect(msgCollector3.hasMessage(customContentTopic1, "M3")).to.eq(true); }); - it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () { + it("n1 and n2 uses relay shard 1, n3 uses relay shard 2", async function () { [waku1, waku2, waku3] = await Promise.all([ createRelayNode({ - networkConfig: shardInfo1, + networkConfig, + routingInfos: [routingInfoOne], staticNoiseKey: NOISE_KEY_1 }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: shardInfo1, + networkConfig, + routingInfos: [routingInfoOne], staticNoiseKey: NOISE_KEY_2, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)), createRelayNode({ + networkConfig, + routingInfos: [routingInfoTwo], staticNoiseKey: NOISE_KEY_3 }).then((waku) => waku.start().then(() => waku)) ]); @@ -319,55 +311,45 @@ describe("Waku Relay, multiple pubsub topics", function () { await waku3NoMsgPromise; expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText); - expect(waku2ReceivedMsg.pubsubTopic).to.eq(customPubsubTopic1); + expect(waku2ReceivedMsg.pubsubTopic).to.eq(routingInfoOne.pubsubTopic); }); }); -describe("Waku Relay (Autosharding), multiple pubsub topics", function () { +describe("Waku Relay auto-sharding, multiple pubsub topics", function () { this.timeout(15000); const clusterId = 7; let waku1: RelayNode; let waku2: RelayNode; let waku3: RelayNode; + const networkConfig = { clusterId, numShardsInCluster: 8 }; + const customContentTopic1 = "/waku/2/content/utf8"; const customContentTopic2 = "/myapp/1/latest/proto"; - const autoshardingPubsubTopic1 = contentTopicToPubsubTopic( - customContentTopic1, - clusterId - ); - const autoshardingPubsubTopic2 = contentTopicToPubsubTopic( - customContentTopic2, - clusterId - ); - const contentTopicInfo1: ContentTopicInfo = { - clusterId: clusterId, - contentTopics: [customContentTopic1] - }; - const contentTopicInfo2: ContentTopicInfo = { - clusterId: clusterId, - contentTopics: [customContentTopic2] - }; + + const routingInfo1 = createRoutingInfo(networkConfig, { + contentTopic: customContentTopic1 + }); + const routingInfo2 = createRoutingInfo(networkConfig, { + contentTopic: customContentTopic2 + }); + + if (routingInfo1.pubsubTopic == routingInfo2.pubsubTopic) + throw "Internal error, both content topics resolve to same shard"; + const customEncoder1 = createEncoder({ contentTopic: customContentTopic1, - pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1) + routingInfo: routingInfo1 }); - const customDecoder1 = createDecoder( - customContentTopic1, - pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1) - ); + const customDecoder1 = createDecoder(customContentTopic1, routingInfo1); const customEncoder2 = createEncoder({ contentTopic: customContentTopic2, - pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2) + routingInfo: routingInfo2 }); - const customDecoder2 = createDecoder( - customContentTopic2, - pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2) - ); - const contentTopicInfoBothShards: ContentTopicInfo = { - clusterId: clusterId, - contentTopics: [customContentTopic1, customContentTopic2] - }; + const customDecoder2 = createDecoder(customContentTopic2, routingInfo2); + + const relayShard1 = { clusterId, shards: [routingInfo1.shardId] }; + const relayShard2 = { clusterId, shards: [routingInfo2.shardId] }; afterEachCustom(this, async () => { await tearDownNodes([], [waku1, waku2, waku3]); @@ -375,35 +357,38 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () { [ { - pubsub: autoshardingPubsubTopic1, - shardInfo: contentTopicInfo1, + routingInfo: routingInfo1, + relayShards: relayShard1, encoder: customEncoder1, decoder: customDecoder1 }, { - pubsub: autoshardingPubsubTopic2, - shardInfo: contentTopicInfo2, + routingInfo: routingInfo2, + relayShards: relayShard2, encoder: customEncoder2, decoder: customDecoder2 } ].forEach((testItem) => { - it(`3 nodes on ${testItem.pubsub} topic`, async function () { + it(`3 nodes on ${testItem.routingInfo.pubsubTopic} topic`, async function () { const [msgCollector1, msgCollector2, msgCollector3] = Array(3) .fill(null) .map(() => new MessageCollector()); [waku1, waku2, waku3] = await Promise.all([ createRelayNode({ - networkConfig: testItem.shardInfo, + networkConfig, + routingInfos: [testItem.routingInfo], staticNoiseKey: NOISE_KEY_1 }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: testItem.shardInfo, + networkConfig, + routingInfos: [testItem.routingInfo], staticNoiseKey: NOISE_KEY_2, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: testItem.shardInfo, + networkConfig, + routingInfos: [testItem.routingInfo], staticNoiseKey: NOISE_KEY_3 }).then((waku) => waku.start().then(() => waku)) ]); @@ -510,16 +495,19 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () { // Waku1 and waku2 are using multiple pubsub topis [waku1, waku2, waku3] = await Promise.all([ createRelayNode({ - networkConfig: contentTopicInfoBothShards, + networkConfig, + routingInfos: [routingInfo1, routingInfo2], staticNoiseKey: NOISE_KEY_1 }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: contentTopicInfoBothShards, + networkConfig, + routingInfos: [routingInfo1, routingInfo2], staticNoiseKey: NOISE_KEY_2, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: contentTopicInfo1, + networkConfig, + routingInfos: [routingInfo1], staticNoiseKey: NOISE_KEY_3 }).then((waku) => waku.start().then(() => waku)) ]); @@ -603,18 +591,22 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () { expect(msgCollector3.hasMessage(customContentTopic1, "M3")).to.eq(true); }); - it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () { + it("n1 and n2 uses first shard, n3 uses the second shard", async function () { [waku1, waku2, waku3] = await Promise.all([ createRelayNode({ - networkConfig: contentTopicInfo1, + networkConfig, + routingInfos: [routingInfo1], staticNoiseKey: NOISE_KEY_1 }).then((waku) => waku.start().then(() => waku)), createRelayNode({ - networkConfig: contentTopicInfo1, + networkConfig, + routingInfos: [routingInfo1], staticNoiseKey: NOISE_KEY_2, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)), createRelayNode({ + networkConfig, + routingInfos: [routingInfo2], staticNoiseKey: NOISE_KEY_3 }).then((waku) => waku.start().then(() => waku)) ]); @@ -660,6 +652,6 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () { await waku3NoMsgPromise; expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText); - expect(waku2ReceivedMsg.pubsubTopic).to.eq(autoshardingPubsubTopic1); + expect(waku2ReceivedMsg.pubsubTopic).to.eq(routingInfo1.pubsubTopic); }); }); diff --git a/packages/tests/tests/relay/publish.node.spec.ts b/packages/tests/tests/relay/publish.node.spec.ts index f7fc8be951..e8b8afed8a 100644 --- a/packages/tests/tests/relay/publish.node.spec.ts +++ b/packages/tests/tests/relay/publish.node.spec.ts @@ -1,5 +1,6 @@ import { createEncoder } from "@waku/core"; import { IRateLimitProof, ProtocolError, RelayNode } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -16,13 +17,12 @@ import { import { messageText, runJSNodes, + TestClusterId, TestContentTopic, TestDecoder, TestEncoder, TestExpectOptions, - TestPubsubTopic, - TestShardInfo, - TestWaitMessageOptions, + TestRoutingInfo, waitForAllRemotePeers } from "./utils.js"; @@ -54,9 +54,7 @@ describe("Waku Relay, Publish", function () { expect(pushResponse.successes[0].toString()).to.eq( waku2.libp2p.peerId.toString() ); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(1)).to.eq(true); messageCollector.verifyReceivedMessage(0, { ...TestExpectOptions, expectedMessageText: testItem.value @@ -81,9 +79,7 @@ describe("Waku Relay, Publish", function () { waku2.libp2p.peerId.toString() ); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(1)).to.eq(true); messageCollector.verifyReceivedMessage(0, { ...TestExpectOptions, @@ -107,31 +103,16 @@ describe("Waku Relay, Publish", function () { it("Fails to publish message with empty text", async function () { await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("") }); await delay(400); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(false); - }); - - it("Fails to publish message with wrong content topic", async function () { - const wrong_encoder = createEncoder({ - contentTopic: "/test/1/wrong/utf8", - pubsubTopic: TestPubsubTopic - }); - await waku1.relay.send(wrong_encoder, { - payload: utf8ToBytes("") - }); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(false); + expect(await messageCollector.waitForMessages(1)).to.eq(false); }); it("Fails to publish message with wrong pubsubtopic", async function () { const wrong_encoder = createEncoder({ - pubsubTopicShardInfo: { - clusterId: TestShardInfo.clusterId, - shard: TestShardInfo.shards[0] + 1 - }, - contentTopic: TestContentTopic + contentTopic: TestContentTopic, + routingInfo: createRoutingInfo( + { clusterId: TestClusterId }, + { shardId: 32 } + ) }); const pushResponse = await waku1.relay.send(wrong_encoder, { payload: utf8ToBytes("") @@ -140,9 +121,7 @@ describe("Waku Relay, Publish", function () { ProtocolError.TOPIC_NOT_CONFIGURED ); await delay(400); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(false); + expect(await messageCollector.waitForMessages(1)).to.eq(false); }); [1024 ** 2 + 65536, 2 * 1024 ** 2].forEach((testItem) => { @@ -155,9 +134,7 @@ describe("Waku Relay, Publish", function () { ProtocolError.SIZE_TOO_BIG ); await delay(400); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(false); + expect(await messageCollector.waitForMessages(1)).to.eq(false); }); }); @@ -183,9 +160,7 @@ describe("Waku Relay, Publish", function () { expect(pushResponse.successes[0].toString()).to.eq( waku2.libp2p.peerId.toString() ); - expect( - await messageCollector.waitForMessages(2, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(2)).to.eq(true); }); // Will be skipped until https://github.com/waku-org/js-waku/issues/1464 si done @@ -210,15 +185,13 @@ describe("Waku Relay, Publish", function () { expect(pushResponse.successes[0].toString()).to.eq( waku2.libp2p.peerId.toString() ); - expect( - await messageCollector.waitForMessages(2, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(2)).to.eq(true); }); it("Publish message with large meta", async function () { const customTestEncoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopic: TestPubsubTopic, + routingInfo: TestRoutingInfo, metaSetter: () => new Uint8Array(10 ** 6) }); @@ -229,9 +202,7 @@ describe("Waku Relay, Publish", function () { expect(pushResponse.successes[0].toString()).to.eq( waku2.libp2p.peerId.toString() ); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(1)).to.eq(true); }); it("Publish message with rate limit", async function () { @@ -251,9 +222,7 @@ describe("Waku Relay, Publish", function () { }); expect(pushResponse.successes.length).to.eq(1); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(1)).to.eq(true); messageCollector.verifyReceivedMessage(0, { ...TestExpectOptions, expectedMessageText: messageText diff --git a/packages/tests/tests/relay/subscribe.node.spec.ts b/packages/tests/tests/relay/subscribe.node.spec.ts index c147ed3595..698d5ee45c 100644 --- a/packages/tests/tests/relay/subscribe.node.spec.ts +++ b/packages/tests/tests/relay/subscribe.node.spec.ts @@ -1,6 +1,7 @@ import { createDecoder, createEncoder } from "@waku/core"; import { RelayNode } from "@waku/interfaces"; import { createRelayNode } from "@waku/relay"; +import { createRoutingInfo } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -20,9 +21,8 @@ import { TestDecoder, TestEncoder, TestExpectOptions, - TestPubsubTopic, - TestShardInfo, - TestWaitMessageOptions, + TestNetworkConfig, + TestRoutingInfo, waitForAllRemotePeers } from "./utils.js"; @@ -44,10 +44,10 @@ describe("Waku Relay, Subscribe", function () { it("Mutual subscription", async function () { await waitForAllRemotePeers(waku1, waku2); const subscribers1 = waku1.libp2p.services - .pubsub!.getSubscribers(TestPubsubTopic) + .pubsub!.getSubscribers(TestRoutingInfo.pubsubTopic) .map((p) => p.toString()); const subscribers2 = waku2.libp2p.services - .pubsub!.getSubscribers(TestPubsubTopic) + .pubsub!.getSubscribers(TestRoutingInfo.pubsubTopic) .map((p) => p.toString()); expect(subscribers1).to.contain(waku2.libp2p.peerId.toString()); @@ -65,7 +65,8 @@ describe("Waku Relay, Subscribe", function () { try { const waku = await createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: TestShardInfo + networkConfig: TestNetworkConfig, + routingInfos: [TestRoutingInfo] }); await waku.start(); @@ -90,9 +91,7 @@ describe("Waku Relay, Subscribe", function () { messageCollector.callback ); await waku1.relay.send(TestEncoder, { payload: utf8ToBytes(messageText) }); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(true); + expect(await messageCollector.waitForMessages(1)).to.eq(true); messageCollector.verifyReceivedMessage(0, { ...TestExpectOptions, expectedMessageText: messageText @@ -115,7 +114,6 @@ describe("Waku Relay, Subscribe", function () { // Verify that each message was received on the corresponding topic. expect( await messageCollector.waitForMessages(messageCount, { - ...TestWaitMessageOptions, exact: true }) ).to.eq(true); @@ -130,12 +128,15 @@ describe("Waku Relay, Subscribe", function () { }); it("Subscribe and publish messages on 2 different content topics", async function () { - const secondContentTopic = "/test/2/waku-relay/utf8"; + const secondContentTopic = "/test/0/waku-relay-2/utf8"; + const secondRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: secondContentTopic + }); const secondEncoder = createEncoder({ contentTopic: secondContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: secondRoutingInfo }); - const secondDecoder = createDecoder(secondContentTopic, TestPubsubTopic); + const secondDecoder = createDecoder(secondContentTopic, secondRoutingInfo); await waku2.relay.subscribeWithUnsubscribe( [TestDecoder], @@ -149,7 +150,6 @@ describe("Waku Relay, Subscribe", function () { await waku1.relay.send(secondEncoder, { payload: utf8ToBytes("M2") }); expect( await messageCollector.waitForMessages(2, { - ...TestWaitMessageOptions, exact: true }) ).to.eq(true); @@ -166,7 +166,7 @@ describe("Waku Relay, Subscribe", function () { it("Subscribe one by one to 100 topics and publish messages", async function () { const topicCount = 100; - const td = generateTestData(topicCount, TestWaitMessageOptions); + const td = generateTestData(topicCount, TestNetworkConfig); // Subscribe to topics one by one for (let i = 0; i < topicCount; i++) { @@ -186,7 +186,6 @@ describe("Waku Relay, Subscribe", function () { // Verify that each message was received on the corresponding topic. expect( await messageCollector.waitForMessages(topicCount, { - ...TestWaitMessageOptions, exact: true }) ).to.eq(true); @@ -201,7 +200,7 @@ describe("Waku Relay, Subscribe", function () { it("Subscribe at once to 10000 topics and publish messages", async function () { const topicCount = 10000; - const td = generateTestData(topicCount, TestWaitMessageOptions); + const td = generateTestData(topicCount, TestNetworkConfig); // Subscribe to all topics at once await waku2.relay.subscribeWithUnsubscribe( @@ -219,7 +218,6 @@ describe("Waku Relay, Subscribe", function () { // Verify that each message was received on the corresponding topic. expect( await messageCollector.waitForMessages(topicCount, { - ...TestWaitMessageOptions, exact: true }) ).to.eq(true); @@ -248,7 +246,6 @@ describe("Waku Relay, Subscribe", function () { expect( await messageCollector.waitForMessages(1, { - ...TestWaitMessageOptions, exact: true }) ).to.eq(true); @@ -258,9 +255,9 @@ describe("Waku Relay, Subscribe", function () { it.skip("Overlapping topic subscription", async function () { // Define two sets of test data with overlapping topics. const topicCount1 = 2; - const td1 = generateTestData(topicCount1, TestWaitMessageOptions); + const td1 = generateTestData(topicCount1, TestNetworkConfig); const topicCount2 = 4; - const td2 = generateTestData(topicCount2, TestWaitMessageOptions); + const td2 = generateTestData(topicCount2, TestNetworkConfig); // Subscribe to the first set of topics. await waku2.relay.subscribeWithUnsubscribe( @@ -293,7 +290,6 @@ describe("Waku Relay, Subscribe", function () { // Since there are overlapping topics, there should be 6 messages in total (2 from the first set + 4 from the second set). expect( await messageCollector.waitForMessages(6, { - ...TestWaitMessageOptions, exact: true }) ).to.eq(true); @@ -301,29 +297,39 @@ describe("Waku Relay, Subscribe", function () { TEST_STRING.forEach((testItem) => { it(`Subscribe to topic containing ${testItem.description} and publish message`, async function () { - const newContentTopic = testItem.value; - const newEncoder = createEncoder({ - contentTopic: newContentTopic, - pubsubTopic: TestPubsubTopic - }); - const newDecoder = createDecoder(newContentTopic, TestPubsubTopic); + const newContentTopic = `/test/0/${testItem.value}/null`; - await waku2.relay.subscribeWithUnsubscribe( - [newDecoder], - messageCollector.callback - ); - await waku1.relay.send(newEncoder, { - payload: utf8ToBytes(messageText) - }); + try { + const newRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: newContentTopic + }); - expect( - await messageCollector.waitForMessages(1, TestWaitMessageOptions) - ).to.eq(true); - messageCollector.verifyReceivedMessage(0, { - ...TestExpectOptions, - expectedMessageText: messageText, - expectedContentTopic: newContentTopic - }); + const newEncoder = createEncoder({ + contentTopic: newContentTopic, + routingInfo: newRoutingInfo + }); + const newDecoder = createDecoder(newContentTopic, newRoutingInfo); + + await waku2.relay.subscribeWithUnsubscribe( + [newDecoder], + messageCollector.callback + ); + await waku1.relay.send(newEncoder, { + payload: utf8ToBytes(messageText) + }); + + expect(await messageCollector.waitForMessages(1)).to.eq(true); + messageCollector.verifyReceivedMessage(0, { + ...TestExpectOptions, + expectedMessageText: messageText, + expectedContentTopic: newContentTopic + }); + } catch (err: unknown) { + if (testItem.invalidContentTopic) { + const e = err as Error; + expect(e.message).to.contain("Invalid generation field"); + } + } }); }); }); diff --git a/packages/tests/tests/relay/utils.ts b/packages/tests/tests/relay/utils.ts index 2feb0c7a69..f865a25515 100644 --- a/packages/tests/tests/relay/utils.ts +++ b/packages/tests/tests/relay/utils.ts @@ -1,12 +1,14 @@ import { createDecoder, createEncoder } from "@waku/core"; import { + AutoSharding, + ContentTopic, NetworkConfig, Protocols, RelayNode, - ShardInfo + type ShardId } from "@waku/interfaces"; import { createRelayNode } from "@waku/relay"; -import { contentTopicToPubsubTopic, Logger } from "@waku/utils"; +import { createRoutingInfo, Logger } from "@waku/utils"; import { Context } from "mocha"; import { @@ -16,25 +18,25 @@ import { ServiceNode } from "../../src/index.js"; +export const TestClusterId = 4; export const messageText = "Relay works!"; -export const TestContentTopic = "/test/1/waku-relay/utf8"; -export const TestShardInfo: ShardInfo = { - clusterId: 2, - shards: [4] +export const TestContentTopic = "/test/0/waku-relay/utf8"; + +export const TestNetworkConfig: AutoSharding = { + clusterId: TestClusterId, + numShardsInCluster: 8 }; -export const TestPubsubTopic = contentTopicToPubsubTopic( - TestContentTopic, - TestShardInfo.clusterId -); +export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic +}); export const TestEncoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopic: TestPubsubTopic + routingInfo: TestRoutingInfo }); -export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic); -export const TestWaitMessageOptions = { pubsubTopic: TestPubsubTopic }; +export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo); export const TestExpectOptions = { expectedContentTopic: TestContentTopic, - expectedPubsubTopic: TestPubsubTopic + expectedPubsubTopic: TestRoutingInfo.pubsubTopic }; export const log = new Logger("test:relay"); @@ -51,10 +53,14 @@ export async function waitForAllRemotePeers( export const runRelayNodes = ( context: Context, - networkConfig: NetworkConfig + networkConfig: NetworkConfig, + relayShards?: ShardId[], // Only for static sharding + contentTopics?: ContentTopic[] // Only for auto sharding ): Promise<[ServiceNode, RelayNode]> => runNodes({ networkConfig, + relayShards, + contentTopics, context, protocols: RELAY_PROTOCOLS, createNode: createRelayNode @@ -64,12 +70,14 @@ export async function runJSNodes(): Promise<[RelayNode, RelayNode]> { log.info("Starting JS Waku instances"); const [waku1, waku2] = await Promise.all([ createRelayNode({ + routingInfos: [TestRoutingInfo], staticNoiseKey: NOISE_KEY_1, - networkConfig: TestShardInfo + networkConfig: TestNetworkConfig }).then((waku) => waku.start().then(() => waku)), createRelayNode({ + routingInfos: [TestRoutingInfo], staticNoiseKey: NOISE_KEY_2, - networkConfig: TestShardInfo, + networkConfig: TestNetworkConfig, libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)) ]); diff --git a/packages/tests/tests/sharding/auto_sharding.spec.ts b/packages/tests/tests/sharding/auto_sharding.spec.ts index 15d0c9b476..306c211a11 100644 --- a/packages/tests/tests/sharding/auto_sharding.spec.ts +++ b/packages/tests/tests/sharding/auto_sharding.spec.ts @@ -1,9 +1,6 @@ -import { LightNode } from "@waku/interfaces"; +import { AutoSharding, LightNode } from "@waku/interfaces"; import { createEncoder, utf8ToBytes } from "@waku/sdk"; -import { - contentTopicToPubsubTopic, - contentTopicToShardIndex -} from "@waku/utils"; +import { contentTopicToPubsubTopic, createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -33,10 +30,14 @@ describe("Autosharding: Running Nodes", function () { // js-waku allows autosharding for cluster IDs different than 1 it("Cluster ID 0 - Default/Global Cluster", async function () { const clusterId = 0; + const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 }; + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); [serviceNodes, waku] = await runMultipleNodes( this.ctx, - { clusterId, contentTopics: [ContentTopic] }, + routingInfo, { lightpush: true, filter: true }, false, numServiceNodes, @@ -45,10 +46,7 @@ describe("Autosharding: Running Nodes", function () { const encoder = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: { - clusterId: clusterId, - shard: contentTopicToShardIndex(ContentTopic) - } + routingInfo }); const request = await waku.lightPush.send(encoder, { @@ -56,19 +54,19 @@ describe("Autosharding: Running Nodes", function () { }); expect(request.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: encoder.pubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true); }); it("Non TWN Cluster", async function () { const clusterId = 5; + const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 10 }; + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); [serviceNodes, waku] = await runMultipleNodes( this.ctx, - { clusterId, contentTopics: [ContentTopic] }, + routingInfo, { lightpush: true, filter: true }, false, numServiceNodes, @@ -77,10 +75,7 @@ describe("Autosharding: Running Nodes", function () { const encoder = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: { - clusterId: clusterId, - shard: contentTopicToShardIndex(ContentTopic) - } + routingInfo }); const request = await waku.lightPush.send(encoder, { @@ -88,11 +83,7 @@ describe("Autosharding: Running Nodes", function () { }); expect(request.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: encoder.pubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true); }); const numTest = 10; @@ -109,9 +100,14 @@ describe("Autosharding: Running Nodes", function () { it(`random auto sharding ${ i + 1 } - Cluster ID: ${clusterId}, Content Topic: ${ContentTopic}`, async function () { + const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 }; + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); + [serviceNodes, waku] = await runMultipleNodes( this.ctx, - { clusterId, contentTopics: [ContentTopic] }, + routingInfo, { lightpush: true, filter: true }, false, numServiceNodes, @@ -120,10 +116,7 @@ describe("Autosharding: Running Nodes", function () { const encoder = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: { - clusterId: clusterId, - shard: contentTopicToShardIndex(ContentTopic) - } + routingInfo }); const request = await waku.lightPush.send(encoder, { @@ -133,7 +126,7 @@ describe("Autosharding: Running Nodes", function () { expect(request.successes.length).to.eq(numServiceNodes); expect( await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: encoder.pubsubTopic + contentTopic: ContentTopic }) ).to.eq(true); }); @@ -143,7 +136,7 @@ describe("Autosharding: Running Nodes", function () { it("Wrong topic", async function () { const wrongTopic = "wrong_format"; try { - contentTopicToPubsubTopic(wrongTopic, clusterId); + contentTopicToPubsubTopic(wrongTopic, clusterId, 8); throw new Error("Wrong topic should've thrown an error"); } catch (err) { if ( @@ -156,10 +149,19 @@ describe("Autosharding: Running Nodes", function () { }); it("configure the node with multiple content topics", async function () { + const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 }; + const routingInfo = createRoutingInfo(networkConfig, { + contentTopic: ContentTopic + }); + [serviceNodes, waku] = await runMultipleNodes( this.ctx, - { clusterId, contentTopics: [ContentTopic, ContentTopic2] }, - { lightpush: true, filter: true }, + routingInfo, + { + lightpush: true, + filter: true, + contentTopic: [ContentTopic, ContentTopic2] + }, false, numServiceNodes, true @@ -167,18 +169,14 @@ describe("Autosharding: Running Nodes", function () { const encoder1 = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: { - clusterId: clusterId, - shard: contentTopicToShardIndex(ContentTopic) - } + routingInfo }); const encoder2 = createEncoder({ contentTopic: ContentTopic2, - pubsubTopicShardInfo: { - clusterId: clusterId, - shard: contentTopicToShardIndex(ContentTopic2) - } + routingInfo: createRoutingInfo(networkConfig, { + contentTopic: ContentTopic2 + }) }); const request1 = await waku.lightPush.send(encoder1, { @@ -187,7 +185,7 @@ describe("Autosharding: Running Nodes", function () { expect(request1.successes.length).to.eq(numServiceNodes); expect( await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: encoder1.pubsubTopic + contentTopic: ContentTopic }) ).to.eq(true); @@ -197,7 +195,7 @@ describe("Autosharding: Running Nodes", function () { expect(request2.successes.length).to.eq(numServiceNodes); expect( await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: encoder2.pubsubTopic + contentTopic: ContentTopic2 }) ).to.eq(true); }); diff --git a/packages/tests/tests/sharding/peer_management.spec.ts b/packages/tests/tests/sharding/peer_management.spec.ts index 6b42aa4e90..c0dec2e1fd 100644 --- a/packages/tests/tests/sharding/peer_management.spec.ts +++ b/packages/tests/tests/sharding/peer_management.spec.ts @@ -1,13 +1,8 @@ import { bootstrap } from "@libp2p/bootstrap"; import type { PeerId } from "@libp2p/interface"; import { wakuPeerExchangeDiscovery } from "@waku/discovery"; -import { - ContentTopicInfo, - createLightNode, - LightNode, - ShardInfo, - Tags -} from "@waku/sdk"; +import { AutoSharding } from "@waku/interfaces"; +import { createLightNode, LightNode, Tags } from "@waku/sdk"; import { contentTopicToShardIndex } from "@waku/utils"; import chai, { expect } from "chai"; import chaiAsPromised from "chai-as-promised"; @@ -48,14 +43,17 @@ describe("Static Sharding: Peer Management", function () { it("all px service nodes subscribed to the shard topic should be dialed", async function () { this.timeout(100_000); - const shardInfo: ShardInfo = { clusterId: clusterId, shards: [2] }; + const shard = 2; + const numShardsInCluster = 8; + const networkConfig: AutoSharding = { clusterId, numShardsInCluster }; await nwaku1.start({ discv5Discovery: true, peerExchange: true, relay: true, clusterId: clusterId, - shard: [2] + shard: [shard], + numShardsInNetwork: numShardsInCluster }); const enr1 = (await nwaku1.info()).enrUri; @@ -66,7 +64,8 @@ describe("Static Sharding: Peer Management", function () { discv5BootstrapNode: enr1, relay: true, clusterId: clusterId, - shard: [2] + shard: [shard], + numShardsInNetwork: numShardsInCluster }); const enr2 = (await nwaku2.info()).enrUri; @@ -77,12 +76,13 @@ describe("Static Sharding: Peer Management", function () { discv5BootstrapNode: enr2, relay: true, clusterId: clusterId, - shard: [2] + shard: [shard], + numShardsInNetwork: numShardsInCluster }); const nwaku3Ma = await nwaku3.getMultiaddrWithId(); waku = await createLightNode({ - networkConfig: shardInfo, + networkConfig: networkConfig, libp2p: { peerDiscovery: [ bootstrap({ list: [nwaku3Ma.toString()] }), @@ -118,9 +118,11 @@ describe("Static Sharding: Peer Management", function () { expect(dialPeerSpy.callCount).to.equal(3); }); - it("px service nodes not subscribed to the shard should not be dialed", async function () { + it("px service nodes in same cluster, no matter the shard, should be dialed", async function () { this.timeout(100_000); - const shardInfoToDial: ShardInfo = { clusterId: clusterId, shards: [2] }; + + const numShardsInCluster = 8; + const networkConfig: AutoSharding = { clusterId, numShardsInCluster }; // this service node is not subscribed to the shard await nwaku1.start({ @@ -128,7 +130,8 @@ describe("Static Sharding: Peer Management", function () { discv5Discovery: true, peerExchange: true, clusterId: clusterId, - shard: [1] + shard: [1], + numShardsInNetwork: numShardsInCluster }); const enr1 = (await nwaku1.info()).enrUri; @@ -139,7 +142,8 @@ describe("Static Sharding: Peer Management", function () { peerExchange: true, discv5BootstrapNode: enr1, clusterId: clusterId, - shard: [2] + shard: [2], + numShardsInNetwork: numShardsInCluster }); const enr2 = (await nwaku2.info()).enrUri; @@ -150,12 +154,13 @@ describe("Static Sharding: Peer Management", function () { peerExchange: true, discv5BootstrapNode: enr2, clusterId: clusterId, - shard: [2] + shard: [2], + numShardsInNetwork: numShardsInCluster }); const nwaku3Ma = await nwaku3.getMultiaddrWithId(); waku = await createLightNode({ - networkConfig: shardInfoToDial, + networkConfig: networkConfig, libp2p: { peerDiscovery: [ bootstrap({ list: [nwaku3Ma.toString()] }), @@ -178,7 +183,7 @@ describe("Static Sharding: Peer Management", function () { const tags = Array.from(peer.tags.keys()); if (tags.includes(Tags.PEER_EXCHANGE)) { pxPeersDiscovered.add(peerId); - if (pxPeersDiscovered.size === 1) { + if (pxPeersDiscovered.size === 2) { resolve(); } } @@ -187,7 +192,7 @@ describe("Static Sharding: Peer Management", function () { }); await delay(1000); - expect(dialPeerSpy.callCount).to.equal(2); + expect(dialPeerSpy.callCount).to.equal(3); }); }); }); @@ -219,9 +224,9 @@ describe("Autosharding: Peer Management", function () { it("all px service nodes subscribed to the shard topic should be dialed", async function () { this.timeout(100_000); - const contentTopicInfo: ContentTopicInfo = { + const networkConfig: AutoSharding = { clusterId: clusterId, - contentTopics: [ContentTopic] + numShardsInCluster: 8 }; await nwaku1.start({ @@ -259,7 +264,7 @@ describe("Autosharding: Peer Management", function () { const nwaku3Ma = await nwaku3.getMultiaddrWithId(); waku = await createLightNode({ - networkConfig: contentTopicInfo, + networkConfig: networkConfig, libp2p: { peerDiscovery: [ bootstrap({ list: [nwaku3Ma.toString()] }), @@ -294,82 +299,5 @@ describe("Autosharding: Peer Management", function () { expect(dialPeerSpy.callCount).to.equal(3); }); - - it("px service nodes not subscribed to the shard should not be dialed", async function () { - this.timeout(100_000); - const contentTopicInfoToDial: ContentTopicInfo = { - clusterId: clusterId, - contentTopics: [ContentTopic] - }; - - // this service node is not subscribed to the shard - await nwaku1.start({ - relay: true, - discv5Discovery: true, - peerExchange: true, - clusterId: 3, - shard: Shard - }); - - const enr1 = (await nwaku1.info()).enrUri; - - await nwaku2.start({ - relay: true, - discv5Discovery: true, - peerExchange: true, - discv5BootstrapNode: enr1, - clusterId: clusterId, - shard: Shard, - contentTopic: [ContentTopic] - }); - - const enr2 = (await nwaku2.info()).enrUri; - - await nwaku3.start({ - relay: true, - discv5Discovery: true, - peerExchange: true, - discv5BootstrapNode: enr2, - clusterId: clusterId, - shard: Shard, - contentTopic: [ContentTopic] - }); - const nwaku3Ma = await nwaku3.getMultiaddrWithId(); - - waku = await createLightNode({ - networkConfig: contentTopicInfoToDial, - libp2p: { - peerDiscovery: [ - bootstrap({ list: [nwaku3Ma.toString()] }), - wakuPeerExchangeDiscovery() - ] - } - }); - - dialPeerSpy = Sinon.spy((waku as any).libp2p, "dial"); - - await waku.start(); - - const pxPeersDiscovered = new Set(); - - await new Promise((resolve) => { - waku.libp2p.addEventListener("peer:discovery", (evt) => { - return void (async () => { - const peerId = evt.detail.id; - const peer = await waku.libp2p.peerStore.get(peerId); - const tags = Array.from(peer.tags.keys()); - if (tags.includes(Tags.PEER_EXCHANGE)) { - pxPeersDiscovered.add(peerId); - if (pxPeersDiscovered.size === 1) { - resolve(); - } - } - })(); - }); - }); - - await delay(1000); - expect(dialPeerSpy.callCount).to.equal(2); - }); }); }); diff --git a/packages/tests/tests/sharding/static_sharding.spec.ts b/packages/tests/tests/sharding/static_sharding.spec.ts index aa55abac45..9ca84ed25d 100644 --- a/packages/tests/tests/sharding/static_sharding.spec.ts +++ b/packages/tests/tests/sharding/static_sharding.spec.ts @@ -1,15 +1,10 @@ -import { LightNode, SingleShardInfo } from "@waku/interfaces"; +import { LightNode, StaticSharding } from "@waku/interfaces"; import { createEncoder, utf8ToBytes } from "@waku/sdk"; -import { - shardInfoToPubsubTopics, - singleShardInfosToShardInfo, - singleShardInfoToPubsubTopic -} from "@waku/utils"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { afterEachCustom, - beforeEachCustom, runMultipleNodes, ServiceNodesFleet, teardownNodesWithRedundancy @@ -30,13 +25,15 @@ describe("Static Sharding: Running Nodes", function () { } }); - it("shard 0", async function () { - const singleShardInfo = { clusterId: 0, shard: 0 }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); + it("Cluster id 0, shard 0", async function () { + const clusterId = 0; + const shardId = 0; + const networkConfig: StaticSharding = { clusterId }; + const routingInfo = createRoutingInfo(networkConfig, { shardId }); [serviceNodes, waku] = await runMultipleNodes( this.ctx, - shardInfo, + routingInfo, { lightpush: true, filter: true }, false, numServiceNodes, @@ -45,32 +42,27 @@ describe("Static Sharding: Running Nodes", function () { const encoder = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo + routingInfo }); - expect(encoder.pubsubTopic).to.eq( - singleShardInfoToPubsubTopic(singleShardInfo) - ); const request = await waku.lightPush.send(encoder, { payload: utf8ToBytes("Hello World") }); expect(request.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: encoder.pubsubTopic - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true); }); // dedicated test for Default Cluster ID 0 - it("Cluster ID 0 - Default/Global Cluster", async function () { - const singleShardInfo = { clusterId: 0, shard: 1 }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); + it("Cluster ID 0, shard 1", async function () { + const clusterId = 0; + const shardId = 1; + const networkConfig: StaticSharding = { clusterId }; + const routingInfo = createRoutingInfo(networkConfig, { shardId }); [serviceNodes, waku] = await runMultipleNodes( this.ctx, - shardInfo, + routingInfo, { lightpush: true, filter: true }, false, numServiceNodes, @@ -79,7 +71,7 @@ describe("Static Sharding: Running Nodes", function () { const encoder = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo + routingInfo }); const request = await waku.lightPush.send(encoder, { @@ -87,11 +79,7 @@ describe("Static Sharding: Running Nodes", function () { }); expect(request.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: shardInfoToPubsubTopics(shardInfo)[0] - }) - ).to.eq(true); + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true); }); const numTest = 10; @@ -102,15 +90,15 @@ describe("Static Sharding: Running Nodes", function () { // Random shardId between 1 and 1000 const shardId = Math.floor(Math.random() * 1000) + 1; + const networkConfig: StaticSharding = { clusterId }; + const routingInfo = createRoutingInfo(networkConfig, { shardId }); + it(`random static sharding ${ i + 1 } - Cluster ID: ${clusterId}, Shard ID: ${shardId}`, async function () { - const singleShardInfo = { clusterId: clusterId, shard: shardId }; - const shardInfo = singleShardInfosToShardInfo([singleShardInfo]); - [serviceNodes, waku] = await runMultipleNodes( this.ctx, - shardInfo, + routingInfo, { lightpush: true, filter: true }, false, numServiceNodes, @@ -119,7 +107,7 @@ describe("Static Sharding: Running Nodes", function () { const encoder = createEncoder({ contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo + routingInfo }); const request = await waku.lightPush.send(encoder, { @@ -127,75 +115,9 @@ describe("Static Sharding: Running Nodes", function () { }); expect(request.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes.messageCollector.waitForMessages(1, { - pubsubTopic: shardInfoToPubsubTopics(shardInfo)[0] - }) - ).to.eq(true); - }); - } - - describe("Others", function () { - const clusterId = 2; - - const singleShardInfo1: SingleShardInfo = { - clusterId: clusterId, - shard: 2 - }; - const singleShardInfo2: SingleShardInfo = { - clusterId: clusterId, - shard: 3 - }; - - beforeEachCustom(this, async () => { - [serviceNodes, waku] = await runMultipleNodes( - this.ctx, - { clusterId, shards: [2, 3] }, - { lightpush: true, filter: true }, - false, - numServiceNodes, + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( true ); }); - - afterEachCustom(this, async () => { - if (serviceNodes) { - await teardownNodesWithRedundancy(serviceNodes, waku ?? []); - } - }); - - it("configure the node with multiple pubsub topics", async function () { - const encoder1 = createEncoder({ - contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo1 - }); - - const encoder2 = createEncoder({ - contentTopic: ContentTopic, - pubsubTopicShardInfo: singleShardInfo2 - }); - - const request1 = await waku?.lightPush.send(encoder1, { - payload: utf8ToBytes("Hello World2") - }); - - expect(request1?.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes?.messageCollector.waitForMessages(1, { - pubsubTopic: encoder1.pubsubTopic - }) - ).to.eq(true); - - const request2 = await waku?.lightPush.send(encoder2, { - payload: utf8ToBytes("Hello World3") - }); - - expect(request2?.successes.length).to.eq(numServiceNodes); - expect( - await serviceNodes?.messageCollector.waitForMessages(1, { - pubsubTopic: encoder2.pubsubTopic - }) - ).to.eq(true); - }); - }); + } }); diff --git a/packages/tests/tests/store/cursor.node.spec.ts b/packages/tests/tests/store/cursor.node.spec.ts index cd3316a65d..ebac2d1a3a 100644 --- a/packages/tests/tests/store/cursor.node.spec.ts +++ b/packages/tests/tests/store/cursor.node.spec.ts @@ -14,9 +14,11 @@ import { runStoreNodes, sendMessages, startAndConnectLightNode, + TestContentTopic, TestDecoder, TestDecoder2, - TestShardInfo, + TestNetworkConfig, + TestRoutingInfo, totalMsgs } from "./utils.js"; @@ -27,7 +29,12 @@ describe("Waku Store, cursor", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes( + this.ctx, + TestNetworkConfig, + [], + [TestContentTopic] + ); }); afterEachCustom(this, async () => { @@ -43,11 +50,12 @@ describe("Waku Store, cursor", function () { [110, 120] ].forEach(([cursorIndex, messageCount]) => { it(`Passing a valid cursor at ${cursorIndex} index when there are ${messageCount} messages`, async function () { + console.log(nwaku); await sendMessages( nwaku, messageCount, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); // messages in reversed order (first message at last index) @@ -95,9 +103,9 @@ describe("Waku Store, cursor", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); - waku2 = await startAndConnectLightNode(nwaku, TestShardInfo); + waku2 = await startAndConnectLightNode(nwaku, TestNetworkConfig); // messages in reversed order (first message at last index) const messages: DecodedMessage[] = []; @@ -137,12 +145,7 @@ describe("Waku Store, cursor", function () { this.skip(); } - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); + await sendMessages(nwaku, totalMsgs, TestContentTopic, TestRoutingInfo); const messages: DecodedMessage[] = []; for await (const page of waku.store.queryGenerator([TestDecoder])) { @@ -170,7 +173,7 @@ describe("Waku Store, cursor", function () { if ( !(err instanceof Error) || !err.message.includes( - "Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DRIVER_ERROR: cursor not found" + "Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DIRVER_ERROR: cursor not found" ) ) { throw err; @@ -187,7 +190,7 @@ describe("Waku Store, cursor", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages: DecodedMessage[] = []; @@ -196,7 +199,7 @@ describe("Waku Store, cursor", function () { messages.push(msg as DecodedMessage); } } - messages[5].pubsubTopic = TestDecoder2.pubsubTopic; + messages[5].pubsubTopic = TestDecoder2.routingInfo.pubsubTopic; const cursor = waku.store.createCursor(messages[5]); try { @@ -210,7 +213,7 @@ describe("Waku Store, cursor", function () { if ( !(err instanceof Error) || !err.message.includes( - "Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DRIVER_ERROR: cursor not found" + "Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DIRVER_ERROR: cursor not found" ) ) { throw err; diff --git a/packages/tests/tests/store/different_static_shards.spec.ts b/packages/tests/tests/store/different_static_shards.spec.ts new file mode 100644 index 0000000000..bb71b4a2d4 --- /dev/null +++ b/packages/tests/tests/store/different_static_shards.spec.ts @@ -0,0 +1,190 @@ +import { createDecoder } from "@waku/core"; +import { IMessage, LightNode, ShardId, StaticSharding } from "@waku/interfaces"; +import { Protocols } from "@waku/sdk"; +import { createRoutingInfo } from "@waku/utils"; +import { expect } from "chai"; + +import { + afterEachCustom, + beforeEachCustom, + makeLogFileName, + ServiceNode, + tearDownNodes +} from "../../src/index.js"; + +import { + processQueriedMessages, + runStoreNodes, + sendMessages, + totalMsgs +} from "./utils.js"; + +const StaticTestClusterId = 2; +const StaticTestRelayShards = [1, 2]; +const StaticTestNetworkConfig: StaticSharding = { + clusterId: StaticTestClusterId +}; + +const TestShardOne: ShardId = 1; +const TestContentTopicOne = "/test/0/one/proto"; +const TestRoutingInfoOne = createRoutingInfo(StaticTestNetworkConfig, { + shardId: TestShardOne +}); + +const TestDecoderShardOne = createDecoder( + TestContentTopicOne, + TestRoutingInfoOne +); + +const TestShardTwo: ShardId = 2; +const TestContentTopicTwo = "/test/0/two/proto"; +const TestRoutingInfoTwo = createRoutingInfo(StaticTestNetworkConfig, { + shardId: TestShardTwo +}); + +const TestDecoderShardTwo = createDecoder( + TestContentTopicTwo, + TestRoutingInfoTwo +); + +// TODO: Same tests but with auto-sharding +describe("Waku Store, different static shards", function () { + this.timeout(15000); + let waku: LightNode; + let nwaku: ServiceNode; + let nwaku2: ServiceNode; + + beforeEachCustom(this, async () => { + [nwaku, waku] = await runStoreNodes( + this.ctx, + StaticTestNetworkConfig, + StaticTestRelayShards + ); + }); + + afterEachCustom(this, async () => { + await tearDownNodes([nwaku, nwaku2], waku); + }); + + it("Generator, one shard", async function () { + await sendMessages( + nwaku, + totalMsgs, + TestContentTopicOne, + TestRoutingInfoOne + ); + + const messages = await processQueriedMessages( + waku, + [TestDecoderShardOne], + TestDecoderShardOne.routingInfo.pubsubTopic + ); + + expect(messages?.length).eq(totalMsgs); + const result = messages?.findIndex((msg) => { + return msg.payload![0]! === 0; + }); + expect(result).to.not.eq(-1); + }); + + it("Generator, 2 different shards", async function () { + this.timeout(10000); + + const totalMsgs = 10; + await sendMessages( + nwaku, + totalMsgs, + TestContentTopicOne, + TestRoutingInfoOne + ); + await sendMessages( + nwaku, + totalMsgs, + TestContentTopicTwo, + TestRoutingInfoTwo + ); + + const customMessages = await processQueriedMessages( + waku, + [TestDecoderShardOne], + TestDecoderShardOne.routingInfo.pubsubTopic + ); + expect(customMessages?.length).eq(totalMsgs); + const result1 = customMessages?.findIndex((msg) => { + return msg.payload![0]! === 0; + }); + expect(result1).to.not.eq(-1); + + const testMessages = await processQueriedMessages( + waku, + [TestDecoderShardTwo], + TestDecoderShardTwo.routingInfo.pubsubTopic + ); + expect(testMessages?.length).eq(totalMsgs); + const result2 = testMessages?.findIndex((msg) => { + return msg.payload![0]! === 0; + }); + expect(result2).to.not.eq(-1); + }); + + it("Generator, 2 nwaku nodes each with different shards", async function () { + this.timeout(10000); + + await tearDownNodes([nwaku], []); + + // make sure each nwaku node operates on dedicated shard only + nwaku = new ServiceNode(makeLogFileName(this) + "1"); + await nwaku.start({ + store: true, + clusterId: StaticTestClusterId, + shard: [1], + relay: true + }); + + // Set up and start a new nwaku node with Default Pubsubtopic + nwaku2 = new ServiceNode(makeLogFileName(this) + "2"); + await nwaku2.start({ + store: true, + clusterId: StaticTestClusterId, + shard: [2], + relay: true + }); + + const totalMsgs = 10; + await sendMessages( + nwaku, + totalMsgs, + TestDecoderShardOne.contentTopic, + TestDecoderShardOne.routingInfo + ); + await sendMessages( + nwaku2, + totalMsgs, + TestDecoderShardTwo.contentTopic, + TestDecoderShardTwo.routingInfo + ); + + await waku.dial(await nwaku.getMultiaddrWithId()); + await waku.dial(await nwaku2.getMultiaddrWithId()); + await waku.waitForPeers([Protocols.Store]); + + let customMessages: IMessage[] = []; + let testMessages: IMessage[] = []; + + while ( + customMessages.length != totalMsgs || + testMessages.length != totalMsgs + ) { + customMessages = await processQueriedMessages( + waku, + [TestDecoderShardOne], + TestDecoderShardOne.routingInfo.pubsubTopic + ); + testMessages = await processQueriedMessages( + waku, + [TestDecoderShardTwo], + TestDecoderShardTwo.routingInfo.pubsubTopic + ); + } + }); +}); diff --git a/packages/tests/tests/store/error_handling.node.spec.ts b/packages/tests/tests/store/error_handling.node.spec.ts index 89f80d5cdf..ebb554fd6f 100644 --- a/packages/tests/tests/store/error_handling.node.spec.ts +++ b/packages/tests/tests/store/error_handling.node.spec.ts @@ -1,5 +1,5 @@ import { IMessage, type LightNode } from "@waku/interfaces"; -import { determinePubsubTopic } from "@waku/utils"; +import { formatPubsubTopic } from "@waku/utils"; import { expect } from "chai"; import { @@ -14,7 +14,7 @@ import { runStoreNodes, TestDecoder, TestDecoder2, - TestShardInfo + TestNetworkConfig } from "./utils.js"; describe("Waku Store, error handling", function () { @@ -23,7 +23,7 @@ describe("Waku Store, error handling", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -68,7 +68,7 @@ describe("Waku Store, error handling", function () { }); it("Query Generator, No message returned", async function () { - const WrongTestPubsubTopic = determinePubsubTopic("/test/1/wrong/utf8"); + const WrongTestPubsubTopic = formatPubsubTopic(43, 53); const messages = await processQueriedMessages( waku, [TestDecoder], diff --git a/packages/tests/tests/store/index.node.spec.ts b/packages/tests/tests/store/index.node.spec.ts index c95acffc86..4d6d23470e 100644 --- a/packages/tests/tests/store/index.node.spec.ts +++ b/packages/tests/tests/store/index.node.spec.ts @@ -14,6 +14,7 @@ import { createDecoder as createSymDecoder, createEncoder as createSymEncoder } from "@waku/message-encryption/symmetric"; +import { createRoutingInfo } from "@waku/utils"; import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; import { equals } from "uint8arrays/equals"; @@ -35,12 +36,11 @@ import { runStoreNodes, sendMessages, startAndConnectLightNode, - TestContentTopic1, + TestContentTopic, TestDecoder, - TestDecoder2, TestEncoder, - TestPubsubTopic1, - TestShardInfo, + TestNetworkConfig, + TestRoutingInfo, totalMsgs } from "./utils.js"; @@ -51,7 +51,7 @@ describe("Waku Store, general", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -63,13 +63,13 @@ describe("Waku Store, general", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages = await processQueriedMessages( waku, [TestDecoder], - TestDecoder.pubsubTopic + TestRoutingInfo.pubsubTopic ); expect(messages?.length).eq(totalMsgs); @@ -89,7 +89,7 @@ describe("Waku Store, general", function () { payload: utf8ToBytes(testItem["value"]), contentTopic: TestDecoder.contentTopic }), - TestDecoder.pubsubTopic + TestRoutingInfo ) ).to.eq(true); await delay(1); // to ensure each timestamp is unique. @@ -99,7 +99,7 @@ describe("Waku Store, general", function () { messageCollector.list = await processQueriedMessages( waku, [TestDecoder], - TestDecoder.pubsubTopic + TestRoutingInfo.pubsubTopic ); // checking that all message sent were retrieved @@ -111,57 +111,69 @@ describe("Waku Store, general", function () { }); it("Query generator for multiple messages with multiple decoders", async function () { - const SecondDecoder = createDecoder( - TestDecoder2.contentTopic, - TestDecoder.pubsubTopic - ); + const secondContentTopic = "/test/1/waku-store-two/utf8"; + const secondRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: secondContentTopic + }); + const secondDecoder = createDecoder(secondContentTopic, secondRoutingInfo); await nwaku.sendMessage( ServiceNode.toMessageRpcQuery({ payload: utf8ToBytes("M1"), - contentTopic: TestDecoder.contentTopic + contentTopic: TestContentTopic }), - TestDecoder.pubsubTopic + TestRoutingInfo ); await nwaku.sendMessage( ServiceNode.toMessageRpcQuery({ payload: utf8ToBytes("M2"), - contentTopic: SecondDecoder.contentTopic + contentTopic: secondContentTopic }), - SecondDecoder.pubsubTopic + secondRoutingInfo ); const messageCollector = new MessageCollector(nwaku); messageCollector.list = await processQueriedMessages( waku, - [TestDecoder, SecondDecoder], - TestDecoder.pubsubTopic + [TestDecoder, secondDecoder], + TestRoutingInfo.pubsubTopic ); expect(messageCollector.hasMessage(TestDecoder.contentTopic, "M1")).to.eq( true ); - expect(messageCollector.hasMessage(SecondDecoder.contentTopic, "M2")).to.eq( - true - ); + expect(messageCollector.hasMessage(secondContentTopic, "M2")).to.eq(true); }); it("Query generator for multiple messages with different content topic format", async function () { for (const testItem of TEST_STRING) { + if (testItem.invalidContentTopic) continue; + + const contentTopic = `/test/1/${testItem.value}/proto`; + const routingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic + }); expect( await nwaku.sendMessage( ServiceNode.toMessageRpcQuery({ payload: utf8ToBytes(messageText), - contentTopic: testItem["value"] + contentTopic }), - TestDecoder.pubsubTopic + routingInfo ) ).to.eq(true); await delay(1); // to ensure each timestamp is unique. } for (const testItem of TEST_STRING) { + if (testItem.invalidContentTopic) continue; + + const contentTopic = `/test/1/${testItem.value}/proto`; + const routingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic + }); + for await (const query of waku.store.queryGenerator([ - createDecoder(testItem["value"], TestDecoder.pubsubTopic) + createDecoder(contentTopic, routingInfo) ])) { for await (const msg of query) { expect(equals(msg!.payload, utf8ToBytes(messageText))).to.eq(true); @@ -175,7 +187,7 @@ describe("Waku Store, general", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages: IMessage[] = []; @@ -201,7 +213,7 @@ describe("Waku Store, general", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const desiredMsgs = 14; @@ -254,32 +266,28 @@ describe("Waku Store, general", function () { const eciesEncoder = createEciesEncoder({ contentTopic: asymTopic, publicKey, - pubsubTopic: TestPubsubTopic1 + routingInfo: TestRoutingInfo }); const symEncoder = createSymEncoder({ contentTopic: symTopic, symKey, - pubsubTopic: TestPubsubTopic1 + routingInfo: TestRoutingInfo }); const otherEncoder = createEciesEncoder({ - contentTopic: TestContentTopic1, - pubsubTopic: TestPubsubTopic1, + contentTopic: TestContentTopic, + routingInfo: TestRoutingInfo, publicKey: getPublicKey(generatePrivateKey()) }); const eciesDecoder = createEciesDecoder( asymTopic, - privateKey, - TestDecoder.pubsubTopic - ); - const symDecoder = createSymDecoder( - symTopic, - symKey, - TestDecoder.pubsubTopic + TestRoutingInfo, + privateKey ); + const symDecoder = createSymDecoder(symTopic, TestRoutingInfo, symKey); - waku2 = await startAndConnectLightNode(nwaku, TestShardInfo); + waku2 = await startAndConnectLightNode(nwaku, TestNetworkConfig); const nimWakuMultiaddr = await nwaku.getMultiaddrWithId(); await waku2.dial(nimWakuMultiaddr); @@ -320,7 +328,7 @@ describe("Waku Store, general", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const desiredMsgs = 14; @@ -339,17 +347,12 @@ describe("Waku Store, general", function () { it("Query generator for 2000 messages", async function () { this.timeout(40000); - await sendMessages( - nwaku, - 2000, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); + await sendMessages(nwaku, 2000, TestDecoder.contentTopic, TestRoutingInfo); const messages = await processQueriedMessages( waku, [TestDecoder], - TestDecoder.pubsubTopic + TestRoutingInfo.pubsubTopic ); expect(messages?.length).eq(2000); diff --git a/packages/tests/tests/store/message_hash.spec.ts b/packages/tests/tests/store/message_hash.spec.ts index bba98ae109..d97077e747 100644 --- a/packages/tests/tests/store/message_hash.spec.ts +++ b/packages/tests/tests/store/message_hash.spec.ts @@ -13,7 +13,8 @@ import { runStoreNodes, sendMessages, TestDecoder, - TestShardInfo, + TestNetworkConfig, + TestRoutingInfo, totalMsgs } from "./utils.js"; @@ -23,7 +24,7 @@ describe("Waku Store, message hash query", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -35,7 +36,7 @@ describe("Waku Store, message hash query", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic, + TestDecoder.routingInfo, true ); @@ -54,11 +55,11 @@ describe("Waku Store, message hash query", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic, + TestRoutingInfo, true ); const messageHashes = sentMessages.map((msg) => - messageHash(TestDecoder.pubsubTopic, { + messageHash(TestRoutingInfo.pubsubTopic, { payload: Buffer.from(msg.payload, "base64"), contentTopic: msg.contentTopic || TestDecoder.contentTopic, timestamp: msg.timestamp || undefined, @@ -72,7 +73,7 @@ describe("Waku Store, message hash query", function () { const messages: IDecodedMessage[] = []; for await (const page of waku.store.queryGenerator([TestDecoder], { messageHashes, - pubsubTopic: TestDecoder.pubsubTopic + routingInfo: TestRoutingInfo })) { for await (const msg of page) { messages.push(msg as IDecodedMessage); diff --git a/packages/tests/tests/store/multiple_pubsub.spec.ts b/packages/tests/tests/store/multiple_pubsub.spec.ts deleted file mode 100644 index caf4204765..0000000000 --- a/packages/tests/tests/store/multiple_pubsub.spec.ts +++ /dev/null @@ -1,438 +0,0 @@ -import { createDecoder } from "@waku/core"; -import type { ContentTopicInfo, IMessage, LightNode } from "@waku/interfaces"; -import { createLightNode, Protocols } from "@waku/sdk"; -import { - contentTopicToPubsubTopic, - pubsubTopicToSingleShardInfo -} from "@waku/utils"; -import { expect } from "chai"; - -import { - afterEachCustom, - beforeEachCustom, - makeLogFileName, - NOISE_KEY_1, - ServiceNode, - tearDownNodes -} from "../../src/index.js"; - -import { - processQueriedMessages, - runStoreNodes, - sendMessages, - sendMessagesAutosharding, - TestDecoder, - TestDecoder2, - TestShardInfo, - totalMsgs -} from "./utils.js"; - -describe("Waku Store, custom pubsub topic", function () { - this.timeout(15000); - let waku: LightNode; - let nwaku: ServiceNode; - let nwaku2: ServiceNode; - - beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); - }); - - afterEachCustom(this, async () => { - await tearDownNodes([nwaku, nwaku2], waku); - }); - - it("Generator, custom pubsub topic", async function () { - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); - - const messages = await processQueriedMessages( - waku, - [TestDecoder], - TestDecoder.pubsubTopic - ); - - expect(messages?.length).eq(totalMsgs); - const result = messages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result).to.not.eq(-1); - }); - - it("Generator, 2 different pubsubtopics", async function () { - this.timeout(10000); - - const totalMsgs = 10; - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); - await sendMessages( - nwaku, - totalMsgs, - TestDecoder2.contentTopic, - TestDecoder2.pubsubTopic - ); - - const customMessages = await processQueriedMessages( - waku, - [TestDecoder], - TestDecoder.pubsubTopic - ); - expect(customMessages?.length).eq(totalMsgs); - const result1 = customMessages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result1).to.not.eq(-1); - - const testMessages = await processQueriedMessages( - waku, - [TestDecoder2], - TestDecoder2.pubsubTopic - ); - expect(testMessages?.length).eq(totalMsgs); - const result2 = testMessages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result2).to.not.eq(-1); - }); - - it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () { - this.timeout(10000); - - await tearDownNodes([nwaku], []); - - // make sure each nwaku node operates on dedicated shard only - nwaku = new ServiceNode(makeLogFileName(this) + "1"); - await nwaku.start({ - store: true, - clusterId: TestShardInfo.clusterId, - shard: [TestShardInfo.shards[0]], - relay: true - }); - - // Set up and start a new nwaku node with Default Pubsubtopic - nwaku2 = new ServiceNode(makeLogFileName(this) + "2"); - await nwaku2.start({ - store: true, - clusterId: TestShardInfo.clusterId, - shard: [TestShardInfo.shards[1]], - relay: true - }); - - const totalMsgs = 10; - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); - await sendMessages( - nwaku2, - totalMsgs, - TestDecoder2.contentTopic, - TestDecoder2.pubsubTopic - ); - - await waku.dial(await nwaku.getMultiaddrWithId()); - await waku.dial(await nwaku2.getMultiaddrWithId()); - await waku.waitForPeers([Protocols.Store]); - - let customMessages: IMessage[] = []; - let testMessages: IMessage[] = []; - - while ( - customMessages.length != totalMsgs || - testMessages.length != totalMsgs - ) { - customMessages = await processQueriedMessages( - waku, - [TestDecoder], - TestDecoder.pubsubTopic - ); - testMessages = await processQueriedMessages( - waku, - [TestDecoder2], - TestDecoder2.pubsubTopic - ); - } - }); -}); - -// TODO: blocked by https://github.com/waku-org/nwaku/issues/3362 -describe.skip("Waku Store (Autosharding), custom pubsub topic", function () { - this.timeout(15000); - let waku: LightNode; - let nwaku: ServiceNode; - let nwaku2: ServiceNode; - - const customContentTopic1 = "/waku/2/content/utf8"; - const customContentTopic2 = "/myapp/1/latest/proto"; - const clusterId = 5; - const Shard2 = [1]; - const autoshardingPubsubTopic1 = contentTopicToPubsubTopic( - customContentTopic1, - clusterId - ); - const autoshardingPubsubTopic2 = contentTopicToPubsubTopic( - customContentTopic2, - clusterId - ); - const customDecoder1 = createDecoder( - customContentTopic1, - pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1) - ); - const customDecoder2 = createDecoder( - customContentTopic2, - pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2) - ); - const contentTopicInfoBothShards: ContentTopicInfo = { - clusterId, - contentTopics: [customContentTopic1, customContentTopic2] - }; - - beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, contentTopicInfoBothShards); - }); - - afterEachCustom(this, async () => { - await tearDownNodes([nwaku, nwaku2], waku); - }); - - it("Generator, custom pubsub topic", async function () { - await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1); - - const messages = await processQueriedMessages( - waku, - [customDecoder1], - autoshardingPubsubTopic1 - ); - - expect(messages?.length).eq(totalMsgs); - const result = messages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result).to.not.eq(-1); - }); - - it("Generator, 2 different pubsubtopics", async function () { - this.timeout(10000); - - const totalMsgs = 10; - await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1); - await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic2); - - const customMessages = await processQueriedMessages( - waku, - [customDecoder1], - autoshardingPubsubTopic1 - ); - expect(customMessages?.length).eq(totalMsgs); - const result1 = customMessages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result1).to.not.eq(-1); - - const testMessages = await processQueriedMessages( - waku, - [customDecoder2], - autoshardingPubsubTopic2 - ); - expect(testMessages?.length).eq(totalMsgs); - const result2 = testMessages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result2).to.not.eq(-1); - }); - - it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () { - this.timeout(10000); - - // Set up and start a new nwaku node with Default Pubsubtopic - nwaku2 = new ServiceNode(makeLogFileName(this) + "2"); - await nwaku2.start({ - store: true, - contentTopic: [customContentTopic2], - relay: true, - clusterId, - shard: Shard2 - }); - await nwaku2.ensureSubscriptionsAutosharding([customContentTopic2]); - - const totalMsgs = 10; - await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1); - await sendMessagesAutosharding(nwaku2, totalMsgs, customContentTopic2); - - waku = await createLightNode({ - staticNoiseKey: NOISE_KEY_1, - networkConfig: contentTopicInfoBothShards - }); - await waku.start(); - - await waku.dial(await nwaku.getMultiaddrWithId()); - await waku.dial(await nwaku2.getMultiaddrWithId()); - await waku.waitForPeers([Protocols.Store]); - - let customMessages: IMessage[] = []; - let testMessages: IMessage[] = []; - - while ( - customMessages.length != totalMsgs || - testMessages.length != totalMsgs - ) { - customMessages = await processQueriedMessages( - waku, - [customDecoder1], - autoshardingPubsubTopic1 - ); - testMessages = await processQueriedMessages( - waku, - [customDecoder2], - autoshardingPubsubTopic2 - ); - } - }); -}); - -describe("Waku Store (named sharding), custom pubsub topic", function () { - this.timeout(15000); - let waku: LightNode; - let nwaku: ServiceNode; - let nwaku2: ServiceNode; - - beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); - }); - - afterEachCustom(this, async () => { - await tearDownNodes([nwaku, nwaku2], waku); - }); - - it("Generator, custom pubsub topic", async function () { - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); - - const messages = await processQueriedMessages( - waku, - [TestDecoder], - TestDecoder.pubsubTopic - ); - - expect(messages?.length).eq(totalMsgs); - const result = messages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result).to.not.eq(-1); - }); - - it("Generator, 2 different pubsubtopics", async function () { - this.timeout(10000); - - const totalMsgs = 10; - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); - await sendMessages( - nwaku, - totalMsgs, - TestDecoder2.contentTopic, - TestDecoder2.pubsubTopic - ); - - const customMessages = await processQueriedMessages( - waku, - [TestDecoder], - TestDecoder.pubsubTopic - ); - expect(customMessages?.length).eq(totalMsgs); - const result1 = customMessages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result1).to.not.eq(-1); - - const testMessages = await processQueriedMessages( - waku, - [TestDecoder2], - TestDecoder2.pubsubTopic - ); - expect(testMessages?.length).eq(totalMsgs); - const result2 = testMessages?.findIndex((msg) => { - return msg.payload![0]! === 0; - }); - expect(result2).to.not.eq(-1); - }); - - it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () { - this.timeout(10000); - - await tearDownNodes([nwaku], []); - - // make sure each nwaku node operates on dedicated shard only - nwaku = new ServiceNode(makeLogFileName(this) + "1"); - await nwaku.start({ - store: true, - clusterId: TestShardInfo.clusterId, - shard: [TestShardInfo.shards[0]], - relay: true - }); - - // Set up and start a new nwaku node with Default Pubsubtopic - nwaku2 = new ServiceNode(makeLogFileName(this) + "2"); - await nwaku2.start({ - store: true, - relay: true, - clusterId: TestShardInfo.clusterId, - shard: TestShardInfo.shards - }); - await nwaku2.ensureSubscriptions([TestDecoder2.pubsubTopic]); - - const totalMsgs = 10; - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); - await sendMessages( - nwaku2, - totalMsgs, - TestDecoder2.contentTopic, - TestDecoder2.pubsubTopic - ); - - await waku.dial(await nwaku.getMultiaddrWithId()); - await waku.dial(await nwaku2.getMultiaddrWithId()); - await waku.waitForPeers([Protocols.Store]); - - let customMessages: IMessage[] = []; - let testMessages: IMessage[] = []; - - while ( - customMessages.length != totalMsgs || - testMessages.length != totalMsgs - ) { - customMessages = await processQueriedMessages( - waku, - [TestDecoder], - TestDecoder.pubsubTopic - ); - testMessages = await processQueriedMessages( - waku, - [TestDecoder2], - TestDecoder2.pubsubTopic - ); - } - }); -}); diff --git a/packages/tests/tests/store/order.node.spec.ts b/packages/tests/tests/store/order.node.spec.ts index 3de1f30005..bddea7a4ad 100644 --- a/packages/tests/tests/store/order.node.spec.ts +++ b/packages/tests/tests/store/order.node.spec.ts @@ -13,7 +13,8 @@ import { runStoreNodes, sendMessages, TestDecoder, - TestShardInfo, + TestNetworkConfig, + TestRoutingInfo, totalMsgs } from "./utils.js"; @@ -23,7 +24,7 @@ describe("Waku Store, order", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -36,7 +37,7 @@ describe("Waku Store, order", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages: IMessage[] = []; @@ -64,7 +65,7 @@ describe("Waku Store, order", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages: IMessage[] = []; @@ -95,7 +96,7 @@ describe("Waku Store, order", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages: IMessage[] = []; diff --git a/packages/tests/tests/store/page_size.node.spec.ts b/packages/tests/tests/store/page_size.node.spec.ts index 019b58bd51..66ce158334 100644 --- a/packages/tests/tests/store/page_size.node.spec.ts +++ b/packages/tests/tests/store/page_size.node.spec.ts @@ -12,7 +12,8 @@ import { runStoreNodes, sendMessages, TestDecoder, - TestShardInfo + TestNetworkConfig, + TestRoutingInfo } from "./utils.js"; describe("Waku Store, page size", function () { @@ -21,7 +22,7 @@ describe("Waku Store, page size", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -42,7 +43,7 @@ describe("Waku Store, page size", function () { nwaku, messageCount, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); // Determine effectivePageSize for test expectations @@ -77,12 +78,7 @@ describe("Waku Store, page size", function () { // Possible issue here because pageSize differs across implementations it("Default pageSize", async function () { - await sendMessages( - nwaku, - 20, - TestDecoder.contentTopic, - TestDecoder.pubsubTopic - ); + await sendMessages(nwaku, 20, TestDecoder.contentTopic, TestRoutingInfo); let messagesRetrieved = 0; for await (const query of waku.store.queryGenerator([TestDecoder])) { diff --git a/packages/tests/tests/store/sorting.node.spec.ts b/packages/tests/tests/store/sorting.node.spec.ts index 63bd8e4591..46b45f0133 100644 --- a/packages/tests/tests/store/sorting.node.spec.ts +++ b/packages/tests/tests/store/sorting.node.spec.ts @@ -12,7 +12,8 @@ import { runStoreNodes, sendMessages, TestDecoder, - TestShardInfo, + TestNetworkConfig, + TestRoutingInfo, totalMsgs } from "./utils.js"; @@ -22,7 +23,7 @@ describe("Waku Store, sorting", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -35,7 +36,7 @@ describe("Waku Store, sorting", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const pages: IMessage[][] = []; @@ -96,7 +97,7 @@ describe("Waku Store, sorting", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.pubsubTopic + TestRoutingInfo ); const messages: IMessage[] = []; diff --git a/packages/tests/tests/store/time_filter.node.spec.ts b/packages/tests/tests/store/time_filter.node.spec.ts index e149a38614..dc7c407858 100644 --- a/packages/tests/tests/store/time_filter.node.spec.ts +++ b/packages/tests/tests/store/time_filter.node.spec.ts @@ -12,7 +12,8 @@ import { adjustDate, runStoreNodes, TestDecoder, - TestShardInfo + TestNetworkConfig, + TestRoutingInfo } from "./utils.js"; describe("Waku Store, time filter", function () { @@ -21,7 +22,7 @@ describe("Waku Store, time filter", function () { let nwaku: ServiceNode; beforeEachCustom(this, async () => { - [nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo); + [nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig); }); afterEachCustom(this, async () => { @@ -49,7 +50,8 @@ describe("Waku Store, time filter", function () { payload: new Uint8Array([0]), contentTopic: TestDecoder.contentTopic, timestamp: msgTimestamp - }) + }), + TestRoutingInfo ) ).to.eq(true); @@ -90,7 +92,8 @@ describe("Waku Store, time filter", function () { payload: new Uint8Array([0]), contentTopic: TestDecoder.contentTopic, timestamp: msgTimestamp - }) + }), + TestRoutingInfo ) ).to.eq(true); diff --git a/packages/tests/tests/store/utils.ts b/packages/tests/tests/store/utils.ts index 727149f240..7d25243a9a 100644 --- a/packages/tests/tests/store/utils.ts +++ b/packages/tests/tests/store/utils.ts @@ -5,14 +5,16 @@ import { Decoder } from "@waku/core"; import { + type AutoSharding, + ContentTopic, LightNode, - NetworkConfig, + type NetworkConfig, Protocols, - ShardInfo, - type SingleShardInfo + RelayShards, + ShardId } from "@waku/interfaces"; import { createLightNode } from "@waku/sdk"; -import { Logger, singleShardInfoToPubsubTopic } from "@waku/utils"; +import { createRoutingInfo, Logger, RoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { Context } from "mocha"; @@ -21,27 +23,34 @@ import { MessageRpcQuery } from "../../src/types.js"; export const log = new Logger("test:store"); -export const TestClusterId = 3; -export const TestShardInfo: ShardInfo = { +export const TestClusterId = 5; +export const TestNetworkConfig: AutoSharding = { clusterId: TestClusterId, - shards: [1, 2] + numShardsInCluster: 8 }; -export const TestShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 }; -export const TestPubsubTopic1 = singleShardInfoToPubsubTopic(TestShardInfo1); - -export const TestShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 }; -export const TestPubsubTopic2 = singleShardInfoToPubsubTopic(TestShardInfo2); - -export const TestContentTopic1 = "/test/1/waku-store/utf8"; -export const TestEncoder = createEncoder({ - contentTopic: TestContentTopic1, - pubsubTopicShardInfo: TestShardInfo1 +export const TestContentTopic = "/test/1/waku-store/utf8"; +export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic }); -export const TestDecoder = createDecoder(TestContentTopic1, TestPubsubTopic1); -export const TestContentTopic2 = "/test/3/waku-store/utf8"; -export const TestDecoder2 = createDecoder(TestContentTopic2, TestPubsubTopic2); +export const TestRelayShards: RelayShards = { + clusterId: TestClusterId, + shards: [TestRoutingInfo.shardId] +}; + +export const TestEncoder = createEncoder({ + contentTopic: TestContentTopic, + routingInfo: TestRoutingInfo +}); +export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo); + +export const TestContentTopic2 = "/test/12/waku-store/utf8"; +export const TestRoutingInfo2 = createRoutingInfo(TestNetworkConfig, { + contentTopic: TestContentTopic2 +}); + +export const TestDecoder2 = createDecoder(TestContentTopic2, TestRoutingInfo2); export const totalMsgs = 20; export const messageText = "Store Push works!"; @@ -50,7 +59,7 @@ export async function sendMessages( instance: ServiceNode, numMessages: number, contentTopic: string, - pubsubTopic: string, + routingInfo: RoutingInfo, timestamp: boolean = false ): Promise { const messages: MessageRpcQuery[] = new Array(numMessages); @@ -60,30 +69,12 @@ export async function sendMessages( contentTopic: contentTopic, timestamp: timestamp ? new Date() : undefined }); - expect(await instance.sendMessage(messages[i], pubsubTopic)).to.eq(true); + expect(await instance.sendMessage(messages[i], routingInfo)).to.eq(true); await delay(1); // to ensure each timestamp is unique. } return messages; } -export async function sendMessagesAutosharding( - instance: ServiceNode, - numMessages: number, - contentTopic: string -): Promise { - for (let i = 0; i < numMessages; i++) { - expect( - await instance.sendMessageAutosharding( - ServiceNode.toMessageRpcQuery({ - payload: new Uint8Array([i]), - contentTopic: contentTopic - }) - ) - ).to.eq(true); - await delay(1); // to ensure each timestamp is unique. - } -} - export async function processQueriedMessages( instance: LightNode, decoders: Array, @@ -126,17 +117,6 @@ export async function startAndConnectLightNode( return waku; } -export function chunkAndReverseArray( - arr: number[], - chunkSize: number -): number[] { - const result: number[] = []; - for (let i = 0; i < arr.length; i += chunkSize) { - result.push(...arr.slice(i, i + chunkSize).reverse()); - } - return result.reverse(); -} - export const adjustDate = (baseDate: Date, adjustMs: number): Date => { const adjusted = new Date(baseDate); adjusted.setTime(adjusted.getTime() + adjustMs); @@ -145,11 +125,15 @@ export const adjustDate = (baseDate: Date, adjustMs: number): Date => { export const runStoreNodes = ( context: Context, - networkConfig: NetworkConfig + networkConfig: NetworkConfig, + shardIds?: ShardId[], + contentTopics?: ContentTopic[] ): Promise<[ServiceNode, LightNode]> => runNodes({ context, networkConfig, createNode: createLightNode, + relayShards: shardIds, + contentTopics, protocols: [Protocols.Store] }); diff --git a/packages/tests/tests/wait_for_remote_peer.node.spec.ts b/packages/tests/tests/wait_for_remote_peer.node.spec.ts index e68cbe347b..0811fc069c 100644 --- a/packages/tests/tests/wait_for_remote_peer.node.spec.ts +++ b/packages/tests/tests/wait_for_remote_peer.node.spec.ts @@ -6,8 +6,11 @@ import { expect } from "chai"; import { afterEachCustom, - DefaultTestPubsubTopic, - DefaultTestShardInfo, + DefaultTestClusterId, + DefaultTestContentTopic, + DefaultTestNetworkConfig, + DefaultTestNumShardsInCluster, + DefaultTestRoutingInfo, delay, makeLogFileName, NOISE_KEY_1, @@ -15,11 +18,7 @@ import { tearDownNodes } from "../src/index.js"; -import { - runRelayNodes, - TestPubsubTopic, - TestShardInfo -} from "./relay/utils.js"; +import { runRelayNodes } from "./relay/utils.js"; describe("Wait for remote peer", function () { let waku1: RelayNode; @@ -32,10 +31,15 @@ describe("Wait for remote peer", function () { it("Relay - dialed first", async function () { this.timeout(20_000); - [nwaku, waku1] = await runRelayNodes(this, TestShardInfo); + [nwaku, waku1] = await runRelayNodes( + this, + DefaultTestNetworkConfig, + undefined, + [DefaultTestContentTopic] + ); const multiAddrWithId = await nwaku.getMultiaddrWithId(); - const peers = waku1.relay.getMeshPeers(TestPubsubTopic); + const peers = waku1.relay.getMeshPeers(DefaultTestRoutingInfo.pubsubTopic); const nimPeerId = multiAddrWithId.getPeerId(); expect(nimPeerId).to.not.be.undefined; @@ -50,14 +54,16 @@ describe("Wait for remote peer", function () { store: false, filter: false, lightpush: false, - clusterId: DefaultTestShardInfo.clusterId, - shard: DefaultTestShardInfo.shards + clusterId: DefaultTestClusterId, + numShardsInNetwork: DefaultTestNumShardsInCluster, + contentTopic: [DefaultTestContentTopic] }); const multiAddrWithId = await nwaku.getMultiaddrWithId(); waku1 = await createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }); await waku1.start(); @@ -66,7 +72,7 @@ describe("Wait for remote peer", function () { await waku1.dial(multiAddrWithId); await waitPromise; - const peers = waku1.relay.getMeshPeers(DefaultTestPubsubTopic); + const peers = waku1.relay.getMeshPeers(DefaultTestRoutingInfo.pubsubTopic); const nimPeerId = multiAddrWithId.getPeerId(); expect(nimPeerId).to.not.be.undefined; @@ -77,7 +83,8 @@ describe("Wait for remote peer", function () { this.timeout(5000); createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }) .then((waku1) => waku1.start().then(() => waku1)) .then((waku1) => { @@ -109,7 +116,7 @@ describe("Wait for remote peer", function () { waku2 = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku2.start(); await waku2.dial(multiAddrWithId); @@ -138,7 +145,7 @@ describe("Wait for remote peer", function () { waku2 = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku2.start(); const waitPromise = waku2.waitForPeers([Protocols.Store], 2000); @@ -169,7 +176,7 @@ describe("Wait for remote peer", function () { waku2 = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku2.start(); await waku2.dial(multiAddrWithId); @@ -198,7 +205,7 @@ describe("Wait for remote peer", function () { waku2 = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku2.start(); await waku2.dial(multiAddrWithId); @@ -228,7 +235,7 @@ describe("Wait for remote peer", function () { waku2 = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku2.start(); await waku2.dial(multiAddrWithId); @@ -250,10 +257,10 @@ describe("Wait for remote peer", function () { it("Privacy Node - default protocol", async function () { this.timeout(20_000); - [nwaku, waku1] = await runRelayNodes(this, TestShardInfo); + [nwaku, waku1] = await runRelayNodes(this, DefaultTestNetworkConfig); const multiAddrWithId = await nwaku.getMultiaddrWithId(); - const peers = waku1.relay.getMeshPeers(TestPubsubTopic); + const peers = waku1.relay.getMeshPeers(DefaultTestRoutingInfo.pubsubTopic); const nimPeerId = multiAddrWithId.getPeerId(); diff --git a/packages/tests/tests/waku.node.spec.ts b/packages/tests/tests/waku.node.spec.ts index d97a473a89..9483bc8ba6 100644 --- a/packages/tests/tests/waku.node.spec.ts +++ b/packages/tests/tests/waku.node.spec.ts @@ -17,14 +17,15 @@ import { createLightNode, createEncoder as createPlainEncoder } from "@waku/sdk"; +import { createRoutingInfo } from "@waku/utils"; import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; import { afterEachCustom, beforeEachCustom, - DefaultTestShardInfo, - DefaultTestSingleShardInfo, + DefaultTestNetworkConfig, + DefaultTestRoutingInfo, makeLogFileName, NOISE_KEY_1, NOISE_KEY_2, @@ -33,8 +34,13 @@ import { } from "../src/index.js"; const TestContentTopic = "/test/1/waku/utf8"; - -const TestEncoder = createPlainEncoder({ contentTopic: TestContentTopic }); +const TestRoutingInfo = createRoutingInfo(DefaultTestNetworkConfig, { + contentTopic: TestContentTopic +}); +const TestEncoder = createPlainEncoder({ + contentTopic: TestContentTopic, + routingInfo: TestRoutingInfo +}); describe("Waku Dial [node only]", function () { describe("Interop: ServiceNode", function () { @@ -57,7 +63,7 @@ describe("Waku Dial [node only]", function () { waku = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku.start(); await waku.dial(multiAddrWithId); @@ -91,7 +97,7 @@ describe("Waku Dial [node only]", function () { waku = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig }); await waku.start(); await waku.dial(multiAddrWithId); @@ -119,7 +125,7 @@ describe("Waku Dial [node only]", function () { const multiAddrWithId = await nwaku.getMultiaddrWithId(); waku = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo, + networkConfig: DefaultTestNetworkConfig, libp2p: { peerDiscovery: [bootstrap({ list: [multiAddrWithId.toString()] })] } @@ -145,7 +151,7 @@ describe("Waku Dial [node only]", function () { waku = await createLightNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo, + networkConfig: DefaultTestNetworkConfig, libp2p: { peerDiscovery: [bootstrap({ list: [nwakuMa.toString()] })] } @@ -177,11 +183,13 @@ describe("Decryption Keys", function () { [waku1, waku2] = await Promise.all([ createRelayNode({ staticNoiseKey: NOISE_KEY_1, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }).then((waku) => waku.start().then(() => waku)), createRelayNode({ staticNoiseKey: NOISE_KEY_2, - networkConfig: DefaultTestShardInfo, + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo], libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)) ]); @@ -205,15 +213,11 @@ describe("Decryption Keys", function () { this.timeout(10000); const symKey = generateSymmetricKey(); - const decoder = createDecoder( - TestContentTopic, - symKey, - DefaultTestSingleShardInfo - ); + const decoder = createDecoder(TestContentTopic, TestRoutingInfo, symKey); const encoder = createEncoder({ contentTopic: TestContentTopic, - pubsubTopicShardInfo: DefaultTestSingleShardInfo, + routingInfo: TestRoutingInfo, symKey }); @@ -257,11 +261,13 @@ describe("User Agent", function () { createRelayNode({ staticNoiseKey: NOISE_KEY_1, userAgent: waku1UserAgent, - networkConfig: DefaultTestShardInfo + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo] }).then((waku) => waku.start().then(() => waku)), createRelayNode({ staticNoiseKey: NOISE_KEY_2, - networkConfig: DefaultTestShardInfo, + networkConfig: DefaultTestNetworkConfig, + routingInfos: [DefaultTestRoutingInfo], libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } } }).then((waku) => waku.start().then(() => waku)) ]); diff --git a/packages/utils/src/common/relay_shard_codec.ts b/packages/utils/src/common/relay_shard_codec.ts index 91dea7b4ea..334673187f 100644 --- a/packages/utils/src/common/relay_shard_codec.ts +++ b/packages/utils/src/common/relay_shard_codec.ts @@ -1,6 +1,6 @@ -import type { ShardInfo } from "@waku/interfaces"; +import type { RelayShards } from "@waku/interfaces"; -export const decodeRelayShard = (bytes: Uint8Array): ShardInfo => { +export const decodeRelayShard = (bytes: Uint8Array): RelayShards => { // explicitly converting to Uint8Array to avoid Buffer // https://github.com/libp2p/js-libp2p/issues/2146 bytes = new Uint8Array(bytes); @@ -33,8 +33,8 @@ export const decodeRelayShard = (bytes: Uint8Array): ShardInfo => { return { clusterId, shards }; }; -export const encodeRelayShard = (shardInfo: ShardInfo): Uint8Array => { - const { clusterId, shards } = shardInfo; +export const encodeRelayShard = (relayShards: RelayShards): Uint8Array => { + const { clusterId, shards } = relayShards; const totalLength = shards.length >= 64 ? 130 : 3 + 2 * shards.length; const buffer = new ArrayBuffer(totalLength); const view = new DataView(buffer); diff --git a/packages/utils/src/common/sharding/index.spec.ts b/packages/utils/src/common/sharding/index.spec.ts index 4c8f854875..7e5056dbe5 100644 --- a/packages/utils/src/common/sharding/index.spec.ts +++ b/packages/utils/src/common/sharding/index.spec.ts @@ -1,17 +1,12 @@ -import { DEFAULT_CLUSTER_ID, NetworkConfig } from "@waku/interfaces"; +import { DEFAULT_CLUSTER_ID } from "@waku/interfaces"; import { expect } from "chai"; import { contentTopicsByPubsubTopic, contentTopicToPubsubTopic, contentTopicToShardIndex, - determinePubsubTopic, - ensureShardingConfigured, ensureValidContentTopic, - pubsubTopicToSingleShardInfo, - shardInfoToPubsubTopics, - singleShardInfosToShardInfo, - singleShardInfoToPubsubTopic + pubsubTopicToSingleShardInfo } from "./index.js"; const testInvalidCases = ( @@ -154,7 +149,7 @@ describe("contentTopicsByPubsubTopic", () => { const contentTopics = ["/toychat/2/huilong/proto", "/myapp/1/latest/proto"]; const grouped = contentTopicsByPubsubTopic(contentTopics); for (const contentTopic of contentTopics) { - const pubsubTopic = contentTopicToPubsubTopic(contentTopic); + const pubsubTopic = contentTopicToPubsubTopic(contentTopic, 0, 8); expect(grouped.get(pubsubTopic)?.includes(contentTopic)).to.be.true; } }); @@ -166,23 +161,25 @@ describe("contentTopicsByPubsubTopic", () => { ]; const grouped = contentTopicsByPubsubTopic(contentTopics); expect(grouped.size).to.eq(1); // Only one pubsub topic expected - const pubsubTopic = contentTopicToPubsubTopic(contentTopics[0]); + const pubsubTopic = contentTopicToPubsubTopic(contentTopics[0], 0, 8); expect(grouped.get(pubsubTopic)?.length).to.eq(2); // Both topics should be grouped under the same pubsub topic }); it("handles different clusterIds correctly", () => { const contentTopics = ["/app/22/sometopic/someencoding"]; - const clusterId1 = 1; + const clusterId1 = 3; const clusterId2 = 2; const grouped1 = contentTopicsByPubsubTopic(contentTopics, clusterId1); const grouped2 = contentTopicsByPubsubTopic(contentTopics, clusterId2); const pubsubTopic1 = contentTopicToPubsubTopic( contentTopics[0], - clusterId1 + clusterId1, + 8 ); const pubsubTopic2 = contentTopicToPubsubTopic( contentTopics[0], - clusterId2 + clusterId2, + 8 ); expect(pubsubTopic1).not.to.equal(pubsubTopic2); expect(grouped1.has(pubsubTopic1)).to.be.true; @@ -228,95 +225,6 @@ describe("contentTopicsByPubsubTopic", () => { }); }); -describe("singleShardInfoToPubsubTopic", () => { - it("should convert a SingleShardInfo object to the correct PubsubTopic", () => { - const singleShardInfo = { clusterId: 2, shard: 2 }; - const expectedTopic = "/waku/2/rs/2/2"; - expect(singleShardInfoToPubsubTopic(singleShardInfo)).to.equal( - expectedTopic - ); - }); -}); - -describe("singleShardInfosToShardInfo", () => { - it("should aggregate SingleShardInfos into a ShardInfo", () => { - const singleShardInfos = [ - { clusterId: 1, shard: 2 }, - { clusterId: 1, shard: 3 }, - { clusterId: 1, shard: 5 } - ]; - const expectedShardInfo = { clusterId: 1, shards: [2, 3, 5] }; - expect(singleShardInfosToShardInfo(singleShardInfos)).to.deep.equal( - expectedShardInfo - ); - }); - - it("should throw an error for empty SingleShardInfos array", () => { - expect(() => singleShardInfosToShardInfo([])).to.throw("Invalid shard"); - }); - - it("should throw an error for SingleShardInfos with different clusterIds", () => { - const invalidShardInfos = [ - { clusterId: 1, shard: 2 }, - { clusterId: 2, shard: 3 } - ]; - expect(() => singleShardInfosToShardInfo(invalidShardInfos)).to.throw( - "Passed shard infos have different clusterIds" - ); - }); -}); - -describe("shardInfoToPubsubTopics", () => { - it("should convert content topics to PubsubTopics for autosharding", () => { - const shardInfo = { - contentTopics: ["/app/v1/topic1/proto", "/app/v1/topic2/proto"] - }; - const topics = shardInfoToPubsubTopics(shardInfo); - expect(topics).to.be.an("array").that.includes("/waku/2/rs/1/4"); - expect(topics.length).to.equal(1); - }); - - it("should return unique PubsubTopics for static sharding", () => { - const shardInfo = { clusterId: 1, shards: [0, 1, 0] }; // Duplicate shard to test uniqueness - const topics = shardInfoToPubsubTopics(shardInfo); - expect(topics).to.have.members(["/waku/2/rs/1/0", "/waku/2/rs/1/1"]); - expect(topics.length).to.equal(2); - }); - - [0, 1, 6].forEach((clusterId) => { - it(`should handle clusterId, application and version for autosharding with cluster iD ${clusterId}`, () => { - const shardInfo = { - clusterId: clusterId, - application: "app", - version: "v1" - }; - const topics = shardInfoToPubsubTopics(shardInfo); - expect(topics) - .to.be.an("array") - .that.includes(`/waku/2/rs/${clusterId}/4`); - expect(topics.length).to.equal(1); - }); - }); - - it("should return empty list for no shard", () => { - const shardInfo = { clusterId: 1, shards: [] }; - const topics = shardInfoToPubsubTopics(shardInfo); - expect(topics.length).to.equal(0); - }); - - it("should throw an error if shards are undefined for static sharding", () => { - const shardInfo = { clusterId: 1, shards: undefined }; - expect(() => shardInfoToPubsubTopics(shardInfo)).to.throw("Invalid shard"); - }); - - it("should throw an error for missing required configuration", () => { - const shardInfo = {}; - expect(() => shardInfoToPubsubTopics(shardInfo)).to.throw( - "Missing required configuration in shard parameters" - ); - }); -}); - describe("pubsubTopicToSingleShardInfo with various invalid formats", () => { const invalidTopics = [ "/waku/1/rs/1/2", // Invalid Waku version @@ -327,8 +235,8 @@ describe("pubsubTopicToSingleShardInfo with various invalid formats", () => { ]; it("should extract SingleShardInfo from a valid PubsubTopic", () => { - const topic = "/waku/2/rs/1/2"; - const expectedInfo = { clusterId: 1, shard: 2 }; + const topic = "/waku/2/rs/2/2"; + const expectedInfo = { clusterId: 2, shard: 2 }; expect(pubsubTopicToSingleShardInfo(topic)).to.deep.equal(expectedInfo); }); @@ -356,114 +264,77 @@ describe("pubsubTopicToSingleShardInfo with various invalid formats", () => { }); }); -describe("determinePubsubTopic", () => { - const contentTopic = "/app/46/sometopic/someencoding"; - it("should return the pubsub topic directly if a string is provided", () => { - const topic = "/waku/2/rs/1/3"; - expect(determinePubsubTopic(contentTopic, topic)).to.equal(topic); - }); - - it("should return a calculated topic if SingleShardInfo is provided", () => { - const info = { clusterId: 1, shard: 2 }; - const expectedTopic = "/waku/2/rs/1/2"; - expect(determinePubsubTopic(contentTopic, info)).to.equal(expectedTopic); - }); - - it("should fall back to default pubsub topic when pubsubTopicShardInfo is not provided", () => { - expect(determinePubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/6"); - }); - - it("should process correctly when SingleShardInfo has no clusterId but has a shard", () => { - const info = { shard: 0 }; - const expectedTopic = `/waku/2/rs/${DEFAULT_CLUSTER_ID}/0`; - expect(determinePubsubTopic(contentTopic, info as any)).to.equal( - expectedTopic - ); - }); - - it("should derive a pubsub topic using contentTopic when SingleShardInfo only contains clusterId", () => { - const info = { clusterId: 2 }; - const expectedTopic = contentTopicToPubsubTopic( - contentTopic, - info.clusterId - ); - expect(determinePubsubTopic(contentTopic, info as any)).to.equal( - expectedTopic - ); - }); -}); - -describe("ensureShardingConfigured", () => { - it("should return valid sharding parameters for static sharding", () => { - const shardInfo = { clusterId: 1, shards: [0, 1] }; - const result = ensureShardingConfigured(shardInfo); - expect(result.shardInfo).to.deep.include({ - clusterId: 1, - shards: [0, 1] - }); - expect(result.shardInfo).to.deep.include({ clusterId: 1, shards: [0, 1] }); - expect(result.pubsubTopics).to.have.members([ - "/waku/2/rs/1/0", - "/waku/2/rs/1/1" - ]); - }); - - it("should return valid sharding parameters for content topics autosharding", () => { - const contentTopicInfo = { contentTopics: ["/app/v1/topic1/proto"] }; - const result = ensureShardingConfigured(contentTopicInfo); - const expectedPubsubTopic = contentTopicToPubsubTopic( - "/app/v1/topic1/proto", - DEFAULT_CLUSTER_ID - ); - expect(result.shardInfo.shards).to.include( - contentTopicToShardIndex("/app/v1/topic1/proto") - ); - expect(result.pubsubTopics).to.include(expectedPubsubTopic); - }); - - it("should throw an error for missing sharding configuration", () => { - const shardInfo = {} as any as NetworkConfig; - expect(() => ensureShardingConfigured(shardInfo)).to.throw(); - }); - - it("handles empty shards array correctly", () => { - const shardInfo = { clusterId: 1, shards: [] }; - expect(() => ensureShardingConfigured(shardInfo)).to.throw(); - }); - - it("handles empty contentTopics array correctly", () => { - const shardInfo = { contentTopics: [] }; - expect(() => ensureShardingConfigured(shardInfo)).to.throw(); - }); -}); - -describe("contentTopicToPubsubTopic", () => { - it("should correctly map a content topic to a pubsub topic", () => { - const contentTopic = "/app/v1/topic1/proto"; - expect(contentTopicToPubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/4"); - }); - - it("should map different content topics to different pubsub topics based on shard index", () => { - const contentTopic1 = "/app/v1/topic1/proto"; - const contentTopic2 = "/app/v2/topic2/proto"; - const pubsubTopic1 = contentTopicToPubsubTopic(contentTopic1); - const pubsubTopic2 = contentTopicToPubsubTopic(contentTopic2); - expect(pubsubTopic1).not.to.equal(pubsubTopic2); - }); - - it("should use the provided clusterId for the pubsub topic", () => { - const contentTopic = "/app/v1/topic1/proto"; - const clusterId = 2; - expect(contentTopicToPubsubTopic(contentTopic, clusterId)).to.equal( - "/waku/2/rs/2/4" - ); - }); - - it("should correctly map a content topic to a pubsub topic for different network shard sizes", () => { - const contentTopic = "/app/v1/topic1/proto"; - const networkShards = 16; - expect(contentTopicToPubsubTopic(contentTopic, 1, networkShards)).to.equal( - "/waku/2/rs/1/4" - ); - }); -}); +// describe("ensureShardingConfigured", () => { +// it("should return valid sharding parameters for static sharding", () => { +// const shardInfo = { clusterId: 1, shards: [0, 1] }; +// const result = ensureShardingConfigured(shardInfo); +// expect(result.shardInfo).to.deep.include({ +// clusterId: 1, +// shards: [0, 1] +// }); +// expect(result.shardInfo).to.deep.include({ clusterId: 1, shards: [0, 1] }); +// expect(result.pubsubTopics).to.have.members([ +// "/waku/2/rs/1/0", +// "/waku/2/rs/1/1" +// ]); +// }); +// +// it("should return valid sharding parameters for content topics autosharding", () => { +// const contentTopicInfo = { contentTopics: ["/app/v1/topic1/proto"] }; +// const result = ensureShardingConfigured(contentTopicInfo); +// const expectedPubsubTopic = contentTopicToPubsubTopic( +// "/app/v1/topic1/proto", +// DEFAULT_CLUSTER_ID +// ); +// expect(result.shardInfo.shards).to.include( +// contentTopicToShardIndex("/app/v1/topic1/proto") +// ); +// expect(result.pubsubTopics).to.include(expectedPubsubTopic); +// }); +// +// it("should throw an error for missing sharding configuration", () => { +// const shardInfo = {} as any as NetworkConfig; +// expect(() => ensureShardingConfigured(shardInfo)).to.throw(); +// }); +// +// it("handles empty shards array correctly", () => { +// const shardInfo = { clusterId: 1, shards: [] }; +// expect(() => ensureShardingConfigured(shardInfo)).to.throw(); +// }); +// +// it("handles empty contentTopics array correctly", () => { +// const shardInfo = { contentTopics: [] }; +// expect(() => ensureShardingConfigured(shardInfo)).to.throw(); +// }); +// }); +// +// describe("contentTopicToPubsubTopic", () => { +// it("should correctly map a content topic to a pubsub topic", () => { +// const contentTopic = "/app/v1/topic1/proto"; +// expect(contentTopicToPubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/4"); +// }); +// +// it("should map different content topics to different pubsub topics based on shard index", () => { +// const contentTopic1 = "/app/v1/topic1/proto"; +// const contentTopic2 = "/app/v2/topic2/proto"; +// const pubsubTopic1 = contentTopicToPubsubTopic(contentTopic1); +// const pubsubTopic2 = contentTopicToPubsubTopic(contentTopic2); +// expect(pubsubTopic1).not.to.equal(pubsubTopic2); +// }); +// +// it("should use the provided clusterId for the pubsub topic", () => { +// const contentTopic = "/app/v1/topic1/proto"; +// const clusterId = 2; +// expect(contentTopicToPubsubTopic(contentTopic, clusterId)).to.equal( +// "/waku/2/rs/2/4" +// ); +// }); +// +// it("should correctly map a content topic to a pubsub topic for different network shard sizes", () => { +// const contentTopic = "/app/v1/topic1/proto"; +// const networkShards = 16; +// expect(contentTopicToPubsubTopic(contentTopic, 1, networkShards)).to.equal( +// "/waku/2/rs/1/4" +// ); +// }); +// }); diff --git a/packages/utils/src/common/sharding/index.ts b/packages/utils/src/common/sharding/index.ts index f70db904bf..e48522ac22 100644 --- a/packages/utils/src/common/sharding/index.ts +++ b/packages/utils/src/common/sharding/index.ts @@ -1,109 +1,22 @@ import { sha256 } from "@noble/hashes/sha256"; import { + type ClusterId, + ContentTopic, DEFAULT_CLUSTER_ID, - NetworkConfig, PubsubTopic, - ShardInfo, - SingleShardInfo + type ShardId } from "@waku/interfaces"; import { concat, utf8ToBytes } from "../../bytes/index.js"; -import { isAutoSharding, isStaticSharding } from "./type_guards.js"; - export * from "./type_guards.js"; +export * from "./routing_info.js"; -export function derivePubsubTopicsFromNetworkConfig( - networkConfig: NetworkConfig -): PubsubTopic[] { - if (isStaticSharding(networkConfig)) { - if (networkConfig.shards.length === 0) { - throw new Error( - "Invalid shards configuration: please provide at least one shard" - ); - } - return shardInfoToPubsubTopics(networkConfig); - } else if (isAutoSharding(networkConfig)) { - if (networkConfig.contentTopics.length === 0) { - throw new Error( - "Invalid content topics configuration: please provide at least one content topic" - ); - } - return networkConfig.contentTopics.map((contentTopic) => - contentTopicToPubsubTopic(contentTopic, networkConfig.clusterId) - ); - } else { - throw new Error( - "Unknown shard config. Please use ShardInfo or ContentTopicInfo" - ); - } -} - -export const singleShardInfoToPubsubTopic = ( - shardInfo: SingleShardInfo +export const formatPubsubTopic = ( + clusterId: ClusterId, + shard: ShardId ): PubsubTopic => { - if (shardInfo.shard === undefined) throw new Error("Invalid shard"); - - return `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${shardInfo.shard}`; -}; - -export const singleShardInfosToShardInfo = ( - singleShardInfos: SingleShardInfo[] -): ShardInfo => { - if (singleShardInfos.length === 0) throw new Error("Invalid shard"); - - const clusterIds = singleShardInfos.map((shardInfo) => shardInfo.clusterId); - if (new Set(clusterIds).size !== 1) { - throw new Error("Passed shard infos have different clusterIds"); - } - - const shards = singleShardInfos - .map((shardInfo) => shardInfo.shard) - .filter((shard): shard is number => shard !== undefined); - - return { - clusterId: singleShardInfos[0].clusterId, - shards - }; -}; - -/** - * @deprecated will be removed, use cluster and shard comparison directly - */ -export const shardInfoToPubsubTopics = ( - shardInfo: Partial -): PubsubTopic[] => { - if ("contentTopics" in shardInfo && shardInfo.contentTopics) { - // Autosharding: explicitly defined content topics - return Array.from( - new Set( - shardInfo.contentTopics.map((contentTopic) => - contentTopicToPubsubTopic(contentTopic, shardInfo.clusterId) - ) - ) - ); - } else if ("shards" in shardInfo) { - // Static sharding - if (shardInfo.shards === undefined) throw new Error("Invalid shard"); - return Array.from( - new Set( - shardInfo.shards.map( - (index) => - `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${index}` - ) - ) - ); - } else if ("application" in shardInfo && "version" in shardInfo) { - // Autosharding: single shard from application and version - return [ - contentTopicToPubsubTopic( - `/${shardInfo.application}/${shardInfo.version}/default/default`, - shardInfo.clusterId - ) - ]; - } else { - throw new Error("Missing required configuration in shard parameters"); - } + return `/waku/2/rs/${clusterId}/${shard}`; }; /** @@ -111,7 +24,7 @@ export const shardInfoToPubsubTopics = ( */ export const pubsubTopicToSingleShardInfo = ( pubsubTopics: PubsubTopic -): SingleShardInfo => { +): { clusterId: ClusterId; shard: ShardId } => { const parts = pubsubTopics.split("/"); if ( @@ -134,40 +47,7 @@ export const pubsubTopicToSingleShardInfo = ( }; }; -export const pubsubTopicsToShardInfo = ( - pubsubTopics: PubsubTopic[] -): ShardInfo => { - const shardInfoSet = new Set(); - const clusterIds = new Set(); - - for (const topic of pubsubTopics) { - const { clusterId, shard } = pubsubTopicToSingleShardInfo(topic); - shardInfoSet.add(`${clusterId}:${shard}`); - clusterIds.add(clusterId); - } - - if (shardInfoSet.size === 0) { - throw new Error("No valid pubsub topics provided"); - } - - if (clusterIds.size > 1) { - throw new Error( - "Pubsub topics from multiple cluster IDs are not supported" - ); - } - - const clusterId = clusterIds.values().next().value!; - const shards = Array.from(shardInfoSet).map((info) => - parseInt(info.split(":")[1]) - ); - - return { - clusterId, - shards - }; -}; - -interface ContentTopic { +interface ParsedContentTopic { generation: number; application: string; version: string; @@ -180,39 +60,45 @@ interface ContentTopic { * @param contentTopic String to validate * @returns Object with each content topic field as an attribute */ -export function ensureValidContentTopic(contentTopic: string): ContentTopic { - const parts = contentTopic.split("/"); +export function ensureValidContentTopic( + contentTopic: ContentTopic +): ParsedContentTopic { + const parts = (contentTopic as string).split("/"); if (parts.length < 5 || parts.length > 6) { - throw Error("Content topic format is invalid"); + throw Error(`Content topic format is invalid: ${contentTopic}`); } // Validate generation field if present let generation = 0; if (parts.length == 6) { generation = parseInt(parts[1]); if (isNaN(generation)) { - throw new Error("Invalid generation field in content topic"); + throw new Error( + `Invalid generation field in content topic: ${contentTopic}` + ); } if (generation > 0) { - throw new Error("Generation greater than 0 is not supported"); + throw new Error( + `Generation greater than 0 is not supported: ${contentTopic}` + ); } } // Validate remaining fields const fields = parts.splice(-4); // Validate application field if (fields[0].length == 0) { - throw new Error("Application field cannot be empty"); + throw new Error(`Application field cannot be empty: ${contentTopic}`); } // Validate version field if (fields[1].length == 0) { - throw new Error("Version field cannot be empty"); + throw new Error(`Version field cannot be empty: ${contentTopic}`); } // Validate topic name field if (fields[2].length == 0) { - throw new Error("Topic name field cannot be empty"); + throw new Error(`Topic name field cannot be empty: ${contentTopic}`); } // Validate encoding field if (fields[3].length == 0) { - throw new Error("Encoding field cannot be empty"); + throw new Error(`Encoding field cannot be empty: ${contentTopic}`); } return { @@ -229,27 +115,27 @@ export function ensureValidContentTopic(contentTopic: string): ContentTopic { * Based on the algorithm described in the RFC: https://rfc.vac.dev/spec/51//#algorithm */ export function contentTopicToShardIndex( - contentTopic: string, - networkShards: number = 8 + contentTopic: ContentTopic, + numShardsInCluster: number = 8 ): number { const { application, version } = ensureValidContentTopic(contentTopic); const digest = sha256( concat([utf8ToBytes(application), utf8ToBytes(version)]) ); const dataview = new DataView(digest.buffer.slice(-8)); - return Number(dataview.getBigUint64(0, false) % BigInt(networkShards)); + return Number(dataview.getBigUint64(0, false) % BigInt(numShardsInCluster)); } export function contentTopicToPubsubTopic( - contentTopic: string, - clusterId: number = DEFAULT_CLUSTER_ID, - networkShards: number = 8 + contentTopic: ContentTopic, + clusterId: number, + numShardsInCluster: number ): string { if (!contentTopic) { throw Error("Content topic must be specified"); } - const shardIndex = contentTopicToShardIndex(contentTopic, networkShards); + const shardIndex = contentTopicToShardIndex(contentTopic, numShardsInCluster); return `/waku/2/rs/${clusterId}/${shardIndex}`; } @@ -258,7 +144,7 @@ export function contentTopicToPubsubTopic( * If any of the content topics are not properly formatted, the function will throw an error. */ export function contentTopicsByPubsubTopic( - contentTopics: string[], + contentTopics: ContentTopic[], clusterId: number = DEFAULT_CLUSTER_ID, networkShards: number = 8 ): Map> { @@ -278,70 +164,3 @@ export function contentTopicsByPubsubTopic( } return groupedContentTopics; } - -/** - * Used when creating encoders/decoders to determine which pubsub topic to use - */ -export function determinePubsubTopic( - contentTopic: string, - // TODO: make it accept ShardInfo https://github.com/waku-org/js-waku/issues/2086 - pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic -): string { - if (typeof pubsubTopicShardInfo == "string") { - return pubsubTopicShardInfo; - } - - return pubsubTopicShardInfo?.shard !== undefined - ? singleShardInfoToPubsubTopic(pubsubTopicShardInfo) - : contentTopicToPubsubTopic( - contentTopic, - pubsubTopicShardInfo?.clusterId ?? DEFAULT_CLUSTER_ID - ); -} - -/** - * Validates sharding configuration and sets defaults where possible. - * @returns Validated sharding parameters, with any missing values set to defaults - */ -export const ensureShardingConfigured = ( - networkConfig: NetworkConfig -): { - shardInfo: ShardInfo; - pubsubTopics: PubsubTopic[]; -} => { - const clusterId = networkConfig.clusterId ?? DEFAULT_CLUSTER_ID; - const shards = "shards" in networkConfig ? networkConfig.shards : []; - const contentTopics = - "contentTopics" in networkConfig ? networkConfig.contentTopics : []; - - const isShardsConfigured = shards && shards.length > 0; - const isContentTopicsConfigured = contentTopics && contentTopics.length > 0; - - if (isShardsConfigured) { - return { - shardInfo: { clusterId, shards }, - pubsubTopics: shardInfoToPubsubTopics({ clusterId, shards }) - }; - } - - if (isContentTopicsConfigured) { - const pubsubTopics = Array.from( - new Set( - contentTopics.map((topic) => - contentTopicToPubsubTopic(topic, clusterId) - ) - ) - ); - const shards = Array.from( - new Set(contentTopics.map((topic) => contentTopicToShardIndex(topic))) - ); - return { - shardInfo: { clusterId, shards }, - pubsubTopics - }; - } - - throw new Error( - "Missing minimum required configuration options for static sharding or autosharding." - ); -}; diff --git a/packages/utils/src/common/sharding/routing_info.ts b/packages/utils/src/common/sharding/routing_info.ts new file mode 100644 index 0000000000..1b804ac581 --- /dev/null +++ b/packages/utils/src/common/sharding/routing_info.ts @@ -0,0 +1,183 @@ +import type { + AutoSharding, + ContentTopic, + IRoutingInfoAutoSharding, + IRoutingInfoStaticSharding, + NetworkConfig, + PubsubTopic, + ShardId, + StaticSharding +} from "@waku/interfaces"; + +import { + contentTopicToShardIndex, + ensureValidContentTopic, + formatPubsubTopic, + isAutoSharding, + pubsubTopicToSingleShardInfo +} from "./index.js"; + +export type RoutingInfo = AutoShardingRoutingInfo | StaticShardingRoutingInfo; + +export abstract class BaseRoutingInfo { + protected constructor( + public networkConfig: NetworkConfig, + public pubsubTopic: PubsubTopic, + public shardId: ShardId + ) {} + + public abstract get isAutoSharding(): boolean; + public abstract get isStaticSharding(): boolean; +} + +export class AutoShardingRoutingInfo + extends BaseRoutingInfo + implements IRoutingInfoAutoSharding +{ + public static fromContentTopic( + contentTopic: ContentTopic, + networkConfig: AutoSharding + ): AutoShardingRoutingInfo { + ensureValidContentTopic(contentTopic); + + const shardId = contentTopicToShardIndex( + contentTopic, + networkConfig.numShardsInCluster + ); + const pubsubTopic = formatPubsubTopic(networkConfig.clusterId, shardId); + + return new AutoShardingRoutingInfo( + networkConfig, + pubsubTopic, + shardId, + contentTopic + ); + } + + /** + * No checks are done with this constructor, + * Be sure you check that the network config (auto vs static) + * matches other parameters. + */ + private constructor( + public networkConfig: AutoSharding, + public pubsubTopic: PubsubTopic, + public shardId: ShardId, + public contentTopic: string + ) { + super(networkConfig, pubsubTopic, shardId); + } + + public get isAutoSharding(): boolean { + return true; + } + + public get isStaticSharding(): boolean { + return false; + } +} + +export class StaticShardingRoutingInfo + extends BaseRoutingInfo + implements IRoutingInfoStaticSharding +{ + /** + * Create Routing Info for static sharding network, using shard + * + * @param shardId + * @param networkConfig + */ + public static fromShard( + shardId: ShardId, + networkConfig: StaticSharding + ): StaticShardingRoutingInfo { + const pubsubTopic = formatPubsubTopic(networkConfig.clusterId, shardId); + + return new StaticShardingRoutingInfo(networkConfig, pubsubTopic, shardId); + } + + /** + * Create Routing Info for static sharding network, using pubsub topic + * + * @param pubsubTopic + * @param networkConfig + * + * @throws if the pubsub topic is malformed, or does not match the network config + */ + public static fromPubsubTopic( + pubsubTopic: PubsubTopic, + networkConfig: StaticSharding + ): StaticShardingRoutingInfo { + const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic); + + if (clusterId != networkConfig.clusterId) + throw "Pubsub topic does not match network config's cluster id"; + + return new StaticShardingRoutingInfo(networkConfig, pubsubTopic, shard); + } + + /** + * No checks are done with this constructor, + * Be sure you check that the network config (auto vs static) + * matches other parameters. + */ + private constructor( + public networkConfig: StaticSharding, + public pubsubTopic: PubsubTopic, + public shardId: ShardId + ) { + super(networkConfig, pubsubTopic, shardId); + } + + public get isAutoSharding(): boolean { + return false; + } + + public get isStaticSharding(): boolean { + return true; + } +} + +export function isAutoShardingRoutingInfo( + routingInfo: BaseRoutingInfo +): routingInfo is AutoShardingRoutingInfo { + return routingInfo.isAutoSharding; +} + +export function isStaticShardingRoutingInfo( + routingInfo: BaseRoutingInfo +): routingInfo is StaticShardingRoutingInfo { + return routingInfo.isStaticSharding; +} + +export function createRoutingInfo( + networkConfig: NetworkConfig, + options: { + contentTopic?: ContentTopic; + shardId?: ShardId; + pubsubTopic?: PubsubTopic; + } +): AutoShardingRoutingInfo | StaticShardingRoutingInfo { + if (isAutoSharding(networkConfig)) { + if (options.contentTopic) { + return AutoShardingRoutingInfo.fromContentTopic( + options.contentTopic, + networkConfig + ); + } + throw new Error("AutoSharding requires contentTopic"); + } else { + if (options.shardId !== undefined) { + return StaticShardingRoutingInfo.fromShard( + options.shardId, + networkConfig + ); + } else if (options.pubsubTopic) { + return StaticShardingRoutingInfo.fromPubsubTopic( + options.pubsubTopic, + networkConfig + ); + } + throw new Error("StaticSharding requires shardId or pubsubTopic"); + } +} diff --git a/packages/utils/src/common/sharding/type_guards.ts b/packages/utils/src/common/sharding/type_guards.ts index 9ab53373aa..297b6b2b6a 100644 --- a/packages/utils/src/common/sharding/type_guards.ts +++ b/packages/utils/src/common/sharding/type_guards.ts @@ -1,5 +1,5 @@ import type { - ContentTopicInfo, + AutoSharding, CreateNodeOptions, StaticSharding } from "@waku/interfaces"; @@ -7,13 +7,11 @@ import type { export function isStaticSharding( config: NonNullable ): config is StaticSharding { - return ( - "clusterId" in config && "shards" in config && !("contentTopics" in config) - ); + return "clusterId" in config && !("numShardsInCluster" in config); } export function isAutoSharding( config: NonNullable -): config is ContentTopicInfo { - return "contentTopics" in config; +): config is AutoSharding { + return "clusterId" in config && "numShardsInCluster" in config; } From b4787e0e87f5ab7b9d3403a115c5b299b62463a7 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 12:20:08 +1000 Subject: [PATCH 02/23] remove default values for cluster id and num shards in cluster These are high level configuration values, that need to be consistent within a given execution. Setting them as default value at such a low level is a footgun risk. --- .../connection_manager/shard_reader.spec.ts | 5 ++- packages/tests/tests/filter/utils.ts | 8 +++- .../tests/sharding/peer_management.spec.ts | 3 +- .../utils/src/common/sharding/index.spec.ts | 45 +++++++++++++++---- packages/utils/src/common/sharding/index.ts | 7 ++- 5 files changed, 52 insertions(+), 16 deletions(-) diff --git a/packages/core/src/lib/connection_manager/shard_reader.spec.ts b/packages/core/src/lib/connection_manager/shard_reader.spec.ts index 7f38c83190..8abb90265b 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.spec.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.spec.ts @@ -29,7 +29,10 @@ describe("ShardReader", function () { const testContentTopic = "/test/1/waku-light-push/utf8"; const testClusterId = 3; - const testShardIndex = contentTopicToShardIndex(testContentTopic); + const testShardIndex = contentTopicToShardIndex( + testContentTopic, + DEFAULT_NUM_SHARDS + ); const testNetworkConfig: AutoSharding = { clusterId: testClusterId, diff --git a/packages/tests/tests/filter/utils.ts b/packages/tests/tests/filter/utils.ts index a679f5337a..111ad5f420 100644 --- a/packages/tests/tests/filter/utils.ts +++ b/packages/tests/tests/filter/utils.ts @@ -10,10 +10,14 @@ import { utf8ToBytes } from "@waku/utils/bytes"; export const log = new Logger("test:filter"); export const TestContentTopic = "/test/1/waku-filter/default"; export const TestClusterId = 2; -export const TestShardIndex = contentTopicToShardIndex(TestContentTopic); +export const TestNumShardsInCluster = 8; +export const TestShardIndex = contentTopicToShardIndex( + TestContentTopic, + TestNumShardsInCluster +); export const TestNetworkConfig = { clusterId: TestClusterId, - numShardsInCluster: 8 + numShardsInCluster: TestNumShardsInCluster }; export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { contentTopic: TestContentTopic diff --git a/packages/tests/tests/sharding/peer_management.spec.ts b/packages/tests/tests/sharding/peer_management.spec.ts index c0dec2e1fd..6d734828c9 100644 --- a/packages/tests/tests/sharding/peer_management.spec.ts +++ b/packages/tests/tests/sharding/peer_management.spec.ts @@ -200,7 +200,8 @@ describe("Static Sharding: Peer Management", function () { describe("Autosharding: Peer Management", function () { const ContentTopic = "/myapp/1/latest/proto"; const clusterId = 8; - const Shard = [contentTopicToShardIndex(ContentTopic)]; + const numShardsInCluster = 8; + const Shard = [contentTopicToShardIndex(ContentTopic, numShardsInCluster)]; describe("Peer Exchange", function () { let waku: LightNode; diff --git a/packages/utils/src/common/sharding/index.spec.ts b/packages/utils/src/common/sharding/index.spec.ts index 7e5056dbe5..28b5aeba2e 100644 --- a/packages/utils/src/common/sharding/index.spec.ts +++ b/packages/utils/src/common/sharding/index.spec.ts @@ -9,6 +9,9 @@ import { pubsubTopicToSingleShardInfo } from "./index.js"; +const ClusterId = 0; +const NumShardsInCluster = 8; + const testInvalidCases = ( contentTopics: string[], expectedError: string @@ -112,7 +115,9 @@ describe("contentTopicToShardIndex", () => { ]; contentTopicsWithExpectedShards.forEach(([topic, expectedShard]) => { it(`should correctly map ${topic} to shard index ${expectedShard}`, () => { - expect(contentTopicToShardIndex(topic)).to.eq(expectedShard); + expect(contentTopicToShardIndex(topic, NumShardsInCluster)).to.eq( + expectedShard + ); }); }); @@ -137,8 +142,8 @@ describe("contentTopicToShardIndex", () => { ["/waku/2/content/test.js", "/waku/2/users/proto"] ]; for (const [topic1, topic2] of contentTopics) { - expect(contentTopicToShardIndex(topic1)).to.eq( - contentTopicToShardIndex(topic2) + expect(contentTopicToShardIndex(topic1, NumShardsInCluster)).to.eq( + contentTopicToShardIndex(topic2, NumShardsInCluster) ); } }); @@ -147,9 +152,15 @@ describe("contentTopicToShardIndex", () => { describe("contentTopicsByPubsubTopic", () => { it("groups content topics by expected pubsub topic", () => { const contentTopics = ["/toychat/2/huilong/proto", "/myapp/1/latest/proto"]; - const grouped = contentTopicsByPubsubTopic(contentTopics); + const grouped = contentTopicsByPubsubTopic( + contentTopics, + ClusterId, + NumShardsInCluster + ); + for (const contentTopic of contentTopics) { const pubsubTopic = contentTopicToPubsubTopic(contentTopic, 0, 8); + expect(grouped.get(pubsubTopic)?.includes(contentTopic)).to.be.true; } }); @@ -159,7 +170,11 @@ describe("contentTopicsByPubsubTopic", () => { "/app/22/sometopic/someencoding", "/app/22/anothertopic/otherencoding" ]; - const grouped = contentTopicsByPubsubTopic(contentTopics); + const grouped = contentTopicsByPubsubTopic( + contentTopics, + ClusterId, + NumShardsInCluster + ); expect(grouped.size).to.eq(1); // Only one pubsub topic expected const pubsubTopic = contentTopicToPubsubTopic(contentTopics[0], 0, 8); expect(grouped.get(pubsubTopic)?.length).to.eq(2); // Both topics should be grouped under the same pubsub topic @@ -169,8 +184,16 @@ describe("contentTopicsByPubsubTopic", () => { const contentTopics = ["/app/22/sometopic/someencoding"]; const clusterId1 = 3; const clusterId2 = 2; - const grouped1 = contentTopicsByPubsubTopic(contentTopics, clusterId1); - const grouped2 = contentTopicsByPubsubTopic(contentTopics, clusterId2); + const grouped1 = contentTopicsByPubsubTopic( + contentTopics, + clusterId1, + NumShardsInCluster + ); + const grouped2 = contentTopicsByPubsubTopic( + contentTopics, + clusterId2, + NumShardsInCluster + ); const pubsubTopic1 = contentTopicToPubsubTopic( contentTopics[0], clusterId1, @@ -221,7 +244,13 @@ describe("contentTopicsByPubsubTopic", () => { it("throws an error for improperly formatted content topics", () => { const invalidContentTopics = ["/invalid/format"]; - expect(() => contentTopicsByPubsubTopic(invalidContentTopics)).to.throw(); + expect(() => + contentTopicsByPubsubTopic( + invalidContentTopics, + ClusterId, + NumShardsInCluster + ) + ).to.throw(); }); }); diff --git a/packages/utils/src/common/sharding/index.ts b/packages/utils/src/common/sharding/index.ts index e48522ac22..495ea42c6e 100644 --- a/packages/utils/src/common/sharding/index.ts +++ b/packages/utils/src/common/sharding/index.ts @@ -2,7 +2,6 @@ import { sha256 } from "@noble/hashes/sha256"; import { type ClusterId, ContentTopic, - DEFAULT_CLUSTER_ID, PubsubTopic, type ShardId } from "@waku/interfaces"; @@ -116,7 +115,7 @@ export function ensureValidContentTopic( */ export function contentTopicToShardIndex( contentTopic: ContentTopic, - numShardsInCluster: number = 8 + numShardsInCluster: number ): number { const { application, version } = ensureValidContentTopic(contentTopic); const digest = sha256( @@ -145,8 +144,8 @@ export function contentTopicToPubsubTopic( */ export function contentTopicsByPubsubTopic( contentTopics: ContentTopic[], - clusterId: number = DEFAULT_CLUSTER_ID, - networkShards: number = 8 + clusterId: number, + networkShards: number ): Map> { const groupedContentTopics = new Map(); for (const contentTopic of contentTopics) { From b8867dee38278d709f9a04cb66bab47b96371e1b Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 14:24:30 +1000 Subject: [PATCH 03/23] test: re-introduce usage of ensureSubscriptions --- packages/interfaces/src/sharding.ts | 10 ++++- .../tests/high-throughput.spec.ts | 14 ++++++- .../reliability-tests/tests/longevity.spec.ts | 14 ++++++- .../tests/throughput-sizes.spec.ts | 14 ++++++- packages/tests/src/lib/runNodes.ts | 11 +---- packages/tests/src/utils/nodes.ts | 41 ++++++++++++++++--- .../tests/tests/filter/subscribe.node.spec.ts | 10 +++-- 7 files changed, 88 insertions(+), 26 deletions(-) diff --git a/packages/interfaces/src/sharding.ts b/packages/interfaces/src/sharding.ts index ee5a0f0bb4..3cbe86d6c7 100644 --- a/packages/interfaces/src/sharding.ts +++ b/packages/interfaces/src/sharding.ts @@ -25,10 +25,18 @@ export type ShardId = number; export interface IRoutingInfoAutoSharding { pubsubTopic: string; shardId: ShardId; + + // Is the network config really needed for exposure? + // we should probably aim to only expose the above + Cluster Id networkConfig: AutoSharding; - contentTopic: string; + + // This is actually a property of network config, should probably be removed isAutoSharding: boolean; isStaticSharding: boolean; + + // This is only needed for tests, to setup nwaku node + // might be a cleaner way to handle it + contentTopic: string; } export interface IRoutingInfoStaticSharding { diff --git a/packages/reliability-tests/tests/high-throughput.spec.ts b/packages/reliability-tests/tests/high-throughput.spec.ts index 357efed5f7..48bb20f5a3 100644 --- a/packages/reliability-tests/tests/high-throughput.spec.ts +++ b/packages/reliability-tests/tests/high-throughput.spec.ts @@ -1,6 +1,10 @@ import { LightNode, Protocols } from "@waku/interfaces"; import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk"; -import { createRoutingInfo, delay } from "@waku/utils"; +import { + contentTopicToPubsubTopic, + createRoutingInfo, + delay +} from "@waku/utils"; import { expect } from "chai"; import { @@ -58,7 +62,13 @@ describe("High Throughput Messaging", function () { await delay(1000); - // TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); + await nwaku.ensureSubscriptions([ + contentTopicToPubsubTopic( + ContentTopic, + networkConfig.clusterId, + networkConfig.numShardsInCluster + ) + ]); waku = await createLightNode({ networkConfig }); await waku.start(); diff --git a/packages/reliability-tests/tests/longevity.spec.ts b/packages/reliability-tests/tests/longevity.spec.ts index 3e7848842f..e0ec05678f 100644 --- a/packages/reliability-tests/tests/longevity.spec.ts +++ b/packages/reliability-tests/tests/longevity.spec.ts @@ -1,6 +1,10 @@ import { LightNode, Protocols } from "@waku/interfaces"; import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk"; -import { createRoutingInfo, delay } from "@waku/utils"; +import { + contentTopicToPubsubTopic, + createRoutingInfo, + delay +} from "@waku/utils"; import { expect } from "chai"; import { @@ -57,7 +61,13 @@ describe("Longevity", function () { { retries: 3 } ); - // TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); + await nwaku.ensureSubscriptions([ + contentTopicToPubsubTopic( + ContentTopic, + networkConfig.clusterId, + networkConfig.numShardsInCluster + ) + ]); waku = await createLightNode({ networkConfig }); await waku.start(); diff --git a/packages/reliability-tests/tests/throughput-sizes.spec.ts b/packages/reliability-tests/tests/throughput-sizes.spec.ts index 6d556adbd9..7044176223 100644 --- a/packages/reliability-tests/tests/throughput-sizes.spec.ts +++ b/packages/reliability-tests/tests/throughput-sizes.spec.ts @@ -1,6 +1,10 @@ import { LightNode, Protocols } from "@waku/interfaces"; import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk"; -import { createRoutingInfo, delay } from "@waku/utils"; +import { + contentTopicToPubsubTopic, + createRoutingInfo, + delay +} from "@waku/utils"; import { expect } from "chai"; import { @@ -63,7 +67,13 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () { await delay(1000); - // TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo)); + await nwaku.ensureSubscriptions([ + contentTopicToPubsubTopic( + ContentTopic, + networkConfig.clusterId, + networkConfig.numShardsInCluster + ) + ]); waku = await createLightNode({ networkConfig }); await waku.start(); diff --git a/packages/tests/src/lib/runNodes.ts b/packages/tests/src/lib/runNodes.ts index 19ba198bd3..687ae1af25 100644 --- a/packages/tests/src/lib/runNodes.ts +++ b/packages/tests/src/lib/runNodes.ts @@ -109,16 +109,7 @@ export async function runNodes( await waku.dial(await nwaku.getMultiaddrWithId()); await waku.waitForPeers(protocols); - // TODO - - // const clusterId = networkConfig.clusterId; - - // await nwaku.ensureSubscriptions( - // relayShardsToPubsubTopics({ - // clusterId, - // shards: options.relayShards ?? [] - // }) - // ); + await nwaku.ensureSubscriptions(routingInfos.map((r) => r.pubsubTopic)); return [nwaku, waku as T]; } else { diff --git a/packages/tests/src/utils/nodes.ts b/packages/tests/src/utils/nodes.ts index 3490193d84..2275f5bd0c 100644 --- a/packages/tests/src/utils/nodes.ts +++ b/packages/tests/src/utils/nodes.ts @@ -5,7 +5,12 @@ import { Protocols } from "@waku/interfaces"; import { createLightNode } from "@waku/sdk"; -import { RoutingInfo } from "@waku/utils"; +import { + contentTopicToPubsubTopic, + formatPubsubTopic, + isAutoShardingRoutingInfo, + RoutingInfo +} from "@waku/utils"; import { Context } from "mocha"; import pRetry from "p-retry"; @@ -63,13 +68,39 @@ export async function runMultipleNodes( throw new Error("Failed to initialize waku"); } + const pubsubTopics = []; + + pubsubTopics.push(routingInfo.pubsubTopic); + + if (customArgs?.shard) { + const shards = customArgs?.shard ?? []; + for (const s of shards) { + pubsubTopics.push( + formatPubsubTopic(routingInfo.networkConfig.clusterId, s) + ); + } + } + + if (customArgs?.contentTopic && isAutoShardingRoutingInfo(routingInfo)) { + const contentTopics = customArgs?.contentTopic ?? []; + for (const ct of contentTopics) { + pubsubTopics.push( + contentTopicToPubsubTopic( + ct, + routingInfo.networkConfig.clusterId, + routingInfo.networkConfig.numShardsInCluster + ) + ); + } + } + for (const node of serviceNodes.nodes) { await waku.dial(await node.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); - // TODO - // await node.ensureSubscriptions( - // derivePubsubTopicsFromNetworkConfig(networkConfig) - // ); + + if (pubsubTopics.length > 0) { + await node.ensureSubscriptions(pubsubTopics); + } const wakuConnections = waku.libp2p.getConnections(); diff --git a/packages/tests/tests/filter/subscribe.node.spec.ts b/packages/tests/tests/filter/subscribe.node.spec.ts index 6fd66b9ef2..fd2bcb5290 100644 --- a/packages/tests/tests/filter/subscribe.node.spec.ts +++ b/packages/tests/tests/filter/subscribe.node.spec.ts @@ -8,7 +8,7 @@ import { symmetric } from "@waku/message-encryption"; import { Protocols, utf8ToBytes } from "@waku/sdk"; -import { createRoutingInfo } from "@waku/utils"; +import { createRoutingInfo, formatPubsubTopic } from "@waku/utils"; import { expect } from "chai"; import { @@ -645,18 +645,20 @@ const runTestsStatic = (strictCheckNodes: boolean): void => { routingInfo: routingInfoShard2 }); + const shardId = 2; await nwaku2.start({ filter: true, lightpush: true, relay: true, clusterId: TestClusterId, - shard: [2] + shard: [shardId] }); await waku.dial(await nwaku2.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); - // TODO - // await nwaku2.ensureSubscriptions([customDecoder.pubsubTopic]); + await nwaku2.ensureSubscriptions([ + formatPubsubTopic(TestClusterId, shardId) + ]); const messageCollector2 = new MessageCollector(); From d04130feb685a73565255a8f26632dacec19b547 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 14:30:44 +1000 Subject: [PATCH 04/23] use IRouting info for typing --- packages/core/src/lib/message/version_0.ts | 13 +++++++------ packages/message-encryption/src/ecies.ts | 14 ++++++++------ packages/message-encryption/src/symmetric.ts | 8 ++++---- packages/relay/src/relay.ts | 5 +++-- packages/rln/src/rln.ts | 9 +++++---- packages/sdk/src/filter/subscription.ts | 5 +++-- packages/sdk/src/light_push/retry_manager.ts | 12 ++++++++---- packages/sdk/src/peer_manager/peer_manager.ts | 5 +++-- packages/sdk/src/store/store.ts | 9 +++++---- packages/sdk/src/waku/waku.ts | 5 +++-- 10 files changed, 49 insertions(+), 36 deletions(-) diff --git a/packages/core/src/lib/message/version_0.ts b/packages/core/src/lib/message/version_0.ts index 53f337ccde..4715ebe6e3 100644 --- a/packages/core/src/lib/message/version_0.ts +++ b/packages/core/src/lib/message/version_0.ts @@ -5,10 +5,11 @@ import type { IMessage, IMetaSetter, IProtoMessage, - IRateLimitProof + IRateLimitProof, + IRoutingInfo } from "@waku/interfaces"; import { proto_message as proto } from "@waku/proto"; -import { isAutoShardingRoutingInfo, Logger, RoutingInfo } from "@waku/utils"; +import { isAutoShardingRoutingInfo, Logger } from "@waku/utils"; const log = new Logger("message:version-0"); const OneMillion = BigInt(1_000_000); @@ -68,7 +69,7 @@ export type EncoderOptions = { /** * The routing information for messages to encode. */ - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; /** The content topic to set on outgoing messages. */ contentTopic: string; /** @@ -88,7 +89,7 @@ export class Encoder implements IEncoder { public constructor( public contentTopic: string, public ephemeral: boolean = false, - public routingInfo: RoutingInfo, + public routingInfo: IRoutingInfo, public metaSetter?: IMetaSetter ) { if (!contentTopic || contentTopic === "") { @@ -146,7 +147,7 @@ export function createEncoder({ export class Decoder implements IDecoder { public constructor( public contentTopic: string, - public routingInfo: RoutingInfo + public routingInfo: IRoutingInfo ) { if (!contentTopic || contentTopic === "") { throw new Error("Content topic must be specified"); @@ -201,7 +202,7 @@ export class Decoder implements IDecoder { */ export function createDecoder( contentTopic: string, - routingInfo: RoutingInfo + routingInfo: IRoutingInfo ): Decoder { if (isAutoShardingRoutingInfo(routingInfo)) { if (routingInfo.contentTopic !== contentTopic) diff --git a/packages/message-encryption/src/ecies.ts b/packages/message-encryption/src/ecies.ts index 4fec13531b..bb7f1ded23 100644 --- a/packages/message-encryption/src/ecies.ts +++ b/packages/message-encryption/src/ecies.ts @@ -5,10 +5,11 @@ import { type IEncryptedMessage, type IMessage, type IMetaSetter, - type IProtoMessage + type IProtoMessage, + type IRoutingInfo } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { Logger, RoutingInfo } from "@waku/utils"; +import { Logger } from "@waku/utils"; import { generatePrivateKey } from "./crypto/utils.js"; import { DecodedMessage } from "./decoded_message.js"; @@ -33,7 +34,7 @@ const log = new Logger("message-encryption:ecies"); class Encoder implements IEncoder { public constructor( public contentTopic: string, - public routingInfo: RoutingInfo, + public routingInfo: IRoutingInfo, private publicKey: Uint8Array, private sigPrivKey?: Uint8Array, public ephemeral: boolean = false, @@ -82,7 +83,7 @@ export interface EncoderOptions { /** * The routing information for messages to encode. */ - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; /** The content topic to set on outgoing messages. */ contentTopic: string; /** @@ -135,7 +136,7 @@ export function createEncoder({ class Decoder extends DecoderV0 implements IDecoder { public constructor( contentTopic: string, - routingInfo: RoutingInfo, + routingInfo: IRoutingInfo, private privateKey: Uint8Array ) { super(contentTopic, routingInfo); @@ -206,11 +207,12 @@ class Decoder extends DecoderV0 implements IDecoder { * decode incoming messages. * * @param contentTopic The resulting decoder will only decode messages with this content topic. + * @param routingInfo * @param privateKey The private key used to decrypt the message. */ export function createDecoder( contentTopic: string, - routingInfo: RoutingInfo, + routingInfo: IRoutingInfo, privateKey: Uint8Array ): Decoder { return new Decoder(contentTopic, routingInfo, privateKey); diff --git a/packages/message-encryption/src/symmetric.ts b/packages/message-encryption/src/symmetric.ts index 80692dd834..d6f0ebfe41 100644 --- a/packages/message-encryption/src/symmetric.ts +++ b/packages/message-encryption/src/symmetric.ts @@ -9,7 +9,7 @@ import type { IRoutingInfo } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { Logger, RoutingInfo } from "@waku/utils"; +import { Logger } from "@waku/utils"; import { generateSymmetricKey } from "./crypto/utils.js"; import { DecodedMessage } from "./decoded_message.js"; @@ -83,7 +83,7 @@ export interface EncoderOptions { /** * The routing information for messages to encode. */ - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; /** The content topic to set on outgoing messages. */ contentTopic: string; /** @@ -136,7 +136,7 @@ export function createEncoder({ class Decoder extends DecoderV0 implements IDecoder { public constructor( contentTopic: string, - routingInfo: RoutingInfo, + routingInfo: IRoutingInfo, private symKey: Uint8Array ) { super(contentTopic, routingInfo); @@ -212,7 +212,7 @@ class Decoder extends DecoderV0 implements IDecoder { */ export function createDecoder( contentTopic: string, - routingInfo: RoutingInfo, + routingInfo: IRoutingInfo, symKey: Uint8Array ): Decoder { return new Decoder(contentTopic, routingInfo, symKey); diff --git a/packages/relay/src/relay.ts b/packages/relay/src/relay.ts index 571a6a78cd..6c4c5b9136 100644 --- a/packages/relay/src/relay.ts +++ b/packages/relay/src/relay.ts @@ -17,12 +17,13 @@ import { IEncoder, IMessage, IRelay, + type IRoutingInfo, Libp2p, ProtocolError, PubsubTopic, SDKProtocolResult } from "@waku/interfaces"; -import { isWireSizeUnderCap, RoutingInfo, toAsyncIterator } from "@waku/utils"; +import { isWireSizeUnderCap, toAsyncIterator } from "@waku/utils"; import { pushOrInitMapSet } from "@waku/utils"; import { Logger } from "@waku/utils"; import { pEvent } from "p-event"; @@ -39,7 +40,7 @@ export type Observer = { }; export type RelayCreateOptions = CreateNodeOptions & { - routingInfos: RoutingInfo[]; + routingInfos: IRoutingInfo[]; } & Partial; export type ContentTopic = string; diff --git a/packages/rln/src/rln.ts b/packages/rln/src/rln.ts index 8dbfc69fc0..2c9f6f653d 100644 --- a/packages/rln/src/rln.ts +++ b/packages/rln/src/rln.ts @@ -2,9 +2,10 @@ import { createDecoder, createEncoder } from "@waku/core"; import type { ContentTopic, IDecodedMessage, - IMetaSetter + IMetaSetter, + IRoutingInfo } from "@waku/interfaces"; -import { Logger, RoutingInfo } from "@waku/utils"; +import { Logger } from "@waku/utils"; import init from "@waku/zerokit-rln-wasm"; import * as zerokitRLN from "@waku/zerokit-rln-wasm"; @@ -31,7 +32,7 @@ type WakuRLNEncoderOptions = { /** * The routing information for messages to encode. */ - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; /** The content topic to set on outgoing messages. */ contentTopic: string; /** @@ -108,7 +109,7 @@ export class RLNInstance extends RLNCredentialsManager { public createDecoder( contentTopic: ContentTopic, - routingInfo: RoutingInfo + routingInfo: IRoutingInfo ): RLNDecoder { return createRLNDecoder({ rlnInstance: this, diff --git a/packages/sdk/src/filter/subscription.ts b/packages/sdk/src/filter/subscription.ts index 67b3f6aa33..e35429572d 100644 --- a/packages/sdk/src/filter/subscription.ts +++ b/packages/sdk/src/filter/subscription.ts @@ -10,12 +10,13 @@ import type { IDecodedMessage, IDecoder, IProtoMessage, + IRoutingInfo, PeerIdStr, PubsubTopic } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { Logger, RoutingInfo } from "@waku/utils"; +import { Logger } from "@waku/utils"; import { PeerManager, PeerManagerEventNames } from "../peer_manager/index.js"; @@ -36,7 +37,7 @@ type AttemptUnsubscribeParams = { type Libp2pEventHandler = (e: CustomEvent) => void; export class Subscription { - private readonly routingInfo: RoutingInfo; + private readonly routingInfo: IRoutingInfo; private readonly pubsubTopic: PubsubTopic; private readonly protocol: FilterCore; private readonly peerManager: PeerManager; diff --git a/packages/sdk/src/light_push/retry_manager.ts b/packages/sdk/src/light_push/retry_manager.ts index 9fe63fc92e..380c954277 100644 --- a/packages/sdk/src/light_push/retry_manager.ts +++ b/packages/sdk/src/light_push/retry_manager.ts @@ -1,6 +1,10 @@ import type { PeerId } from "@libp2p/interface"; -import { type CoreProtocolResult, Protocols } from "@waku/interfaces"; -import { Logger, RoutingInfo } from "@waku/utils"; +import { + type CoreProtocolResult, + type IRoutingInfo, + Protocols +} from "@waku/interfaces"; +import { Logger } from "@waku/utils"; import type { PeerManager } from "../peer_manager/index.js"; @@ -15,7 +19,7 @@ type AttemptCallback = (peerId: PeerId) => Promise; export type ScheduledTask = { maxAttempts: number; - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; callback: AttemptCallback; }; @@ -54,7 +58,7 @@ export class RetryManager { public push( callback: AttemptCallback, maxAttempts: number, - routingInfo: RoutingInfo + routingInfo: IRoutingInfo ): void { this.queue.push({ maxAttempts, diff --git a/packages/sdk/src/peer_manager/peer_manager.ts b/packages/sdk/src/peer_manager/peer_manager.ts index 73ab46c72a..cb68f3da56 100644 --- a/packages/sdk/src/peer_manager/peer_manager.ts +++ b/packages/sdk/src/peer_manager/peer_manager.ts @@ -12,11 +12,12 @@ import { } from "@waku/core"; import { CONNECTION_LOCKED_TAG, + type IRoutingInfo, Libp2p, Libp2pEventHandler, Protocols } from "@waku/interfaces"; -import { Logger, RoutingInfo } from "@waku/utils"; +import { Logger } from "@waku/utils"; const log = new Logger("peer-manager"); @@ -34,7 +35,7 @@ type PeerManagerParams = { type GetPeersParams = { protocol: Protocols; - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; }; export enum PeerManagerEventNames { diff --git a/packages/sdk/src/store/store.ts b/packages/sdk/src/store/store.ts index 1e85274f6e..0d8e686d23 100644 --- a/packages/sdk/src/store/store.ts +++ b/packages/sdk/src/store/store.ts @@ -5,6 +5,7 @@ import { messageHash, StoreCore } from "@waku/core"; import { IDecodedMessage, IDecoder, + type IRoutingInfo, IStore, Libp2p, Protocols, @@ -12,7 +13,7 @@ import { StoreCursor, StoreProtocolOptions } from "@waku/interfaces"; -import { isDefined, Logger, RoutingInfo } from "@waku/utils"; +import { isDefined, Logger } from "@waku/utils"; import { PeerManager } from "../peer_manager/index.js"; @@ -181,7 +182,7 @@ export class Store implements IStore { private validateDecodersAndPubsubTopic( decoders: IDecoder[] ): { - routingInfo: RoutingInfo; + routingInfo: IRoutingInfo; contentTopics: string[]; decodersAsMap: Map>; } { @@ -232,7 +233,7 @@ export class Store implements IStore { } private async getPeerToUse( - routingInfo: RoutingInfo + routingInfo: IRoutingInfo ): Promise { const peers = await this.peerManager.getPeers({ protocol: Protocols.Store, @@ -301,7 +302,7 @@ export class Store implements IStore { const isHashQuery = options?.messageHashes && options.messageHashes.length > 0; - let routingInfo: RoutingInfo; + let routingInfo: IRoutingInfo; let contentTopics: string[]; let decodersAsMap: Map>; diff --git a/packages/sdk/src/waku/waku.ts b/packages/sdk/src/waku/waku.ts index 5d44a2c609..b0ada06494 100644 --- a/packages/sdk/src/waku/waku.ts +++ b/packages/sdk/src/waku/waku.ts @@ -16,6 +16,7 @@ import type { IFilter, ILightPush, IRelay, + IRoutingInfo, IStore, IWaku, IWakuEventEmitter, @@ -27,7 +28,7 @@ import { HealthStatus, Protocols } from "@waku/interfaces"; -import { createRoutingInfo, Logger, RoutingInfo } from "@waku/utils"; +import { createRoutingInfo, Logger } from "@waku/utils"; import { Filter } from "../filter/index.js"; import { HealthIndicator } from "../health_indicator/index.js"; @@ -286,6 +287,6 @@ function getRoutingInfo( networkConfig: NetworkConfig, contentTopic?: string, shardId?: number -): RoutingInfo { +): IRoutingInfo { return createRoutingInfo(networkConfig, { contentTopic, shardId }); } From 3200b19a02b47c7feba51321f5880c4a94efad8d Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 14:45:06 +1000 Subject: [PATCH 05/23] Simplify IRouting interface --- packages/core/src/lib/message/version_0.ts | 16 +++++------ packages/interfaces/src/sharding.ts | 27 ++----------------- packages/sdk/src/peer_manager/peer_manager.ts | 4 +-- packages/sdk/src/store/store.spec.ts | 4 +-- packages/tests/src/lib/index.ts | 2 +- packages/tests/src/utils/nodes.ts | 6 ++--- .../store/different_static_shards.spec.ts | 6 ++--- .../tests/tests/store/message_hash.spec.ts | 3 ++- .../utils/src/common/sharding/routing_info.ts | 16 ++++++++--- 9 files changed, 31 insertions(+), 53 deletions(-) diff --git a/packages/core/src/lib/message/version_0.ts b/packages/core/src/lib/message/version_0.ts index 4715ebe6e3..60748bb4e0 100644 --- a/packages/core/src/lib/message/version_0.ts +++ b/packages/core/src/lib/message/version_0.ts @@ -9,7 +9,7 @@ import type { IRoutingInfo } from "@waku/interfaces"; import { proto_message as proto } from "@waku/proto"; -import { isAutoShardingRoutingInfo, Logger } from "@waku/utils"; +import { Logger } from "@waku/utils"; const log = new Logger("message:version-0"); const OneMillion = BigInt(1_000_000); @@ -130,6 +130,8 @@ export class Encoder implements IEncoder { * format to be sent over the Waku network. The resulting encoder can then be * pass to { @link @waku/interfaces!ISender.send } to automatically encode outgoing * messages. + * + * Note that a routing info may be tied to a given content topic, this is not checked by the encoder. */ export function createEncoder({ contentTopic, @@ -137,10 +139,6 @@ export function createEncoder({ ephemeral, metaSetter }: EncoderOptions): Encoder { - if (isAutoShardingRoutingInfo(routingInfo)) { - if (routingInfo.contentTopic !== contentTopic) - throw "Routing Info must have the same content topic as the encoder"; - } return new Encoder(contentTopic, ephemeral, routingInfo, metaSetter); } @@ -198,15 +196,13 @@ export class Decoder implements IDecoder { * messages. * * @param contentTopic The resulting decoder will only decode messages with this content topic. - * @param routingInfo + * @param routingInfo Routing information such as cluster id and shard id on which the message is expected to be received. + * + * Note that a routing info may be tied to a given content topic, this is not checked by the encoder. */ export function createDecoder( contentTopic: string, routingInfo: IRoutingInfo ): Decoder { - if (isAutoShardingRoutingInfo(routingInfo)) { - if (routingInfo.contentTopic !== contentTopic) - throw "Routing Info must have the same content topic as the encoder"; - } return new Decoder(contentTopic, routingInfo); } diff --git a/packages/interfaces/src/sharding.ts b/packages/interfaces/src/sharding.ts index 3cbe86d6c7..f204d47ecd 100644 --- a/packages/interfaces/src/sharding.ts +++ b/packages/interfaces/src/sharding.ts @@ -22,31 +22,8 @@ export type ShardId = number; /** * Routing Information for a given message. */ -export interface IRoutingInfoAutoSharding { - pubsubTopic: string; +export interface IRoutingInfo { + clusterId: ClusterId; shardId: ShardId; - - // Is the network config really needed for exposure? - // we should probably aim to only expose the above + Cluster Id - networkConfig: AutoSharding; - - // This is actually a property of network config, should probably be removed - isAutoSharding: boolean; - isStaticSharding: boolean; - - // This is only needed for tests, to setup nwaku node - // might be a cleaner way to handle it - contentTopic: string; -} - -export interface IRoutingInfoStaticSharding { pubsubTopic: string; - shardId: ShardId; - networkConfig: StaticSharding; - isAutoSharding: boolean; - isStaticSharding: boolean; } - -export type IRoutingInfo = - | IRoutingInfoAutoSharding - | IRoutingInfoStaticSharding; diff --git a/packages/sdk/src/peer_manager/peer_manager.ts b/packages/sdk/src/peer_manager/peer_manager.ts index cb68f3da56..ebf8bc82c5 100644 --- a/packages/sdk/src/peer_manager/peer_manager.ts +++ b/packages/sdk/src/peer_manager/peer_manager.ts @@ -109,7 +109,7 @@ export class PeerManager { public async getPeers(params: GetPeersParams): Promise { log.info( `Getting peers for protocol: ${params.protocol}, ` + - `clusterId: ${params.routingInfo.networkConfig.clusterId},` + + `clusterId: ${params.routingInfo.clusterId},` + ` shard: ${params.routingInfo.shardId}` ); @@ -123,7 +123,7 @@ export class PeerManager { const isOnSameShard = await this.connectionManager.isPeerOnShard( peer.id, - params.routingInfo.networkConfig.clusterId, + params.routingInfo.clusterId, params.routingInfo.shardId ); if (!isOnSameShard) { diff --git a/packages/sdk/src/store/store.spec.ts b/packages/sdk/src/store/store.spec.ts index 983c1ddba1..83ccb08436 100644 --- a/packages/sdk/src/store/store.spec.ts +++ b/packages/sdk/src/store/store.spec.ts @@ -18,9 +18,7 @@ const TestNetworkingInfo = { clusterId: 0, numShardsInCluster: 8 }; const MockRoutingInfo: IRoutingInfo = { pubsubTopic: "/custom/topic", shardId: 1, - networkConfig: TestNetworkingInfo, - isAutoSharding: false, - isStaticSharding: false + clusterId: TestNetworkingInfo.clusterId }; describe("Store", () => { diff --git a/packages/tests/src/lib/index.ts b/packages/tests/src/lib/index.ts index 02b0b77fac..85de368b23 100644 --- a/packages/tests/src/lib/index.ts +++ b/packages/tests/src/lib/index.ts @@ -263,7 +263,7 @@ function applyDefaultArgs(routingInfo: RoutingInfo, args?: Args): Args { relay: true }; - defaultArgs.clusterId = routingInfo.networkConfig.clusterId; + defaultArgs.clusterId = routingInfo.clusterId; if (isAutoShardingRoutingInfo(routingInfo)) { defaultArgs.numShardsInNetwork = diff --git a/packages/tests/src/utils/nodes.ts b/packages/tests/src/utils/nodes.ts index 2275f5bd0c..26f29e27c3 100644 --- a/packages/tests/src/utils/nodes.ts +++ b/packages/tests/src/utils/nodes.ts @@ -75,9 +75,7 @@ export async function runMultipleNodes( if (customArgs?.shard) { const shards = customArgs?.shard ?? []; for (const s of shards) { - pubsubTopics.push( - formatPubsubTopic(routingInfo.networkConfig.clusterId, s) - ); + pubsubTopics.push(formatPubsubTopic(routingInfo.clusterId, s)); } } @@ -87,7 +85,7 @@ export async function runMultipleNodes( pubsubTopics.push( contentTopicToPubsubTopic( ct, - routingInfo.networkConfig.clusterId, + routingInfo.clusterId, routingInfo.networkConfig.numShardsInCluster ) ); diff --git a/packages/tests/tests/store/different_static_shards.spec.ts b/packages/tests/tests/store/different_static_shards.spec.ts index bb71b4a2d4..3c639364bc 100644 --- a/packages/tests/tests/store/different_static_shards.spec.ts +++ b/packages/tests/tests/store/different_static_shards.spec.ts @@ -1,7 +1,7 @@ import { createDecoder } from "@waku/core"; import { IMessage, LightNode, ShardId, StaticSharding } from "@waku/interfaces"; import { Protocols } from "@waku/sdk"; -import { createRoutingInfo } from "@waku/utils"; +import { createRoutingInfo, RoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -155,13 +155,13 @@ describe("Waku Store, different static shards", function () { nwaku, totalMsgs, TestDecoderShardOne.contentTopic, - TestDecoderShardOne.routingInfo + TestDecoderShardOne.routingInfo as RoutingInfo ); await sendMessages( nwaku2, totalMsgs, TestDecoderShardTwo.contentTopic, - TestDecoderShardTwo.routingInfo + TestDecoderShardTwo.routingInfo as RoutingInfo ); await waku.dial(await nwaku.getMultiaddrWithId()); diff --git a/packages/tests/tests/store/message_hash.spec.ts b/packages/tests/tests/store/message_hash.spec.ts index d97077e747..906fc98ed8 100644 --- a/packages/tests/tests/store/message_hash.spec.ts +++ b/packages/tests/tests/store/message_hash.spec.ts @@ -1,5 +1,6 @@ import { messageHash } from "@waku/core"; import type { IDecodedMessage, LightNode } from "@waku/interfaces"; +import { RoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -36,7 +37,7 @@ describe("Waku Store, message hash query", function () { nwaku, totalMsgs, TestDecoder.contentTopic, - TestDecoder.routingInfo, + TestDecoder.routingInfo as RoutingInfo, true ); diff --git a/packages/utils/src/common/sharding/routing_info.ts b/packages/utils/src/common/sharding/routing_info.ts index 1b804ac581..a51de7cfc3 100644 --- a/packages/utils/src/common/sharding/routing_info.ts +++ b/packages/utils/src/common/sharding/routing_info.ts @@ -1,8 +1,8 @@ import type { AutoSharding, + ClusterId, ContentTopic, - IRoutingInfoAutoSharding, - IRoutingInfoStaticSharding, + IRoutingInfo, NetworkConfig, PubsubTopic, ShardId, @@ -32,7 +32,7 @@ export abstract class BaseRoutingInfo { export class AutoShardingRoutingInfo extends BaseRoutingInfo - implements IRoutingInfoAutoSharding + implements IRoutingInfo { public static fromContentTopic( contentTopic: ContentTopic, @@ -68,6 +68,10 @@ export class AutoShardingRoutingInfo super(networkConfig, pubsubTopic, shardId); } + public get clusterId(): number { + return this.networkConfig.clusterId; + } + public get isAutoSharding(): boolean { return true; } @@ -79,7 +83,7 @@ export class AutoShardingRoutingInfo export class StaticShardingRoutingInfo extends BaseRoutingInfo - implements IRoutingInfoStaticSharding + implements IRoutingInfo { /** * Create Routing Info for static sharding network, using shard @@ -129,6 +133,10 @@ export class StaticShardingRoutingInfo super(networkConfig, pubsubTopic, shardId); } + public get clusterId(): ClusterId { + return this.networkConfig.clusterId; + } + public get isAutoSharding(): boolean { return false; } From 1bbb129d0477ccb08feb1f9a77cb92948fdbe591 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 17:19:58 +1000 Subject: [PATCH 06/23] fix relay test --- packages/tests/tests/wait_for_remote_peer.node.spec.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/tests/tests/wait_for_remote_peer.node.spec.ts b/packages/tests/tests/wait_for_remote_peer.node.spec.ts index 0811fc069c..55e15160b3 100644 --- a/packages/tests/tests/wait_for_remote_peer.node.spec.ts +++ b/packages/tests/tests/wait_for_remote_peer.node.spec.ts @@ -2,6 +2,7 @@ import type { LightNode, RelayNode } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; import { createRelayNode } from "@waku/relay"; import { createLightNode } from "@waku/sdk"; +import { formatPubsubTopic } from "@waku/utils"; import { expect } from "chai"; import { @@ -255,12 +256,12 @@ describe("Wait for remote peer", function () { expect(peers.includes(nimPeerId as string)).to.be.true; }); - it("Privacy Node - default protocol", async function () { + it("Relay Node - default protocol", async function () { this.timeout(20_000); - [nwaku, waku1] = await runRelayNodes(this, DefaultTestNetworkConfig); + [nwaku, waku1] = await runRelayNodes(this, { clusterId: 0 }, [0]); const multiAddrWithId = await nwaku.getMultiaddrWithId(); - const peers = waku1.relay.getMeshPeers(DefaultTestRoutingInfo.pubsubTopic); + const peers = waku1.relay.getMeshPeers(formatPubsubTopic(0, 0)); const nimPeerId = multiAddrWithId.getPeerId(); From 8d278f81325a6228a4c16c2b994751b39381e259 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 17:24:34 +1000 Subject: [PATCH 07/23] revert change that is just noise --- packages/rln/src/codec.spec.ts | 110 +++++++++++++-------------- packages/rln/src/codec.test-utils.ts | 8 +- 2 files changed, 59 insertions(+), 59 deletions(-) diff --git a/packages/rln/src/codec.spec.ts b/packages/rln/src/codec.spec.ts index 084ac1becf..dfb6c80f1b 100644 --- a/packages/rln/src/codec.spec.ts +++ b/packages/rln/src/codec.spec.ts @@ -24,8 +24,8 @@ import { import { createTestMetaSetter, createTestRLNCodecSetup, - EmptyProtoMessage, - TestConstants, + EMPTY_PROTO_MESSAGE, + TEST_CONSTANTS, verifyRLNMessage } from "./codec.test-utils.js"; import { RlnMessage } from "./message.js"; @@ -38,8 +38,8 @@ describe("RLN codec with version 0", () => { const rlnEncoder = createRLNEncoder({ encoder: createEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo }), rlnInstance, index, @@ -48,8 +48,8 @@ describe("RLN codec with version 0", () => { const rlnDecoder = createRLNDecoder({ rlnInstance, decoder: createDecoder( - TestConstants.contentTopic, - TestConstants.routingInfo + TEST_CONSTANTS.contentTopic, + TEST_CONSTANTS.routingInfo ) }); @@ -59,11 +59,11 @@ describe("RLN codec with version 0", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, protoResult! ))!; - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); }); it("toProtoObj", async function () { @@ -72,8 +72,8 @@ describe("RLN codec with version 0", () => { const rlnEncoder = new RLNEncoder( createEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo }), rlnInstance, index, @@ -81,18 +81,18 @@ describe("RLN codec with version 0", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createDecoder(TestConstants.contentTopic, TestConstants.routingInfo) + createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, proto! )) as RlnMessage; - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); }); }); @@ -104,8 +104,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createSymEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo, + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo, symKey }), rlnInstance, @@ -115,8 +115,8 @@ describe("RLN codec with version 1", () => { const rlnDecoder = new RLNDecoder( rlnInstance, createSymDecoder( - TestConstants.contentTopic, - TestConstants.routingInfo, + TEST_CONSTANTS.contentTopic, + TEST_CONSTANTS.routingInfo, symKey ) ); @@ -127,11 +127,11 @@ describe("RLN codec with version 1", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, protoResult! ))!; - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); }); it("Symmetric, toProtoObj", async function () { @@ -141,8 +141,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createSymEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo, + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo, symKey }), rlnInstance, @@ -152,8 +152,8 @@ describe("RLN codec with version 1", () => { const rlnDecoder = new RLNDecoder( rlnInstance, createSymDecoder( - TestConstants.contentTopic, - TestConstants.routingInfo, + TEST_CONSTANTS.contentTopic, + TEST_CONSTANTS.routingInfo, symKey ) ); @@ -162,11 +162,11 @@ describe("RLN codec with version 1", () => { expect(proto).to.not.be.undefined; const msg = await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, proto! ); - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); }); it("Asymmetric, toWire", async function () { @@ -177,8 +177,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createAsymEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo, + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo, publicKey }), rlnInstance, @@ -188,8 +188,8 @@ describe("RLN codec with version 1", () => { const rlnDecoder = new RLNDecoder( rlnInstance, createAsymDecoder( - TestConstants.contentTopic, - TestConstants.routingInfo, + TEST_CONSTANTS.contentTopic, + TEST_CONSTANTS.routingInfo, privateKey ) ); @@ -200,11 +200,11 @@ describe("RLN codec with version 1", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, protoResult! ))!; - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); }); it("Asymmetric, toProtoObj", async function () { @@ -215,8 +215,8 @@ describe("RLN codec with version 1", () => { const rlnEncoder = new RLNEncoder( createAsymEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo, + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo, publicKey }), rlnInstance, @@ -226,8 +226,8 @@ describe("RLN codec with version 1", () => { const rlnDecoder = new RLNDecoder( rlnInstance, createAsymDecoder( - TestConstants.contentTopic, - TestConstants.routingInfo, + TEST_CONSTANTS.contentTopic, + TEST_CONSTANTS.routingInfo, privateKey ) ); @@ -236,11 +236,11 @@ describe("RLN codec with version 1", () => { expect(proto).to.not.be.undefined; const msg = await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, proto! ); - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); }); }); @@ -251,8 +251,8 @@ describe("RLN Codec - epoch", () => { const rlnEncoder = new RLNEncoder( createEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo }), rlnInstance, index, @@ -260,14 +260,14 @@ describe("RLN Codec - epoch", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createDecoder(TestConstants.contentTopic, TestConstants.routingInfo) + createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, proto! )) as RlnMessage; @@ -277,7 +277,7 @@ describe("RLN Codec - epoch", () => { expect(msg.epoch!.toString(10).length).to.eq(9); expect(msg.epoch).to.eq(epoch); - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); }); }); @@ -289,8 +289,8 @@ describe("RLN codec with version 0 and meta setter", () => { const rlnEncoder = createRLNEncoder({ encoder: createEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo, + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo, metaSetter }), rlnInstance, @@ -300,8 +300,8 @@ describe("RLN codec with version 0 and meta setter", () => { const rlnDecoder = createRLNDecoder({ rlnInstance, decoder: createDecoder( - TestConstants.contentTopic, - TestConstants.routingInfo + TEST_CONSTANTS.contentTopic, + TEST_CONSTANTS.routingInfo ) }); @@ -311,17 +311,17 @@ describe("RLN codec with version 0 and meta setter", () => { const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); expect(protoResult).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, protoResult! ))!; const expectedMeta = metaSetter({ - ...EmptyProtoMessage, + ...EMPTY_PROTO_MESSAGE, payload: protoResult!.payload }); expect(msg!.meta).to.deep.eq(expectedMeta); - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); }); it("toProtoObj", async function () { @@ -331,8 +331,8 @@ describe("RLN codec with version 0 and meta setter", () => { const rlnEncoder = new RLNEncoder( createEncoder({ - contentTopic: TestConstants.contentTopic, - routingInfo: TestConstants.routingInfo, + contentTopic: TEST_CONSTANTS.contentTopic, + routingInfo: TEST_CONSTANTS.routingInfo, metaSetter }), rlnInstance, @@ -341,23 +341,23 @@ describe("RLN codec with version 0 and meta setter", () => { ); const rlnDecoder = new RLNDecoder( rlnInstance, - createDecoder(TestConstants.contentTopic, TestConstants.routingInfo) + createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo) ); const proto = await rlnEncoder.toProtoObj({ payload }); expect(proto).to.not.be.undefined; const msg = (await rlnDecoder.fromProtoObj( - TestConstants.emptyPubsubTopic, + TEST_CONSTANTS.emptyPubsubTopic, proto! )) as RlnMessage; const expectedMeta = metaSetter({ - ...EmptyProtoMessage, + ...EMPTY_PROTO_MESSAGE, payload: msg!.payload }); expect(msg!.meta).to.deep.eq(expectedMeta); - verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance); + verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); }); }); diff --git a/packages/rln/src/codec.test-utils.ts b/packages/rln/src/codec.test-utils.ts index 140a726007..4b16ae7131 100644 --- a/packages/rln/src/codec.test-utils.ts +++ b/packages/rln/src/codec.test-utils.ts @@ -12,7 +12,7 @@ export interface TestRLNCodecSetup { payload: Uint8Array; } -export const TestConstants = { +export const TEST_CONSTANTS = { contentTopic: "/test/1/waku-message/utf8", emptyPubsubTopic: "", defaultIndex: 0, @@ -26,7 +26,7 @@ export const TestConstants = { ) } as const; -export const EmptyProtoMessage = { +export const EMPTY_PROTO_MESSAGE = { timestamp: undefined, contentTopic: "", ephemeral: undefined, @@ -46,8 +46,8 @@ export async function createTestRLNCodecSetup(): Promise { return { rlnInstance, credential, - index: TestConstants.defaultIndex, - payload: TestConstants.defaultPayload + index: TEST_CONSTANTS.defaultIndex, + payload: TEST_CONSTANTS.defaultPayload }; } From 065ef0adb07a15be5564406002eff565b2534a17 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 17:32:58 +1000 Subject: [PATCH 08/23] `getRoutingInfo` is best as a method as it can use the context from `this`. --- packages/sdk/src/waku/waku.ts | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/packages/sdk/src/waku/waku.ts b/packages/sdk/src/waku/waku.ts index b0ada06494..38dfec9ffa 100644 --- a/packages/sdk/src/waku/waku.ts +++ b/packages/sdk/src/waku/waku.ts @@ -260,8 +260,7 @@ export class WakuNode implements IWaku { } public createDecoder(params: CreateDecoderParams): IDecoder { - const routingInfo = getRoutingInfo( - this.networkConfig, + const routingInfo = this.createRoutingInfo( params.contentTopic, params.shardId ); @@ -269,8 +268,7 @@ export class WakuNode implements IWaku { } public createEncoder(params: CreateEncoderParams): IEncoder { - const routingInfo = getRoutingInfo( - this.networkConfig, + const routingInfo = this.createRoutingInfo( params.contentTopic, params.shardId ); @@ -281,12 +279,11 @@ export class WakuNode implements IWaku { routingInfo: routingInfo }); } -} -function getRoutingInfo( - networkConfig: NetworkConfig, - contentTopic?: string, - shardId?: number -): IRoutingInfo { - return createRoutingInfo(networkConfig, { contentTopic, shardId }); + private createRoutingInfo( + contentTopic?: string, + shardId?: number + ): IRoutingInfo { + return createRoutingInfo(this.networkConfig, { contentTopic, shardId }); + } } From 3c930dedfca6a1b9a6a4b1c6ddfdce128ed9a7ac Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 18:07:38 +1000 Subject: [PATCH 09/23] revert change on encoder --- packages/core/src/lib/message/version_0.ts | 21 +--------------- packages/core/src/lib/store/store.spec.ts | 4 +-- packages/interfaces/src/message.ts | 20 +++++++++++++++ packages/message-encryption/src/ecies.ts | 20 ++------------- packages/message-encryption/src/symmetric.ts | 20 ++------------- packages/rln/src/rln.ts | 26 +++----------------- packages/tests/src/lib/message_collector.ts | 1 - 7 files changed, 29 insertions(+), 83 deletions(-) diff --git a/packages/core/src/lib/message/version_0.ts b/packages/core/src/lib/message/version_0.ts index 60748bb4e0..bb7e48fc36 100644 --- a/packages/core/src/lib/message/version_0.ts +++ b/packages/core/src/lib/message/version_0.ts @@ -1,4 +1,5 @@ import type { + EncoderOptions, IDecodedMessage, IDecoder, IEncoder, @@ -65,26 +66,6 @@ export class DecodedMessage implements IDecodedMessage { } } -export type EncoderOptions = { - /** - * The routing information for messages to encode. - */ - routingInfo: IRoutingInfo; - /** The content topic to set on outgoing messages. */ - contentTopic: string; - /** - * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. - * @defaultValue `false` - */ - ephemeral?: boolean; - /** - * A function called when encoding messages to set the meta field. - * @param IProtoMessage The message encoded for wire, without the meta field. - * If encryption is used, `metaSetter` only accesses _encrypted_ payload. - */ - metaSetter?: IMetaSetter; -}; - export class Encoder implements IEncoder { public constructor( public contentTopic: string, diff --git a/packages/core/src/lib/store/store.spec.ts b/packages/core/src/lib/store/store.spec.ts index 5677b24b74..fbe340ab90 100644 --- a/packages/core/src/lib/store/store.spec.ts +++ b/packages/core/src/lib/store/store.spec.ts @@ -82,9 +82,7 @@ describe("StoreCore", () => { const routingInfo: IRoutingInfo = { pubsubTopic: "test-topic", shardId: 1, - networkConfig: { clusterId: 0 }, - isAutoSharding: false, - isStaticSharding: false + clusterId: 0 }; beforeEach(() => { diff --git a/packages/interfaces/src/message.ts b/packages/interfaces/src/message.ts index fda16fb160..0e076f0b3e 100644 --- a/packages/interfaces/src/message.ts +++ b/packages/interfaces/src/message.ts @@ -71,6 +71,26 @@ export interface IMetaSetter { (message: IProtoMessage & { meta: undefined }): Uint8Array; } +export interface EncoderOptions { + /** + * The routing information for the message (cluster id, shard id, pubsubTopic) + */ + routingInfo: IRoutingInfo; + /** The content topic to set on outgoing messages. */ + contentTopic: string; + /** + * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. + * @defaultValue `false` + */ + ephemeral?: boolean; + /** + * A function called when encoding messages to set the meta field. + * @param IProtoMessage The message encoded for wire, without the meta field. + * If encryption is used, `metaSetter` only accesses _encrypted_ payload. + */ + metaSetter?: IMetaSetter; +} + export interface IEncoder { contentTopic: string; ephemeral: boolean; diff --git a/packages/message-encryption/src/ecies.ts b/packages/message-encryption/src/ecies.ts index bb7f1ded23..1628b2481f 100644 --- a/packages/message-encryption/src/ecies.ts +++ b/packages/message-encryption/src/ecies.ts @@ -1,5 +1,6 @@ import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0"; import { + type EncoderOptions as BaseEncoderOptions, type IDecoder, type IEncoder, type IEncryptedMessage, @@ -79,24 +80,7 @@ class Encoder implements IEncoder { } } -export interface EncoderOptions { - /** - * The routing information for messages to encode. - */ - routingInfo: IRoutingInfo; - /** The content topic to set on outgoing messages. */ - contentTopic: string; - /** - * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. - * @defaultValue `false` - */ - ephemeral?: boolean; - /** - * A function called when encoding messages to set the meta field. - * @param IProtoMessage The message encoded for wire, without the meta field. - * If encryption is used, `metaSetter` only accesses _encrypted_ payload. - */ - metaSetter?: IMetaSetter; +export interface EncoderOptions extends BaseEncoderOptions { /** The public key to encrypt the payload for. */ publicKey: Uint8Array; /** An optional private key to be used to sign the payload before encryption. */ diff --git a/packages/message-encryption/src/symmetric.ts b/packages/message-encryption/src/symmetric.ts index d6f0ebfe41..2261e01751 100644 --- a/packages/message-encryption/src/symmetric.ts +++ b/packages/message-encryption/src/symmetric.ts @@ -1,5 +1,6 @@ import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0"; import type { + EncoderOptions as BaseEncoderOptions, IDecoder, IEncoder, IEncryptedMessage, @@ -79,24 +80,7 @@ class Encoder implements IEncoder { } } -export interface EncoderOptions { - /** - * The routing information for messages to encode. - */ - routingInfo: IRoutingInfo; - /** The content topic to set on outgoing messages. */ - contentTopic: string; - /** - * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. - * @defaultValue `false` - */ - ephemeral?: boolean; - /** - * A function called when encoding messages to set the meta field. - * @param IProtoMessage The message encoded for wire, without the meta field. - * If encryption is used, `metaSetter` only accesses _encrypted_ payload. - */ - metaSetter?: IMetaSetter; +export interface EncoderOptions extends BaseEncoderOptions { /** The symmetric key to encrypt the payload with. */ symKey: Uint8Array; /** An optional private key to be used to sign the payload before encryption. */ diff --git a/packages/rln/src/rln.ts b/packages/rln/src/rln.ts index 2c9f6f653d..c677ad9d0a 100644 --- a/packages/rln/src/rln.ts +++ b/packages/rln/src/rln.ts @@ -2,8 +2,8 @@ import { createDecoder, createEncoder } from "@waku/core"; import type { ContentTopic, IDecodedMessage, - IMetaSetter, - IRoutingInfo + IRoutingInfo, + EncoderOptions as WakuEncoderOptions } from "@waku/interfaces"; import { Logger } from "@waku/utils"; import init from "@waku/zerokit-rln-wasm"; @@ -28,27 +28,7 @@ import { Zerokit } from "./zerokit.js"; const log = new Logger("waku:rln"); -type WakuRLNEncoderOptions = { - /** - * The routing information for messages to encode. - */ - routingInfo: IRoutingInfo; - /** The content topic to set on outgoing messages. */ - contentTopic: string; - /** - * An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes. - * @defaultValue `false` - */ - ephemeral?: boolean; - /** - * A function called when encoding messages to set the meta field. - * @param IProtoMessage The message encoded for wire, without the meta field. - * If encryption is used, `metaSetter` only accesses _encrypted_ payload. - */ - metaSetter?: IMetaSetter; - /** - * RLN Credentials - */ +type WakuRLNEncoderOptions = WakuEncoderOptions & { credentials: EncryptedCredentials | DecryptedCredentials; }; diff --git a/packages/tests/src/lib/message_collector.ts b/packages/tests/src/lib/message_collector.ts index 456d1f881f..a5f1cf8263 100644 --- a/packages/tests/src/lib/message_collector.ts +++ b/packages/tests/src/lib/message_collector.ts @@ -66,7 +66,6 @@ export class MessageCollector { public async waitForMessages( numMessages: number, options?: { - // pubsubTopic?: string; timeoutDuration?: number; exact?: boolean; } From e53717cd08add1bb69d74779ab99582753e56307 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 19:03:51 +1000 Subject: [PATCH 10/23] revert temporary change --- .../tests/tests/light-push/index.node.spec.ts | 44 ++++++++++--------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/packages/tests/tests/light-push/index.node.spec.ts b/packages/tests/tests/light-push/index.node.spec.ts index d750c6c77f..6733c2dbb9 100644 --- a/packages/tests/tests/light-push/index.node.spec.ts +++ b/packages/tests/tests/light-push/index.node.spec.ts @@ -103,29 +103,31 @@ const runTests = (strictNodeCheck: boolean): void => { ); }); - [{ description: "short", value: "hi" }].forEach((testItem) => { - it(`Push message with content topic containing ${testItem.description}`, async function () { - const contentTopic = `/test/1/${testItem.value}/proto`; - const customEncoder = waku.createEncoder({ - contentTopic - }); - const pushResponse = await waku.lightPush.send( - customEncoder, - messagePayload - ); - expect(pushResponse.successes.length).to.eq(numServiceNodes); - - expect( - await serviceNodes.messageCollector.waitForMessages(1, { + TEST_STRING.forEach((testItem) => { + if (!testItem.invalidContentTopic) { + it(`Push message with content topic containing ${testItem.description}`, async function () { + const contentTopic = `/test/1/${testItem.value}/proto`; + const customEncoder = waku.createEncoder({ contentTopic - }) - ).to.eq(true); - serviceNodes.messageCollector.verifyReceivedMessage(0, { - expectedMessageText: messageText, - expectedContentTopic: contentTopic, - expectedPubsubTopic: TestRoutingInfo.pubsubTopic + }); + const pushResponse = await waku.lightPush.send( + customEncoder, + messagePayload + ); + expect(pushResponse.successes.length).to.eq(numServiceNodes); + + expect( + await serviceNodes.messageCollector.waitForMessages(1, { + contentTopic + }) + ).to.eq(true); + serviceNodes.messageCollector.verifyReceivedMessage(0, { + expectedMessageText: messageText, + expectedContentTopic: contentTopic, + expectedPubsubTopic: TestRoutingInfo.pubsubTopic + }); }); - }); + } }); it("Push message with meta", async function () { From 97a26b237314d66f3f64679cd64081a8f0e84716 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 19:21:39 +1000 Subject: [PATCH 11/23] revert test changes --- .../core/src/lib/message/version_0.spec.ts | 24 ++++++------------- 1 file changed, 7 insertions(+), 17 deletions(-) diff --git a/packages/core/src/lib/message/version_0.spec.ts b/packages/core/src/lib/message/version_0.spec.ts index 4c9f02ef67..45d7da6c12 100644 --- a/packages/core/src/lib/message/version_0.spec.ts +++ b/packages/core/src/lib/message/version_0.spec.ts @@ -115,41 +115,31 @@ describe("Ensures content topic is defined", () => { }); }; - expect(wrapper).to.throw( - "Routing Info must have the same content topic as the encoder" - ); + expect(wrapper).to.throw("Content topic must be specified"); }); it("Encoder throws on empty string content topic", () => { const wrapper = function (): void { createEncoder({ contentTopic: "", - routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic: "" }) + routingInfo: testRoutingInfo }); }; - expect(wrapper).to.throw("AutoSharding requires contentTopic"); + expect(wrapper).to.throw("Content topic must be specified"); }); it("Decoder throws on undefined content topic", () => { const wrapper = function (): void { - createDecoder( - undefined as unknown as string, - createRoutingInfo(testNetworkConfig, { - contentTopic: undefined as unknown as string - }) - ); + createDecoder(undefined as unknown as string, testRoutingInfo); }; - expect(wrapper).to.throw("AutoSharding requires contentTopic"); + expect(wrapper).to.throw("Content topic must be specified"); }); it("Decoder throws on empty string content topic", () => { const wrapper = function (): void { - createDecoder( - "", - createRoutingInfo(testNetworkConfig, { contentTopic: "" }) - ); + createDecoder("", testRoutingInfo); }; - expect(wrapper).to.throw("AutoSharding requires contentTopic"); + expect(wrapper).to.throw("Content topic must be specified"); }); }); From e5919a6bd95ecc3063d2d0361c16ae5502c11324 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 21:25:21 +1000 Subject: [PATCH 12/23] revert most invasive changes --- .../core/src/lib/light_push/light_push.ts | 8 ++--- .../core/src/lib/message/version_0.spec.ts | 8 ++--- packages/core/src/lib/message/version_0.ts | 11 +++++- packages/core/src/lib/store/rpc.spec.ts | 31 ++++++++++------- packages/core/src/lib/store/rpc.ts | 4 +-- packages/core/src/lib/store/store.spec.ts | 9 +---- packages/core/src/lib/store/store.ts | 2 +- packages/interfaces/src/message.ts | 3 +- packages/interfaces/src/store.ts | 7 ++-- packages/message-encryption/src/ecies.ts | 7 +++- packages/message-encryption/src/symmetric.ts | 7 +++- packages/relay/src/relay.ts | 4 +-- packages/relay/src/topic_only_message.ts | 7 +++- packages/rln/src/codec.ts | 11 ++++-- packages/sdk/src/filter/filter.ts | 16 ++++----- packages/sdk/src/filter/subscription.spec.ts | 24 ++++--------- packages/sdk/src/filter/subscription.ts | 24 +++++-------- packages/sdk/src/filter/types.ts | 8 ++--- packages/sdk/src/light_push/light_push.ts | 4 +-- packages/sdk/src/light_push/retry_manager.ts | 4 +-- .../sdk/src/peer_manager/peer_manager.spec.ts | 17 ++++------ packages/sdk/src/peer_manager/peer_manager.ts | 23 ++++--------- packages/sdk/src/store/store.spec.ts | 29 ++++------------ packages/sdk/src/store/store.ts | 34 ++++++++----------- .../tests/tests/store/message_hash.spec.ts | 3 +- packages/tests/tests/store/utils.ts | 6 +--- 26 files changed, 136 insertions(+), 175 deletions(-) diff --git a/packages/core/src/lib/light_push/light_push.ts b/packages/core/src/lib/light_push/light_push.ts index 68f17d4a71..6c2430e5a5 100644 --- a/packages/core/src/lib/light_push/light_push.ts +++ b/packages/core/src/lib/light_push/light_push.ts @@ -8,7 +8,8 @@ import { type ThisOrThat } from "@waku/interfaces"; import { PushResponse } from "@waku/proto"; -import { isMessageSizeUnderCap, Logger } from "@waku/utils"; +import { isMessageSizeUnderCap } from "@waku/utils"; +import { Logger } from "@waku/utils"; import all from "it-all"; import * as lp from "it-length-prefixed"; import { pipe } from "it-pipe"; @@ -62,10 +63,7 @@ export class LightPushCore { }; } - const query = PushRpc.createRequest( - protoMessage, - encoder.routingInfo.pubsubTopic - ); + const query = PushRpc.createRequest(protoMessage, encoder.pubsubTopic); return { query, error: null }; } catch (error) { log.error("Failed to prepare push message", error); diff --git a/packages/core/src/lib/message/version_0.spec.ts b/packages/core/src/lib/message/version_0.spec.ts index 45d7da6c12..d95963905c 100644 --- a/packages/core/src/lib/message/version_0.spec.ts +++ b/packages/core/src/lib/message/version_0.spec.ts @@ -153,9 +153,7 @@ describe("Sets sharding configuration correctly", () => { }); // When autosharding is enabled, we expect the shard index to be 1 - expect(autoshardingEncoder.routingInfo.pubsubTopic).to.be.eq( - "/waku/2/rs/0/0" - ); + expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0"); // Create an encoder setup to use static sharding with the same content topic const staticshardingEncoder = createEncoder({ @@ -164,8 +162,6 @@ describe("Sets sharding configuration correctly", () => { }); // When static sharding is enabled, we expect the shard index to be 0 - expect(staticshardingEncoder.routingInfo.pubsubTopic).to.be.eq( - "/waku/2/rs/0/3" - ); + expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/3"); }); }); diff --git a/packages/core/src/lib/message/version_0.ts b/packages/core/src/lib/message/version_0.ts index bb7e48fc36..a8706817d9 100644 --- a/packages/core/src/lib/message/version_0.ts +++ b/packages/core/src/lib/message/version_0.ts @@ -7,7 +7,8 @@ import type { IMetaSetter, IProtoMessage, IRateLimitProof, - IRoutingInfo + IRoutingInfo, + PubsubTopic } from "@waku/interfaces"; import { proto_message as proto } from "@waku/proto"; import { Logger } from "@waku/utils"; @@ -78,6 +79,10 @@ export class Encoder implements IEncoder { } } + public get pubsubTopic(): PubsubTopic { + return this.routingInfo.pubsubTopic; + } + public async toWire(message: IMessage): Promise { return proto.WakuMessage.encode(await this.toProtoObj(message)); } @@ -133,6 +138,10 @@ export class Decoder implements IDecoder { } } + public get pubsubTopic(): PubsubTopic { + return this.routingInfo.pubsubTopic; + } + public fromWireToProtoObj( bytes: Uint8Array ): Promise { diff --git a/packages/core/src/lib/store/rpc.spec.ts b/packages/core/src/lib/store/rpc.spec.ts index ecea28e3c0..6e38449c2f 100644 --- a/packages/core/src/lib/store/rpc.spec.ts +++ b/packages/core/src/lib/store/rpc.spec.ts @@ -1,17 +1,11 @@ -import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { StoreQueryRequest } from "./rpc.js"; -const routingInfo = createRoutingInfo( - { clusterId: 0 }, - { pubsubTopic: "/waku/2/rs/0/0" } -); - describe("StoreQueryRequest validation", () => { it("accepts valid content-filtered query", () => { const request = StoreQueryRequest.create({ - routingInfo, + pubsubTopic: "/waku/2/default-waku/proto", contentTopics: ["/test/1/content/proto"], includeData: true, paginationForward: true @@ -22,7 +16,7 @@ describe("StoreQueryRequest validation", () => { it("rejects content-filtered query with only pubsubTopic", () => { expect(() => StoreQueryRequest.create({ - routingInfo, + pubsubTopic: "/waku/2/default-waku/proto", contentTopics: [], includeData: true, paginationForward: true @@ -32,9 +26,22 @@ describe("StoreQueryRequest validation", () => { ); }); + it("rejects content-filtered query with only contentTopics", () => { + expect(() => + StoreQueryRequest.create({ + pubsubTopic: "", + contentTopics: ["/test/1/content/proto"], + includeData: true, + paginationForward: true + }) + ).to.throw( + "Both pubsubTopic and contentTopics must be set together for content-filtered queries" + ); + }); + it("accepts valid message hash query", () => { const request = StoreQueryRequest.create({ - routingInfo, + pubsubTopic: "", contentTopics: [], messageHashes: [new Uint8Array([1, 2, 3, 4])], includeData: true, @@ -47,7 +54,7 @@ describe("StoreQueryRequest validation", () => { expect(() => StoreQueryRequest.create({ messageHashes: [new Uint8Array([1, 2, 3, 4])], - routingInfo, + pubsubTopic: "/waku/2/default-waku/proto", contentTopics: ["/test/1/content/proto"], includeData: true, paginationForward: true @@ -60,7 +67,7 @@ describe("StoreQueryRequest validation", () => { it("rejects hash query with time filter", () => { expect(() => StoreQueryRequest.create({ - routingInfo, + pubsubTopic: "", contentTopics: [], messageHashes: [new Uint8Array([1, 2, 3, 4])], timeStart: new Date(), @@ -74,7 +81,7 @@ describe("StoreQueryRequest validation", () => { it("accepts time-filtered query with content filter", () => { const request = StoreQueryRequest.create({ - routingInfo, + pubsubTopic: "/waku/2/default-waku/proto", contentTopics: ["/test/1/content/proto"], timeStart: new Date(Date.now() - 3600000), timeEnd: new Date(), diff --git a/packages/core/src/lib/store/rpc.ts b/packages/core/src/lib/store/rpc.ts index 3fcc00f8ab..0055ed96a3 100644 --- a/packages/core/src/lib/store/rpc.ts +++ b/packages/core/src/lib/store/rpc.ts @@ -42,9 +42,9 @@ export class StoreQueryRequest { } } else { if ( - (params.routingInfo && + (params.pubsubTopic && (!params.contentTopics || params.contentTopics.length === 0)) || - (!params.routingInfo && + (!params.pubsubTopic && params.contentTopics && params.contentTopics.length > 0) ) { diff --git a/packages/core/src/lib/store/store.spec.ts b/packages/core/src/lib/store/store.spec.ts index fbe340ab90..1cf61eb878 100644 --- a/packages/core/src/lib/store/store.spec.ts +++ b/packages/core/src/lib/store/store.spec.ts @@ -2,7 +2,6 @@ import type { PeerId } from "@libp2p/interface"; import { IDecodedMessage, IDecoder, - IRoutingInfo, Libp2p, QueryRequestParams } from "@waku/interfaces"; @@ -79,15 +78,9 @@ describe("StoreCore", () => { let mockStoreQueryRequest: any; let mockStoreQueryResponse: any; - const routingInfo: IRoutingInfo = { - pubsubTopic: "test-topic", - shardId: 1, - clusterId: 0 - }; - beforeEach(() => { queryOpts = { - routingInfo, + pubsubTopic: "test-topic", contentTopics: ["test-topic"], paginationLimit: 10, includeData: true, diff --git a/packages/core/src/lib/store/store.ts b/packages/core/src/lib/store/store.ts index 61f6f07737..ce61b7a553 100644 --- a/packages/core/src/lib/store/store.ts +++ b/packages/core/src/lib/store/store.ts @@ -76,7 +76,7 @@ export class StoreCore { log.info("Sending store query request:", { hasMessageHashes: !!queryOpts.messageHashes?.length, messageHashCount: queryOpts.messageHashes?.length, - routingInfo: queryOpts.routingInfo, + pubsubTopic: queryOpts.pubsubTopic, contentTopics: queryOpts.contentTopics }); diff --git a/packages/interfaces/src/message.ts b/packages/interfaces/src/message.ts index 0e076f0b3e..1b34700010 100644 --- a/packages/interfaces/src/message.ts +++ b/packages/interfaces/src/message.ts @@ -95,13 +95,14 @@ export interface IEncoder { contentTopic: string; ephemeral: boolean; routingInfo: IRoutingInfo; + pubsubTopic: PubsubTopic; toWire: (message: IMessage) => Promise; toProtoObj: (message: IMessage) => Promise; } export interface IDecoder { contentTopic: string; - routingInfo: IRoutingInfo; + pubsubTopic: PubsubTopic; fromWireToProtoObj: (bytes: Uint8Array) => Promise; fromProtoObj: ( pubsubTopic: string, diff --git a/packages/interfaces/src/store.ts b/packages/interfaces/src/store.ts index a8feebb236..014842aaa6 100644 --- a/packages/interfaces/src/store.ts +++ b/packages/interfaces/src/store.ts @@ -1,5 +1,4 @@ import type { IDecodedMessage, IDecoder } from "./message.js"; -import { IRoutingInfo } from "./sharding.js"; export type StoreCursor = Uint8Array; @@ -16,10 +15,10 @@ export type QueryRequestParams = { includeData: boolean; /** - * The routing information to query. This field is mandatory. - * The query will only return messages that were published on this specific route (cluster and shard). + * The pubsub topic to query. This field is mandatory. + * The query will only return messages that were published on this specific pubsub topic. */ - routingInfo: IRoutingInfo; + pubsubTopic: string; /** * The content topics to filter the messages. diff --git a/packages/message-encryption/src/ecies.ts b/packages/message-encryption/src/ecies.ts index 1628b2481f..0cdf17975e 100644 --- a/packages/message-encryption/src/ecies.ts +++ b/packages/message-encryption/src/ecies.ts @@ -7,7 +7,8 @@ import { type IMessage, type IMetaSetter, type IProtoMessage, - type IRoutingInfo + type IRoutingInfo, + type PubsubTopic } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; import { Logger } from "@waku/utils"; @@ -46,6 +47,10 @@ class Encoder implements IEncoder { } } + public get pubsubTopic(): PubsubTopic { + return this.routingInfo.pubsubTopic; + } + public async toWire(message: IMessage): Promise { const protoMessage = await this.toProtoObj(message); if (!protoMessage) return; diff --git a/packages/message-encryption/src/symmetric.ts b/packages/message-encryption/src/symmetric.ts index 2261e01751..87fcbbb8fc 100644 --- a/packages/message-encryption/src/symmetric.ts +++ b/packages/message-encryption/src/symmetric.ts @@ -7,7 +7,8 @@ import type { IMessage, IMetaSetter, IProtoMessage, - IRoutingInfo + IRoutingInfo, + PubsubTopic } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; import { Logger } from "@waku/utils"; @@ -46,6 +47,10 @@ class Encoder implements IEncoder { } } + public get pubsubTopic(): PubsubTopic { + return this.routingInfo.pubsubTopic; + } + public async toWire(message: IMessage): Promise { const protoMessage = await this.toProtoObj(message); if (!protoMessage) return; diff --git a/packages/relay/src/relay.ts b/packages/relay/src/relay.ts index 6c4c5b9136..6f6dd98fa7 100644 --- a/packages/relay/src/relay.ts +++ b/packages/relay/src/relay.ts @@ -128,7 +128,7 @@ export class Relay implements IRelay { encoder: IEncoder, message: IMessage ): Promise { - const { pubsubTopic } = encoder.routingInfo; + const { pubsubTopic } = encoder; if (!this.pubsubTopics.has(pubsubTopic)) { log.error("Failed to send waku relay: topic not configured"); return { @@ -180,7 +180,7 @@ export class Relay implements IRelay { const observers: Array<[PubsubTopic, Observer]> = []; for (const decoder of Array.isArray(decoders) ? decoders : [decoders]) { - const { pubsubTopic } = decoder.routingInfo; + const { pubsubTopic } = decoder; const ctObs: Map>> = this.observers.get( pubsubTopic ) ?? new Map(); diff --git a/packages/relay/src/topic_only_message.ts b/packages/relay/src/topic_only_message.ts index d5332b9d9f..0929361166 100644 --- a/packages/relay/src/topic_only_message.ts +++ b/packages/relay/src/topic_only_message.ts @@ -2,7 +2,8 @@ import type { IDecoder, IProtoMessage, IRoutingInfo, - ITopicOnlyMessage + ITopicOnlyMessage, + PubsubTopic } from "@waku/interfaces"; import { TopicOnlyMessage as ProtoTopicOnlyMessage } from "@waku/proto"; @@ -32,6 +33,10 @@ export class TopicOnlyMessage implements ITopicOnlyMessage { export class ContentTopicOnlyDecoder implements IDecoder { public constructor() {} + public get pubsubTopic(): PubsubTopic { + throw "Pubsub Topic is not available on this decoder, it is only meant to decode the content topic for any message"; + } + public get contentTopic(): string { throw "ContentTopic is not available on this decoder, it is only meant to decode the content topic for any message"; } diff --git a/packages/rln/src/codec.ts b/packages/rln/src/codec.ts index 21be117c1e..25a441b9bb 100644 --- a/packages/rln/src/codec.ts +++ b/packages/rln/src/codec.ts @@ -5,7 +5,8 @@ import type { IMessage, IProtoMessage, IRateLimitProof, - IRoutingInfo + IRoutingInfo, + PubsubTopic } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -28,6 +29,10 @@ export class RLNEncoder implements IEncoder { this.idSecretHash = identityCredential.IDSecretHash; } + public get pubsubTopic(): PubsubTopic { + return this.encoder.pubsubTopic; + } + public async toWire(message: IMessage): Promise { message.rateLimitProof = await this.generateProof(message); log.info("Proof generated", message.rateLimitProof); @@ -93,8 +98,8 @@ export class RLNDecoder private readonly decoder: IDecoder ) {} - public get routingInfo(): IRoutingInfo { - return this.decoder.routingInfo; + public get pubsubTopic(): PubsubTopic { + return this.decoder.pubsubTopic; } public get contentTopic(): string { diff --git a/packages/sdk/src/filter/filter.ts b/packages/sdk/src/filter/filter.ts index b686822f30..43895fab7c 100644 --- a/packages/sdk/src/filter/filter.ts +++ b/packages/sdk/src/filter/filter.ts @@ -63,21 +63,21 @@ export class Filter implements IFilter { throw Error("Cannot subscribe with 0 decoders."); } - const routingInfos = decoders.map((v) => v.routingInfo); - const routingInfo = routingInfos[0]; + const pubsubTopics = decoders.map((v) => v.pubsubTopic); + const singlePubsubTopic = pubsubTopics[0]; const contentTopics = decoders.map((v) => v.contentTopic); log.info( - `Subscribing to contentTopics: ${contentTopics}, pubsubTopic: ${routingInfo.pubsubTopic}` + `Subscribing to contentTopics: ${contentTopics}, pubsubTopic: ${singlePubsubTopic}` ); - this.throwIfTopicNotSame(routingInfos.map((r) => r.pubsubTopic)); + this.throwIfTopicNotSame(pubsubTopics); - let subscription = this.subscriptions.get(routingInfo.pubsubTopic); + let subscription = this.subscriptions.get(singlePubsubTopic); if (!subscription) { subscription = new Subscription({ - routingInfo: routingInfo, + pubsubTopic: singlePubsubTopic, protocol: this.protocol, config: this.config, peerManager: this.peerManager @@ -86,7 +86,7 @@ export class Filter implements IFilter { } const result = await subscription.add(decoders, callback); - this.subscriptions.set(routingInfo.pubsubTopic, subscription); + this.subscriptions.set(singlePubsubTopic, subscription); log.info( `Subscription ${result ? "successful" : "failed"} for content topic: ${contentTopics}` @@ -104,7 +104,7 @@ export class Filter implements IFilter { throw Error("Cannot unsubscribe with 0 decoders."); } - const pubsubTopics = decoders.map((v) => v.routingInfo.pubsubTopic); + const pubsubTopics = decoders.map((v) => v.pubsubTopic); const singlePubsubTopic = pubsubTopics[0]; const contentTopics = decoders.map((v) => v.contentTopic); diff --git a/packages/sdk/src/filter/subscription.spec.ts b/packages/sdk/src/filter/subscription.spec.ts index e65128092f..37f3d48ed3 100644 --- a/packages/sdk/src/filter/subscription.spec.ts +++ b/packages/sdk/src/filter/subscription.spec.ts @@ -1,12 +1,10 @@ import { FilterCore } from "@waku/core"; import type { - AutoSharding, FilterProtocolOptions, IDecodedMessage, IDecoder } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; -import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -16,13 +14,7 @@ import { Subscription } from "./subscription.js"; const PUBSUB_TOPIC = "/waku/2/rs/1/4"; const CONTENT_TOPIC = "/test/1/waku-filter/utf8"; -const NETWORK_CONFIG: AutoSharding = { - clusterId: 2, - numShardsInCluster: 3 -}; -const ROUTING_INFO = createRoutingInfo(NETWORK_CONFIG, { - contentTopic: CONTENT_TOPIC -}); + describe("Filter Subscription", () => { let filterCore: FilterCore; let peerManager: PeerManager; @@ -40,7 +32,7 @@ describe("Filter Subscription", () => { }; subscription = new Subscription({ - routingInfo: ROUTING_INFO, + pubsubTopic: PUBSUB_TOPIC, protocol: filterCore, config, peerManager @@ -87,11 +79,9 @@ describe("Filter Subscription", () => { }); it("should invoke callbacks when receiving a message", async () => { - const testContentTopic = "/custom/0/content/proto"; + const testContentTopic = "/custom/content/topic"; const testDecoder = { - routingInfo: createRoutingInfo(NETWORK_CONFIG, { - contentTopic: testContentTopic - }), + pubsubTopic: PUBSUB_TOPIC, contentTopic: testContentTopic, fromProtoObj: sinon.stub().callsFake(() => { return Promise.resolve({ payload: new Uint8Array([1, 2, 3]) }); @@ -116,11 +106,9 @@ describe("Filter Subscription", () => { }); it("should invoke callbacks only when newly receiving message is given", async () => { - const testContentTopic = "/custom/0/content/topic"; + const testContentTopic = "/custom/content/topic"; const testDecoder = { - routingInfo: createRoutingInfo(NETWORK_CONFIG, { - contentTopic: testContentTopic - }), + pubsubTopic: PUBSUB_TOPIC, contentTopic: testContentTopic, fromProtoObj: sinon.stub().callsFake(() => { return Promise.resolve({ payload: new Uint8Array([1, 2, 3]) }); diff --git a/packages/sdk/src/filter/subscription.ts b/packages/sdk/src/filter/subscription.ts index e35429572d..00804f5601 100644 --- a/packages/sdk/src/filter/subscription.ts +++ b/packages/sdk/src/filter/subscription.ts @@ -10,9 +10,7 @@ import type { IDecodedMessage, IDecoder, IProtoMessage, - IRoutingInfo, - PeerIdStr, - PubsubTopic + PeerIdStr } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; @@ -37,8 +35,7 @@ type AttemptUnsubscribeParams = { type Libp2pEventHandler = (e: CustomEvent) => void; export class Subscription { - private readonly routingInfo: IRoutingInfo; - private readonly pubsubTopic: PubsubTopic; + private readonly pubsubTopic: string; private readonly protocol: FilterCore; private readonly peerManager: PeerManager; @@ -76,8 +73,7 @@ export class Subscription { public constructor(params: SubscriptionParams) { this.config = params.config; - this.routingInfo = params.routingInfo; - this.pubsubTopic = params.routingInfo.pubsubTopic; + this.pubsubTopic = params.pubsubTopic; this.protocol = params.protocol; this.peerManager = params.peerManager; @@ -197,7 +193,7 @@ export class Subscription { if (this.callbacks.has(decoder)) { log.warn( - `Replacing callback associated associated with decoder with pubsubTopic:${decoder.routingInfo.pubsubTopic} and contentTopic:${decoder.contentTopic}` + `Replacing callback associated associated with decoder with pubsubTopic:${decoder.pubsubTopic} and contentTopic:${decoder.contentTopic}` ); const callback = this.callbacks.get(decoder); @@ -209,7 +205,7 @@ export class Subscription { void (async (): Promise => { try { const message = await decoder.fromProtoObj( - decoder.routingInfo.pubsubTopic, + decoder.pubsubTopic, event.detail as IProtoMessage ); void callback(message!); @@ -234,7 +230,7 @@ export class Subscription { if (!callback) { log.warn( - `No callback associated with decoder with pubsubTopic:${decoder.routingInfo.pubsubTopic} and contentTopic:${decoder.contentTopic}` + `No callback associated with decoder with pubsubTopic:${decoder.pubsubTopic} and contentTopic:${decoder.contentTopic}` ); } @@ -417,13 +413,11 @@ export class Subscription { const usablePeer = await this.peerManager.isPeerOnPubsub( event.detail, - this.routingInfo.pubsubTopic + this.pubsubTopic ); if (!usablePeer) { - log.info( - `Peer ${id} doesn't support pubsubTopic:${this.routingInfo.pubsubTopic}` - ); + log.info(`Peer ${id} doesn't support pubsubTopic:${this.pubsubTopic}`); return; } @@ -489,7 +483,7 @@ export class Subscription { const prevPeers = new Set(this.peers.keys()); const peersToAdd = await this.peerManager.getPeers({ protocol: Protocols.Filter, - routingInfo: this.routingInfo + pubsubTopic: this.pubsubTopic }); for (const peer of peersToAdd) { diff --git a/packages/sdk/src/filter/types.ts b/packages/sdk/src/filter/types.ts index f010f45440..44326728d1 100644 --- a/packages/sdk/src/filter/types.ts +++ b/packages/sdk/src/filter/types.ts @@ -1,9 +1,5 @@ import type { FilterCore } from "@waku/core"; -import type { - FilterProtocolOptions, - IRoutingInfo, - Libp2p -} from "@waku/interfaces"; +import type { FilterProtocolOptions, Libp2p } from "@waku/interfaces"; import type { WakuMessage } from "@waku/proto"; import type { PeerManager } from "../peer_manager/index.js"; @@ -19,7 +15,7 @@ export type SubscriptionEvents = { }; export type SubscriptionParams = { - routingInfo: IRoutingInfo; + pubsubTopic: string; protocol: FilterCore; config: FilterProtocolOptions; peerManager: PeerManager; diff --git a/packages/sdk/src/light_push/light_push.ts b/packages/sdk/src/light_push/light_push.ts index 13dc92089e..947ce1528b 100644 --- a/packages/sdk/src/light_push/light_push.ts +++ b/packages/sdk/src/light_push/light_push.ts @@ -77,13 +77,13 @@ export class LightPush implements ILightPush { ...options }; - const { pubsubTopic } = encoder.routingInfo; + const { pubsubTopic } = encoder; log.info("send: attempting to send a message to pubsubTopic:", pubsubTopic); const peerIds = await this.peerManager.getPeers({ protocol: Protocols.LightPush, - routingInfo: encoder.routingInfo + pubsubTopic: encoder.pubsubTopic }); const coreResults: CoreProtocolResult[] = diff --git a/packages/sdk/src/light_push/retry_manager.ts b/packages/sdk/src/light_push/retry_manager.ts index 380c954277..0fc156efe4 100644 --- a/packages/sdk/src/light_push/retry_manager.ts +++ b/packages/sdk/src/light_push/retry_manager.ts @@ -100,7 +100,7 @@ export class RetryManager { const peerId = ( await this.peerManager.getPeers({ protocol: Protocols.LightPush, - routingInfo: task.routingInfo + pubsubTopic: task.routingInfo.pubsubTopic }) )[0]; @@ -146,7 +146,7 @@ export class RetryManager { if (shouldPeerBeChanged(error.message)) { await this.peerManager.renewPeer(peerId, { protocol: Protocols.LightPush, - routingInfo: task.routingInfo + pubsubTopic: task.routingInfo.pubsubTopic }); } diff --git a/packages/sdk/src/peer_manager/peer_manager.spec.ts b/packages/sdk/src/peer_manager/peer_manager.spec.ts index cac779ca0e..f4eac85f81 100644 --- a/packages/sdk/src/peer_manager/peer_manager.spec.ts +++ b/packages/sdk/src/peer_manager/peer_manager.spec.ts @@ -5,7 +5,6 @@ import { Libp2p, Protocols } from "@waku/interfaces"; -import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -18,12 +17,8 @@ describe("PeerManager", () => { let peers: any[]; let mockConnections: any[]; - const TEST_PUBSUB_TOPIC = "/waku/2/rs/0/0"; + const TEST_PUBSUB_TOPIC = "/test/1/waku-light-push/utf8"; const TEST_PROTOCOL = Protocols.LightPush; - const TEST_ROUTING_INFO = createRoutingInfo( - { clusterId: 0 }, - { pubsubTopic: TEST_PUBSUB_TOPIC } - ); const clearPeerState = (): void => { (peerManager as any).lockedPeers.clear(); @@ -41,7 +36,7 @@ describe("PeerManager", () => { const getPeersForTest = async (): Promise => { return await peerManager.getPeers({ protocol: TEST_PROTOCOL, - routingInfo: TEST_ROUTING_INFO + pubsubTopic: TEST_PUBSUB_TOPIC }); }; @@ -131,7 +126,7 @@ describe("PeerManager", () => { const peerId = ids[0]; await peerManager.renewPeer(peerId, { protocol: TEST_PROTOCOL, - routingInfo: TEST_ROUTING_INFO + pubsubTopic: TEST_PUBSUB_TOPIC }); expect((peerManager as any).lockedPeers.has(peerId.toString())).to.be.false; expect((peerManager as any).unlockedPeers.has(peerId.toString())).to.be @@ -229,7 +224,7 @@ describe("PeerManager", () => { if (skipIfNoPeers(first)) return; await peerManager.renewPeer(first[0], { protocol: TEST_PROTOCOL, - routingInfo: TEST_ROUTING_INFO + pubsubTopic: TEST_PUBSUB_TOPIC }); const second = await getPeersForTest(); if (skipIfNoPeers(second)) return; @@ -243,7 +238,7 @@ describe("PeerManager", () => { } as any; await peerManager.renewPeer(fakePeerId, { protocol: TEST_PROTOCOL, - routingInfo: TEST_ROUTING_INFO + pubsubTopic: TEST_PUBSUB_TOPIC }); expect(true).to.be.true; }); @@ -268,7 +263,7 @@ describe("PeerManager", () => { const peerId = result[0]; await peerManager.renewPeer(peerId, { protocol: TEST_PROTOCOL, - routingInfo: TEST_ROUTING_INFO + pubsubTopic: TEST_PUBSUB_TOPIC }); const connection = mockConnections.find((c) => c.remotePeer.equals(peerId)); diff --git a/packages/sdk/src/peer_manager/peer_manager.ts b/packages/sdk/src/peer_manager/peer_manager.ts index ebf8bc82c5..a42baf7215 100644 --- a/packages/sdk/src/peer_manager/peer_manager.ts +++ b/packages/sdk/src/peer_manager/peer_manager.ts @@ -12,7 +12,6 @@ import { } from "@waku/core"; import { CONNECTION_LOCKED_TAG, - type IRoutingInfo, Libp2p, Libp2pEventHandler, Protocols @@ -35,7 +34,7 @@ type PeerManagerParams = { type GetPeersParams = { protocol: Protocols; - routingInfo: IRoutingInfo; + pubsubTopic: string; }; export enum PeerManagerEventNames { @@ -108,9 +107,7 @@ export class PeerManager { public async getPeers(params: GetPeersParams): Promise { log.info( - `Getting peers for protocol: ${params.protocol}, ` + - `clusterId: ${params.routingInfo.clusterId},` + - ` shard: ${params.routingInfo.shardId}` + `Getting peers for protocol: ${params.protocol}, pubsubTopic: ${params.pubsubTopic}` ); const connectedPeers = await this.connectionManager.getConnectedPeers(); @@ -120,19 +117,13 @@ export class PeerManager { for (const peer of connectedPeers) { const hasProtocol = this.hasPeerProtocol(peer, params.protocol); - - const isOnSameShard = await this.connectionManager.isPeerOnShard( + const hasSamePubsub = await this.connectionManager.isPeerOnTopic( peer.id, - params.routingInfo.clusterId, - params.routingInfo.shardId + params.pubsubTopic ); - if (!isOnSameShard) { - continue; - } - const isPeerAvailableForUse = this.isPeerAvailableForUse(peer.id); - if (hasProtocol && isPeerAvailableForUse) { + if (hasProtocol && hasSamePubsub && isPeerAvailableForUse) { results.push(peer); log.info(`Peer ${peer.id} qualifies for protocol ${params.protocol}`); } @@ -177,7 +168,7 @@ export class PeerManager { public async renewPeer(id: PeerId, params: GetPeersParams): Promise { log.info( - `Renewing peer ${id} for protocol: ${params.protocol}, routingInfo: ${params.routingInfo}` + `Renewing peer ${id} for protocol: ${params.protocol}, pubsubTopic: ${params.pubsubTopic}` ); const connectedPeers = await this.connectionManager.getConnectedPeers(); @@ -274,7 +265,7 @@ export class PeerManager { } const wasUnlocked = new Date(value).getTime(); - return Date.now() - wasUnlocked >= 10_000; + return Date.now() - wasUnlocked >= 10_000 ? true : false; } private dispatchFilterPeerConnect(id: PeerId): void { diff --git a/packages/sdk/src/store/store.spec.ts b/packages/sdk/src/store/store.spec.ts index 83ccb08436..025f2df425 100644 --- a/packages/sdk/src/store/store.spec.ts +++ b/packages/sdk/src/store/store.spec.ts @@ -1,12 +1,6 @@ import { StoreCore } from "@waku/core"; -import { - IDecodedMessage, - IDecoder, - IRoutingInfo, - Libp2p -} from "@waku/interfaces"; +import type { IDecodedMessage, IDecoder, Libp2p } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; -import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import sinon from "sinon"; @@ -14,13 +8,6 @@ import { PeerManager } from "../peer_manager/index.js"; import { Store } from "./store.js"; -const TestNetworkingInfo = { clusterId: 0, numShardsInCluster: 8 }; -const MockRoutingInfo: IRoutingInfo = { - pubsubTopic: "/custom/topic", - shardId: 1, - clusterId: TestNetworkingInfo.clusterId -}; - describe("Store", () => { let store: Store; let mockLibp2p: Libp2p; @@ -74,11 +61,9 @@ describe("Store", () => { }); describe("queryGenerator", () => { - const contentTopic = "/test/1/test/proto"; - const routingInfo = createRoutingInfo(TestNetworkingInfo, { contentTopic }); const mockDecoder: IDecoder = { - routingInfo, - contentTopic, + pubsubTopic: "/waku/2/default-waku/proto", + contentTopic: "/test/1/test/proto", fromWireToProtoObj: sinon.stub(), fromProtoObj: sinon.stub() }; @@ -86,7 +71,7 @@ describe("Store", () => { const mockMessage: IDecodedMessage = { version: 1, pubsubTopic: "/waku/2/default-waku/proto", - contentTopic, + contentTopic: "/test/1/test/proto", payload: new Uint8Array([1, 2, 3]), timestamp: new Date(), rateLimitProof: undefined, @@ -113,7 +98,7 @@ describe("Store", () => { expect( mockPeerManager.getPeers.calledWith({ protocol: Protocols.Store, - routingInfo + pubsubTopic: "/waku/2/default-waku/proto" }) ).to.be.true; @@ -265,11 +250,9 @@ describe("Store", () => { mockPeerManager.getPeers.resolves([mockPeerId]); mockStoreCore.queryPerPage.returns(mockResponseGenerator); - const routingInfo: IRoutingInfo = structuredClone(MockRoutingInfo); - routingInfo.pubsubTopic = "/custom/topic"; const generator = store.queryGenerator([mockDecoder], { messageHashes: [new Uint8Array([1, 2, 3]), new Uint8Array([4, 5, 6])], - routingInfo + pubsubTopic: "/custom/topic" }); const results = []; diff --git a/packages/sdk/src/store/store.ts b/packages/sdk/src/store/store.ts index 0d8e686d23..1297060cf2 100644 --- a/packages/sdk/src/store/store.ts +++ b/packages/sdk/src/store/store.ts @@ -5,7 +5,6 @@ import { messageHash, StoreCore } from "@waku/core"; import { IDecodedMessage, IDecoder, - type IRoutingInfo, IStore, Libp2p, Protocols, @@ -66,7 +65,7 @@ export class Store implements IStore { ); for (const queryOption of queryOptions) { - const peer = await this.getPeerToUse(queryOption.routingInfo); + const peer = await this.getPeerToUse(queryOption.pubsubTopic); if (!peer) { log.error("No peers available to query"); @@ -182,7 +181,7 @@ export class Store implements IStore { private validateDecodersAndPubsubTopic( decoders: IDecoder[] ): { - routingInfo: IRoutingInfo; + pubsubTopic: string; contentTopics: string[]; decodersAsMap: Map>; } { @@ -192,7 +191,7 @@ export class Store implements IStore { } const uniquePubsubTopicsInQuery = Array.from( - new Set(decoders.map((decoder) => decoder.routingInfo.pubsubTopic)) + new Set(decoders.map((decoder) => decoder.pubsubTopic)) ); if (uniquePubsubTopicsInQuery.length > 1) { log.error("API does not support querying multiple pubsub topics at once"); @@ -215,9 +214,7 @@ export class Store implements IStore { }); const contentTopics = decoders - .filter( - (decoder) => decoder.routingInfo.pubsubTopic === pubsubTopicForQuery - ) + .filter((decoder) => decoder.pubsubTopic === pubsubTopicForQuery) .map((dec) => dec.contentTopic); if (contentTopics.length === 0) { @@ -226,18 +223,16 @@ export class Store implements IStore { } return { - routingInfo: decoders[0].routingInfo, + pubsubTopic: pubsubTopicForQuery, contentTopics, decodersAsMap }; } - private async getPeerToUse( - routingInfo: IRoutingInfo - ): Promise { + private async getPeerToUse(pubsubTopic: string): Promise { const peers = await this.peerManager.getPeers({ protocol: Protocols.Store, - routingInfo + pubsubTopic }); return this.options.peers @@ -302,16 +297,15 @@ export class Store implements IStore { const isHashQuery = options?.messageHashes && options.messageHashes.length > 0; - let routingInfo: IRoutingInfo; + let pubsubTopic: string; let contentTopics: string[]; let decodersAsMap: Map>; if (isHashQuery) { // For hash queries, we still need decoders to decode messages - // but we don't validate routing info consistency - // Use routing info from options if provided, otherwise from first decoder - // Otherwise, throw - routingInfo = options?.routingInfo || decoders[0]?.routingInfo; + // but we don't validate pubsubTopic consistency + // Use pubsubTopic from options if provided, otherwise from first decoder + pubsubTopic = options.pubsubTopic || decoders[0]?.pubsubTopic || ""; contentTopics = []; decodersAsMap = new Map(); decoders.forEach((dec) => { @@ -319,7 +313,7 @@ export class Store implements IStore { }); } else { const validated = this.validateDecodersAndPubsubTopic(decoders); - routingInfo = validated.routingInfo; + pubsubTopic = validated.pubsubTopic; contentTopics = validated.contentTopics; decodersAsMap = validated.decodersAsMap; } @@ -346,7 +340,7 @@ export class Store implements IStore { decodersAsMap, queryOptions: [ { - routingInfo, + pubsubTopic, contentTopics, includeData: true, paginationForward: true, @@ -361,7 +355,7 @@ export class Store implements IStore { return { decodersAsMap, queryOptions: subTimeRanges.map(([start, end]) => ({ - routingInfo, + pubsubTopic, contentTopics, includeData: true, paginationForward: true, diff --git a/packages/tests/tests/store/message_hash.spec.ts b/packages/tests/tests/store/message_hash.spec.ts index 906fc98ed8..59e93c6153 100644 --- a/packages/tests/tests/store/message_hash.spec.ts +++ b/packages/tests/tests/store/message_hash.spec.ts @@ -15,6 +15,7 @@ import { sendMessages, TestDecoder, TestNetworkConfig, + TestPubsubTopic, TestRoutingInfo, totalMsgs } from "./utils.js"; @@ -74,7 +75,7 @@ describe("Waku Store, message hash query", function () { const messages: IDecodedMessage[] = []; for await (const page of waku.store.queryGenerator([TestDecoder], { messageHashes, - routingInfo: TestRoutingInfo + pubsubTopic: TestPubsubTopic })) { for await (const msg of page) { messages.push(msg as IDecodedMessage); diff --git a/packages/tests/tests/store/utils.ts b/packages/tests/tests/store/utils.ts index 7d25243a9a..b13967f952 100644 --- a/packages/tests/tests/store/utils.ts +++ b/packages/tests/tests/store/utils.ts @@ -10,7 +10,6 @@ import { LightNode, type NetworkConfig, Protocols, - RelayShards, ShardId } from "@waku/interfaces"; import { createLightNode } from "@waku/sdk"; @@ -34,10 +33,7 @@ export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, { contentTopic: TestContentTopic }); -export const TestRelayShards: RelayShards = { - clusterId: TestClusterId, - shards: [TestRoutingInfo.shardId] -}; +export const TestPubsubTopic = TestRoutingInfo.pubsubTopic; export const TestEncoder = createEncoder({ contentTopic: TestContentTopic, From 0739fd1dd2d0909ee5dd81e3adff5a5c06770113 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Sat, 19 Jul 2025 21:50:51 +1000 Subject: [PATCH 13/23] revert shard info -> relay shard renaming --- .../connection_manager/shard_reader.spec.ts | 22 +++++++++---------- .../lib/connection_manager/shard_reader.ts | 6 ++--- packages/core/src/lib/metadata/metadata.ts | 14 ++++++------ .../waku_peer_exchange_discovery.ts | 4 ++-- packages/enr/src/enr.ts | 6 ++--- packages/enr/src/raw_enr.ts | 6 ++--- packages/interfaces/src/enr.ts | 4 ++-- packages/interfaces/src/metadata.ts | 4 ++-- packages/interfaces/src/sharding.ts | 2 +- .../sdk/src/peer_manager/peer_manager.spec.ts | 11 ++++++++-- packages/tests/src/constants.ts | 4 ++-- .../continuous_discovery.spec.ts | 18 +++++++-------- .../tests/tests/peer-exchange/index.spec.ts | 8 +++---- .../utils/src/common/relay_shard_codec.ts | 8 +++---- 14 files changed, 62 insertions(+), 55 deletions(-) diff --git a/packages/core/src/lib/connection_manager/shard_reader.spec.ts b/packages/core/src/lib/connection_manager/shard_reader.spec.ts index 8abb90265b..30bb19ac26 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.spec.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.spec.ts @@ -4,7 +4,7 @@ import { DEFAULT_NUM_SHARDS, NetworkConfig, PubsubTopic, - RelayShards + ShardInfo } from "@waku/interfaces"; import { contentTopicToShardIndex, encodeRelayShard } from "@waku/utils"; import { expect } from "chai"; @@ -39,7 +39,7 @@ describe("ShardReader", function () { numShardsInCluster: DEFAULT_NUM_SHARDS }; - const testRelayShards: RelayShards = { + const testShardInfo: ShardInfo = { clusterId: testClusterId, shards: [testShardIndex] }; @@ -98,7 +98,7 @@ describe("ShardReader", function () { describe("isPeerOnNetwork", function () { it("should return true when peer is on the same cluster", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -112,7 +112,7 @@ describe("ShardReader", function () { }); it("should return false when peer is on different cluster", async function () { - const differentClusterShardInfo: RelayShards = { + const differentClusterShardInfo: ShardInfo = { clusterId: 5, shards: [1, 2] }; @@ -129,7 +129,7 @@ describe("ShardReader", function () { }); it("should return true even if peer has no overlapping shards", async function () { - const noOverlapShardInfo: RelayShards = { + const noOverlapShardInfo: ShardInfo = { clusterId: testClusterId, shards: [testShardIndex + 100, testShardIndex + 200] // Use different shards }; @@ -168,7 +168,7 @@ describe("ShardReader", function () { describe("isPeerOnShard", function () { it("should return true when peer is on the specified shard", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -185,7 +185,7 @@ describe("ShardReader", function () { }); it("should return false when peer is on different cluster", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -202,7 +202,7 @@ describe("ShardReader", function () { }); it("should return false when peer is not on the specified shard", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -233,7 +233,7 @@ describe("ShardReader", function () { describe("isPeerOnTopic", function () { it("should return true when peer is on the pubsub topic shard", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -248,7 +248,7 @@ describe("ShardReader", function () { }); it("should return false when peer is not on the pubsub topic shard", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; @@ -263,7 +263,7 @@ describe("ShardReader", function () { }); it("should return false when pubsub topic parsing fails", async function () { - const shardInfoBytes = encodeRelayShard(testRelayShards); + const shardInfoBytes = encodeRelayShard(testShardInfo); const mockPeer = { metadata: new Map([["shardInfo", shardInfoBytes]]) }; diff --git a/packages/core/src/lib/connection_manager/shard_reader.ts b/packages/core/src/lib/connection_manager/shard_reader.ts index b5ae35a778..2dade0de99 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.ts @@ -3,8 +3,8 @@ import type { ClusterId, NetworkConfig, PubsubTopic, - RelayShards, - ShardId + ShardId, + ShardInfo } from "@waku/interfaces"; import { decodeRelayShard, @@ -96,7 +96,7 @@ export class ShardReader implements IShardReader { ); } - private async getRelayShards(id: PeerId): Promise { + private async getRelayShards(id: PeerId): Promise { try { const peer = await this.libp2p.peerStore.get(id); diff --git a/packages/core/src/lib/metadata/metadata.ts b/packages/core/src/lib/metadata/metadata.ts index 18d59b5790..ac4707e575 100644 --- a/packages/core/src/lib/metadata/metadata.ts +++ b/packages/core/src/lib/metadata/metadata.ts @@ -7,7 +7,7 @@ import { type MetadataQueryResult, type PeerIdStr, ProtocolError, - type RelayShards + type ShardInfo } from "@waku/interfaces"; import { proto_metadata } from "@waku/proto"; import { encodeRelayShard, Logger } from "@waku/utils"; @@ -25,7 +25,7 @@ export const MetadataCodec = "/vac/waku/metadata/1.0.0"; class Metadata implements IMetadata { private readonly streamManager: StreamManager; private readonly libp2pComponents: Libp2pComponents; - protected handshakesConfirmed: Map = new Map(); + protected handshakesConfirmed: Map = new Map(); public readonly multicodec = MetadataCodec; @@ -148,7 +148,7 @@ class Metadata implements IMetadata { }); const response = proto_metadata.WakuMetadataResponse.decode( bytes - ) as RelayShards; + ) as ShardInfo; if (!response) { log.error("Error decoding metadata response"); @@ -166,16 +166,16 @@ class Metadata implements IMetadata { private async savePeerShardInfo( peerId: PeerId, - relayShards: RelayShards + shardInfo: ShardInfo ): Promise { - // add or update the relayShards to peer store + // add or update the shardInfo to peer store await this.libp2pComponents.peerStore.merge(peerId, { metadata: { - shardInfo: encodeRelayShard(relayShards) + shardInfo: encodeRelayShard(shardInfo) } }); - this.handshakesConfirmed.set(peerId.toString(), relayShards); + this.handshakesConfirmed.set(peerId.toString(), shardInfo); } } diff --git a/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts b/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts index a9d46740f2..9087f12c15 100644 --- a/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts +++ b/packages/discovery/src/peer-exchange/waku_peer_exchange_discovery.ts @@ -10,7 +10,7 @@ import type { import { type Libp2pComponents, type PeerExchangeQueryResult, - type RelayShards, + ShardInfo, Tags } from "@waku/interfaces"; import { decodeRelayShard, encodeRelayShard, Logger } from "@waku/utils"; @@ -279,7 +279,7 @@ export class PeerExchangeDiscovery private async checkPeerInfoDiff( peerInfo: PeerInfo, - shardInfo?: RelayShards + shardInfo?: ShardInfo ): Promise<{ hasMultiaddrDiff: boolean; hasShardDiff: boolean }> { const { id: peerId } = peerInfo; const peer = await this.components.peerStore.get(peerId); diff --git a/packages/enr/src/enr.ts b/packages/enr/src/enr.ts index 71b2bcc0fb..77c80fc1c2 100644 --- a/packages/enr/src/enr.ts +++ b/packages/enr/src/enr.ts @@ -5,8 +5,8 @@ import type { ENRValue, IEnr, NodeId, - RelayShards, - SequenceNumber + SequenceNumber, + ShardInfo } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -64,7 +64,7 @@ export class ENR extends RawEnr implements IEnr { protocol: TransportProtocol | TransportProtocolPerIpVersion ) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this); - public get shardInfo(): RelayShards | undefined { + public get shardInfo(): ShardInfo | undefined { if (this.rs && this.rsv) { log.warn("ENR contains both `rs` and `rsv` fields."); } diff --git a/packages/enr/src/raw_enr.ts b/packages/enr/src/raw_enr.ts index 1b3ced089a..0629932f78 100644 --- a/packages/enr/src/raw_enr.ts +++ b/packages/enr/src/raw_enr.ts @@ -6,8 +6,8 @@ import { import type { ENRKey, ENRValue, - RelayShards, SequenceNumber, + ShardInfo, Waku2 } from "@waku/interfaces"; import { decodeRelayShard } from "@waku/utils"; @@ -52,13 +52,13 @@ export class RawEnr extends Map { } } - public get rs(): RelayShards | undefined { + public get rs(): ShardInfo | undefined { const rs = this.get("rs"); if (!rs) return undefined; return decodeRelayShard(rs); } - public get rsv(): RelayShards | undefined { + public get rsv(): ShardInfo | undefined { const rsv = this.get("rsv"); if (!rsv) return undefined; return decodeRelayShard(rsv); diff --git a/packages/interfaces/src/enr.ts b/packages/interfaces/src/enr.ts index 01d4bcb751..ec4b4ab54c 100644 --- a/packages/interfaces/src/enr.ts +++ b/packages/interfaces/src/enr.ts @@ -2,7 +2,7 @@ import type { PeerId } from "@libp2p/interface"; import type { PeerInfo } from "@libp2p/interface"; import type { Multiaddr } from "@multiformats/multiaddr"; -import { RelayShards } from "./sharding.js"; +import { ShardInfo } from "./sharding.js"; export type ENRKey = string; export type ENRValue = Uint8Array; @@ -36,7 +36,7 @@ export interface IEnr extends Map { multiaddrs?: Multiaddr[]; waku2?: Waku2; peerInfo: PeerInfo | undefined; - shardInfo?: RelayShards; + shardInfo?: ShardInfo; /** * @deprecated: use { @link IEnr.peerInfo } instead. diff --git a/packages/interfaces/src/metadata.ts b/packages/interfaces/src/metadata.ts index b9714d92f8..32ce59c2e6 100644 --- a/packages/interfaces/src/metadata.ts +++ b/packages/interfaces/src/metadata.ts @@ -1,9 +1,9 @@ import type { PeerId } from "@libp2p/interface"; import { ThisOrThat } from "./misc.js"; -import type { ClusterId, RelayShards } from "./sharding.js"; +import type { ClusterId, ShardInfo } from "./sharding.js"; -export type MetadataQueryResult = ThisOrThat<"shardInfo", RelayShards>; +export type MetadataQueryResult = ThisOrThat<"shardInfo", ShardInfo>; export interface IMetadata { readonly multicodec: string; diff --git a/packages/interfaces/src/sharding.ts b/packages/interfaces/src/sharding.ts index f204d47ecd..44689c5953 100644 --- a/packages/interfaces/src/sharding.ts +++ b/packages/interfaces/src/sharding.ts @@ -4,7 +4,7 @@ */ export type NetworkConfig = StaticSharding | AutoSharding; -export type RelayShards = { +export type ShardInfo = { clusterId: ClusterId; shards: ShardId[]; }; diff --git a/packages/sdk/src/peer_manager/peer_manager.spec.ts b/packages/sdk/src/peer_manager/peer_manager.spec.ts index f4eac85f81..8a4ce9ad84 100644 --- a/packages/sdk/src/peer_manager/peer_manager.spec.ts +++ b/packages/sdk/src/peer_manager/peer_manager.spec.ts @@ -1,9 +1,11 @@ import { PeerId } from "@libp2p/interface"; import { + ClusterId, CONNECTION_LOCKED_TAG, IConnectionManager, Libp2p, - Protocols + Protocols, + ShardId } from "@waku/interfaces"; import { expect } from "chai"; import sinon from "sinon"; @@ -81,7 +83,12 @@ describe("PeerManager", () => { pubsubTopics: [TEST_PUBSUB_TOPIC], getConnectedPeers: async () => peers, getPeers: async () => peers, - isPeerOnShard: async (_id: PeerId, _topic: string) => true + isPeerOnShard: async ( + _id: PeerId, + _clusterId: ClusterId, + _shardId: ShardId + ) => true, + isPeerOnTopic: async (_id: PeerId, _topic: string) => true } as unknown as IConnectionManager; peerManager = new PeerManager({ libp2p, diff --git a/packages/tests/src/constants.ts b/packages/tests/src/constants.ts index 6847f41dac..637cb76a4c 100644 --- a/packages/tests/src/constants.ts +++ b/packages/tests/src/constants.ts @@ -5,7 +5,7 @@ * @module */ -import { AutoSharding, RelayShards } from "@waku/interfaces"; +import { AutoSharding, ShardInfo } from "@waku/interfaces"; import { createRoutingInfo } from "@waku/utils"; export const NOISE_KEY_1 = new Uint8Array( @@ -91,7 +91,7 @@ export const DefaultTestNetworkConfig: AutoSharding = { clusterId: DefaultTestClusterId, numShardsInCluster: DefaultTestNumShardsInCluster }; -export const DefaultTestRelayShards: RelayShards = { +export const DefaultTestShardInfo: ShardInfo = { clusterId: DefaultTestClusterId, shards: [0] }; diff --git a/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts b/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts index e85ba125e0..9034ea9e4c 100644 --- a/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts +++ b/packages/tests/tests/peer-exchange/continuous_discovery.spec.ts @@ -3,8 +3,8 @@ import { type PeerId } from "@libp2p/interface"; import { peerIdFromPrivateKey } from "@libp2p/peer-id"; import { multiaddr } from "@multiformats/multiaddr"; import { PeerExchangeDiscovery } from "@waku/discovery"; -import { IEnr, LightNode, RelayShards } from "@waku/interfaces"; -import { createLightNode } from "@waku/sdk"; +import { IEnr, LightNode } from "@waku/interfaces"; +import { createLightNode, ShardInfo } from "@waku/sdk"; import { decodeRelayShard } from "@waku/utils"; import { expect } from "chai"; import Sinon from "sinon"; @@ -15,7 +15,7 @@ describe("Peer Exchange Continuous Discovery", () => { let peerId: PeerId; let randomPeerId: PeerId; let waku: LightNode; - const relayShards: RelayShards = { + const shardInfo: ShardInfo = { clusterId: 2, shards: [1, 2] }; @@ -38,7 +38,7 @@ describe("Peer Exchange Continuous Discovery", () => { const newPeerInfo = { ENR: { peerId, - shardInfo: relayShards, + shardInfo, peerInfo: { multiaddrs: newMultiaddrs, id: peerId @@ -59,14 +59,14 @@ describe("Peer Exchange Continuous Discovery", () => { }); it("Should update shard info", async () => { - const newRelayShards: RelayShards = { + const newShardInfo: ShardInfo = { clusterId: 2, shards: [1, 2, 3] }; const newPeerInfo = { ENR: { peerId, - shardInfo: newRelayShards, + shardInfo: newShardInfo, peerInfo: { multiaddrs: multiaddrs, id: peerId @@ -86,7 +86,7 @@ describe("Peer Exchange Continuous Discovery", () => { ); const _shardInfo = decodeRelayShard(newPeer.metadata.get("shardInfo")!); - expect(_shardInfo).to.deep.equal(newRelayShards); + expect(_shardInfo).to.deep.equal(newShardInfo); }); async function discoverPeerOnce(): Promise { @@ -95,7 +95,7 @@ describe("Peer Exchange Continuous Discovery", () => { const enr: IEnr = { peerId, - shardInfo: relayShards, + shardInfo, peerInfo: { multiaddrs: multiaddrs, id: peerId @@ -122,6 +122,6 @@ describe("Peer Exchange Continuous Discovery", () => { multiaddrs[0].toString() ); const _shardInfo = decodeRelayShard(peer.metadata.get("shardInfo")!); - expect(_shardInfo).to.deep.equal(relayShards); + expect(_shardInfo).to.deep.equal(shardInfo); } }); diff --git a/packages/tests/tests/peer-exchange/index.spec.ts b/packages/tests/tests/peer-exchange/index.spec.ts index c0a3128363..e34b257895 100644 --- a/packages/tests/tests/peer-exchange/index.spec.ts +++ b/packages/tests/tests/peer-exchange/index.spec.ts @@ -12,7 +12,7 @@ import { beforeEachCustom, DefaultTestClusterId, DefaultTestNetworkConfig, - DefaultTestRelayShards, + DefaultTestShardInfo, makeLogFileName, ServiceNode, tearDownNodes @@ -33,14 +33,14 @@ describe("Peer Exchange", function () { nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2"); await nwaku1.start({ clusterId: DefaultTestClusterId, - shard: DefaultTestRelayShards.shards, + shard: DefaultTestShardInfo.shards, discv5Discovery: true, peerExchange: true, relay: true }); await nwaku2.start({ clusterId: DefaultTestClusterId, - shard: DefaultTestRelayShards.shards, + shard: DefaultTestShardInfo.shards, discv5Discovery: true, peerExchange: true, discv5BootstrapNode: (await nwaku1.info()).enrUri, @@ -120,7 +120,7 @@ describe("Peer Exchange", function () { nwaku3 = new ServiceNode(makeLogFileName(this) + "3"); await nwaku3.start({ clusterId: DefaultTestClusterId, - shard: DefaultTestRelayShards.shards, + shard: DefaultTestShardInfo.shards, discv5Discovery: true, peerExchange: true, discv5BootstrapNode: (await nwaku1.info()).enrUri, diff --git a/packages/utils/src/common/relay_shard_codec.ts b/packages/utils/src/common/relay_shard_codec.ts index 334673187f..91dea7b4ea 100644 --- a/packages/utils/src/common/relay_shard_codec.ts +++ b/packages/utils/src/common/relay_shard_codec.ts @@ -1,6 +1,6 @@ -import type { RelayShards } from "@waku/interfaces"; +import type { ShardInfo } from "@waku/interfaces"; -export const decodeRelayShard = (bytes: Uint8Array): RelayShards => { +export const decodeRelayShard = (bytes: Uint8Array): ShardInfo => { // explicitly converting to Uint8Array to avoid Buffer // https://github.com/libp2p/js-libp2p/issues/2146 bytes = new Uint8Array(bytes); @@ -33,8 +33,8 @@ export const decodeRelayShard = (bytes: Uint8Array): RelayShards => { return { clusterId, shards }; }; -export const encodeRelayShard = (relayShards: RelayShards): Uint8Array => { - const { clusterId, shards } = relayShards; +export const encodeRelayShard = (shardInfo: ShardInfo): Uint8Array => { + const { clusterId, shards } = shardInfo; const totalLength = shards.length >= 64 ? 130 : 3 + 2 * shards.length; const buffer = new ArrayBuffer(totalLength); const view = new DataView(buffer); From 3bb5a4931fc97ec6999136d43542be79c2c593e3 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 12:26:29 +1000 Subject: [PATCH 14/23] fix peer exchange test --- packages/tests/tests/sharding/peer_management.spec.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/tests/tests/sharding/peer_management.spec.ts b/packages/tests/tests/sharding/peer_management.spec.ts index 6d734828c9..a02c973e3c 100644 --- a/packages/tests/tests/sharding/peer_management.spec.ts +++ b/packages/tests/tests/sharding/peer_management.spec.ts @@ -1,7 +1,7 @@ import { bootstrap } from "@libp2p/bootstrap"; import type { PeerId } from "@libp2p/interface"; import { wakuPeerExchangeDiscovery } from "@waku/discovery"; -import { AutoSharding } from "@waku/interfaces"; +import type { AutoSharding, StaticSharding } from "@waku/interfaces"; import { createLightNode, LightNode, Tags } from "@waku/sdk"; import { contentTopicToShardIndex } from "@waku/utils"; import chai, { expect } from "chai"; @@ -45,7 +45,7 @@ describe("Static Sharding: Peer Management", function () { const shard = 2; const numShardsInCluster = 8; - const networkConfig: AutoSharding = { clusterId, numShardsInCluster }; + const networkConfig: StaticSharding = { clusterId }; await nwaku1.start({ discv5Discovery: true, @@ -122,7 +122,7 @@ describe("Static Sharding: Peer Management", function () { this.timeout(100_000); const numShardsInCluster = 8; - const networkConfig: AutoSharding = { clusterId, numShardsInCluster }; + const networkConfig: StaticSharding = { clusterId }; // this service node is not subscribed to the shard await nwaku1.start({ From ec8ded9c6afcd4e2c5cf96eff15e51bce2ce8d16 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 12:26:51 +1000 Subject: [PATCH 15/23] revert distracting changes --- packages/rln/src/codec.ts | 13 ++++++------- packages/tests/src/lib/service_node.ts | 6 +----- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/packages/rln/src/codec.ts b/packages/rln/src/codec.ts index 25a441b9bb..62cb68dad9 100644 --- a/packages/rln/src/codec.ts +++ b/packages/rln/src/codec.ts @@ -5,8 +5,7 @@ import type { IMessage, IProtoMessage, IRateLimitProof, - IRoutingInfo, - PubsubTopic + IRoutingInfo } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -29,10 +28,6 @@ export class RLNEncoder implements IEncoder { this.idSecretHash = identityCredential.IDSecretHash; } - public get pubsubTopic(): PubsubTopic { - return this.encoder.pubsubTopic; - } - public async toWire(message: IMessage): Promise { message.rateLimitProof = await this.generateProof(message); log.info("Proof generated", message.rateLimitProof); @@ -61,6 +56,10 @@ export class RLNEncoder implements IEncoder { ); } + public get pubsubTopic(): string { + return this.encoder.pubsubTopic; + } + public get routingInfo(): IRoutingInfo { return this.encoder.routingInfo; } @@ -98,7 +97,7 @@ export class RLNDecoder private readonly decoder: IDecoder ) {} - public get pubsubTopic(): PubsubTopic { + public get pubsubTopic(): string { return this.decoder.pubsubTopic; } diff --git a/packages/tests/src/lib/service_node.ts b/packages/tests/src/lib/service_node.ts index 7048c763bc..a71526f9dc 100644 --- a/packages/tests/src/lib/service_node.ts +++ b/packages/tests/src/lib/service_node.ts @@ -448,11 +448,7 @@ export class ServiceNode { if (body) options.body = JSON.stringify(body); const response = await fetch(`${this.httpUrl}${endpoint}`, options); - log.info( - `Received REST Response: `, - response.status, - response.statusText - ); + log.info(`Received REST Response: `, response.status); return await processResponse(response); } catch (error) { log.error(`${this.httpUrl} failed with error:`, error); From b51d598581b865e4752a38e2161eda499e956a99 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 13:57:06 +1000 Subject: [PATCH 16/23] re-enable commented test --- .../light-push/multiple_pubsub.node.spec.ts | 295 +++++++++--------- 1 file changed, 152 insertions(+), 143 deletions(-) diff --git a/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts b/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts index a446f3b56b..a71218faf0 100644 --- a/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts +++ b/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts @@ -1,144 +1,153 @@ -// TODO: This test is useless because the content topics all start -// with `/test/` meaning they are in the same shard +import { createEncoder } from "@waku/core"; +import { LightNode, Protocols } from "@waku/interfaces"; +import { createRoutingInfo } from "@waku/utils"; +import { utf8ToBytes } from "@waku/utils/bytes"; +import { expect } from "chai"; -// import { createEncoder } from "@waku/core"; -// import { LightNode, Protocols } from "@waku/interfaces"; -// import { contentTopicToPubsubTopic } from "@waku/utils"; -// import { utf8ToBytes } from "@waku/utils/bytes"; -// import { expect } from "chai"; -// -// import { -// afterEachCustom, -// beforeEachCustom, -// makeLogFileName, -// MessageCollector, -// runMultipleNodes, -// ServiceNode, -// ServiceNodesFleet, -// tearDownNodes, -// teardownNodesWithRedundancy -// } from "../../src/index.js"; -// -// import { TestClusterId, TestEncoder } from "./utils.js"; -// -// describe("Waku Light Push (Autosharding): Multiple Shards", function () { -// this.timeout(30000); -// const numServiceNodes = 2; -// -// let waku: LightNode; -// let serviceNodes: ServiceNodesFleet; -// -// const customEncoder2 = createEncoder({ -// contentTopic: "/test/2/waku-light-push/utf8", -// pubsubTopic: contentTopicToPubsubTopic( -// "/test/2/waku-light-push/utf8", -// TestClusterId -// ) -// }); -// -// beforeEachCustom(this, async () => { -// [serviceNodes, waku] = await runMultipleNodes( -// this.ctx, -// { -// clusterId: TestClusterId, -// contentTopics: [TestEncoder.contentTopic, customEncoder2.contentTopic] -// }, -// { lightpush: true, filter: true }, -// false, -// numServiceNodes, -// false -// ); -// }); -// -// afterEachCustom(this, async () => { -// await teardownNodesWithRedundancy(serviceNodes, waku); -// }); -// -// it("Subscribe and receive messages on 2 different pubsubtopics", async function () { -// const pushResponse1 = await waku.lightPush.send(TestEncoder, { -// payload: utf8ToBytes("M1") -// }); -// const pushResponse2 = await waku.lightPush.send(customEncoder2, { -// payload: utf8ToBytes("M2") -// }); -// -// expect(pushResponse1.successes.length).to.eq(numServiceNodes); -// expect(pushResponse2.successes.length).to.eq(numServiceNodes); -// -// const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]); -// const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]); -// -// expect( -// await messageCollector1.waitForMessages(1, { -// pubsubTopic: TestEncoder.pubsubTopic -// }) -// ).to.eq(true); -// -// expect( -// await messageCollector2.waitForMessages(1, { -// pubsubTopic: customEncoder2.pubsubTopic -// }) -// ).to.eq(true); -// -// messageCollector1.verifyReceivedMessage(0, { -// expectedMessageText: "M1", -// expectedContentTopic: TestEncoder.contentTopic, -// expectedPubsubTopic: TestEncoder.pubsubTopic -// }); -// -// messageCollector2.verifyReceivedMessage(0, { -// expectedMessageText: "M2", -// expectedContentTopic: customEncoder2.contentTopic, -// expectedPubsubTopic: customEncoder2.pubsubTopic -// }); -// }); -// -// it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () { -// // Set up and start a new nwaku node with Default PubsubTopic -// const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); -// -// try { -// await nwaku2.start({ -// filter: true, -// lightpush: true, -// relay: true, -// clusterId: TestClusterId, -// shard: [2] -// }); -// await nwaku2.ensureSubscriptionsAutosharding([ -// customEncoder2.pubsubTopic -// ]); -// await waku.dial(await nwaku2.getMultiaddrWithId()); -// await waku.waitForPeers([Protocols.LightPush]); -// -// const messageCollector2 = new MessageCollector(nwaku2); -// -// await waku.lightPush.send(TestEncoder, { -// payload: utf8ToBytes("M1") -// }); -// await waku.lightPush.send(customEncoder2, { -// payload: utf8ToBytes("M2") -// }); -// -// await serviceNodes.messageCollector.waitForMessages(1, { -// pubsubTopic: TestEncoder.pubsubTopic -// }); -// await messageCollector2.waitForMessagesAutosharding(1, { -// contentTopic: customEncoder2.contentTopic -// }); -// -// serviceNodes.messageCollector.verifyReceivedMessage(0, { -// expectedMessageText: "M1", -// expectedContentTopic: TestEncoder.contentTopic, -// expectedPubsubTopic: TestEncoder.pubsubTopic -// }); -// messageCollector2.verifyReceivedMessage(0, { -// expectedMessageText: "M2", -// expectedContentTopic: customEncoder2.contentTopic, -// expectedPubsubTopic: customEncoder2.pubsubTopic -// }); -// } catch (e) { -// await tearDownNodes([nwaku2], []); -// } -// }); -// }); +import { + afterEachCustom, + beforeEachCustom, + makeLogFileName, + MessageCollector, + runMultipleNodes, + ServiceNode, + ServiceNodesFleet, + tearDownNodes, + teardownNodesWithRedundancy +} from "../../src/index.js"; + +import { + TestClusterId, + TestContentTopic, + TestEncoder, + TestNetworkConfig, + TestRoutingInfo +} from "./utils.js"; + +describe("Waku Light Push (Autosharding): Multiple Shards", function () { + this.timeout(30000); + const numServiceNodes = 2; + + let waku: LightNode; + let serviceNodes: ServiceNodesFleet; + + const customContentTopic2 = "/test/2/waku-light-push/utf8"; + const customRoutingInfo2 = createRoutingInfo(TestNetworkConfig, { + contentTopic: customContentTopic2 + }); + + const customEncoder2 = createEncoder({ + contentTopic: customContentTopic2, + routingInfo: customRoutingInfo2 + }); + + beforeEachCustom(this, async () => { + [serviceNodes, waku] = await runMultipleNodes( + this.ctx, + TestRoutingInfo, + { + lightpush: true, + filter: true, + contentTopic: [TestEncoder.contentTopic, customEncoder2.contentTopic] + }, + false, + numServiceNodes, + false + ); + }); + + afterEachCustom(this, async () => { + await teardownNodesWithRedundancy(serviceNodes, waku); + }); + + it("Subscribe and receive messages on 2 different pubsubtopics", async function () { + if (customRoutingInfo2.pubsubTopic === TestEncoder.pubsubTopic) + throw "Invalid test, both encoder uses same shard"; + + const pushResponse1 = await waku.lightPush.send(TestEncoder, { + payload: utf8ToBytes("M1") + }); + const pushResponse2 = await waku.lightPush.send(customEncoder2, { + payload: utf8ToBytes("M2") + }); + + expect(pushResponse1.successes.length).to.eq(numServiceNodes); + expect(pushResponse2.successes.length).to.eq(numServiceNodes); + + const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]); + const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]); + + expect( + await messageCollector1.waitForMessagesAutosharding(1, { + contentTopic: TestEncoder.contentTopic + }) + ).to.eq(true); + + expect( + await messageCollector2.waitForMessagesAutosharding(1, { + contentTopic: customEncoder2.contentTopic + }) + ).to.eq(true); + + messageCollector1.verifyReceivedMessage(0, { + expectedMessageText: "M1", + expectedContentTopic: TestEncoder.contentTopic, + expectedPubsubTopic: TestEncoder.pubsubTopic + }); + + messageCollector2.verifyReceivedMessage(0, { + expectedMessageText: "M2", + expectedContentTopic: customEncoder2.contentTopic, + expectedPubsubTopic: customEncoder2.pubsubTopic + }); + }); + + it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () { + // Set up and start a new nwaku node with Default PubsubTopic + const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); + + try { + await nwaku2.start({ + filter: true, + lightpush: true, + relay: true, + clusterId: TestClusterId, + contentTopic: [TestContentTopic] + }); + await nwaku2.ensureSubscriptionsAutosharding([ + customEncoder2.pubsubTopic + ]); + await waku.dial(await nwaku2.getMultiaddrWithId()); + await waku.waitForPeers([Protocols.LightPush]); + + const messageCollector2 = new MessageCollector(nwaku2); + + await waku.lightPush.send(TestEncoder, { + payload: utf8ToBytes("M1") + }); + await waku.lightPush.send(customEncoder2, { + payload: utf8ToBytes("M2") + }); + + await serviceNodes.messageCollector.waitForMessages(1, { + contentTopic: TestEncoder.contentTopic + }); + await messageCollector2.waitForMessagesAutosharding(1, { + contentTopic: customEncoder2.contentTopic + }); + + serviceNodes.messageCollector.verifyReceivedMessage(0, { + expectedMessageText: "M1", + expectedContentTopic: TestEncoder.contentTopic, + expectedPubsubTopic: TestEncoder.pubsubTopic + }); + messageCollector2.verifyReceivedMessage(0, { + expectedMessageText: "M2", + expectedContentTopic: customEncoder2.contentTopic, + expectedPubsubTopic: customEncoder2.pubsubTopic + }); + } catch (e) { + await tearDownNodes([nwaku2], []); + } + }); +}); From 1e7e0291faa1d6536e617cd554cc39dc488f8a9f Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 14:10:00 +1000 Subject: [PATCH 17/23] separate file tests To not have 2 `runTests` in same file. --- .../subscribe-static-sharding.node.spec.ts | 119 ++++++++++++++++++ .../tests/tests/filter/subscribe.node.spec.ts | 111 +--------------- 2 files changed, 122 insertions(+), 108 deletions(-) create mode 100644 packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts diff --git a/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts b/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts new file mode 100644 index 0000000000..17adf742ba --- /dev/null +++ b/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts @@ -0,0 +1,119 @@ +import { createDecoder, createEncoder } from "@waku/core"; +import { LightNode } from "@waku/interfaces"; +import { Protocols, utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo, formatPubsubTopic } from "@waku/utils"; + +import { + afterEachCustom, + beforeEachCustom, + makeLogFileName, + MessageCollector, + runMultipleNodes, + ServiceNode, + ServiceNodesFleet, + tearDownNodes, + teardownNodesWithRedundancy +} from "../../src/index.js"; + +import { TestClusterId, TestContentTopic } from "./utils.js"; + +const runTests = (strictCheckNodes: boolean): void => { + describe(`Waku Filter: Subscribe: Multiple Service Nodes on Static Shard: Strict Check mode: ${strictCheckNodes}`, function () { + this.timeout(100000); + let waku: LightNode; + let serviceNodes: ServiceNodesFleet; + const staticNetworkConfig = { clusterId: 9 }; + const routingInfoShard1 = createRoutingInfo(staticNetworkConfig, { + shardId: 1 + }); + const encoderShard1 = createEncoder({ + contentTopic: TestContentTopic, + routingInfo: routingInfoShard1 + }); + const decoderShard1 = createDecoder(TestContentTopic, routingInfoShard1); + + beforeEachCustom(this, async () => { + [serviceNodes, waku] = await runMultipleNodes( + this.ctx, + routingInfoShard1, + undefined, + strictCheckNodes + ); + }); + + afterEachCustom(this, async () => { + await teardownNodesWithRedundancy(serviceNodes, waku); + }); + + it("Subscribe and receive messages from 2 nwaku nodes each with different static shards", async function () { + await waku.filter.subscribe( + decoderShard1, + serviceNodes.messageCollector.callback + ); + + // Set up and start a new nwaku node on different shard + const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); + + try { + const routingInfoShard2 = createRoutingInfo(staticNetworkConfig, { + shardId: 2 + }); + const contentTopic2 = "/test/4/waku-filter/default"; + const decoderShard2 = createDecoder(contentTopic2, routingInfoShard2); + const encoderShard2 = createEncoder({ + contentTopic: contentTopic2, + routingInfo: routingInfoShard2 + }); + + const shardId = 2; + await nwaku2.start({ + filter: true, + lightpush: true, + relay: true, + clusterId: TestClusterId, + shard: [shardId] + }); + await waku.dial(await nwaku2.getMultiaddrWithId()); + await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); + + await nwaku2.ensureSubscriptions([ + formatPubsubTopic(TestClusterId, shardId) + ]); + + const messageCollector2 = new MessageCollector(); + + await waku.filter.subscribe(decoderShard2, messageCollector2.callback); + + // Making sure that messages are send and received for both subscriptions + // While loop is done because of https://github.com/waku-org/js-waku/issues/1606 + while ( + !(await serviceNodes.messageCollector.waitForMessages(1)) || + !(await messageCollector2.waitForMessages(1)) + ) { + await waku.lightPush.send(encoderShard1, { + payload: utf8ToBytes("M1") + }); + await waku.lightPush.send(encoderShard2, { + payload: utf8ToBytes("M2") + }); + } + + serviceNodes.messageCollector.verifyReceivedMessage(0, { + expectedContentTopic: encoderShard1.contentTopic, + expectedPubsubTopic: routingInfoShard1.pubsubTopic, + expectedMessageText: "M1" + }); + + messageCollector2.verifyReceivedMessage(0, { + expectedContentTopic: encoderShard2.contentTopic, + expectedPubsubTopic: routingInfoShard2.pubsubTopic, + expectedMessageText: "M2" + }); + } catch (e) { + await tearDownNodes([nwaku2], []); + } + }); + }); +}; + +[true, false].map((strictCheckNodes) => runTests(strictCheckNodes)); diff --git a/packages/tests/tests/filter/subscribe.node.spec.ts b/packages/tests/tests/filter/subscribe.node.spec.ts index fd2bcb5290..197387ee0b 100644 --- a/packages/tests/tests/filter/subscribe.node.spec.ts +++ b/packages/tests/tests/filter/subscribe.node.spec.ts @@ -7,8 +7,8 @@ import { getPublicKey, symmetric } from "@waku/message-encryption"; -import { Protocols, utf8ToBytes } from "@waku/sdk"; -import { createRoutingInfo, formatPubsubTopic } from "@waku/utils"; +import { utf8ToBytes } from "@waku/sdk"; +import { createRoutingInfo } from "@waku/utils"; import { expect } from "chai"; import { @@ -16,12 +16,8 @@ import { beforeEachCustom, delay, generateTestData, - makeLogFileName, - MessageCollector, runMultipleNodes, - ServiceNode, ServiceNodesFleet, - tearDownNodes, teardownNodesWithRedundancy, TEST_STRING, waitForConnections @@ -300,7 +296,7 @@ const runTests = (strictCheckNodes: boolean): void => { }); }); - // skiped as it fails in CI but not locally https://github.com/waku-org/js-waku/issues/2438 + // skipped as it fails in CI but not locally https://github.com/waku-org/js-waku/issues/2438 it.skip("Subscribe to 30 topics in separate streams (30 streams for Filter is limit) at once and receives messages", async function () { this.timeout(100_000); const topicCount = 30; @@ -596,104 +592,3 @@ const runTests = (strictCheckNodes: boolean): void => { }; [true, false].map((strictCheckNodes) => runTests(strictCheckNodes)); - -const runTestsStatic = (strictCheckNodes: boolean): void => { - describe(`Waku Filter: Subscribe: Multiple Service Nodes on Static Shard: Strict Check mode: ${strictCheckNodes}`, function () { - this.timeout(100000); - let waku: LightNode; - let serviceNodes: ServiceNodesFleet; - const staticNetworkConfig = { clusterId: 9 }; - const routingInfoShard1 = createRoutingInfo(staticNetworkConfig, { - shardId: 1 - }); - const encoderShard1 = createEncoder({ - contentTopic: TestContentTopic, - routingInfo: routingInfoShard1 - }); - const decoderShard1 = createDecoder(TestContentTopic, routingInfoShard1); - - beforeEachCustom(this, async () => { - [serviceNodes, waku] = await runMultipleNodes( - this.ctx, - routingInfoShard1, - undefined, - strictCheckNodes - ); - }); - - afterEachCustom(this, async () => { - await teardownNodesWithRedundancy(serviceNodes, waku); - }); - - it("Subscribe and receive messages from 2 nwaku nodes each with different static shards", async function () { - await waku.filter.subscribe( - decoderShard1, - serviceNodes.messageCollector.callback - ); - - // Set up and start a new nwaku node on different shard - const nwaku2 = new ServiceNode(makeLogFileName(this) + "3"); - - try { - const routingInfoShard2 = createRoutingInfo(staticNetworkConfig, { - shardId: 2 - }); - const contentTopic2 = "/test/4/waku-filter/default"; - const decoderShard2 = createDecoder(contentTopic2, routingInfoShard2); - const encoderShard2 = createEncoder({ - contentTopic: contentTopic2, - routingInfo: routingInfoShard2 - }); - - const shardId = 2; - await nwaku2.start({ - filter: true, - lightpush: true, - relay: true, - clusterId: TestClusterId, - shard: [shardId] - }); - await waku.dial(await nwaku2.getMultiaddrWithId()); - await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); - - await nwaku2.ensureSubscriptions([ - formatPubsubTopic(TestClusterId, shardId) - ]); - - const messageCollector2 = new MessageCollector(); - - await waku.filter.subscribe(decoderShard2, messageCollector2.callback); - - // Making sure that messages are send and received for both subscriptions - // While loop is done because of https://github.com/waku-org/js-waku/issues/1606 - while ( - !(await serviceNodes.messageCollector.waitForMessages(1)) || - !(await messageCollector2.waitForMessages(1)) - ) { - await waku.lightPush.send(encoderShard1, { - payload: utf8ToBytes("M1") - }); - await waku.lightPush.send(encoderShard2, { - payload: utf8ToBytes("M2") - }); - } - - serviceNodes.messageCollector.verifyReceivedMessage(0, { - expectedContentTopic: encoderShard1.contentTopic, - expectedPubsubTopic: routingInfoShard1.pubsubTopic, - expectedMessageText: "M1" - }); - - messageCollector2.verifyReceivedMessage(0, { - expectedContentTopic: encoderShard2.contentTopic, - expectedPubsubTopic: routingInfoShard2.pubsubTopic, - expectedMessageText: "M2" - }); - } catch (e) { - await tearDownNodes([nwaku2], []); - } - }); - }); -}; - -[true, false].map((strictCheckNodes) => runTestsStatic(strictCheckNodes)); From d4429702c261845e314c4f5464b477f1d9092de0 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 14:47:07 +1000 Subject: [PATCH 18/23] introduce interface for better mocking As one could fall in a trap of not defining the right methods on the mock --- .../connection_manager/connection_manager.spec.ts | 6 +++--- .../lib/connection_manager/connection_manager.ts | 6 ++---- .../lib/connection_manager/shard_reader.spec.ts | 11 ++++++----- .../src/lib/connection_manager/shard_reader.ts | 15 ++++++--------- packages/interfaces/src/connection_manager.ts | 12 ++++++++++++ packages/sdk/src/peer_manager/peer_manager.ts | 12 ++++-------- 6 files changed, 33 insertions(+), 29 deletions(-) diff --git a/packages/core/src/lib/connection_manager/connection_manager.spec.ts b/packages/core/src/lib/connection_manager/connection_manager.spec.ts index ec1d53921f..45d64781f6 100644 --- a/packages/core/src/lib/connection_manager/connection_manager.spec.ts +++ b/packages/core/src/lib/connection_manager/connection_manager.spec.ts @@ -15,7 +15,7 @@ import { ConnectionManager } from "./connection_manager.js"; import { DiscoveryDialer } from "./discovery_dialer.js"; import { KeepAliveManager } from "./keep_alive_manager.js"; import { NetworkMonitor } from "./network_monitor.js"; -import { ShardReader } from "./shard_reader.js"; +import { IShardReader, ShardReader } from "./shard_reader.js"; describe("ConnectionManager", () => { let libp2p: Libp2p; @@ -30,7 +30,7 @@ describe("ConnectionManager", () => { // Mock internal components let mockKeepAliveManager: sinon.SinonStubbedInstance; let mockDiscoveryDialer: sinon.SinonStubbedInstance; - let mockShardReader: sinon.SinonStubbedInstance; + let mockShardReader: sinon.SinonStubbedInstance; let mockNetworkMonitor: sinon.SinonStubbedInstance; let mockConnectionLimiter: sinon.SinonStubbedInstance; @@ -87,7 +87,7 @@ describe("ConnectionManager", () => { mockShardReader = { isPeerOnTopic: sinon.stub().resolves(true) - } as unknown as sinon.SinonStubbedInstance; + } as unknown as sinon.SinonStubbedInstance; mockNetworkMonitor = { start: sinon.stub(), diff --git a/packages/core/src/lib/connection_manager/connection_manager.ts b/packages/core/src/lib/connection_manager/connection_manager.ts index 0f3f83e159..f5d6ded196 100644 --- a/packages/core/src/lib/connection_manager/connection_manager.ts +++ b/packages/core/src/lib/connection_manager/connection_manager.ts @@ -1,7 +1,6 @@ import { type Peer, type PeerId, type Stream } from "@libp2p/interface"; import { MultiaddrInput } from "@multiformats/multiaddr"; import { - ClusterId, ConnectionManagerOptions, IConnectionManager, IRelay, @@ -47,7 +46,7 @@ export class ConnectionManager implements IConnectionManager { private readonly networkMonitor: NetworkMonitor; private readonly connectionLimiter: ConnectionLimiter; - private options: ConnectionManagerOptions; + private readonly options: ConnectionManagerOptions; private libp2p: Libp2p; public constructor(options: ConnectionManagerConstructorOptions) { @@ -200,9 +199,8 @@ export class ConnectionManager implements IConnectionManager { public async isPeerOnShard( peerId: PeerId, - clusterId: ClusterId, shardId: ShardId ): Promise { - return this.shardReader.isPeerOnShard(peerId, clusterId, shardId); + return this.shardReader.isPeerOnShard(peerId, shardId); } } diff --git a/packages/core/src/lib/connection_manager/shard_reader.spec.ts b/packages/core/src/lib/connection_manager/shard_reader.spec.ts index 30bb19ac26..79f058a1d7 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.spec.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.spec.ts @@ -177,7 +177,6 @@ describe("ShardReader", function () { const result = await shardReader.isPeerOnShard( testPeerId, - testClusterId, testShardIndex ); @@ -192,9 +191,13 @@ describe("ShardReader", function () { mockPeerStore.get.resolves(mockPeer); - const result = await shardReader.isPeerOnShard( + const shardReaderCluster5 = new ShardReader({ + libp2p: mockLibp2p as any, + networkConfig: { clusterId: 5 } + }); + + const result = await shardReaderCluster5.isPeerOnShard( testPeerId, - 5, testShardIndex ); @@ -211,7 +214,6 @@ describe("ShardReader", function () { const result = await shardReader.isPeerOnShard( testPeerId, - testClusterId, testShardIndex + 100 ); @@ -223,7 +225,6 @@ describe("ShardReader", function () { const result = await shardReader.isPeerOnShard( testPeerId, - testClusterId, testShardIndex ); diff --git a/packages/core/src/lib/connection_manager/shard_reader.ts b/packages/core/src/lib/connection_manager/shard_reader.ts index 2dade0de99..0867795ca4 100644 --- a/packages/core/src/lib/connection_manager/shard_reader.ts +++ b/packages/core/src/lib/connection_manager/shard_reader.ts @@ -20,7 +20,7 @@ type ShardReaderConstructorOptions = { networkConfig: NetworkConfig; }; -interface IShardReader { +export interface IShardReader { hasShardInfo(id: PeerId): Promise; isPeerOnCluster(id: PeerId): Promise; isPeerOnShard( @@ -66,7 +66,8 @@ export class ShardReader implements IShardReader { ): Promise { try { const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic); - return await this.isPeerOnShard(id, clusterId, shard); + if (clusterId !== this.clusterId) return false; + return await this.isPeerOnShard(id, shard); } catch (error) { log.error( `Error comparing pubsub topic ${pubsubTopic} with shard info for ${id}`, @@ -76,14 +77,10 @@ export class ShardReader implements IShardReader { } } - public async isPeerOnShard( - id: PeerId, - clusterId: ClusterId, - shard: ShardId - ): Promise { + public async isPeerOnShard(id: PeerId, shard: ShardId): Promise { const peerShardInfo = await this.getRelayShards(id); log.info( - `Checking if peer on same shard: this { clusterId: ${clusterId}, shardId: ${shard} },` + + `Checking if peer on same shard: this { clusterId: ${this.clusterId}, shardId: ${shard} },` + `${id} { clusterId: ${peerShardInfo?.clusterId}, shards: ${peerShardInfo?.shards} }` ); if (!peerShardInfo) { @@ -91,7 +88,7 @@ export class ShardReader implements IShardReader { } return ( - peerShardInfo.clusterId === clusterId && + peerShardInfo.clusterId === this.clusterId && peerShardInfo.shards.includes(shard) ); } diff --git a/packages/interfaces/src/connection_manager.ts b/packages/interfaces/src/connection_manager.ts index 301ca94155..d863bfe3af 100644 --- a/packages/interfaces/src/connection_manager.ts +++ b/packages/interfaces/src/connection_manager.ts @@ -1,6 +1,8 @@ import type { Peer, PeerId, Stream } from "@libp2p/interface"; import type { MultiaddrInput } from "@multiformats/multiaddr"; +import { ShardId } from "./sharding.js"; + // Peer tags export enum Tags { BOOTSTRAP = "bootstrap", @@ -161,4 +163,14 @@ export interface IConnectionManager { * @returns Promise resolving to true if the peer has shard info, false otherwise */ hasShardInfo(peerId: PeerId): Promise; + + /** + * Returns true if the passed peer is on the passed pubsub topic + */ + isPeerOnTopic(peerId: PeerId, pubsubTopic: string): Promise; + + /** + * Returns true if the passed peer is on the passed shard + */ + isPeerOnShard(peerId: PeerId, shardId: ShardId): Promise; } diff --git a/packages/sdk/src/peer_manager/peer_manager.ts b/packages/sdk/src/peer_manager/peer_manager.ts index a42baf7215..48c7a1efe2 100644 --- a/packages/sdk/src/peer_manager/peer_manager.ts +++ b/packages/sdk/src/peer_manager/peer_manager.ts @@ -4,14 +4,10 @@ import { PeerId, TypedEventEmitter } from "@libp2p/interface"; -import { - ConnectionManager, - FilterCodecs, - LightPushCodec, - StoreCodec -} from "@waku/core"; +import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core"; import { CONNECTION_LOCKED_TAG, + type IConnectionManager, Libp2p, Libp2pEventHandler, Protocols @@ -29,7 +25,7 @@ type PeerManagerConfig = { type PeerManagerParams = { libp2p: Libp2p; config?: PeerManagerConfig; - connectionManager: ConnectionManager; + connectionManager: IConnectionManager; }; type GetPeersParams = { @@ -67,7 +63,7 @@ export class PeerManager { private readonly numPeersToUse: number; private readonly libp2p: Libp2p; - private readonly connectionManager: ConnectionManager; + private readonly connectionManager: IConnectionManager; private readonly lockedPeers = new Set(); private readonly unlockedPeers = new Map(); From 158f6ecf984d82556e4b72555020524e21b5129e Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 15:02:26 +1000 Subject: [PATCH 19/23] fix retry_manager test --- packages/sdk/src/light_push/retry_manager.spec.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/sdk/src/light_push/retry_manager.spec.ts b/packages/sdk/src/light_push/retry_manager.spec.ts index 425bb5837c..c9eb95eea8 100644 --- a/packages/sdk/src/light_push/retry_manager.spec.ts +++ b/packages/sdk/src/light_push/retry_manager.spec.ts @@ -139,7 +139,7 @@ describe("RetryManager", () => { throw new Error(ProtocolError.NO_PEER_AVAILABLE); }); - await (retryManager as any)["taskExecutor"]({ + await (retryManager as RetryManager)["taskExecutor"]({ callback: errorCallback, maxAttempts: 1, routingInfo: TestRoutingInfo @@ -149,7 +149,7 @@ describe("RetryManager", () => { expect( (peerManager.renewPeer as sinon.SinonSpy).calledWith(mockPeerId, { protocol: Protocols.LightPush, - routingInfo: TestRoutingInfo + pubsubTopic: TestRoutingInfo.pubsubTopic }) ).to.be.true; }); From 77c694095dadeff4e80136d58a6f35d4aa330984 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 15:40:05 +1000 Subject: [PATCH 20/23] fixing test against nwaku nightly --- packages/tests/tests/ephemeral.node.spec.ts | 3 ++- .../tests/filter/subscribe-static-sharding.node.spec.ts | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/tests/tests/ephemeral.node.spec.ts b/packages/tests/tests/ephemeral.node.spec.ts index 848162ca4b..a0cee19156 100644 --- a/packages/tests/tests/ephemeral.node.spec.ts +++ b/packages/tests/tests/ephemeral.node.spec.ts @@ -97,7 +97,8 @@ describe("Waku Message Ephemeral field", function () { store: true, relay: true, contentTopic: contentTopics, - clusterId: TestClusterId + clusterId: TestClusterId, + numShardsInNetwork: TestNetworkConfig.numShardsInCluster }); await nwaku.ensureSubscriptionsAutosharding([ TestContentTopic, diff --git a/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts b/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts index 17adf742ba..fa97915584 100644 --- a/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts +++ b/packages/tests/tests/filter/subscribe-static-sharding.node.spec.ts @@ -70,8 +70,9 @@ const runTests = (strictCheckNodes: boolean): void => { filter: true, lightpush: true, relay: true, - clusterId: TestClusterId, - shard: [shardId] + clusterId: staticNetworkConfig.clusterId, + shard: [shardId], + numShardsInNetwork: 0 // Running static sharding }); await waku.dial(await nwaku2.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]); From 39c139158c4b9fb3db20250dab1abd9e7665c40b Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 15:44:30 +1000 Subject: [PATCH 21/23] Delete dual test (makes it hard to work on nwaku master) --- .../tests/tests/store/cursor.node.spec.ts | 52 +------------------ 1 file changed, 2 insertions(+), 50 deletions(-) diff --git a/packages/tests/tests/store/cursor.node.spec.ts b/packages/tests/tests/store/cursor.node.spec.ts index ebac2d1a3a..1a68186c45 100644 --- a/packages/tests/tests/store/cursor.node.spec.ts +++ b/packages/tests/tests/store/cursor.node.spec.ts @@ -16,7 +16,6 @@ import { startAndConnectLightNode, TestContentTopic, TestDecoder, - TestDecoder2, TestNetworkConfig, TestRoutingInfo, totalMsgs @@ -50,7 +49,6 @@ describe("Waku Store, cursor", function () { [110, 120] ].forEach(([cursorIndex, messageCount]) => { it(`Passing a valid cursor at ${cursorIndex} index when there are ${messageCount} messages`, async function () { - console.log(nwaku); await sendMessages( nwaku, messageCount, @@ -140,11 +138,7 @@ describe("Waku Store, cursor", function () { ).to.be.eq(bytesToUtf8(messages[messages.length - 1].payload)); }); - it("Passing invalid cursor for nwaku > 0.35.1", async function () { - if (nwaku.version && nwaku.version.minor < 36) { - this.skip(); - } - + it("Passing invalid cursor", async function () { await sendMessages(nwaku, totalMsgs, TestContentTopic, TestRoutingInfo); const messages: DecodedMessage[] = []; @@ -172,49 +166,7 @@ describe("Waku Store, cursor", function () { } catch (err) { if ( !(err instanceof Error) || - !err.message.includes( - "Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DIRVER_ERROR: cursor not found" - ) - ) { - throw err; - } - } - }); - - it("Passing cursor with wrong pubsubTopic for nwaku > 0.35.1", async function () { - if (nwaku.version && nwaku.version.minor < 36) { - this.skip(); - } - - await sendMessages( - nwaku, - totalMsgs, - TestDecoder.contentTopic, - TestRoutingInfo - ); - - const messages: DecodedMessage[] = []; - for await (const page of waku.store.queryGenerator([TestDecoder])) { - for await (const msg of page) { - messages.push(msg as DecodedMessage); - } - } - messages[5].pubsubTopic = TestDecoder2.routingInfo.pubsubTopic; - const cursor = waku.store.createCursor(messages[5]); - - try { - for await (const page of waku.store.queryGenerator([TestDecoder], { - paginationCursor: cursor - })) { - void page; - } - throw new Error("Cursor with wrong pubsubtopic was accepted"); - } catch (err) { - if ( - !(err instanceof Error) || - !err.message.includes( - "Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DIRVER_ERROR: cursor not found" - ) + !err.message.includes("cursor not found") ) { throw err; } From cd265ba1ae6a11a56dc0894d8f48c4e3771e0ec4 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Mon, 21 Jul 2025 15:54:05 +1000 Subject: [PATCH 22/23] fix test for nwaku master --- packages/tests/src/lib/runNodes.ts | 1 + packages/tests/tests/metadata.spec.ts | 5 +++-- packages/tests/tests/store/different_static_shards.spec.ts | 6 ++++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/tests/src/lib/runNodes.ts b/packages/tests/src/lib/runNodes.ts index 687ae1af25..f80962019a 100644 --- a/packages/tests/src/lib/runNodes.ts +++ b/packages/tests/src/lib/runNodes.ts @@ -76,6 +76,7 @@ export async function runNodes( } else if (isStaticSharding(networkConfig) && options.relayShards) { const shards = options.relayShards; nwakuArgs.shard = shards; + nwakuArgs.numShardsInNetwork = 0; shards.map((shardId) => routingInfos.push(createRoutingInfo(networkConfig, { shardId })) diff --git a/packages/tests/tests/metadata.spec.ts b/packages/tests/tests/metadata.spec.ts index 450621b0e6..1be3fe54a6 100644 --- a/packages/tests/tests/metadata.spec.ts +++ b/packages/tests/tests/metadata.spec.ts @@ -151,14 +151,15 @@ describe("Metadata Protocol", function () { it("receiving a ping from a peer does not overwrite shard info", async function () { const clusterId = 2; const shards = [1]; - const numShardsInCluster = 8; + const numShardsInCluster = 0; //static sharding await nwaku1.start({ relay: true, discv5Discovery: true, peerExchange: true, clusterId, - shard: shards + shard: shards, + numShardsInNetwork: numShardsInCluster }); const nwaku1Ma = await nwaku1.getMultiaddrWithId(); diff --git a/packages/tests/tests/store/different_static_shards.spec.ts b/packages/tests/tests/store/different_static_shards.spec.ts index 3c639364bc..7295666588 100644 --- a/packages/tests/tests/store/different_static_shards.spec.ts +++ b/packages/tests/tests/store/different_static_shards.spec.ts @@ -138,7 +138,8 @@ describe("Waku Store, different static shards", function () { store: true, clusterId: StaticTestClusterId, shard: [1], - relay: true + relay: true, + numShardsInNetwork: 0 // static sharding }); // Set up and start a new nwaku node with Default Pubsubtopic @@ -147,7 +148,8 @@ describe("Waku Store, different static shards", function () { store: true, clusterId: StaticTestClusterId, shard: [2], - relay: true + relay: true, + numShardsInNetwork: 0 // static sharding }); const totalMsgs = 10; From f8f20db85caeec9992caa7b5942b7ff0be155527 Mon Sep 17 00:00:00 2001 From: fryorcraken Date: Tue, 22 Jul 2025 10:59:47 +1000 Subject: [PATCH 23/23] small optimization --- .../tests/high-throughput.spec.ts | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/packages/reliability-tests/tests/high-throughput.spec.ts b/packages/reliability-tests/tests/high-throughput.spec.ts index 48bb20f5a3..7a11e9caf9 100644 --- a/packages/reliability-tests/tests/high-throughput.spec.ts +++ b/packages/reliability-tests/tests/high-throughput.spec.ts @@ -17,6 +17,10 @@ import { } from "../../tests/src/index.js"; const ContentTopic = "/waku/2/content/test.high-throughput.js"; +const NetworkConfig = { clusterId: 0, numShardsInCluster: 8 }; +const RoutingInfo = createRoutingInfo(NetworkConfig, { + contentTopic: ContentTopic +}); describe("High Throughput Messaging", function () { const testDurationMs = 20 * 60 * 1000; // 20 minutes @@ -35,8 +39,6 @@ describe("High Throughput Messaging", function () { }); it("Send/Receive thousands of messages quickly", async function () { - const networkConfig = { clusterId: 0, numShardsInCluster: 8 }; - const testStart = new Date(); const testEnd = Date.now() + testDurationMs; @@ -53,8 +55,8 @@ describe("High Throughput Messaging", function () { store: true, filter: true, relay: true, - clusterId: networkConfig.clusterId, - numShardsInNetwork: networkConfig.numShardsInCluster, + clusterId: NetworkConfig.clusterId, + numShardsInNetwork: NetworkConfig.numShardsInCluster, contentTopic: [ContentTopic] }, { retries: 3 } @@ -65,20 +67,17 @@ describe("High Throughput Messaging", function () { await nwaku.ensureSubscriptions([ contentTopicToPubsubTopic( ContentTopic, - networkConfig.clusterId, - networkConfig.numShardsInCluster + NetworkConfig.clusterId, + NetworkConfig.numShardsInCluster ) ]); - waku = await createLightNode({ networkConfig }); + waku = await createLightNode({ networkConfig: NetworkConfig }); await waku.start(); await waku.dial(await nwaku.getMultiaddrWithId()); await waku.waitForPeers([Protocols.Filter]); - const routingInfo = createRoutingInfo(networkConfig, { - contentTopic: ContentTopic - }); - const decoder = createDecoder(ContentTopic, routingInfo); + const decoder = createDecoder(ContentTopic, RoutingInfo); const hasSubscribed = await waku.filter.subscribe( [decoder], messageCollector.callback @@ -101,7 +100,7 @@ describe("High Throughput Messaging", function () { contentTopic: ContentTopic, payload: utf8ToBytes(message) }), - routingInfo + RoutingInfo ); sent = true; @@ -113,7 +112,7 @@ describe("High Throughput Messaging", function () { messageCollector.verifyReceivedMessage(0, { expectedMessageText: message, expectedContentTopic: ContentTopic, - expectedPubsubTopic: routingInfo.pubsubTopic + expectedPubsubTopic: RoutingInfo.pubsubTopic }); } } catch (e: any) {