feat!: Introduce routing info concept

Concepts were being mixed up between the global network config (static vs auto sharding), which needs to be the same for all nodes in the network, individual node configuration (e.g. a relay node subscribing to a given shard), and the routing characteristics of a specific message (e.g. pubsub topic, shard).

This prevents proper configuration of nwaku post 0.36.0 because we now need to be deliberate about whether nwaku nodes are running with auto or static sharding.

The previous design also involved various back-and-forth conversions between shards, pubsub topics, etc.

With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded.
We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration.

Routing info abstracts pubsub topic, shard, and autosharding needs, which should lead to an easier tidy-up of the pubsub concept at a later stage.
This commit is contained in:
fryorcraken 2025-07-11 13:33:45 +10:00 committed by Arseniy Klempner
parent 36f6884d22
commit 54bc0cabdc
No known key found for this signature in database
GPG Key ID: 51653F18863BD24B
111 changed files with 2853 additions and 3211 deletions

View File

@ -63,7 +63,7 @@ describe("ConnectionManager", () => {
} as unknown as IWakuEventEmitter;
networkConfig = {
clusterId: 1,
clusterId: 2,
shards: [0, 1]
} as NetworkConfig;

View File

@ -1,11 +1,13 @@
import { type Peer, type PeerId, type Stream } from "@libp2p/interface";
import { MultiaddrInput } from "@multiformats/multiaddr";
import {
ClusterId,
ConnectionManagerOptions,
IConnectionManager,
IRelay,
IWakuEventEmitter,
NetworkConfig
NetworkConfig,
ShardId
} from "@waku/interfaces";
import { Libp2p } from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -66,6 +68,7 @@ export class ConnectionManager implements IConnectionManager {
this.keepAliveManager = new KeepAliveManager({
relay: options.relay,
libp2p: options.libp2p,
networkConfig: options.networkConfig,
options: {
pingKeepAlive: this.options.pingKeepAlive,
relayKeepAlive: this.options.relayKeepAlive
@ -194,4 +197,12 @@ export class ConnectionManager implements IConnectionManager {
): Promise<boolean> {
return this.shardReader.isPeerOnTopic(peerId, pubsubTopic);
}
public async isPeerOnShard(
peerId: PeerId,
clusterId: ClusterId,
shardId: ShardId
): Promise<boolean> {
return this.shardReader.isPeerOnShard(peerId, clusterId, shardId);
}
}

View File

@ -29,7 +29,7 @@ describe("Dialer", () => {
mockShardReader = {
hasShardInfo: sinon.stub().resolves(false),
isPeerOnNetwork: sinon.stub().resolves(true)
isPeerOnCluster: sinon.stub().resolves(true)
} as unknown as sinon.SinonStubbedInstance<ShardReader>;
mockOptions = {
@ -280,9 +280,9 @@ describe("Dialer", () => {
expect(dialStub.calledTwice).to.be.true;
});
it("should skip peer when not on same shard", async () => {
it("should skip peer when not on same cluster", async () => {
mockShardReader.hasShardInfo.resolves(true);
mockShardReader.isPeerOnNetwork.resolves(false);
mockShardReader.isPeerOnCluster.resolves(false);
const dialStub = libp2p.dial as sinon.SinonStub;
@ -290,12 +290,12 @@ describe("Dialer", () => {
expect(dialStub.called).to.be.false;
expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
});
it("should dial peer when on same shard", async () => {
mockShardReader.hasShardInfo.resolves(true);
mockShardReader.isPeerOnNetwork.resolves(true);
mockShardReader.isPeerOnCluster.resolves(true);
const dialStub = libp2p.dial as sinon.SinonStub;
dialStub.resolves();
@ -305,7 +305,7 @@ describe("Dialer", () => {
expect(dialStub.calledOnce).to.be.true;
expect(dialStub.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
});
it("should dial peer when no shard info available", async () => {
@ -319,7 +319,7 @@ describe("Dialer", () => {
expect(dialStub.calledOnce).to.be.true;
expect(dialStub.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnNetwork.called).to.be.false;
expect(mockShardReader.isPeerOnCluster.called).to.be.false;
});
it("should handle dial errors gracefully", async () => {
@ -468,7 +468,7 @@ describe("Dialer", () => {
it("should handle network check errors gracefully", async () => {
mockShardReader.hasShardInfo.resolves(true);
mockShardReader.isPeerOnNetwork.rejects(new Error("Network check error"));
mockShardReader.isPeerOnCluster.rejects(new Error("Network check error"));
const dialStub = libp2p.dial as sinon.SinonStub;
@ -476,7 +476,7 @@ describe("Dialer", () => {
expect(dialStub.called).to.be.false;
expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
});
});
@ -512,7 +512,7 @@ describe("Dialer", () => {
dialStub.resolves();
mockShardReader.hasShardInfo.withArgs(mockPeerId).resolves(true);
mockShardReader.isPeerOnNetwork.withArgs(mockPeerId).resolves(true);
mockShardReader.isPeerOnCluster.withArgs(mockPeerId).resolves(true);
mockShardReader.hasShardInfo.withArgs(mockPeerId2).resolves(false);

View File

@ -153,9 +153,9 @@ export class Dialer implements IDialer {
return false;
}
const isOnSameShard = await this.shardReader.isPeerOnNetwork(peerId);
if (!isOnSameShard) {
log.info(`Skipping peer ${peerId} - not on same shard`);
const isOnSameCluster = await this.shardReader.isPeerOnCluster(peerId);
if (!isOnSameCluster) {
log.info(`Skipping peer ${peerId} - not on same cluster`);
return true;
}

View File

@ -1,4 +1,5 @@
import type { PeerId } from "@libp2p/interface";
import { AutoSharding } from "@waku/interfaces";
import { expect } from "chai";
import sinon from "sinon";
@ -23,6 +24,11 @@ describe("KeepAliveManager", () => {
relayKeepAlive: 60
};
const defaultNetworkConfig: AutoSharding = {
clusterId: 0,
numShardsInCluster: 1
};
beforeEach(() => {
clock = sinon.useFakeTimers();
@ -61,6 +67,7 @@ describe("KeepAliveManager", () => {
it("should create KeepAliveManager with required options", () => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p
});
@ -70,6 +77,7 @@ describe("KeepAliveManager", () => {
it("should create KeepAliveManager with relay", () => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -82,6 +90,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p
});
});
@ -110,6 +119,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -158,6 +168,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -194,6 +205,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -225,6 +237,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p
});
keepAliveManager.start();
@ -244,6 +257,7 @@ describe("KeepAliveManager", () => {
keepAliveManager.stop();
keepAliveManager = new KeepAliveManager({
options: { pingKeepAlive: 0, relayKeepAlive: 0 },
networkConfig: defaultNetworkConfig,
libp2p
});
keepAliveManager.start();
@ -317,6 +331,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -337,6 +352,7 @@ describe("KeepAliveManager", () => {
keepAliveManager.stop();
keepAliveManager = new KeepAliveManager({
options: { pingKeepAlive: 30, relayKeepAlive: 0 },
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -355,6 +371,7 @@ describe("KeepAliveManager", () => {
keepAliveManager.stop();
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p
});
keepAliveManager.start();
@ -423,6 +440,7 @@ describe("KeepAliveManager", () => {
beforeEach(() => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -489,6 +507,7 @@ describe("KeepAliveManager", () => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay: emptyRelay
});
@ -506,6 +525,7 @@ describe("KeepAliveManager", () => {
it("should handle all zero keep alive options", () => {
keepAliveManager = new KeepAliveManager({
options: { pingKeepAlive: 0, relayKeepAlive: 0 },
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -525,6 +545,7 @@ describe("KeepAliveManager", () => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});
@ -544,6 +565,7 @@ describe("KeepAliveManager", () => {
it("should handle complete peer lifecycle", async () => {
keepAliveManager = new KeepAliveManager({
options: defaultOptions,
networkConfig: defaultNetworkConfig,
libp2p,
relay
});

View File

@ -1,6 +1,6 @@
import type { PeerId } from "@libp2p/interface";
import type { IEncoder, IRelay, Libp2p } from "@waku/interfaces";
import { Logger, pubsubTopicToSingleShardInfo } from "@waku/utils";
import type { IEncoder, IRelay, Libp2p, NetworkConfig } from "@waku/interfaces";
import { createRoutingInfo, Logger } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { createEncoder } from "../message/version_0.js";
@ -15,6 +15,7 @@ type KeepAliveOptions = {
type CreateKeepAliveManagerOptions = {
options: KeepAliveOptions;
networkConfig: NetworkConfig;
libp2p: Libp2p;
relay?: IRelay;
};
@ -26,6 +27,7 @@ interface IKeepAliveManager {
export class KeepAliveManager implements IKeepAliveManager {
private readonly relay?: IRelay;
private readonly networkConfig: NetworkConfig;
private readonly libp2p: Libp2p;
private readonly options: KeepAliveOptions;
@ -38,10 +40,12 @@ export class KeepAliveManager implements IKeepAliveManager {
public constructor({
options,
relay,
networkConfig,
libp2p
}: CreateKeepAliveManagerOptions) {
this.options = options;
this.relay = relay;
this.networkConfig = networkConfig;
this.libp2p = libp2p;
this.onPeerConnect = this.onPeerConnect.bind(this);
@ -163,8 +167,13 @@ export class KeepAliveManager implements IKeepAliveManager {
continue;
}
const routingInfo = createRoutingInfo(this.networkConfig, {
contentTopic: RelayPingContentTopic,
pubsubTopic: topic
});
const encoder = createEncoder({
pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(topic),
routingInfo: routingInfo,
contentTopic: RelayPingContentTopic,
ephemeral: true
});

View File

@ -1,9 +1,10 @@
import { PeerId } from "@libp2p/interface";
import {
AutoSharding,
DEFAULT_NUM_SHARDS,
NetworkConfig,
PubsubTopic,
ShardInfo,
SingleShardInfo
RelayShards
} from "@waku/interfaces";
import { contentTopicToShardIndex, encodeRelayShard } from "@waku/utils";
import { expect } from "chai";
@ -30,12 +31,12 @@ describe("ShardReader", function () {
const testClusterId = 3;
const testShardIndex = contentTopicToShardIndex(testContentTopic);
const testNetworkConfig: NetworkConfig = {
contentTopics: [testContentTopic],
clusterId: testClusterId
const testNetworkConfig: AutoSharding = {
clusterId: testClusterId,
numShardsInCluster: DEFAULT_NUM_SHARDS
};
const testShardInfo: ShardInfo = {
const testRelayShards: RelayShards = {
clusterId: testClusterId,
shards: [testShardIndex]
};
@ -64,10 +65,10 @@ describe("ShardReader", function () {
});
describe("constructor", function () {
it("should create ShardReader with contentTopics network config", function () {
const config: NetworkConfig = {
contentTopics: ["/test/1/waku-light-push/utf8"],
clusterId: 3
it("should create ShardReader with auto sharding network config", function () {
const config: AutoSharding = {
clusterId: 3,
numShardsInCluster: 10
};
const reader = new ShardReader({
@ -78,10 +79,9 @@ describe("ShardReader", function () {
expect(reader).to.be.instanceOf(ShardReader);
});
it("should create ShardReader with shards network config", function () {
it("should create ShardReader with static shards network config", function () {
const config: NetworkConfig = {
clusterId: 3,
shards: [1, 2, 3]
clusterId: 3
};
const reader = new ShardReader({
@ -94,22 +94,22 @@ describe("ShardReader", function () {
});
describe("isPeerOnNetwork", function () {
it("should return true when peer is on the same network", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
it("should return true when peer is on the same cluster", async function () {
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
mockPeerStore.get.resolves(mockPeer);
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.true;
sinon.assert.calledWith(mockPeerStore.get, testPeerId);
});
it("should return false when peer is on different cluster", async function () {
const differentClusterShardInfo: ShardInfo = {
const differentClusterShardInfo: RelayShards = {
clusterId: 5,
shards: [1, 2]
};
@ -120,13 +120,13 @@ describe("ShardReader", function () {
mockPeerStore.get.resolves(mockPeer);
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.false;
});
it("should return false when peer has no overlapping shards", async function () {
const noOverlapShardInfo: ShardInfo = {
it("should return true even if peer has no overlapping shards", async function () {
const noOverlapShardInfo: RelayShards = {
clusterId: testClusterId,
shards: [testShardIndex + 100, testShardIndex + 200] // Use different shards
};
@ -137,9 +137,9 @@ describe("ShardReader", function () {
mockPeerStore.get.resolves(mockPeer);
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.false;
expect(result).to.be.true;
});
it("should return false when peer has no shard info", async function () {
@ -149,7 +149,7 @@ describe("ShardReader", function () {
mockPeerStore.get.resolves(mockPeer);
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.false;
});
@ -157,7 +157,7 @@ describe("ShardReader", function () {
it("should return false when peer is not found", async function () {
mockPeerStore.get.rejects(new Error("Peer not found"));
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.false;
});
@ -165,66 +165,52 @@ describe("ShardReader", function () {
describe("isPeerOnShard", function () {
it("should return true when peer is on the specified shard", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
mockPeerStore.get.resolves(mockPeer);
const shard: SingleShardInfo = {
clusterId: testClusterId,
shard: testShardIndex
};
const result = await shardReader.isPeerOnShard(testPeerId, shard);
const result = await shardReader.isPeerOnShard(
testPeerId,
testClusterId,
testShardIndex
);
expect(result).to.be.true;
});
it("should return false when peer is on different cluster", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
mockPeerStore.get.resolves(mockPeer);
const shard: SingleShardInfo = {
clusterId: 5,
shard: testShardIndex
};
const result = await shardReader.isPeerOnShard(testPeerId, shard);
const result = await shardReader.isPeerOnShard(
testPeerId,
5,
testShardIndex
);
expect(result).to.be.false;
});
it("should return false when peer is not on the specified shard", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
mockPeerStore.get.resolves(mockPeer);
const shard: SingleShardInfo = {
clusterId: testClusterId,
shard: testShardIndex + 100
};
const result = await shardReader.isPeerOnShard(testPeerId, shard);
expect(result).to.be.false;
});
it("should return false when shard info is undefined", async function () {
const shard: SingleShardInfo = {
clusterId: testClusterId,
shard: undefined
};
const result = await shardReader.isPeerOnShard(testPeerId, shard);
const result = await shardReader.isPeerOnShard(
testPeerId,
testClusterId,
testShardIndex + 100
);
expect(result).to.be.false;
});
@ -232,12 +218,11 @@ describe("ShardReader", function () {
it("should return false when peer shard info is not found", async function () {
mockPeerStore.get.rejects(new Error("Peer not found"));
const shard: SingleShardInfo = {
clusterId: testClusterId,
shard: testShardIndex
};
const result = await shardReader.isPeerOnShard(testPeerId, shard);
const result = await shardReader.isPeerOnShard(
testPeerId,
testClusterId,
testShardIndex
);
expect(result).to.be.false;
});
@ -245,7 +230,7 @@ describe("ShardReader", function () {
describe("isPeerOnTopic", function () {
it("should return true when peer is on the pubsub topic shard", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
@ -260,7 +245,7 @@ describe("ShardReader", function () {
});
it("should return false when peer is not on the pubsub topic shard", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
@ -275,7 +260,7 @@ describe("ShardReader", function () {
});
it("should return false when pubsub topic parsing fails", async function () {
const shardInfoBytes = encodeRelayShard(testShardInfo);
const shardInfoBytes = encodeRelayShard(testRelayShards);
const mockPeer = {
metadata: new Map([["shardInfo", shardInfoBytes]])
};
@ -307,7 +292,7 @@ describe("ShardReader", function () {
it("should handle errors gracefully when getting peer info", async function () {
mockPeerStore.get.rejects(new Error("Network error"));
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.false;
});
@ -319,7 +304,7 @@ describe("ShardReader", function () {
mockPeerStore.get.resolves(mockPeer);
const result = await shardReader.isPeerOnNetwork(testPeerId);
const result = await shardReader.isPeerOnCluster(testPeerId);
expect(result).to.be.false;
});

View File

@ -1,13 +1,12 @@
import type { PeerId } from "@libp2p/interface";
import type {
ClusterId,
NetworkConfig,
PubsubTopic,
ShardInfo,
SingleShardInfo,
StaticSharding
RelayShards,
ShardId
} from "@waku/interfaces";
import {
contentTopicToShardIndex,
decodeRelayShard,
Logger,
pubsubTopicToSingleShardInfo
@ -23,8 +22,12 @@ type ShardReaderConstructorOptions = {
interface IShardReader {
hasShardInfo(id: PeerId): Promise<boolean>;
isPeerOnNetwork(id: PeerId): Promise<boolean>;
isPeerOnShard(id: PeerId, shard: SingleShardInfo): Promise<boolean>;
isPeerOnCluster(id: PeerId): Promise<boolean>;
isPeerOnShard(
id: PeerId,
clusterId: ClusterId,
shard: ShardId
): Promise<boolean>;
isPeerOnTopic(id: PeerId, pubsubTopic: PubsubTopic): Promise<boolean>;
}
@ -34,33 +37,26 @@ interface IShardReader {
export class ShardReader implements IShardReader {
private readonly libp2p: Libp2p;
private readonly staticShard: StaticSharding;
private readonly clusterId: ClusterId;
public constructor(options: ShardReaderConstructorOptions) {
this.libp2p = options.libp2p;
this.staticShard = this.getStaticShardFromNetworkConfig(
options.networkConfig
);
this.clusterId = options.networkConfig.clusterId;
}
public async isPeerOnNetwork(id: PeerId): Promise<boolean> {
const shardInfo = await this.getShardInfo(id);
public async isPeerOnCluster(id: PeerId): Promise<boolean> {
const peerRelayShards = await this.getRelayShards(id);
if (!shardInfo) {
if (!peerRelayShards) {
return false;
}
const clusterMatch = shardInfo.clusterId === this.staticShard.clusterId;
const shardOverlap = this.staticShard.shards.some((s) =>
shardInfo.shards.includes(s)
);
return clusterMatch && shardOverlap;
return peerRelayShards.clusterId === this.clusterId;
}
public async hasShardInfo(id: PeerId): Promise<boolean> {
const shardInfo = await this.getShardInfo(id);
const shardInfo = await this.getRelayShards(id);
return !!shardInfo;
}
@ -69,8 +65,8 @@ export class ShardReader implements IShardReader {
pubsubTopic: PubsubTopic
): Promise<boolean> {
try {
const shardInfo = pubsubTopicToSingleShardInfo(pubsubTopic);
return await this.isPeerOnShard(id, shardInfo);
const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic);
return await this.isPeerOnShard(id, clusterId, shard);
} catch (error) {
log.error(
`Error comparing pubsub topic ${pubsubTopic} with shard info for ${id}`,
@ -82,21 +78,25 @@ export class ShardReader implements IShardReader {
public async isPeerOnShard(
id: PeerId,
shard: SingleShardInfo
clusterId: ClusterId,
shard: ShardId
): Promise<boolean> {
const peerShardInfo = await this.getShardInfo(id);
if (!peerShardInfo || shard.shard === undefined) {
const peerShardInfo = await this.getRelayShards(id);
log.info(
`Checking if peer on same shard: this { clusterId: ${clusterId}, shardId: ${shard} },` +
`${id} { clusterId: ${peerShardInfo?.clusterId}, shards: ${peerShardInfo?.shards} }`
);
if (!peerShardInfo) {
return false;
}
return (
peerShardInfo.clusterId === shard.clusterId &&
peerShardInfo.shards.includes(shard.shard)
peerShardInfo.clusterId === clusterId &&
peerShardInfo.shards.includes(shard)
);
}
private async getShardInfo(id: PeerId): Promise<ShardInfo | undefined> {
private async getRelayShards(id: PeerId): Promise<RelayShards | undefined> {
try {
const peer = await this.libp2p.peerStore.get(id);
@ -106,29 +106,10 @@ export class ShardReader implements IShardReader {
return undefined;
}
const decodedShardInfo = decodeRelayShard(shardInfoBytes);
return decodedShardInfo;
return decodeRelayShard(shardInfoBytes);
} catch (error) {
log.error(`Error getting shard info for ${id}`, error);
return undefined;
}
}
private getStaticShardFromNetworkConfig(
networkConfig: NetworkConfig
): StaticSharding {
if ("shards" in networkConfig) {
return networkConfig;
}
const shards = networkConfig.contentTopics.map((topic) =>
contentTopicToShardIndex(topic)
);
return {
clusterId: networkConfig.clusterId!,
shards
};
}
}

View File

@ -8,8 +8,7 @@ import {
type ThisOrThat
} from "@waku/interfaces";
import { PushResponse } from "@waku/proto";
import { isMessageSizeUnderCap } from "@waku/utils";
import { Logger } from "@waku/utils";
import { isMessageSizeUnderCap, Logger } from "@waku/utils";
import all from "it-all";
import * as lp from "it-length-prefixed";
import { pipe } from "it-pipe";
@ -63,7 +62,10 @@ export class LightPushCore {
};
}
const query = PushRpc.createRequest(protoMessage, encoder.pubsubTopic);
const query = PushRpc.createRequest(
protoMessage,
encoder.routingInfo.pubsubTopic
);
return { query, error: null };
} catch (error) {
log.error("Failed to prepare push message", error);

View File

@ -1,30 +1,38 @@
import type { IProtoMessage } from "@waku/interfaces";
import { contentTopicToPubsubTopic } from "@waku/utils";
import type { AutoSharding, IProtoMessage } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import fc from "fast-check";
import { createDecoder, createEncoder, DecodedMessage } from "./version_0.js";
const contentTopic = "/js-waku/1/tests/bytes";
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
const testContentTopic = "/js-waku/1/tests/bytes";
const testNetworkConfig: AutoSharding = {
clusterId: 0,
numShardsInCluster: 8
};
const testRoutingInfo = createRoutingInfo(testNetworkConfig, {
contentTopic: testContentTopic
});
describe("Waku Message version 0", function () {
it("Round trip binary serialization", async function () {
await fc.assert(
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
const encoder = createEncoder({
contentTopic
contentTopic: testContentTopic,
routingInfo: testRoutingInfo
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic);
const decoder = createDecoder(testContentTopic, testRoutingInfo);
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubsubTopic,
testRoutingInfo.pubsubTopic,
protoResult!
)) as DecodedMessage;
expect(result.contentTopic).to.eq(contentTopic);
expect(result.pubsubTopic).to.eq(pubsubTopic);
expect(result.contentTopic).to.eq(testContentTopic);
expect(result.pubsubTopic).to.eq(testRoutingInfo.pubsubTopic);
expect(result.version).to.eq(0);
expect(result.ephemeral).to.be.false;
expect(result.payload).to.deep.eq(payload);
@ -37,14 +45,15 @@ describe("Waku Message version 0", function () {
await fc.assert(
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
ephemeral: true
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic);
const decoder = createDecoder(testContentTopic, testRoutingInfo);
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubsubTopic,
testRoutingInfo.pubsubTopic,
protoResult!
)) as DecodedMessage;
@ -68,15 +77,16 @@ describe("Waku Message version 0", function () {
};
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
ephemeral: true,
metaSetter
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic);
const decoder = createDecoder(testContentTopic, testRoutingInfo);
const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj(
pubsubTopic,
testRoutingInfo.pubsubTopic,
protoResult!
)) as DecodedMessage;
@ -99,54 +109,73 @@ describe("Waku Message version 0", function () {
describe("Ensures content topic is defined", () => {
it("Encoder throws on undefined content topic", () => {
const wrapper = function (): void {
createEncoder({ contentTopic: undefined as unknown as string });
createEncoder({
contentTopic: undefined as unknown as string,
routingInfo: testRoutingInfo
});
};
expect(wrapper).to.throw("Content topic must be specified");
expect(wrapper).to.throw(
"Routing Info must have the same content topic as the encoder"
);
});
it("Encoder throws on empty string content topic", () => {
const wrapper = function (): void {
createEncoder({ contentTopic: "" });
createEncoder({
contentTopic: "",
routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic: "" })
});
};
expect(wrapper).to.throw("Content topic must be specified");
expect(wrapper).to.throw("AutoSharding requires contentTopic");
});
it("Decoder throws on undefined content topic", () => {
const wrapper = function (): void {
createDecoder(undefined as unknown as string);
createDecoder(
undefined as unknown as string,
createRoutingInfo(testNetworkConfig, {
contentTopic: undefined as unknown as string
})
);
};
expect(wrapper).to.throw("Content topic must be specified");
expect(wrapper).to.throw("AutoSharding requires contentTopic");
});
it("Decoder throws on empty string content topic", () => {
const wrapper = function (): void {
createDecoder("");
createDecoder(
"",
createRoutingInfo(testNetworkConfig, { contentTopic: "" })
);
};
expect(wrapper).to.throw("Content topic must be specified");
expect(wrapper).to.throw("AutoSharding requires contentTopic");
});
});
describe("Sets sharding configuration correctly", () => {
it("uses static shard pubsub topic instead of autosharding when set", async () => {
// Create an encoder setup to use autosharding
const ContentTopic = "/waku/2/content/test.js";
const contentTopic = "/myapp/1/test/proto";
const autoshardingEncoder = createEncoder({
pubsubTopicShardInfo: { clusterId: 0 },
contentTopic: ContentTopic
contentTopic: contentTopic,
routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic })
});
// When autosharding is enabled, we expect the shard index to be 1
expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/1");
expect(autoshardingEncoder.routingInfo.pubsubTopic).to.be.eq(
"/waku/2/rs/0/0"
);
// Create an encoder setup to use static sharding with the same content topic
const singleShardInfo = { clusterId: 0, shard: 0 };
const staticshardingEncoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
contentTopic: contentTopic,
routingInfo: createRoutingInfo({ clusterId: 0 }, { shardId: 3 })
});
// When static sharding is enabled, we expect the shard index to be 0
expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0");
expect(staticshardingEncoder.routingInfo.pubsubTopic).to.be.eq(
"/waku/2/rs/0/3"
);
});
});

View File

@ -1,17 +1,14 @@
import type {
EncoderOptions,
IDecodedMessage,
IDecoder,
IEncoder,
IMessage,
IMetaSetter,
IProtoMessage,
IRateLimitProof,
PubsubTopic,
SingleShardInfo
IRateLimitProof
} from "@waku/interfaces";
import { proto_message as proto } from "@waku/proto";
import { determinePubsubTopic, Logger } from "@waku/utils";
import { isAutoShardingRoutingInfo, Logger, RoutingInfo } from "@waku/utils";
const log = new Logger("message:version-0");
const OneMillion = BigInt(1_000_000);
@ -67,11 +64,31 @@ export class DecodedMessage implements IDecodedMessage {
}
}
export type EncoderOptions = {
/**
* The routing information for messages to encode.
*/
routingInfo: RoutingInfo;
/** The content topic to set on outgoing messages. */
contentTopic: string;
/**
* An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes.
* @defaultValue `false`
*/
ephemeral?: boolean;
/**
* A function called when encoding messages to set the meta field.
* @param IProtoMessage The message encoded for wire, without the meta field.
* If encryption is used, `metaSetter` only accesses _encrypted_ payload.
*/
metaSetter?: IMetaSetter;
};
export class Encoder implements IEncoder {
public constructor(
public contentTopic: string,
public ephemeral: boolean = false,
public pubsubTopic: PubsubTopic,
public routingInfo: RoutingInfo,
public metaSetter?: IMetaSetter
) {
if (!contentTopic || contentTopic === "") {
@ -114,24 +131,22 @@ export class Encoder implements IEncoder {
* messages.
*/
export function createEncoder({
pubsubTopic,
pubsubTopicShardInfo,
contentTopic,
routingInfo,
ephemeral,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(
contentTopic,
ephemeral,
determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
metaSetter
);
if (isAutoShardingRoutingInfo(routingInfo)) {
if (routingInfo.contentTopic !== contentTopic)
throw "Routing Info must have the same content topic as the encoder";
}
return new Encoder(contentTopic, ephemeral, routingInfo, metaSetter);
}
export class Decoder implements IDecoder<IDecodedMessage> {
public constructor(
public pubsubTopic: PubsubTopic,
public contentTopic: string
public contentTopic: string,
public routingInfo: RoutingInfo
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
@ -182,13 +197,15 @@ export class Decoder implements IDecoder<IDecodedMessage> {
* messages.
*
* @param contentTopic The resulting decoder will only decode messages with this content topic.
* @param routingInfo
*/
export function createDecoder(
contentTopic: string,
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
routingInfo: RoutingInfo
): Decoder {
return new Decoder(
determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
contentTopic
);
if (isAutoShardingRoutingInfo(routingInfo)) {
if (routingInfo.contentTopic !== contentTopic)
throw "Routing Info must have the same content topic as the encoder";
}
return new Decoder(contentTopic, routingInfo);
}

View File

@ -7,7 +7,7 @@ import {
type MetadataQueryResult,
type PeerIdStr,
ProtocolError,
type ShardInfo
type RelayShards
} from "@waku/interfaces";
import { proto_metadata } from "@waku/proto";
import { encodeRelayShard, Logger } from "@waku/utils";
@ -25,7 +25,7 @@ export const MetadataCodec = "/vac/waku/metadata/1.0.0";
class Metadata implements IMetadata {
private readonly streamManager: StreamManager;
private readonly libp2pComponents: Libp2pComponents;
protected handshakesConfirmed: Map<PeerIdStr, ShardInfo> = new Map();
protected handshakesConfirmed: Map<PeerIdStr, RelayShards> = new Map();
public readonly multicodec = MetadataCodec;
@ -148,7 +148,7 @@ class Metadata implements IMetadata {
});
const response = proto_metadata.WakuMetadataResponse.decode(
bytes
) as ShardInfo;
) as RelayShards;
if (!response) {
log.error("Error decoding metadata response");
@ -166,16 +166,16 @@ class Metadata implements IMetadata {
private async savePeerShardInfo(
peerId: PeerId,
shardInfo: ShardInfo
relayShards: RelayShards
): Promise<void> {
// add or update the shardInfo to peer store
// add or update the relayShards to peer store
await this.libp2pComponents.peerStore.merge(peerId, {
metadata: {
shardInfo: encodeRelayShard(shardInfo)
shardInfo: encodeRelayShard(relayShards)
}
});
this.handshakesConfirmed.set(peerId.toString(), shardInfo);
this.handshakesConfirmed.set(peerId.toString(), relayShards);
}
}

View File

@ -1,11 +1,17 @@
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import { StoreQueryRequest } from "./rpc.js";
const routingInfo = createRoutingInfo(
{ clusterId: 0 },
{ pubsubTopic: "/waku/2/rs/0/0" }
);
describe("StoreQueryRequest validation", () => {
it("accepts valid content-filtered query", () => {
const request = StoreQueryRequest.create({
pubsubTopic: "/waku/2/default-waku/proto",
routingInfo,
contentTopics: ["/test/1/content/proto"],
includeData: true,
paginationForward: true
@ -16,7 +22,7 @@ describe("StoreQueryRequest validation", () => {
it("rejects content-filtered query with only pubsubTopic", () => {
expect(() =>
StoreQueryRequest.create({
pubsubTopic: "/waku/2/default-waku/proto",
routingInfo,
contentTopics: [],
includeData: true,
paginationForward: true
@ -26,22 +32,9 @@ describe("StoreQueryRequest validation", () => {
);
});
it("rejects content-filtered query with only contentTopics", () => {
expect(() =>
StoreQueryRequest.create({
pubsubTopic: "",
contentTopics: ["/test/1/content/proto"],
includeData: true,
paginationForward: true
})
).to.throw(
"Both pubsubTopic and contentTopics must be set together for content-filtered queries"
);
});
it("accepts valid message hash query", () => {
const request = StoreQueryRequest.create({
pubsubTopic: "",
routingInfo,
contentTopics: [],
messageHashes: [new Uint8Array([1, 2, 3, 4])],
includeData: true,
@ -54,7 +47,7 @@ describe("StoreQueryRequest validation", () => {
expect(() =>
StoreQueryRequest.create({
messageHashes: [new Uint8Array([1, 2, 3, 4])],
pubsubTopic: "/waku/2/default-waku/proto",
routingInfo,
contentTopics: ["/test/1/content/proto"],
includeData: true,
paginationForward: true
@ -67,7 +60,7 @@ describe("StoreQueryRequest validation", () => {
it("rejects hash query with time filter", () => {
expect(() =>
StoreQueryRequest.create({
pubsubTopic: "",
routingInfo,
contentTopics: [],
messageHashes: [new Uint8Array([1, 2, 3, 4])],
timeStart: new Date(),
@ -81,7 +74,7 @@ describe("StoreQueryRequest validation", () => {
it("accepts time-filtered query with content filter", () => {
const request = StoreQueryRequest.create({
pubsubTopic: "/waku/2/default-waku/proto",
routingInfo,
contentTopics: ["/test/1/content/proto"],
timeStart: new Date(Date.now() - 3600000),
timeEnd: new Date(),

View File

@ -42,9 +42,9 @@ export class StoreQueryRequest {
}
} else {
if (
(params.pubsubTopic &&
(params.routingInfo &&
(!params.contentTopics || params.contentTopics.length === 0)) ||
(!params.pubsubTopic &&
(!params.routingInfo &&
params.contentTopics &&
params.contentTopics.length > 0)
) {

View File

@ -76,7 +76,7 @@ export class StoreCore {
log.info("Sending store query request:", {
hasMessageHashes: !!queryOpts.messageHashes?.length,
messageHashCount: queryOpts.messageHashes?.length,
pubsubTopic: queryOpts.pubsubTopic,
routingInfo: queryOpts.routingInfo,
contentTopics: queryOpts.contentTopics
});

View File

@ -10,7 +10,7 @@ import type {
import {
type Libp2pComponents,
type PeerExchangeQueryResult,
ShardInfo,
type RelayShards,
Tags
} from "@waku/interfaces";
import { decodeRelayShard, encodeRelayShard, Logger } from "@waku/utils";
@ -279,7 +279,7 @@ export class PeerExchangeDiscovery
private async checkPeerInfoDiff(
peerInfo: PeerInfo,
shardInfo?: ShardInfo
shardInfo?: RelayShards
): Promise<{ hasMultiaddrDiff: boolean; hasShardDiff: boolean }> {
const { id: peerId } = peerInfo;
const peer = await this.components.peerStore.get(peerId);

View File

@ -5,8 +5,8 @@ import type {
ENRValue,
IEnr,
NodeId,
SequenceNumber,
ShardInfo
RelayShards,
SequenceNumber
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -64,7 +64,7 @@ export class ENR extends RawEnr implements IEnr {
protocol: TransportProtocol | TransportProtocolPerIpVersion
) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this);
public get shardInfo(): ShardInfo | undefined {
public get shardInfo(): RelayShards | undefined {
if (this.rs && this.rsv) {
log.warn("ENR contains both `rs` and `rsv` fields.");
}

View File

@ -6,8 +6,8 @@ import {
import type {
ENRKey,
ENRValue,
RelayShards,
SequenceNumber,
ShardInfo,
Waku2
} from "@waku/interfaces";
import { decodeRelayShard } from "@waku/utils";
@ -52,13 +52,13 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
}
}
public get rs(): ShardInfo | undefined {
public get rs(): RelayShards | undefined {
const rs = this.get("rs");
if (!rs) return undefined;
return decodeRelayShard(rs);
}
public get rsv(): ShardInfo | undefined {
public get rsv(): RelayShards | undefined {
const rsv = this.get("rsv");
if (!rsv) return undefined;
return decodeRelayShard(rsv);

View File

@ -1,4 +1,4 @@
import type { ShardInfo } from "./sharding";
import type { AutoSharding } from "./sharding";
/**
* The default cluster ID for The Waku Network
@ -11,11 +11,9 @@ export const DEFAULT_CLUSTER_ID = 1;
export const DEFAULT_NUM_SHARDS = 8;
/**
* DefaultShardInfo is default configuration for The Waku Network.
* DefaultNetworkConfig is default configuration for The Waku Network.
*/
export const DefaultShardInfo: ShardInfo = {
export const DefaultNetworkConfig: AutoSharding = {
clusterId: DEFAULT_CLUSTER_ID,
shards: [0, 1, 2, 3, 4, 5, 6, 7, 8]
numShardsInCluster: DEFAULT_NUM_SHARDS
};
export const DefaultNetworkConfig = DefaultShardInfo;

View File

@ -2,7 +2,7 @@ import type { PeerId } from "@libp2p/interface";
import type { PeerInfo } from "@libp2p/interface";
import type { Multiaddr } from "@multiformats/multiaddr";
import { ShardInfo } from "./sharding.js";
import { RelayShards } from "./sharding.js";
export type ENRKey = string;
export type ENRValue = Uint8Array;
@ -36,7 +36,7 @@ export interface IEnr extends Map<ENRKey, ENRValue> {
multiaddrs?: Multiaddr[];
waku2?: Waku2;
peerInfo: PeerInfo | undefined;
shardInfo?: ShardInfo;
shardInfo?: RelayShards;
/**
* @deprecated: use { @link IEnr.peerInfo } instead.

View File

@ -1,13 +1,5 @@
import type { ContentTopic, PubsubTopic } from "./misc.js";
export interface SingleShardInfo {
clusterId: number;
/**
* TODO: make shard required
* Specifying this field indicates to the encoder/decoder that static sharding must be used.
*/
shard?: number;
}
import type { IRoutingInfo } from "./sharding.js";
export interface IRateLimitProof {
proof: Uint8Array;
@ -79,38 +71,17 @@ export interface IMetaSetter {
(message: IProtoMessage & { meta: undefined }): Uint8Array;
}
export interface EncoderOptions {
/**
* @deprecated
*/
pubsubTopic?: PubsubTopic;
pubsubTopicShardInfo?: SingleShardInfo;
/** The content topic to set on outgoing messages. */
contentTopic: string;
/**
* An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes.
* @defaultValue `false`
*/
ephemeral?: boolean;
/**
* A function called when encoding messages to set the meta field.
* @param IProtoMessage The message encoded for wire, without the meta field.
* If encryption is used, `metaSetter` only accesses _encrypted_ payload.
*/
metaSetter?: IMetaSetter;
}
export interface IEncoder {
pubsubTopic: PubsubTopic;
contentTopic: string;
ephemeral: boolean;
routingInfo: IRoutingInfo;
toWire: (message: IMessage) => Promise<Uint8Array | undefined>;
toProtoObj: (message: IMessage) => Promise<IProtoMessage | undefined>;
}
export interface IDecoder<T extends IDecodedMessage> {
pubsubTopic: PubsubTopic;
contentTopic: string;
routingInfo: IRoutingInfo;
fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>;
fromProtoObj: (
pubsubTopic: string,

View File

@ -1,9 +1,9 @@
import type { PeerId } from "@libp2p/interface";
import { ThisOrThat } from "./misc.js";
import type { ClusterId, ShardInfo } from "./sharding.js";
import type { ClusterId, RelayShards } from "./sharding.js";
export type MetadataQueryResult = ThisOrThat<"shardInfo", ShardInfo>;
export type MetadataQueryResult = ThisOrThat<"shardInfo", RelayShards>;
export interface IMetadata {
readonly multicodec: string;

View File

@ -6,7 +6,7 @@ import type { CreateLibp2pOptions } from "./libp2p.js";
import type { LightPushProtocolOptions } from "./light_push.js";
import type { IDecodedMessage } from "./message.js";
import type { ThisAndThat, ThisOrThat } from "./misc.js";
import type { AutoSharding, StaticSharding } from "./sharding.js";
import { NetworkConfig } from "./sharding.js";
import type { StoreProtocolOptions } from "./store.js";
export enum Protocols {
@ -16,8 +16,6 @@ export enum Protocols {
Filter = "filter"
}
export type NetworkConfig = StaticSharding | AutoSharding;
export type CreateNodeOptions = {
/**
* Set the user agent string to be used in identification of the node.

View File

@ -1,6 +1,12 @@
export type ShardInfo = {
clusterId: number;
shards: number[];
/**
* Configuration for a Waku network. All nodes in a given network/cluster
* should have the same configuration.
*/
export type NetworkConfig = StaticSharding | AutoSharding;
export type RelayShards = {
clusterId: ClusterId;
shards: ShardId[];
};
export type ContentTopicInfo = {
@ -8,6 +14,36 @@ export type ContentTopicInfo = {
contentTopics: string[];
};
export type StaticSharding = ShardInfo;
export type AutoSharding = ContentTopicInfo;
export type StaticSharding = {
clusterId: ClusterId;
};
export type AutoSharding = {
clusterId: ClusterId;
numShardsInCluster: number;
};
export type ClusterId = number;
export type ShardId = number;
/**
* Routing Information for a given message.
*/
export interface IRoutingInfoAutoSharding {
pubsubTopic: string;
shardId: ShardId;
networkConfig: AutoSharding;
contentTopic: string;
isAutoSharding(): boolean;
isStaticSharding(): boolean;
}
export interface IRoutingInfoStaticSharding {
pubsubTopic: string;
shardId: ShardId;
networkConfig: StaticSharding;
isAutoSharding(): boolean;
isStaticSharding(): boolean;
}
export type IRoutingInfo =
| IRoutingInfoAutoSharding
| IRoutingInfoStaticSharding;

View File

@ -1,4 +1,5 @@
import type { IDecodedMessage, IDecoder } from "./message.js";
import { IRoutingInfo } from "./sharding.js";
export type StoreCursor = Uint8Array;
@ -15,10 +16,10 @@ export type QueryRequestParams = {
includeData: boolean;
/**
* The pubsub topic to query. This field is mandatory.
* The query will only return messages that were published on this specific pubsub topic.
* The routing information to query. This field is mandatory.
* The query will only return messages that were published on this specific route (cluster and shard).
*/
pubsubTopic: string;
routingInfo: IRoutingInfo;
/**
* The content topics to filter the messages.

View File

@ -13,21 +13,12 @@ import type { ILightPush } from "./light_push.js";
import { IDecodedMessage, IDecoder, IEncoder } from "./message.js";
import type { Protocols } from "./protocols.js";
import type { IRelay } from "./relay.js";
import type { ShardId } from "./sharding.js";
import type { IStore } from "./store.js";
type AutoShardSingle = {
clusterId: number;
shardsUnderCluster: number;
};
type StaticShardSingle = {
clusterId: number;
shard: number;
};
export type CreateDecoderParams = {
contentTopic: string;
shardInfo?: AutoShardSingle | StaticShardSingle;
shardId?: ShardId;
};
export type CreateEncoderParams = CreateDecoderParams & {

View File

@ -1,13 +1,19 @@
import { IProtoMessage } from "@waku/interfaces";
import { contentTopicToPubsubTopic } from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import fc from "fast-check";
import { getPublicKey } from "./crypto/index.js";
import { createDecoder, createEncoder } from "./ecies.js";
const contentTopic = "/js-waku/1/tests/bytes";
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
const testContentTopic = "/js-waku/1/tests/bytes";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 14
},
{ contentTopic: testContentTopic }
);
describe("Ecies Encryption", function () {
this.timeout(20000);
@ -20,19 +26,27 @@ describe("Ecies Encryption", function () {
const publicKey = getPublicKey(privateKey);
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
publicKey
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic, privateKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
privateKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";
expect(result.contentTopic).to.equal(contentTopic);
expect(result.pubsubTopic).to.equal(pubsubTopic);
expect(result.contentTopic).to.equal(testContentTopic);
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
expect(result.version).to.equal(1);
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
@ -56,20 +70,28 @@ describe("Ecies Encryption", function () {
const bobPublicKey = getPublicKey(bobPrivateKey);
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
publicKey: bobPublicKey,
sigPrivKey: alicePrivateKey
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic, bobPrivateKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
bobPrivateKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";
expect(result.contentTopic).to.equal(contentTopic);
expect(result.pubsubTopic).to.equal(pubsubTopic);
expect(result.contentTopic).to.equal(testContentTopic);
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
expect(result.version).to.equal(1);
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
@ -97,16 +119,24 @@ describe("Ecies Encryption", function () {
};
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
publicKey,
metaSetter
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic, privateKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
privateKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";
const expectedMeta = metaSetter({
@ -131,6 +161,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void {
createEncoder({
contentTopic: undefined as unknown as string,
routingInfo: testRoutingInfo,
publicKey: new Uint8Array()
});
};
@ -139,21 +170,29 @@ describe("Ensures content topic is defined", () => {
});
it("Encoder throws on empty string content topic", () => {
const wrapper = function (): void {
createEncoder({ contentTopic: "", publicKey: new Uint8Array() });
createEncoder({
contentTopic: "",
routingInfo: testRoutingInfo,
publicKey: new Uint8Array()
});
};
expect(wrapper).to.throw("Content topic must be specified");
});
it("Decoder throws on undefined content topic", () => {
const wrapper = function (): void {
createDecoder(undefined as unknown as string, new Uint8Array());
createDecoder(
undefined as unknown as string,
testRoutingInfo,
new Uint8Array()
);
};
expect(wrapper).to.throw("Content topic must be specified");
});
it("Decoder throws on empty string content topic", () => {
const wrapper = function (): void {
createDecoder("", new Uint8Array());
createDecoder("", testRoutingInfo, new Uint8Array());
};
expect(wrapper).to.throw("Content topic must be specified");

View File

@ -1,17 +1,14 @@
import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0";
import {
type EncoderOptions as BaseEncoderOptions,
type IDecoder,
type IEncoder,
type IEncryptedMessage,
type IMessage,
type IMetaSetter,
type IProtoMessage,
type PubsubTopic,
type SingleShardInfo
type IProtoMessage
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { determinePubsubTopic, Logger } from "@waku/utils";
import { Logger, RoutingInfo } from "@waku/utils";
import { generatePrivateKey } from "./crypto/utils.js";
import { DecodedMessage } from "./decoded_message.js";
@ -35,8 +32,8 @@ const log = new Logger("message-encryption:ecies");
class Encoder implements IEncoder {
public constructor(
public pubsubTopic: PubsubTopic,
public contentTopic: string,
public routingInfo: RoutingInfo,
private publicKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
@ -81,11 +78,24 @@ class Encoder implements IEncoder {
}
}
export interface EncoderOptions extends BaseEncoderOptions {
export interface EncoderOptions {
/**
* @deprecated
* The routing information for messages to encode.
*/
pubsubTopic?: PubsubTopic;
routingInfo: RoutingInfo;
/** The content topic to set on outgoing messages. */
contentTopic: string;
/**
* An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes.
* @defaultValue `false`
*/
ephemeral?: boolean;
/**
* A function called when encoding messages to set the meta field.
* @param IProtoMessage The message encoded for wire, without the meta field.
* If encryption is used, `metaSetter` only accesses _encrypted_ payload.
*/
metaSetter?: IMetaSetter;
/** The public key to encrypt the payload for. */
publicKey: Uint8Array;
/** An optional private key to be used to sign the payload before encryption. */
@ -105,17 +115,16 @@ export interface EncoderOptions extends BaseEncoderOptions {
* in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/).
*/
export function createEncoder({
pubsubTopic,
pubsubTopicShardInfo,
contentTopic,
routingInfo,
publicKey,
sigPrivKey,
ephemeral = false,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(
determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
contentTopic,
routingInfo,
publicKey,
sigPrivKey,
ephemeral,
@ -125,11 +134,11 @@ export function createEncoder({
class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
public constructor(
pubsubTopic: PubsubTopic,
contentTopic: string,
routingInfo: RoutingInfo,
private privateKey: Uint8Array
) {
super(pubsubTopic, contentTopic);
super(contentTopic, routingInfo);
}
public async fromProtoObj(
@ -201,12 +210,8 @@ class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
*/
export function createDecoder(
contentTopic: string,
privateKey: Uint8Array,
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
routingInfo: RoutingInfo,
privateKey: Uint8Array
): Decoder {
return new Decoder(
determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
contentTopic,
privateKey
);
return new Decoder(contentTopic, routingInfo, privateKey);
}

View File

@ -1,13 +1,19 @@
import { IProtoMessage } from "@waku/interfaces";
import { contentTopicToPubsubTopic } from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import fc from "fast-check";
import { getPublicKey } from "./crypto/index.js";
import { createDecoder, createEncoder } from "./symmetric.js";
const contentTopic = "/js-waku/1/tests/bytes";
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
const testContentTopic = "/js-waku/1/tests/bytes";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 14
},
{ contentTopic: testContentTopic }
);
describe("Symmetric Encryption", function () {
it("Round trip binary encryption [symmetric, no signature]", async function () {
@ -17,19 +23,27 @@ describe("Symmetric Encryption", function () {
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
async (payload, symKey) => {
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
symKey
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic, symKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
symKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";
expect(result.contentTopic).to.equal(contentTopic);
expect(result.pubsubTopic).to.equal(pubsubTopic);
expect(result.contentTopic).to.equal(testContentTopic);
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
expect(result.version).to.equal(1);
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
@ -50,20 +64,28 @@ describe("Symmetric Encryption", function () {
const sigPubKey = getPublicKey(sigPrivKey);
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
symKey,
sigPrivKey
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic, symKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
symKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";
expect(result.contentTopic).to.equal(contentTopic);
expect(result.pubsubTopic).to.equal(pubsubTopic);
expect(result.contentTopic).to.equal(testContentTopic);
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
expect(result.version).to.equal(1);
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
@ -90,16 +112,24 @@ describe("Symmetric Encryption", function () {
};
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
symKey,
metaSetter
});
const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic, symKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
symKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";
const expectedMeta = metaSetter({
@ -124,6 +154,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void {
createEncoder({
contentTopic: undefined as unknown as string,
routingInfo: testRoutingInfo,
symKey: new Uint8Array()
});
};
@ -132,21 +163,29 @@ describe("Ensures content topic is defined", () => {
});
it("Encoder throws on empty string content topic", () => {
const wrapper = function (): void {
createEncoder({ contentTopic: "", symKey: new Uint8Array() });
createEncoder({
contentTopic: "",
routingInfo: testRoutingInfo,
symKey: new Uint8Array()
});
};
expect(wrapper).to.throw("Content topic must be specified");
});
it("Decoder throws on undefined content topic", () => {
const wrapper = function (): void {
createDecoder(undefined as unknown as string, new Uint8Array());
createDecoder(
undefined as unknown as string,
testRoutingInfo,
new Uint8Array()
);
};
expect(wrapper).to.throw("Content topic must be specified");
});
it("Decoder throws on empty string content topic", () => {
const wrapper = function (): void {
createDecoder("", new Uint8Array());
createDecoder("", testRoutingInfo, new Uint8Array());
};
expect(wrapper).to.throw("Content topic must be specified");

View File

@ -1,17 +1,15 @@
import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0";
import type {
EncoderOptions as BaseEncoderOptions,
IDecoder,
IEncoder,
IEncryptedMessage,
IMessage,
IMetaSetter,
IProtoMessage,
PubsubTopic,
SingleShardInfo
IRoutingInfo
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { determinePubsubTopic, Logger } from "@waku/utils";
import { Logger, RoutingInfo } from "@waku/utils";
import { generateSymmetricKey } from "./crypto/utils.js";
import { DecodedMessage } from "./decoded_message.js";
@ -35,8 +33,8 @@ const log = new Logger("message-encryption:symmetric");
class Encoder implements IEncoder {
public constructor(
public pubsubTopic: PubsubTopic,
public contentTopic: string,
public routingInfo: IRoutingInfo,
private symKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
@ -81,7 +79,24 @@ class Encoder implements IEncoder {
}
}
export interface EncoderOptions extends BaseEncoderOptions {
export interface EncoderOptions {
/**
* The routing information for messages to encode.
*/
routingInfo: RoutingInfo;
/** The content topic to set on outgoing messages. */
contentTopic: string;
/**
* An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes.
* @defaultValue `false`
*/
ephemeral?: boolean;
/**
* A function called when encoding messages to set the meta field.
* @param IProtoMessage The message encoded for wire, without the meta field.
* If encryption is used, `metaSetter` only accesses _encrypted_ payload.
*/
metaSetter?: IMetaSetter;
/** The symmetric key to encrypt the payload with. */
symKey: Uint8Array;
/** An optional private key to be used to sign the payload before encryption. */
@ -101,17 +116,16 @@ export interface EncoderOptions extends BaseEncoderOptions {
* in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/).
*/
export function createEncoder({
pubsubTopic,
pubsubTopicShardInfo,
contentTopic,
routingInfo,
symKey,
sigPrivKey,
ephemeral = false,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(
determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
contentTopic,
routingInfo,
symKey,
sigPrivKey,
ephemeral,
@ -121,11 +135,11 @@ export function createEncoder({
class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
public constructor(
pubsubTopic: PubsubTopic,
contentTopic: string,
routingInfo: RoutingInfo,
private symKey: Uint8Array
) {
super(pubsubTopic, contentTopic);
super(contentTopic, routingInfo);
}
public async fromProtoObj(
@ -193,16 +207,13 @@ class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
* decode incoming messages.
*
* @param contentTopic The resulting decoder will only decode messages with this content topic.
* @param routingInfo Routing information, depends on the network config (static vs auto sharding)
* @param symKey The symmetric key used to decrypt the message.
*/
export function createDecoder(
contentTopic: string,
symKey: Uint8Array,
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
routingInfo: RoutingInfo,
symKey: Uint8Array
): Decoder {
return new Decoder(
determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
contentTopic,
symKey
);
return new Decoder(contentTopic, routingInfo, symKey);
}

View File

@ -1,7 +1,5 @@
import type { CreateNodeOptions, RelayNode } from "@waku/interfaces";
import { DefaultNetworkConfig } from "@waku/interfaces";
import { CreateNodeOptions, RelayNode } from "@waku/interfaces";
import { createLibp2pAndUpdateOptions, WakuNode } from "@waku/sdk";
import { derivePubsubTopicsFromNetworkConfig } from "@waku/utils";
import { Relay, RelayCreateOptions, wakuGossipSub } from "./relay.js";
@ -16,7 +14,7 @@ import { Relay, RelayCreateOptions, wakuGossipSub } from "./relay.js";
* or use this function with caution.
*/
export async function createRelayNode(
options: CreateNodeOptions & Partial<RelayCreateOptions>
options: CreateNodeOptions & RelayCreateOptions
): Promise<RelayNode> {
options = {
...options,
@ -29,9 +27,9 @@ export async function createRelayNode(
};
const libp2p = await createLibp2pAndUpdateOptions(options);
const pubsubTopics = derivePubsubTopicsFromNetworkConfig(
options.networkConfig ?? DefaultNetworkConfig
);
const pubsubTopics = options.routingInfos.map((ri) => ri.pubsubTopic);
const relay = new Relay({
pubsubTopics,
libp2p

View File

@ -3,14 +3,21 @@ import { TopicValidatorResult } from "@libp2p/interface";
import type { UnsignedMessage } from "@libp2p/interface";
import { peerIdFromPrivateKey } from "@libp2p/peer-id";
import { createEncoder } from "@waku/core";
import { determinePubsubTopic } from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import fc from "fast-check";
import { messageValidator } from "./message_validator.js";
const TestContentTopic = "/app/1/topic/utf8";
const TestPubsubTopic = determinePubsubTopic(TestContentTopic);
const testContentTopic = "/app/1/topic/utf8";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 8
},
{ contentTopic: testContentTopic }
);
const testPubsubTopic = testRoutingInfo.pubsubTopic;
describe("Message Validator", () => {
it("Accepts a valid Waku Message", async () => {
@ -20,14 +27,14 @@ describe("Message Validator", () => {
const peerId = peerIdFromPrivateKey(privateKey);
const encoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
contentTopic: testContentTopic,
routingInfo: testRoutingInfo
});
const bytes = await encoder.toWire({ payload });
const message: UnsignedMessage = {
type: "unsigned",
topic: TestPubsubTopic,
topic: testPubsubTopic,
data: bytes
};
@ -46,7 +53,7 @@ describe("Message Validator", () => {
const message: UnsignedMessage = {
type: "unsigned",
topic: TestPubsubTopic,
topic: testPubsubTopic,
data
};

View File

@ -22,14 +22,14 @@ import {
PubsubTopic,
SDKProtocolResult
} from "@waku/interfaces";
import { isWireSizeUnderCap, toAsyncIterator } from "@waku/utils";
import { isWireSizeUnderCap, RoutingInfo, toAsyncIterator } from "@waku/utils";
import { pushOrInitMapSet } from "@waku/utils";
import { Logger } from "@waku/utils";
import { pEvent } from "p-event";
import { RelayCodecs } from "./constants.js";
import { messageValidator } from "./message_validator.js";
import { TopicOnlyDecoder } from "./topic_only_message.js";
import { ContentTopicOnlyDecoder } from "./topic_only_message.js";
const log = new Logger("relay");
@ -38,7 +38,9 @@ export type Observer<T extends IDecodedMessage> = {
callback: Callback<T>;
};
export type RelayCreateOptions = CreateNodeOptions & GossipsubOpts;
export type RelayCreateOptions = CreateNodeOptions & {
routingInfos: RoutingInfo[];
} & Partial<GossipsubOpts>;
export type ContentTopic = string;
type ActiveSubscriptions = Map<PubsubTopic, ContentTopic[]>;
@ -53,7 +55,7 @@ type RelayConstructorParams = {
* Throws if libp2p.pubsub does not support Waku Relay
*/
export class Relay implements IRelay {
public readonly pubsubTopics: Set<PubsubTopic>;
public pubsubTopics: Set<PubsubTopic>;
private defaultDecoder: IDecoder<IDecodedMessage>;
public static multicodec: string = RelayCodecs[0];
@ -73,6 +75,7 @@ export class Relay implements IRelay {
}
this.gossipSub = params.libp2p.services.pubsub as GossipSub;
this.pubsubTopics = new Set(params.pubsubTopics);
if (this.gossipSub.isStarted()) {
@ -82,7 +85,7 @@ export class Relay implements IRelay {
this.observers = new Map();
// TODO: User might want to decide what decoder should be used (e.g. for RLN)
this.defaultDecoder = new TopicOnlyDecoder(params.pubsubTopics[0]);
this.defaultDecoder = new ContentTopicOnlyDecoder();
}
/**
@ -124,7 +127,7 @@ export class Relay implements IRelay {
encoder: IEncoder,
message: IMessage
): Promise<SDKProtocolResult> {
const { pubsubTopic } = encoder;
const { pubsubTopic } = encoder.routingInfo;
if (!this.pubsubTopics.has(pubsubTopic)) {
log.error("Failed to send waku relay: topic not configured");
return {
@ -176,7 +179,7 @@ export class Relay implements IRelay {
const observers: Array<[PubsubTopic, Observer<T>]> = [];
for (const decoder of Array.isArray(decoders) ? decoders : [decoders]) {
const { pubsubTopic } = decoder;
const { pubsubTopic } = decoder.routingInfo;
const ctObs: Map<ContentTopic, Set<Observer<T>>> = this.observers.get(
pubsubTopic
) ?? new Map();
@ -240,8 +243,9 @@ export class Relay implements IRelay {
pubsubTopic: string,
bytes: Uint8Array
): Promise<void> {
const topicOnlyMsg = await this.defaultDecoder.fromWireToProtoObj(bytes);
if (!topicOnlyMsg || !topicOnlyMsg.contentTopic) {
const contentTopicOnlyMsg =
await this.defaultDecoder.fromWireToProtoObj(bytes);
if (!contentTopicOnlyMsg || !contentTopicOnlyMsg.contentTopic) {
log.warn("Message does not have a content topic, skipping");
return;
}
@ -253,9 +257,9 @@ export class Relay implements IRelay {
}
// Retrieve the set of observers for the given contentTopic
const observers = contentTopicMap.get(topicOnlyMsg.contentTopic) as Set<
Observer<T>
>;
const observers = contentTopicMap.get(
contentTopicOnlyMsg.contentTopic
) as Set<Observer<T>>;
if (!observers) {
return;
}
@ -277,7 +281,7 @@ export class Relay implements IRelay {
} else {
log.error(
"Failed to decode messages on",
topicOnlyMsg.contentTopic
contentTopicOnlyMsg.contentTopic
);
}
} catch (error) {

View File

@ -1,15 +1,18 @@
import { message } from "@waku/core";
import type {
IDecoder,
IProtoMessage,
ITopicOnlyMessage,
PubsubTopic
IRoutingInfo,
ITopicOnlyMessage
} from "@waku/interfaces";
import { TopicOnlyMessage as ProtoTopicOnlyMessage } from "@waku/proto";
export class TopicOnlyMessage implements ITopicOnlyMessage {
public version = message.version_0.Version;
public payload: Uint8Array = new Uint8Array();
public get version(): number {
throw "Only content topic can be accessed on this message";
}
public get payload(): Uint8Array {
throw "Only content topic can be accessed on this message";
}
public rateLimitProof: undefined;
public timestamp: undefined;
public meta: undefined;
@ -26,11 +29,16 @@ export class TopicOnlyMessage implements ITopicOnlyMessage {
}
// This decoder is used only for reading `contentTopic` from the WakuMessage
export class TopicOnlyDecoder implements IDecoder<ITopicOnlyMessage> {
public contentTopic = "";
export class ContentTopicOnlyDecoder implements IDecoder<ITopicOnlyMessage> {
public constructor() {}
// pubsubTopic is ignored
public constructor(public pubsubTopic: PubsubTopic) {}
public get contentTopic(): string {
throw "ContentTopic is not available on this decoder, it is only meant to decode the content topic for any message";
}
public get routingInfo(): IRoutingInfo {
throw "RoutingInfo is not available on this decoder, it is only meant to decode the content topic for any message";
}
public fromWireToProtoObj(
bytes: Uint8Array

View File

@ -1,16 +1,6 @@
import { LightNode, Protocols } from "@waku/interfaces";
import {
createDecoder,
createEncoder,
createLightNode,
utf8ToBytes
} from "@waku/sdk";
import {
delay,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo, delay } from "@waku/utils";
import { expect } from "chai";
import {
@ -41,8 +31,7 @@ describe("High Throughput Messaging", function () {
});
it("Send/Receive thousands of messages quickly", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 0, numShardsInCluster: 8 };
const testStart = new Date();
const testEnd = Date.now() + testDurationMs;
@ -60,8 +49,8 @@ describe("High Throughput Messaging", function () {
store: true,
filter: true,
relay: true,
clusterId: 0,
shard: [0],
clusterId: networkConfig.clusterId,
numShardsInNetwork: networkConfig.numShardsInCluster,
contentTopic: [ContentTopic]
},
{ retries: 3 }
@ -69,29 +58,23 @@ describe("High Throughput Messaging", function () {
await delay(1000);
await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
// TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({ networkConfig });
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter]);
const decoder = createDecoder(ContentTopic, singleShardInfo);
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
const decoder = createDecoder(ContentTopic, routingInfo);
const hasSubscribed = await waku.filter.subscribe(
[decoder],
messageCollector.callback
);
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
});
expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);
let messageId = 0;
// Send messages as fast as possible until testEnd
@ -107,7 +90,8 @@ describe("High Throughput Messaging", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: ContentTopic,
payload: utf8ToBytes(message)
})
}),
routingInfo
);
sent = true;
@ -119,7 +103,7 @@ describe("High Throughput Messaging", function () {
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: message,
expectedContentTopic: ContentTopic,
expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
expectedPubsubTopic: routingInfo.pubsubTopic
});
}
} catch (e: any) {

View File

@ -1,16 +1,6 @@
import { LightNode, Protocols } from "@waku/interfaces";
import {
createDecoder,
createEncoder,
createLightNode,
utf8ToBytes
} from "@waku/sdk";
import {
delay,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo, delay } from "@waku/utils";
import { expect } from "chai";
import {
@ -41,8 +31,7 @@ describe("Longevity", function () {
});
it("Filter - 2 hours", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 0, numShardsInCluster: 8 };
const testStart = new Date();
@ -68,29 +57,23 @@ describe("Longevity", function () {
{ retries: 3 }
);
await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
// TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({ networkConfig });
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter]);
const decoder = createDecoder(ContentTopic, singleShardInfo);
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
const decoder = createDecoder(ContentTopic, routingInfo);
const hasSubscribed = await waku.filter.subscribe(
[decoder],
messageCollector.callback
);
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
});
expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);
let messageId = 0;
while (Date.now() < testEnd) {
@ -105,7 +88,8 @@ describe("Longevity", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: ContentTopic,
payload: utf8ToBytes(message)
})
}),
routingInfo
);
sent = true;
@ -117,7 +101,7 @@ describe("Longevity", function () {
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: message,
expectedContentTopic: ContentTopic,
expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
expectedPubsubTopic: routingInfo.pubsubTopic
});
}
} catch (e: any) {

View File

@ -1,16 +1,6 @@
import { LightNode, Protocols } from "@waku/interfaces";
import {
createDecoder,
createEncoder,
createLightNode,
utf8ToBytes
} from "@waku/sdk";
import {
delay,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo, delay } from "@waku/utils";
import { expect } from "chai";
import {
@ -52,8 +42,7 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
});
it("Send/Receive messages of varying sizes", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 0, numShardsInCluster: 8 };
const testStart = new Date();
const testEnd = Date.now() + testDurationMs;
@ -74,29 +63,23 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
await delay(1000);
await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
// TODO await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({ networkConfig });
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter]);
const decoder = createDecoder(ContentTopic, singleShardInfo);
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
const decoder = createDecoder(ContentTopic, routingInfo);
const hasSubscribed = await waku.filter.subscribe(
[decoder],
messageCollector.callback
);
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
});
expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);
let messageId = 0;
const report: {
messageId: number;
@ -121,7 +104,8 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: ContentTopic,
payload: utf8ToBytes(message)
})
}),
routingInfo
);
sent = true;
@ -133,7 +117,7 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: message,
expectedContentTopic: ContentTopic,
expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
expectedPubsubTopic: routingInfo.pubsubTopic
});
}
} catch (e: any) {

View File

@ -24,8 +24,8 @@ import {
import {
createTestMetaSetter,
createTestRLNCodecSetup,
EMPTY_PROTO_MESSAGE,
TEST_CONSTANTS,
EmptyProtoMessage,
TestConstants,
verifyRLNMessage
} from "./codec.test-utils.js";
import { RlnMessage } from "./message.js";
@ -37,14 +37,20 @@ describe("RLN codec with version 0", () => {
await createTestRLNCodecSetup();
const rlnEncoder = createRLNEncoder({
encoder: createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }),
encoder: createEncoder({
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo
}),
rlnInstance,
index,
credential
});
const rlnDecoder = createRLNDecoder({
rlnInstance,
decoder: createDecoder(TEST_CONSTANTS.contentTopic)
decoder: createDecoder(
TestConstants.contentTopic,
TestConstants.routingInfo
)
});
const bytes = await rlnEncoder.toWire({ payload });
@ -53,11 +59,11 @@ describe("RLN codec with version 0", () => {
const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!);
expect(protoResult).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
protoResult!
))!;
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance);
});
it("toProtoObj", async function () {
@ -65,25 +71,28 @@ describe("RLN codec with version 0", () => {
await createTestRLNCodecSetup();
const rlnEncoder = new RLNEncoder(
createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }),
createEncoder({
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo
}),
rlnInstance,
index,
credential
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createDecoder(TEST_CONSTANTS.contentTopic)
createDecoder(TestConstants.contentTopic, TestConstants.routingInfo)
);
const proto = await rlnEncoder.toProtoObj({ payload });
expect(proto).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
proto!
)) as RlnMessage<IDecodedMessage>;
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance);
});
});
@ -95,7 +104,8 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createSymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo,
symKey
}),
rlnInstance,
@ -104,7 +114,11 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createSymDecoder(TEST_CONSTANTS.contentTopic, symKey)
createSymDecoder(
TestConstants.contentTopic,
TestConstants.routingInfo,
symKey
)
);
const bytes = await rlnEncoder.toWire({ payload });
@ -113,11 +127,11 @@ describe("RLN codec with version 1", () => {
const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!);
expect(protoResult).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
protoResult!
))!;
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance);
});
it("Symmetric, toProtoObj", async function () {
@ -127,7 +141,8 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createSymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo,
symKey
}),
rlnInstance,
@ -136,18 +151,22 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createSymDecoder(TEST_CONSTANTS.contentTopic, symKey)
createSymDecoder(
TestConstants.contentTopic,
TestConstants.routingInfo,
symKey
)
);
const proto = await rlnEncoder.toProtoObj({ payload });
expect(proto).to.not.be.undefined;
const msg = await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
proto!
);
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance);
});
it("Asymmetric, toWire", async function () {
@ -158,7 +177,8 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createAsymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo,
publicKey
}),
rlnInstance,
@ -167,7 +187,11 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createAsymDecoder(TEST_CONSTANTS.contentTopic, privateKey)
createAsymDecoder(
TestConstants.contentTopic,
TestConstants.routingInfo,
privateKey
)
);
const bytes = await rlnEncoder.toWire({ payload });
@ -176,11 +200,11 @@ describe("RLN codec with version 1", () => {
const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!);
expect(protoResult).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
protoResult!
))!;
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance);
});
it("Asymmetric, toProtoObj", async function () {
@ -191,7 +215,8 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createAsymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo,
publicKey
}),
rlnInstance,
@ -200,18 +225,22 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createAsymDecoder(TEST_CONSTANTS.contentTopic, privateKey)
createAsymDecoder(
TestConstants.contentTopic,
TestConstants.routingInfo,
privateKey
)
);
const proto = await rlnEncoder.toProtoObj({ payload });
expect(proto).to.not.be.undefined;
const msg = await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
proto!
);
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 1, rlnInstance);
});
});
@ -221,21 +250,24 @@ describe("RLN Codec - epoch", () => {
await createTestRLNCodecSetup();
const rlnEncoder = new RLNEncoder(
createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }),
createEncoder({
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo
}),
rlnInstance,
index,
credential
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createDecoder(TEST_CONSTANTS.contentTopic)
createDecoder(TestConstants.contentTopic, TestConstants.routingInfo)
);
const proto = await rlnEncoder.toProtoObj({ payload });
expect(proto).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
proto!
)) as RlnMessage<IDecodedMessage>;
@ -245,7 +277,7 @@ describe("RLN Codec - epoch", () => {
expect(msg.epoch!.toString(10).length).to.eq(9);
expect(msg.epoch).to.eq(epoch);
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance);
});
});
@ -257,7 +289,8 @@ describe("RLN codec with version 0 and meta setter", () => {
const rlnEncoder = createRLNEncoder({
encoder: createEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo,
metaSetter
}),
rlnInstance,
@ -266,7 +299,10 @@ describe("RLN codec with version 0 and meta setter", () => {
});
const rlnDecoder = createRLNDecoder({
rlnInstance,
decoder: createDecoder(TEST_CONSTANTS.contentTopic)
decoder: createDecoder(
TestConstants.contentTopic,
TestConstants.routingInfo
)
});
const bytes = await rlnEncoder.toWire({ payload });
@ -275,17 +311,17 @@ describe("RLN codec with version 0 and meta setter", () => {
const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!);
expect(protoResult).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
protoResult!
))!;
const expectedMeta = metaSetter({
...EMPTY_PROTO_MESSAGE,
...EmptyProtoMessage,
payload: protoResult!.payload
});
expect(msg!.meta).to.deep.eq(expectedMeta);
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance);
});
it("toProtoObj", async function () {
@ -294,30 +330,34 @@ describe("RLN codec with version 0 and meta setter", () => {
const metaSetter = createTestMetaSetter();
const rlnEncoder = new RLNEncoder(
createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic, metaSetter }),
createEncoder({
contentTopic: TestConstants.contentTopic,
routingInfo: TestConstants.routingInfo,
metaSetter
}),
rlnInstance,
index,
credential
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createDecoder(TEST_CONSTANTS.contentTopic)
createDecoder(TestConstants.contentTopic, TestConstants.routingInfo)
);
const proto = await rlnEncoder.toProtoObj({ payload });
expect(proto).to.not.be.undefined;
const msg = (await rlnDecoder.fromProtoObj(
TEST_CONSTANTS.emptyPubsubTopic,
TestConstants.emptyPubsubTopic,
proto!
)) as RlnMessage<IDecodedMessage>;
const expectedMeta = metaSetter({
...EMPTY_PROTO_MESSAGE,
...EmptyProtoMessage,
payload: msg!.payload
});
expect(msg!.meta).to.deep.eq(expectedMeta);
verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance);
verifyRLNMessage(msg, payload, TestConstants.contentTopic, 0, rlnInstance);
});
});

View File

@ -1,4 +1,5 @@
import type { IProtoMessage } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import { createRLN } from "./create.js";
@ -11,14 +12,21 @@ export interface TestRLNCodecSetup {
payload: Uint8Array;
}
export const TEST_CONSTANTS = {
export const TestConstants = {
contentTopic: "/test/1/waku-message/utf8",
emptyPubsubTopic: "",
defaultIndex: 0,
defaultPayload: new Uint8Array([1, 2, 3, 4, 5])
defaultPayload: new Uint8Array([1, 2, 3, 4, 5]),
routingInfo: createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 2
},
{ contentTopic: "/test/1/waku-message/utf8" }
)
} as const;
export const EMPTY_PROTO_MESSAGE = {
export const EmptyProtoMessage = {
timestamp: undefined,
contentTopic: "",
ephemeral: undefined,
@ -38,8 +46,8 @@ export async function createTestRLNCodecSetup(): Promise<TestRLNCodecSetup> {
return {
rlnInstance,
credential,
index: TEST_CONSTANTS.defaultIndex,
payload: TEST_CONSTANTS.defaultPayload
index: TestConstants.defaultIndex,
payload: TestConstants.defaultPayload
};
}

View File

@ -4,7 +4,8 @@ import type {
IEncoder,
IMessage,
IProtoMessage,
IRateLimitProof
IRateLimitProof,
IRoutingInfo
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -47,17 +48,16 @@ export class RLNEncoder implements IEncoder {
private async generateProof(message: IMessage): Promise<IRateLimitProof> {
const signal = toRLNSignal(this.contentTopic, message);
const proof = await this.rlnInstance.zerokit.generateRLNProof(
return this.rlnInstance.zerokit.generateRLNProof(
signal,
this.index,
message.timestamp,
this.idSecretHash
);
return proof;
}
public get pubsubTopic(): string {
return this.encoder.pubsubTopic;
public get routingInfo(): IRoutingInfo {
return this.encoder.routingInfo;
}
public get contentTopic(): string {
@ -93,8 +93,8 @@ export class RLNDecoder<T extends IDecodedMessage>
private readonly decoder: IDecoder<T>
) {}
public get pubsubTopic(): string {
return this.decoder.pubsubTopic;
public get routingInfo(): IRoutingInfo {
return this.decoder.routingInfo;
}
public get contentTopic(): string {

View File

@ -2,9 +2,9 @@ import { createDecoder, createEncoder } from "@waku/core";
import type {
ContentTopic,
IDecodedMessage,
EncoderOptions as WakuEncoderOptions
IMetaSetter
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
import { Logger, RoutingInfo } from "@waku/utils";
import init from "@waku/zerokit-rln-wasm";
import * as zerokitRLN from "@waku/zerokit-rln-wasm";
@ -27,7 +27,27 @@ import { Zerokit } from "./zerokit.js";
const log = new Logger("waku:rln");
type WakuRLNEncoderOptions = WakuEncoderOptions & {
type WakuRLNEncoderOptions = {
/**
* The routing information for messages to encode.
*/
routingInfo: RoutingInfo;
/** The content topic to set on outgoing messages. */
contentTopic: string;
/**
* An optional flag to mark message as ephemeral, i.e., not to be stored by Waku Store nodes.
* @defaultValue `false`
*/
ephemeral?: boolean;
/**
* A function called when encoding messages to set the meta field.
* @param IProtoMessage The message encoded for wire, without the meta field.
* If encryption is used, `metaSetter` only accesses _encrypted_ payload.
*/
metaSetter?: IMetaSetter;
/**
* RLN Credentials
*/
credentials: EncryptedCredentials | DecryptedCredentials;
};
@ -87,11 +107,12 @@ export class RLNInstance extends RLNCredentialsManager {
}
public createDecoder(
contentTopic: ContentTopic
contentTopic: ContentTopic,
routingInfo: RoutingInfo
): RLNDecoder<IDecodedMessage> {
return createRLNDecoder({
rlnInstance: this,
decoder: createDecoder(contentTopic)
decoder: createDecoder(contentTopic, routingInfo)
});
}

View File

@ -5,6 +5,7 @@ import type {
IProtoMessage,
Libp2p
} from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import sinon from "sinon";
@ -13,8 +14,15 @@ import { PeerManager } from "../peer_manager/index.js";
import { Filter } from "./filter.js";
import { Subscription } from "./subscription.js";
const PUBSUB_TOPIC = "/waku/2/rs/1/4";
const CONTENT_TOPIC = "/test/1/waku-filter/utf8";
const testContentTopic = "/test/1/waku-filter/utf8";
const testNetworkconfig = {
clusterId: 0,
numShardsInCluster: 9
};
const testRoutingInfo = createRoutingInfo(testNetworkconfig, {
contentTopic: testContentTopic
});
const testPubsubTopic = testRoutingInfo.pubsubTopic;
describe("Filter SDK", () => {
let libp2p: Libp2p;
@ -29,7 +37,7 @@ describe("Filter SDK", () => {
connectionManager = mockConnectionManager();
peerManager = mockPeerManager();
filter = mockFilter({ libp2p, connectionManager, peerManager });
decoder = createDecoder(CONTENT_TOPIC, PUBSUB_TOPIC);
decoder = createDecoder(testContentTopic, testRoutingInfo);
callback = sinon.spy();
});
@ -80,10 +88,10 @@ describe("Filter SDK", () => {
await filter.subscribe(decoder, callback);
const message = createMockMessage(CONTENT_TOPIC);
const message = createMockMessage(testContentTopic);
const peerId = "peer1";
await (filter as any).onIncomingMessage(PUBSUB_TOPIC, message, peerId);
await (filter as any).onIncomingMessage(testPubsubTopic, message, peerId);
expect(subscriptionInvokeStub.calledOnce).to.be.true;
expect(subscriptionInvokeStub.firstCall.args[0]).to.equal(message);
@ -91,7 +99,11 @@ describe("Filter SDK", () => {
});
it("should successfully stop", async () => {
const decoder2 = createDecoder("/another-content-topic", PUBSUB_TOPIC);
const contentTopic2 = "/test/1/waku-filter-2/utf8";
const decoder2 = createDecoder(
contentTopic2,
createRoutingInfo(testNetworkconfig, { contentTopic: contentTopic2 })
);
const stopStub = sinon.stub(Subscription.prototype, "stop");
sinon.stub(Subscription.prototype, "add").resolves(true);
@ -129,7 +141,7 @@ function mockLibp2p(): Libp2p {
function mockConnectionManager(): ConnectionManager {
return {
isTopicConfigured: sinon.stub().callsFake((topic: string) => {
return topic === PUBSUB_TOPIC;
return topic === testPubsubTopic;
})
} as unknown as ConnectionManager;
}

View File

@ -63,21 +63,21 @@ export class Filter implements IFilter {
throw Error("Cannot subscribe with 0 decoders.");
}
const pubsubTopics = decoders.map((v) => v.pubsubTopic);
const singlePubsubTopic = pubsubTopics[0];
const routingInfos = decoders.map((v) => v.routingInfo);
const routingInfo = routingInfos[0];
const contentTopics = decoders.map((v) => v.contentTopic);
log.info(
`Subscribing to contentTopics: ${contentTopics}, pubsubTopic: ${singlePubsubTopic}`
`Subscribing to contentTopics: ${contentTopics}, pubsubTopic: ${routingInfo.pubsubTopic}`
);
this.throwIfTopicNotSame(pubsubTopics);
this.throwIfTopicNotSame(routingInfos.map((r) => r.pubsubTopic));
let subscription = this.subscriptions.get(singlePubsubTopic);
let subscription = this.subscriptions.get(routingInfo.pubsubTopic);
if (!subscription) {
subscription = new Subscription({
pubsubTopic: singlePubsubTopic,
routingInfo: routingInfo,
protocol: this.protocol,
config: this.config,
peerManager: this.peerManager
@ -86,7 +86,7 @@ export class Filter implements IFilter {
}
const result = await subscription.add(decoders, callback);
this.subscriptions.set(singlePubsubTopic, subscription);
this.subscriptions.set(routingInfo.pubsubTopic, subscription);
log.info(
`Subscription ${result ? "successful" : "failed"} for content topic: ${contentTopics}`
@ -104,7 +104,7 @@ export class Filter implements IFilter {
throw Error("Cannot unsubscribe with 0 decoders.");
}
const pubsubTopics = decoders.map((v) => v.pubsubTopic);
const pubsubTopics = decoders.map((v) => v.routingInfo.pubsubTopic);
const singlePubsubTopic = pubsubTopics[0];
const contentTopics = decoders.map((v) => v.contentTopic);

View File

@ -1,10 +1,12 @@
import { FilterCore } from "@waku/core";
import type {
AutoSharding,
FilterProtocolOptions,
IDecodedMessage,
IDecoder
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import sinon from "sinon";
@ -14,7 +16,13 @@ import { Subscription } from "./subscription.js";
const PUBSUB_TOPIC = "/waku/2/rs/1/4";
const CONTENT_TOPIC = "/test/1/waku-filter/utf8";
const NETWORK_CONFIG: AutoSharding = {
clusterId: 2,
numShardsInCluster: 3
};
const ROUTING_INFO = createRoutingInfo(NETWORK_CONFIG, {
contentTopic: CONTENT_TOPIC
});
describe("Filter Subscription", () => {
let filterCore: FilterCore;
let peerManager: PeerManager;
@ -32,7 +40,7 @@ describe("Filter Subscription", () => {
};
subscription = new Subscription({
pubsubTopic: PUBSUB_TOPIC,
routingInfo: ROUTING_INFO,
protocol: filterCore,
config,
peerManager
@ -79,9 +87,11 @@ describe("Filter Subscription", () => {
});
it("should invoke callbacks when receiving a message", async () => {
const testContentTopic = "/custom/content/topic";
const testContentTopic = "/custom/0/content/proto";
const testDecoder = {
pubsubTopic: PUBSUB_TOPIC,
routingInfo: createRoutingInfo(NETWORK_CONFIG, {
contentTopic: testContentTopic
}),
contentTopic: testContentTopic,
fromProtoObj: sinon.stub().callsFake(() => {
return Promise.resolve({ payload: new Uint8Array([1, 2, 3]) });
@ -106,9 +116,11 @@ describe("Filter Subscription", () => {
});
it("should invoke callbacks only when newly receiving message is given", async () => {
const testContentTopic = "/custom/content/topic";
const testContentTopic = "/custom/0/content/topic";
const testDecoder = {
pubsubTopic: PUBSUB_TOPIC,
routingInfo: createRoutingInfo(NETWORK_CONFIG, {
contentTopic: testContentTopic
}),
contentTopic: testContentTopic,
fromProtoObj: sinon.stub().callsFake(() => {
return Promise.resolve({ payload: new Uint8Array([1, 2, 3]) });

View File

@ -10,11 +10,12 @@ import type {
IDecodedMessage,
IDecoder,
IProtoMessage,
PeerIdStr
PeerIdStr,
PubsubTopic
} from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { Logger } from "@waku/utils";
import { Logger, RoutingInfo } from "@waku/utils";
import { PeerManager, PeerManagerEventNames } from "../peer_manager/index.js";
@ -35,7 +36,8 @@ type AttemptUnsubscribeParams = {
type Libp2pEventHandler = (e: CustomEvent<PeerId>) => void;
export class Subscription {
private readonly pubsubTopic: string;
private readonly routingInfo: RoutingInfo;
private readonly pubsubTopic: PubsubTopic;
private readonly protocol: FilterCore;
private readonly peerManager: PeerManager;
@ -73,7 +75,8 @@ export class Subscription {
public constructor(params: SubscriptionParams) {
this.config = params.config;
this.pubsubTopic = params.pubsubTopic;
this.routingInfo = params.routingInfo;
this.pubsubTopic = params.routingInfo.pubsubTopic;
this.protocol = params.protocol;
this.peerManager = params.peerManager;
@ -193,7 +196,7 @@ export class Subscription {
if (this.callbacks.has(decoder)) {
log.warn(
`Replacing callback associated associated with decoder with pubsubTopic:${decoder.pubsubTopic} and contentTopic:${decoder.contentTopic}`
`Replacing callback associated associated with decoder with pubsubTopic:${decoder.routingInfo.pubsubTopic} and contentTopic:${decoder.contentTopic}`
);
const callback = this.callbacks.get(decoder);
@ -205,7 +208,7 @@ export class Subscription {
void (async (): Promise<void> => {
try {
const message = await decoder.fromProtoObj(
decoder.pubsubTopic,
decoder.routingInfo.pubsubTopic,
event.detail as IProtoMessage
);
void callback(message!);
@ -230,7 +233,7 @@ export class Subscription {
if (!callback) {
log.warn(
`No callback associated with decoder with pubsubTopic:${decoder.pubsubTopic} and contentTopic:${decoder.contentTopic}`
`No callback associated with decoder with pubsubTopic:${decoder.routingInfo.pubsubTopic} and contentTopic:${decoder.contentTopic}`
);
}
@ -413,11 +416,13 @@ export class Subscription {
const usablePeer = await this.peerManager.isPeerOnPubsub(
event.detail,
this.pubsubTopic
this.routingInfo.pubsubTopic
);
if (!usablePeer) {
log.info(`Peer ${id} doesn't support pubsubTopic:${this.pubsubTopic}`);
log.info(
`Peer ${id} doesn't support pubsubTopic:${this.routingInfo.pubsubTopic}`
);
return;
}
@ -483,7 +488,7 @@ export class Subscription {
const prevPeers = new Set<PeerIdStr>(this.peers.keys());
const peersToAdd = await this.peerManager.getPeers({
protocol: Protocols.Filter,
pubsubTopic: this.pubsubTopic
routingInfo: this.routingInfo
});
for (const peer of peersToAdd) {

View File

@ -1,5 +1,9 @@
import type { FilterCore } from "@waku/core";
import type { FilterProtocolOptions, Libp2p } from "@waku/interfaces";
import type {
FilterProtocolOptions,
IRoutingInfo,
Libp2p
} from "@waku/interfaces";
import type { WakuMessage } from "@waku/proto";
import type { PeerManager } from "../peer_manager/index.js";
@ -15,7 +19,7 @@ export type SubscriptionEvents = {
};
export type SubscriptionParams = {
pubsubTopic: string;
routingInfo: IRoutingInfo;
protocol: FilterCore;
config: FilterProtocolOptions;
peerManager: PeerManager;

View File

@ -1,6 +1,7 @@
import { Peer, PeerId } from "@libp2p/interface";
import { createEncoder, Encoder, LightPushCodec } from "@waku/core";
import { Libp2p, ProtocolError } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import sinon, { SinonSpy } from "sinon";
@ -9,7 +10,14 @@ import { PeerManager } from "../peer_manager/index.js";
import { LightPush } from "./light_push.js";
const CONTENT_TOPIC = "/test/1/waku-light-push/utf8";
const testContentTopic = "/test/1/waku-light-push/utf8";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 7
},
{ contentTopic: testContentTopic }
);
describe("LightPush SDK", () => {
let libp2p: Libp2p;
@ -18,7 +26,10 @@ describe("LightPush SDK", () => {
beforeEach(() => {
libp2p = mockLibp2p();
encoder = createEncoder({ contentTopic: CONTENT_TOPIC });
encoder = createEncoder({
contentTopic: testContentTopic,
routingInfo: testRoutingInfo
});
lightPush = mockLightPush({ libp2p });
});

View File

@ -77,13 +77,13 @@ export class LightPush implements ILightPush {
...options
};
const { pubsubTopic } = encoder;
const { pubsubTopic } = encoder.routingInfo;
log.info("send: attempting to send a message to pubsubTopic:", pubsubTopic);
const peerIds = await this.peerManager.getPeers({
protocol: Protocols.LightPush,
pubsubTopic: encoder.pubsubTopic
routingInfo: encoder.routingInfo
});
const coreResults: CoreProtocolResult[] =
@ -124,7 +124,7 @@ export class LightPush implements ILightPush {
this.retryManager.push(
sendCallback.bind(this),
options.maxAttempts || DEFAULT_MAX_ATTEMPTS,
encoder.pubsubTopic
encoder.routingInfo
);
}

View File

@ -4,6 +4,7 @@ import {
ProtocolError,
Protocols
} from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import sinon from "sinon";
@ -11,6 +12,11 @@ import { PeerManager } from "../peer_manager/index.js";
import { RetryManager, ScheduledTask } from "./retry_manager.js";
const TestRoutingInfo = createRoutingInfo(
{ clusterId: 0 },
{ pubsubTopic: "/waku/2/rs/0/0" }
);
describe("RetryManager", () => {
let retryManager: RetryManager;
let peerManager: PeerManager;
@ -59,7 +65,7 @@ describe("RetryManager", () => {
})
);
retryManager.push(successCallback, 3, "test-topic");
retryManager.push(successCallback, 3, TestRoutingInfo);
retryManager.start();
await clock.tickAsync(200);
@ -74,7 +80,7 @@ describe("RetryManager", () => {
(peerManager as any).getPeers = () => [];
const callback = sinon.spy();
retryManager.push(callback, 2, "test-topic");
retryManager.push(callback, 2, TestRoutingInfo);
retryManager.start();
const queue = (retryManager as any)["queue"] as ScheduledTask[];
@ -92,7 +98,7 @@ describe("RetryManager", () => {
(peerManager as any).getPeers = () => [];
const callback = sinon.spy();
retryManager.push(callback, 1, "test-topic");
retryManager.push(callback, 1, TestRoutingInfo);
retryManager.start();
const queue = (retryManager as any)["queue"] as ScheduledTask[];
expect(queue.length).to.equal(1);
@ -117,7 +123,7 @@ describe("RetryManager", () => {
const task = {
callback: failingCallback,
maxAttempts: 2,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
};
await (retryManager as any)["taskExecutor"](task);
@ -136,14 +142,14 @@ describe("RetryManager", () => {
await (retryManager as any)["taskExecutor"]({
callback: errorCallback,
maxAttempts: 1,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
});
expect((peerManager.renewPeer as sinon.SinonSpy).calledOnce).to.be.true;
expect(
(peerManager.renewPeer as sinon.SinonSpy).calledWith(mockPeerId, {
protocol: Protocols.LightPush,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
})
).to.be.true;
});
@ -157,7 +163,7 @@ describe("RetryManager", () => {
const task = {
callback: slowCallback,
maxAttempts: 1,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
};
const executionPromise = (retryManager as any)["taskExecutor"](task);
@ -175,7 +181,7 @@ describe("RetryManager", () => {
const task = {
callback: failingCallback,
maxAttempts: 0,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
};
await (retryManager as any)["taskExecutor"](task);
@ -190,7 +196,7 @@ describe("RetryManager", () => {
if (called === 1) retryManager.stop();
return Promise.resolve({ success: mockPeerId, failure: null });
});
retryManager.push(successCallback, 2, "test-topic");
retryManager.push(successCallback, 2, TestRoutingInfo);
retryManager.start();
await clock.tickAsync(500);
expect(called).to.equal(1);
@ -206,7 +212,7 @@ describe("RetryManager", () => {
failure: { error: ProtocolError.GENERIC_FAIL }
});
});
retryManager.push(failCallback, 2, "test-topic");
retryManager.push(failCallback, 2, TestRoutingInfo);
retryManager.start();
await clock.tickAsync(1000);
retryManager.stop();

View File

@ -1,6 +1,6 @@
import type { PeerId } from "@libp2p/interface";
import { type CoreProtocolResult, Protocols } from "@waku/interfaces";
import { Logger } from "@waku/utils";
import { Logger, RoutingInfo } from "@waku/utils";
import type { PeerManager } from "../peer_manager/index.js";
@ -15,7 +15,7 @@ type AttemptCallback = (peerId: PeerId) => Promise<CoreProtocolResult>;
export type ScheduledTask = {
maxAttempts: number;
pubsubTopic: string;
routingInfo: RoutingInfo;
callback: AttemptCallback;
};
@ -54,12 +54,12 @@ export class RetryManager {
public push(
callback: AttemptCallback,
maxAttempts: number,
pubsubTopic: string
routingInfo: RoutingInfo
): void {
this.queue.push({
maxAttempts,
callback,
pubsubTopic
routingInfo
});
}
@ -96,7 +96,7 @@ export class RetryManager {
const peerId = (
await this.peerManager.getPeers({
protocol: Protocols.LightPush,
pubsubTopic: task.pubsubTopic
routingInfo: task.routingInfo
})
)[0];
@ -142,7 +142,7 @@ export class RetryManager {
if (shouldPeerBeChanged(error.message)) {
await this.peerManager.renewPeer(peerId, {
protocol: Protocols.LightPush,
pubsubTopic: task.pubsubTopic
routingInfo: task.routingInfo
});
}

View File

@ -5,6 +5,7 @@ import {
Libp2p,
Protocols
} from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import sinon from "sinon";
@ -17,8 +18,12 @@ describe("PeerManager", () => {
let peers: any[];
let mockConnections: any[];
const TEST_PUBSUB_TOPIC = "/test/1/waku-light-push/utf8";
const TEST_PUBSUB_TOPIC = "/waku/2/rs/0/0";
const TEST_PROTOCOL = Protocols.LightPush;
const TEST_ROUTING_INFO = createRoutingInfo(
{ clusterId: 0 },
{ pubsubTopic: TEST_PUBSUB_TOPIC }
);
const clearPeerState = (): void => {
(peerManager as any).lockedPeers.clear();
@ -36,7 +41,7 @@ describe("PeerManager", () => {
const getPeersForTest = async (): Promise<PeerId[]> => {
return await peerManager.getPeers({
protocol: TEST_PROTOCOL,
pubsubTopic: TEST_PUBSUB_TOPIC
routingInfo: TEST_ROUTING_INFO
});
};
@ -81,7 +86,7 @@ describe("PeerManager", () => {
pubsubTopics: [TEST_PUBSUB_TOPIC],
getConnectedPeers: async () => peers,
getPeers: async () => peers,
isPeerOnTopic: async (_id: PeerId, _topic: string) => true
isPeerOnShard: async (_id: PeerId, _topic: string) => true
} as unknown as IConnectionManager;
peerManager = new PeerManager({
libp2p,
@ -126,7 +131,7 @@ describe("PeerManager", () => {
const peerId = ids[0];
await peerManager.renewPeer(peerId, {
protocol: TEST_PROTOCOL,
pubsubTopic: TEST_PUBSUB_TOPIC
routingInfo: TEST_ROUTING_INFO
});
expect((peerManager as any).lockedPeers.has(peerId.toString())).to.be.false;
expect((peerManager as any).unlockedPeers.has(peerId.toString())).to.be
@ -224,7 +229,7 @@ describe("PeerManager", () => {
if (skipIfNoPeers(first)) return;
await peerManager.renewPeer(first[0], {
protocol: TEST_PROTOCOL,
pubsubTopic: TEST_PUBSUB_TOPIC
routingInfo: TEST_ROUTING_INFO
});
const second = await getPeersForTest();
if (skipIfNoPeers(second)) return;
@ -238,7 +243,7 @@ describe("PeerManager", () => {
} as any;
await peerManager.renewPeer(fakePeerId, {
protocol: TEST_PROTOCOL,
pubsubTopic: TEST_PUBSUB_TOPIC
routingInfo: TEST_ROUTING_INFO
});
expect(true).to.be.true;
});
@ -263,7 +268,7 @@ describe("PeerManager", () => {
const peerId = result[0];
await peerManager.renewPeer(peerId, {
protocol: TEST_PROTOCOL,
pubsubTopic: TEST_PUBSUB_TOPIC
routingInfo: TEST_ROUTING_INFO
});
const connection = mockConnections.find((c) => c.remotePeer.equals(peerId));

View File

@ -16,7 +16,7 @@ import {
Libp2pEventHandler,
Protocols
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
import { Logger, RoutingInfo } from "@waku/utils";
const log = new Logger("peer-manager");
@ -34,7 +34,7 @@ type PeerManagerParams = {
type GetPeersParams = {
protocol: Protocols;
pubsubTopic: string;
routingInfo: RoutingInfo;
};
export enum PeerManagerEventNames {
@ -107,7 +107,9 @@ export class PeerManager {
public async getPeers(params: GetPeersParams): Promise<PeerId[]> {
log.info(
`Getting peers for protocol: ${params.protocol}, pubsubTopic: ${params.pubsubTopic}`
`Getting peers for protocol: ${params.protocol}, ` +
`clusterId: ${params.routingInfo.networkConfig.clusterId},` +
` shard: ${params.routingInfo.shardId}`
);
const connectedPeers = await this.connectionManager.getConnectedPeers();
@ -117,13 +119,19 @@ export class PeerManager {
for (const peer of connectedPeers) {
const hasProtocol = this.hasPeerProtocol(peer, params.protocol);
const hasSamePubsub = await this.connectionManager.isPeerOnTopic(
const isOnSameShard = await this.connectionManager.isPeerOnShard(
peer.id,
params.pubsubTopic
params.routingInfo.networkConfig.clusterId,
params.routingInfo.shardId
);
if (!isOnSameShard) {
continue;
}
const isPeerAvailableForUse = this.isPeerAvailableForUse(peer.id);
if (hasProtocol && hasSamePubsub && isPeerAvailableForUse) {
if (hasProtocol && isPeerAvailableForUse) {
results.push(peer);
log.info(`Peer ${peer.id} qualifies for protocol ${params.protocol}`);
}
@ -168,7 +176,7 @@ export class PeerManager {
public async renewPeer(id: PeerId, params: GetPeersParams): Promise<void> {
log.info(
`Renewing peer ${id} for protocol: ${params.protocol}, pubsubTopic: ${params.pubsubTopic}`
`Renewing peer ${id} for protocol: ${params.protocol}, routingInfo: ${params.routingInfo}`
);
const connectedPeers = await this.connectionManager.getConnectedPeers();
@ -265,7 +273,7 @@ export class PeerManager {
}
const wasUnlocked = new Date(value).getTime();
return Date.now() - wasUnlocked >= 10_000 ? true : false;
return Date.now() - wasUnlocked >= 10_000;
}
private dispatchFilterPeerConnect(id: PeerId): void {

View File

@ -12,7 +12,7 @@ import {
StoreCursor,
StoreProtocolOptions
} from "@waku/interfaces";
import { isDefined, Logger } from "@waku/utils";
import { isDefined, Logger, RoutingInfo } from "@waku/utils";
import { PeerManager } from "../peer_manager/index.js";
@ -181,7 +181,7 @@ export class Store implements IStore {
private validateDecodersAndPubsubTopic<T extends IDecodedMessage>(
decoders: IDecoder<T>[]
): {
pubsubTopic: string;
routingInfo: RoutingInfo;
contentTopics: string[];
decodersAsMap: Map<string, IDecoder<T>>;
} {
@ -191,7 +191,7 @@ export class Store implements IStore {
}
const uniquePubsubTopicsInQuery = Array.from(
new Set(decoders.map((decoder) => decoder.pubsubTopic))
new Set(decoders.map((decoder) => decoder.routingInfo.pubsubTopic))
);
if (uniquePubsubTopicsInQuery.length > 1) {
log.error("API does not support querying multiple pubsub topics at once");
@ -214,7 +214,9 @@ export class Store implements IStore {
});
const contentTopics = decoders
.filter((decoder) => decoder.pubsubTopic === pubsubTopicForQuery)
.filter(
(decoder) => decoder.routingInfo.pubsubTopic === pubsubTopicForQuery
)
.map((dec) => dec.contentTopic);
if (contentTopics.length === 0) {
@ -223,16 +225,18 @@ export class Store implements IStore {
}
return {
pubsubTopic: pubsubTopicForQuery,
routingInfo: decoders[0].routingInfo,
contentTopics,
decodersAsMap
};
}
private async getPeerToUse(pubsubTopic: string): Promise<PeerId | undefined> {
private async getPeerToUse(
routingInfo: RoutingInfo
): Promise<PeerId | undefined> {
const peers = await this.peerManager.getPeers({
protocol: Protocols.Store,
pubsubTopic
routingInfo
});
return this.options.peers

View File

@ -1,119 +0,0 @@
import { DEFAULT_NUM_SHARDS, DefaultNetworkConfig } from "@waku/interfaces";
import { contentTopicToShardIndex } from "@waku/utils";
import { expect } from "chai";
import { decoderParamsToShardInfo, isShardCompatible } from "./utils.js";
const TestContentTopic = "/test/1/waku-sdk/utf8";
describe("IWaku utils", () => {
describe("decoderParamsToShardInfo", () => {
it("should use provided shard info when available", () => {
const params = {
contentTopic: TestContentTopic,
shardInfo: {
clusterId: 10,
shard: 5
}
};
const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);
expect(result.clusterId).to.equal(10);
expect(result.shard).to.equal(5);
});
it("should use network config clusterId when shard info clusterId is not provided", () => {
const params = {
contentTopic: TestContentTopic,
shardInfo: {
clusterId: 1,
shard: 5
}
};
const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);
expect(result.clusterId).to.equal(1);
expect(result.shard).to.equal(5);
});
it("should use shardsUnderCluster when provided", () => {
const contentTopic = TestContentTopic;
const params = {
contentTopic,
shardInfo: {
clusterId: 10,
shardsUnderCluster: 64
}
};
const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);
const expectedShardIndex = contentTopicToShardIndex(contentTopic, 64);
expect(result.clusterId).to.equal(10);
expect(result.shard).to.equal(expectedShardIndex);
});
it("should calculate shard index from content topic when shard is not provided", () => {
const contentTopic = TestContentTopic;
const params = {
contentTopic
};
const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);
const expectedShardIndex = contentTopicToShardIndex(
contentTopic,
DEFAULT_NUM_SHARDS
);
expect(result.clusterId).to.equal(1);
expect(result.shard).to.equal(expectedShardIndex);
});
});
describe("isShardCompatible", () => {
it("should return false when clusterId doesn't match", () => {
const shardInfo = {
clusterId: 10,
shard: 5
};
const result = isShardCompatible(shardInfo, DefaultNetworkConfig);
expect(result).to.be.false;
});
it("should return false when shard is not included in network shards", () => {
const shardInfo = {
clusterId: 1,
shard: 5
};
const networkConfig = {
clusterId: 1,
shards: [1, 2, 3, 4]
};
const result = isShardCompatible(shardInfo, networkConfig);
expect(result).to.be.false;
});
it("should return true when clusterId matches and shard is included in network shards", () => {
const shardInfo = {
clusterId: 1,
shard: 3
};
const networkConfig = {
clusterId: 1,
shards: [1, 2, 3, 4]
};
const result = isShardCompatible(shardInfo, networkConfig);
expect(result).to.be.true;
});
});
});

View File

@ -1,47 +0,0 @@
import type {
CreateDecoderParams,
NetworkConfig,
SingleShardInfo
} from "@waku/interfaces";
import { DEFAULT_NUM_SHARDS } from "@waku/interfaces";
import { contentTopicToShardIndex } from "@waku/utils";
export const decoderParamsToShardInfo = (
params: CreateDecoderParams,
networkConfig: NetworkConfig
): SingleShardInfo => {
const clusterId = (params.shardInfo?.clusterId ||
networkConfig.clusterId) as number;
const shardsUnderCluster =
params.shardInfo && "shardsUnderCluster" in params.shardInfo
? params.shardInfo.shardsUnderCluster
: DEFAULT_NUM_SHARDS;
const shardIndex =
params.shardInfo && "shard" in params.shardInfo
? params.shardInfo.shard
: contentTopicToShardIndex(params.contentTopic, shardsUnderCluster);
return {
clusterId,
shard: shardIndex
};
};
export const isShardCompatible = (
shardInfo: SingleShardInfo,
networkConfig: NetworkConfig
): boolean => {
if (networkConfig.clusterId !== shardInfo.clusterId) {
return false;
}
if (
"shards" in networkConfig &&
!networkConfig.shards.includes(shardInfo.shard!)
) {
return false;
}
return true;
};

View File

@ -27,7 +27,7 @@ import {
HealthStatus,
Protocols
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
import { createRoutingInfo, Logger, RoutingInfo } from "@waku/utils";
import { Filter } from "../filter/index.js";
import { HealthIndicator } from "../health_indicator/index.js";
@ -35,7 +35,6 @@ import { LightPush } from "../light_push/index.js";
import { PeerManager } from "../peer_manager/index.js";
import { Store } from "../store/index.js";
import { decoderParamsToShardInfo, isShardCompatible } from "./utils.js";
import { waitForRemotePeer } from "./wait_for_remote_peer.js";
const log = new Logger("waku");
@ -260,40 +259,33 @@ export class WakuNode implements IWaku {
}
public createDecoder(params: CreateDecoderParams): IDecoder<IDecodedMessage> {
const singleShardInfo = decoderParamsToShardInfo(
params,
this.networkConfig
const routingInfo = getRoutingInfo(
this.networkConfig,
params.contentTopic,
params.shardId
);
log.info(
`Creating Decoder with input:${JSON.stringify(params.shardInfo)}, determined:${JSON.stringify(singleShardInfo)}, expected:${JSON.stringify(this.networkConfig)}.`
);
if (!isShardCompatible(singleShardInfo, this.networkConfig)) {
throw Error(`Cannot create decoder: incompatible shard configuration.`);
}
return createDecoder(params.contentTopic, singleShardInfo);
return createDecoder(params.contentTopic, routingInfo);
}
public createEncoder(params: CreateEncoderParams): IEncoder {
const singleShardInfo = decoderParamsToShardInfo(
params,
this.networkConfig
const routingInfo = getRoutingInfo(
this.networkConfig,
params.contentTopic,
params.shardId
);
log.info(
`Creating Encoder with input:${JSON.stringify(params.shardInfo)}, determined:${JSON.stringify(singleShardInfo)}, expected:${JSON.stringify(this.networkConfig)}.`
);
if (!isShardCompatible(singleShardInfo, this.networkConfig)) {
throw Error(`Cannot create encoder: incompatible shard configuration.`);
}
return createEncoder({
contentTopic: params.contentTopic,
ephemeral: params.ephemeral,
pubsubTopicShardInfo: singleShardInfo
routingInfo: routingInfo
});
}
}
function getRoutingInfo(
networkConfig: NetworkConfig,
contentTopic?: string,
shardId?: number
): RoutingInfo {
return createRoutingInfo(networkConfig, { contentTopic, shardId });
}

View File

@ -5,7 +5,8 @@
* @module
*/
import { PubsubTopic, ShardInfo, SingleShardInfo } from "@waku/interfaces";
import { AutoSharding, RelayShards } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
export const NOISE_KEY_1 = new Uint8Array(
((): number[] => {
@ -46,11 +47,27 @@ export const TEST_STRING = [
{ description: "Arabic", value: "مرحبا" },
{ description: "Russian", value: "Привет" },
{ description: "SQL Injection", value: "'; DROP TABLE users; --" },
{ description: "Script", value: '<script>alert("hacked");</script>' },
{ description: "XML", value: "<element>Some content</element>" },
{ description: "Basic HTML tag", value: "<h1>Heading</h1>" },
{
description: "Script",
value: '<script>alert("hacked");</script>',
invalidContentTopic: true
},
{
description: "XML",
value: "<element>Some content</element>",
invalidContentTopic: true
},
{
description: "Basic HTML tag",
value: "<h1>Heading</h1>",
invalidContentTopic: true
},
{ description: "JSON", value: '{"user":"admin","password":"123456"}' },
{ description: "shell command", value: "`rm -rf /`" },
{
description: "shell command",
value: "`rm -rf /`",
invalidContentTopic: true
},
{ description: "escaped characters", value: "\\n\\t\\0" },
{ description: "unicode special characters", value: "\u202Ereverse" },
{ description: "emoji", value: "🤫 🤥 😶 😶‍🌫️ 😐 😑 😬 🫨 🫠 🙄 😯 😦 😧 😮" }
@ -68,12 +85,18 @@ export const MOCHA_HOOK_MAX_TIMEOUT = 50_000;
export const SEPOLIA_RPC_URL =
process.env.SEPOLIA_RPC_URL || "https://sepolia.gateway.tenderly.co";
export const DefaultTestPubsubTopic: PubsubTopic = "/waku/2/rs/0/0";
export const DefaultTestShardInfo: ShardInfo = {
clusterId: 0,
export const DefaultTestClusterId = 0;
export const DefaultTestNumShardsInCluster = 10;
export const DefaultTestNetworkConfig: AutoSharding = {
clusterId: DefaultTestClusterId,
numShardsInCluster: DefaultTestNumShardsInCluster
};
export const DefaultTestRelayShards: RelayShards = {
clusterId: DefaultTestClusterId,
shards: [0]
};
export const DefaultTestSingleShardInfo: SingleShardInfo = {
clusterId: 0,
shard: 0
};
export const DefaultTestContentTopic = "/test/1/content-topic/proto";
export const DefaultTestRoutingInfo = createRoutingInfo(
DefaultTestNetworkConfig,
{ contentTopic: DefaultTestContentTopic }
);

View File

@ -1,13 +1,7 @@
import {
AutoSharding,
IDecodedMessage,
NetworkConfig,
StaticSharding
} from "@waku/interfaces";
import { contentTopicToShardIndex, Logger } from "@waku/utils";
import { ContentTopic, IDecodedMessage } from "@waku/interfaces";
import { isAutoShardingRoutingInfo, Logger, RoutingInfo } from "@waku/utils";
import { expect } from "chai";
import { DefaultTestPubsubTopic } from "../constants.js";
import { Args, MessageRpcQuery, MessageRpcResponse } from "../types.js";
import { delay, makeLogFileName } from "../utils/index.js";
@ -29,7 +23,7 @@ export class ServiceNodesFleet {
mochaContext: Mocha.Context,
nodesToCreate: number = 3,
strictChecking: boolean = false,
networkConfig: NetworkConfig,
routingInfo: RoutingInfo,
_args?: Args,
withoutFilter = false
): Promise<ServiceNodesFleet> {
@ -40,7 +34,7 @@ export class ServiceNodesFleet {
makeLogFileName(mochaContext) + Math.random().toString(36).substring(7)
);
const args = getArgs(networkConfig, _args);
const args = applyDefaultArgs(routingInfo, _args);
if (nodes[0]) {
const addr = await nodes[0].getExternalMultiaddr();
@ -93,15 +87,19 @@ export class ServiceNodesFleet {
public async sendRelayMessage(
message: MessageRpcQuery,
pubsubTopic: string = DefaultTestPubsubTopic
routingInfo: RoutingInfo
): Promise<boolean> {
const relayMessagePromises: Promise<boolean>[] = this.nodes.map((node) =>
node.sendMessage(message, pubsubTopic)
node.sendMessage(message, routingInfo)
);
const relayMessages = await Promise.all(relayMessagePromises);
return relayMessages.every((message) => message);
}
/**
* This is a dodgy things to do as it assumes the nwaku node did not flush
* any messages from its cache.
*/
public async confirmMessageLength(numMessages: number): Promise<void> {
if (this.strictChecking) {
await Promise.all(
@ -203,13 +201,12 @@ class MultipleNodesMessageCollector {
public async waitForMessages(
numMessages: number,
options?: {
pubsubTopic?: string;
timeoutDuration?: number;
exact?: boolean;
contentTopic?: ContentTopic;
}
): Promise<boolean> {
const startTime = Date.now();
const pubsubTopic = options?.pubsubTopic || DefaultTestPubsubTopic;
const timeoutDuration = options?.timeoutDuration || 400;
const exact = options?.exact || false;
@ -218,7 +215,7 @@ class MultipleNodesMessageCollector {
if (this.strictChecking) {
const results = await Promise.all(
this.relayNodes.map(async (node) => {
const msgs = await node.messages(pubsubTopic);
const msgs = await node.messages(options?.contentTopic);
return msgs.length >= numMessages;
})
);
@ -226,7 +223,7 @@ class MultipleNodesMessageCollector {
} else {
const results = await Promise.all(
this.relayNodes.map(async (node) => {
const msgs = await node.messages(pubsubTopic);
const msgs = await node.messages(options?.contentTopic);
return msgs.length >= numMessages;
})
);
@ -257,23 +254,25 @@ class MultipleNodesMessageCollector {
}
}
function getArgs(networkConfig: NetworkConfig, args?: Args): Args {
const defaultArgs = {
function applyDefaultArgs(routingInfo: RoutingInfo, args?: Args): Args {
const defaultArgs: Args = {
lightpush: true,
filter: true,
discv5Discovery: true,
peerExchange: true,
relay: true,
clusterId: networkConfig.clusterId
} as Args;
relay: true
};
if ((networkConfig as StaticSharding).shards) {
defaultArgs.shard = (networkConfig as StaticSharding).shards;
} else if ((networkConfig as AutoSharding).contentTopics) {
defaultArgs.contentTopic = (networkConfig as AutoSharding).contentTopics;
defaultArgs.shard = (networkConfig as AutoSharding).contentTopics.map(
(topic) => contentTopicToShardIndex(topic)
);
defaultArgs.clusterId = routingInfo.networkConfig.clusterId;
if (isAutoShardingRoutingInfo(routingInfo)) {
defaultArgs.numShardsInNetwork =
routingInfo.networkConfig.numShardsInCluster;
defaultArgs.contentTopic = [routingInfo.contentTopic];
} else {
defaultArgs.numShardsInNetwork = 0;
defaultArgs.shard = [routingInfo.shardId];
}
return { ...defaultArgs, ...args };

View File

@ -4,7 +4,6 @@ import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { AssertionError, expect } from "chai";
import { equals } from "uint8arrays/equals";
import { DefaultTestPubsubTopic } from "../constants.js";
import { MessageRpcResponse } from "../types.js";
import { base64ToUtf8 } from "../utils/base64_utf8.js";
import { delay } from "../utils/delay.js";
@ -67,20 +66,19 @@ export class MessageCollector {
public async waitForMessages(
numMessages: number,
options?: {
pubsubTopic?: string;
// pubsubTopic?: string;
timeoutDuration?: number;
exact?: boolean;
}
): Promise<boolean> {
const startTime = Date.now();
const pubsubTopic = this.getPubsubTopicToUse(options?.pubsubTopic);
const timeoutDuration = options?.timeoutDuration || 400;
const exact = options?.exact || false;
while (this.count < numMessages) {
if (this.nwaku) {
try {
this.list = await this.nwaku.messages(pubsubTopic);
this.list = await this.nwaku.messages();
} catch (error) {
log.error(`Can't retrieve messages because of ${error}`);
await delay(10);
@ -237,15 +235,13 @@ export class MessageCollector {
`Message text mismatch. Expected: ${options.expectedMessageText}. Got: ${receivedMessageText}`
);
} else {
const pubsubTopicToUse = this.getPubsubTopicToUse(
options.expectedPubsubTopic
);
// js-waku message specific assertions
expect(message.pubsubTopic).to.eq(
pubsubTopicToUse,
`Message pub/sub topic mismatch. Expected: ${pubsubTopicToUse}. Got: ${message.pubsubTopic}`
);
if (options.expectedPubsubTopic) {
// js-waku message specific assertions
expect(message.pubsubTopic).to.eq(
options.expectedPubsubTopic,
`Message pub/sub topic mismatch. Expected: ${options.expectedPubsubTopic}. Got: ${message.pubsubTopic}`
);
}
expect(bytesToUtf8(message.payload)).to.eq(
options.expectedMessageText,
`Message text mismatch. Expected: ${
@ -267,8 +263,4 @@ export class MessageCollector {
);
}
}
private getPubsubTopicToUse(pubsubTopic: string | undefined): string {
return pubsubTopic || DefaultTestPubsubTopic;
}
}

View File

@ -1,14 +1,23 @@
import { CreateNodeOptions, NetworkConfig, Protocols } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import {
ContentTopic,
type CreateNodeOptions,
type NetworkConfig,
Protocols,
type ShardId
} from "@waku/interfaces";
import { createRelayNode, RelayCreateOptions } from "@waku/relay";
import { createLightNode, WakuNode } from "@waku/sdk";
import {
derivePubsubTopicsFromNetworkConfig,
createRoutingInfo,
isAutoSharding,
isStaticSharding,
Logger,
pubsubTopicsToShardInfo
RoutingInfo
} from "@waku/utils";
import { Context } from "mocha";
import { NOISE_KEY_1 } from "../constants.js";
import { Args } from "../types.js";
import { makeLogFileName } from "../utils/index.js";
import { ServiceNode } from "./service_node.js";
@ -24,6 +33,8 @@ export const DEFAULT_DISCOVERIES_ENABLED = {
type RunNodesOptions = {
context: Context;
networkConfig: NetworkConfig;
relayShards?: ShardId[]; // Only for static sharding
contentTopics?: ContentTopic[]; // Only for auto sharding
protocols: Protocols[];
createNode: typeof createLightNode | typeof createRelayNode;
};
@ -34,32 +45,61 @@ export async function runNodes<T>(
const { context, networkConfig, createNode, protocols } = options;
const nwaku = new ServiceNode(makeLogFileName(context));
const pubsubTopics = derivePubsubTopicsFromNetworkConfig(networkConfig);
const shardInfo = pubsubTopicsToShardInfo(pubsubTopics);
await nwaku.start(
{
filter: true,
lightpush: true,
relay: true,
store: true,
shard: shardInfo.shards,
clusterId: shardInfo.clusterId
},
{ retries: 3 }
);
const waku_options: CreateNodeOptions = {
const nwakuArgs: Args = {
filter: true,
lightpush: true,
relay: true,
store: true,
clusterId: networkConfig.clusterId
};
const jswakuArgs: CreateNodeOptions = {
staticNoiseKey: NOISE_KEY_1,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
networkConfig: shardInfo,
networkConfig,
lightPush: { numPeersToUse: 2 },
discovery: DEFAULT_DISCOVERIES_ENABLED
};
log.info("Starting js waku node with :", JSON.stringify(waku_options));
const routingInfos: RoutingInfo[] = [];
if (isAutoSharding(networkConfig)) {
nwakuArgs.numShardsInNetwork = networkConfig.numShardsInCluster;
nwakuArgs.contentTopic = options.contentTopics ?? [];
nwakuArgs.contentTopic.map((ct) =>
routingInfos.push(createRoutingInfo(networkConfig, { contentTopic: ct }))
);
if (options.relayShards && options.relayShards.length > 0)
throw "`relayShards` cannot be set for auto-sharding";
} else if (isStaticSharding(networkConfig) && options.relayShards) {
const shards = options.relayShards;
nwakuArgs.shard = shards;
shards.map((shardId) =>
routingInfos.push(createRoutingInfo(networkConfig, { shardId }))
);
if (options.contentTopics && options.contentTopics.length > 0)
throw "`contentTopics` cannot be set for static sharding";
} else {
throw "Invalid Network Config";
}
const jswakuRelayCreateOptions: RelayCreateOptions = {
routingInfos
};
await nwaku.start(nwakuArgs, { retries: 3 });
log.info("Starting js waku node with :", JSON.stringify(jswakuArgs));
let waku: WakuNode | undefined;
try {
waku = (await createNode(waku_options)) as unknown as WakuNode;
waku = (await createNode({
...jswakuArgs,
...jswakuRelayCreateOptions
})) as unknown as WakuNode;
await waku.start();
} catch (error) {
log.error("jswaku node failed to start:", error);
@ -68,7 +108,18 @@ export async function runNodes<T>(
if (waku) {
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers(protocols);
await nwaku.ensureSubscriptions(pubsubTopics);
// TODO
// const clusterId = networkConfig.clusterId;
// await nwaku.ensureSubscriptions(
// relayShardsToPubsubTopics({
// clusterId,
// shards: options.relayShards ?? []
// })
// );
return [nwaku, waku as T];
} else {
throw new Error("Failed to initialize waku");

View File

@ -1,12 +1,19 @@
import type { PeerId } from "@libp2p/interface";
import { peerIdFromString } from "@libp2p/peer-id";
import { Multiaddr, multiaddr } from "@multiformats/multiaddr";
import { isDefined, shardInfoToPubsubTopics } from "@waku/utils";
import { ContentTopic, PubsubTopic } from "@waku/interfaces";
import {
formatPubsubTopic,
isAutoSharding,
isDefined,
isStaticSharding,
RoutingInfo
} from "@waku/utils";
import { Logger } from "@waku/utils";
import pRetry from "p-retry";
import portfinder from "portfinder";
import { DefaultTestPubsubTopic } from "../constants.js";
import { DefaultTestNetworkConfig } from "../constants.js";
import {
Args,
LogLevel,
@ -245,9 +252,7 @@ export class ServiceNode {
);
}
public async ensureSubscriptions(
pubsubTopics: string[] = [DefaultTestPubsubTopic]
): Promise<boolean> {
public async ensureSubscriptions(pubsubTopics: string[]): Promise<boolean> {
return this.restCall<boolean>(
"/relay/v1/subscriptions",
"POST",
@ -256,13 +261,51 @@ export class ServiceNode {
);
}
public async messages(_pubsubTopic?: string): Promise<MessageRpcResponse[]> {
const pubsubTopic =
_pubsubTopic ??
shardInfoToPubsubTopics({
clusterId: this.args?.clusterId,
shards: this.args?.shard
})[0];
public async messages(
contentTopic?: ContentTopic
): Promise<MessageRpcResponse[]> {
if (contentTopic) {
return this.contentTopicMessages(contentTopic);
}
if (this.args?.contentTopic) {
if (this.args?.contentTopic.length > 1)
throw "More that one content topic passed, not supported";
const contentTopic = this.args?.contentTopic[0];
return this.contentTopicMessages(contentTopic);
}
if (this.args?.shard) {
if (this.args?.shard.length > 1)
throw "More that one shard passed, not supported";
const pubsubTopic = formatPubsubTopic(
this.args.clusterId ?? DefaultTestNetworkConfig.clusterId,
this.args?.shard[0]
);
return this.pubsubTopicMessages(pubsubTopic);
}
throw "Content topic, shard or pubsubTopic must be set";
}
private async contentTopicMessages(
contentTopic: ContentTopic
): Promise<MessageRpcResponse[]> {
return this.restCall<MessageRpcResponse[]>(
`/relay/v1/auto/messages/${encodeURIComponent(contentTopic)}`,
"GET",
null,
async (response) => {
const data = await response.json();
return data?.length ? data : [];
}
);
}
private async pubsubTopicMessages(
pubsubTopic: PubsubTopic
): Promise<MessageRpcResponse[]> {
return this.restCall<MessageRpcResponse[]>(
`/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`,
"GET",
@ -289,7 +332,20 @@ export class ServiceNode {
public async sendMessage(
message: MessageRpcQuery,
_pubsubTopic?: string
routingInfo: RoutingInfo
): Promise<boolean> {
if (isAutoSharding(routingInfo.networkConfig)) {
return this.sendMessageAutoSharding(message);
}
if (isStaticSharding(routingInfo.networkConfig)) {
return this.sendMessageStaticSharding(message, routingInfo.pubsubTopic);
}
throw "Invalid network config";
}
private async sendMessageStaticSharding(
message: MessageRpcQuery,
pubsubTopic: PubsubTopic
): Promise<boolean> {
this.checkProcess();
@ -297,21 +353,15 @@ export class ServiceNode {
message.timestamp = BigInt(new Date().valueOf()) * OneMillion;
}
const pubsubTopic =
_pubsubTopic ??
shardInfoToPubsubTopics({
clusterId: this.args?.clusterId,
shards: this.args?.shard
})[0];
return this.restCall<boolean>(
`/relay/v1/messages/${encodeURIComponent(pubsubTopic || DefaultTestPubsubTopic)}`,
`/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`,
"POST",
message,
async (response) => response.status === 200
);
}
public async sendMessageAutosharding(
private async sendMessageAutoSharding(
message: MessageRpcQuery
): Promise<boolean> {
this.checkProcess();
@ -398,7 +448,11 @@ export class ServiceNode {
if (body) options.body = JSON.stringify(body);
const response = await fetch(`${this.httpUrl}${endpoint}`, options);
log.info(`Received REST Response: `, response.status);
log.info(
`Received REST Response: `,
response.status,
response.statusText
);
return await processResponse(response);
} catch (error) {
log.error(`${this.httpUrl} failed with error:`, error);
@ -429,9 +483,7 @@ export function defaultArgs(): Args {
rest: true,
restAdmin: true,
websocketSupport: true,
logLevel: LogLevel.Trace,
clusterId: 0,
shard: [0]
logLevel: LogLevel.Trace
};
}

View File

@ -1,3 +1,5 @@
import type { ClusterId, ShardId } from "@waku/interfaces";
export interface Args {
staticnode?: string;
nat?: "none";
@ -21,8 +23,9 @@ export interface Args {
websocketPort?: number;
discv5BootstrapNode?: string;
discv5UdpPort?: number;
clusterId?: number;
shard?: Array<number>;
clusterId?: ClusterId;
shard?: Array<ShardId>;
numShardsInNetwork?: number;
rlnRelayEthClientAddress?: string;
}

View File

@ -1,13 +1,11 @@
import { createDecoder, createEncoder, Decoder, Encoder } from "@waku/core";
type TestDataOptions = {
pubsubTopic: string;
};
import { AutoSharding } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
// Utility to generate test data for multiple topics tests.
export function generateTestData(
topicCount: number,
options?: TestDataOptions
networkConfig: AutoSharding
): {
contentTopics: string[];
encoders: Encoder[];
@ -15,14 +13,22 @@ export function generateTestData(
} {
const contentTopics = Array.from(
{ length: topicCount },
(_, i) => `/test/${i + 1}/waku-multi/default`
// Remember that auto-sharding uses both app name and app version fields
(_, i) => `/test/0/waku-multi-${i + 1}/default`
);
const encoders = contentTopics.map((topic) =>
createEncoder({ contentTopic: topic, pubsubTopic: options?.pubsubTopic })
createEncoder({
contentTopic: topic,
routingInfo: createRoutingInfo(networkConfig, { contentTopic: topic })
})
);
const decoders = contentTopics.map((topic) =>
createDecoder(topic, options?.pubsubTopic)
createDecoder(
topic,
createRoutingInfo(networkConfig, { contentTopic: topic })
)
);
return {
contentTopics,
encoders,

View File

@ -1,13 +1,11 @@
import {
CreateNodeOptions,
DefaultNetworkConfig,
IWaku,
LightNode,
NetworkConfig,
Protocols
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { derivePubsubTopicsFromNetworkConfig } from "@waku/utils";
import { RoutingInfo } from "@waku/utils";
import { Context } from "mocha";
import pRetry from "p-retry";
@ -18,9 +16,20 @@ import { Args } from "../types.js";
import { waitForConnections } from "./waitForConnections.js";
/**
* Runs both js-waku and nwaku nodes.
*
* @param context
* @param routingInfo
* @param customArgs passed to nwaku service nodes
* @param strictChecking
* @param numServiceNodes
* @param withoutFilter
* @param jsWakuParams
*/
export async function runMultipleNodes(
context: Context,
networkConfig: NetworkConfig = DefaultNetworkConfig,
routingInfo: RoutingInfo,
customArgs?: Args,
strictChecking: boolean = false,
numServiceNodes = 2,
@ -32,7 +41,7 @@ export async function runMultipleNodes(
context,
numServiceNodes,
strictChecking,
networkConfig,
routingInfo,
customArgs,
withoutFilter
);
@ -42,7 +51,7 @@ export async function runMultipleNodes(
libp2p: {
addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] }
},
networkConfig,
networkConfig: routingInfo.networkConfig,
lightPush: { numPeersToUse: numServiceNodes },
discovery: DEFAULT_DISCOVERIES_ENABLED,
...jsWakuParams
@ -57,9 +66,10 @@ export async function runMultipleNodes(
for (const node of serviceNodes.nodes) {
await waku.dial(await node.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);
await node.ensureSubscriptions(
derivePubsubTopicsFromNetworkConfig(networkConfig)
);
// TODO
// await node.ensureSubscriptions(
// derivePubsubTopicsFromNetworkConfig(networkConfig)
// );
const wakuConnections = waku.libp2p.getConnections();

View File

@ -9,7 +9,7 @@ import {
teardownNodesWithRedundancy
} from "../../src/index.js";
import { TestShardInfo } from "./utils.js";
import { TestRoutingInfo } from "./utils.js";
describe("Connection Limiter", function () {
let waku: LightNode;
@ -18,7 +18,7 @@ describe("Connection Limiter", function () {
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true, peerExchange: true },
false,
2,
@ -68,7 +68,7 @@ describe("Connection Limiter", function () {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true, peerExchange: true },
false,
2,
@ -126,7 +126,7 @@ describe("Connection Limiter", function () {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true, peerExchange: true },
false,
2,

View File

@ -10,7 +10,7 @@ import {
teardownNodesWithRedundancy
} from "../../src/index.js";
import { TestShardInfo } from "./utils.js";
import { TestRoutingInfo } from "./utils.js";
describe("Dialing", function () {
const ctx: Context = this.ctx;
@ -20,7 +20,7 @@ describe("Dialing", function () {
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true, peerExchange: true },
false,
2,
@ -33,7 +33,7 @@ describe("Dialing", function () {
ctx,
2,
false,
TestShardInfo,
TestRoutingInfo,
{
lightpush: true,
filter: true,

View File

@ -11,7 +11,7 @@ import {
teardownNodesWithRedundancy
} from "../../src/index.js";
import { TestShardInfo } from "./utils.js";
import { TestRoutingInfo } from "./utils.js";
// TODO: investigate and re-enable in https://github.com/waku-org/js-waku/issues/2453
describe.skip("DiscoveryDialer", function () {
@ -22,7 +22,7 @@ describe.skip("DiscoveryDialer", function () {
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true, peerExchange: true },
false,
2,
@ -35,7 +35,7 @@ describe.skip("DiscoveryDialer", function () {
ctx,
2,
false,
TestShardInfo,
TestRoutingInfo,
{
lightpush: true,
filter: true,

View File

@ -11,7 +11,8 @@ import { expect } from "chai";
import {
afterEachCustom,
beforeEachCustom,
DefaultTestShardInfo,
DefaultTestNetworkConfig,
DefaultTestRoutingInfo,
delay,
NOISE_KEY_1
} from "../../src/index.js";
@ -36,7 +37,7 @@ describe("Connection state", function () {
let originalNavigator: any;
beforeEachCustom(this, async () => {
waku = await createLightNode({ networkConfig: DefaultTestShardInfo });
waku = await createLightNode({ networkConfig: DefaultTestNetworkConfig });
nwaku1 = new ServiceNode(makeLogFileName(this.ctx) + "1");
nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2");
await nwaku1.start({ filter: true });
@ -104,11 +105,13 @@ describe("Connection state", function () {
it("`waku:online` between 2 js-waku relay nodes", async function () {
const waku1 = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
});
const waku2 = await createRelayNode({
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
});
let eventCount1 = 0;
@ -171,10 +174,12 @@ describe("Connection state", function () {
it("isConnected between 2 js-waku relay nodes", async function () {
const waku1 = await createRelayNode({
staticNoiseKey: NOISE_KEY_1
staticNoiseKey: NOISE_KEY_1,
routingInfos: [DefaultTestRoutingInfo]
});
const waku2 = await createRelayNode({
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
routingInfos: [DefaultTestRoutingInfo]
});
await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
multiaddrs: waku2.libp2p.getMultiaddrs()

View File

@ -1,6 +1,11 @@
import { createRoutingInfo } from "@waku/utils";
export const TestContentTopic = "/test/1/waku-light-push/utf8";
export const ClusterId = 3;
export const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: ClusterId
export const TestClusterId = 2;
export const TestNetworkConfig = {
clusterId: TestClusterId,
numShardsInCluster: 8 // Cannot be under 8 for nwaku 0.36.0 and below
};
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});

View File

@ -6,12 +6,16 @@ import { expect } from "chai";
import {
afterEachCustom,
DefaultTestClusterId,
DefaultTestContentTopic,
DefaultTestNetworkConfig,
DefaultTestNumShardsInCluster,
DefaultTestRoutingInfo,
makeLogFileName,
NOISE_KEY_1,
ServiceNode,
tearDownNodes
} from "../src/index.js";
import { DefaultTestShardInfo } from "../src/index.js";
describe("ENR Interop: ServiceNode", function () {
let waku: RelayNode;
@ -29,14 +33,16 @@ describe("ENR Interop: ServiceNode", function () {
store: false,
filter: false,
lightpush: false,
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards
clusterId: DefaultTestClusterId,
numShardsInNetwork: DefaultTestNumShardsInCluster,
contentTopic: [DefaultTestContentTopic]
});
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
});
await waku.start();
await waku.dial(multiAddrWithId);
@ -64,14 +70,16 @@ describe("ENR Interop: ServiceNode", function () {
store: true,
filter: false,
lightpush: false,
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards
clusterId: DefaultTestClusterId,
numShardsInNetwork: DefaultTestNumShardsInCluster,
contentTopic: [DefaultTestContentTopic]
});
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
});
await waku.start();
await waku.dial(multiAddrWithId);
@ -99,14 +107,16 @@ describe("ENR Interop: ServiceNode", function () {
store: true,
filter: true,
lightpush: true,
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards
clusterId: DefaultTestClusterId,
numShardsInNetwork: DefaultTestNumShardsInCluster,
contentTopic: [DefaultTestContentTopic]
});
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
});
await waku.start();
await waku.dial(multiAddrWithId);

View File

@ -1,5 +1,5 @@
import { createDecoder, createEncoder } from "@waku/core";
import { Protocols } from "@waku/interfaces";
import { AutoSharding, Protocols } from "@waku/interfaces";
import type { IDecodedMessage, LightNode } from "@waku/interfaces";
import {
generatePrivateKey,
@ -15,11 +15,7 @@ import {
createEncoder as createSymEncoder
} from "@waku/message-encryption/symmetric";
import { createLightNode } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex,
Logger
} from "@waku/utils";
import { createRoutingInfo, Logger } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -36,15 +32,21 @@ import {
const log = new Logger("test:ephemeral");
const ClusterId = 2;
const TestClusterId = 2;
const TestNetworkConfig: AutoSharding = {
clusterId: TestClusterId,
numShardsInCluster: 8
};
const TestContentTopic = "/test/1/ephemeral/utf8";
const PubsubTopic = contentTopicToPubsubTopic(TestContentTopic, ClusterId);
const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const TestDecoder = createDecoder(TestContentTopic, PubsubTopic);
const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);
const privateKey = generatePrivateKey();
const symKey = generateSymmetricKey();
@ -57,26 +59,26 @@ const AsymEncoder = createEciesEncoder({
contentTopic: AsymContentTopic,
publicKey,
ephemeral: true,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const SymEncoder = createSymEncoder({
contentTopic: SymContentTopic,
symKey,
ephemeral: true,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const ClearEncoder = createEncoder({
contentTopic: TestContentTopic,
ephemeral: true,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const AsymDecoder = createEciesDecoder(
AsymContentTopic,
privateKey,
PubsubTopic
TestRoutingInfo,
privateKey
);
const SymDecoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic);
const SymDecoder = createSymDecoder(SymContentTopic, TestRoutingInfo, symKey);
describe("Waku Message Ephemeral field", function () {
let waku: LightNode;
@ -95,8 +97,7 @@ describe("Waku Message Ephemeral field", function () {
store: true,
relay: true,
contentTopic: contentTopics,
clusterId: ClusterId,
shard: contentTopics.map((t) => contentTopicToShardIndex(t))
clusterId: TestClusterId
});
await nwaku.ensureSubscriptionsAutosharding([
TestContentTopic,
@ -107,10 +108,7 @@ describe("Waku Message Ephemeral field", function () {
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
networkConfig: {
contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
clusterId: ClusterId
}
networkConfig: TestNetworkConfig
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
@ -138,17 +136,11 @@ describe("Waku Message Ephemeral field", function () {
const [waku1, waku2, nimWakuMultiaddr] = await Promise.all([
createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: {
contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
clusterId: ClusterId
}
networkConfig: TestNetworkConfig
}).then((waku) => waku.start().then(() => waku)),
createLightNode({
staticNoiseKey: NOISE_KEY_2,
networkConfig: {
contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
clusterId: ClusterId
}
networkConfig: TestNetworkConfig
}).then((waku) => waku.start().then(() => waku)),
nwaku.getMultiaddrWithId()
]);
@ -200,7 +192,7 @@ describe("Waku Message Ephemeral field", function () {
const ephemeralEncoder = createEncoder({
contentTopic: TestContentTopic,
ephemeral: true,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const messages: IDecodedMessage[] = [];
@ -246,9 +238,9 @@ describe("Waku Message Ephemeral field", function () {
const encoder = createSymEncoder({
contentTopic: SymContentTopic,
symKey,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const decoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic);
const decoder = createSymDecoder(SymContentTopic, TestRoutingInfo, symKey);
const messages: IDecodedMessage[] = [];
const callback = (msg: IDecodedMessage): void => {
@ -293,12 +285,12 @@ describe("Waku Message Ephemeral field", function () {
const encoder = createEciesEncoder({
contentTopic: AsymContentTopic,
publicKey: publicKey,
pubsubTopic: PubsubTopic
routingInfo: TestRoutingInfo
});
const decoder = createEciesDecoder(
AsymContentTopic,
privateKey,
PubsubTopic
TestRoutingInfo,
privateKey
);
const messages: IDecodedMessage[] = [];

View File

@ -1,5 +1,6 @@
import { LightNode, Protocols } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import {
@ -18,8 +19,7 @@ import {
TestContentTopic,
TestDecoder,
TestEncoder,
TestPubsubTopic,
TestShardInfo
TestRoutingInfo
} from "./utils.js";
const runTests = (strictCheckNodes: boolean): void => {
@ -32,7 +32,7 @@ const runTests = (strictCheckNodes: boolean): void => {
beforeEachCustom(this, async () => {
ctx = this.ctx;
[serviceNodes, waku] = await runMultipleNodes(this.ctx, TestShardInfo, {
[serviceNodes, waku] = await runMultipleNodes(this.ctx, TestRoutingInfo, {
lightpush: true,
filter: true
});
@ -59,7 +59,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: testItem.value,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -78,7 +78,7 @@ const runTests = (strictCheckNodes: boolean): void => {
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
timestamp: testItem as any
},
TestPubsubTopic
TestRoutingInfo
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -88,7 +88,7 @@ const runTests = (strictCheckNodes: boolean): void => {
expectedMessageText: messageText,
checkTimestamp: false,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
// Check if the timestamp matches
@ -117,7 +117,7 @@ const runTests = (strictCheckNodes: boolean): void => {
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
timestamp: "2023-09-06T12:05:38.609Z" as any
},
TestPubsubTopic
TestRoutingInfo
);
// Verify that no message was received
@ -133,20 +133,21 @@ const runTests = (strictCheckNodes: boolean): void => {
);
await delay(400);
const wrongContentTopic = "/wrong/1/ContentTopic/proto";
await serviceNodes.sendRelayMessage(
{
contentTopic: TestContentTopic,
contentTopic: wrongContentTopic,
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
timestamp: BigInt(Date.now()) * BigInt(1000000)
},
"WrongContentTopic"
createRoutingInfo(TestRoutingInfo.networkConfig, {
contentTopic: "/wrong/1/ContentTopic/proto"
})
);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});
it("Check message with no pubsub topic is not received", async function () {
@ -184,7 +185,7 @@ const runTests = (strictCheckNodes: boolean): void => {
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
timestamp: BigInt(Date.now()) * BigInt(1000000)
},
TestPubsubTopic
TestRoutingInfo
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -205,7 +206,7 @@ const runTests = (strictCheckNodes: boolean): void => {
timestamp: BigInt(Date.now()) * BigInt(1000000),
payload: undefined as any
},
TestPubsubTopic
TestRoutingInfo
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -226,7 +227,7 @@ const runTests = (strictCheckNodes: boolean): void => {
payload: 12345 as unknown as string,
timestamp: BigInt(Date.now()) * BigInt(1000000)
},
TestPubsubTopic
TestRoutingInfo
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -267,12 +268,12 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
@ -289,7 +290,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
await teardownNodesWithRedundancy(serviceNodes, []);
@ -297,7 +298,7 @@ const runTests = (strictCheckNodes: boolean): void => {
ctx,
2,
false,
TestShardInfo,
TestRoutingInfo,
{
lightpush: true,
filter: true,
@ -334,7 +335,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});

View File

@ -8,6 +8,7 @@ import {
symmetric
} from "@waku/message-encryption";
import { Protocols, utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import {
@ -27,15 +28,15 @@ import {
} from "../../src/index.js";
import {
ClusterId,
messagePayload,
messageText,
ShardIndex,
TestClusterId,
TestContentTopic,
TestDecoder,
TestEncoder,
TestPubsubTopic,
TestShardInfo
TestNetworkConfig,
TestRoutingInfo,
TestShardIndex
} from "./utils.js";
const runTests = (strictCheckNodes: boolean): void => {
@ -47,7 +48,7 @@ const runTests = (strictCheckNodes: boolean): void => {
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
undefined,
strictCheckNodes
);
@ -84,12 +85,12 @@ const runTests = (strictCheckNodes: boolean): void => {
const encoder = ecies.createEncoder({
contentTopic: TestContentTopic,
publicKey,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
const decoder = ecies.createDecoder(
TestContentTopic,
privateKey,
TestPubsubTopic
TestRoutingInfo,
privateKey
);
await waku.filter.subscribe(
@ -106,7 +107,7 @@ const runTests = (strictCheckNodes: boolean): void => {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedVersion: 1,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
await serviceNodes.confirmMessageLength(2);
@ -117,12 +118,12 @@ const runTests = (strictCheckNodes: boolean): void => {
const encoder = symmetric.createEncoder({
contentTopic: TestContentTopic,
symKey,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
const decoder = symmetric.createDecoder(
TestContentTopic,
symKey,
TestPubsubTopic
TestRoutingInfo,
symKey
);
await waku.filter.subscribe(
@ -139,7 +140,7 @@ const runTests = (strictCheckNodes: boolean): void => {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedVersion: 1,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
await serviceNodes.confirmMessageLength(2);
@ -158,7 +159,7 @@ const runTests = (strictCheckNodes: boolean): void => {
contentTopic: TestContentTopic,
payload: utf8ToBytes(messageText)
});
await serviceNodes.sendRelayMessage(relayMessage, TestPubsubTopic);
await serviceNodes.sendRelayMessage(relayMessage, TestRoutingInfo);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
@ -166,7 +167,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
await serviceNodes.confirmMessageLength(1);
@ -219,18 +220,20 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
// Modify subscription to include a new content topic and send a message.
const newMessageText = "Filtering still works!";
const newMessagePayload = { payload: utf8ToBytes(newMessageText) };
const newContentTopic = "/test/2/waku-filter/default";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -244,26 +247,30 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedContentTopic: newContentTopic,
expectedMessageText: newMessageText,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
// Send another message on the initial content topic to verify it still works.
await waku.lightPush.send(TestEncoder, newMessagePayload);
const thirdMessageText = "Filtering still works on first subscription!";
const thirdMessagePayload = { payload: utf8ToBytes(thirdMessageText) };
await waku.lightPush.send(TestEncoder, thirdMessagePayload);
expect(await serviceNodes.messageCollector.waitForMessages(3)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(2, {
expectedMessageText: newMessageText,
expectedMessageText: thirdMessageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
await serviceNodes.confirmMessageLength(3);
// This relies on nwaku not emptying the relay cache
// We received the 3 messages already, what else are checking?
// await serviceNodes.confirmMessageLength(3);
});
it("Subscribe and receives messages on 20 topics", async function () {
const topicCount = 20;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);
// Subscribe to all 20 topics.
for (let i = 0; i < topicCount; i++) {
@ -288,7 +295,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(index, {
expectedContentTopic: topic,
expectedMessageText: `Message for Topic ${index + 1}`,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -297,7 +304,7 @@ const runTests = (strictCheckNodes: boolean): void => {
it.skip("Subscribe to 30 topics in separate streams (30 streams for Filter is limit) at once and receives messages", async function () {
this.timeout(100_000);
const topicCount = 30;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);
for (let i = 0; i < topicCount; i++) {
await waku.filter.subscribe(
@ -321,7 +328,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(index, {
expectedContentTopic: topic,
expectedMessageText: `Message for Topic ${index + 1}`,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -329,7 +336,7 @@ const runTests = (strictCheckNodes: boolean): void => {
it("Subscribe to 100 topics (new limit) at once and receives messages", async function () {
this.timeout(100_000);
const topicCount = 100;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);
await waku.filter.subscribe(
td.decoders,
@ -351,14 +358,14 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(index, {
expectedContentTopic: topic,
expectedMessageText: `Message for Topic ${index + 1}`,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
it("Error when try to subscribe to more than 101 topics (new limit)", async function () {
const topicCount = 101;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);
try {
await waku.filter.subscribe(
@ -382,14 +389,10 @@ const runTests = (strictCheckNodes: boolean): void => {
it("Overlapping topic subscription", async function () {
// Define two sets of test data with overlapping topics.
const topicCount1 = 2;
const td1 = generateTestData(topicCount1, {
pubsubTopic: TestPubsubTopic
});
const td1 = generateTestData(topicCount1, TestNetworkConfig);
const topicCount2 = 4;
const td2 = generateTestData(topicCount2, {
pubsubTopic: TestPubsubTopic
});
const td2 = generateTestData(topicCount2, TestNetworkConfig);
await waku.filter.subscribe(
td1.decoders,
@ -445,31 +448,25 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
TEST_STRING.forEach((testItem) => {
it(`Subscribe to topic containing ${testItem.description} and receive message`, async function () {
const newContentTopic = testItem.value;
const newContentTopic = `/test/0/${testItem.description}/test`;
const newEncoder = waku.createEncoder({
contentTopic: newContentTopic,
shardInfo: {
clusterId: ClusterId,
shard: ShardIndex
}
shardId: TestShardIndex
});
const newDecoder = waku.createDecoder({
contentTopic: newContentTopic,
shardInfo: {
clusterId: ClusterId,
shard: ShardIndex
}
shardId: TestShardIndex
});
await waku.filter.subscribe(
@ -484,7 +481,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: newContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -497,11 +494,15 @@ const runTests = (strictCheckNodes: boolean): void => {
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
const newContentTopic = "/test/2/waku-filter/default";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -516,12 +517,12 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedContentTopic: newContentTopic,
expectedMessageText: "M2",
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: newRoutingInfo.pubsubTopic
});
});
@ -570,96 +571,127 @@ const runTests = (strictCheckNodes: boolean): void => {
expectedContentTopic: TestContentTopic
});
});
});
it("Subscribe and receive messages from 2 nwaku nodes each with different pubsubtopics", async function () {
describe("Filter subscribe test with static sharding", function () {
this.timeout(100000);
let waku: LightNode;
let serviceNodes: ServiceNodesFleet;
const networkConfig = { clusterId: TestClusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId: 3 });
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
routingInfo,
{},
strictCheckNodes
);
});
afterEachCustom(this, async () => {
await teardownNodesWithRedundancy(serviceNodes, waku);
});
});
};
[true, false].map((strictCheckNodes) => runTests(strictCheckNodes));
const runTestsStatic = (strictCheckNodes: boolean): void => {
describe(`Waku Filter: Subscribe: Multiple Service Nodes on Static Shard: Strict Check mode: ${strictCheckNodes}`, function () {
this.timeout(100000);
let waku: LightNode;
let serviceNodes: ServiceNodesFleet;
const staticNetworkConfig = { clusterId: 9 };
const routingInfoShard1 = createRoutingInfo(staticNetworkConfig, {
shardId: 1
});
const encoderShard1 = createEncoder({
contentTopic: TestContentTopic,
routingInfo: routingInfoShard1
});
const decoderShard1 = createDecoder(TestContentTopic, routingInfoShard1);
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
routingInfoShard1,
undefined,
strictCheckNodes
);
});
afterEachCustom(this, async () => {
await teardownNodesWithRedundancy(serviceNodes, waku);
});
it("Subscribe and receive messages from 2 nwaku nodes each with different static shards", async function () {
await waku.filter.subscribe(
TestDecoder,
decoderShard1,
serviceNodes.messageCollector.callback
);
// Set up and start a new nwaku node with customPubsubTopic1
// Set up and start a new nwaku node on different shard
const nwaku2 = new ServiceNode(makeLogFileName(this) + "3");
try {
const customContentTopic = "/test/4/waku-filter/default";
const customDecoder = createDecoder(customContentTopic, {
clusterId: ClusterId,
shard: 4
const routingInfoShard2 = createRoutingInfo(staticNetworkConfig, {
shardId: 2
});
const customEncoder = createEncoder({
contentTopic: customContentTopic,
pubsubTopicShardInfo: { clusterId: ClusterId, shard: 4 }
const contentTopic2 = "/test/4/waku-filter/default";
const decoderShard2 = createDecoder(contentTopic2, routingInfoShard2);
const encoderShard2 = createEncoder({
contentTopic: contentTopic2,
routingInfo: routingInfoShard2
});
await nwaku2.start({
filter: true,
lightpush: true,
relay: true,
clusterId: ClusterId,
shard: [4]
clusterId: TestClusterId,
shard: [2]
});
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);
await nwaku2.ensureSubscriptions([customDecoder.pubsubTopic]);
// TODO
// await nwaku2.ensureSubscriptions([customDecoder.pubsubTopic]);
const messageCollector2 = new MessageCollector();
await waku.filter.subscribe(customDecoder, messageCollector2.callback);
await waku.filter.subscribe(decoderShard2, messageCollector2.callback);
// Making sure that messages are send and reveiced for both subscriptions
// Making sure that messages are send and received for both subscriptions
// While loop is done because of https://github.com/waku-org/js-waku/issues/1606
while (
!(await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestDecoder.pubsubTopic
})) ||
!(await messageCollector2.waitForMessages(1, {
pubsubTopic: customDecoder.pubsubTopic
}))
!(await serviceNodes.messageCollector.waitForMessages(1)) ||
!(await messageCollector2.waitForMessages(1))
) {
await waku.lightPush.send(TestEncoder, {
await waku.lightPush.send(encoderShard1, {
payload: utf8ToBytes("M1")
});
await waku.lightPush.send(customEncoder, {
await waku.lightPush.send(encoderShard2, {
payload: utf8ToBytes("M2")
});
}
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedContentTopic: TestDecoder.contentTopic,
expectedPubsubTopic: TestDecoder.pubsubTopic,
expectedContentTopic: encoderShard1.contentTopic,
expectedPubsubTopic: routingInfoShard1.pubsubTopic,
expectedMessageText: "M1"
});
messageCollector2.verifyReceivedMessage(0, {
expectedContentTopic: customDecoder.contentTopic,
expectedPubsubTopic: customDecoder.pubsubTopic,
expectedContentTopic: encoderShard2.contentTopic,
expectedPubsubTopic: routingInfoShard2.pubsubTopic,
expectedMessageText: "M2"
});
} catch (e) {
await tearDownNodes([nwaku2], []);
}
});
it("Should fail to subscribe with decoder with wrong shard", async function () {
const wrongDecoder = createDecoder(TestDecoder.contentTopic, {
clusterId: ClusterId,
shard: 5
});
// this subscription object is set up with the `customPubsubTopic1` but we're passing it a Decoder with the `customPubsubTopic2`
try {
await waku.filter.subscribe(
wrongDecoder,
serviceNodes.messageCollector.callback
);
} catch (error) {
expect((error as Error).message).to.include(
`Pubsub topic ${wrongDecoder.pubsubTopic} has not been configured on this instance.`
);
}
});
});
};
[true, false].map((strictCheckNodes) => runTests(strictCheckNodes));
[true, false].map((strictCheckNodes) => runTestsStatic(strictCheckNodes));

View File

@ -1,6 +1,7 @@
import { createDecoder, createEncoder } from "@waku/core";
import { type LightNode } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import {
@ -13,13 +14,13 @@ import {
} from "../../src/index.js";
import {
ClusterId,
messagePayload,
messageText,
TestContentTopic,
TestDecoder,
TestEncoder,
TestPubsubTopic
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";
const runTests = (strictCheckNodes: boolean): void => {
@ -30,14 +31,10 @@ const runTests = (strictCheckNodes: boolean): void => {
let serviceNodes: ServiceNodesFleet;
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{
contentTopics: [TestContentTopic],
clusterId: ClusterId
},
{ filter: true, lightpush: true }
);
[serviceNodes, waku] = await runMultipleNodes(this.ctx, TestRoutingInfo, {
filter: true,
lightpush: true
});
});
afterEachCustom(this, async () => {
@ -77,12 +74,15 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.callback
);
const newContentTopic = "/test/2/waku-filter";
const newContentTopic = "/test/2/waku-filter/proto";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -103,7 +103,6 @@ const runTests = (strictCheckNodes: boolean): void => {
// Check that from 4 messages send 3 were received
expect(serviceNodes.messageCollector.count).to.eq(3);
await serviceNodes.confirmMessageLength(4);
});
it("Unsubscribe 2 topics - node subscribed to 2 topics", async function () {
@ -112,12 +111,15 @@ const runTests = (strictCheckNodes: boolean): void => {
TestDecoder,
serviceNodes.messageCollector.callback
);
const newContentTopic = "/test/2/waku-filter";
const newContentTopic = "/test/2/waku-filter/proto";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -140,7 +142,6 @@ const runTests = (strictCheckNodes: boolean): void => {
// Check that from 4 messages send 2 were received
expect(serviceNodes.messageCollector.count).to.eq(2);
await serviceNodes.confirmMessageLength(4);
});
it("Unsubscribe topics the node is not subscribed to", async function () {
@ -159,7 +160,12 @@ const runTests = (strictCheckNodes: boolean): void => {
// Unsubscribe from topics that the node is not not subscribed to and send again
await waku.filter.unsubscribe(
createDecoder("/test/2/waku-filter", TestDecoder.pubsubTopic)
createDecoder(
"/test/2/waku-filter/proto",
createRoutingInfo(TestNetworkConfig, {
contentTopic: "/test/2/waku-filter/proto"
})
)
);
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M2") });
expect(await serviceNodes.messageCollector.waitForMessages(2)).to.eq(
@ -174,7 +180,7 @@ const runTests = (strictCheckNodes: boolean): void => {
it("Unsubscribe from 100 topics (new limit) at once and receives messages", async function () {
this.timeout(100_000);
const topicCount = 100;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);
await waku.filter.subscribe(
td.decoders,
@ -194,7 +200,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(index, {
expectedContentTopic: topic,
expectedMessageText: `Message for Topic ${index + 1}`,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});

View File

@ -1,142 +1,27 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
CreateNodeOptions,
DefaultNetworkConfig,
IWaku,
LightNode,
NetworkConfig,
Protocols
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex,
derivePubsubTopicsFromNetworkConfig,
createRoutingInfo,
Logger
} from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { Context } from "mocha";
import pRetry from "p-retry";
import {
NOISE_KEY_1,
ServiceNodesFleet,
waitForConnections
} from "../../src/index.js";
// Constants for test configuration.
export const log = new Logger("test:filter");
export const TestContentTopic = "/test/1/waku-filter/default";
export const ClusterId = 2;
export const ShardIndex = contentTopicToShardIndex(TestContentTopic);
export const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: ClusterId
export const TestClusterId = 2;
export const TestShardIndex = contentTopicToShardIndex(TestContentTopic);
export const TestNetworkConfig = {
clusterId: TestClusterId,
numShardsInCluster: 8
};
export const TestPubsubTopic = contentTopicToPubsubTopic(
TestContentTopic,
ClusterId
);
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic);
export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);
export const messageText = "Filtering works!";
export const messagePayload = { payload: utf8ToBytes(messageText) };
export async function runMultipleNodes(
context: Context,
networkConfig: NetworkConfig = DefaultNetworkConfig,
strictChecking: boolean = false,
numServiceNodes = 3,
withoutFilter = false
): Promise<[ServiceNodesFleet, LightNode]> {
const pubsubTopics = derivePubsubTopicsFromNetworkConfig(networkConfig);
// create numServiceNodes nodes
const serviceNodes = await ServiceNodesFleet.createAndRun(
context,
numServiceNodes,
strictChecking,
networkConfig,
undefined,
withoutFilter
);
const wakuOptions: CreateNodeOptions = {
staticNoiseKey: NOISE_KEY_1,
libp2p: {
addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] }
}
};
log.info("Starting js waku node with :", JSON.stringify(wakuOptions));
let waku: LightNode | undefined;
try {
waku = await createLightNode(wakuOptions);
await waku.start();
} catch (error) {
log.error("jswaku node failed to start:", error);
}
if (!waku) {
throw new Error("Failed to initialize waku");
}
for (const node of serviceNodes.nodes) {
await waku.dial(await node.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);
await node.ensureSubscriptions(pubsubTopics);
const wakuConnections = waku.libp2p.getConnections();
if (wakuConnections.length < 1) {
throw new Error(`Expected at least 1 connection for js-waku.`);
}
await node.waitForLog(waku.libp2p.peerId.toString(), 100);
}
await waitForConnections(numServiceNodes, waku);
return [serviceNodes, waku];
}
export async function teardownNodesWithRedundancy(
serviceNodes: ServiceNodesFleet,
wakuNodes: IWaku | IWaku[]
): Promise<void> {
const wNodes = Array.isArray(wakuNodes) ? wakuNodes : [wakuNodes];
const stopNwakuNodes = serviceNodes.nodes.map(async (node) => {
await pRetry(
async () => {
try {
await node.stop();
} catch (error) {
log.error("Service Node failed to stop:", error);
throw error;
}
},
{ retries: 3 }
);
});
const stopWakuNodes = wNodes.map(async (waku) => {
if (waku) {
await pRetry(
async () => {
try {
await waku.stop();
} catch (error) {
log.error("Waku failed to stop:", error);
throw error;
}
},
{ retries: 3 }
);
}
});
await Promise.all([...stopNwakuNodes, ...stopWakuNodes]);
}

View File

@ -14,14 +14,11 @@ import {
} from "../../src/index.js";
import {
ClusterId,
messagePayload,
messageText,
ShardIndex,
TestContentTopic,
TestEncoder,
TestPubsubTopic,
TestShardInfo
TestRoutingInfo
} from "./utils.js";
const runTests = (strictNodeCheck: boolean): void => {
@ -35,7 +32,7 @@ const runTests = (strictNodeCheck: boolean): void => {
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true },
strictNodeCheck,
numServiceNodes,
@ -54,20 +51,18 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: testItem.value,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
// TODO: skiped till https://github.com/waku-org/nwaku/issues/3369 resolved
// TODO: skipped till https://github.com/waku-org/nwaku/issues/3369 resolved
it.skip("Push 30 different messages", async function () {
const generateMessageText = (index: number): string => `M${index}`;
@ -79,17 +74,15 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.successes.length).to.eq(numServiceNodes);
}
expect(
await serviceNodes.messageCollector.waitForMessages(30, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(30)).to.eq(
true
);
for (let i = 0; i < 30; i++) {
serviceNodes.messageCollector.verifyReceivedMessage(i, {
expectedMessageText: generateMessageText(i),
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
}
});
@ -105,21 +98,16 @@ const runTests = (strictNodeCheck: boolean): void => {
ProtocolError.EMPTY_PAYLOAD
);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});
TEST_STRING.forEach((testItem) => {
[{ description: "short", value: "hi" }].forEach((testItem) => {
it(`Push message with content topic containing ${testItem.description}`, async function () {
const contentTopic = `/test/1/${testItem.value}/proto`;
const customEncoder = waku.createEncoder({
contentTopic: testItem.value,
shardInfo: {
clusterId: ClusterId,
shard: ShardIndex
}
contentTopic
});
const pushResponse = await waku.lightPush.send(
customEncoder,
@ -129,13 +117,13 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
contentTopic
})
).to.eq(true);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: testItem.value,
expectedPubsubTopic: TestPubsubTopic
expectedContentTopic: contentTopic,
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -144,7 +132,7 @@ const runTests = (strictNodeCheck: boolean): void => {
const customTestEncoder = createEncoder({
contentTopic: TestContentTopic,
metaSetter: () => new Uint8Array(10),
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
const pushResponse = await waku.lightPush.send(
@ -153,22 +141,20 @@ const runTests = (strictNodeCheck: boolean): void => {
);
expect(pushResponse.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
it("Fails to push message with large meta", async function () {
const customTestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic,
routingInfo: TestRoutingInfo,
metaSetter: () => new Uint8Array(105024) // see the note below ***
});
@ -176,7 +162,7 @@ const runTests = (strictNodeCheck: boolean): void => {
// `nwaku` establishes the max lightpush msg size as `const MaxRpcSize* = MaxWakuMessageSize + 64 * 1024`
// see: https://github.com/waku-org/nwaku/blob/07beea02095035f4f4c234ec2dec1f365e6955b8/waku/waku_lightpush/rpc_codec.nim#L15
// In the PR https://github.com/waku-org/nwaku/pull/2298 we reduced the MaxWakuMessageSize
// from 1MiB to 150KiB. Therefore, the 105024 number comes from substracting ( 1*2^20 - 150*2^10 )
// from 1MiB to 150KiB. Therefore, the 105024 number comes from subtracting ( 1*2^20 - 150*2^10 )
// to the original 10^6 that this test had when MaxWakuMessageSize == 1*2^20
const pushResponse = await waku.lightPush.send(
@ -188,11 +174,9 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.REMOTE_PEER_REJECTED
);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});
it("Push message with rate limit", async function () {
@ -212,15 +196,13 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
@ -236,16 +218,14 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedTimestamp: testItem,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -268,11 +248,9 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.SIZE_TOO_BIG
);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});
});
};

View File

@ -1,141 +1,144 @@
import { createEncoder } from "@waku/core";
import { LightNode, Protocols } from "@waku/interfaces";
import { contentTopicToPubsubTopic } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
// TODO: This test is useless because the content topics all start
// with `/test/` meaning they are in the same shard
import {
afterEachCustom,
beforeEachCustom,
makeLogFileName,
MessageCollector,
runMultipleNodes,
ServiceNode,
ServiceNodesFleet,
tearDownNodes,
teardownNodesWithRedundancy
} from "../../src/index.js";
import { ClusterId, TestEncoder } from "./utils.js";
describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
this.timeout(30000);
const numServiceNodes = 2;
let waku: LightNode;
let serviceNodes: ServiceNodesFleet;
const customEncoder2 = createEncoder({
contentTopic: "/test/2/waku-light-push/utf8",
pubsubTopic: contentTopicToPubsubTopic(
"/test/2/waku-light-push/utf8",
ClusterId
)
});
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{
clusterId: ClusterId,
contentTopics: [TestEncoder.contentTopic, customEncoder2.contentTopic]
},
{ lightpush: true, filter: true },
false,
numServiceNodes,
false
);
});
afterEachCustom(this, async () => {
await teardownNodesWithRedundancy(serviceNodes, waku);
});
it("Subscribe and receive messages on 2 different pubsubtopics", async function () {
const pushResponse1 = await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M1")
});
const pushResponse2 = await waku.lightPush.send(customEncoder2, {
payload: utf8ToBytes("M2")
});
expect(pushResponse1.successes.length).to.eq(numServiceNodes);
expect(pushResponse2.successes.length).to.eq(numServiceNodes);
const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]);
const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]);
expect(
await messageCollector1.waitForMessages(1, {
pubsubTopic: TestEncoder.pubsubTopic
})
).to.eq(true);
expect(
await messageCollector2.waitForMessages(1, {
pubsubTopic: customEncoder2.pubsubTopic
})
).to.eq(true);
messageCollector1.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestEncoder.contentTopic,
expectedPubsubTopic: TestEncoder.pubsubTopic
});
messageCollector2.verifyReceivedMessage(0, {
expectedMessageText: "M2",
expectedContentTopic: customEncoder2.contentTopic,
expectedPubsubTopic: customEncoder2.pubsubTopic
});
});
it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () {
// Set up and start a new nwaku node with Default PubsubTopic
const nwaku2 = new ServiceNode(makeLogFileName(this) + "3");
try {
await nwaku2.start({
filter: true,
lightpush: true,
relay: true,
clusterId: ClusterId,
shard: [2]
});
await nwaku2.ensureSubscriptionsAutosharding([
customEncoder2.pubsubTopic
]);
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.LightPush]);
const messageCollector2 = new MessageCollector(nwaku2);
await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M1")
});
await waku.lightPush.send(customEncoder2, {
payload: utf8ToBytes("M2")
});
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestEncoder.pubsubTopic
});
await messageCollector2.waitForMessagesAutosharding(1, {
contentTopic: customEncoder2.contentTopic
});
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestEncoder.contentTopic,
expectedPubsubTopic: TestEncoder.pubsubTopic
});
messageCollector2.verifyReceivedMessage(0, {
expectedMessageText: "M2",
expectedContentTopic: customEncoder2.contentTopic,
expectedPubsubTopic: customEncoder2.pubsubTopic
});
} catch (e) {
await tearDownNodes([nwaku2], []);
}
});
});
// import { createEncoder } from "@waku/core";
// import { LightNode, Protocols } from "@waku/interfaces";
// import { contentTopicToPubsubTopic } from "@waku/utils";
// import { utf8ToBytes } from "@waku/utils/bytes";
// import { expect } from "chai";
//
// import {
// afterEachCustom,
// beforeEachCustom,
// makeLogFileName,
// MessageCollector,
// runMultipleNodes,
// ServiceNode,
// ServiceNodesFleet,
// tearDownNodes,
// teardownNodesWithRedundancy
// } from "../../src/index.js";
//
// import { TestClusterId, TestEncoder } from "./utils.js";
//
// describe("Waku Light Push (Autosharding): Multiple Shards", function () {
// this.timeout(30000);
// const numServiceNodes = 2;
//
// let waku: LightNode;
// let serviceNodes: ServiceNodesFleet;
//
// const customEncoder2 = createEncoder({
// contentTopic: "/test/2/waku-light-push/utf8",
// pubsubTopic: contentTopicToPubsubTopic(
// "/test/2/waku-light-push/utf8",
// TestClusterId
// )
// });
//
// beforeEachCustom(this, async () => {
// [serviceNodes, waku] = await runMultipleNodes(
// this.ctx,
// {
// clusterId: TestClusterId,
// contentTopics: [TestEncoder.contentTopic, customEncoder2.contentTopic]
// },
// { lightpush: true, filter: true },
// false,
// numServiceNodes,
// false
// );
// });
//
// afterEachCustom(this, async () => {
// await teardownNodesWithRedundancy(serviceNodes, waku);
// });
//
// it("Subscribe and receive messages on 2 different pubsubtopics", async function () {
// const pushResponse1 = await waku.lightPush.send(TestEncoder, {
// payload: utf8ToBytes("M1")
// });
// const pushResponse2 = await waku.lightPush.send(customEncoder2, {
// payload: utf8ToBytes("M2")
// });
//
// expect(pushResponse1.successes.length).to.eq(numServiceNodes);
// expect(pushResponse2.successes.length).to.eq(numServiceNodes);
//
// const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]);
// const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]);
//
// expect(
// await messageCollector1.waitForMessages(1, {
// pubsubTopic: TestEncoder.pubsubTopic
// })
// ).to.eq(true);
//
// expect(
// await messageCollector2.waitForMessages(1, {
// pubsubTopic: customEncoder2.pubsubTopic
// })
// ).to.eq(true);
//
// messageCollector1.verifyReceivedMessage(0, {
// expectedMessageText: "M1",
// expectedContentTopic: TestEncoder.contentTopic,
// expectedPubsubTopic: TestEncoder.pubsubTopic
// });
//
// messageCollector2.verifyReceivedMessage(0, {
// expectedMessageText: "M2",
// expectedContentTopic: customEncoder2.contentTopic,
// expectedPubsubTopic: customEncoder2.pubsubTopic
// });
// });
//
// it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () {
// // Set up and start a new nwaku node with Default PubsubTopic
// const nwaku2 = new ServiceNode(makeLogFileName(this) + "3");
//
// try {
// await nwaku2.start({
// filter: true,
// lightpush: true,
// relay: true,
// clusterId: TestClusterId,
// shard: [2]
// });
// await nwaku2.ensureSubscriptionsAutosharding([
// customEncoder2.pubsubTopic
// ]);
// await waku.dial(await nwaku2.getMultiaddrWithId());
// await waku.waitForPeers([Protocols.LightPush]);
//
// const messageCollector2 = new MessageCollector(nwaku2);
//
// await waku.lightPush.send(TestEncoder, {
// payload: utf8ToBytes("M1")
// });
// await waku.lightPush.send(customEncoder2, {
// payload: utf8ToBytes("M2")
// });
//
// await serviceNodes.messageCollector.waitForMessages(1, {
// pubsubTopic: TestEncoder.pubsubTopic
// });
// await messageCollector2.waitForMessagesAutosharding(1, {
// contentTopic: customEncoder2.contentTopic
// });
//
// serviceNodes.messageCollector.verifyReceivedMessage(0, {
// expectedMessageText: "M1",
// expectedContentTopic: TestEncoder.contentTopic,
// expectedPubsubTopic: TestEncoder.pubsubTopic
// });
// messageCollector2.verifyReceivedMessage(0, {
// expectedMessageText: "M2",
// expectedContentTopic: customEncoder2.contentTopic,
// expectedPubsubTopic: customEncoder2.pubsubTopic
// });
// } catch (e) {
// await tearDownNodes([nwaku2], []);
// }
// });
// });

View File

@ -1,43 +1,22 @@
import { createEncoder } from "@waku/core";
import { LightNode, NetworkConfig, Protocols } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { createLightNode } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex,
Logger
} from "@waku/utils";
import { Context } from "mocha";
import { runNodes as runNodesBuilder, ServiceNode } from "../../src/index.js";
import { createRoutingInfo, Logger } from "@waku/utils";
// Constants for test configuration.
export const log = new Logger("test:lightpush");
export const TestContentTopic = "/test/1/waku-light-push/utf8";
export const ClusterId = 3;
export const ShardIndex = contentTopicToShardIndex(TestContentTopic);
export const TestPubsubTopic = contentTopicToPubsubTopic(
TestContentTopic,
ClusterId
);
export const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: ClusterId
export const TestClusterId = 3;
export const TestNumShardsInCluster = 8;
export const TestNetworkConfig = {
clusterId: TestClusterId,
numShardsInCluster: TestNumShardsInCluster
};
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
export const messageText = "Light Push works!";
export const messagePayload = { payload: utf8ToBytes(messageText) };
export const runNodes = (
context: Context,
shardInfo: NetworkConfig
): Promise<[ServiceNode, LightNode]> =>
runNodesBuilder<LightNode>({
context,
createNode: createLightNode,
protocols: [Protocols.LightPush, Protocols.Filter],
networkConfig: shardInfo
});

View File

@ -1,5 +1,5 @@
import { MetadataCodec } from "@waku/core";
import type { LightNode, ShardInfo } from "@waku/interfaces";
import type { LightNode } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { decodeRelayShard } from "@waku/utils";
import chai, { expect } from "chai";
@ -29,25 +29,27 @@ describe("Metadata Protocol", function () {
await tearDownNodes([nwaku1], waku);
});
describe("connections", function () {
it("same cluster, same shard: nodes connect", async function () {
const shardInfo: ShardInfo = {
clusterId: 2,
shards: [1]
};
describe("static sharding", function () {
it("same cluster, static sharding: nodes connect", async function () {
const clusterId = 2;
const shards = [1];
const numShardsInCluster = 8;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo.clusterId,
shard: shardInfo.shards
clusterId,
shard: shards,
numShardsInNetwork: numShardsInCluster
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
@ -65,82 +67,33 @@ describe("Metadata Protocol", function () {
}
expect(shardInfoRes).to.not.be.undefined;
expect(shardInfoRes.clusterId).to.equal(shardInfo.clusterId);
expect(shardInfoRes.shards).to.include.members(shardInfo.shards);
expect(shardInfoRes.clusterId).to.equal(clusterId);
expect(shardInfoRes.shards).to.include.members(shards);
const activeConnections = waku.libp2p.getConnections();
expect(activeConnections.length).to.equal(1);
});
it("same cluster, different shard: nodes connect", async function () {
const shardInfo1: ShardInfo = {
clusterId: 2,
shards: [1]
};
const shardInfo2: ShardInfo = {
clusterId: 2,
shards: [2]
};
it("different cluster: nodes don't connect", async function () {
const clusterIdNwaku = 2;
const custerIdJsWaku = 3;
const shards = [1];
const numShardsInCluster = 8;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo1.clusterId,
shard: shardInfo1.shards
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({ networkConfig: shardInfo2 });
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
if (!waku.libp2p.services.metadata) {
expect(waku.libp2p.services.metadata).to.not.be.undefined;
return;
}
const { error, shardInfo: shardInfoRes } =
await waku.libp2p.services.metadata.query(nwaku1PeerId);
if (error) {
expect(error).to.be.null;
return;
}
expect(shardInfoRes).to.not.be.undefined;
expect(shardInfoRes.clusterId).to.equal(shardInfo1.clusterId);
expect(shardInfoRes.shards).to.include.members(shardInfo1.shards);
const activeConnections = waku.libp2p.getConnections();
expect(activeConnections.length).to.equal(1);
});
it("different cluster, same shard: nodes don't connect", async function () {
const shardInfo1: ShardInfo = {
clusterId: 2,
shards: [1]
};
const shardInfo2: ShardInfo = {
clusterId: 3,
shards: [1]
};
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo1.clusterId,
shard: shardInfo1.shards
clusterId: clusterIdNwaku,
shard: shards,
numShardsInNetwork: numShardsInCluster
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
waku = await createLightNode({ networkConfig: shardInfo2 });
waku = await createLightNode({
networkConfig: { clusterId: custerIdJsWaku, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
@ -157,28 +110,151 @@ describe("Metadata Protocol", function () {
expect(waku.libp2p.getConnections().length).to.equal(0);
});
it("different cluster, different shard: nodes don't connect", async function () {
const shardInfo1: ShardInfo = {
clusterId: 2,
shards: [1]
};
const shardInfo2: ShardInfo = {
clusterId: 3,
shards: [2]
};
it("PeerStore has remote peer's shard info after successful connection", async function () {
const clusterId = 2;
const shards = [1];
const numShardsInCluster = 8;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo1.clusterId,
shard: shardInfo1.shards
clusterId,
shard: shards,
numShardsInNetwork: numShardsInCluster
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
// delay to ensure the connection is estabilished and shardInfo is updated
await delay(500);
const encodedShardInfo = (
await waku.libp2p.peerStore.get(nwaku1PeerId)
).metadata.get("shardInfo");
expect(encodedShardInfo).to.not.be.undefined;
const metadataShardInfo = decodeRelayShard(encodedShardInfo!);
expect(metadataShardInfo).not.be.undefined;
expect(metadataShardInfo!.clusterId).to.eq(clusterId);
expect(metadataShardInfo.shards).to.include.members(shards);
});
it("receiving a ping from a peer does not overwrite shard info", async function () {
const clusterId = 2;
const shards = [1];
const numShardsInCluster = 8;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
shard: shards
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({
networkConfig: {
clusterId,
numShardsInCluster
},
connectionManager: {
pingKeepAlive: 1
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
// delay to ensure the connection is estabilished, shardInfo is updated, and there is a ping
await delay(1500);
const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata;
expect(metadata.get("shardInfo")).to.not.be.undefined;
const pingInfo = metadata.get("ping");
expect(pingInfo).to.not.be.undefined;
});
});
describe("auto sharding", function () {
it("same cluster: nodes connect", async function () {
const clusterId = 2;
const contentTopic = "/foo/1/bar/proto";
const numShardsInCluster = 0;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
contentTopic: [contentTopic],
numShardsInNetwork: numShardsInCluster
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
if (!waku.libp2p.services.metadata) {
expect(waku.libp2p.services.metadata).to.not.be.undefined;
return;
}
const { error, shardInfo: shardInfoRes } =
await waku.libp2p.services.metadata.query(nwaku1PeerId);
if (error) {
expect(error).to.be.null;
return;
}
expect(shardInfoRes).to.not.be.undefined;
expect(shardInfoRes.clusterId).to.equal(clusterId);
// TODO: calculate shards from content topics
//expect(shardInfoRes.shards).to.include.members(shards);
const activeConnections = waku.libp2p.getConnections();
expect(activeConnections.length).to.equal(1);
});
it("different cluster: nodes don't connect", async function () {
const clusterIdNwaku = 2;
const clusterIdJSWaku = 3;
const contentTopic = ["/foo/1/bar/proto"];
const numShardsInCluster = 0;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: clusterIdNwaku,
contentTopic,
numShardsInNetwork: numShardsInCluster
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
waku = await createLightNode({ networkConfig: shardInfo2 });
waku = await createLightNode({
networkConfig: {
clusterId: clusterIdJSWaku,
numShardsInCluster
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
@ -195,77 +271,81 @@ describe("Metadata Protocol", function () {
expect(waku.libp2p.getConnections().length).to.equal(0);
});
});
it("PeerStore has remote peer's shard info after successful connection", async function () {
const shardInfo: ShardInfo = {
clusterId: 2,
shards: [1]
};
it("PeerStore has remote peer's shard info after successful connection", async function () {
const clusterId = 2;
const contentTopic = ["/foo/1/bar/proto"];
const numShardsInCluster = 0;
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo.clusterId,
shard: shardInfo.shards
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
contentTopic
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
// delay to ensure the connection is estabilished and shardInfo is updated
await delay(500);
const encodedShardInfo = (
await waku.libp2p.peerStore.get(nwaku1PeerId)
).metadata.get("shardInfo");
expect(encodedShardInfo).to.not.be.undefined;
const metadataShardInfo = decodeRelayShard(encodedShardInfo!);
expect(metadataShardInfo).not.be.undefined;
expect(metadataShardInfo!.clusterId).to.eq(clusterId);
// TODO derive shard from content topic
// expect(metadataShardInfo.shards).to.include.members(shards);
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
it("receiving a ping from a peer does not overwrite shard info", async function () {
const clusterId = 2;
const contentTopic = ["/foo/1/bar/proto"];
const numShardsInCluster = 0;
waku = await createLightNode({ networkConfig: shardInfo });
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
contentTopic
});
// delay to ensure the connection is estabilished and shardInfo is updated
await delay(500);
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
const encodedShardInfo = (
await waku.libp2p.peerStore.get(nwaku1PeerId)
).metadata.get("shardInfo");
expect(encodedShardInfo).to.not.be.undefined;
waku = await createLightNode({
networkConfig: {
clusterId,
numShardsInCluster
},
connectionManager: {
pingKeepAlive: 1
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
const metadataShardInfo = decodeRelayShard(encodedShardInfo!);
expect(metadataShardInfo).not.be.undefined;
// delay to ensure the connection is estabilished, shardInfo is updated, and there is a ping
await delay(1500);
expect(metadataShardInfo!.clusterId).to.eq(shardInfo.clusterId);
expect(metadataShardInfo.shards).to.include.members(shardInfo.shards);
});
const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata;
expect(metadata.get("shardInfo")).to.not.be.undefined;
it("receiving a ping from a peer does not overwrite shard info", async function () {
const shardInfo: ShardInfo = {
clusterId: 2,
shards: [1]
};
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo.clusterId,
shard: shardInfo.shards
const pingInfo = metadata.get("ping");
expect(pingInfo).to.not.be.undefined;
});
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
waku = await createLightNode({
networkConfig: shardInfo,
connectionManager: {
pingKeepAlive: 1
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
// delay to ensure the connection is estabilished, shardInfo is updated, and there is a ping
await delay(1500);
const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata;
expect(metadata.get("shardInfo")).to.not.be.undefined;
const pingInfo = metadata.get("ping");
expect(pingInfo).to.not.be.undefined;
});
});

View File

@ -17,8 +17,6 @@ describe("nwaku", () => {
"--rest-admin=true",
"--websocket-support=true",
"--log-level=TRACE",
"--cluster-id=0",
"--shard=0",
"--ports-shift=42"
];

View File

@ -5,7 +5,7 @@ import { createLightNode } from "@waku/sdk";
import {
beforeEachCustom,
DefaultTestShardInfo,
DefaultTestNetworkConfig,
makeLogFileName,
ServiceNode,
tearDownNodes
@ -40,7 +40,9 @@ describe("Peer Exchange", function () {
tests({
async setup() {
waku = await createLightNode({ networkConfig: DefaultTestShardInfo });
waku = await createLightNode({
networkConfig: DefaultTestNetworkConfig
});
await waku.start();
const nwaku2Ma = await nwaku2.getMultiaddrWithId();

View File

@ -3,8 +3,8 @@ import { type PeerId } from "@libp2p/interface";
import { peerIdFromPrivateKey } from "@libp2p/peer-id";
import { multiaddr } from "@multiformats/multiaddr";
import { PeerExchangeDiscovery } from "@waku/discovery";
import { IEnr, LightNode } from "@waku/interfaces";
import { createLightNode, ShardInfo } from "@waku/sdk";
import { IEnr, LightNode, RelayShards } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { decodeRelayShard } from "@waku/utils";
import { expect } from "chai";
import Sinon from "sinon";
@ -15,8 +15,8 @@ describe("Peer Exchange Continuous Discovery", () => {
let peerId: PeerId;
let randomPeerId: PeerId;
let waku: LightNode;
const shardInfo: ShardInfo = {
clusterId: 1,
const relayShards: RelayShards = {
clusterId: 2,
shards: [1, 2]
};
const multiaddrs = [multiaddr("/ip4/127.0.0.1/udp/1234")];
@ -38,7 +38,7 @@ describe("Peer Exchange Continuous Discovery", () => {
const newPeerInfo = {
ENR: {
peerId,
shardInfo,
shardInfo: relayShards,
peerInfo: {
multiaddrs: newMultiaddrs,
id: peerId
@ -59,14 +59,14 @@ describe("Peer Exchange Continuous Discovery", () => {
});
it("Should update shard info", async () => {
const newShardInfo: ShardInfo = {
const newRelayShards: RelayShards = {
clusterId: 2,
shards: [1, 2, 3]
};
const newPeerInfo = {
ENR: {
peerId,
shardInfo: newShardInfo,
shardInfo: newRelayShards,
peerInfo: {
multiaddrs: multiaddrs,
id: peerId
@ -86,7 +86,7 @@ describe("Peer Exchange Continuous Discovery", () => {
);
const _shardInfo = decodeRelayShard(newPeer.metadata.get("shardInfo")!);
expect(_shardInfo).to.deep.equal(newShardInfo);
expect(_shardInfo).to.deep.equal(newRelayShards);
});
async function discoverPeerOnce(): Promise<void> {
@ -95,7 +95,7 @@ describe("Peer Exchange Continuous Discovery", () => {
const enr: IEnr = {
peerId,
shardInfo,
shardInfo: relayShards,
peerInfo: {
multiaddrs: multiaddrs,
id: peerId
@ -122,6 +122,6 @@ describe("Peer Exchange Continuous Discovery", () => {
multiaddrs[0].toString()
);
const _shardInfo = decodeRelayShard(peer.metadata.get("shardInfo")!);
expect(_shardInfo).to.deep.equal(shardInfo);
expect(_shardInfo).to.deep.equal(relayShards);
}
});

View File

@ -10,7 +10,9 @@ import Sinon, { SinonSpy } from "sinon";
import {
afterEachCustom,
beforeEachCustom,
DefaultTestShardInfo,
DefaultTestClusterId,
DefaultTestNetworkConfig,
DefaultTestRelayShards,
makeLogFileName,
ServiceNode,
tearDownNodes
@ -30,15 +32,15 @@ describe("Peer Exchange", function () {
nwaku1 = new ServiceNode(makeLogFileName(this.ctx) + "1");
nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2");
await nwaku1.start({
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards,
clusterId: DefaultTestClusterId,
shard: DefaultTestRelayShards.shards,
discv5Discovery: true,
peerExchange: true,
relay: true
});
await nwaku2.start({
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards,
clusterId: DefaultTestClusterId,
shard: DefaultTestRelayShards.shards,
discv5Discovery: true,
peerExchange: true,
discv5BootstrapNode: (await nwaku1.info()).enrUri,
@ -52,7 +54,7 @@ describe("Peer Exchange", function () {
it("peer exchange sets tag", async function () {
waku = await createLightNode({
networkConfig: DefaultTestShardInfo,
networkConfig: DefaultTestNetworkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [(await nwaku2.getMultiaddrWithId()).toString()] }),
@ -117,8 +119,8 @@ describe("Peer Exchange", function () {
nwaku3 = new ServiceNode(makeLogFileName(this) + "3");
await nwaku3.start({
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards,
clusterId: DefaultTestClusterId,
shard: DefaultTestRelayShards.shards,
discv5Discovery: true,
peerExchange: true,
discv5BootstrapNode: (await nwaku1.info()).enrUri,

View File

@ -6,7 +6,6 @@ import {
} from "@waku/discovery";
import type { LightNode } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { singleShardInfosToShardInfo } from "@waku/utils";
import { expect } from "chai";
import { afterEachCustom, tearDownNodes } from "../../src/index.js";
@ -36,8 +35,7 @@ describe("Peer Exchange", () => {
)
.filter((ma) => ma.includes("wss"));
const singleShardInfo = { clusterId: 1, shard: 1 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 2, numShardsInCluster: 0 };
waku = await createLightNode({
libp2p: {
peerDiscovery: [
@ -45,7 +43,7 @@ describe("Peer Exchange", () => {
wakuPeerExchangeDiscovery()
]
},
networkConfig: shardInfo
networkConfig
});
await waku.start();

View File

@ -13,6 +13,7 @@ import {
createDecoder as createSymDecoder,
createEncoder as createSymEncoder
} from "@waku/message-encryption/symmetric";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -23,7 +24,7 @@ import {
tearDownNodes
} from "../../src/index.js";
import { runJSNodes, TestPubsubTopic } from "./utils.js";
import { runJSNodes, TestNetworkConfig, TestRoutingInfo } from "./utils.js";
describe("Waku Relay", function () {
this.timeout(15000);
@ -51,20 +52,20 @@ describe("Waku Relay", function () {
const eciesEncoder = createEciesEncoder({
contentTopic: asymTopic,
publicKey,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
const symEncoder = createSymEncoder({
contentTopic: symTopic,
symKey,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
const eciesDecoder = createEciesDecoder(
asymTopic,
privateKey,
TestPubsubTopic
TestRoutingInfo,
privateKey
);
const symDecoder = createSymDecoder(symTopic, symKey, TestPubsubTopic);
const symDecoder = createSymDecoder(symTopic, TestRoutingInfo, symKey);
const msgs: IDecodedMessage[] = [];
void waku2.relay.subscribeWithUnsubscribe([eciesDecoder], (wakuMsg) => {
@ -93,19 +94,20 @@ describe("Waku Relay", function () {
"Published on content topic with added then deleted observer";
const contentTopic = "/test/1/observer/proto";
const routingInfo = createRoutingInfo(TestNetworkConfig, { contentTopic });
// The promise **fails** if we receive a message on this observer.
const receivedMsgPromise: Promise<IDecodedMessage> = new Promise(
(resolve, reject) => {
const deleteObserver = waku2.relay.subscribeWithUnsubscribe(
[createDecoder(contentTopic)],
[createDecoder(contentTopic, routingInfo)],
reject
) as () => void;
deleteObserver();
setTimeout(resolve, 500);
}
);
await waku1.relay.send(createEncoder({ contentTopic }), {
await waku1.relay.send(createEncoder({ contentTopic, routingInfo }), {
payload: utf8ToBytes(messageText)
});

View File

@ -19,8 +19,8 @@ import {
TestContentTopic,
TestDecoder,
TestEncoder,
TestPubsubTopic,
TestShardInfo
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";
import { runRelayNodes } from "./utils.js";
@ -30,7 +30,12 @@ describe("Waku Relay, Interop", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runRelayNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runRelayNodes(
this.ctx,
TestNetworkConfig,
undefined,
[TestContentTopic]
);
});
afterEachCustom(this, async () => {
@ -42,8 +47,9 @@ describe("Waku Relay, Interop", function () {
while (subscribers.length === 0) {
await delay(200);
subscribers =
waku.libp2p.services.pubsub!.getSubscribers(TestPubsubTopic);
subscribers = waku.libp2p.services.pubsub!.getSubscribers(
TestRoutingInfo.pubsubTopic
);
}
const nimPeerId = await nwaku.getPeerId();
@ -86,7 +92,8 @@ describe("Waku Relay, Interop", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: TestContentTopic,
payload: utf8ToBytes(messageText)
})
}),
TestRoutingInfo
);
const receivedMsg = await receivedMsgPromise;
@ -98,9 +105,10 @@ describe("Waku Relay, Interop", function () {
it("Js publishes, other Js receives", async function () {
const waku2 = await createRelayNode({
routingInfos: [TestRoutingInfo],
staticNoiseKey: NOISE_KEY_2,
emitSelf: true,
networkConfig: TestShardInfo
networkConfig: TestNetworkConfig
});
await waku2.start();

View File

@ -1,18 +1,7 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
ContentTopicInfo,
IDecodedMessage,
Protocols,
RelayNode,
ShardInfo,
SingleShardInfo
} from "@waku/interfaces";
import { IDecodedMessage, Protocols, RelayNode } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import {
contentTopicToPubsubTopic,
pubsubTopicToSingleShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -26,43 +15,38 @@ import {
} from "../../src/index.js";
import { TestDecoder } from "../filter/utils.js";
describe("Waku Relay, multiple pubsub topics", function () {
describe("Waku Relay, static sharding, multiple pubsub topics", function () {
this.timeout(15000);
let waku1: RelayNode;
let waku2: RelayNode;
let waku3: RelayNode;
const customPubsubTopic1 = singleShardInfoToPubsubTopic({
clusterId: 3,
shard: 1
});
const customPubsubTopic2 = singleShardInfoToPubsubTopic({
clusterId: 3,
shard: 2
});
const shardInfo1: ShardInfo = { clusterId: 3, shards: [1] };
const singleShardInfo1: SingleShardInfo = {
clusterId: 3,
shard: 1
};
const clusterId = 3;
const networkConfig = { clusterId };
const shardOne = 1;
const shardTwo = 2;
const customContentTopic1 = "/test/2/waku-relay/utf8";
const customContentTopic2 = "/test/3/waku-relay/utf8";
const shardInfo2: ShardInfo = { clusterId: 3, shards: [2] };
const singleShardInfo2: SingleShardInfo = {
clusterId: 3,
shard: 2
};
const routingInfoOne = createRoutingInfo(networkConfig, {
shardId: shardOne
});
const routingInfoTwo = createRoutingInfo(networkConfig, {
shardId: shardTwo
});
const customEncoder1 = createEncoder({
pubsubTopicShardInfo: singleShardInfo1,
contentTopic: customContentTopic1
contentTopic: customContentTopic1,
routingInfo: routingInfoOne
});
const customDecoder1 = createDecoder(customContentTopic1, singleShardInfo1);
const customDecoder1 = createDecoder(customContentTopic1, routingInfoOne);
const customEncoder2 = createEncoder({
pubsubTopicShardInfo: singleShardInfo2,
contentTopic: customContentTopic2
contentTopic: customContentTopic2,
routingInfo: routingInfoTwo
});
const customDecoder2 = createDecoder(customContentTopic2, singleShardInfo2);
const shardInfoBothShards: ShardInfo = { clusterId: 3, shards: [1, 2] };
const customDecoder2 = createDecoder(customContentTopic2, routingInfoTwo);
afterEachCustom(this, async () => {
await tearDownNodes([], [waku1, waku2, waku3]);
@ -70,35 +54,36 @@ describe("Waku Relay, multiple pubsub topics", function () {
[
{
pubsub: customPubsubTopic1,
shardInfo: shardInfo1,
routingInfo: routingInfoOne,
encoder: customEncoder1,
decoder: customDecoder1
},
{
pubsub: customPubsubTopic2,
shardInfo: shardInfo2,
routingInfo: routingInfoTwo,
encoder: customEncoder2,
decoder: customDecoder2
}
].forEach((testItem) => {
it(`3 nodes on ${testItem.pubsub} topic`, async function () {
it(`3 nodes on ${testItem.routingInfo.pubsubTopic} topic`, async function () {
const [msgCollector1, msgCollector2, msgCollector3] = Array(3)
.fill(null)
.map(() => new MessageCollector());
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig: networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig: networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig: networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -196,16 +181,19 @@ describe("Waku Relay, multiple pubsub topics", function () {
// Waku1 and waku2 are using multiple pubsub topis
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: shardInfoBothShards,
networkConfig: networkConfig,
routingInfos: [routingInfoOne, routingInfoTwo],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: shardInfoBothShards,
networkConfig: networkConfig,
routingInfos: [routingInfoOne, routingInfoTwo],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: shardInfo1,
networkConfig: networkConfig,
routingInfos: [routingInfoOne],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -262,18 +250,22 @@ describe("Waku Relay, multiple pubsub topics", function () {
expect(msgCollector3.hasMessage(customContentTopic1, "M3")).to.eq(true);
});
it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () {
it("n1 and n2 uses relay shard 1, n3 uses relay shard 2", async function () {
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: shardInfo1,
networkConfig,
routingInfos: [routingInfoOne],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: shardInfo1,
networkConfig,
routingInfos: [routingInfoOne],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig,
routingInfos: [routingInfoTwo],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -319,55 +311,45 @@ describe("Waku Relay, multiple pubsub topics", function () {
await waku3NoMsgPromise;
expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(customPubsubTopic1);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(routingInfoOne.pubsubTopic);
});
});
describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
describe("Waku Relay auto-sharding, multiple pubsub topics", function () {
this.timeout(15000);
const clusterId = 7;
let waku1: RelayNode;
let waku2: RelayNode;
let waku3: RelayNode;
const networkConfig = { clusterId, numShardsInCluster: 8 };
const customContentTopic1 = "/waku/2/content/utf8";
const customContentTopic2 = "/myapp/1/latest/proto";
const autoshardingPubsubTopic1 = contentTopicToPubsubTopic(
customContentTopic1,
clusterId
);
const autoshardingPubsubTopic2 = contentTopicToPubsubTopic(
customContentTopic2,
clusterId
);
const contentTopicInfo1: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [customContentTopic1]
};
const contentTopicInfo2: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [customContentTopic2]
};
const routingInfo1 = createRoutingInfo(networkConfig, {
contentTopic: customContentTopic1
});
const routingInfo2 = createRoutingInfo(networkConfig, {
contentTopic: customContentTopic2
});
if (routingInfo1.pubsubTopic == routingInfo2.pubsubTopic)
throw "Internal error, both content topics resolve to same shard";
const customEncoder1 = createEncoder({
contentTopic: customContentTopic1,
pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
routingInfo: routingInfo1
});
const customDecoder1 = createDecoder(
customContentTopic1,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
);
const customDecoder1 = createDecoder(customContentTopic1, routingInfo1);
const customEncoder2 = createEncoder({
contentTopic: customContentTopic2,
pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
routingInfo: routingInfo2
});
const customDecoder2 = createDecoder(
customContentTopic2,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
);
const contentTopicInfoBothShards: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [customContentTopic1, customContentTopic2]
};
const customDecoder2 = createDecoder(customContentTopic2, routingInfo2);
const relayShard1 = { clusterId, shards: [routingInfo1.shardId] };
const relayShard2 = { clusterId, shards: [routingInfo2.shardId] };
afterEachCustom(this, async () => {
await tearDownNodes([], [waku1, waku2, waku3]);
@ -375,35 +357,38 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
[
{
pubsub: autoshardingPubsubTopic1,
shardInfo: contentTopicInfo1,
routingInfo: routingInfo1,
relayShards: relayShard1,
encoder: customEncoder1,
decoder: customDecoder1
},
{
pubsub: autoshardingPubsubTopic2,
shardInfo: contentTopicInfo2,
routingInfo: routingInfo2,
relayShards: relayShard2,
encoder: customEncoder2,
decoder: customDecoder2
}
].forEach((testItem) => {
it(`3 nodes on ${testItem.pubsub} topic`, async function () {
it(`3 nodes on ${testItem.routingInfo.pubsubTopic} topic`, async function () {
const [msgCollector1, msgCollector2, msgCollector3] = Array(3)
.fill(null)
.map(() => new MessageCollector());
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -510,16 +495,19 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
// Waku1 and waku2 are using multiple pubsub topis
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: contentTopicInfoBothShards,
networkConfig,
routingInfos: [routingInfo1, routingInfo2],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: contentTopicInfoBothShards,
networkConfig,
routingInfos: [routingInfo1, routingInfo2],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: contentTopicInfo1,
networkConfig,
routingInfos: [routingInfo1],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -603,18 +591,22 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
expect(msgCollector3.hasMessage(customContentTopic1, "M3")).to.eq(true);
});
it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () {
it("n1 and n2 uses first shard, n3 uses the second shard", async function () {
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: contentTopicInfo1,
networkConfig,
routingInfos: [routingInfo1],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: contentTopicInfo1,
networkConfig,
routingInfos: [routingInfo1],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig,
routingInfos: [routingInfo2],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -660,6 +652,6 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
await waku3NoMsgPromise;
expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(autoshardingPubsubTopic1);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(routingInfo1.pubsubTopic);
});
});

View File

@ -1,5 +1,6 @@
import { createEncoder } from "@waku/core";
import { IRateLimitProof, ProtocolError, RelayNode } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -16,13 +17,12 @@ import {
import {
messageText,
runJSNodes,
TestClusterId,
TestContentTopic,
TestDecoder,
TestEncoder,
TestExpectOptions,
TestPubsubTopic,
TestShardInfo,
TestWaitMessageOptions,
TestRoutingInfo,
waitForAllRemotePeers
} from "./utils.js";
@ -54,9 +54,7 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: testItem.value
@ -81,9 +79,7 @@ describe("Waku Relay, Publish", function () {
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
@ -107,31 +103,16 @@ describe("Waku Relay, Publish", function () {
it("Fails to publish message with empty text", async function () {
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("") });
await delay(400);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
});
it("Fails to publish message with wrong content topic", async function () {
const wrong_encoder = createEncoder({
contentTopic: "/test/1/wrong/utf8",
pubsubTopic: TestPubsubTopic
});
await waku1.relay.send(wrong_encoder, {
payload: utf8ToBytes("")
});
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
expect(await messageCollector.waitForMessages(1)).to.eq(false);
});
it("Fails to publish message with wrong pubsubtopic", async function () {
const wrong_encoder = createEncoder({
pubsubTopicShardInfo: {
clusterId: TestShardInfo.clusterId,
shard: TestShardInfo.shards[0] + 1
},
contentTopic: TestContentTopic
contentTopic: TestContentTopic,
routingInfo: createRoutingInfo(
{ clusterId: TestClusterId },
{ shardId: 32 }
)
});
const pushResponse = await waku1.relay.send(wrong_encoder, {
payload: utf8ToBytes("")
@ -140,9 +121,7 @@ describe("Waku Relay, Publish", function () {
ProtocolError.TOPIC_NOT_CONFIGURED
);
await delay(400);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
expect(await messageCollector.waitForMessages(1)).to.eq(false);
});
[1024 ** 2 + 65536, 2 * 1024 ** 2].forEach((testItem) => {
@ -155,9 +134,7 @@ describe("Waku Relay, Publish", function () {
ProtocolError.SIZE_TOO_BIG
);
await delay(400);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
expect(await messageCollector.waitForMessages(1)).to.eq(false);
});
});
@ -183,9 +160,7 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(2, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(2)).to.eq(true);
});
// Will be skipped until https://github.com/waku-org/js-waku/issues/1464 si done
@ -210,15 +185,13 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(2, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(2)).to.eq(true);
});
it("Publish message with large meta", async function () {
const customTestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic,
routingInfo: TestRoutingInfo,
metaSetter: () => new Uint8Array(10 ** 6)
});
@ -229,9 +202,7 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
});
it("Publish message with rate limit", async function () {
@ -251,9 +222,7 @@ describe("Waku Relay, Publish", function () {
});
expect(pushResponse.successes.length).to.eq(1);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText

View File

@ -1,6 +1,7 @@
import { createDecoder, createEncoder } from "@waku/core";
import { RelayNode } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -20,9 +21,8 @@ import {
TestDecoder,
TestEncoder,
TestExpectOptions,
TestPubsubTopic,
TestShardInfo,
TestWaitMessageOptions,
TestNetworkConfig,
TestRoutingInfo,
waitForAllRemotePeers
} from "./utils.js";
@ -44,10 +44,10 @@ describe("Waku Relay, Subscribe", function () {
it("Mutual subscription", async function () {
await waitForAllRemotePeers(waku1, waku2);
const subscribers1 = waku1.libp2p.services
.pubsub!.getSubscribers(TestPubsubTopic)
.pubsub!.getSubscribers(TestRoutingInfo.pubsubTopic)
.map((p) => p.toString());
const subscribers2 = waku2.libp2p.services
.pubsub!.getSubscribers(TestPubsubTopic)
.pubsub!.getSubscribers(TestRoutingInfo.pubsubTopic)
.map((p) => p.toString());
expect(subscribers1).to.contain(waku2.libp2p.peerId.toString());
@ -65,7 +65,8 @@ describe("Waku Relay, Subscribe", function () {
try {
const waku = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: TestShardInfo
networkConfig: TestNetworkConfig,
routingInfos: [TestRoutingInfo]
});
await waku.start();
@ -90,9 +91,7 @@ describe("Waku Relay, Subscribe", function () {
messageCollector.callback
);
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes(messageText) });
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText
@ -115,7 +114,6 @@ describe("Waku Relay, Subscribe", function () {
// Verify that each message was received on the corresponding topic.
expect(
await messageCollector.waitForMessages(messageCount, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -130,12 +128,15 @@ describe("Waku Relay, Subscribe", function () {
});
it("Subscribe and publish messages on 2 different content topics", async function () {
const secondContentTopic = "/test/2/waku-relay/utf8";
const secondContentTopic = "/test/0/waku-relay-2/utf8";
const secondRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: secondContentTopic
});
const secondEncoder = createEncoder({
contentTopic: secondContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: secondRoutingInfo
});
const secondDecoder = createDecoder(secondContentTopic, TestPubsubTopic);
const secondDecoder = createDecoder(secondContentTopic, secondRoutingInfo);
await waku2.relay.subscribeWithUnsubscribe(
[TestDecoder],
@ -149,7 +150,6 @@ describe("Waku Relay, Subscribe", function () {
await waku1.relay.send(secondEncoder, { payload: utf8ToBytes("M2") });
expect(
await messageCollector.waitForMessages(2, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -166,7 +166,7 @@ describe("Waku Relay, Subscribe", function () {
it("Subscribe one by one to 100 topics and publish messages", async function () {
const topicCount = 100;
const td = generateTestData(topicCount, TestWaitMessageOptions);
const td = generateTestData(topicCount, TestNetworkConfig);
// Subscribe to topics one by one
for (let i = 0; i < topicCount; i++) {
@ -186,7 +186,6 @@ describe("Waku Relay, Subscribe", function () {
// Verify that each message was received on the corresponding topic.
expect(
await messageCollector.waitForMessages(topicCount, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -201,7 +200,7 @@ describe("Waku Relay, Subscribe", function () {
it("Subscribe at once to 10000 topics and publish messages", async function () {
const topicCount = 10000;
const td = generateTestData(topicCount, TestWaitMessageOptions);
const td = generateTestData(topicCount, TestNetworkConfig);
// Subscribe to all topics at once
await waku2.relay.subscribeWithUnsubscribe(
@ -219,7 +218,6 @@ describe("Waku Relay, Subscribe", function () {
// Verify that each message was received on the corresponding topic.
expect(
await messageCollector.waitForMessages(topicCount, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -248,7 +246,6 @@ describe("Waku Relay, Subscribe", function () {
expect(
await messageCollector.waitForMessages(1, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -258,9 +255,9 @@ describe("Waku Relay, Subscribe", function () {
it.skip("Overlapping topic subscription", async function () {
// Define two sets of test data with overlapping topics.
const topicCount1 = 2;
const td1 = generateTestData(topicCount1, TestWaitMessageOptions);
const td1 = generateTestData(topicCount1, TestNetworkConfig);
const topicCount2 = 4;
const td2 = generateTestData(topicCount2, TestWaitMessageOptions);
const td2 = generateTestData(topicCount2, TestNetworkConfig);
// Subscribe to the first set of topics.
await waku2.relay.subscribeWithUnsubscribe(
@ -293,7 +290,6 @@ describe("Waku Relay, Subscribe", function () {
// Since there are overlapping topics, there should be 6 messages in total (2 from the first set + 4 from the second set).
expect(
await messageCollector.waitForMessages(6, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -301,29 +297,39 @@ describe("Waku Relay, Subscribe", function () {
TEST_STRING.forEach((testItem) => {
it(`Subscribe to topic containing ${testItem.description} and publish message`, async function () {
const newContentTopic = testItem.value;
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newContentTopic = `/test/0/${testItem.value}/null`;
await waku2.relay.subscribeWithUnsubscribe(
[newDecoder],
messageCollector.callback
);
await waku1.relay.send(newEncoder, {
payload: utf8ToBytes(messageText)
});
try {
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText,
expectedContentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku2.relay.subscribeWithUnsubscribe(
[newDecoder],
messageCollector.callback
);
await waku1.relay.send(newEncoder, {
payload: utf8ToBytes(messageText)
});
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText,
expectedContentTopic: newContentTopic
});
} catch (err: unknown) {
if (testItem.invalidContentTopic) {
const e = err as Error;
expect(e.message).to.contain("Invalid generation field");
}
}
});
});
});

View File

@ -1,12 +1,14 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
AutoSharding,
ContentTopic,
NetworkConfig,
Protocols,
RelayNode,
ShardInfo
type ShardId
} from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import { contentTopicToPubsubTopic, Logger } from "@waku/utils";
import { createRoutingInfo, Logger } from "@waku/utils";
import { Context } from "mocha";
import {
@ -16,25 +18,25 @@ import {
ServiceNode
} from "../../src/index.js";
export const TestClusterId = 4;
export const messageText = "Relay works!";
export const TestContentTopic = "/test/1/waku-relay/utf8";
export const TestShardInfo: ShardInfo = {
clusterId: 2,
shards: [4]
export const TestContentTopic = "/test/0/waku-relay/utf8";
export const TestNetworkConfig: AutoSharding = {
clusterId: TestClusterId,
numShardsInCluster: 8
};
export const TestPubsubTopic = contentTopicToPubsubTopic(
TestContentTopic,
TestShardInfo.clusterId
);
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic);
export const TestWaitMessageOptions = { pubsubTopic: TestPubsubTopic };
export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);
export const TestExpectOptions = {
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
};
export const log = new Logger("test:relay");
@ -51,10 +53,14 @@ export async function waitForAllRemotePeers(
export const runRelayNodes = (
context: Context,
networkConfig: NetworkConfig
networkConfig: NetworkConfig,
relayShards?: ShardId[], // Only for static sharding
contentTopics?: ContentTopic[] // Only for auto sharding
): Promise<[ServiceNode, RelayNode]> =>
runNodes<RelayNode>({
networkConfig,
relayShards,
contentTopics,
context,
protocols: RELAY_PROTOCOLS,
createNode: createRelayNode
@ -64,12 +70,14 @@ export async function runJSNodes(): Promise<[RelayNode, RelayNode]> {
log.info("Starting JS Waku instances");
const [waku1, waku2] = await Promise.all([
createRelayNode({
routingInfos: [TestRoutingInfo],
staticNoiseKey: NOISE_KEY_1,
networkConfig: TestShardInfo
networkConfig: TestNetworkConfig
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
routingInfos: [TestRoutingInfo],
staticNoiseKey: NOISE_KEY_2,
networkConfig: TestShardInfo,
networkConfig: TestNetworkConfig,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku))
]);

View File

@ -1,9 +1,6 @@
import { LightNode } from "@waku/interfaces";
import { AutoSharding, LightNode } from "@waku/interfaces";
import { createEncoder, utf8ToBytes } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex
} from "@waku/utils";
import { contentTopicToPubsubTopic, createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import {
@ -33,10 +30,14 @@ describe("Autosharding: Running Nodes", function () {
// js-waku allows autosharding for cluster IDs different than 1
it("Cluster ID 0 - Default/Global Cluster", async function () {
const clusterId = 0;
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic] },
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -45,10 +46,7 @@ describe("Autosharding: Running Nodes", function () {
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});
const request = await waku.lightPush.send(encoder, {
@ -56,19 +54,19 @@ describe("Autosharding: Running Nodes", function () {
});
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});
it("Non TWN Cluster", async function () {
const clusterId = 5;
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 10 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic] },
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -77,10 +75,7 @@ describe("Autosharding: Running Nodes", function () {
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});
const request = await waku.lightPush.send(encoder, {
@ -88,11 +83,7 @@ describe("Autosharding: Running Nodes", function () {
});
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});
const numTest = 10;
@ -109,9 +100,14 @@ describe("Autosharding: Running Nodes", function () {
it(`random auto sharding ${
i + 1
} - Cluster ID: ${clusterId}, Content Topic: ${ContentTopic}`, async function () {
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic] },
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -120,10 +116,7 @@ describe("Autosharding: Running Nodes", function () {
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});
const request = await waku.lightPush.send(encoder, {
@ -133,7 +126,7 @@ describe("Autosharding: Running Nodes", function () {
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
contentTopic: ContentTopic
})
).to.eq(true);
});
@ -143,7 +136,7 @@ describe("Autosharding: Running Nodes", function () {
it("Wrong topic", async function () {
const wrongTopic = "wrong_format";
try {
contentTopicToPubsubTopic(wrongTopic, clusterId);
contentTopicToPubsubTopic(wrongTopic, clusterId, 8);
throw new Error("Wrong topic should've thrown an error");
} catch (err) {
if (
@ -156,10 +149,19 @@ describe("Autosharding: Running Nodes", function () {
});
it("configure the node with multiple content topics", async function () {
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic, ContentTopic2] },
{ lightpush: true, filter: true },
routingInfo,
{
lightpush: true,
filter: true,
contentTopic: [ContentTopic, ContentTopic2]
},
false,
numServiceNodes,
true
@ -167,18 +169,14 @@ describe("Autosharding: Running Nodes", function () {
const encoder1 = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});
const encoder2 = createEncoder({
contentTopic: ContentTopic2,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic2)
}
routingInfo: createRoutingInfo(networkConfig, {
contentTopic: ContentTopic2
})
});
const request1 = await waku.lightPush.send(encoder1, {
@ -187,7 +185,7 @@ describe("Autosharding: Running Nodes", function () {
expect(request1.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder1.pubsubTopic
contentTopic: ContentTopic
})
).to.eq(true);
@ -197,7 +195,7 @@ describe("Autosharding: Running Nodes", function () {
expect(request2.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder2.pubsubTopic
contentTopic: ContentTopic2
})
).to.eq(true);
});

View File

@ -1,13 +1,8 @@
import { bootstrap } from "@libp2p/bootstrap";
import type { PeerId } from "@libp2p/interface";
import { wakuPeerExchangeDiscovery } from "@waku/discovery";
import {
ContentTopicInfo,
createLightNode,
LightNode,
ShardInfo,
Tags
} from "@waku/sdk";
import { AutoSharding } from "@waku/interfaces";
import { createLightNode, LightNode, Tags } from "@waku/sdk";
import { contentTopicToShardIndex } from "@waku/utils";
import chai, { expect } from "chai";
import chaiAsPromised from "chai-as-promised";
@ -48,14 +43,17 @@ describe("Static Sharding: Peer Management", function () {
it("all px service nodes subscribed to the shard topic should be dialed", async function () {
this.timeout(100_000);
const shardInfo: ShardInfo = { clusterId: clusterId, shards: [2] };
const shard = 2;
const numShardsInCluster = 8;
const networkConfig: AutoSharding = { clusterId, numShardsInCluster };
await nwaku1.start({
discv5Discovery: true,
peerExchange: true,
relay: true,
clusterId: clusterId,
shard: [2]
shard: [shard],
numShardsInNetwork: numShardsInCluster
});
const enr1 = (await nwaku1.info()).enrUri;
@ -66,7 +64,8 @@ describe("Static Sharding: Peer Management", function () {
discv5BootstrapNode: enr1,
relay: true,
clusterId: clusterId,
shard: [2]
shard: [shard],
numShardsInNetwork: numShardsInCluster
});
const enr2 = (await nwaku2.info()).enrUri;
@ -77,12 +76,13 @@ describe("Static Sharding: Peer Management", function () {
discv5BootstrapNode: enr2,
relay: true,
clusterId: clusterId,
shard: [2]
shard: [shard],
numShardsInNetwork: numShardsInCluster
});
const nwaku3Ma = await nwaku3.getMultiaddrWithId();
waku = await createLightNode({
networkConfig: shardInfo,
networkConfig: networkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
@ -118,9 +118,11 @@ describe("Static Sharding: Peer Management", function () {
expect(dialPeerSpy.callCount).to.equal(3);
});
it("px service nodes not subscribed to the shard should not be dialed", async function () {
it("px service nodes in same cluster, no matter the shard, should be dialed", async function () {
this.timeout(100_000);
const shardInfoToDial: ShardInfo = { clusterId: clusterId, shards: [2] };
const numShardsInCluster = 8;
const networkConfig: AutoSharding = { clusterId, numShardsInCluster };
// this service node is not subscribed to the shard
await nwaku1.start({
@ -128,7 +130,8 @@ describe("Static Sharding: Peer Management", function () {
discv5Discovery: true,
peerExchange: true,
clusterId: clusterId,
shard: [1]
shard: [1],
numShardsInNetwork: numShardsInCluster
});
const enr1 = (await nwaku1.info()).enrUri;
@ -139,7 +142,8 @@ describe("Static Sharding: Peer Management", function () {
peerExchange: true,
discv5BootstrapNode: enr1,
clusterId: clusterId,
shard: [2]
shard: [2],
numShardsInNetwork: numShardsInCluster
});
const enr2 = (await nwaku2.info()).enrUri;
@ -150,12 +154,13 @@ describe("Static Sharding: Peer Management", function () {
peerExchange: true,
discv5BootstrapNode: enr2,
clusterId: clusterId,
shard: [2]
shard: [2],
numShardsInNetwork: numShardsInCluster
});
const nwaku3Ma = await nwaku3.getMultiaddrWithId();
waku = await createLightNode({
networkConfig: shardInfoToDial,
networkConfig: networkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
@ -178,7 +183,7 @@ describe("Static Sharding: Peer Management", function () {
const tags = Array.from(peer.tags.keys());
if (tags.includes(Tags.PEER_EXCHANGE)) {
pxPeersDiscovered.add(peerId);
if (pxPeersDiscovered.size === 1) {
if (pxPeersDiscovered.size === 2) {
resolve();
}
}
@ -187,7 +192,7 @@ describe("Static Sharding: Peer Management", function () {
});
await delay(1000);
expect(dialPeerSpy.callCount).to.equal(2);
expect(dialPeerSpy.callCount).to.equal(3);
});
});
});
@ -219,9 +224,9 @@ describe("Autosharding: Peer Management", function () {
it("all px service nodes subscribed to the shard topic should be dialed", async function () {
this.timeout(100_000);
const contentTopicInfo: ContentTopicInfo = {
const networkConfig: AutoSharding = {
clusterId: clusterId,
contentTopics: [ContentTopic]
numShardsInCluster: 8
};
await nwaku1.start({
@ -259,7 +264,7 @@ describe("Autosharding: Peer Management", function () {
const nwaku3Ma = await nwaku3.getMultiaddrWithId();
waku = await createLightNode({
networkConfig: contentTopicInfo,
networkConfig: networkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
@ -294,82 +299,5 @@ describe("Autosharding: Peer Management", function () {
expect(dialPeerSpy.callCount).to.equal(3);
});
it("px service nodes not subscribed to the shard should not be dialed", async function () {
this.timeout(100_000);
const contentTopicInfoToDial: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [ContentTopic]
};
// this service node is not subscribed to the shard
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: 3,
shard: Shard
});
const enr1 = (await nwaku1.info()).enrUri;
await nwaku2.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
discv5BootstrapNode: enr1,
clusterId: clusterId,
shard: Shard,
contentTopic: [ContentTopic]
});
const enr2 = (await nwaku2.info()).enrUri;
await nwaku3.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
discv5BootstrapNode: enr2,
clusterId: clusterId,
shard: Shard,
contentTopic: [ContentTopic]
});
const nwaku3Ma = await nwaku3.getMultiaddrWithId();
waku = await createLightNode({
networkConfig: contentTopicInfoToDial,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
wakuPeerExchangeDiscovery()
]
}
});
dialPeerSpy = Sinon.spy((waku as any).libp2p, "dial");
await waku.start();
const pxPeersDiscovered = new Set<PeerId>();
await new Promise<void>((resolve) => {
waku.libp2p.addEventListener("peer:discovery", (evt) => {
return void (async () => {
const peerId = evt.detail.id;
const peer = await waku.libp2p.peerStore.get(peerId);
const tags = Array.from(peer.tags.keys());
if (tags.includes(Tags.PEER_EXCHANGE)) {
pxPeersDiscovered.add(peerId);
if (pxPeersDiscovered.size === 1) {
resolve();
}
}
})();
});
});
await delay(1000);
expect(dialPeerSpy.callCount).to.equal(2);
});
});
});

View File

@ -1,15 +1,10 @@
import { LightNode, SingleShardInfo } from "@waku/interfaces";
import { LightNode, StaticSharding } from "@waku/interfaces";
import { createEncoder, utf8ToBytes } from "@waku/sdk";
import {
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import {
afterEachCustom,
beforeEachCustom,
runMultipleNodes,
ServiceNodesFleet,
teardownNodesWithRedundancy
@ -30,13 +25,15 @@ describe("Static Sharding: Running Nodes", function () {
}
});
it("shard 0", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
it("Cluster id 0, shard 0", async function () {
const clusterId = 0;
const shardId = 0;
const networkConfig: StaticSharding = { clusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId });
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
shardInfo,
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -45,32 +42,27 @@ describe("Static Sharding: Running Nodes", function () {
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
routingInfo
});
expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);
const request = await waku.lightPush.send(encoder, {
payload: utf8ToBytes("Hello World")
});
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});
// dedicated test for Default Cluster ID 0
it("Cluster ID 0 - Default/Global Cluster", async function () {
const singleShardInfo = { clusterId: 0, shard: 1 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
it("Cluster ID 0, shard 1", async function () {
const clusterId = 0;
const shardId = 1;
const networkConfig: StaticSharding = { clusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId });
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
shardInfo,
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -79,7 +71,7 @@ describe("Static Sharding: Running Nodes", function () {
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
routingInfo
});
const request = await waku.lightPush.send(encoder, {
@ -87,11 +79,7 @@ describe("Static Sharding: Running Nodes", function () {
});
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});
const numTest = 10;
@ -102,15 +90,15 @@ describe("Static Sharding: Running Nodes", function () {
// Random shardId between 1 and 1000
const shardId = Math.floor(Math.random() * 1000) + 1;
const networkConfig: StaticSharding = { clusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId });
it(`random static sharding ${
i + 1
} - Cluster ID: ${clusterId}, Shard ID: ${shardId}`, async function () {
const singleShardInfo = { clusterId: clusterId, shard: shardId };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
shardInfo,
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -119,7 +107,7 @@ describe("Static Sharding: Running Nodes", function () {
const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
routingInfo
});
const request = await waku.lightPush.send(encoder, {
@ -127,75 +115,9 @@ describe("Static Sharding: Running Nodes", function () {
});
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
})
).to.eq(true);
});
}
describe("Others", function () {
const clusterId = 2;
const singleShardInfo1: SingleShardInfo = {
clusterId: clusterId,
shard: 2
};
const singleShardInfo2: SingleShardInfo = {
clusterId: clusterId,
shard: 3
};
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, shards: [2, 3] },
{ lightpush: true, filter: true },
false,
numServiceNodes,
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
});
afterEachCustom(this, async () => {
if (serviceNodes) {
await teardownNodesWithRedundancy(serviceNodes, waku ?? []);
}
});
it("configure the node with multiple pubsub topics", async function () {
const encoder1 = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo1
});
const encoder2 = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo2
});
const request1 = await waku?.lightPush.send(encoder1, {
payload: utf8ToBytes("Hello World2")
});
expect(request1?.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes?.messageCollector.waitForMessages(1, {
pubsubTopic: encoder1.pubsubTopic
})
).to.eq(true);
const request2 = await waku?.lightPush.send(encoder2, {
payload: utf8ToBytes("Hello World3")
});
expect(request2?.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes?.messageCollector.waitForMessages(1, {
pubsubTopic: encoder2.pubsubTopic
})
).to.eq(true);
});
});
}
});

View File

@ -14,9 +14,11 @@ import {
runStoreNodes,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
TestDecoder2,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";
@ -27,7 +29,12 @@ describe("Waku Store, cursor", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(
this.ctx,
TestNetworkConfig,
[],
[TestContentTopic]
);
});
afterEachCustom(this, async () => {
@ -43,11 +50,12 @@ describe("Waku Store, cursor", function () {
[110, 120]
].forEach(([cursorIndex, messageCount]) => {
it(`Passing a valid cursor at ${cursorIndex} index when there are ${messageCount} messages`, async function () {
console.log(nwaku);
await sendMessages(
nwaku,
messageCount,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
// messages in reversed order (first message at last index)
@ -95,9 +103,9 @@ describe("Waku Store, cursor", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
waku2 = await startAndConnectLightNode(nwaku, TestShardInfo);
waku2 = await startAndConnectLightNode(nwaku, TestNetworkConfig);
// messages in reversed order (first message at last index)
const messages: DecodedMessage[] = [];
@ -137,12 +145,7 @@ describe("Waku Store, cursor", function () {
this.skip();
}
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(nwaku, totalMsgs, TestContentTopic, TestRoutingInfo);
const messages: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
@ -170,7 +173,7 @@ describe("Waku Store, cursor", function () {
if (
!(err instanceof Error) ||
!err.message.includes(
"Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DRIVER_ERROR: cursor not found"
"Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DIRVER_ERROR: cursor not found"
)
) {
throw err;
@ -187,7 +190,7 @@ describe("Waku Store, cursor", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const messages: DecodedMessage[] = [];
@ -196,7 +199,7 @@ describe("Waku Store, cursor", function () {
messages.push(msg as DecodedMessage);
}
}
messages[5].pubsubTopic = TestDecoder2.pubsubTopic;
messages[5].pubsubTopic = TestDecoder2.routingInfo.pubsubTopic;
const cursor = waku.store.createCursor(messages[5]);
try {
@ -210,7 +213,7 @@ describe("Waku Store, cursor", function () {
if (
!(err instanceof Error) ||
!err.message.includes(
"Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DRIVER_ERROR: cursor not found"
"Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DIRVER_ERROR: cursor not found"
)
) {
throw err;

View File

@ -0,0 +1,190 @@
import { createDecoder } from "@waku/core";
import { IMessage, LightNode, ShardId, StaticSharding } from "@waku/interfaces";
import { Protocols } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import {
afterEachCustom,
beforeEachCustom,
makeLogFileName,
ServiceNode,
tearDownNodes
} from "../../src/index.js";
import {
processQueriedMessages,
runStoreNodes,
sendMessages,
totalMsgs
} from "./utils.js";
// Static-sharding fixtures: one cluster (id 2) with two explicit shards.
// Routing info is derived per-shard from the network config, so each
// decoder below is pinned to exactly one pubsub topic.
const StaticTestClusterId = 2;
// Shards the relay-side service node subscribes to (node config, not routing).
const StaticTestRelayShards = [1, 2];
// Network-wide config: static sharding is identified by cluster id alone;
// individual shard choice is message-level routing info.
const StaticTestNetworkConfig: StaticSharding = {
clusterId: StaticTestClusterId
};
// Shard 1 fixtures: content topic + routing info + matching decoder.
const TestShardOne: ShardId = 1;
const TestContentTopicOne = "/test/0/one/proto";
const TestRoutingInfoOne = createRoutingInfo(StaticTestNetworkConfig, {
shardId: TestShardOne
});
const TestDecoderShardOne = createDecoder(
TestContentTopicOne,
TestRoutingInfoOne
);
// Shard 2 fixtures: same shape as above, on the second shard.
const TestShardTwo: ShardId = 2;
const TestContentTopicTwo = "/test/0/two/proto";
const TestRoutingInfoTwo = createRoutingInfo(StaticTestNetworkConfig, {
shardId: TestShardTwo
});
const TestDecoderShardTwo = createDecoder(
TestContentTopicTwo,
TestRoutingInfoTwo
);
// TODO: Same tests but with auto-sharding
// Store protocol over static sharding: verifies that store queries are
// correctly scoped per shard (single shard, two shards on one node, and
// one shard per node across two nodes).
describe("Waku Store, different static shards", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;
// Fresh store-enabled service node + light node per test, subscribed to
// both shards of the static test cluster.
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(
this.ctx,
StaticTestNetworkConfig,
StaticTestRelayShards
);
});
afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Generator, one shard", async function () {
// Publish on shard 1 only, then query the store via the shard-1 decoder.
await sendMessages(
nwaku,
totalMsgs,
TestContentTopicOne,
TestRoutingInfoOne
);
const messages = await processQueriedMessages(
waku,
[TestDecoderShardOne],
TestDecoderShardOne.routingInfo.pubsubTopic
);
expect(messages?.length).eq(totalMsgs);
// sendMessages encodes an index as the first payload byte; check the
// first message (index 0) made it back.
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});
it("Generator, 2 different shards", async function () {
this.timeout(10000);
const totalMsgs = 10;
// Publish the same number of messages on each shard, then query each
// shard independently and check neither leaks into the other's results.
await sendMessages(
nwaku,
totalMsgs,
TestContentTopicOne,
TestRoutingInfoOne
);
await sendMessages(
nwaku,
totalMsgs,
TestContentTopicTwo,
TestRoutingInfoTwo
);
const customMessages = await processQueriedMessages(
waku,
[TestDecoderShardOne],
TestDecoderShardOne.routingInfo.pubsubTopic
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);
const testMessages = await processQueriedMessages(
waku,
[TestDecoderShardTwo],
TestDecoderShardTwo.routingInfo.pubsubTopic
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});
it("Generator, 2 nwaku nodes each with different shards", async function () {
this.timeout(10000);
// Replace the shared beforeEach node: this test needs one node per shard.
await tearDownNodes([nwaku], []);
// make sure each nwaku node operates on dedicated shard only
nwaku = new ServiceNode(makeLogFileName(this) + "1");
await nwaku.start({
store: true,
clusterId: StaticTestClusterId,
shard: [1],
relay: true
});
// Set up and start a second nwaku node, subscribed to shard 2 only.
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
clusterId: StaticTestClusterId,
shard: [2],
relay: true
});
const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoderShardOne.contentTopic,
TestDecoderShardOne.routingInfo
);
await sendMessages(
nwaku2,
totalMsgs,
TestDecoderShardTwo.contentTopic,
TestDecoderShardTwo.routingInfo
);
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);
let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];
// Poll until each node's store has returned its full shard; the
// surrounding mocha timeout bounds this loop if messages never arrive.
while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[TestDecoderShardOne],
TestDecoderShardOne.routingInfo.pubsubTopic
);
testMessages = await processQueriedMessages(
waku,
[TestDecoderShardTwo],
TestDecoderShardTwo.routingInfo.pubsubTopic
);
}
});
});

View File

@ -1,5 +1,5 @@
import { IMessage, type LightNode } from "@waku/interfaces";
import { determinePubsubTopic } from "@waku/utils";
import { formatPubsubTopic } from "@waku/utils";
import { expect } from "chai";
import {
@ -14,7 +14,7 @@ import {
runStoreNodes,
TestDecoder,
TestDecoder2,
TestShardInfo
TestNetworkConfig
} from "./utils.js";
describe("Waku Store, error handling", function () {
@ -23,7 +23,7 @@ describe("Waku Store, error handling", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});
afterEachCustom(this, async () => {
@ -68,7 +68,7 @@ describe("Waku Store, error handling", function () {
});
it("Query Generator, No message returned", async function () {
const WrongTestPubsubTopic = determinePubsubTopic("/test/1/wrong/utf8");
const WrongTestPubsubTopic = formatPubsubTopic(43, 53);
const messages = await processQueriedMessages(
waku,
[TestDecoder],

View File

@ -14,6 +14,7 @@ import {
createDecoder as createSymDecoder,
createEncoder as createSymEncoder
} from "@waku/message-encryption/symmetric";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { equals } from "uint8arrays/equals";
@ -35,12 +36,11 @@ import {
runStoreNodes,
sendMessages,
startAndConnectLightNode,
TestContentTopic1,
TestContentTopic,
TestDecoder,
TestDecoder2,
TestEncoder,
TestPubsubTopic1,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";
@ -51,7 +51,7 @@ describe("Waku Store, general", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});
afterEachCustom(this, async () => {
@ -63,13 +63,13 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
TestRoutingInfo.pubsubTopic
);
expect(messages?.length).eq(totalMsgs);
@ -89,7 +89,7 @@ describe("Waku Store, general", function () {
payload: utf8ToBytes(testItem["value"]),
contentTopic: TestDecoder.contentTopic
}),
TestDecoder.pubsubTopic
TestRoutingInfo
)
).to.eq(true);
await delay(1); // to ensure each timestamp is unique.
@ -99,7 +99,7 @@ describe("Waku Store, general", function () {
messageCollector.list = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
TestRoutingInfo.pubsubTopic
);
// checking that all message sent were retrieved
@ -111,57 +111,69 @@ describe("Waku Store, general", function () {
});
it("Query generator for multiple messages with multiple decoders", async function () {
const SecondDecoder = createDecoder(
TestDecoder2.contentTopic,
TestDecoder.pubsubTopic
);
const secondContentTopic = "/test/1/waku-store-two/utf8";
const secondRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: secondContentTopic
});
const secondDecoder = createDecoder(secondContentTopic, secondRoutingInfo);
await nwaku.sendMessage(
ServiceNode.toMessageRpcQuery({
payload: utf8ToBytes("M1"),
contentTopic: TestDecoder.contentTopic
contentTopic: TestContentTopic
}),
TestDecoder.pubsubTopic
TestRoutingInfo
);
await nwaku.sendMessage(
ServiceNode.toMessageRpcQuery({
payload: utf8ToBytes("M2"),
contentTopic: SecondDecoder.contentTopic
contentTopic: secondContentTopic
}),
SecondDecoder.pubsubTopic
secondRoutingInfo
);
const messageCollector = new MessageCollector(nwaku);
messageCollector.list = await processQueriedMessages(
waku,
[TestDecoder, SecondDecoder],
TestDecoder.pubsubTopic
[TestDecoder, secondDecoder],
TestRoutingInfo.pubsubTopic
);
expect(messageCollector.hasMessage(TestDecoder.contentTopic, "M1")).to.eq(
true
);
expect(messageCollector.hasMessage(SecondDecoder.contentTopic, "M2")).to.eq(
true
);
expect(messageCollector.hasMessage(secondContentTopic, "M2")).to.eq(true);
});
it("Query generator for multiple messages with different content topic format", async function () {
for (const testItem of TEST_STRING) {
if (testItem.invalidContentTopic) continue;
const contentTopic = `/test/1/${testItem.value}/proto`;
const routingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic
});
expect(
await nwaku.sendMessage(
ServiceNode.toMessageRpcQuery({
payload: utf8ToBytes(messageText),
contentTopic: testItem["value"]
contentTopic
}),
TestDecoder.pubsubTopic
routingInfo
)
).to.eq(true);
await delay(1); // to ensure each timestamp is unique.
}
for (const testItem of TEST_STRING) {
if (testItem.invalidContentTopic) continue;
const contentTopic = `/test/1/${testItem.value}/proto`;
const routingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic
});
for await (const query of waku.store.queryGenerator([
createDecoder(testItem["value"], TestDecoder.pubsubTopic)
createDecoder(contentTopic, routingInfo)
])) {
for await (const msg of query) {
expect(equals(msg!.payload, utf8ToBytes(messageText))).to.eq(true);
@ -175,7 +187,7 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const messages: IMessage[] = [];
@ -201,7 +213,7 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const desiredMsgs = 14;
@ -254,32 +266,28 @@ describe("Waku Store, general", function () {
const eciesEncoder = createEciesEncoder({
contentTopic: asymTopic,
publicKey,
pubsubTopic: TestPubsubTopic1
routingInfo: TestRoutingInfo
});
const symEncoder = createSymEncoder({
contentTopic: symTopic,
symKey,
pubsubTopic: TestPubsubTopic1
routingInfo: TestRoutingInfo
});
const otherEncoder = createEciesEncoder({
contentTopic: TestContentTopic1,
pubsubTopic: TestPubsubTopic1,
contentTopic: TestContentTopic,
routingInfo: TestRoutingInfo,
publicKey: getPublicKey(generatePrivateKey())
});
const eciesDecoder = createEciesDecoder(
asymTopic,
privateKey,
TestDecoder.pubsubTopic
);
const symDecoder = createSymDecoder(
symTopic,
symKey,
TestDecoder.pubsubTopic
TestRoutingInfo,
privateKey
);
const symDecoder = createSymDecoder(symTopic, TestRoutingInfo, symKey);
waku2 = await startAndConnectLightNode(nwaku, TestShardInfo);
waku2 = await startAndConnectLightNode(nwaku, TestNetworkConfig);
const nimWakuMultiaddr = await nwaku.getMultiaddrWithId();
await waku2.dial(nimWakuMultiaddr);
@ -320,7 +328,7 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const desiredMsgs = 14;
@ -339,17 +347,12 @@ describe("Waku Store, general", function () {
it("Query generator for 2000 messages", async function () {
this.timeout(40000);
await sendMessages(
nwaku,
2000,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(nwaku, 2000, TestDecoder.contentTopic, TestRoutingInfo);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
TestRoutingInfo.pubsubTopic
);
expect(messages?.length).eq(2000);

View File

@ -13,7 +13,8 @@ import {
runStoreNodes,
sendMessages,
TestDecoder,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";
@ -23,7 +24,7 @@ describe("Waku Store, message hash query", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});
afterEachCustom(this, async () => {
@ -35,7 +36,7 @@ describe("Waku Store, message hash query", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic,
TestDecoder.routingInfo,
true
);
@ -54,11 +55,11 @@ describe("Waku Store, message hash query", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic,
TestRoutingInfo,
true
);
const messageHashes = sentMessages.map((msg) =>
messageHash(TestDecoder.pubsubTopic, {
messageHash(TestRoutingInfo.pubsubTopic, {
payload: Buffer.from(msg.payload, "base64"),
contentTopic: msg.contentTopic || TestDecoder.contentTopic,
timestamp: msg.timestamp || undefined,
@ -72,7 +73,7 @@ describe("Waku Store, message hash query", function () {
const messages: IDecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder], {
messageHashes,
pubsubTopic: TestDecoder.pubsubTopic
routingInfo: TestRoutingInfo
})) {
for await (const msg of page) {
messages.push(msg as IDecodedMessage);

View File

@ -1,438 +0,0 @@
import { createDecoder } from "@waku/core";
import type { ContentTopicInfo, IMessage, LightNode } from "@waku/interfaces";
import { createLightNode, Protocols } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
pubsubTopicToSingleShardInfo
} from "@waku/utils";
import { expect } from "chai";
import {
afterEachCustom,
beforeEachCustom,
makeLogFileName,
NOISE_KEY_1,
ServiceNode,
tearDownNodes
} from "../../src/index.js";
import {
processQueriedMessages,
runStoreNodes,
sendMessages,
sendMessagesAutosharding,
TestDecoder,
TestDecoder2,
TestShardInfo,
totalMsgs
} from "./utils.js";
// Store queries against custom (non-default) pubsub topics derived from
// TestShardInfo: single topic, two topics on one node, and one topic per
// node across two nodes.
describe("Waku Store, custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
});
afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Generator, custom pubsub topic", async function () {
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
expect(messages?.length).eq(totalMsgs);
// sendMessages encodes an index as the first payload byte; ensure the
// first message (index 0) is present in the query results.
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});
it("Generator, 2 different pubsubtopics", async function () {
this.timeout(10000);
const totalMsgs = 10;
// Publish the same count on two different pubsub topics, then verify
// each decoder's query returns only its own topic's messages.
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);
const customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);
const testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});
it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
this.timeout(10000);
// Replace the shared beforeEach node: this test needs one node per shard.
await tearDownNodes([nwaku], []);
// make sure each nwaku node operates on dedicated shard only
nwaku = new ServiceNode(makeLogFileName(this) + "1");
await nwaku.start({
store: true,
clusterId: TestShardInfo.clusterId,
shard: [TestShardInfo.shards[0]],
relay: true
});
// Set up and start a second nwaku node, on the other test shard only.
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
clusterId: TestShardInfo.clusterId,
shard: [TestShardInfo.shards[1]],
relay: true
});
const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku2,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);
let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];
// Poll until both nodes' stores return the full message count; the
// surrounding mocha timeout bounds this loop on failure.
while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
}
});
});
// TODO: blocked by https://github.com/waku-org/nwaku/issues/3362
// Autosharded store queries: pubsub topics are derived from content topics
// via contentTopicToPubsubTopic. Suite is skipped (see TODO above: blocked
// by an upstream nwaku issue).
describe.skip("Waku Store (Autosharding), custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;
// Two content topics that autoshard to two distinct pubsub topics on
// cluster 5; decoders carry the derived single-shard info.
const customContentTopic1 = "/waku/2/content/utf8";
const customContentTopic2 = "/myapp/1/latest/proto";
const clusterId = 5;
const Shard2 = [1];
const autoshardingPubsubTopic1 = contentTopicToPubsubTopic(
customContentTopic1,
clusterId
);
const autoshardingPubsubTopic2 = contentTopicToPubsubTopic(
customContentTopic2,
clusterId
);
const customDecoder1 = createDecoder(
customContentTopic1,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
);
const customDecoder2 = createDecoder(
customContentTopic2,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
);
// Node-level config covering both content topics (i.e. both shards).
const contentTopicInfoBothShards: ContentTopicInfo = {
clusterId,
contentTopics: [customContentTopic1, customContentTopic2]
};
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, contentTopicInfoBothShards);
});
afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Generator, custom pubsub topic", async function () {
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
const messages = await processQueriedMessages(
waku,
[customDecoder1],
autoshardingPubsubTopic1
);
expect(messages?.length).eq(totalMsgs);
// sendMessagesAutosharding encodes an index as the first payload byte.
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});
it("Generator, 2 different pubsubtopics", async function () {
this.timeout(10000);
const totalMsgs = 10;
// Each content topic autoshards to its own pubsub topic; verify the
// store scopes results per derived topic.
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic2);
const customMessages = await processQueriedMessages(
waku,
[customDecoder1],
autoshardingPubsubTopic1
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);
const testMessages = await processQueriedMessages(
waku,
[customDecoder2],
autoshardingPubsubTopic2
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});
it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
this.timeout(10000);
// Second node serves only the second content topic's shard.
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
contentTopic: [customContentTopic2],
relay: true,
clusterId,
shard: Shard2
});
await nwaku2.ensureSubscriptionsAutosharding([customContentTopic2]);
const totalMsgs = 10;
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
await sendMessagesAutosharding(nwaku2, totalMsgs, customContentTopic2);
// Fresh light node configured for both content topics, dialed to both
// service nodes so each store can be queried.
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: contentTopicInfoBothShards
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);
let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];
// Poll until both stores return the full count; mocha timeout bounds it.
while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[customDecoder1],
autoshardingPubsubTopic1
);
testMessages = await processQueriedMessages(
waku,
[customDecoder2],
autoshardingPubsubTopic2
);
}
});
});
// Named-sharding variant of the custom-pubsub-topic store tests: topics
// come directly from the TestDecoder fixtures rather than being derived
// from content topics.
describe("Waku Store (named sharding), custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
});
afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Generator, custom pubsub topic", async function () {
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
expect(messages?.length).eq(totalMsgs);
// First payload byte is the message index; index 0 must be present.
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});
it("Generator, 2 different pubsubtopics", async function () {
this.timeout(10000);
const totalMsgs = 10;
// Same count on each topic; each query must return only its own topic.
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);
const customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);
const testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});
it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
this.timeout(10000);
// Replace the shared beforeEach node: this test provisions its own pair.
await tearDownNodes([nwaku], []);
// make sure each nwaku node operates on dedicated shard only
nwaku = new ServiceNode(makeLogFileName(this) + "1");
await nwaku.start({
store: true,
clusterId: TestShardInfo.clusterId,
shard: [TestShardInfo.shards[0]],
relay: true
});
// Second nwaku node subscribes to all shards of the test cluster.
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
relay: true,
clusterId: TestShardInfo.clusterId,
shard: TestShardInfo.shards
});
await nwaku2.ensureSubscriptions([TestDecoder2.pubsubTopic]);
const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku2,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);
let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];
// Poll until both stores return the full count; mocha timeout bounds it.
while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
}
});
});

View File

@ -13,7 +13,8 @@ import {
runStoreNodes,
sendMessages,
TestDecoder,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";
@ -23,7 +24,7 @@ describe("Waku Store, order", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});
afterEachCustom(this, async () => {
@ -36,7 +37,7 @@ describe("Waku Store, order", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const messages: IMessage[] = [];
@ -64,7 +65,7 @@ describe("Waku Store, order", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const messages: IMessage[] = [];
@ -95,7 +96,7 @@ describe("Waku Store, order", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
const messages: IMessage[] = [];

Some files were not shown because too many files have changed in this diff Show More