Mirror of https://github.com/logos-messaging/js-waku.git (synced 2026-01-03 22:33:07 +00:00)

Merge pull request #2471 from waku-org/waku-api/fix-nwaku-master

feat!: Introduce routing info concept

Commit: 0be4861c79
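The diff below replaces the encoder/decoder pubsub-topic parameters ("pubsubTopic" / "pubsubTopicShardInfo") with a single required routing-info object. A minimal usage sketch of the new API, pieced together from the test code in this commit — the content topic, shard count, and the "@waku/core" import path are illustrative assumptions, not part of the diff:

import type { AutoSharding } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
// createEncoder/createDecoder as exported by the core message module (assumed import path)
import { createDecoder, createEncoder } from "@waku/core";

// All nodes on the same network/cluster share this configuration.
const networkConfig: AutoSharding = { clusterId: 0, numShardsInCluster: 8 };

// Auto-sharding: the shard, and therefore the pubsub topic, is derived from the content topic.
const contentTopic = "/myapp/1/test/proto";
const routingInfo = createRoutingInfo(networkConfig, { contentTopic });

const encoder = createEncoder({ contentTopic, routingInfo });
const decoder = createDecoder(contentTopic, routingInfo);

console.log(routingInfo.pubsubTopic); // "/waku/2/rs/0/0" per the updated tests

// Static sharding: pass an explicit shard id instead of a content topic;
// the updated tests expect the resulting pubsub topic to be "/waku/2/rs/0/3".
const staticRoutingInfo = createRoutingInfo({ clusterId: 0 }, { shardId: 3 });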
@@ -15,7 +15,7 @@ import { ConnectionManager } from "./connection_manager.js";
 import { DiscoveryDialer } from "./discovery_dialer.js";
 import { KeepAliveManager } from "./keep_alive_manager.js";
 import { NetworkMonitor } from "./network_monitor.js";
-import { ShardReader } from "./shard_reader.js";
+import { IShardReader, ShardReader } from "./shard_reader.js";
 
 describe("ConnectionManager", () => {
 let libp2p: Libp2p;
@@ -30,7 +30,7 @@ describe("ConnectionManager", () => {
 // Mock internal components
 let mockKeepAliveManager: sinon.SinonStubbedInstance<KeepAliveManager>;
 let mockDiscoveryDialer: sinon.SinonStubbedInstance<DiscoveryDialer>;
-let mockShardReader: sinon.SinonStubbedInstance<ShardReader>;
+let mockShardReader: sinon.SinonStubbedInstance<IShardReader>;
 let mockNetworkMonitor: sinon.SinonStubbedInstance<NetworkMonitor>;
 let mockConnectionLimiter: sinon.SinonStubbedInstance<ConnectionLimiter>;
 
@@ -63,7 +63,7 @@ describe("ConnectionManager", () => {
 } as unknown as IWakuEventEmitter;
 
 networkConfig = {
-clusterId: 1,
+clusterId: 2,
 shards: [0, 1]
 } as NetworkConfig;
 
@@ -87,7 +87,7 @@ describe("ConnectionManager", () => {
 
 mockShardReader = {
 isPeerOnTopic: sinon.stub().resolves(true)
-} as unknown as sinon.SinonStubbedInstance<ShardReader>;
+} as unknown as sinon.SinonStubbedInstance<IShardReader>;
 
 mockNetworkMonitor = {
 start: sinon.stub(),
 

@@ -5,7 +5,8 @@ import {
 IConnectionManager,
 IRelay,
 IWakuEventEmitter,
-NetworkConfig
+NetworkConfig,
+ShardId
 } from "@waku/interfaces";
 import { Libp2p } from "@waku/interfaces";
 import { Logger } from "@waku/utils";
@@ -45,7 +46,7 @@ export class ConnectionManager implements IConnectionManager {
 private readonly networkMonitor: NetworkMonitor;
 private readonly connectionLimiter: ConnectionLimiter;
 
-private options: ConnectionManagerOptions;
+private readonly options: ConnectionManagerOptions;
 private libp2p: Libp2p;
 
 public constructor(options: ConnectionManagerConstructorOptions) {
@@ -66,6 +67,7 @@ export class ConnectionManager implements IConnectionManager {
 this.keepAliveManager = new KeepAliveManager({
 relay: options.relay,
 libp2p: options.libp2p,
+networkConfig: options.networkConfig,
 options: {
 pingKeepAlive: this.options.pingKeepAlive,
 relayKeepAlive: this.options.relayKeepAlive
@@ -194,4 +196,11 @@ export class ConnectionManager implements IConnectionManager {
 ): Promise<boolean> {
 return this.shardReader.isPeerOnTopic(peerId, pubsubTopic);
 }
+
+public async isPeerOnShard(
+peerId: PeerId,
+shardId: ShardId
+): Promise<boolean> {
+return this.shardReader.isPeerOnShard(peerId, shardId);
+}
 }

@@ -29,7 +29,7 @@ describe("Dialer", () => {
 
 mockShardReader = {
 hasShardInfo: sinon.stub().resolves(false),
-isPeerOnNetwork: sinon.stub().resolves(true)
+isPeerOnCluster: sinon.stub().resolves(true)
 } as unknown as sinon.SinonStubbedInstance<ShardReader>;
 
 mockOptions = {
@@ -280,9 +280,9 @@ describe("Dialer", () => {
 expect(dialStub.calledTwice).to.be.true;
 });
 
-it("should skip peer when not on same shard", async () => {
+it("should skip peer when not on same cluster", async () => {
 mockShardReader.hasShardInfo.resolves(true);
-mockShardReader.isPeerOnNetwork.resolves(false);
+mockShardReader.isPeerOnCluster.resolves(false);
 
 const dialStub = libp2p.dial as sinon.SinonStub;
 
@@ -290,12 +290,12 @@ describe("Dialer", () => {
 
 expect(dialStub.called).to.be.false;
 expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
+expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
 });
 
 it("should dial peer when on same shard", async () => {
 mockShardReader.hasShardInfo.resolves(true);
-mockShardReader.isPeerOnNetwork.resolves(true);
+mockShardReader.isPeerOnCluster.resolves(true);
 
 const dialStub = libp2p.dial as sinon.SinonStub;
 dialStub.resolves();
@@ -305,7 +305,7 @@ describe("Dialer", () => {
 expect(dialStub.calledOnce).to.be.true;
 expect(dialStub.calledWith(mockPeerId)).to.be.true;
 expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
+expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
 });
 
 it("should dial peer when no shard info available", async () => {
@@ -319,7 +319,7 @@ describe("Dialer", () => {
 expect(dialStub.calledOnce).to.be.true;
 expect(dialStub.calledWith(mockPeerId)).to.be.true;
 expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-expect(mockShardReader.isPeerOnNetwork.called).to.be.false;
+expect(mockShardReader.isPeerOnCluster.called).to.be.false;
 });
 
 it("should handle dial errors gracefully", async () => {
@@ -468,7 +468,7 @@ describe("Dialer", () => {
 
 it("should handle network check errors gracefully", async () => {
 mockShardReader.hasShardInfo.resolves(true);
-mockShardReader.isPeerOnNetwork.rejects(new Error("Network check error"));
+mockShardReader.isPeerOnCluster.rejects(new Error("Network check error"));
 
 const dialStub = libp2p.dial as sinon.SinonStub;
 
@@ -476,7 +476,7 @@ describe("Dialer", () => {
 
 expect(dialStub.called).to.be.false;
 expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
+expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
 });
 });
 
@@ -512,7 +512,7 @@ describe("Dialer", () => {
 dialStub.resolves();
 
 mockShardReader.hasShardInfo.withArgs(mockPeerId).resolves(true);
-mockShardReader.isPeerOnNetwork.withArgs(mockPeerId).resolves(true);
+mockShardReader.isPeerOnCluster.withArgs(mockPeerId).resolves(true);
 
 mockShardReader.hasShardInfo.withArgs(mockPeerId2).resolves(false);
 

@@ -153,9 +153,9 @@ export class Dialer implements IDialer {
 return false;
 }
 
-const isOnSameShard = await this.shardReader.isPeerOnNetwork(peerId);
-if (!isOnSameShard) {
-log.info(`Skipping peer ${peerId} - not on same shard`);
+const isOnSameCluster = await this.shardReader.isPeerOnCluster(peerId);
+if (!isOnSameCluster) {
+log.info(`Skipping peer ${peerId} - not on same cluster`);
 return true;
 }
 

@@ -1,4 +1,5 @@
 import type { PeerId } from "@libp2p/interface";
+import { AutoSharding } from "@waku/interfaces";
 import { expect } from "chai";
 import sinon from "sinon";
 
@@ -23,6 +24,11 @@ describe("KeepAliveManager", () => {
 relayKeepAlive: 60
 };
 
+const defaultNetworkConfig: AutoSharding = {
+clusterId: 0,
+numShardsInCluster: 1
+};
+
 beforeEach(() => {
 clock = sinon.useFakeTimers();
 
@@ -61,6 +67,7 @@ describe("KeepAliveManager", () => {
 it("should create KeepAliveManager with required options", () => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p
 });
 
@@ -70,6 +77,7 @@ describe("KeepAliveManager", () => {
 it("should create KeepAliveManager with relay", () => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -82,6 +90,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p
 });
 });
@@ -110,6 +119,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -158,6 +168,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -194,6 +205,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -225,6 +237,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p
 });
 keepAliveManager.start();
@@ -244,6 +257,7 @@ describe("KeepAliveManager", () => {
 keepAliveManager.stop();
 keepAliveManager = new KeepAliveManager({
 options: { pingKeepAlive: 0, relayKeepAlive: 0 },
+networkConfig: defaultNetworkConfig,
 libp2p
 });
 keepAliveManager.start();
@@ -317,6 +331,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -337,6 +352,7 @@ describe("KeepAliveManager", () => {
 keepAliveManager.stop();
 keepAliveManager = new KeepAliveManager({
 options: { pingKeepAlive: 30, relayKeepAlive: 0 },
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -355,6 +371,7 @@ describe("KeepAliveManager", () => {
 keepAliveManager.stop();
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p
 });
 keepAliveManager.start();
@@ -423,6 +440,7 @@ describe("KeepAliveManager", () => {
 beforeEach(() => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -489,6 +507,7 @@ describe("KeepAliveManager", () => {
 
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay: emptyRelay
 });
@@ -506,6 +525,7 @@ describe("KeepAliveManager", () => {
 it("should handle all zero keep alive options", () => {
 keepAliveManager = new KeepAliveManager({
 options: { pingKeepAlive: 0, relayKeepAlive: 0 },
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -525,6 +545,7 @@ describe("KeepAliveManager", () => {
 
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
@@ -544,6 +565,7 @@ describe("KeepAliveManager", () => {
 it("should handle complete peer lifecycle", async () => {
 keepAliveManager = new KeepAliveManager({
 options: defaultOptions,
+networkConfig: defaultNetworkConfig,
 libp2p,
 relay
 });
 

@@ -1,6 +1,6 @@
 import type { PeerId } from "@libp2p/interface";
-import type { IEncoder, IRelay, Libp2p } from "@waku/interfaces";
-import { Logger, pubsubTopicToSingleShardInfo } from "@waku/utils";
+import type { IEncoder, IRelay, Libp2p, NetworkConfig } from "@waku/interfaces";
+import { createRoutingInfo, Logger } from "@waku/utils";
 import { utf8ToBytes } from "@waku/utils/bytes";
 
 import { createEncoder } from "../message/version_0.js";
@@ -15,6 +15,7 @@ type KeepAliveOptions = {
 
 type CreateKeepAliveManagerOptions = {
 options: KeepAliveOptions;
+networkConfig: NetworkConfig;
 libp2p: Libp2p;
 relay?: IRelay;
 };
@@ -26,6 +27,7 @@ interface IKeepAliveManager {
 
 export class KeepAliveManager implements IKeepAliveManager {
 private readonly relay?: IRelay;
+private readonly networkConfig: NetworkConfig;
 private readonly libp2p: Libp2p;
 
 private readonly options: KeepAliveOptions;
@@ -38,10 +40,12 @@ export class KeepAliveManager implements IKeepAliveManager {
 public constructor({
 options,
 relay,
+networkConfig,
 libp2p
 }: CreateKeepAliveManagerOptions) {
 this.options = options;
 this.relay = relay;
+this.networkConfig = networkConfig;
 this.libp2p = libp2p;
 
 this.onPeerConnect = this.onPeerConnect.bind(this);
@@ -163,8 +167,13 @@ export class KeepAliveManager implements IKeepAliveManager {
 continue;
 }
 
+const routingInfo = createRoutingInfo(this.networkConfig, {
+contentTopic: RelayPingContentTopic,
+pubsubTopic: topic
+});
+
 const encoder = createEncoder({
-pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(topic),
+routingInfo: routingInfo,
 contentTopic: RelayPingContentTopic,
 ephemeral: true
 });
 

@@ -1,9 +1,10 @@
 import { PeerId } from "@libp2p/interface";
 import {
+AutoSharding,
+DEFAULT_NUM_SHARDS,
 NetworkConfig,
 PubsubTopic,
-ShardInfo,
-SingleShardInfo
+ShardInfo
 } from "@waku/interfaces";
 import { contentTopicToShardIndex, encodeRelayShard } from "@waku/utils";
 import { expect } from "chai";
@@ -28,11 +29,14 @@ describe("ShardReader", function () {
 
 const testContentTopic = "/test/1/waku-light-push/utf8";
 const testClusterId = 3;
-const testShardIndex = contentTopicToShardIndex(testContentTopic);
+const testShardIndex = contentTopicToShardIndex(
+testContentTopic,
+DEFAULT_NUM_SHARDS
+);
 
-const testNetworkConfig: NetworkConfig = {
-contentTopics: [testContentTopic],
-clusterId: testClusterId
+const testNetworkConfig: AutoSharding = {
+clusterId: testClusterId,
+numShardsInCluster: DEFAULT_NUM_SHARDS
 };
 
 const testShardInfo: ShardInfo = {
@@ -64,10 +68,10 @@ describe("ShardReader", function () {
 });
 
 describe("constructor", function () {
-it("should create ShardReader with contentTopics network config", function () {
-const config: NetworkConfig = {
-contentTopics: ["/test/1/waku-light-push/utf8"],
-clusterId: 3
+it("should create ShardReader with auto sharding network config", function () {
+const config: AutoSharding = {
+clusterId: 3,
+numShardsInCluster: 10
 };
 
 const reader = new ShardReader({
@@ -78,10 +82,9 @@ describe("ShardReader", function () {
 expect(reader).to.be.instanceOf(ShardReader);
 });
 
-it("should create ShardReader with shards network config", function () {
+it("should create ShardReader with static shards network config", function () {
 const config: NetworkConfig = {
-clusterId: 3,
-shards: [1, 2, 3]
+clusterId: 3
 };
 
 const reader = new ShardReader({
@@ -94,7 +97,7 @@ describe("ShardReader", function () {
 });
 
 describe("isPeerOnNetwork", function () {
-it("should return true when peer is on the same network", async function () {
+it("should return true when peer is on the same cluster", async function () {
 const shardInfoBytes = encodeRelayShard(testShardInfo);
 const mockPeer = {
 metadata: new Map([["shardInfo", shardInfoBytes]])
@@ -102,7 +105,7 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
 expect(result).to.be.true;
 sinon.assert.calledWith(mockPeerStore.get, testPeerId);
@@ -120,12 +123,12 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
 expect(result).to.be.false;
 });
 
-it("should return false when peer has no overlapping shards", async function () {
+it("should return true even if peer has no overlapping shards", async function () {
 const noOverlapShardInfo: ShardInfo = {
 clusterId: testClusterId,
 shards: [testShardIndex + 100, testShardIndex + 200] // Use different shards
@@ -137,9 +140,9 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
-expect(result).to.be.false;
+expect(result).to.be.true;
 });
 
 it("should return false when peer has no shard info", async function () {
@@ -149,7 +152,7 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
 expect(result).to.be.false;
 });
@@ -157,7 +160,7 @@ describe("ShardReader", function () {
 it("should return false when peer is not found", async function () {
 mockPeerStore.get.rejects(new Error("Peer not found"));
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
 expect(result).to.be.false;
 });
@@ -172,12 +175,10 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const shard: SingleShardInfo = {
-clusterId: testClusterId,
-shard: testShardIndex
-};
-
-const result = await shardReader.isPeerOnShard(testPeerId, shard);
+const result = await shardReader.isPeerOnShard(
+testPeerId,
+testShardIndex
+);
 
 expect(result).to.be.true;
 });
@@ -190,12 +191,15 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const shard: SingleShardInfo = {
-clusterId: 5,
-shard: testShardIndex
-};
+const shardReaderCluster5 = new ShardReader({
+libp2p: mockLibp2p as any,
+networkConfig: { clusterId: 5 }
+});
 
-const result = await shardReader.isPeerOnShard(testPeerId, shard);
+const result = await shardReaderCluster5.isPeerOnShard(
+testPeerId,
+testShardIndex
+);
 
 expect(result).to.be.false;
 });
@@ -208,23 +212,10 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const shard: SingleShardInfo = {
-clusterId: testClusterId,
-shard: testShardIndex + 100
-};
-
-const result = await shardReader.isPeerOnShard(testPeerId, shard);
-
-expect(result).to.be.false;
-});
-
-it("should return false when shard info is undefined", async function () {
-const shard: SingleShardInfo = {
-clusterId: testClusterId,
-shard: undefined
-};
-
-const result = await shardReader.isPeerOnShard(testPeerId, shard);
+const result = await shardReader.isPeerOnShard(
+testPeerId,
+testShardIndex + 100
+);
 
 expect(result).to.be.false;
 });
@@ -232,12 +223,10 @@ describe("ShardReader", function () {
 it("should return false when peer shard info is not found", async function () {
 mockPeerStore.get.rejects(new Error("Peer not found"));
 
-const shard: SingleShardInfo = {
-clusterId: testClusterId,
-shard: testShardIndex
-};
-
-const result = await shardReader.isPeerOnShard(testPeerId, shard);
+const result = await shardReader.isPeerOnShard(
+testPeerId,
+testShardIndex
+);
 
 expect(result).to.be.false;
 });
@@ -307,7 +296,7 @@ describe("ShardReader", function () {
 it("should handle errors gracefully when getting peer info", async function () {
 mockPeerStore.get.rejects(new Error("Network error"));
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
 expect(result).to.be.false;
 });
@@ -319,7 +308,7 @@ describe("ShardReader", function () {
 
 mockPeerStore.get.resolves(mockPeer);
 
-const result = await shardReader.isPeerOnNetwork(testPeerId);
+const result = await shardReader.isPeerOnCluster(testPeerId);
 
 expect(result).to.be.false;
 });
 

@@ -1,13 +1,12 @@
 import type { PeerId } from "@libp2p/interface";
 import type {
+ClusterId,
 NetworkConfig,
 PubsubTopic,
-ShardInfo,
-SingleShardInfo,
-StaticSharding
+ShardId,
+ShardInfo
 } from "@waku/interfaces";
 import {
-contentTopicToShardIndex,
 decodeRelayShard,
 Logger,
 pubsubTopicToSingleShardInfo
@@ -21,10 +20,14 @@ type ShardReaderConstructorOptions = {
 networkConfig: NetworkConfig;
 };
 
-interface IShardReader {
+export interface IShardReader {
 hasShardInfo(id: PeerId): Promise<boolean>;
-isPeerOnNetwork(id: PeerId): Promise<boolean>;
-isPeerOnShard(id: PeerId, shard: SingleShardInfo): Promise<boolean>;
+isPeerOnCluster(id: PeerId): Promise<boolean>;
+isPeerOnShard(
+id: PeerId,
+clusterId: ClusterId,
+shard: ShardId
+): Promise<boolean>;
 isPeerOnTopic(id: PeerId, pubsubTopic: PubsubTopic): Promise<boolean>;
 }
 
@@ -34,33 +37,26 @@ interface IShardReader {
 export class ShardReader implements IShardReader {
 private readonly libp2p: Libp2p;
 
-private readonly staticShard: StaticSharding;
+private readonly clusterId: ClusterId;
 
 public constructor(options: ShardReaderConstructorOptions) {
 this.libp2p = options.libp2p;
 
-this.staticShard = this.getStaticShardFromNetworkConfig(
-options.networkConfig
-);
+this.clusterId = options.networkConfig.clusterId;
 }
 
-public async isPeerOnNetwork(id: PeerId): Promise<boolean> {
-const shardInfo = await this.getShardInfo(id);
+public async isPeerOnCluster(id: PeerId): Promise<boolean> {
+const peerRelayShards = await this.getRelayShards(id);
 
-if (!shardInfo) {
+if (!peerRelayShards) {
 return false;
 }
 
-const clusterMatch = shardInfo.clusterId === this.staticShard.clusterId;
-const shardOverlap = this.staticShard.shards.some((s) =>
-shardInfo.shards.includes(s)
-);
-
-return clusterMatch && shardOverlap;
+return peerRelayShards.clusterId === this.clusterId;
 }
 
 public async hasShardInfo(id: PeerId): Promise<boolean> {
-const shardInfo = await this.getShardInfo(id);
+const shardInfo = await this.getRelayShards(id);
 return !!shardInfo;
 }
 
@@ -69,8 +65,9 @@ export class ShardReader implements IShardReader {
 pubsubTopic: PubsubTopic
 ): Promise<boolean> {
 try {
-const shardInfo = pubsubTopicToSingleShardInfo(pubsubTopic);
-return await this.isPeerOnShard(id, shardInfo);
+const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic);
+if (clusterId !== this.clusterId) return false;
+return await this.isPeerOnShard(id, shard);
 } catch (error) {
 log.error(
 `Error comparing pubsub topic ${pubsubTopic} with shard info for ${id}`,
@@ -80,23 +77,23 @@ export class ShardReader implements IShardReader {
 }
 }
 
-public async isPeerOnShard(
-id: PeerId,
-shard: SingleShardInfo
-): Promise<boolean> {
-const peerShardInfo = await this.getShardInfo(id);
-
-if (!peerShardInfo || shard.shard === undefined) {
+public async isPeerOnShard(id: PeerId, shard: ShardId): Promise<boolean> {
+const peerShardInfo = await this.getRelayShards(id);
+log.info(
+`Checking if peer on same shard: this { clusterId: ${this.clusterId}, shardId: ${shard} },` +
+`${id} { clusterId: ${peerShardInfo?.clusterId}, shards: ${peerShardInfo?.shards} }`
+);
+if (!peerShardInfo) {
 return false;
 }
 
 return (
-peerShardInfo.clusterId === shard.clusterId &&
-peerShardInfo.shards.includes(shard.shard)
+peerShardInfo.clusterId === this.clusterId &&
+peerShardInfo.shards.includes(shard)
 );
 }
 
-private async getShardInfo(id: PeerId): Promise<ShardInfo | undefined> {
+private async getRelayShards(id: PeerId): Promise<ShardInfo | undefined> {
 try {
 const peer = await this.libp2p.peerStore.get(id);
 
@@ -106,29 +103,10 @@ export class ShardReader implements IShardReader {
 return undefined;
 }
 
-const decodedShardInfo = decodeRelayShard(shardInfoBytes);
-
-return decodedShardInfo;
+return decodeRelayShard(shardInfoBytes);
 } catch (error) {
 log.error(`Error getting shard info for ${id}`, error);
 return undefined;
 }
 }
-
-private getStaticShardFromNetworkConfig(
-networkConfig: NetworkConfig
-): StaticSharding {
-if ("shards" in networkConfig) {
-return networkConfig;
-}
-
-const shards = networkConfig.contentTopics.map((topic) =>
-contentTopicToShardIndex(topic)
-);
-
-return {
-clusterId: networkConfig.clusterId!,
-shards
-};
-}
 }
@ -1,30 +1,38 @@
|
||||
import type { IProtoMessage } from "@waku/interfaces";
|
||||
import { contentTopicToPubsubTopic } from "@waku/utils";
|
||||
import type { AutoSharding, IProtoMessage } from "@waku/interfaces";
|
||||
import { createRoutingInfo } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { createDecoder, createEncoder, DecodedMessage } from "./version_0.js";
|
||||
|
||||
const contentTopic = "/js-waku/1/tests/bytes";
|
||||
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
|
||||
const testContentTopic = "/js-waku/1/tests/bytes";
|
||||
|
||||
const testNetworkConfig: AutoSharding = {
|
||||
clusterId: 0,
|
||||
numShardsInCluster: 8
|
||||
};
|
||||
const testRoutingInfo = createRoutingInfo(testNetworkConfig, {
|
||||
contentTopic: testContentTopic
|
||||
});
|
||||
|
||||
describe("Waku Message version 0", function () {
|
||||
it("Round trip binary serialization", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic
|
||||
contentTopic: testContentTopic,
|
||||
routingInfo: testRoutingInfo
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const decoder = createDecoder(testContentTopic, testRoutingInfo);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
testRoutingInfo.pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
|
||||
expect(result.contentTopic).to.eq(contentTopic);
|
||||
expect(result.pubsubTopic).to.eq(pubsubTopic);
|
||||
expect(result.contentTopic).to.eq(testContentTopic);
|
||||
expect(result.pubsubTopic).to.eq(testRoutingInfo.pubsubTopic);
|
||||
expect(result.version).to.eq(0);
|
||||
expect(result.ephemeral).to.be.false;
|
||||
expect(result.payload).to.deep.eq(payload);
|
||||
@ -37,14 +45,15 @@ describe("Waku Message version 0", function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
contentTopic: testContentTopic,
|
||||
routingInfo: testRoutingInfo,
|
||||
ephemeral: true
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const decoder = createDecoder(testContentTopic, testRoutingInfo);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
testRoutingInfo.pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
|
||||
@ -68,15 +77,16 @@ describe("Waku Message version 0", function () {
|
||||
};
|
||||
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
contentTopic: testContentTopic,
|
||||
routingInfo: testRoutingInfo,
|
||||
ephemeral: true,
|
||||
metaSetter
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const decoder = createDecoder(testContentTopic, testRoutingInfo);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
testRoutingInfo.pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
|
||||
@ -99,28 +109,34 @@ describe("Waku Message version 0", function () {
|
||||
describe("Ensures content topic is defined", () => {
|
||||
it("Encoder throws on undefined content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createEncoder({ contentTopic: undefined as unknown as string });
|
||||
createEncoder({
|
||||
contentTopic: undefined as unknown as string,
|
||||
routingInfo: testRoutingInfo
|
||||
});
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
});
|
||||
it("Encoder throws on empty string content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createEncoder({ contentTopic: "" });
|
||||
createEncoder({
|
||||
contentTopic: "",
|
||||
routingInfo: testRoutingInfo
|
||||
});
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
});
|
||||
it("Decoder throws on undefined content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createDecoder(undefined as unknown as string);
|
||||
createDecoder(undefined as unknown as string, testRoutingInfo);
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
});
|
||||
it("Decoder throws on empty string content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createDecoder("");
|
||||
createDecoder("", testRoutingInfo);
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
@ -130,23 +146,22 @@ describe("Ensures content topic is defined", () => {
|
||||
describe("Sets sharding configuration correctly", () => {
|
||||
it("uses static shard pubsub topic instead of autosharding when set", async () => {
|
||||
// Create an encoder setup to use autosharding
|
||||
const ContentTopic = "/waku/2/content/test.js";
|
||||
const contentTopic = "/myapp/1/test/proto";
|
||||
const autoshardingEncoder = createEncoder({
|
||||
pubsubTopicShardInfo: { clusterId: 0 },
|
||||
contentTopic: ContentTopic
|
||||
contentTopic: contentTopic,
|
||||
routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic })
|
||||
});
|
||||
|
||||
// When autosharding is enabled, we expect the shard index to be 1
|
||||
expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/1");
|
||||
expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0");
|
||||
|
||||
// Create an encoder setup to use static sharding with the same content topic
|
||||
const singleShardInfo = { clusterId: 0, shard: 0 };
|
||||
const staticshardingEncoder = createEncoder({
|
||||
contentTopic: ContentTopic,
|
||||
pubsubTopicShardInfo: singleShardInfo
|
||||
contentTopic: contentTopic,
|
||||
routingInfo: createRoutingInfo({ clusterId: 0 }, { shardId: 3 })
|
||||
});
|
||||
|
||||
// When static sharding is enabled, we expect the shard index to be 0
|
||||
expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0");
|
||||
expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/3");
|
||||
});
|
||||
});
|
||||
|
||||
@ -7,11 +7,11 @@ import type {
|
||||
IMetaSetter,
|
||||
IProtoMessage,
|
||||
IRateLimitProof,
|
||||
PubsubTopic,
|
||||
SingleShardInfo
|
||||
IRoutingInfo,
|
||||
PubsubTopic
|
||||
} from "@waku/interfaces";
|
||||
import { proto_message as proto } from "@waku/proto";
|
||||
import { determinePubsubTopic, Logger } from "@waku/utils";
|
||||
import { Logger } from "@waku/utils";
|
||||
|
||||
const log = new Logger("message:version-0");
|
||||
const OneMillion = BigInt(1_000_000);
|
||||
@ -71,7 +71,7 @@ export class Encoder implements IEncoder {
|
||||
public constructor(
|
||||
public contentTopic: string,
|
||||
public ephemeral: boolean = false,
|
||||
public pubsubTopic: PubsubTopic,
|
||||
public routingInfo: IRoutingInfo,
|
||||
public metaSetter?: IMetaSetter
|
||||
) {
|
||||
if (!contentTopic || contentTopic === "") {
|
||||
@ -79,6 +79,10 @@ export class Encoder implements IEncoder {
|
||||
}
|
||||
}
|
||||
|
||||
public get pubsubTopic(): PubsubTopic {
|
||||
return this.routingInfo.pubsubTopic;
|
||||
}
|
||||
|
||||
public async toWire(message: IMessage): Promise<Uint8Array> {
|
||||
return proto.WakuMessage.encode(await this.toProtoObj(message));
|
||||
}
|
||||
@ -112,32 +116,32 @@ export class Encoder implements IEncoder {
|
||||
* format to be sent over the Waku network. The resulting encoder can then be
|
||||
* pass to { @link @waku/interfaces!ISender.send } to automatically encode outgoing
|
||||
* messages.
|
||||
*
|
||||
* Note that a routing info may be tied to a given content topic, this is not checked by the encoder.
|
||||
*/
|
||||
export function createEncoder({
|
||||
pubsubTopic,
|
||||
pubsubTopicShardInfo,
|
||||
contentTopic,
|
||||
routingInfo,
|
||||
ephemeral,
|
||||
metaSetter
|
||||
}: EncoderOptions): Encoder {
|
||||
return new Encoder(
|
||||
contentTopic,
|
||||
ephemeral,
|
||||
determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
|
||||
metaSetter
|
||||
);
|
||||
return new Encoder(contentTopic, ephemeral, routingInfo, metaSetter);
|
||||
}
|
||||
|
||||
export class Decoder implements IDecoder<IDecodedMessage> {
|
||||
public constructor(
|
||||
public pubsubTopic: PubsubTopic,
|
||||
public contentTopic: string
|
||||
public contentTopic: string,
|
||||
public routingInfo: IRoutingInfo
|
||||
) {
|
||||
if (!contentTopic || contentTopic === "") {
|
||||
throw new Error("Content topic must be specified");
|
||||
}
|
||||
}
|
||||
|
||||
public get pubsubTopic(): PubsubTopic {
|
||||
return this.routingInfo.pubsubTopic;
|
||||
}
|
||||
|
||||
public fromWireToProtoObj(
|
||||
bytes: Uint8Array
|
||||
): Promise<IProtoMessage | undefined> {
|
||||
@ -182,13 +186,13 @@ export class Decoder implements IDecoder<IDecodedMessage> {
|
||||
* messages.
|
||||
*
|
||||
* @param contentTopic The resulting decoder will only decode messages with this content topic.
|
||||
* @param routingInfo Routing information such as cluster id and shard id on which the message is expected to be received.
|
||||
*
|
||||
* Note that a routing info may be tied to a given content topic, this is not checked by the encoder.
|
||||
*/
|
||||
export function createDecoder(
|
||||
contentTopic: string,
|
||||
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
|
||||
routingInfo: IRoutingInfo
|
||||
): Decoder {
|
||||
return new Decoder(
|
||||
determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
|
||||
contentTopic
|
||||
);
|
||||
return new Decoder(contentTopic, routingInfo);
|
||||
}
|
||||
|
||||
@ -1,6 +1,8 @@
|
||||
import type { Peer, PeerId, Stream } from "@libp2p/interface";
|
||||
import type { MultiaddrInput } from "@multiformats/multiaddr";
|
||||
|
||||
import { ShardId } from "./sharding.js";
|
||||
|
||||
// Peer tags
|
||||
export enum Tags {
|
||||
BOOTSTRAP = "bootstrap",
|
||||
@ -161,4 +163,14 @@ export interface IConnectionManager {
|
||||
* @returns Promise resolving to true if the peer has shard info, false otherwise
|
||||
*/
|
||||
hasShardInfo(peerId: PeerId): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Returns true if the passed peer is on the passed pubsub topic
|
||||
*/
|
||||
isPeerOnTopic(peerId: PeerId, pubsubTopic: string): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Returns true if the passed peer is on the passed shard
|
||||
*/
|
||||
isPeerOnShard(peerId: PeerId, shardId: ShardId): Promise<boolean>;
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import type { ShardInfo } from "./sharding";
|
||||
import type { AutoSharding } from "./sharding";
|
||||
|
||||
/**
|
||||
* The default cluster ID for The Waku Network
|
||||
@ -11,11 +11,9 @@ export const DEFAULT_CLUSTER_ID = 1;
|
||||
export const DEFAULT_NUM_SHARDS = 8;
|
||||
|
||||
/**
|
||||
* DefaultShardInfo is default configuration for The Waku Network.
|
||||
* DefaultNetworkConfig is default configuration for The Waku Network.
|
||||
*/
|
||||
export const DefaultShardInfo: ShardInfo = {
|
||||
export const DefaultNetworkConfig: AutoSharding = {
|
||||
clusterId: DEFAULT_CLUSTER_ID,
|
||||
shards: [0, 1, 2, 3, 4, 5, 6, 7, 8]
|
||||
numShardsInCluster: DEFAULT_NUM_SHARDS
|
||||
};
|
||||
|
||||
export const DefaultNetworkConfig = DefaultShardInfo;
|
||||
|
||||
@ -1,13 +1,5 @@
|
||||
import type { ContentTopic, PubsubTopic } from "./misc.js";
|
||||
|
||||
export interface SingleShardInfo {
|
||||
clusterId: number;
|
||||
/**
|
||||
* TODO: make shard required
|
||||
* Specifying this field indicates to the encoder/decoder that static sharding must be used.
|
||||
*/
|
||||
shard?: number;
|
||||
}
|
||||
import type { IRoutingInfo } from "./sharding.js";
|
||||
|
||||
export interface IRateLimitProof {
|
||||
proof: Uint8Array;
|
||||
@ -81,10 +73,9 @@ export interface IMetaSetter {
|
||||
|
||||
export interface EncoderOptions {
|
||||
/**
|
||||
* @deprecated
|
||||
* The routing information for the message (cluster id, shard id, pubsubTopic)
|
||||
*/
|
||||
pubsubTopic?: PubsubTopic;
|
||||
pubsubTopicShardInfo?: SingleShardInfo;
|
||||
routingInfo: IRoutingInfo;
|
||||
/** The content topic to set on outgoing messages. */
|
||||
contentTopic: string;
|
||||
/**
|
||||
@ -101,16 +92,17 @@ export interface EncoderOptions {
|
||||
}
|
||||
|
||||
export interface IEncoder {
|
||||
pubsubTopic: PubsubTopic;
|
||||
contentTopic: string;
|
||||
ephemeral: boolean;
|
||||
routingInfo: IRoutingInfo;
|
||||
pubsubTopic: PubsubTopic;
|
||||
toWire: (message: IMessage) => Promise<Uint8Array | undefined>;
|
||||
toProtoObj: (message: IMessage) => Promise<IProtoMessage | undefined>;
|
||||
}
|
||||
|
||||
export interface IDecoder<T extends IDecodedMessage> {
|
||||
pubsubTopic: PubsubTopic;
|
||||
contentTopic: string;
|
||||
pubsubTopic: PubsubTopic;
|
||||
fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>;
|
||||
fromProtoObj: (
|
||||
pubsubTopic: string,
|
||||
|
||||
@ -6,7 +6,7 @@ import type { CreateLibp2pOptions } from "./libp2p.js";
|
||||
import type { LightPushProtocolOptions } from "./light_push.js";
|
||||
import type { IDecodedMessage } from "./message.js";
|
||||
import type { ThisAndThat, ThisOrThat } from "./misc.js";
|
||||
import type { AutoSharding, StaticSharding } from "./sharding.js";
|
||||
import { NetworkConfig } from "./sharding.js";
|
||||
import type { StoreProtocolOptions } from "./store.js";
|
||||
|
||||
export enum Protocols {
|
||||
@ -16,8 +16,6 @@ export enum Protocols {
|
||||
Filter = "filter"
|
||||
}
|
||||
|
||||
export type NetworkConfig = StaticSharding | AutoSharding;
|
||||
|
||||
export type CreateNodeOptions = {
|
||||
/**
|
||||
* Set the user agent string to be used in identification of the node.
|
||||
|
||||
@ -1,13 +1,29 @@
|
||||
/**
|
||||
* Configuration for a Waku network. All nodes in a given network/cluster
|
||||
* should have the same configuration.
|
||||
*/
|
||||
export type NetworkConfig = StaticSharding | AutoSharding;
|
||||
|
||||
export type ShardInfo = {
|
||||
clusterId: number;
|
||||
shards: number[];
|
||||
clusterId: ClusterId;
|
||||
shards: ShardId[];
|
||||
};
|
||||
|
||||
export type ContentTopicInfo = {
|
||||
clusterId?: number; // TODO: This should be mandatory on a network config
|
||||
contentTopics: string[];
|
||||
export type StaticSharding = {
|
||||
clusterId: ClusterId;
|
||||
};
|
||||
export type AutoSharding = {
|
||||
clusterId: ClusterId;
|
||||
numShardsInCluster: number;
|
||||
};
|
||||
|
||||
export type StaticSharding = ShardInfo;
|
||||
export type AutoSharding = ContentTopicInfo;
|
||||
export type ClusterId = number;
|
||||
export type ShardId = number;
|
||||
|
||||
/**
|
||||
* Routing Information for a given message.
|
||||
*/
|
||||
export interface IRoutingInfo {
|
||||
clusterId: ClusterId;
|
||||
shardId: ShardId;
|
||||
pubsubTopic: string;
|
||||
}
|
||||
|
||||
@ -13,21 +13,12 @@ import type { ILightPush } from "./light_push.js";
|
||||
import { IDecodedMessage, IDecoder, IEncoder } from "./message.js";
|
||||
import type { Protocols } from "./protocols.js";
|
||||
import type { IRelay } from "./relay.js";
|
||||
import type { ShardId } from "./sharding.js";
|
||||
import type { IStore } from "./store.js";
|
||||
|
||||
type AutoShardSingle = {
|
||||
clusterId: number;
|
||||
shardsUnderCluster: number;
|
||||
};
|
||||
|
||||
type StaticShardSingle = {
|
||||
clusterId: number;
|
||||
shard: number;
|
||||
};
|
||||
|
||||
export type CreateDecoderParams = {
|
||||
contentTopic: string;
|
||||
shardInfo?: AutoShardSingle | StaticShardSingle;
|
||||
shardId?: ShardId;
|
||||
};
|
||||
|
||||
export type CreateEncoderParams = CreateDecoderParams & {
|
||||
|
||||
@ -1,13 +1,19 @@
|
||||
import { IProtoMessage } from "@waku/interfaces";
|
||||
import { contentTopicToPubsubTopic } from "@waku/utils";
|
||||
import { createRoutingInfo } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { getPublicKey } from "./crypto/index.js";
|
||||
import { createDecoder, createEncoder } from "./ecies.js";
|
||||
|
||||
const contentTopic = "/js-waku/1/tests/bytes";
|
||||
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
|
||||
const testContentTopic = "/js-waku/1/tests/bytes";
|
||||
const testRoutingInfo = createRoutingInfo(
|
||||
{
|
||||
clusterId: 0,
|
||||
numShardsInCluster: 14
|
||||
},
|
||||
{ contentTopic: testContentTopic }
|
||||
);
|
||||
|
||||
describe("Ecies Encryption", function () {
|
||||
this.timeout(20000);
|
||||
@ -20,19 +26,27 @@ describe("Ecies Encryption", function () {
|
||||
const publicKey = getPublicKey(privateKey);
|
||||
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
contentTopic: testContentTopic,
|
||||
routingInfo: testRoutingInfo,
|
||||
publicKey
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
|
||||
const decoder = createDecoder(contentTopic, privateKey);
|
||||
const decoder = createDecoder(
|
||||
testContentTopic,
|
||||
testRoutingInfo,
|
||||
privateKey
|
||||
);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes!);
|
||||
if (!protoResult) throw "Failed to proto decode";
|
||||
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
|
||||
const result = await decoder.fromProtoObj(
|
||||
testRoutingInfo.pubsubTopic,
|
||||
protoResult
|
||||
);
|
||||
if (!result) throw "Failed to decode";
|
||||
|
||||
expect(result.contentTopic).to.equal(contentTopic);
|
||||
expect(result.pubsubTopic).to.equal(pubsubTopic);
|
||||
expect(result.contentTopic).to.equal(testContentTopic);
|
||||
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
|
||||
expect(result.version).to.equal(1);
|
||||
expect(result?.payload).to.deep.equal(payload);
|
||||
expect(result.signature).to.be.undefined;
|
||||
@ -56,20 +70,28 @@ describe("Ecies Encryption", function () {
|
||||
const bobPublicKey = getPublicKey(bobPrivateKey);
|
||||
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
contentTopic: testContentTopic,
|
||||
routingInfo: testRoutingInfo,
|
||||
publicKey: bobPublicKey,
|
||||
sigPrivKey: alicePrivateKey
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
|
||||
const decoder = createDecoder(contentTopic, bobPrivateKey);
|
||||
const decoder = createDecoder(
|
||||
testContentTopic,
|
||||
testRoutingInfo,
|
||||
bobPrivateKey
|
||||
);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes!);
|
||||
if (!protoResult) throw "Failed to proto decode";
|
||||
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
|
||||
const result = await decoder.fromProtoObj(
|
||||
testRoutingInfo.pubsubTopic,
|
||||
protoResult
|
||||
);
|
||||
if (!result) throw "Failed to decode";
|
||||
|
||||
expect(result.contentTopic).to.equal(contentTopic);
|
||||
expect(result.pubsubTopic).to.equal(pubsubTopic);
|
||||
expect(result.contentTopic).to.equal(testContentTopic);
|
||||
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
|
||||
expect(result.version).to.equal(1);
|
||||
expect(result?.payload).to.deep.equal(payload);
|
||||
expect(result.signature).to.not.be.undefined;
|
||||
@ -97,16 +119,24 @@ describe("Ecies Encryption", function () {
|
||||
};
|
||||
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
contentTopic: testContentTopic,
|
||||
routingInfo: testRoutingInfo,
|
||||
publicKey,
|
||||
metaSetter
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
|
||||
const decoder = createDecoder(contentTopic, privateKey);
|
||||
const decoder = createDecoder(
|
||||
testContentTopic,
|
||||
testRoutingInfo,
|
||||
privateKey
|
||||
);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes!);
|
||||
if (!protoResult) throw "Failed to proto decode";
|
||||
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
|
||||
const result = await decoder.fromProtoObj(
|
||||
testRoutingInfo.pubsubTopic,
|
||||
protoResult
|
||||
);
|
||||
if (!result) throw "Failed to decode";
|
||||
|
||||
const expectedMeta = metaSetter({
|
||||
@ -131,6 +161,7 @@ describe("Ensures content topic is defined", () => {
|
||||
const wrapper = function (): void {
|
||||
createEncoder({
|
||||
contentTopic: undefined as unknown as string,
|
||||
routingInfo: testRoutingInfo,
|
||||
publicKey: new Uint8Array()
|
||||
});
|
||||
};
|
||||
@ -139,21 +170,29 @@ describe("Ensures content topic is defined", () => {
|
||||
});
|
||||
it("Encoder throws on empty string content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createEncoder({ contentTopic: "", publicKey: new Uint8Array() });
|
||||
createEncoder({
|
||||
contentTopic: "",
|
||||
routingInfo: testRoutingInfo,
|
||||
publicKey: new Uint8Array()
|
||||
});
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
});
|
||||
it("Decoder throws on undefined content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createDecoder(undefined as unknown as string, new Uint8Array());
|
||||
createDecoder(
|
||||
undefined as unknown as string,
|
||||
testRoutingInfo,
|
||||
new Uint8Array()
|
||||
);
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
});
|
||||
it("Decoder throws on empty string content topic", () => {
|
||||
const wrapper = function (): void {
|
||||
createDecoder("", new Uint8Array());
|
||||
createDecoder("", testRoutingInfo, new Uint8Array());
|
||||
};
|
||||
|
||||
expect(wrapper).to.throw("Content topic must be specified");
|
||||
|
||||
@ -7,11 +7,11 @@ import {
|
||||
type IMessage,
|
||||
type IMetaSetter,
|
||||
type IProtoMessage,
|
||||
type PubsubTopic,
|
||||
type SingleShardInfo
|
||||
type IRoutingInfo,
|
||||
type PubsubTopic
|
||||
} from "@waku/interfaces";
|
||||
import { WakuMessage } from "@waku/proto";
|
||||
import { determinePubsubTopic, Logger } from "@waku/utils";
|
||||
import { Logger } from "@waku/utils";
|
||||
|
||||
import { generatePrivateKey } from "./crypto/utils.js";
|
||||
import { DecodedMessage } from "./decoded_message.js";
|
||||
@ -35,8 +35,8 @@ const log = new Logger("message-encryption:ecies");
|
||||
|
||||
class Encoder implements IEncoder {
|
||||
public constructor(
|
||||
public pubsubTopic: PubsubTopic,
|
||||
public contentTopic: string,
|
||||
public routingInfo: IRoutingInfo,
|
||||
private publicKey: Uint8Array,
|
||||
private sigPrivKey?: Uint8Array,
|
||||
public ephemeral: boolean = false,
|
||||
@ -47,6 +47,10 @@ class Encoder implements IEncoder {
|
||||
}
|
||||
}
|
||||
|
||||
public get pubsubTopic(): PubsubTopic {
|
||||
return this.routingInfo.pubsubTopic;
|
||||
}
|
||||
|
||||
public async toWire(message: IMessage): Promise<Uint8Array | undefined> {
|
||||
const protoMessage = await this.toProtoObj(message);
|
||||
if (!protoMessage) return;
|
||||
@ -82,10 +86,6 @@ class Encoder implements IEncoder {
|
||||
}
|
||||
|
||||
export interface EncoderOptions extends BaseEncoderOptions {
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
pubsubTopic?: PubsubTopic;
|
||||
/** The public key to encrypt the payload for. */
|
||||
publicKey: Uint8Array;
|
||||
/** An optional private key to be used to sign the payload before encryption. */
|
||||
@ -105,17 +105,16 @@ export interface EncoderOptions extends BaseEncoderOptions {
|
||||
* in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/).
|
||||
*/
|
||||
export function createEncoder({
|
||||
pubsubTopic,
|
||||
pubsubTopicShardInfo,
|
||||
contentTopic,
|
||||
routingInfo,
|
||||
publicKey,
|
||||
sigPrivKey,
|
||||
ephemeral = false,
|
||||
metaSetter
|
||||
}: EncoderOptions): Encoder {
|
||||
return new Encoder(
|
||||
determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
|
||||
contentTopic,
|
||||
routingInfo,
|
||||
publicKey,
|
||||
sigPrivKey,
|
||||
ephemeral,
|
||||
@ -125,11 +124,11 @@ export function createEncoder({
|
||||
|
||||
class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
|
||||
public constructor(
|
||||
pubsubTopic: PubsubTopic,
|
||||
contentTopic: string,
|
||||
routingInfo: IRoutingInfo,
|
||||
private privateKey: Uint8Array
|
||||
) {
|
||||
super(pubsubTopic, contentTopic);
|
||||
super(contentTopic, routingInfo);
|
||||
}
|
||||
|
||||
public async fromProtoObj(
|
||||
@ -197,16 +196,13 @@ class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
|
||||
* decode incoming messages.
|
||||
*
|
||||
* @param contentTopic The resulting decoder will only decode messages with this content topic.
|
||||
* @param routingInfo
|
||||
* @param privateKey The private key used to decrypt the message.
|
||||
*/
|
||||
export function createDecoder(
|
||||
contentTopic: string,
|
||||
privateKey: Uint8Array,
|
||||
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
|
||||
routingInfo: IRoutingInfo,
|
||||
privateKey: Uint8Array
|
||||
): Decoder {
|
||||
return new Decoder(
|
||||
determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
|
||||
contentTopic,
|
||||
privateKey
|
||||
);
|
||||
return new Decoder(contentTopic, routingInfo, privateKey);
|
||||
}
|
||||
|
||||
@ -1,13 +1,19 @@
|
||||
import { IProtoMessage } from "@waku/interfaces";
|
||||
import { contentTopicToPubsubTopic } from "@waku/utils";
|
||||
import { createRoutingInfo } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { getPublicKey } from "./crypto/index.js";
|
import { createDecoder, createEncoder } from "./symmetric.js";

const contentTopic = "/js-waku/1/tests/bytes";
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
const testContentTopic = "/js-waku/1/tests/bytes";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 14
},
{ contentTopic: testContentTopic }
);

describe("Symmetric Encryption", function () {
it("Round trip binary encryption [symmetric, no signature]", async function () {
@ -17,19 +23,27 @@ describe("Symmetric Encryption", function () {
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
async (payload, symKey) => {
const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
symKey
});
const bytes = await encoder.toWire({ payload });

const decoder = createDecoder(contentTopic, symKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
symKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";

expect(result.contentTopic).to.equal(contentTopic);
expect(result.pubsubTopic).to.equal(pubsubTopic);
expect(result.contentTopic).to.equal(testContentTopic);
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
expect(result.version).to.equal(1);
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined;
@ -50,20 +64,28 @@ describe("Symmetric Encryption", function () {
const sigPubKey = getPublicKey(sigPrivKey);

const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
symKey,
sigPrivKey
});
const bytes = await encoder.toWire({ payload });

const decoder = createDecoder(contentTopic, symKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
symKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";

expect(result.contentTopic).to.equal(contentTopic);
expect(result.pubsubTopic).to.equal(pubsubTopic);
expect(result.contentTopic).to.equal(testContentTopic);
expect(result.pubsubTopic).to.equal(testRoutingInfo.pubsubTopic);
expect(result.version).to.equal(1);
expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined;
@ -90,16 +112,24 @@ describe("Symmetric Encryption", function () {
};

const encoder = createEncoder({
contentTopic,
contentTopic: testContentTopic,
routingInfo: testRoutingInfo,
symKey,
metaSetter
});
const bytes = await encoder.toWire({ payload });

const decoder = createDecoder(contentTopic, symKey);
const decoder = createDecoder(
testContentTopic,
testRoutingInfo,
symKey
);
const protoResult = await decoder.fromWireToProtoObj(bytes!);
if (!protoResult) throw "Failed to proto decode";
const result = await decoder.fromProtoObj(pubsubTopic, protoResult);
const result = await decoder.fromProtoObj(
testRoutingInfo.pubsubTopic,
protoResult
);
if (!result) throw "Failed to decode";

const expectedMeta = metaSetter({
@ -124,6 +154,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void {
createEncoder({
contentTopic: undefined as unknown as string,
routingInfo: testRoutingInfo,
symKey: new Uint8Array()
});
};
@ -132,21 +163,29 @@ describe("Ensures content topic is defined", () => {
});
it("Encoder throws on empty string content topic", () => {
const wrapper = function (): void {
createEncoder({ contentTopic: "", symKey: new Uint8Array() });
createEncoder({
contentTopic: "",
routingInfo: testRoutingInfo,
symKey: new Uint8Array()
});
};

expect(wrapper).to.throw("Content topic must be specified");
});
it("Decoder throws on undefined content topic", () => {
const wrapper = function (): void {
createDecoder(undefined as unknown as string, new Uint8Array());
createDecoder(
undefined as unknown as string,
testRoutingInfo,
new Uint8Array()
);
};

expect(wrapper).to.throw("Content topic must be specified");
});
it("Decoder throws on empty string content topic", () => {
const wrapper = function (): void {
createDecoder("", new Uint8Array());
createDecoder("", testRoutingInfo, new Uint8Array());
};

expect(wrapper).to.throw("Content topic must be specified");

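The test constants above lean on the new routing-info concept: under auto-sharding, createRoutingInfo derives the shard, and therefore the pubsub topic, from the content topic and the cluster's shard count. A minimal sketch of the call shape (argument names follow this diff; the derived shard value is illustrative, since it depends on the content-topic hash):

import { createRoutingInfo } from "@waku/utils";

// Network-level config first, then the codec-level selector.
const routingInfo = createRoutingInfo(
  { clusterId: 0, numShardsInCluster: 14 },
  { contentTopic: "/js-waku/1/tests/bytes" }
);

// Pubsub topics follow the /waku/2/rs/<clusterId>/<shardId> convention,
// e.g. "/waku/2/rs/0/3" for whichever shard the content topic hashes to.
const topic = routingInfo.pubsubTopic;
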
@ -7,11 +7,11 @@ import type {
IMessage,
IMetaSetter,
IProtoMessage,
PubsubTopic,
SingleShardInfo
IRoutingInfo,
PubsubTopic
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { determinePubsubTopic, Logger } from "@waku/utils";
import { Logger } from "@waku/utils";

import { generateSymmetricKey } from "./crypto/utils.js";
import { DecodedMessage } from "./decoded_message.js";
@ -35,8 +35,8 @@ const log = new Logger("message-encryption:symmetric");

class Encoder implements IEncoder {
public constructor(
public pubsubTopic: PubsubTopic,
public contentTopic: string,
public routingInfo: IRoutingInfo,
private symKey: Uint8Array,
private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false,
@ -47,6 +47,10 @@ class Encoder implements IEncoder {
}
}

public get pubsubTopic(): PubsubTopic {
return this.routingInfo.pubsubTopic;
}

public async toWire(message: IMessage): Promise<Uint8Array | undefined> {
const protoMessage = await this.toProtoObj(message);
if (!protoMessage) return;
@ -101,17 +105,16 @@ export interface EncoderOptions extends BaseEncoderOptions {
* in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/).
*/
export function createEncoder({
pubsubTopic,
pubsubTopicShardInfo,
contentTopic,
routingInfo,
symKey,
sigPrivKey,
ephemeral = false,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(
determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
contentTopic,
routingInfo,
symKey,
sigPrivKey,
ephemeral,
@ -121,11 +124,11 @@ export function createEncoder({

class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
public constructor(
pubsubTopic: PubsubTopic,
contentTopic: string,
routingInfo: IRoutingInfo,
private symKey: Uint8Array
) {
super(pubsubTopic, contentTopic);
super(contentTopic, routingInfo);
}

public async fromProtoObj(
@ -193,16 +196,13 @@ class Decoder extends DecoderV0 implements IDecoder<IEncryptedMessage> {
* decode incoming messages.
*
* @param contentTopic The resulting decoder will only decode messages with this content topic.
* @param routingInfo Routing information, depends on the network config (static vs auto sharding)
* @param symKey The symmetric key used to decrypt the message.
*/
export function createDecoder(
contentTopic: string,
symKey: Uint8Array,
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
routingInfo: IRoutingInfo,
symKey: Uint8Array
): Decoder {
return new Decoder(
determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
contentTopic,
symKey
);
return new Decoder(contentTopic, routingInfo, symKey);
}

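Taken together, the symmetric codec API shifts from pubsub-topic hints to a required routing info: createEncoder gains a routingInfo option and createDecoder takes it as the second positional argument, ahead of symKey. A hedged before/after sketch (the "@waku/message-encryption/symmetric" import path and the generateSymmetricKey re-export are assumptions based on the package layout):

import { createDecoder, createEncoder } from "@waku/message-encryption/symmetric";
import { generateSymmetricKey } from "@waku/message-encryption";
import { createRoutingInfo } from "@waku/utils";

const contentTopic = "/js-waku/1/tests/bytes";
const routingInfo = createRoutingInfo(
  { clusterId: 0, numShardsInCluster: 14 },
  { contentTopic }
);
const symKey = generateSymmetricKey();

// Before: createDecoder(contentTopic, symKey, optionalShardInfoOrPubsubTopic)
// After: routing info is mandatory and positional.
const encoder = createEncoder({ contentTopic, routingInfo, symKey });
const decoder = createDecoder(contentTopic, routingInfo, symKey);
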
@ -1,7 +1,5 @@
import type { CreateNodeOptions, RelayNode } from "@waku/interfaces";
import { DefaultNetworkConfig } from "@waku/interfaces";
import { CreateNodeOptions, RelayNode } from "@waku/interfaces";
import { createLibp2pAndUpdateOptions, WakuNode } from "@waku/sdk";
import { derivePubsubTopicsFromNetworkConfig } from "@waku/utils";

import { Relay, RelayCreateOptions, wakuGossipSub } from "./relay.js";

@ -16,7 +14,7 @@ import { Relay, RelayCreateOptions, wakuGossipSub } from "./relay.js";
* or use this function with caution.
*/
export async function createRelayNode(
options: CreateNodeOptions & Partial<RelayCreateOptions>
options: CreateNodeOptions & RelayCreateOptions
): Promise<RelayNode> {
options = {
...options,
@ -29,9 +27,9 @@ export async function createRelayNode(
};

const libp2p = await createLibp2pAndUpdateOptions(options);
const pubsubTopics = derivePubsubTopicsFromNetworkConfig(
options.networkConfig ?? DefaultNetworkConfig
);

const pubsubTopics = options.routingInfos.map((ri) => ri.pubsubTopic);

const relay = new Relay({
pubsubTopics,
libp2p

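createRelayNode no longer derives pubsub topics from a network config with a fallback default; callers must pass explicit routingInfos, and the relay subscribes to their pubsub topics. A hedged usage sketch (the "@waku/relay" import path and the content topic are assumptions; only the routingInfos option is taken from this diff):

import { createRelayNode } from "@waku/relay"; // path assumed
import { createRoutingInfo } from "@waku/utils";

const networkConfig = { clusterId: 0, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
  contentTopic: "/my-app/1/chat/proto" // hypothetical topic
});

const waku = await createRelayNode({
  networkConfig,
  routingInfos: [routingInfo]
});
await waku.start();
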
@ -3,14 +3,21 @@ import { TopicValidatorResult } from "@libp2p/interface";
import type { UnsignedMessage } from "@libp2p/interface";
import { peerIdFromPrivateKey } from "@libp2p/peer-id";
import { createEncoder } from "@waku/core";
import { determinePubsubTopic } from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import fc from "fast-check";

import { messageValidator } from "./message_validator.js";

const TestContentTopic = "/app/1/topic/utf8";
const TestPubsubTopic = determinePubsubTopic(TestContentTopic);
const testContentTopic = "/app/1/topic/utf8";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 8
},
{ contentTopic: testContentTopic }
);
const testPubsubTopic = testRoutingInfo.pubsubTopic;

describe("Message Validator", () => {
it("Accepts a valid Waku Message", async () => {
@ -20,14 +27,14 @@ describe("Message Validator", () => {
const peerId = peerIdFromPrivateKey(privateKey);

const encoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
contentTopic: testContentTopic,
routingInfo: testRoutingInfo
});
const bytes = await encoder.toWire({ payload });

const message: UnsignedMessage = {
type: "unsigned",
topic: TestPubsubTopic,
topic: testPubsubTopic,
data: bytes
};

@ -46,7 +53,7 @@ describe("Message Validator", () => {

const message: UnsignedMessage = {
type: "unsigned",
topic: TestPubsubTopic,
topic: testPubsubTopic,
data
};


@ -17,6 +17,7 @@ import {
IEncoder,
IMessage,
IRelay,
type IRoutingInfo,
Libp2p,
ProtocolError,
PubsubTopic,
@ -29,7 +30,7 @@ import { pEvent } from "p-event";

import { RelayCodecs } from "./constants.js";
import { messageValidator } from "./message_validator.js";
import { TopicOnlyDecoder } from "./topic_only_message.js";
import { ContentTopicOnlyDecoder } from "./topic_only_message.js";

const log = new Logger("relay");

@ -38,7 +39,9 @@ export type Observer<T extends IDecodedMessage> = {
callback: Callback<T>;
};

export type RelayCreateOptions = CreateNodeOptions & GossipsubOpts;
export type RelayCreateOptions = CreateNodeOptions & {
routingInfos: IRoutingInfo[];
} & Partial<GossipsubOpts>;
export type ContentTopic = string;

type ActiveSubscriptions = Map<PubsubTopic, ContentTopic[]>;
@ -53,7 +56,7 @@ type RelayConstructorParams = {
* Throws if libp2p.pubsub does not support Waku Relay
*/
export class Relay implements IRelay {
public readonly pubsubTopics: Set<PubsubTopic>;
public pubsubTopics: Set<PubsubTopic>;
private defaultDecoder: IDecoder<IDecodedMessage>;

public static multicodec: string = RelayCodecs[0];
@ -73,6 +76,7 @@ export class Relay implements IRelay {
}

this.gossipSub = params.libp2p.services.pubsub as GossipSub;

this.pubsubTopics = new Set(params.pubsubTopics);

if (this.gossipSub.isStarted()) {
@ -82,7 +86,7 @@ export class Relay implements IRelay {
this.observers = new Map();

// TODO: User might want to decide what decoder should be used (e.g. for RLN)
this.defaultDecoder = new TopicOnlyDecoder(params.pubsubTopics[0]);
this.defaultDecoder = new ContentTopicOnlyDecoder();
}

/**
@ -240,8 +244,9 @@ export class Relay implements IRelay {
pubsubTopic: string,
bytes: Uint8Array
): Promise<void> {
const topicOnlyMsg = await this.defaultDecoder.fromWireToProtoObj(bytes);
if (!topicOnlyMsg || !topicOnlyMsg.contentTopic) {
const contentTopicOnlyMsg =
await this.defaultDecoder.fromWireToProtoObj(bytes);
if (!contentTopicOnlyMsg || !contentTopicOnlyMsg.contentTopic) {
log.warn("Message does not have a content topic, skipping");
return;
}
@ -253,9 +258,9 @@ export class Relay implements IRelay {
}

// Retrieve the set of observers for the given contentTopic
const observers = contentTopicMap.get(topicOnlyMsg.contentTopic) as Set<
Observer<T>
>;
const observers = contentTopicMap.get(
contentTopicOnlyMsg.contentTopic
) as Set<Observer<T>>;
if (!observers) {
return;
}
@ -277,7 +282,7 @@ export class Relay implements IRelay {
} else {
log.error(
"Failed to decode messages on",
topicOnlyMsg.contentTopic
contentTopicOnlyMsg.contentTopic
);
}
} catch (error) {

@ -1,15 +1,19 @@
import { message } from "@waku/core";
import type {
IDecoder,
IProtoMessage,
IRoutingInfo,
ITopicOnlyMessage,
PubsubTopic
} from "@waku/interfaces";
import { TopicOnlyMessage as ProtoTopicOnlyMessage } from "@waku/proto";

export class TopicOnlyMessage implements ITopicOnlyMessage {
public version = message.version_0.Version;
public payload: Uint8Array = new Uint8Array();
public get version(): number {
throw "Only content topic can be accessed on this message";
}
public get payload(): Uint8Array {
throw "Only content topic can be accessed on this message";
}
public rateLimitProof: undefined;
public timestamp: undefined;
public meta: undefined;
@ -26,11 +30,20 @@ export class TopicOnlyMessage implements ITopicOnlyMessage {
}

// This decoder is used only for reading `contentTopic` from the WakuMessage
export class TopicOnlyDecoder implements IDecoder<ITopicOnlyMessage> {
public contentTopic = "";
export class ContentTopicOnlyDecoder implements IDecoder<ITopicOnlyMessage> {
public constructor() {}

// pubsubTopic is ignored
public constructor(public pubsubTopic: PubsubTopic) {}
public get pubsubTopic(): PubsubTopic {
throw "Pubsub Topic is not available on this decoder, it is only meant to decode the content topic for any message";
}

public get contentTopic(): string {
throw "ContentTopic is not available on this decoder, it is only meant to decode the content topic for any message";
}

public get routingInfo(): IRoutingInfo {
throw "RoutingInfo is not available on this decoder, it is only meant to decode the content topic for any message";
}

public fromWireToProtoObj(
bytes: Uint8Array

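The rename to ContentTopicOnlyDecoder makes the contract explicit: this decoder parses just enough of a WakuMessage to expose contentTopic, and every other accessor now throws rather than returning a placeholder pubsub topic. Relay uses it to route raw gossipsub bytes to per-content-topic observers before any full decoder runs; roughly (the driver code here is hypothetical, the decoder calls are from this diff):

const peek = new ContentTopicOnlyDecoder();
const proto = await peek.fromWireToProtoObj(bytes);
if (proto?.contentTopic) {
  // dispatch `bytes` to the observers registered for proto.contentTopic
}
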
@ -1,15 +1,9 @@
import { LightNode, Protocols } from "@waku/interfaces";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import {
createDecoder,
createEncoder,
createLightNode,
utf8ToBytes
} from "@waku/sdk";
import {
delay,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
contentTopicToPubsubTopic,
createRoutingInfo,
delay
} from "@waku/utils";
import { expect } from "chai";

@ -23,6 +17,10 @@ import {
} from "../../tests/src/index.js";

const ContentTopic = "/waku/2/content/test.high-throughput.js";
const NetworkConfig = { clusterId: 0, numShardsInCluster: 8 };
const RoutingInfo = createRoutingInfo(NetworkConfig, {
contentTopic: ContentTopic
});

describe("High Throughput Messaging", function () {
const testDurationMs = 20 * 60 * 1000; // 20 minutes
@ -41,9 +39,6 @@ describe("High Throughput Messaging", function () {
});

it("Send/Receive thousands of messages quickly", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);

const testStart = new Date();
const testEnd = Date.now() + testDurationMs;

@ -60,8 +55,8 @@ describe("High Throughput Messaging", function () {
store: true,
filter: true,
relay: true,
clusterId: 0,
shard: [0],
clusterId: NetworkConfig.clusterId,
numShardsInNetwork: NetworkConfig.numShardsInCluster,
contentTopic: [ContentTopic]
},
{ retries: 3 }
@ -69,29 +64,26 @@ describe("High Throughput Messaging", function () {

await delay(1000);

await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
await nwaku.ensureSubscriptions([
contentTopicToPubsubTopic(
ContentTopic,
NetworkConfig.clusterId,
NetworkConfig.numShardsInCluster
)
]);

waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({ networkConfig: NetworkConfig });
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter]);

const decoder = createDecoder(ContentTopic, singleShardInfo);
const decoder = createDecoder(ContentTopic, RoutingInfo);
const hasSubscribed = await waku.filter.subscribe(
[decoder],
messageCollector.callback
);
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
});

expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);

let messageId = 0;

// Send messages as fast as possible until testEnd
@ -107,7 +99,8 @@ describe("High Throughput Messaging", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: ContentTopic,
payload: utf8ToBytes(message)
})
}),
RoutingInfo
);
sent = true;

@ -119,7 +112,7 @@ describe("High Throughput Messaging", function () {
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: message,
expectedContentTopic: ContentTopic,
expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
expectedPubsubTopic: RoutingInfo.pubsubTopic
});
}
} catch (e: any) {

@ -1,15 +1,9 @@
import { LightNode, Protocols } from "@waku/interfaces";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import {
createDecoder,
createEncoder,
createLightNode,
utf8ToBytes
} from "@waku/sdk";
import {
delay,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
contentTopicToPubsubTopic,
createRoutingInfo,
delay
} from "@waku/utils";
import { expect } from "chai";

@ -41,8 +35,7 @@ describe("Longevity", function () {
});

it("Filter - 2 hours", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 0, numShardsInCluster: 8 };

const testStart = new Date();

@ -68,29 +61,29 @@ describe("Longevity", function () {
{ retries: 3 }
);

await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
await nwaku.ensureSubscriptions([
contentTopicToPubsubTopic(
ContentTopic,
networkConfig.clusterId,
networkConfig.numShardsInCluster
)
]);

waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({ networkConfig });
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter]);

const decoder = createDecoder(ContentTopic, singleShardInfo);
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
const decoder = createDecoder(ContentTopic, routingInfo);
const hasSubscribed = await waku.filter.subscribe(
[decoder],
messageCollector.callback
);
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
});

expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);

let messageId = 0;

while (Date.now() < testEnd) {
@ -105,7 +98,8 @@ describe("Longevity", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: ContentTopic,
payload: utf8ToBytes(message)
})
}),
routingInfo
);
sent = true;

@ -117,7 +111,7 @@ describe("Longevity", function () {
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: message,
expectedContentTopic: ContentTopic,
expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
expectedPubsubTopic: routingInfo.pubsubTopic
});
}
} catch (e: any) {

@ -1,15 +1,9 @@
import { LightNode, Protocols } from "@waku/interfaces";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import {
createDecoder,
createEncoder,
createLightNode,
utf8ToBytes
} from "@waku/sdk";
import {
delay,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
contentTopicToPubsubTopic,
createRoutingInfo,
delay
} from "@waku/utils";
import { expect } from "chai";

@ -52,8 +46,7 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
});

it("Send/Receive messages of varying sizes", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 0, numShardsInCluster: 8 };

const testStart = new Date();
const testEnd = Date.now() + testDurationMs;
@ -74,29 +67,29 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {

await delay(1000);

await nwaku.ensureSubscriptions(shardInfoToPubsubTopics(shardInfo));
await nwaku.ensureSubscriptions([
contentTopicToPubsubTopic(
ContentTopic,
networkConfig.clusterId,
networkConfig.numShardsInCluster
)
]);

waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({ networkConfig });
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter]);

const decoder = createDecoder(ContentTopic, singleShardInfo);
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});
const decoder = createDecoder(ContentTopic, routingInfo);
const hasSubscribed = await waku.filter.subscribe(
[decoder],
messageCollector.callback
);
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
});

expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);

let messageId = 0;
const report: {
messageId: number;
@ -121,7 +114,8 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: ContentTopic,
payload: utf8ToBytes(message)
})
}),
routingInfo
);
sent = true;

@ -133,7 +127,7 @@ describe("Throughput Sanity Checks - Different Message Sizes", function () {
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: message,
expectedContentTopic: ContentTopic,
expectedPubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
expectedPubsubTopic: routingInfo.pubsubTopic
});
}
} catch (e: any) {

@ -37,14 +37,20 @@ describe("RLN codec with version 0", () => {
await createTestRLNCodecSetup();

const rlnEncoder = createRLNEncoder({
encoder: createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }),
encoder: createEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo
}),
rlnInstance,
index,
credential
});
const rlnDecoder = createRLNDecoder({
rlnInstance,
decoder: createDecoder(TEST_CONSTANTS.contentTopic)
decoder: createDecoder(
TEST_CONSTANTS.contentTopic,
TEST_CONSTANTS.routingInfo
)
});

const bytes = await rlnEncoder.toWire({ payload });
@ -65,14 +71,17 @@ describe("RLN codec with version 0", () => {
await createTestRLNCodecSetup();

const rlnEncoder = new RLNEncoder(
createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }),
createEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo
}),
rlnInstance,
index,
credential
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createDecoder(TEST_CONSTANTS.contentTopic)
createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo)
);

const proto = await rlnEncoder.toProtoObj({ payload });
@ -96,6 +105,7 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createSymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo,
symKey
}),
rlnInstance,
@ -104,7 +114,11 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createSymDecoder(TEST_CONSTANTS.contentTopic, symKey)
createSymDecoder(
TEST_CONSTANTS.contentTopic,
TEST_CONSTANTS.routingInfo,
symKey
)
);

const bytes = await rlnEncoder.toWire({ payload });
@ -128,6 +142,7 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createSymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo,
symKey
}),
rlnInstance,
@ -136,7 +151,11 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createSymDecoder(TEST_CONSTANTS.contentTopic, symKey)
createSymDecoder(
TEST_CONSTANTS.contentTopic,
TEST_CONSTANTS.routingInfo,
symKey
)
);

const proto = await rlnEncoder.toProtoObj({ payload });
@ -159,6 +178,7 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createAsymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo,
publicKey
}),
rlnInstance,
@ -167,7 +187,11 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createAsymDecoder(TEST_CONSTANTS.contentTopic, privateKey)
createAsymDecoder(
TEST_CONSTANTS.contentTopic,
TEST_CONSTANTS.routingInfo,
privateKey
)
);

const bytes = await rlnEncoder.toWire({ payload });
@ -192,6 +216,7 @@ describe("RLN codec with version 1", () => {
const rlnEncoder = new RLNEncoder(
createAsymEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo,
publicKey
}),
rlnInstance,
@ -200,7 +225,11 @@ describe("RLN codec with version 1", () => {
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createAsymDecoder(TEST_CONSTANTS.contentTopic, privateKey)
createAsymDecoder(
TEST_CONSTANTS.contentTopic,
TEST_CONSTANTS.routingInfo,
privateKey
)
);

const proto = await rlnEncoder.toProtoObj({ payload });
@ -221,14 +250,17 @@ describe("RLN Codec - epoch", () => {
await createTestRLNCodecSetup();

const rlnEncoder = new RLNEncoder(
createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic }),
createEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo
}),
rlnInstance,
index,
credential
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createDecoder(TEST_CONSTANTS.contentTopic)
createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo)
);

const proto = await rlnEncoder.toProtoObj({ payload });
@ -258,6 +290,7 @@ describe("RLN codec with version 0 and meta setter", () => {
const rlnEncoder = createRLNEncoder({
encoder: createEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo,
metaSetter
}),
rlnInstance,
@ -266,7 +299,10 @@ describe("RLN codec with version 0 and meta setter", () => {
});
const rlnDecoder = createRLNDecoder({
rlnInstance,
decoder: createDecoder(TEST_CONSTANTS.contentTopic)
decoder: createDecoder(
TEST_CONSTANTS.contentTopic,
TEST_CONSTANTS.routingInfo
)
});

const bytes = await rlnEncoder.toWire({ payload });
@ -294,14 +330,18 @@ describe("RLN codec with version 0 and meta setter", () => {
const metaSetter = createTestMetaSetter();

const rlnEncoder = new RLNEncoder(
createEncoder({ contentTopic: TEST_CONSTANTS.contentTopic, metaSetter }),
createEncoder({
contentTopic: TEST_CONSTANTS.contentTopic,
routingInfo: TEST_CONSTANTS.routingInfo,
metaSetter
}),
rlnInstance,
index,
credential
);
const rlnDecoder = new RLNDecoder(
rlnInstance,
createDecoder(TEST_CONSTANTS.contentTopic)
createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo)
);

const proto = await rlnEncoder.toProtoObj({ payload });

@ -1,4 +1,5 @@
import type { IProtoMessage } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";

import { createRLN } from "./create.js";
@ -15,7 +16,14 @@ export const TEST_CONSTANTS = {
contentTopic: "/test/1/waku-message/utf8",
emptyPubsubTopic: "",
defaultIndex: 0,
defaultPayload: new Uint8Array([1, 2, 3, 4, 5])
defaultPayload: new Uint8Array([1, 2, 3, 4, 5]),
routingInfo: createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 2
},
{ contentTopic: "/test/1/waku-message/utf8" }
)
} as const;

export const EMPTY_PROTO_MESSAGE = {

@ -4,7 +4,8 @@ import type {
IEncoder,
IMessage,
IProtoMessage,
IRateLimitProof
IRateLimitProof,
IRoutingInfo
} from "@waku/interfaces";
import { Logger } from "@waku/utils";

@ -47,19 +48,22 @@ export class RLNEncoder implements IEncoder {

private async generateProof(message: IMessage): Promise<IRateLimitProof> {
const signal = toRLNSignal(this.contentTopic, message);
const proof = await this.rlnInstance.zerokit.generateRLNProof(
return this.rlnInstance.zerokit.generateRLNProof(
signal,
this.index,
message.timestamp,
this.idSecretHash
);
return proof;
}

public get pubsubTopic(): string {
return this.encoder.pubsubTopic;
}

public get routingInfo(): IRoutingInfo {
return this.encoder.routingInfo;
}

public get contentTopic(): string {
return this.encoder.contentTopic;
}

@ -2,6 +2,7 @@ import { createDecoder, createEncoder } from "@waku/core";
import type {
ContentTopic,
IDecodedMessage,
IRoutingInfo,
EncoderOptions as WakuEncoderOptions
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -87,11 +88,12 @@ export class RLNInstance extends RLNCredentialsManager {
}

public createDecoder(
contentTopic: ContentTopic
contentTopic: ContentTopic,
routingInfo: IRoutingInfo
): RLNDecoder<IDecodedMessage> {
return createRLNDecoder({
rlnInstance: this,
decoder: createDecoder(contentTopic)
decoder: createDecoder(contentTopic, routingInfo)
});
}


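RLN codecs follow the same migration: the wrapped version-0 and version-1 encoders and decoders are built with routing info, RLNEncoder exposes the inner encoder's routingInfo, and RLNInstance.createDecoder forwards it. A hedged sketch using the test constants defined earlier in this diff:

// `rlnInstance` as produced by createRLN() in the test setup.
const rlnDecoder = rlnInstance.createDecoder(
  TEST_CONSTANTS.contentTopic,
  TEST_CONSTANTS.routingInfo
);
// Equivalent long form, per the method body above:
// createRLNDecoder({ rlnInstance, decoder: createDecoder(contentTopic, routingInfo) });
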
@ -5,6 +5,7 @@ import type {
IProtoMessage,
Libp2p
} from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import sinon from "sinon";

@ -13,8 +14,15 @@ import { PeerManager } from "../peer_manager/index.js";
import { Filter } from "./filter.js";
import { Subscription } from "./subscription.js";

const PUBSUB_TOPIC = "/waku/2/rs/1/4";
const CONTENT_TOPIC = "/test/1/waku-filter/utf8";
const testContentTopic = "/test/1/waku-filter/utf8";
const testNetworkconfig = {
clusterId: 0,
numShardsInCluster: 9
};
const testRoutingInfo = createRoutingInfo(testNetworkconfig, {
contentTopic: testContentTopic
});
const testPubsubTopic = testRoutingInfo.pubsubTopic;

describe("Filter SDK", () => {
let libp2p: Libp2p;
@ -29,7 +37,7 @@ describe("Filter SDK", () => {
connectionManager = mockConnectionManager();
peerManager = mockPeerManager();
filter = mockFilter({ libp2p, connectionManager, peerManager });
decoder = createDecoder(CONTENT_TOPIC, PUBSUB_TOPIC);
decoder = createDecoder(testContentTopic, testRoutingInfo);
callback = sinon.spy();
});

@ -80,10 +88,10 @@ describe("Filter SDK", () => {

await filter.subscribe(decoder, callback);

const message = createMockMessage(CONTENT_TOPIC);
const message = createMockMessage(testContentTopic);
const peerId = "peer1";

await (filter as any).onIncomingMessage(PUBSUB_TOPIC, message, peerId);
await (filter as any).onIncomingMessage(testPubsubTopic, message, peerId);

expect(subscriptionInvokeStub.calledOnce).to.be.true;
expect(subscriptionInvokeStub.firstCall.args[0]).to.equal(message);
@ -91,7 +99,11 @@ describe("Filter SDK", () => {
});

it("should successfully stop", async () => {
const decoder2 = createDecoder("/another-content-topic", PUBSUB_TOPIC);
const contentTopic2 = "/test/1/waku-filter-2/utf8";
const decoder2 = createDecoder(
contentTopic2,
createRoutingInfo(testNetworkconfig, { contentTopic: contentTopic2 })
);
const stopStub = sinon.stub(Subscription.prototype, "stop");

sinon.stub(Subscription.prototype, "add").resolves(true);
@ -129,7 +141,7 @@ function mockLibp2p(): Libp2p {
function mockConnectionManager(): ConnectionManager {
return {
isTopicConfigured: sinon.stub().callsFake((topic: string) => {
return topic === PUBSUB_TOPIC;
return topic === testPubsubTopic;
})
} as unknown as ConnectionManager;
}

@ -1,6 +1,7 @@
import { Peer, PeerId } from "@libp2p/interface";
import { createEncoder, Encoder, LightPushCodec } from "@waku/core";
import { Libp2p, ProtocolError } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import sinon, { SinonSpy } from "sinon";
@ -9,7 +10,14 @@ import { PeerManager } from "../peer_manager/index.js";

import { LightPush } from "./light_push.js";

const CONTENT_TOPIC = "/test/1/waku-light-push/utf8";
const testContentTopic = "/test/1/waku-light-push/utf8";
const testRoutingInfo = createRoutingInfo(
{
clusterId: 0,
numShardsInCluster: 7
},
{ contentTopic: testContentTopic }
);

describe("LightPush SDK", () => {
let libp2p: Libp2p;
@ -18,7 +26,10 @@ describe("LightPush SDK", () => {

beforeEach(() => {
libp2p = mockLibp2p();
encoder = createEncoder({ contentTopic: CONTENT_TOPIC });
encoder = createEncoder({
contentTopic: testContentTopic,
routingInfo: testRoutingInfo
});
lightPush = mockLightPush({ libp2p });
});


@ -124,7 +124,7 @@ export class LightPush implements ILightPush {
this.retryManager.push(
sendCallback.bind(this),
options.maxAttempts || DEFAULT_MAX_ATTEMPTS,
encoder.pubsubTopic
encoder.routingInfo
);
}


@ -4,6 +4,7 @@ import {
ProtocolError,
Protocols
} from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";
import sinon from "sinon";

@ -11,6 +12,11 @@ import { PeerManager } from "../peer_manager/index.js";

import { RetryManager, ScheduledTask } from "./retry_manager.js";

const TestRoutingInfo = createRoutingInfo(
{ clusterId: 0 },
{ pubsubTopic: "/waku/2/rs/0/0" }
);

describe("RetryManager", () => {
let retryManager: RetryManager;
let peerManager: PeerManager;
@ -59,7 +65,7 @@ describe("RetryManager", () => {
})
);

retryManager.push(successCallback, 3, "test-topic");
retryManager.push(successCallback, 3, TestRoutingInfo);
retryManager.start();

await clock.tickAsync(200);
@ -74,7 +80,7 @@ describe("RetryManager", () => {
(peerManager as any).getPeers = () => [];
const callback = sinon.spy();

retryManager.push(callback, 2, "test-topic");
retryManager.push(callback, 2, TestRoutingInfo);
retryManager.start();

const queue = (retryManager as any)["queue"] as ScheduledTask[];
@ -92,7 +98,7 @@ describe("RetryManager", () => {
(peerManager as any).getPeers = () => [];
const callback = sinon.spy();

retryManager.push(callback, 1, "test-topic");
retryManager.push(callback, 1, TestRoutingInfo);
retryManager.start();
const queue = (retryManager as any)["queue"] as ScheduledTask[];
expect(queue.length).to.equal(1);
@ -117,7 +123,7 @@ describe("RetryManager", () => {
const task = {
callback: failingCallback,
maxAttempts: 2,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
};
await (retryManager as any)["taskExecutor"](task);

@ -133,17 +139,17 @@ describe("RetryManager", () => {
throw new Error(ProtocolError.NO_PEER_AVAILABLE);
});

await (retryManager as any)["taskExecutor"]({
await (retryManager as RetryManager)["taskExecutor"]({
callback: errorCallback,
maxAttempts: 1,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
});

expect((peerManager.renewPeer as sinon.SinonSpy).calledOnce).to.be.true;
expect(
(peerManager.renewPeer as sinon.SinonSpy).calledWith(mockPeerId, {
protocol: Protocols.LightPush,
pubsubTopic: "test-topic"
pubsubTopic: TestRoutingInfo.pubsubTopic
})
).to.be.true;
});
@ -157,7 +163,7 @@ describe("RetryManager", () => {
const task = {
callback: slowCallback,
maxAttempts: 1,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
};
const executionPromise = (retryManager as any)["taskExecutor"](task);

@ -175,7 +181,7 @@ describe("RetryManager", () => {
const task = {
callback: failingCallback,
maxAttempts: 0,
pubsubTopic: "test-topic"
routingInfo: TestRoutingInfo
};
await (retryManager as any)["taskExecutor"](task);

@ -190,7 +196,7 @@ describe("RetryManager", () => {
if (called === 1) retryManager.stop();
return Promise.resolve({ success: mockPeerId, failure: null });
});
retryManager.push(successCallback, 2, "test-topic");
retryManager.push(successCallback, 2, TestRoutingInfo);
retryManager.start();
await clock.tickAsync(500);
expect(called).to.equal(1);
@ -206,7 +212,7 @@ describe("RetryManager", () => {
failure: { error: ProtocolError.GENERIC_FAIL }
});
});
retryManager.push(failCallback, 2, "test-topic");
retryManager.push(failCallback, 2, TestRoutingInfo);
retryManager.start();
await clock.tickAsync(1000);
retryManager.stop();

@ -1,5 +1,9 @@
import type { PeerId } from "@libp2p/interface";
import { type CoreProtocolResult, Protocols } from "@waku/interfaces";
import {
type CoreProtocolResult,
type IRoutingInfo,
Protocols
} from "@waku/interfaces";
import { Logger } from "@waku/utils";

import type { PeerManager } from "../peer_manager/index.js";
@ -15,7 +19,7 @@ type AttemptCallback = (peerId: PeerId) => Promise<CoreProtocolResult>;

export type ScheduledTask = {
maxAttempts: number;
pubsubTopic: string;
routingInfo: IRoutingInfo;
callback: AttemptCallback;
};

@ -54,12 +58,12 @@ export class RetryManager {
public push(
callback: AttemptCallback,
maxAttempts: number,
pubsubTopic: string
routingInfo: IRoutingInfo
): void {
this.queue.push({
maxAttempts,
callback,
pubsubTopic
routingInfo
});
}

@ -96,7 +100,7 @@ export class RetryManager {
const peerId = (
await this.peerManager.getPeers({
protocol: Protocols.LightPush,
pubsubTopic: task.pubsubTopic
pubsubTopic: task.routingInfo.pubsubTopic
})
)[0];

@ -142,7 +146,7 @@ export class RetryManager {
if (shouldPeerBeChanged(error.message)) {
await this.peerManager.renewPeer(peerId, {
protocol: Protocols.LightPush,
pubsubTopic: task.pubsubTopic
pubsubTopic: task.routingInfo.pubsubTopic
});
}


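RetryManager now carries the full IRoutingInfo on each scheduled task instead of a bare pubsub-topic string; peer selection and peer renewal both read task.routingInfo.pubsubTopic. Call sites change only in the third argument, as LightPush above shows; a hedged sketch (the callback body is illustrative, not from this diff):

retryManager.push(
  async (peerId) => protocol.send(encoder, message, peerId), // hypothetical attempt
  3, // maxAttempts
  encoder.routingInfo
);
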
@ -1,9 +1,11 @@
import { PeerId } from "@libp2p/interface";
import {
ClusterId,
CONNECTION_LOCKED_TAG,
IConnectionManager,
Libp2p,
Protocols
Protocols,
ShardId
} from "@waku/interfaces";
import { expect } from "chai";
import sinon from "sinon";
@ -81,6 +83,11 @@ describe("PeerManager", () => {
pubsubTopics: [TEST_PUBSUB_TOPIC],
getConnectedPeers: async () => peers,
getPeers: async () => peers,
isPeerOnShard: async (
_id: PeerId,
_clusterId: ClusterId,
_shardId: ShardId
) => true,
isPeerOnTopic: async (_id: PeerId, _topic: string) => true
} as unknown as IConnectionManager;
peerManager = new PeerManager({

@ -4,14 +4,10 @@ import {
PeerId,
TypedEventEmitter
} from "@libp2p/interface";
import {
ConnectionManager,
FilterCodecs,
LightPushCodec,
StoreCodec
} from "@waku/core";
import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core";
import {
CONNECTION_LOCKED_TAG,
type IConnectionManager,
Libp2p,
Libp2pEventHandler,
Protocols
@ -29,7 +25,7 @@ type PeerManagerConfig = {
type PeerManagerParams = {
libp2p: Libp2p;
config?: PeerManagerConfig;
connectionManager: ConnectionManager;
connectionManager: IConnectionManager;
};

type GetPeersParams = {
@ -67,7 +63,7 @@ export class PeerManager {
private readonly numPeersToUse: number;

private readonly libp2p: Libp2p;
private readonly connectionManager: ConnectionManager;
private readonly connectionManager: IConnectionManager;

private readonly lockedPeers = new Set<string>();
private readonly unlockedPeers = new Map<string, number>();

@ -1,119 +0,0 @@
import { DEFAULT_NUM_SHARDS, DefaultNetworkConfig } from "@waku/interfaces";
import { contentTopicToShardIndex } from "@waku/utils";
import { expect } from "chai";

import { decoderParamsToShardInfo, isShardCompatible } from "./utils.js";

const TestContentTopic = "/test/1/waku-sdk/utf8";

describe("IWaku utils", () => {
describe("decoderParamsToShardInfo", () => {
it("should use provided shard info when available", () => {
const params = {
contentTopic: TestContentTopic,
shardInfo: {
clusterId: 10,
shard: 5
}
};

const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);

expect(result.clusterId).to.equal(10);
expect(result.shard).to.equal(5);
});

it("should use network config clusterId when shard info clusterId is not provided", () => {
const params = {
contentTopic: TestContentTopic,
shardInfo: {
clusterId: 1,
shard: 5
}
};

const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);

expect(result.clusterId).to.equal(1);
expect(result.shard).to.equal(5);
});

it("should use shardsUnderCluster when provided", () => {
const contentTopic = TestContentTopic;
const params = {
contentTopic,
shardInfo: {
clusterId: 10,
shardsUnderCluster: 64
}
};

const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);
const expectedShardIndex = contentTopicToShardIndex(contentTopic, 64);

expect(result.clusterId).to.equal(10);
expect(result.shard).to.equal(expectedShardIndex);
});

it("should calculate shard index from content topic when shard is not provided", () => {
const contentTopic = TestContentTopic;
const params = {
contentTopic
};

const result = decoderParamsToShardInfo(params, DefaultNetworkConfig);
const expectedShardIndex = contentTopicToShardIndex(
contentTopic,
DEFAULT_NUM_SHARDS
);

expect(result.clusterId).to.equal(1);
expect(result.shard).to.equal(expectedShardIndex);
});
});

describe("isShardCompatible", () => {
it("should return false when clusterId doesn't match", () => {
const shardInfo = {
clusterId: 10,
shard: 5
};

const result = isShardCompatible(shardInfo, DefaultNetworkConfig);

expect(result).to.be.false;
});

it("should return false when shard is not included in network shards", () => {
const shardInfo = {
clusterId: 1,
shard: 5
};

const networkConfig = {
clusterId: 1,
shards: [1, 2, 3, 4]
};

const result = isShardCompatible(shardInfo, networkConfig);

expect(result).to.be.false;
});

it("should return true when clusterId matches and shard is included in network shards", () => {
const shardInfo = {
clusterId: 1,
shard: 3
};

const networkConfig = {
clusterId: 1,
shards: [1, 2, 3, 4]
};

const result = isShardCompatible(shardInfo, networkConfig);

expect(result).to.be.true;
});
});
});
@ -1,47 +0,0 @@
import type {
CreateDecoderParams,
NetworkConfig,
SingleShardInfo
} from "@waku/interfaces";
import { DEFAULT_NUM_SHARDS } from "@waku/interfaces";
import { contentTopicToShardIndex } from "@waku/utils";

export const decoderParamsToShardInfo = (
params: CreateDecoderParams,
networkConfig: NetworkConfig
): SingleShardInfo => {
const clusterId = (params.shardInfo?.clusterId ||
networkConfig.clusterId) as number;
const shardsUnderCluster =
params.shardInfo && "shardsUnderCluster" in params.shardInfo
? params.shardInfo.shardsUnderCluster
: DEFAULT_NUM_SHARDS;

const shardIndex =
params.shardInfo && "shard" in params.shardInfo
? params.shardInfo.shard
: contentTopicToShardIndex(params.contentTopic, shardsUnderCluster);

return {
clusterId,
shard: shardIndex
};
};

export const isShardCompatible = (
shardInfo: SingleShardInfo,
networkConfig: NetworkConfig
): boolean => {
if (networkConfig.clusterId !== shardInfo.clusterId) {
return false;
}

if (
"shards" in networkConfig &&
!networkConfig.shards.includes(shardInfo.shard!)
) {
return false;
}

return true;
};
@ -16,6 +16,7 @@ import type {
IFilter,
ILightPush,
IRelay,
IRoutingInfo,
IStore,
IWaku,
IWakuEventEmitter,
@ -27,7 +28,7 @@ import {
HealthStatus,
Protocols
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
import { createRoutingInfo, Logger } from "@waku/utils";

import { Filter } from "../filter/index.js";
import { HealthIndicator } from "../health_indicator/index.js";
@ -35,7 +36,6 @@ import { LightPush } from "../light_push/index.js";
import { PeerManager } from "../peer_manager/index.js";
import { Store } from "../store/index.js";

import { decoderParamsToShardInfo, isShardCompatible } from "./utils.js";
import { waitForRemotePeer } from "./wait_for_remote_peer.js";

const log = new Logger("waku");
@ -260,40 +260,30 @@ export class WakuNode implements IWaku {
}

public createDecoder(params: CreateDecoderParams): IDecoder<IDecodedMessage> {
const singleShardInfo = decoderParamsToShardInfo(
params,
this.networkConfig
const routingInfo = this.createRoutingInfo(
params.contentTopic,
params.shardId
);

log.info(
`Creating Decoder with input:${JSON.stringify(params.shardInfo)}, determined:${JSON.stringify(singleShardInfo)}, expected:${JSON.stringify(this.networkConfig)}.`
);

if (!isShardCompatible(singleShardInfo, this.networkConfig)) {
throw Error(`Cannot create decoder: incompatible shard configuration.`);
}

return createDecoder(params.contentTopic, singleShardInfo);
return createDecoder(params.contentTopic, routingInfo);
}

public createEncoder(params: CreateEncoderParams): IEncoder {
const singleShardInfo = decoderParamsToShardInfo(
params,
this.networkConfig
const routingInfo = this.createRoutingInfo(
params.contentTopic,
params.shardId
);

log.info(
`Creating Encoder with input:${JSON.stringify(params.shardInfo)}, determined:${JSON.stringify(singleShardInfo)}, expected:${JSON.stringify(this.networkConfig)}.`
);

if (!isShardCompatible(singleShardInfo, this.networkConfig)) {
throw Error(`Cannot create encoder: incompatible shard configuration.`);
}

return createEncoder({
contentTopic: params.contentTopic,
ephemeral: params.ephemeral,
pubsubTopicShardInfo: singleShardInfo
routingInfo: routingInfo
});
}

private createRoutingInfo(
contentTopic?: string,
shardId?: number
): IRoutingInfo {
return createRoutingInfo(this.networkConfig, { contentTopic, shardId });
}
}

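With routing info centralized, the node facade drops its own shard-compatibility checks: createEncoder and createDecoder now just build an IRoutingInfo from the node's network config and delegate, presumably leaving validation to createRoutingInfo itself. Hedged usage sketch (shardId is only meaningful under static sharding; topic values are hypothetical):

// Auto-sharding: shard derived from the content topic.
const encoder = waku.createEncoder({ contentTopic: "/my-app/1/chat/proto" });
const decoder = waku.createDecoder({ contentTopic: "/my-app/1/chat/proto" });

// Static sharding: pin the shard explicitly.
const pinned = waku.createDecoder({ contentTopic: "/my-app/1/chat/proto", shardId: 2 });
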
@ -5,7 +5,8 @@
|
||||
* @module
|
||||
*/
|
||||
|
||||
import { PubsubTopic, ShardInfo, SingleShardInfo } from "@waku/interfaces";
|
||||
import { AutoSharding, ShardInfo } from "@waku/interfaces";
|
||||
import { createRoutingInfo } from "@waku/utils";
|
||||
|
||||
export const NOISE_KEY_1 = new Uint8Array(
|
||||
((): number[] => {
|
||||
@ -46,11 +47,27 @@ export const TEST_STRING = [
|
||||
{ description: "Arabic", value: "مرحبا" },
|
||||
{ description: "Russian", value: "Привет" },
|
||||
{ description: "SQL Injection", value: "'; DROP TABLE users; --" },
|
||||
{ description: "Script", value: '<script>alert("hacked");</script>' },
|
||||
{ description: "XML", value: "<element>Some content</element>" },
|
||||
{ description: "Basic HTML tag", value: "<h1>Heading</h1>" },
|
||||
{
|
||||
description: "Script",
|
||||
value: '<script>alert("hacked");</script>',
|
||||
invalidContentTopic: true
|
||||
},
|
||||
{
|
||||
description: "XML",
|
||||
value: "<element>Some content</element>",
|
||||
invalidContentTopic: true
|
||||
},
|
||||
{
|
||||
description: "Basic HTML tag",
|
||||
value: "<h1>Heading</h1>",
|
||||
invalidContentTopic: true
|
||||
},
|
||||
{ description: "JSON", value: '{"user":"admin","password":"123456"}' },
|
||||
{ description: "shell command", value: "`rm -rf /`" },
|
||||
{
|
||||
description: "shell command",
|
||||
value: "`rm -rf /`",
|
||||
invalidContentTopic: true
|
||||
},
|
||||
{ description: "escaped characters", value: "\\n\\t\\0" },
|
||||
{ description: "unicode special characters", value: "\u202Ereverse" },
|
||||
{ description: "emoji", value: "🤫 🤥 😶 😶🌫️ 😐 😑 😬 🫨 🫠 🙄 😯 😦 😧 😮" }
|
||||
@ -68,12 +85,18 @@ export const MOCHA_HOOK_MAX_TIMEOUT = 50_000;
|
||||
export const SEPOLIA_RPC_URL =
|
||||
process.env.SEPOLIA_RPC_URL || "https://sepolia.gateway.tenderly.co";
|
||||
|
||||
export const DefaultTestPubsubTopic: PubsubTopic = "/waku/2/rs/0/0";
|
||||
export const DefaultTestClusterId = 0;
|
||||
export const DefaultTestNumShardsInCluster = 10;
|
||||
export const DefaultTestNetworkConfig: AutoSharding = {
|
||||
clusterId: DefaultTestClusterId,
|
||||
numShardsInCluster: DefaultTestNumShardsInCluster
|
||||
};
|
||||
export const DefaultTestShardInfo: ShardInfo = {
|
||||
clusterId: 0,
|
||||
clusterId: DefaultTestClusterId,
|
||||
shards: [0]
|
||||
};
|
||||
export const DefaultTestSingleShardInfo: SingleShardInfo = {
|
||||
clusterId: 0,
|
||||
shard: 0
|
||||
};
|
||||
export const DefaultTestContentTopic = "/test/1/content-topic/proto";
|
||||
export const DefaultTestRoutingInfo = createRoutingInfo(
|
||||
DefaultTestNetworkConfig,
|
||||
{ contentTopic: DefaultTestContentTopic }
|
||||
);
|
||||

@ -1,13 +1,7 @@
import {
  AutoSharding,
  IDecodedMessage,
  NetworkConfig,
  StaticSharding
} from "@waku/interfaces";
import { contentTopicToShardIndex, Logger } from "@waku/utils";
import { ContentTopic, IDecodedMessage } from "@waku/interfaces";
import { isAutoShardingRoutingInfo, Logger, RoutingInfo } from "@waku/utils";
import { expect } from "chai";

import { DefaultTestPubsubTopic } from "../constants.js";
import { Args, MessageRpcQuery, MessageRpcResponse } from "../types.js";
import { delay, makeLogFileName } from "../utils/index.js";

@ -29,7 +23,7 @@ export class ServiceNodesFleet {
    mochaContext: Mocha.Context,
    nodesToCreate: number = 3,
    strictChecking: boolean = false,
    networkConfig: NetworkConfig,
    routingInfo: RoutingInfo,
    _args?: Args,
    withoutFilter = false
  ): Promise<ServiceNodesFleet> {
@ -40,7 +34,7 @@ export class ServiceNodesFleet {
      makeLogFileName(mochaContext) + Math.random().toString(36).substring(7)
    );

    const args = getArgs(networkConfig, _args);
    const args = applyDefaultArgs(routingInfo, _args);

    if (nodes[0]) {
      const addr = await nodes[0].getExternalMultiaddr();
@ -93,15 +87,19 @@ export class ServiceNodesFleet {

  public async sendRelayMessage(
    message: MessageRpcQuery,
    pubsubTopic: string = DefaultTestPubsubTopic
    routingInfo: RoutingInfo
  ): Promise<boolean> {
    const relayMessagePromises: Promise<boolean>[] = this.nodes.map((node) =>
      node.sendMessage(message, pubsubTopic)
      node.sendMessage(message, routingInfo)
    );
    const relayMessages = await Promise.all(relayMessagePromises);
    return relayMessages.every((message) => message);
  }

  /**
   * This is a dodgy thing to do as it assumes the nwaku node did not flush
   * any messages from its cache.
   */
  public async confirmMessageLength(numMessages: number): Promise<void> {
    if (this.strictChecking) {
      await Promise.all(
@ -203,13 +201,12 @@ class MultipleNodesMessageCollector {
  public async waitForMessages(
    numMessages: number,
    options?: {
      pubsubTopic?: string;
      timeoutDuration?: number;
      exact?: boolean;
      contentTopic?: ContentTopic;
    }
  ): Promise<boolean> {
    const startTime = Date.now();
    const pubsubTopic = options?.pubsubTopic || DefaultTestPubsubTopic;
    const timeoutDuration = options?.timeoutDuration || 400;
    const exact = options?.exact || false;

@ -218,7 +215,7 @@ class MultipleNodesMessageCollector {
      if (this.strictChecking) {
        const results = await Promise.all(
          this.relayNodes.map(async (node) => {
            const msgs = await node.messages(pubsubTopic);
            const msgs = await node.messages(options?.contentTopic);
            return msgs.length >= numMessages;
          })
        );
@ -226,7 +223,7 @@
      } else {
        const results = await Promise.all(
          this.relayNodes.map(async (node) => {
            const msgs = await node.messages(pubsubTopic);
            const msgs = await node.messages(options?.contentTopic);
            return msgs.length >= numMessages;
          })
        );
@ -257,23 +254,25 @@ class MultipleNodesMessageCollector {
  }
}

function getArgs(networkConfig: NetworkConfig, args?: Args): Args {
  const defaultArgs = {
function applyDefaultArgs(routingInfo: RoutingInfo, args?: Args): Args {
  const defaultArgs: Args = {
    lightpush: true,
    filter: true,
    discv5Discovery: true,
    peerExchange: true,
    relay: true,
    clusterId: networkConfig.clusterId
  } as Args;
    relay: true
  };

  if ((networkConfig as StaticSharding).shards) {
    defaultArgs.shard = (networkConfig as StaticSharding).shards;
  } else if ((networkConfig as AutoSharding).contentTopics) {
    defaultArgs.contentTopic = (networkConfig as AutoSharding).contentTopics;
    defaultArgs.shard = (networkConfig as AutoSharding).contentTopics.map(
      (topic) => contentTopicToShardIndex(topic)
    );
  defaultArgs.clusterId = routingInfo.clusterId;

  if (isAutoShardingRoutingInfo(routingInfo)) {
    defaultArgs.numShardsInNetwork =
      routingInfo.networkConfig.numShardsInCluster;

    defaultArgs.contentTopic = [routingInfo.contentTopic];
  } else {
    defaultArgs.numShardsInNetwork = 0;
    defaultArgs.shard = [routingInfo.shardId];
  }

  return { ...defaultArgs, ...args };
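A small sketch of the merge behaviour at the end of applyDefaultArgs: caller-supplied args are spread last, so they override the defaults (assumes the createRoutingInfo helper shown elsewhere in this diff):

import { createRoutingInfo } from "@waku/utils";

const routingInfo = createRoutingInfo({ clusterId: 0 }, { shardId: 3 });
// Disable relay for a light-protocols-only fleet; every other flag
// keeps its default value.
const args = applyDefaultArgs(routingInfo, { relay: false });
// args.relay === false, args.lightpush === true, args.shard deep-equals [3]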

@ -4,7 +4,6 @@ import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { AssertionError, expect } from "chai";
import { equals } from "uint8arrays/equals";

import { DefaultTestPubsubTopic } from "../constants.js";
import { MessageRpcResponse } from "../types.js";
import { base64ToUtf8 } from "../utils/base64_utf8.js";
import { delay } from "../utils/delay.js";
@ -67,20 +66,18 @@ export class MessageCollector {
  public async waitForMessages(
    numMessages: number,
    options?: {
      pubsubTopic?: string;
      timeoutDuration?: number;
      exact?: boolean;
    }
  ): Promise<boolean> {
    const startTime = Date.now();
    const pubsubTopic = this.getPubsubTopicToUse(options?.pubsubTopic);
    const timeoutDuration = options?.timeoutDuration || 400;
    const exact = options?.exact || false;

    while (this.count < numMessages) {
      if (this.nwaku) {
        try {
          this.list = await this.nwaku.messages(pubsubTopic);
          this.list = await this.nwaku.messages();
        } catch (error) {
          log.error(`Can't retrieve messages because of ${error}`);
          await delay(10);
@ -237,15 +234,13 @@ export class MessageCollector {
        `Message text mismatch. Expected: ${options.expectedMessageText}. Got: ${receivedMessageText}`
      );
    } else {
      const pubsubTopicToUse = this.getPubsubTopicToUse(
        options.expectedPubsubTopic
      );
      // js-waku message specific assertions
      expect(message.pubsubTopic).to.eq(
        pubsubTopicToUse,
        `Message pub/sub topic mismatch. Expected: ${pubsubTopicToUse}. Got: ${message.pubsubTopic}`
      );

      if (options.expectedPubsubTopic) {
        // js-waku message specific assertions
        expect(message.pubsubTopic).to.eq(
          options.expectedPubsubTopic,
          `Message pub/sub topic mismatch. Expected: ${options.expectedPubsubTopic}. Got: ${message.pubsubTopic}`
        );
      }
      expect(bytesToUtf8(message.payload)).to.eq(
        options.expectedMessageText,
        `Message text mismatch. Expected: ${
@ -267,8 +262,4 @@ export class MessageCollector {
      );
    }
  }

  private getPubsubTopicToUse(pubsubTopic: string | undefined): string {
    return pubsubTopic || DefaultTestPubsubTopic;
  }
}

@ -1,14 +1,23 @@
import { CreateNodeOptions, NetworkConfig, Protocols } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import {
  ContentTopic,
  type CreateNodeOptions,
  type NetworkConfig,
  Protocols,
  type ShardId
} from "@waku/interfaces";
import { createRelayNode, RelayCreateOptions } from "@waku/relay";
import { createLightNode, WakuNode } from "@waku/sdk";
import {
  derivePubsubTopicsFromNetworkConfig,
  createRoutingInfo,
  isAutoSharding,
  isStaticSharding,
  Logger,
  pubsubTopicsToShardInfo
  RoutingInfo
} from "@waku/utils";
import { Context } from "mocha";

import { NOISE_KEY_1 } from "../constants.js";
import { Args } from "../types.js";
import { makeLogFileName } from "../utils/index.js";

import { ServiceNode } from "./service_node.js";
@ -24,6 +33,8 @@ export const DEFAULT_DISCOVERIES_ENABLED = {
type RunNodesOptions = {
  context: Context;
  networkConfig: NetworkConfig;
  relayShards?: ShardId[]; // Only for static sharding
  contentTopics?: ContentTopic[]; // Only for auto sharding
  protocols: Protocols[];
  createNode: typeof createLightNode | typeof createRelayNode;
};
@ -34,32 +45,62 @@ export async function runNodes<T>(
  const { context, networkConfig, createNode, protocols } = options;

  const nwaku = new ServiceNode(makeLogFileName(context));
  const pubsubTopics = derivePubsubTopicsFromNetworkConfig(networkConfig);
  const shardInfo = pubsubTopicsToShardInfo(pubsubTopics);

  await nwaku.start(
    {
      filter: true,
      lightpush: true,
      relay: true,
      store: true,
      shard: shardInfo.shards,
      clusterId: shardInfo.clusterId
    },
    { retries: 3 }
  );
  const waku_options: CreateNodeOptions = {
  const nwakuArgs: Args = {
    filter: true,
    lightpush: true,
    relay: true,
    store: true,
    clusterId: networkConfig.clusterId
  };

  const jswakuArgs: CreateNodeOptions = {
    staticNoiseKey: NOISE_KEY_1,
    libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
    networkConfig: shardInfo,
    networkConfig,
    lightPush: { numPeersToUse: 2 },
    discovery: DEFAULT_DISCOVERIES_ENABLED
  };

  log.info("Starting js waku node with :", JSON.stringify(waku_options));
  const routingInfos: RoutingInfo[] = [];
  if (isAutoSharding(networkConfig)) {
    nwakuArgs.numShardsInNetwork = networkConfig.numShardsInCluster;
    nwakuArgs.contentTopic = options.contentTopics ?? [];

    nwakuArgs.contentTopic.map((ct) =>
      routingInfos.push(createRoutingInfo(networkConfig, { contentTopic: ct }))
    );

    if (options.relayShards && options.relayShards.length > 0)
      throw "`relayShards` cannot be set for auto-sharding";
  } else if (isStaticSharding(networkConfig) && options.relayShards) {
    const shards = options.relayShards;
    nwakuArgs.shard = shards;
    nwakuArgs.numShardsInNetwork = 0;

    shards.map((shardId) =>
      routingInfos.push(createRoutingInfo(networkConfig, { shardId }))
    );

    if (options.contentTopics && options.contentTopics.length > 0)
      throw "`contentTopics` cannot be set for static sharding";
  } else {
    throw "Invalid Network Config";
  }

  const jswakuRelayCreateOptions: RelayCreateOptions = {
    routingInfos
  };

  await nwaku.start(nwakuArgs, { retries: 3 });

  log.info("Starting js waku node with :", JSON.stringify(jswakuArgs));
  let waku: WakuNode | undefined;
  try {
    waku = (await createNode(waku_options)) as unknown as WakuNode;
    waku = (await createNode({
      ...jswakuArgs,
      ...jswakuRelayCreateOptions
    })) as unknown as WakuNode;
    await waku.start();
  } catch (error) {
    log.error("jswaku node failed to start:", error);
@ -68,7 +109,9 @@ export async function runNodes<T>(
  if (waku) {
    await waku.dial(await nwaku.getMultiaddrWithId());
    await waku.waitForPeers(protocols);
    await nwaku.ensureSubscriptions(pubsubTopics);

    await nwaku.ensureSubscriptions(routingInfos.map((r) => r.pubsubTopic));

    return [nwaku, waku as T];
  } else {
    throw new Error("Failed to initialize waku");

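A usage sketch for the reworked RunNodesOptions (illustrative only; `this.ctx` and the generic parameter come from the surrounding mocha harness):

// Auto-sharding: nwaku is driven via content topics.
const [nwaku, waku] = await runNodes<LightNode>({
  context: this.ctx,
  networkConfig: { clusterId: 0, numShardsInCluster: 10 },
  contentTopics: ["/test/1/content-topic/proto"],
  protocols: [Protocols.Filter, Protocols.LightPush],
  createNode: createLightNode
});
// Static sharding would instead pass relayShards: [0] and omit contentTopics.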
@ -1,12 +1,19 @@
import type { PeerId } from "@libp2p/interface";
import { peerIdFromString } from "@libp2p/peer-id";
import { Multiaddr, multiaddr } from "@multiformats/multiaddr";
import { isDefined, shardInfoToPubsubTopics } from "@waku/utils";
import { ContentTopic, PubsubTopic } from "@waku/interfaces";
import {
  formatPubsubTopic,
  isAutoSharding,
  isDefined,
  isStaticSharding,
  RoutingInfo
} from "@waku/utils";
import { Logger } from "@waku/utils";
import pRetry from "p-retry";
import portfinder from "portfinder";

import { DefaultTestPubsubTopic } from "../constants.js";
import { DefaultTestNetworkConfig } from "../constants.js";
import {
  Args,
  LogLevel,
@ -245,9 +252,7 @@ export class ServiceNode {
    );
  }

  public async ensureSubscriptions(
    pubsubTopics: string[] = [DefaultTestPubsubTopic]
  ): Promise<boolean> {
  public async ensureSubscriptions(pubsubTopics: string[]): Promise<boolean> {
    return this.restCall<boolean>(
      "/relay/v1/subscriptions",
      "POST",
@ -256,13 +261,51 @@ export class ServiceNode {
    );
  }

  public async messages(_pubsubTopic?: string): Promise<MessageRpcResponse[]> {
    const pubsubTopic =
      _pubsubTopic ??
      shardInfoToPubsubTopics({
        clusterId: this.args?.clusterId,
        shards: this.args?.shard
      })[0];
  public async messages(
    contentTopic?: ContentTopic
  ): Promise<MessageRpcResponse[]> {
    if (contentTopic) {
      return this.contentTopicMessages(contentTopic);
    }

    if (this.args?.contentTopic) {
      if (this.args?.contentTopic.length > 1)
        throw "More than one content topic passed, not supported";
      const contentTopic = this.args?.contentTopic[0];

      return this.contentTopicMessages(contentTopic);
    }

    if (this.args?.shard) {
      if (this.args?.shard.length > 1)
        throw "More than one shard passed, not supported";
      const pubsubTopic = formatPubsubTopic(
        this.args.clusterId ?? DefaultTestNetworkConfig.clusterId,
        this.args?.shard[0]
      );
      return this.pubsubTopicMessages(pubsubTopic);
    }

    throw "Content topic, shard or pubsubTopic must be set";
  }
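Two nwaku REST routes are now in play: `/relay/v1/auto/messages/{contentTopic}` for auto-sharded reads and `/relay/v1/messages/{pubsubTopic}` for static-sharded ones. Sketched as raw calls (the base URL is a placeholder for the node's REST port):

const base = "http://127.0.0.1:8645"; // placeholder REST endpoint
const autoSharded = await fetch(
  `${base}/relay/v1/auto/messages/${encodeURIComponent("/test/1/content-topic/proto")}`
).then((r) => r.json());
const staticSharded = await fetch(
  `${base}/relay/v1/messages/${encodeURIComponent("/waku/2/rs/0/0")}`
).then((r) => r.json());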

  private async contentTopicMessages(
    contentTopic: ContentTopic
  ): Promise<MessageRpcResponse[]> {
    return this.restCall<MessageRpcResponse[]>(
      `/relay/v1/auto/messages/${encodeURIComponent(contentTopic)}`,
      "GET",
      null,
      async (response) => {
        const data = await response.json();
        return data?.length ? data : [];
      }
    );
  }

  private async pubsubTopicMessages(
    pubsubTopic: PubsubTopic
  ): Promise<MessageRpcResponse[]> {
    return this.restCall<MessageRpcResponse[]>(
      `/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`,
      "GET",
@ -289,7 +332,20 @@ export class ServiceNode {

  public async sendMessage(
    message: MessageRpcQuery,
    _pubsubTopic?: string
    routingInfo: RoutingInfo
  ): Promise<boolean> {
    if (isAutoSharding(routingInfo.networkConfig)) {
      return this.sendMessageAutoSharding(message);
    }
    if (isStaticSharding(routingInfo.networkConfig)) {
      return this.sendMessageStaticSharding(message, routingInfo.pubsubTopic);
    }
    throw "Invalid network config";
  }

  private async sendMessageStaticSharding(
    message: MessageRpcQuery,
    pubsubTopic: PubsubTopic
  ): Promise<boolean> {
    this.checkProcess();

@ -297,21 +353,15 @@
      message.timestamp = BigInt(new Date().valueOf()) * OneMillion;
    }

    const pubsubTopic =
      _pubsubTopic ??
      shardInfoToPubsubTopics({
        clusterId: this.args?.clusterId,
        shards: this.args?.shard
      })[0];
    return this.restCall<boolean>(
      `/relay/v1/messages/${encodeURIComponent(pubsubTopic || DefaultTestPubsubTopic)}`,
      `/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`,
      "POST",
      message,
      async (response) => response.status === 200
    );
  }

  public async sendMessageAutosharding(
  private async sendMessageAutoSharding(
    message: MessageRpcQuery
  ): Promise<boolean> {
    this.checkProcess();
@ -429,9 +479,7 @@ export function defaultArgs(): Args {
    rest: true,
    restAdmin: true,
    websocketSupport: true,
    logLevel: LogLevel.Trace,
    clusterId: 0,
    shard: [0]
    logLevel: LogLevel.Trace
  };
}


@ -1,3 +1,5 @@
import type { ClusterId, ShardId } from "@waku/interfaces";

export interface Args {
  staticnode?: string;
  nat?: "none";
@ -21,8 +23,9 @@ export interface Args {
  websocketPort?: number;
  discv5BootstrapNode?: string;
  discv5UdpPort?: number;
  clusterId?: number;
  shard?: Array<number>;
  clusterId?: ClusterId;
  shard?: Array<ShardId>;
  numShardsInNetwork?: number;
  rlnRelayEthClientAddress?: string;
}
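An illustrative Args value under the narrowed types (a sketch; Args is the interface above):

import type { ClusterId, ShardId } from "@waku/interfaces";

const clusterId: ClusterId = 9;
const shards: ShardId[] = [1, 2];
const staticArgs: Args = {
  relay: true,
  clusterId,
  shard: shards,
  numShardsInNetwork: 0 // 0 signals static sharding to the helpers above
};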


@ -1,13 +1,11 @@
import { createDecoder, createEncoder, Decoder, Encoder } from "@waku/core";

type TestDataOptions = {
  pubsubTopic: string;
};
import { AutoSharding } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";

// Utility to generate test data for multiple topics tests.
export function generateTestData(
  topicCount: number,
  options?: TestDataOptions
  networkConfig: AutoSharding
): {
  contentTopics: string[];
  encoders: Encoder[];
@ -15,14 +13,22 @@ export function generateTestData(
} {
  const contentTopics = Array.from(
    { length: topicCount },
    (_, i) => `/test/${i + 1}/waku-multi/default`
    // Remember that auto-sharding uses both app name and app version fields
    (_, i) => `/test/0/waku-multi-${i + 1}/default`
  );
  const encoders = contentTopics.map((topic) =>
    createEncoder({ contentTopic: topic, pubsubTopic: options?.pubsubTopic })
    createEncoder({
      contentTopic: topic,
      routingInfo: createRoutingInfo(networkConfig, { contentTopic: topic })
    })
  );
  const decoders = contentTopics.map((topic) =>
    createDecoder(topic, options?.pubsubTopic)
    createDecoder(
      topic,
      createRoutingInfo(networkConfig, { contentTopic: topic })
    )
  );

  return {
    contentTopics,
    encoders,

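Usage sketch (illustrative): all generated topics share one AutoSharding config, and each encoder/decoder pair carries its own derived routing info.

const { contentTopics, encoders, decoders } = generateTestData(3, {
  clusterId: 0,
  numShardsInCluster: 10
});
// contentTopics[0] === "/test/0/waku-multi-1/default"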
@ -1,13 +1,16 @@
import {
  CreateNodeOptions,
  DefaultNetworkConfig,
  IWaku,
  LightNode,
  NetworkConfig,
  Protocols
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { derivePubsubTopicsFromNetworkConfig } from "@waku/utils";
import {
  contentTopicToPubsubTopic,
  formatPubsubTopic,
  isAutoShardingRoutingInfo,
  RoutingInfo
} from "@waku/utils";
import { Context } from "mocha";
import pRetry from "p-retry";

@ -18,9 +21,20 @@ import { Args } from "../types.js";

import { waitForConnections } from "./waitForConnections.js";

/**
 * Runs both js-waku and nwaku nodes.
 *
 * @param context
 * @param routingInfo
 * @param customArgs passed to nwaku service nodes
 * @param strictChecking
 * @param numServiceNodes
 * @param withoutFilter
 * @param jsWakuParams
 */
export async function runMultipleNodes(
  context: Context,
  networkConfig: NetworkConfig = DefaultNetworkConfig,
  routingInfo: RoutingInfo,
  customArgs?: Args,
  strictChecking: boolean = false,
  numServiceNodes = 2,
@ -32,7 +46,7 @@ export async function runMultipleNodes(
    context,
    numServiceNodes,
    strictChecking,
    networkConfig,
    routingInfo,
    customArgs,
    withoutFilter
  );
@ -42,7 +56,7 @@
    libp2p: {
      addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] }
    },
    networkConfig,
    networkConfig: routingInfo.networkConfig,
    lightPush: { numPeersToUse: numServiceNodes },
    discovery: DEFAULT_DISCOVERIES_ENABLED,
    ...jsWakuParams
@ -54,12 +68,37 @@
    throw new Error("Failed to initialize waku");
  }

  const pubsubTopics = [];

  pubsubTopics.push(routingInfo.pubsubTopic);

  if (customArgs?.shard) {
    const shards = customArgs?.shard ?? [];
    for (const s of shards) {
      pubsubTopics.push(formatPubsubTopic(routingInfo.clusterId, s));
    }
  }

  if (customArgs?.contentTopic && isAutoShardingRoutingInfo(routingInfo)) {
    const contentTopics = customArgs?.contentTopic ?? [];
    for (const ct of contentTopics) {
      pubsubTopics.push(
        contentTopicToPubsubTopic(
          ct,
          routingInfo.clusterId,
          routingInfo.networkConfig.numShardsInCluster
        )
      );
    }
  }

  for (const node of serviceNodes.nodes) {
    await waku.dial(await node.getMultiaddrWithId());
    await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);
    await node.ensureSubscriptions(
      derivePubsubTopicsFromNetworkConfig(networkConfig)
    );

    if (pubsubTopics.length > 0) {
      await node.ensureSubscriptions(pubsubTopics);
    }

    const wakuConnections = waku.libp2p.getConnections();


@ -9,7 +9,7 @@ import {
  teardownNodesWithRedundancy
} from "../../src/index.js";

import { TestShardInfo } from "./utils.js";
import { TestRoutingInfo } from "./utils.js";

describe("Connection Limiter", function () {
  let waku: LightNode;
@ -18,7 +18,7 @@ describe("Connection Limiter", function () {
  beforeEachCustom(this, async () => {
    [serviceNodes, waku] = await runMultipleNodes(
      this.ctx,
      TestShardInfo,
      TestRoutingInfo,
      { lightpush: true, filter: true, peerExchange: true },
      false,
      2,
@ -68,7 +68,7 @@ describe("Connection Limiter", function () {

    [serviceNodes, waku] = await runMultipleNodes(
      this.ctx,
      TestShardInfo,
      TestRoutingInfo,
      { lightpush: true, filter: true, peerExchange: true },
      false,
      2,
@ -126,7 +126,7 @@ describe("Connection Limiter", function () {

    [serviceNodes, waku] = await runMultipleNodes(
      this.ctx,
      TestShardInfo,
      TestRoutingInfo,
      { lightpush: true, filter: true, peerExchange: true },
      false,
      2,

@ -10,7 +10,7 @@ import {
  teardownNodesWithRedundancy
} from "../../src/index.js";

import { TestShardInfo } from "./utils.js";
import { TestRoutingInfo } from "./utils.js";

describe("Dialing", function () {
  const ctx: Context = this.ctx;
@ -20,7 +20,7 @@ describe("Dialing", function () {
  beforeEachCustom(this, async () => {
    [serviceNodes, waku] = await runMultipleNodes(
      this.ctx,
      TestShardInfo,
      TestRoutingInfo,
      { lightpush: true, filter: true, peerExchange: true },
      false,
      2,
@ -33,7 +33,7 @@ describe("Dialing", function () {
      ctx,
      2,
      false,
      TestShardInfo,
      TestRoutingInfo,
      {
        lightpush: true,
        filter: true,

@ -11,7 +11,7 @@ import {
  teardownNodesWithRedundancy
} from "../../src/index.js";

import { TestShardInfo } from "./utils.js";
import { TestRoutingInfo } from "./utils.js";

// TODO: investigate and re-enable in https://github.com/waku-org/js-waku/issues/2453
describe.skip("DiscoveryDialer", function () {
@ -22,7 +22,7 @@ describe.skip("DiscoveryDialer", function () {
  beforeEachCustom(this, async () => {
    [serviceNodes, waku] = await runMultipleNodes(
      this.ctx,
      TestShardInfo,
      TestRoutingInfo,
      { lightpush: true, filter: true, peerExchange: true },
      false,
      2,
@ -35,7 +35,7 @@ describe.skip("DiscoveryDialer", function () {
      ctx,
      2,
      false,
      TestShardInfo,
      TestRoutingInfo,
      {
        lightpush: true,
        filter: true,

@ -11,7 +11,8 @@ import { expect } from "chai";
import {
  afterEachCustom,
  beforeEachCustom,
  DefaultTestShardInfo,
  DefaultTestNetworkConfig,
  DefaultTestRoutingInfo,
  delay,
  NOISE_KEY_1
} from "../../src/index.js";
@ -36,7 +37,7 @@ describe("Connection state", function () {
  let originalNavigator: any;

  beforeEachCustom(this, async () => {
    waku = await createLightNode({ networkConfig: DefaultTestShardInfo });
    waku = await createLightNode({ networkConfig: DefaultTestNetworkConfig });
    nwaku1 = new ServiceNode(makeLogFileName(this.ctx) + "1");
    nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2");
    await nwaku1.start({ filter: true });
@ -104,11 +105,13 @@ describe("Connection state", function () {
  it("`waku:online` between 2 js-waku relay nodes", async function () {
    const waku1 = await createRelayNode({
      staticNoiseKey: NOISE_KEY_1,
      networkConfig: DefaultTestShardInfo
      networkConfig: DefaultTestNetworkConfig,
      routingInfos: [DefaultTestRoutingInfo]
    });
    const waku2 = await createRelayNode({
      libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
      networkConfig: DefaultTestShardInfo
      networkConfig: DefaultTestNetworkConfig,
      routingInfos: [DefaultTestRoutingInfo]
    });

    let eventCount1 = 0;
@ -171,10 +174,12 @@

  it("isConnected between 2 js-waku relay nodes", async function () {
    const waku1 = await createRelayNode({
      staticNoiseKey: NOISE_KEY_1
      staticNoiseKey: NOISE_KEY_1,
      routingInfos: [DefaultTestRoutingInfo]
    });
    const waku2 = await createRelayNode({
      libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
      libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
      routingInfos: [DefaultTestRoutingInfo]
    });
    await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
      multiaddrs: waku2.libp2p.getMultiaddrs()

@ -1,6 +1,11 @@
import { createRoutingInfo } from "@waku/utils";

export const TestContentTopic = "/test/1/waku-light-push/utf8";
export const ClusterId = 3;
export const TestShardInfo = {
  contentTopics: [TestContentTopic],
  clusterId: ClusterId
export const TestClusterId = 2;
export const TestNetworkConfig = {
  clusterId: TestClusterId,
  numShardsInCluster: 8 // Cannot be under 8 for nwaku 0.36.0 and below
};
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
  contentTopic: TestContentTopic
});
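A quick sanity-check sketch: the exact shard is a function of the content-topic hash, but with numShardsInCluster set to 8 it must land in [0, 8), so the pubsub topic has the shape "/waku/2/rs/2/<shard>".

const shardPart = Number(TestRoutingInfo.pubsubTopic.split("/").pop());
console.assert(
  shardPart >= 0 && shardPart < TestNetworkConfig.numShardsInCluster
);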

@ -6,12 +6,16 @@ import { expect } from "chai";

import {
  afterEachCustom,
  DefaultTestClusterId,
  DefaultTestContentTopic,
  DefaultTestNetworkConfig,
  DefaultTestNumShardsInCluster,
  DefaultTestRoutingInfo,
  makeLogFileName,
  NOISE_KEY_1,
  ServiceNode,
  tearDownNodes
} from "../src/index.js";
import { DefaultTestShardInfo } from "../src/index.js";

describe("ENR Interop: ServiceNode", function () {
  let waku: RelayNode;
@ -29,14 +33,16 @@ describe("ENR Interop: ServiceNode", function () {
      store: false,
      filter: false,
      lightpush: false,
      clusterId: DefaultTestShardInfo.clusterId,
      shard: DefaultTestShardInfo.shards
      clusterId: DefaultTestClusterId,
      numShardsInNetwork: DefaultTestNumShardsInCluster,
      contentTopic: [DefaultTestContentTopic]
    });
    const multiAddrWithId = await nwaku.getMultiaddrWithId();

    waku = await createRelayNode({
      staticNoiseKey: NOISE_KEY_1,
      networkConfig: DefaultTestShardInfo
      networkConfig: DefaultTestNetworkConfig,
      routingInfos: [DefaultTestRoutingInfo]
    });
    await waku.start();
    await waku.dial(multiAddrWithId);
@ -64,14 +70,16 @@
      store: true,
      filter: false,
      lightpush: false,
      clusterId: DefaultTestShardInfo.clusterId,
      shard: DefaultTestShardInfo.shards
      clusterId: DefaultTestClusterId,
      numShardsInNetwork: DefaultTestNumShardsInCluster,
      contentTopic: [DefaultTestContentTopic]
    });
    const multiAddrWithId = await nwaku.getMultiaddrWithId();

    waku = await createRelayNode({
      staticNoiseKey: NOISE_KEY_1,
      networkConfig: DefaultTestShardInfo
      networkConfig: DefaultTestNetworkConfig,
      routingInfos: [DefaultTestRoutingInfo]
    });
    await waku.start();
    await waku.dial(multiAddrWithId);
@ -99,14 +107,16 @@
      store: true,
      filter: true,
      lightpush: true,
      clusterId: DefaultTestShardInfo.clusterId,
      shard: DefaultTestShardInfo.shards
      clusterId: DefaultTestClusterId,
      numShardsInNetwork: DefaultTestNumShardsInCluster,
      contentTopic: [DefaultTestContentTopic]
    });
    const multiAddrWithId = await nwaku.getMultiaddrWithId();

    waku = await createRelayNode({
      staticNoiseKey: NOISE_KEY_1,
      networkConfig: DefaultTestShardInfo
      networkConfig: DefaultTestNetworkConfig,
      routingInfos: [DefaultTestRoutingInfo]
    });
    await waku.start();
    await waku.dial(multiAddrWithId);

@ -1,5 +1,5 @@
import { createDecoder, createEncoder } from "@waku/core";
import { Protocols } from "@waku/interfaces";
import { AutoSharding, Protocols } from "@waku/interfaces";
import type { IDecodedMessage, LightNode } from "@waku/interfaces";
import {
  generatePrivateKey,
@ -15,11 +15,7 @@ import {
  createEncoder as createSymEncoder
} from "@waku/message-encryption/symmetric";
import { createLightNode } from "@waku/sdk";
import {
  contentTopicToPubsubTopic,
  contentTopicToShardIndex,
  Logger
} from "@waku/utils";
import { createRoutingInfo, Logger } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

@ -36,15 +32,21 @@ import {

const log = new Logger("test:ephemeral");

const ClusterId = 2;
const TestClusterId = 2;
const TestNetworkConfig: AutoSharding = {
  clusterId: TestClusterId,
  numShardsInCluster: 8
};
const TestContentTopic = "/test/1/ephemeral/utf8";
const PubsubTopic = contentTopicToPubsubTopic(TestContentTopic, ClusterId);
const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
  contentTopic: TestContentTopic
});

const TestEncoder = createEncoder({
  contentTopic: TestContentTopic,
  pubsubTopic: PubsubTopic
  routingInfo: TestRoutingInfo
});
const TestDecoder = createDecoder(TestContentTopic, PubsubTopic);
const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);

const privateKey = generatePrivateKey();
const symKey = generateSymmetricKey();
@ -57,26 +59,26 @@ const AsymEncoder = createEciesEncoder({
  contentTopic: AsymContentTopic,
  publicKey,
  ephemeral: true,
  pubsubTopic: PubsubTopic
  routingInfo: TestRoutingInfo
});
const SymEncoder = createSymEncoder({
  contentTopic: SymContentTopic,
  symKey,
  ephemeral: true,
  pubsubTopic: PubsubTopic
  routingInfo: TestRoutingInfo
});
const ClearEncoder = createEncoder({
  contentTopic: TestContentTopic,
  ephemeral: true,
  pubsubTopic: PubsubTopic
  routingInfo: TestRoutingInfo
});

const AsymDecoder = createEciesDecoder(
  AsymContentTopic,
  privateKey,
  PubsubTopic
  TestRoutingInfo,
  privateKey
);
const SymDecoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic);
const SymDecoder = createSymDecoder(SymContentTopic, TestRoutingInfo, symKey);

describe("Waku Message Ephemeral field", function () {
  let waku: LightNode;
@ -95,8 +97,8 @@ describe("Waku Message Ephemeral field", function () {
      store: true,
      relay: true,
      contentTopic: contentTopics,
      clusterId: ClusterId,
      shard: contentTopics.map((t) => contentTopicToShardIndex(t))
      clusterId: TestClusterId,
      numShardsInNetwork: TestNetworkConfig.numShardsInCluster
    });
    await nwaku.ensureSubscriptionsAutosharding([
      TestContentTopic,
@ -107,10 +109,7 @@
    waku = await createLightNode({
      staticNoiseKey: NOISE_KEY_1,
      libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
      networkConfig: {
        contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
        clusterId: ClusterId
      }
      networkConfig: TestNetworkConfig
    });
    await waku.start();
    await waku.dial(await nwaku.getMultiaddrWithId());
@ -138,17 +137,11 @@
    const [waku1, waku2, nimWakuMultiaddr] = await Promise.all([
      createLightNode({
        staticNoiseKey: NOISE_KEY_1,
        networkConfig: {
          contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
          clusterId: ClusterId
        }
        networkConfig: TestNetworkConfig
      }).then((waku) => waku.start().then(() => waku)),
      createLightNode({
        staticNoiseKey: NOISE_KEY_2,
        networkConfig: {
          contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
          clusterId: ClusterId
        }
        networkConfig: TestNetworkConfig
      }).then((waku) => waku.start().then(() => waku)),
      nwaku.getMultiaddrWithId()
    ]);
@ -200,7 +193,7 @@
    const ephemeralEncoder = createEncoder({
      contentTopic: TestContentTopic,
      ephemeral: true,
      pubsubTopic: PubsubTopic
      routingInfo: TestRoutingInfo
    });

    const messages: IDecodedMessage[] = [];
@ -246,9 +239,9 @@
    const encoder = createSymEncoder({
      contentTopic: SymContentTopic,
      symKey,
      pubsubTopic: PubsubTopic
      routingInfo: TestRoutingInfo
    });
    const decoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic);
    const decoder = createSymDecoder(SymContentTopic, TestRoutingInfo, symKey);

    const messages: IDecodedMessage[] = [];
    const callback = (msg: IDecodedMessage): void => {
@ -293,12 +286,12 @@
    const encoder = createEciesEncoder({
      contentTopic: AsymContentTopic,
      publicKey: publicKey,
      pubsubTopic: PubsubTopic
      routingInfo: TestRoutingInfo
    });
    const decoder = createEciesDecoder(
      AsymContentTopic,
      privateKey,
      PubsubTopic
      TestRoutingInfo,
      privateKey
    );

    const messages: IDecodedMessage[] = [];

@ -1,5 +1,6 @@
import { LightNode, Protocols } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";

import {
@ -18,8 +19,7 @@ import {
  TestContentTopic,
  TestDecoder,
  TestEncoder,
  TestPubsubTopic,
  TestShardInfo
  TestRoutingInfo
} from "./utils.js";

const runTests = (strictCheckNodes: boolean): void => {
@ -32,7 +32,7 @@

  beforeEachCustom(this, async () => {
    ctx = this.ctx;
    [serviceNodes, waku] = await runMultipleNodes(this.ctx, TestShardInfo, {
    [serviceNodes, waku] = await runMultipleNodes(this.ctx, TestRoutingInfo, {
      lightpush: true,
      filter: true
    });
@ -59,7 +59,7 @@
      serviceNodes.messageCollector.verifyReceivedMessage(0, {
        expectedMessageText: testItem.value,
        expectedContentTopic: TestContentTopic,
        expectedPubsubTopic: TestPubsubTopic
        expectedPubsubTopic: TestRoutingInfo.pubsubTopic
      });
    });
  });
@ -78,7 +78,7 @@
        payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
        timestamp: testItem as any
      },
      TestPubsubTopic
      TestRoutingInfo
    );

    expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -88,7 +88,7 @@
      expectedMessageText: messageText,
      checkTimestamp: false,
      expectedContentTopic: TestContentTopic,
      expectedPubsubTopic: TestPubsubTopic
      expectedPubsubTopic: TestRoutingInfo.pubsubTopic
    });

    // Check if the timestamp matches
@ -117,7 +117,7 @@
        payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
        timestamp: "2023-09-06T12:05:38.609Z" as any
      },
      TestPubsubTopic
      TestRoutingInfo
    );

    // Verify that no message was received
@ -133,20 +133,21 @@
    );
    await delay(400);

    const wrongContentTopic = "/wrong/1/ContentTopic/proto";
    await serviceNodes.sendRelayMessage(
      {
        contentTopic: TestContentTopic,
        contentTopic: wrongContentTopic,
        payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
        timestamp: BigInt(Date.now()) * BigInt(1000000)
      },
      "WrongContentTopic"
      createRoutingInfo(TestRoutingInfo.networkConfig, {
        contentTopic: "/wrong/1/ContentTopic/proto"
      })
    );

    expect(
      await serviceNodes.messageCollector.waitForMessages(1, {
        pubsubTopic: TestPubsubTopic
      })
    ).to.eq(false);
    expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
      false
    );
  });

  it("Check message with no pubsub topic is not received", async function () {
@ -184,7 +185,7 @@
        payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
        timestamp: BigInt(Date.now()) * BigInt(1000000)
      },
      TestPubsubTopic
      TestRoutingInfo
    );

    expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -205,7 +206,7 @@
        timestamp: BigInt(Date.now()) * BigInt(1000000),
        payload: undefined as any
      },
      TestPubsubTopic
      TestRoutingInfo
    );

    expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -226,7 +227,7 @@
        payload: 12345 as unknown as string,
        timestamp: BigInt(Date.now()) * BigInt(1000000)
      },
      TestPubsubTopic
      TestRoutingInfo
    );

    expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -267,12 +268,12 @@
    serviceNodes.messageCollector.verifyReceivedMessage(0, {
      expectedMessageText: "M1",
      expectedContentTopic: TestContentTopic,
      expectedPubsubTopic: TestPubsubTopic
      expectedPubsubTopic: TestRoutingInfo.pubsubTopic
    });
    serviceNodes.messageCollector.verifyReceivedMessage(1, {
      expectedMessageText: "M2",
      expectedContentTopic: TestContentTopic,
      expectedPubsubTopic: TestPubsubTopic
      expectedPubsubTopic: TestRoutingInfo.pubsubTopic
    });
  });

@ -289,7 +290,7 @@
    serviceNodes.messageCollector.verifyReceivedMessage(0, {
      expectedMessageText: "M1",
      expectedContentTopic: TestContentTopic,
      expectedPubsubTopic: TestPubsubTopic
      expectedPubsubTopic: TestRoutingInfo.pubsubTopic
    });

    await teardownNodesWithRedundancy(serviceNodes, []);
@ -297,7 +298,7 @@
      ctx,
      2,
      false,
      TestShardInfo,
      TestRoutingInfo,
      {
        lightpush: true,
        filter: true,
@ -334,7 +335,7 @@
    serviceNodes.messageCollector.verifyReceivedMessage(1, {
      expectedMessageText: "M2",
      expectedContentTopic: TestContentTopic,
      expectedPubsubTopic: TestPubsubTopic
      expectedPubsubTopic: TestRoutingInfo.pubsubTopic
    });
  });
});

@ -0,0 +1,120 @@
import { createDecoder, createEncoder } from "@waku/core";
import { LightNode } from "@waku/interfaces";
import { Protocols, utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo, formatPubsubTopic } from "@waku/utils";

import {
  afterEachCustom,
  beforeEachCustom,
  makeLogFileName,
  MessageCollector,
  runMultipleNodes,
  ServiceNode,
  ServiceNodesFleet,
  tearDownNodes,
  teardownNodesWithRedundancy
} from "../../src/index.js";

import { TestContentTopic } from "./utils.js";

const runTests = (strictCheckNodes: boolean): void => {
  describe(`Waku Filter: Subscribe: Multiple Service Nodes on Static Shard: Strict Check mode: ${strictCheckNodes}`, function () {
    this.timeout(100000);
    let waku: LightNode;
    let serviceNodes: ServiceNodesFleet;
    const staticNetworkConfig = { clusterId: 9 };
    const routingInfoShard1 = createRoutingInfo(staticNetworkConfig, {
      shardId: 1
    });
    const encoderShard1 = createEncoder({
      contentTopic: TestContentTopic,
      routingInfo: routingInfoShard1
    });
    const decoderShard1 = createDecoder(TestContentTopic, routingInfoShard1);

    beforeEachCustom(this, async () => {
      [serviceNodes, waku] = await runMultipleNodes(
        this.ctx,
        routingInfoShard1,
        undefined,
        strictCheckNodes
      );
    });

    afterEachCustom(this, async () => {
      await teardownNodesWithRedundancy(serviceNodes, waku);
    });

    it("Subscribe and receive messages from 2 nwaku nodes each with different static shards", async function () {
      await waku.filter.subscribe(
        decoderShard1,
        serviceNodes.messageCollector.callback
      );

      // Set up and start a new nwaku node on a different shard
      const nwaku2 = new ServiceNode(makeLogFileName(this) + "3");

      try {
        const routingInfoShard2 = createRoutingInfo(staticNetworkConfig, {
          shardId: 2
        });
        const contentTopic2 = "/test/4/waku-filter/default";
        const decoderShard2 = createDecoder(contentTopic2, routingInfoShard2);
        const encoderShard2 = createEncoder({
          contentTopic: contentTopic2,
          routingInfo: routingInfoShard2
        });

        const shardId = 2;
        await nwaku2.start({
          filter: true,
          lightpush: true,
          relay: true,
          clusterId: staticNetworkConfig.clusterId,
          shard: [shardId],
          numShardsInNetwork: 0 // Running static sharding
        });
        await waku.dial(await nwaku2.getMultiaddrWithId());
        await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);

        await nwaku2.ensureSubscriptions([
          formatPubsubTopic(staticNetworkConfig.clusterId, shardId)
        ]);

        const messageCollector2 = new MessageCollector();

        await waku.filter.subscribe(decoderShard2, messageCollector2.callback);

        // Making sure that messages are sent and received for both subscriptions
        // While loop is done because of https://github.com/waku-org/js-waku/issues/1606
        while (
          !(await serviceNodes.messageCollector.waitForMessages(1)) ||
          !(await messageCollector2.waitForMessages(1))
        ) {
          await waku.lightPush.send(encoderShard1, {
            payload: utf8ToBytes("M1")
          });
          await waku.lightPush.send(encoderShard2, {
            payload: utf8ToBytes("M2")
          });
        }

        serviceNodes.messageCollector.verifyReceivedMessage(0, {
          expectedContentTopic: encoderShard1.contentTopic,
          expectedPubsubTopic: routingInfoShard1.pubsubTopic,
          expectedMessageText: "M1"
        });

        messageCollector2.verifyReceivedMessage(0, {
          expectedContentTopic: encoderShard2.contentTopic,
          expectedPubsubTopic: routingInfoShard2.pubsubTopic,
          expectedMessageText: "M2"
        });
      } catch (e) {
        await tearDownNodes([nwaku2], []);
      }
    });
  });
};

[true, false].map((strictCheckNodes) => runTests(strictCheckNodes));
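For reference, a sketch of the static-shard pubsub topics this new test exercises, following the "/waku/2/rs/<cluster>/<shard>" convention used in the constants above:

formatPubsubTopic(9, 1); // "/waku/2/rs/9/1" (routingInfoShard1.pubsubTopic)
formatPubsubTopic(9, 2); // "/waku/2/rs/9/2" (routingInfoShard2.pubsubTopic)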
@ -7,7 +7,8 @@ import {
|
||||
getPublicKey,
|
||||
symmetric
|
||||
} from "@waku/message-encryption";
|
||||
import { Protocols, utf8ToBytes } from "@waku/sdk";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { createRoutingInfo } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
|
||||
import {
|
||||
@ -15,27 +16,23 @@ import {
|
||||
beforeEachCustom,
|
||||
delay,
|
||||
generateTestData,
|
||||
makeLogFileName,
|
||||
MessageCollector,
|
||||
runMultipleNodes,
|
||||
ServiceNode,
|
||||
ServiceNodesFleet,
|
||||
tearDownNodes,
|
||||
teardownNodesWithRedundancy,
|
||||
TEST_STRING,
|
||||
waitForConnections
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
ClusterId,
|
||||
messagePayload,
|
||||
messageText,
|
||||
ShardIndex,
|
||||
TestClusterId,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
TestNetworkConfig,
|
||||
TestRoutingInfo,
|
||||
TestShardIndex
|
||||
} from "./utils.js";
|
||||
|
||||
const runTests = (strictCheckNodes: boolean): void => {
|
||||
@ -47,7 +44,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
beforeEachCustom(this, async () => {
|
||||
[serviceNodes, waku] = await runMultipleNodes(
|
||||
this.ctx,
|
||||
TestShardInfo,
|
||||
TestRoutingInfo,
|
||||
undefined,
|
||||
strictCheckNodes
|
||||
);
|
||||
@ -84,12 +81,12 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
const encoder = ecies.createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
publicKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
routingInfo: TestRoutingInfo
|
||||
});
|
||||
const decoder = ecies.createDecoder(
|
||||
TestContentTopic,
|
||||
privateKey,
|
||||
TestPubsubTopic
|
||||
TestRoutingInfo,
|
||||
privateKey
|
||||
);
|
||||
|
||||
await waku.filter.subscribe(
|
||||
@ -106,7 +103,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedVersion: 1,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(2);
|
||||
@ -117,12 +114,12 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
const encoder = symmetric.createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
symKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
routingInfo: TestRoutingInfo
|
||||
});
|
||||
const decoder = symmetric.createDecoder(
|
||||
TestContentTopic,
|
||||
symKey,
|
||||
TestPubsubTopic
|
||||
TestRoutingInfo,
|
||||
symKey
|
||||
);
|
||||
|
||||
await waku.filter.subscribe(
|
||||
@ -139,7 +136,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedVersion: 1,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(2);
|
||||
@ -158,7 +155,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
contentTopic: TestContentTopic,
|
||||
payload: utf8ToBytes(messageText)
|
||||
});
|
||||
await serviceNodes.sendRelayMessage(relayMessage, TestPubsubTopic);
|
||||
await serviceNodes.sendRelayMessage(relayMessage, TestRoutingInfo);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
@ -166,7 +163,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(1);
|
||||
@ -219,18 +216,20 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
|
||||
// Modify subscription to include a new content topic and send a message.
|
||||
const newMessageText = "Filtering still works!";
|
||||
const newMessagePayload = { payload: utf8ToBytes(newMessageText) };
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
|
||||
contentTopic: newContentTopic
|
||||
});
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
routingInfo: newRoutingInfo
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
|
||||
await waku.filter.subscribe(
|
||||
newDecoder,
|
||||
serviceNodes.messageCollector.callback
|
||||
@ -244,26 +243,30 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(1, {
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedMessageText: newMessageText,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
|
||||
// Send another message on the initial content topic to verify it still works.
|
||||
await waku.lightPush.send(TestEncoder, newMessagePayload);
|
||||
const thirdMessageText = "Filtering still works on first subscription!";
|
||||
const thirdMessagePayload = { payload: utf8ToBytes(thirdMessageText) };
|
||||
await waku.lightPush.send(TestEncoder, thirdMessagePayload);
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(3)).to.eq(
|
||||
true
|
||||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(2, {
|
||||
expectedMessageText: newMessageText,
|
||||
expectedMessageText: thirdMessageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(3);
|
||||
// This relies on nwaku not emptying the relay cache
|
||||
// We received the 3 messages already, what else are checking?
|
||||
// await serviceNodes.confirmMessageLength(3);
|
||||
});
|
||||
|
||||
it("Subscribe and receives messages on 20 topics", async function () {
|
||||
const topicCount = 20;
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
const td = generateTestData(topicCount, TestNetworkConfig);
|
||||
|
||||
// Subscribe to all 20 topics.
|
||||
for (let i = 0; i < topicCount; i++) {
|
||||
@ -288,16 +291,16 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// skiped as it fails in CI but not locally https://github.com/waku-org/js-waku/issues/2438
|
||||
// skipped as it fails in CI but not locally https://github.com/waku-org/js-waku/issues/2438
|
||||
it.skip("Subscribe to 30 topics in separate streams (30 streams for Filter is limit) at once and receives messages", async function () {
|
||||
this.timeout(100_000);
|
||||
const topicCount = 30;
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
const td = generateTestData(topicCount, TestNetworkConfig);
|
||||
|
||||
for (let i = 0; i < topicCount; i++) {
|
||||
await waku.filter.subscribe(
|
||||
@ -321,7 +324,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -329,7 +332,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
it("Subscribe to 100 topics (new limit) at once and receives messages", async function () {
|
||||
this.timeout(100_000);
|
||||
const topicCount = 100;
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
const td = generateTestData(topicCount, TestNetworkConfig);
|
||||
|
||||
await waku.filter.subscribe(
|
||||
td.decoders,
|
||||
@ -351,14 +354,14 @@ const runTests = (strictCheckNodes: boolean): void => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});

it("Error when try to subscribe to more than 101 topics (new limit)", async function () {
const topicCount = 101;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);

try {
await waku.filter.subscribe(
@ -382,14 +385,10 @@ const runTests = (strictCheckNodes: boolean): void => {
it("Overlapping topic subscription", async function () {
// Define two sets of test data with overlapping topics.
const topicCount1 = 2;
const td1 = generateTestData(topicCount1, {
pubsubTopic: TestPubsubTopic
});
const td1 = generateTestData(topicCount1, TestNetworkConfig);

const topicCount2 = 4;
const td2 = generateTestData(topicCount2, {
pubsubTopic: TestPubsubTopic
});
const td2 = generateTestData(topicCount2, TestNetworkConfig);

await waku.filter.subscribe(
td1.decoders,
@ -445,31 +444,25 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});

TEST_STRING.forEach((testItem) => {
it(`Subscribe to topic containing ${testItem.description} and receive message`, async function () {
const newContentTopic = testItem.value;
const newContentTopic = `/test/0/${testItem.description}/test`;
const newEncoder = waku.createEncoder({
contentTopic: newContentTopic,
shardInfo: {
clusterId: ClusterId,
shard: ShardIndex
}
shardId: TestShardIndex
});
const newDecoder = waku.createDecoder({
contentTopic: newContentTopic,
shardInfo: {
clusterId: ClusterId,
shard: ShardIndex
}
shardId: TestShardIndex
});

await waku.filter.subscribe(
@ -484,7 +477,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: newContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -497,11 +490,15 @@ const runTests = (strictCheckNodes: boolean): void => {
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });

const newContentTopic = "/test/2/waku-filter/default";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});

const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -516,12 +513,12 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
serviceNodes.messageCollector.verifyReceivedMessage(1, {
expectedContentTopic: newContentTopic,
expectedMessageText: "M2",
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: newRoutingInfo.pubsubTopic
});
});

@ -570,94 +567,26 @@ const runTests = (strictCheckNodes: boolean): void => {
expectedContentTopic: TestContentTopic
});
});
});

it("Subscribe and receive messages from 2 nwaku nodes each with different pubsubtopics", async function () {
await waku.filter.subscribe(
TestDecoder,
serviceNodes.messageCollector.callback
describe("Filter subscribe test with static sharding", function () {
this.timeout(100000);
let waku: LightNode;
let serviceNodes: ServiceNodesFleet;
const networkConfig = { clusterId: TestClusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId: 3 });
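// Static sharding: the network config above omits numShardsInCluster, so the
// routing info is built from an explicit shardId rather than being derived from
// a content topic as in the autosharding tests.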

beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
routingInfo,
{},
strictCheckNodes
);

// Set up and start a new nwaku node with customPubsubTopic1
const nwaku2 = new ServiceNode(makeLogFileName(this) + "3");

try {
const customContentTopic = "/test/4/waku-filter/default";
const customDecoder = createDecoder(customContentTopic, {
clusterId: ClusterId,
shard: 4
});
const customEncoder = createEncoder({
contentTopic: customContentTopic,
pubsubTopicShardInfo: { clusterId: ClusterId, shard: 4 }
});

await nwaku2.start({
filter: true,
lightpush: true,
relay: true,
clusterId: ClusterId,
shard: [4]
});
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);

await nwaku2.ensureSubscriptions([customDecoder.pubsubTopic]);

const messageCollector2 = new MessageCollector();

await waku.filter.subscribe(customDecoder, messageCollector2.callback);

// Making sure that messages are sent and received for both subscriptions
// While loop is done because of https://github.com/waku-org/js-waku/issues/1606
while (
!(await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestDecoder.pubsubTopic
})) ||
!(await messageCollector2.waitForMessages(1, {
pubsubTopic: customDecoder.pubsubTopic
}))
) {
await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M1")
});
await waku.lightPush.send(customEncoder, {
payload: utf8ToBytes("M2")
});
}

serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedContentTopic: TestDecoder.contentTopic,
expectedPubsubTopic: TestDecoder.pubsubTopic,
expectedMessageText: "M1"
});

messageCollector2.verifyReceivedMessage(0, {
expectedContentTopic: customDecoder.contentTopic,
expectedPubsubTopic: customDecoder.pubsubTopic,
expectedMessageText: "M2"
});
} catch (e) {
await tearDownNodes([nwaku2], []);
}
});

it("Should fail to subscribe with decoder with wrong shard", async function () {
const wrongDecoder = createDecoder(TestDecoder.contentTopic, {
clusterId: ClusterId,
shard: 5
});

// this subscription object is set up with the `customPubsubTopic1` but we're passing it a Decoder with the `customPubsubTopic2`
try {
await waku.filter.subscribe(
wrongDecoder,
serviceNodes.messageCollector.callback
);
} catch (error) {
expect((error as Error).message).to.include(
`Pubsub topic ${wrongDecoder.pubsubTopic} has not been configured on this instance.`
);
}
afterEachCustom(this, async () => {
await teardownNodesWithRedundancy(serviceNodes, waku);
});
});
};

@ -1,6 +1,7 @@
import { createDecoder, createEncoder } from "@waku/core";
import { type LightNode } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";

import {
@ -13,13 +14,13 @@ import {
} from "../../src/index.js";

import {
ClusterId,
messagePayload,
messageText,
TestContentTopic,
TestDecoder,
TestEncoder,
TestPubsubTopic
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";

const runTests = (strictCheckNodes: boolean): void => {
@ -30,14 +31,10 @@ const runTests = (strictCheckNodes: boolean): void => {
let serviceNodes: ServiceNodesFleet;

beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{
contentTopics: [TestContentTopic],
clusterId: ClusterId
},
{ filter: true, lightpush: true }
);
[serviceNodes, waku] = await runMultipleNodes(this.ctx, TestRoutingInfo, {
filter: true,
lightpush: true
});
});

afterEachCustom(this, async () => {
@ -77,12 +74,15 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.callback
);

const newContentTopic = "/test/2/waku-filter";
const newContentTopic = "/test/2/waku-filter/proto";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -103,7 +103,6 @@ const runTests = (strictCheckNodes: boolean): void => {

// Check that of the 4 messages sent, 3 were received
expect(serviceNodes.messageCollector.count).to.eq(3);
await serviceNodes.confirmMessageLength(4);
});

it("Unsubscribe 2 topics - node subscribed to 2 topics", async function () {
@ -112,12 +111,15 @@ const runTests = (strictCheckNodes: boolean): void => {
TestDecoder,
serviceNodes.messageCollector.callback
);
const newContentTopic = "/test/2/waku-filter";
const newContentTopic = "/test/2/waku-filter/proto";
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);
await waku.filter.subscribe(
newDecoder,
serviceNodes.messageCollector.callback
@ -140,7 +142,6 @@ const runTests = (strictCheckNodes: boolean): void => {

// Check that of the 4 messages sent, 2 were received
expect(serviceNodes.messageCollector.count).to.eq(2);
await serviceNodes.confirmMessageLength(4);
});

it("Unsubscribe topics the node is not subscribed to", async function () {
@ -159,7 +160,12 @@ const runTests = (strictCheckNodes: boolean): void => {

// Unsubscribe from topics that the node is not subscribed to and send again
await waku.filter.unsubscribe(
createDecoder("/test/2/waku-filter", TestDecoder.pubsubTopic)
createDecoder(
"/test/2/waku-filter/proto",
createRoutingInfo(TestNetworkConfig, {
contentTopic: "/test/2/waku-filter/proto"
})
)
);
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M2") });
expect(await serviceNodes.messageCollector.waitForMessages(2)).to.eq(
@ -174,7 +180,7 @@ const runTests = (strictCheckNodes: boolean): void => {
it("Unsubscribe from 100 topics (new limit) at once and receives messages", async function () {
this.timeout(100_000);
const topicCount = 100;
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
const td = generateTestData(topicCount, TestNetworkConfig);

await waku.filter.subscribe(
td.decoders,
@ -194,7 +200,7 @@ const runTests = (strictCheckNodes: boolean): void => {
serviceNodes.messageCollector.verifyReceivedMessage(index, {
expectedContentTopic: topic,
expectedMessageText: `Message for Topic ${index + 1}`,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});


@ -1,142 +1,31 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
CreateNodeOptions,
DefaultNetworkConfig,
IWaku,
LightNode,
NetworkConfig,
Protocols
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex,
derivePubsubTopicsFromNetworkConfig,
createRoutingInfo,
Logger
} from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { Context } from "mocha";
import pRetry from "p-retry";

import {
NOISE_KEY_1,
ServiceNodesFleet,
waitForConnections
} from "../../src/index.js";

// Constants for test configuration.
export const log = new Logger("test:filter");
export const TestContentTopic = "/test/1/waku-filter/default";
export const ClusterId = 2;
export const ShardIndex = contentTopicToShardIndex(TestContentTopic);
export const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: ClusterId
};
export const TestPubsubTopic = contentTopicToPubsubTopic(
export const TestClusterId = 2;
export const TestNumShardsInCluster = 8;
export const TestShardIndex = contentTopicToShardIndex(
TestContentTopic,
ClusterId
TestNumShardsInCluster
);
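// Presumably contentTopicToShardIndex hashes the content topic into one of the
// TestNumShardsInCluster (8) shards; the exact mapping is defined in @waku/utils.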
export const TestNetworkConfig = {
clusterId: TestClusterId,
numShardsInCluster: TestNumShardsInCluster
};
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
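// Assuming the usual "/waku/2/rs/<clusterId>/<shardId>" naming convention, the
// derived pubsub topic is "/waku/2/rs/2/<TestShardIndex>", exposed as
// TestRoutingInfo.pubsubTopic.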
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic);
export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);
export const messageText = "Filtering works!";
export const messagePayload = { payload: utf8ToBytes(messageText) };

export async function runMultipleNodes(
context: Context,
networkConfig: NetworkConfig = DefaultNetworkConfig,
strictChecking: boolean = false,
numServiceNodes = 3,
withoutFilter = false
): Promise<[ServiceNodesFleet, LightNode]> {
const pubsubTopics = derivePubsubTopicsFromNetworkConfig(networkConfig);
// create numServiceNodes nodes
const serviceNodes = await ServiceNodesFleet.createAndRun(
context,
numServiceNodes,
strictChecking,
networkConfig,
undefined,
withoutFilter
);

const wakuOptions: CreateNodeOptions = {
staticNoiseKey: NOISE_KEY_1,
libp2p: {
addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] }
}
};

log.info("Starting js waku node with :", JSON.stringify(wakuOptions));
let waku: LightNode | undefined;
try {
waku = await createLightNode(wakuOptions);
await waku.start();
} catch (error) {
log.error("jswaku node failed to start:", error);
}

if (!waku) {
throw new Error("Failed to initialize waku");
}

for (const node of serviceNodes.nodes) {
await waku.dial(await node.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Filter, Protocols.LightPush]);
await node.ensureSubscriptions(pubsubTopics);

const wakuConnections = waku.libp2p.getConnections();

if (wakuConnections.length < 1) {
throw new Error(`Expected at least 1 connection for js-waku.`);
}

await node.waitForLog(waku.libp2p.peerId.toString(), 100);
}

await waitForConnections(numServiceNodes, waku);

return [serviceNodes, waku];
}

export async function teardownNodesWithRedundancy(
serviceNodes: ServiceNodesFleet,
wakuNodes: IWaku | IWaku[]
): Promise<void> {
const wNodes = Array.isArray(wakuNodes) ? wakuNodes : [wakuNodes];

const stopNwakuNodes = serviceNodes.nodes.map(async (node) => {
await pRetry(
async () => {
try {
await node.stop();
} catch (error) {
log.error("Service Node failed to stop:", error);
throw error;
}
},
{ retries: 3 }
);
});

const stopWakuNodes = wNodes.map(async (waku) => {
if (waku) {
await pRetry(
async () => {
try {
await waku.stop();
} catch (error) {
log.error("Waku failed to stop:", error);
throw error;
}
},
{ retries: 3 }
);
}
});

await Promise.all([...stopNwakuNodes, ...stopWakuNodes]);
}

@ -14,14 +14,11 @@ import {
} from "../../src/index.js";

import {
ClusterId,
messagePayload,
messageText,
ShardIndex,
TestContentTopic,
TestEncoder,
TestPubsubTopic,
TestShardInfo
TestRoutingInfo
} from "./utils.js";

const runTests = (strictNodeCheck: boolean): void => {
@ -35,7 +32,7 @@ const runTests = (strictNodeCheck: boolean): void => {
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestShardInfo,
TestRoutingInfo,
{ lightpush: true, filter: true },
strictNodeCheck,
numServiceNodes,
@ -54,20 +51,18 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(numServiceNodes);

expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: testItem.value,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});

// TODO: skiped till https://github.com/waku-org/nwaku/issues/3369 resolved
// TODO: skipped till https://github.com/waku-org/nwaku/issues/3369 resolved
it.skip("Push 30 different messages", async function () {
const generateMessageText = (index: number): string => `M${index}`;

@ -79,17 +74,15 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.successes.length).to.eq(numServiceNodes);
}

expect(
await serviceNodes.messageCollector.waitForMessages(30, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(30)).to.eq(
true
);

for (let i = 0; i < 30; i++) {
serviceNodes.messageCollector.verifyReceivedMessage(i, {
expectedMessageText: generateMessageText(i),
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
}
});
@ -105,46 +98,43 @@ const runTests = (strictNodeCheck: boolean): void => {
ProtocolError.EMPTY_PAYLOAD
);

expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});

TEST_STRING.forEach((testItem) => {
it(`Push message with content topic containing ${testItem.description}`, async function () {
const customEncoder = waku.createEncoder({
contentTopic: testItem.value,
shardInfo: {
clusterId: ClusterId,
shard: ShardIndex
}
});
const pushResponse = await waku.lightPush.send(
customEncoder,
messagePayload
);
expect(pushResponse.successes.length).to.eq(numServiceNodes);
if (!testItem.invalidContentTopic) {
it(`Push message with content topic containing ${testItem.description}`, async function () {
const contentTopic = `/test/1/${testItem.value}/proto`;
const customEncoder = waku.createEncoder({
contentTopic
});
const pushResponse = await waku.lightPush.send(
customEncoder,
messagePayload
);
expect(pushResponse.successes.length).to.eq(numServiceNodes);

expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: testItem.value,
expectedPubsubTopic: TestPubsubTopic
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
contentTopic
})
).to.eq(true);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: contentTopic,
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
}
});

it("Push message with meta", async function () {
const customTestEncoder = createEncoder({
contentTopic: TestContentTopic,
metaSetter: () => new Uint8Array(10),
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});

const pushResponse = await waku.lightPush.send(
@ -153,22 +143,20 @@ const runTests = (strictNodeCheck: boolean): void => {
);
expect(pushResponse.successes.length).to.eq(numServiceNodes);

expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});

it("Fails to push message with large meta", async function () {
const customTestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic,
routingInfo: TestRoutingInfo,
metaSetter: () => new Uint8Array(105024) // see the note below ***
});

@ -176,7 +164,7 @@ const runTests = (strictNodeCheck: boolean): void => {
// `nwaku` establishes the max lightpush msg size as `const MaxRpcSize* = MaxWakuMessageSize + 64 * 1024`
// see: https://github.com/waku-org/nwaku/blob/07beea02095035f4f4c234ec2dec1f365e6955b8/waku/waku_lightpush/rpc_codec.nim#L15
// In the PR https://github.com/waku-org/nwaku/pull/2298 we reduced the MaxWakuMessageSize
// from 1MiB to 150KiB. Therefore, the 105024 number comes from substracting ( 1*2^20 - 150*2^10 )
// from 1MiB to 150KiB. Therefore, the 105024 number comes from subtracting ( 1*2^20 - 150*2^10 )
// to the original 10^6 that this test had when MaxWakuMessageSize == 1*2^20
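// Spelling out the arithmetic: 1 * 2^20 = 1,048,576 bytes and 150 * 2^10 =
// 153,600 bytes, so the limit dropped by 1,048,576 - 153,600 = 894,976 bytes;
// 1,000,000 - 894,976 = 105,024, hence Uint8Array(105024) above.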

const pushResponse = await waku.lightPush.send(
@ -188,11 +176,9 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.REMOTE_PEER_REJECTED
);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});

it("Push message with rate limit", async function () {
@ -212,15 +198,13 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(numServiceNodes);

expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});

@ -236,16 +220,14 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(numServiceNodes);

expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedTimestamp: testItem,
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});
@ -268,11 +250,9 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.SIZE_TOO_BIG
);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestPubsubTopic
})
).to.eq(false);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
);
});
});
};

@ -1,6 +1,6 @@
import { createEncoder } from "@waku/core";
import { LightNode, Protocols } from "@waku/interfaces";
import { contentTopicToPubsubTopic } from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

@ -16,31 +16,40 @@ import {
teardownNodesWithRedundancy
} from "../../src/index.js";

import { ClusterId, TestEncoder } from "./utils.js";
import {
TestClusterId,
TestContentTopic,
TestEncoder,
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";

describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
describe("Waku Light Push (Autosharding): Multiple Shards", function () {
this.timeout(30000);
const numServiceNodes = 2;

let waku: LightNode;
let serviceNodes: ServiceNodesFleet;

const customContentTopic2 = "/test/2/waku-light-push/utf8";
const customRoutingInfo2 = createRoutingInfo(TestNetworkConfig, {
contentTopic: customContentTopic2
});

const customEncoder2 = createEncoder({
contentTopic: "/test/2/waku-light-push/utf8",
pubsubTopic: contentTopicToPubsubTopic(
"/test/2/waku-light-push/utf8",
ClusterId
)
contentTopic: customContentTopic2,
routingInfo: customRoutingInfo2
});

beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestRoutingInfo,
{
clusterId: ClusterId,
contentTopics: [TestEncoder.contentTopic, customEncoder2.contentTopic]
lightpush: true,
filter: true,
contentTopic: [TestEncoder.contentTopic, customEncoder2.contentTopic]
},
{ lightpush: true, filter: true },
false,
numServiceNodes,
false
@ -52,6 +61,9 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
});

it("Subscribe and receive messages on 2 different pubsubtopics", async function () {
if (customRoutingInfo2.pubsubTopic === TestEncoder.pubsubTopic)
throw "Invalid test, both encoders use the same shard";

const pushResponse1 = await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M1")
});
@ -66,14 +78,14 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]);

expect(
await messageCollector1.waitForMessages(1, {
pubsubTopic: TestEncoder.pubsubTopic
await messageCollector1.waitForMessagesAutosharding(1, {
contentTopic: TestEncoder.contentTopic
})
).to.eq(true);

expect(
await messageCollector2.waitForMessages(1, {
pubsubTopic: customEncoder2.pubsubTopic
await messageCollector2.waitForMessagesAutosharding(1, {
contentTopic: customEncoder2.contentTopic
})
).to.eq(true);

@ -99,8 +111,8 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
filter: true,
lightpush: true,
relay: true,
clusterId: ClusterId,
shard: [2]
clusterId: TestClusterId,
contentTopic: [TestContentTopic]
});
await nwaku2.ensureSubscriptionsAutosharding([
customEncoder2.pubsubTopic
@ -118,7 +130,7 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
});

await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: TestEncoder.pubsubTopic
contentTopic: TestEncoder.contentTopic
});
await messageCollector2.waitForMessagesAutosharding(1, {
contentTopic: customEncoder2.contentTopic

@ -1,43 +1,22 @@
import { createEncoder } from "@waku/core";
import { LightNode, NetworkConfig, Protocols } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { createLightNode } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex,
Logger
} from "@waku/utils";
import { Context } from "mocha";

import { runNodes as runNodesBuilder, ServiceNode } from "../../src/index.js";
import { createRoutingInfo, Logger } from "@waku/utils";

// Constants for test configuration.
export const log = new Logger("test:lightpush");
export const TestContentTopic = "/test/1/waku-light-push/utf8";
export const ClusterId = 3;
export const ShardIndex = contentTopicToShardIndex(TestContentTopic);
export const TestPubsubTopic = contentTopicToPubsubTopic(
TestContentTopic,
ClusterId
);
export const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: ClusterId
export const TestClusterId = 3;
export const TestNumShardsInCluster = 8;
export const TestNetworkConfig = {
clusterId: TestClusterId,
numShardsInCluster: TestNumShardsInCluster
};
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
export const messageText = "Light Push works!";
export const messagePayload = { payload: utf8ToBytes(messageText) };

export const runNodes = (
context: Context,
shardInfo: NetworkConfig
): Promise<[ServiceNode, LightNode]> =>
runNodesBuilder<LightNode>({
context,
createNode: createLightNode,
protocols: [Protocols.LightPush, Protocols.Filter],
networkConfig: shardInfo
});

@ -1,5 +1,5 @@
import { MetadataCodec } from "@waku/core";
import type { LightNode, ShardInfo } from "@waku/interfaces";
import type { LightNode } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { decodeRelayShard } from "@waku/utils";
import chai, { expect } from "chai";
@ -29,25 +29,27 @@ describe("Metadata Protocol", function () {
await tearDownNodes([nwaku1], waku);
});

describe("connections", function () {
it("same cluster, same shard: nodes connect", async function () {
const shardInfo: ShardInfo = {
clusterId: 2,
shards: [1]
};
describe("static sharding", function () {
it("same cluster, static sharding: nodes connect", async function () {
const clusterId = 2;
const shards = [1];
const numShardsInCluster = 8;

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo.clusterId,
shard: shardInfo.shards
clusterId,
shard: shards,
numShardsInNetwork: numShardsInCluster
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({ networkConfig: shardInfo });
waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

@ -65,82 +67,33 @@ describe("Metadata Protocol", function () {
}

expect(shardInfoRes).to.not.be.undefined;
expect(shardInfoRes.clusterId).to.equal(shardInfo.clusterId);
expect(shardInfoRes.shards).to.include.members(shardInfo.shards);
expect(shardInfoRes.clusterId).to.equal(clusterId);
expect(shardInfoRes.shards).to.include.members(shards);

const activeConnections = waku.libp2p.getConnections();
expect(activeConnections.length).to.equal(1);
});

it("same cluster, different shard: nodes connect", async function () {
const shardInfo1: ShardInfo = {
clusterId: 2,
shards: [1]
};

const shardInfo2: ShardInfo = {
clusterId: 2,
shards: [2]
};
it("different cluster: nodes don't connect", async function () {
const clusterIdNwaku = 2;
const clusterIdJsWaku = 3;
const shards = [1];
const numShardsInCluster = 8;

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo1.clusterId,
shard: shardInfo1.shards
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({ networkConfig: shardInfo2 });
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

if (!waku.libp2p.services.metadata) {
expect(waku.libp2p.services.metadata).to.not.be.undefined;
return;
}

const { error, shardInfo: shardInfoRes } =
await waku.libp2p.services.metadata.query(nwaku1PeerId);

if (error) {
expect(error).to.be.null;
return;
}

expect(shardInfoRes).to.not.be.undefined;
expect(shardInfoRes.clusterId).to.equal(shardInfo1.clusterId);
expect(shardInfoRes.shards).to.include.members(shardInfo1.shards);

const activeConnections = waku.libp2p.getConnections();
expect(activeConnections.length).to.equal(1);
});

it("different cluster, same shard: nodes don't connect", async function () {
const shardInfo1: ShardInfo = {
clusterId: 2,
shards: [1]
};

const shardInfo2: ShardInfo = {
clusterId: 3,
shards: [1]
};

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo1.clusterId,
shard: shardInfo1.shards
clusterId: clusterIdNwaku,
shard: shards,
numShardsInNetwork: numShardsInCluster
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();

waku = await createLightNode({ networkConfig: shardInfo2 });
waku = await createLightNode({
networkConfig: { clusterId: clusterIdJsWaku, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

@ -157,28 +110,152 @@ describe("Metadata Protocol", function () {
expect(waku.libp2p.getConnections().length).to.equal(0);
});

it("different cluster, different shard: nodes don't connect", async function () {
const shardInfo1: ShardInfo = {
clusterId: 2,
shards: [1]
};

const shardInfo2: ShardInfo = {
clusterId: 3,
shards: [2]
};
it("PeerStore has remote peer's shard info after successful connection", async function () {
const clusterId = 2;
const shards = [1];
const numShardsInCluster = 8;

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo1.clusterId,
shard: shardInfo1.shards
clusterId,
shard: shards,
numShardsInNetwork: numShardsInCluster
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

// delay to ensure the connection is established and shardInfo is updated
await delay(500);

const encodedShardInfo = (
await waku.libp2p.peerStore.get(nwaku1PeerId)
).metadata.get("shardInfo");
expect(encodedShardInfo).to.not.be.undefined;

const metadataShardInfo = decodeRelayShard(encodedShardInfo!);
expect(metadataShardInfo).not.be.undefined;

expect(metadataShardInfo!.clusterId).to.eq(clusterId);
expect(metadataShardInfo.shards).to.include.members(shards);
});

it("receiving a ping from a peer does not overwrite shard info", async function () {
const clusterId = 2;
const shards = [1];
const numShardsInCluster = 0; // static sharding

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
shard: shards,
numShardsInNetwork: numShardsInCluster
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({
networkConfig: {
clusterId,
numShardsInCluster
},
connectionManager: {
pingKeepAlive: 1
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

// delay to ensure the connection is established, shardInfo is updated, and there is a ping
await delay(1500);

const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata;
expect(metadata.get("shardInfo")).to.not.be.undefined;

const pingInfo = metadata.get("ping");
expect(pingInfo).to.not.be.undefined;
});
});
describe("auto sharding", function () {
it("same cluster: nodes connect", async function () {
const clusterId = 2;
const contentTopic = "/foo/1/bar/proto";
const numShardsInCluster = 0;

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
contentTopic: [contentTopic],
numShardsInNetwork: numShardsInCluster
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

if (!waku.libp2p.services.metadata) {
expect(waku.libp2p.services.metadata).to.not.be.undefined;
return;
}

const { error, shardInfo: shardInfoRes } =
await waku.libp2p.services.metadata.query(nwaku1PeerId);

if (error) {
expect(error).to.be.null;
return;
}

expect(shardInfoRes).to.not.be.undefined;
expect(shardInfoRes.clusterId).to.equal(clusterId);
// TODO: calculate shards from content topics
//expect(shardInfoRes.shards).to.include.members(shards);

const activeConnections = waku.libp2p.getConnections();
expect(activeConnections.length).to.equal(1);
});

it("different cluster: nodes don't connect", async function () {
const clusterIdNwaku = 2;
const clusterIdJSWaku = 3;
const contentTopic = ["/foo/1/bar/proto"];
const numShardsInCluster = 0;

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: clusterIdNwaku,
contentTopic,
numShardsInNetwork: numShardsInCluster
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();

waku = await createLightNode({ networkConfig: shardInfo2 });
waku = await createLightNode({
networkConfig: {
clusterId: clusterIdJSWaku,
numShardsInCluster
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

@ -195,77 +272,81 @@ describe("Metadata Protocol", function () {

expect(waku.libp2p.getConnections().length).to.equal(0);
});
});

it("PeerStore has remote peer's shard info after successful connection", async function () {
const shardInfo: ShardInfo = {
clusterId: 2,
shards: [1]
};
it("PeerStore has remote peer's shard info after successful connection", async function () {
const clusterId = 2;
const contentTopic = ["/foo/1/bar/proto"];
const numShardsInCluster = 0;

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo.clusterId,
shard: shardInfo.shards
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
contentTopic
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({
networkConfig: { clusterId, numShardsInCluster }
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

// delay to ensure the connection is established and shardInfo is updated
await delay(500);

const encodedShardInfo = (
await waku.libp2p.peerStore.get(nwaku1PeerId)
).metadata.get("shardInfo");
expect(encodedShardInfo).to.not.be.undefined;

const metadataShardInfo = decodeRelayShard(encodedShardInfo!);
expect(metadataShardInfo).not.be.undefined;

expect(metadataShardInfo!.clusterId).to.eq(clusterId);
// TODO derive shard from content topic
// expect(metadataShardInfo.shards).to.include.members(shards);
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();
it("receiving a ping from a peer does not overwrite shard info", async function () {
const clusterId = 2;
const contentTopic = ["/foo/1/bar/proto"];
const numShardsInCluster = 0;

waku = await createLightNode({ networkConfig: shardInfo });
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId,
contentTopic
});

// delay to ensure the connection is established and shardInfo is updated
await delay(500);
const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

const encodedShardInfo = (
await waku.libp2p.peerStore.get(nwaku1PeerId)
).metadata.get("shardInfo");
expect(encodedShardInfo).to.not.be.undefined;
waku = await createLightNode({
networkConfig: {
clusterId,
numShardsInCluster
},
connectionManager: {
pingKeepAlive: 1
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

const metadataShardInfo = decodeRelayShard(encodedShardInfo!);
expect(metadataShardInfo).not.be.undefined;
// delay to ensure the connection is established, shardInfo is updated, and there is a ping
await delay(1500);

expect(metadataShardInfo!.clusterId).to.eq(shardInfo.clusterId);
expect(metadataShardInfo.shards).to.include.members(shardInfo.shards);
});
const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata;
expect(metadata.get("shardInfo")).to.not.be.undefined;

it("receiving a ping from a peer does not overwrite shard info", async function () {
const shardInfo: ShardInfo = {
clusterId: 2,
shards: [1]
};

await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: shardInfo.clusterId,
shard: shardInfo.shards
const pingInfo = metadata.get("ping");
expect(pingInfo).to.not.be.undefined;
});

const nwaku1Ma = await nwaku1.getMultiaddrWithId();
const nwaku1PeerId = await nwaku1.getPeerId();

waku = await createLightNode({
networkConfig: shardInfo,
connectionManager: {
pingKeepAlive: 1
}
});
await waku.start();
await waku.libp2p.dialProtocol(nwaku1Ma, MetadataCodec);

// delay to ensure the connection is established, shardInfo is updated, and there is a ping
await delay(1500);

const metadata = (await waku.libp2p.peerStore.get(nwaku1PeerId)).metadata;
expect(metadata.get("shardInfo")).to.not.be.undefined;

const pingInfo = metadata.get("ping");
expect(pingInfo).to.not.be.undefined;
});
});

@ -17,8 +17,6 @@ describe("nwaku", () => {
"--rest-admin=true",
"--websocket-support=true",
"--log-level=TRACE",
"--cluster-id=0",
"--shard=0",
"--ports-shift=42"
];


@ -5,7 +5,7 @@ import { createLightNode } from "@waku/sdk";

import {
beforeEachCustom,
DefaultTestShardInfo,
DefaultTestNetworkConfig,
makeLogFileName,
ServiceNode,
tearDownNodes
@ -40,7 +40,9 @@ describe("Peer Exchange", function () {

tests({
async setup() {
waku = await createLightNode({ networkConfig: DefaultTestShardInfo });
waku = await createLightNode({
networkConfig: DefaultTestNetworkConfig
});
await waku.start();

const nwaku2Ma = await nwaku2.getMultiaddrWithId();

@ -16,7 +16,7 @@ describe("Peer Exchange Continuous Discovery", () => {
let randomPeerId: PeerId;
let waku: LightNode;
const shardInfo: ShardInfo = {
clusterId: 1,
clusterId: 2,
shards: [1, 2]
};
const multiaddrs = [multiaddr("/ip4/127.0.0.1/udp/1234")];

@ -10,6 +10,8 @@ import Sinon, { SinonSpy } from "sinon";
import {
afterEachCustom,
beforeEachCustom,
DefaultTestClusterId,
DefaultTestNetworkConfig,
DefaultTestShardInfo,
makeLogFileName,
ServiceNode,
@ -30,14 +32,14 @@ describe("Peer Exchange", function () {
nwaku1 = new ServiceNode(makeLogFileName(this.ctx) + "1");
nwaku2 = new ServiceNode(makeLogFileName(this.ctx) + "2");
await nwaku1.start({
clusterId: DefaultTestShardInfo.clusterId,
clusterId: DefaultTestClusterId,
shard: DefaultTestShardInfo.shards,
discv5Discovery: true,
peerExchange: true,
relay: true
});
await nwaku2.start({
clusterId: DefaultTestShardInfo.clusterId,
clusterId: DefaultTestClusterId,
shard: DefaultTestShardInfo.shards,
discv5Discovery: true,
peerExchange: true,
@ -52,7 +54,7 @@ describe("Peer Exchange", function () {

it("peer exchange sets tag", async function () {
waku = await createLightNode({
networkConfig: DefaultTestShardInfo,
networkConfig: DefaultTestNetworkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [(await nwaku2.getMultiaddrWithId()).toString()] }),
@ -117,7 +119,7 @@ describe("Peer Exchange", function () {

nwaku3 = new ServiceNode(makeLogFileName(this) + "3");
await nwaku3.start({
clusterId: DefaultTestShardInfo.clusterId,
clusterId: DefaultTestClusterId,
shard: DefaultTestShardInfo.shards,
discv5Discovery: true,
peerExchange: true,

@ -6,7 +6,6 @@ import {
} from "@waku/discovery";
import type { LightNode } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { singleShardInfosToShardInfo } from "@waku/utils";
import { expect } from "chai";

import { afterEachCustom, tearDownNodes } from "../../src/index.js";
@ -36,8 +35,7 @@ describe("Peer Exchange", () => {
)
.filter((ma) => ma.includes("wss"));

const singleShardInfo = { clusterId: 1, shard: 1 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
const networkConfig = { clusterId: 2, numShardsInCluster: 0 };
waku = await createLightNode({
libp2p: {
peerDiscovery: [
@ -45,7 +43,7 @@ describe("Peer Exchange", () => {
wakuPeerExchangeDiscovery()
]
},
networkConfig: shardInfo
networkConfig
});

await waku.start();

@ -13,6 +13,7 @@ import {
createDecoder as createSymDecoder,
createEncoder as createSymEncoder
} from "@waku/message-encryption/symmetric";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

@ -23,7 +24,7 @@ import {
tearDownNodes
} from "../../src/index.js";

import { runJSNodes, TestPubsubTopic } from "./utils.js";
import { runJSNodes, TestNetworkConfig, TestRoutingInfo } from "./utils.js";

describe("Waku Relay", function () {
this.timeout(15000);
@ -51,20 +52,20 @@ describe("Waku Relay", function () {
const eciesEncoder = createEciesEncoder({
contentTopic: asymTopic,
publicKey,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
const symEncoder = createSymEncoder({
contentTopic: symTopic,
symKey,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});

const eciesDecoder = createEciesDecoder(
asymTopic,
privateKey,
TestPubsubTopic
TestRoutingInfo,
privateKey
);
const symDecoder = createSymDecoder(symTopic, symKey, TestPubsubTopic);
const symDecoder = createSymDecoder(symTopic, TestRoutingInfo, symKey);
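// Note the changed argument order in the encryption factories above: the
// routing info now comes second (before the key material) in both
// createEciesDecoder and createSymDecoder, where the pubsub topic previously
// came last.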

const msgs: IDecodedMessage[] = [];
void waku2.relay.subscribeWithUnsubscribe([eciesDecoder], (wakuMsg) => {
@ -93,19 +94,20 @@ describe("Waku Relay", function () {
"Published on content topic with added then deleted observer";

const contentTopic = "/test/1/observer/proto";
const routingInfo = createRoutingInfo(TestNetworkConfig, { contentTopic });

// The promise **fails** if we receive a message on this observer.
const receivedMsgPromise: Promise<IDecodedMessage> = new Promise(
(resolve, reject) => {
const deleteObserver = waku2.relay.subscribeWithUnsubscribe(
[createDecoder(contentTopic)],
[createDecoder(contentTopic, routingInfo)],
reject
) as () => void;
deleteObserver();
setTimeout(resolve, 500);
}
);
await waku1.relay.send(createEncoder({ contentTopic }), {
await waku1.relay.send(createEncoder({ contentTopic, routingInfo }), {
payload: utf8ToBytes(messageText)
});


@ -19,8 +19,8 @@ import {
TestContentTopic,
TestDecoder,
TestEncoder,
TestPubsubTopic,
TestShardInfo
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";
import { runRelayNodes } from "./utils.js";

@ -30,7 +30,12 @@ describe("Waku Relay, Interop", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runRelayNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runRelayNodes(
this.ctx,
TestNetworkConfig,
undefined,
[TestContentTopic]
);
});

afterEachCustom(this, async () => {
@ -42,8 +47,9 @@ describe("Waku Relay, Interop", function () {

while (subscribers.length === 0) {
await delay(200);
subscribers =
waku.libp2p.services.pubsub!.getSubscribers(TestPubsubTopic);
subscribers = waku.libp2p.services.pubsub!.getSubscribers(
TestRoutingInfo.pubsubTopic
);
}

const nimPeerId = await nwaku.getPeerId();
@ -86,7 +92,8 @@ describe("Waku Relay, Interop", function () {
ServiceNode.toMessageRpcQuery({
contentTopic: TestContentTopic,
payload: utf8ToBytes(messageText)
})
}),
TestRoutingInfo
);

const receivedMsg = await receivedMsgPromise;
@ -98,9 +105,10 @@ describe("Waku Relay, Interop", function () {

it("Js publishes, other Js receives", async function () {
const waku2 = await createRelayNode({
routingInfos: [TestRoutingInfo],
staticNoiseKey: NOISE_KEY_2,
emitSelf: true,
networkConfig: TestShardInfo
networkConfig: TestNetworkConfig
});
await waku2.start();


@ -1,18 +1,7 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
ContentTopicInfo,
IDecodedMessage,
Protocols,
RelayNode,
ShardInfo,
SingleShardInfo
} from "@waku/interfaces";
import { IDecodedMessage, Protocols, RelayNode } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import {
contentTopicToPubsubTopic,
pubsubTopicToSingleShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

@ -26,43 +15,38 @@ import {
} from "../../src/index.js";
import { TestDecoder } from "../filter/utils.js";

describe("Waku Relay, multiple pubsub topics", function () {
describe("Waku Relay, static sharding, multiple pubsub topics", function () {
this.timeout(15000);
let waku1: RelayNode;
let waku2: RelayNode;
let waku3: RelayNode;

const customPubsubTopic1 = singleShardInfoToPubsubTopic({
clusterId: 3,
shard: 1
});
const customPubsubTopic2 = singleShardInfoToPubsubTopic({
clusterId: 3,
shard: 2
});
const shardInfo1: ShardInfo = { clusterId: 3, shards: [1] };
const singleShardInfo1: SingleShardInfo = {
clusterId: 3,
shard: 1
};
const clusterId = 3;
const networkConfig = { clusterId };

const shardOne = 1;
const shardTwo = 2;

const customContentTopic1 = "/test/2/waku-relay/utf8";
const customContentTopic2 = "/test/3/waku-relay/utf8";
const shardInfo2: ShardInfo = { clusterId: 3, shards: [2] };
const singleShardInfo2: SingleShardInfo = {
clusterId: 3,
shard: 2
};

const routingInfoOne = createRoutingInfo(networkConfig, {
shardId: shardOne
});
const routingInfoTwo = createRoutingInfo(networkConfig, {
shardId: shardTwo
});
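// Each relay shard has its own pubsub topic (presumably "/waku/2/rs/3/1" and
// "/waku/2/rs/3/2" here), so routingInfoOne and routingInfoTwo place the nodes
// on two independent gossipsub meshes.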
|
||||
|
||||
const customEncoder1 = createEncoder({
|
||||
pubsubTopicShardInfo: singleShardInfo1,
|
||||
contentTopic: customContentTopic1
|
||||
contentTopic: customContentTopic1,
|
||||
routingInfo: routingInfoOne
|
||||
});
|
||||
const customDecoder1 = createDecoder(customContentTopic1, singleShardInfo1);
|
||||
const customDecoder1 = createDecoder(customContentTopic1, routingInfoOne);
|
||||
const customEncoder2 = createEncoder({
|
||||
pubsubTopicShardInfo: singleShardInfo2,
|
||||
contentTopic: customContentTopic2
|
||||
contentTopic: customContentTopic2,
|
||||
routingInfo: routingInfoTwo
|
||||
});
|
||||
const customDecoder2 = createDecoder(customContentTopic2, singleShardInfo2);
|
||||
const shardInfoBothShards: ShardInfo = { clusterId: 3, shards: [1, 2] };
|
||||
const customDecoder2 = createDecoder(customContentTopic2, routingInfoTwo);

afterEachCustom(this, async () => {
await tearDownNodes([], [waku1, waku2, waku3]);
@ -70,35 +54,36 @@ describe("Waku Relay, multiple pubsub topics", function () {

[
{
pubsub: customPubsubTopic1,
shardInfo: shardInfo1,
routingInfo: routingInfoOne,
encoder: customEncoder1,
decoder: customDecoder1
},
{
pubsub: customPubsubTopic2,
shardInfo: shardInfo2,
routingInfo: routingInfoTwo,
encoder: customEncoder2,
decoder: customDecoder2
}
].forEach((testItem) => {
it(`3 nodes on ${testItem.pubsub} topic`, async function () {
it(`3 nodes on ${testItem.routingInfo.pubsubTopic} topic`, async function () {
const [msgCollector1, msgCollector2, msgCollector3] = Array(3)
.fill(null)
.map(() => new MessageCollector());

[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig: networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig: networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig: networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -196,16 +181,19 @@ describe("Waku Relay, multiple pubsub topics", function () {
// Waku1 and waku2 are using multiple pubsub topics
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: shardInfoBothShards,
networkConfig: networkConfig,
routingInfos: [routingInfoOne, routingInfoTwo],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: shardInfoBothShards,
networkConfig: networkConfig,
routingInfos: [routingInfoOne, routingInfoTwo],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: shardInfo1,
networkConfig: networkConfig,
routingInfos: [routingInfoOne],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -262,18 +250,22 @@ describe("Waku Relay, multiple pubsub topics", function () {
expect(msgCollector3.hasMessage(customContentTopic1, "M3")).to.eq(true);
});

it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () {
it("n1 and n2 use relay shard 1, n3 uses relay shard 2", async function () {
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: shardInfo1,
networkConfig,
routingInfos: [routingInfoOne],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: shardInfo1,
networkConfig,
routingInfos: [routingInfoOne],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig,
routingInfos: [routingInfoTwo],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -319,55 +311,45 @@ describe("Waku Relay, multiple pubsub topics", function () {
await waku3NoMsgPromise;

expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(customPubsubTopic1);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(routingInfoOne.pubsubTopic);
});
});

describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
describe("Waku Relay auto-sharding, multiple pubsub topics", function () {
this.timeout(15000);
const clusterId = 7;
let waku1: RelayNode;
let waku2: RelayNode;
let waku3: RelayNode;

const networkConfig = { clusterId, numShardsInCluster: 8 };

const customContentTopic1 = "/waku/2/content/utf8";
const customContentTopic2 = "/myapp/1/latest/proto";
const autoshardingPubsubTopic1 = contentTopicToPubsubTopic(
customContentTopic1,
clusterId
);
const autoshardingPubsubTopic2 = contentTopicToPubsubTopic(
customContentTopic2,
clusterId
);
const contentTopicInfo1: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [customContentTopic1]
};
const contentTopicInfo2: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [customContentTopic2]
};

const routingInfo1 = createRoutingInfo(networkConfig, {
contentTopic: customContentTopic1
});
const routingInfo2 = createRoutingInfo(networkConfig, {
contentTopic: customContentTopic2
});

if (routingInfo1.pubsubTopic == routingInfo2.pubsubTopic)
throw "Internal error, both content topics resolve to same shard";

const customEncoder1 = createEncoder({
contentTopic: customContentTopic1,
pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
routingInfo: routingInfo1
});
const customDecoder1 = createDecoder(
customContentTopic1,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
);
const customDecoder1 = createDecoder(customContentTopic1, routingInfo1);
const customEncoder2 = createEncoder({
contentTopic: customContentTopic2,
pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
routingInfo: routingInfo2
});
const customDecoder2 = createDecoder(
customContentTopic2,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
);
const contentTopicInfoBothShards: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [customContentTopic1, customContentTopic2]
};
const customDecoder2 = createDecoder(customContentTopic2, routingInfo2);

const relayShard1 = { clusterId, shards: [routingInfo1.shardId] };
const relayShard2 = { clusterId, shards: [routingInfo2.shardId] };
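For contrast, a hedged sketch of the auto-sharding path set up above: the shard is derived deterministically from the content topic (a hash over its application and version fields in the Waku sharding spec), so peers only need to agree on the content topic and the network config.

import { createRoutingInfo } from "@waku/utils";

const networkConfig = { clusterId: 7, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
  contentTopic: "/myapp/1/latest/proto"
});

// shardId falls in [0, numShardsInCluster) and pubsubTopic follows from
// it, which is why the check above asserts the two topics differ.
console.log(routingInfo.shardId, routingInfo.pubsubTopic);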

afterEachCustom(this, async () => {
await tearDownNodes([], [waku1, waku2, waku3]);
@ -375,35 +357,38 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {

[
{
pubsub: autoshardingPubsubTopic1,
shardInfo: contentTopicInfo1,
routingInfo: routingInfo1,
relayShards: relayShard1,
encoder: customEncoder1,
decoder: customDecoder1
},
{
pubsub: autoshardingPubsubTopic2,
shardInfo: contentTopicInfo2,
routingInfo: routingInfo2,
relayShards: relayShard2,
encoder: customEncoder2,
decoder: customDecoder2
}
].forEach((testItem) => {
it(`3 nodes on ${testItem.pubsub} topic`, async function () {
it(`3 nodes on ${testItem.routingInfo.pubsubTopic} topic`, async function () {
const [msgCollector1, msgCollector2, msgCollector3] = Array(3)
.fill(null)
.map(() => new MessageCollector());

[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: testItem.shardInfo,
networkConfig,
routingInfos: [testItem.routingInfo],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -510,16 +495,19 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
// Waku1 and waku2 are using multiple pubsub topics
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: contentTopicInfoBothShards,
networkConfig,
routingInfos: [routingInfo1, routingInfo2],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: contentTopicInfoBothShards,
networkConfig,
routingInfos: [routingInfo1, routingInfo2],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: contentTopicInfo1,
networkConfig,
routingInfos: [routingInfo1],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -603,18 +591,22 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
expect(msgCollector3.hasMessage(customContentTopic1, "M3")).to.eq(true);
});

it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () {
it("n1 and n2 use the first shard, n3 uses the second shard", async function () {
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
networkConfig: contentTopicInfo1,
networkConfig,
routingInfos: [routingInfo1],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig: contentTopicInfo1,
networkConfig,
routingInfos: [routingInfo1],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
networkConfig,
routingInfos: [routingInfo2],
staticNoiseKey: NOISE_KEY_3
}).then((waku) => waku.start().then(() => waku))
]);
@ -660,6 +652,6 @@ describe("Waku Relay (Autosharding), multiple pubsub topics", function () {
await waku3NoMsgPromise;

expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(autoshardingPubsubTopic1);
expect(waku2ReceivedMsg.pubsubTopic).to.eq(routingInfo1.pubsubTopic);
});
});

@ -1,5 +1,6 @@
import { createEncoder } from "@waku/core";
import { IRateLimitProof, ProtocolError, RelayNode } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

@ -16,13 +17,12 @@ import {
import {
messageText,
runJSNodes,
TestClusterId,
TestContentTopic,
TestDecoder,
TestEncoder,
TestExpectOptions,
TestPubsubTopic,
TestShardInfo,
TestWaitMessageOptions,
TestRoutingInfo,
waitForAllRemotePeers
} from "./utils.js";

@ -54,9 +54,7 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: testItem.value
@ -81,9 +79,7 @@ describe("Waku Relay, Publish", function () {
waku2.libp2p.peerId.toString()
);

expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);

messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
@ -107,31 +103,16 @@ describe("Waku Relay, Publish", function () {
it("Fails to publish message with empty text", async function () {
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("") });
await delay(400);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
});

it("Fails to publish message with wrong content topic", async function () {
const wrong_encoder = createEncoder({
contentTopic: "/test/1/wrong/utf8",
pubsubTopic: TestPubsubTopic
});
await waku1.relay.send(wrong_encoder, {
payload: utf8ToBytes("")
});
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
expect(await messageCollector.waitForMessages(1)).to.eq(false);
});

it("Fails to publish message with wrong pubsubtopic", async function () {
const wrong_encoder = createEncoder({
pubsubTopicShardInfo: {
clusterId: TestShardInfo.clusterId,
shard: TestShardInfo.shards[0] + 1
},
contentTopic: TestContentTopic
contentTopic: TestContentTopic,
routingInfo: createRoutingInfo(
{ clusterId: TestClusterId },
{ shardId: 32 }
)
});
const pushResponse = await waku1.relay.send(wrong_encoder, {
payload: utf8ToBytes("")
@ -140,9 +121,7 @@ describe("Waku Relay, Publish", function () {
ProtocolError.TOPIC_NOT_CONFIGURED
);
await delay(400);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
expect(await messageCollector.waitForMessages(1)).to.eq(false);
});
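A sketch of the failure path this test exercises, assuming a started RelayNode `waku` subscribed to other shards; the response shape is assumed from the assertions above.

import { createEncoder } from "@waku/core";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";

// Shard 32 is not among the node's configured routing infos, so the send
// is rejected locally with ProtocolError.TOPIC_NOT_CONFIGURED.
const offShardEncoder = createEncoder({
  contentTopic: "/test/1/off-shard/utf8",
  routingInfo: createRoutingInfo({ clusterId: 4 }, { shardId: 32 })
});
const pushResponse = await waku.relay.send(offShardEncoder, {
  payload: utf8ToBytes("hello")
});
// Expect no successes and a TOPIC_NOT_CONFIGURED failure in pushResponse.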

[1024 ** 2 + 65536, 2 * 1024 ** 2].forEach((testItem) => {
@ -155,9 +134,7 @@ describe("Waku Relay, Publish", function () {
ProtocolError.SIZE_TOO_BIG
);
await delay(400);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(false);
expect(await messageCollector.waitForMessages(1)).to.eq(false);
});
});

@ -183,9 +160,7 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(2, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(2)).to.eq(true);
});

// Will be skipped until https://github.com/waku-org/js-waku/issues/1464 is done
@ -210,15 +185,13 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(2, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(2)).to.eq(true);
});

it("Publish message with large meta", async function () {
const customTestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic,
routingInfo: TestRoutingInfo,
metaSetter: () => new Uint8Array(10 ** 6)
});

@ -229,9 +202,7 @@ describe("Waku Relay, Publish", function () {
expect(pushResponse.successes[0].toString()).to.eq(
waku2.libp2p.peerId.toString()
);
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
});

it("Publish message with rate limit", async function () {
@ -251,9 +222,7 @@ describe("Waku Relay, Publish", function () {
});
expect(pushResponse.successes.length).to.eq(1);

expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText

@ -1,6 +1,7 @@
import { createDecoder, createEncoder } from "@waku/core";
import { RelayNode } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

@ -20,9 +21,8 @@ import {
TestDecoder,
TestEncoder,
TestExpectOptions,
TestPubsubTopic,
TestShardInfo,
TestWaitMessageOptions,
TestNetworkConfig,
TestRoutingInfo,
waitForAllRemotePeers
} from "./utils.js";

@ -44,10 +44,10 @@ describe("Waku Relay, Subscribe", function () {
it("Mutual subscription", async function () {
await waitForAllRemotePeers(waku1, waku2);
const subscribers1 = waku1.libp2p.services
.pubsub!.getSubscribers(TestPubsubTopic)
.pubsub!.getSubscribers(TestRoutingInfo.pubsubTopic)
.map((p) => p.toString());
const subscribers2 = waku2.libp2p.services
.pubsub!.getSubscribers(TestPubsubTopic)
.pubsub!.getSubscribers(TestRoutingInfo.pubsubTopic)
.map((p) => p.toString());

expect(subscribers1).to.contain(waku2.libp2p.peerId.toString());
@ -65,7 +65,8 @@ describe("Waku Relay, Subscribe", function () {
try {
const waku = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: TestShardInfo
networkConfig: TestNetworkConfig,
routingInfos: [TestRoutingInfo]
});
await waku.start();

@ -90,9 +91,7 @@ describe("Waku Relay, Subscribe", function () {
messageCollector.callback
);
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes(messageText) });
expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText
@ -115,7 +114,6 @@ describe("Waku Relay, Subscribe", function () {
// Verify that each message was received on the corresponding topic.
expect(
await messageCollector.waitForMessages(messageCount, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -130,12 +128,15 @@ describe("Waku Relay, Subscribe", function () {
});

it("Subscribe and publish messages on 2 different content topics", async function () {
const secondContentTopic = "/test/2/waku-relay/utf8";
const secondContentTopic = "/test/0/waku-relay-2/utf8";
const secondRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: secondContentTopic
});
const secondEncoder = createEncoder({
contentTopic: secondContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: secondRoutingInfo
});
const secondDecoder = createDecoder(secondContentTopic, TestPubsubTopic);
const secondDecoder = createDecoder(secondContentTopic, secondRoutingInfo);

await waku2.relay.subscribeWithUnsubscribe(
[TestDecoder],
@ -149,7 +150,6 @@ describe("Waku Relay, Subscribe", function () {
await waku1.relay.send(secondEncoder, { payload: utf8ToBytes("M2") });
expect(
await messageCollector.waitForMessages(2, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -166,7 +166,7 @@ describe("Waku Relay, Subscribe", function () {

it("Subscribe one by one to 100 topics and publish messages", async function () {
const topicCount = 100;
const td = generateTestData(topicCount, TestWaitMessageOptions);
const td = generateTestData(topicCount, TestNetworkConfig);

// Subscribe to topics one by one
for (let i = 0; i < topicCount; i++) {
@ -186,7 +186,6 @@ describe("Waku Relay, Subscribe", function () {
// Verify that each message was received on the corresponding topic.
expect(
await messageCollector.waitForMessages(topicCount, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -201,7 +200,7 @@ describe("Waku Relay, Subscribe", function () {

it("Subscribe at once to 10000 topics and publish messages", async function () {
const topicCount = 10000;
const td = generateTestData(topicCount, TestWaitMessageOptions);
const td = generateTestData(topicCount, TestNetworkConfig);

// Subscribe to all topics at once
await waku2.relay.subscribeWithUnsubscribe(
@ -219,7 +218,6 @@ describe("Waku Relay, Subscribe", function () {
// Verify that each message was received on the corresponding topic.
expect(
await messageCollector.waitForMessages(topicCount, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -248,7 +246,6 @@ describe("Waku Relay, Subscribe", function () {

expect(
await messageCollector.waitForMessages(1, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -258,9 +255,9 @@ describe("Waku Relay, Subscribe", function () {
it.skip("Overlapping topic subscription", async function () {
// Define two sets of test data with overlapping topics.
const topicCount1 = 2;
const td1 = generateTestData(topicCount1, TestWaitMessageOptions);
const td1 = generateTestData(topicCount1, TestNetworkConfig);
const topicCount2 = 4;
const td2 = generateTestData(topicCount2, TestWaitMessageOptions);
const td2 = generateTestData(topicCount2, TestNetworkConfig);

// Subscribe to the first set of topics.
await waku2.relay.subscribeWithUnsubscribe(
@ -293,7 +290,6 @@ describe("Waku Relay, Subscribe", function () {
// Since there are overlapping topics, there should be 6 messages in total (2 from the first set + 4 from the second set).
expect(
await messageCollector.waitForMessages(6, {
...TestWaitMessageOptions,
exact: true
})
).to.eq(true);
@ -301,29 +297,39 @@ describe("Waku Relay, Subscribe", function () {

TEST_STRING.forEach((testItem) => {
it(`Subscribe to topic containing ${testItem.description} and publish message`, async function () {
const newContentTopic = testItem.value;
const newEncoder = createEncoder({
contentTopic: newContentTopic,
pubsubTopic: TestPubsubTopic
});
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
const newContentTopic = `/test/0/${testItem.value}/null`;

await waku2.relay.subscribeWithUnsubscribe(
[newDecoder],
messageCollector.callback
);
await waku1.relay.send(newEncoder, {
payload: utf8ToBytes(messageText)
});
try {
const newRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: newContentTopic
});

expect(
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText,
expectedContentTopic: newContentTopic
});
const newEncoder = createEncoder({
contentTopic: newContentTopic,
routingInfo: newRoutingInfo
});
const newDecoder = createDecoder(newContentTopic, newRoutingInfo);

await waku2.relay.subscribeWithUnsubscribe(
[newDecoder],
messageCollector.callback
);
await waku1.relay.send(newEncoder, {
payload: utf8ToBytes(messageText)
});

expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
...TestExpectOptions,
expectedMessageText: messageText,
expectedContentTopic: newContentTopic
});
} catch (err: unknown) {
if (testItem.invalidContentTopic) {
const e = err as Error;
expect(e.message).to.contain("Invalid generation field");
}
}
});
});
});
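A hedged sketch of multi-topic subscription under the new API, assuming a started RelayNode `waku` and that subscribeWithUnsubscribe returns an unsubscribe handle, as its name suggests.

import { createDecoder } from "@waku/core";
import { createRoutingInfo } from "@waku/utils";

const networkConfig = { clusterId: 4, numShardsInCluster: 8 };
const infoA = createRoutingInfo(networkConfig, {
  contentTopic: "/test/0/topic-a/utf8"
});
const infoB = createRoutingInfo(networkConfig, {
  contentTopic: "/test/0/topic-b/utf8"
});

// Each decoder carries its own routing info, so the two content topics
// may resolve to different shards yet share one subscription callback.
const unsubscribe = await waku.relay.subscribeWithUnsubscribe(
  [
    createDecoder("/test/0/topic-a/utf8", infoA),
    createDecoder("/test/0/topic-b/utf8", infoB)
  ],
  (msg) => console.log(msg.contentTopic)
);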

@ -1,12 +1,14 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
AutoSharding,
ContentTopic,
NetworkConfig,
Protocols,
RelayNode,
ShardInfo
type ShardId
} from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import { contentTopicToPubsubTopic, Logger } from "@waku/utils";
import { createRoutingInfo, Logger } from "@waku/utils";
import { Context } from "mocha";

import {
@ -16,25 +18,25 @@ import {
ServiceNode
} from "../../src/index.js";

export const TestClusterId = 4;
export const messageText = "Relay works!";
export const TestContentTopic = "/test/1/waku-relay/utf8";
export const TestShardInfo: ShardInfo = {
clusterId: 2,
shards: [4]
export const TestContentTopic = "/test/0/waku-relay/utf8";

export const TestNetworkConfig: AutoSharding = {
clusterId: TestClusterId,
numShardsInCluster: 8
};
export const TestPubsubTopic = contentTopicToPubsubTopic(
TestContentTopic,
TestShardInfo.clusterId
);
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
routingInfo: TestRoutingInfo
});
export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic);
export const TestWaitMessageOptions = { pubsubTopic: TestPubsubTopic };
export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);
export const TestExpectOptions = {
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestPubsubTopic
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
};
export const log = new Logger("test:relay");
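A usage sketch of the fixtures above, assuming runJSNodes from later in this file; the encoder's pubsub topic now comes from the routing info, replacing the removed TestPubsubTopic constant.

import { utf8ToBytes } from "@waku/utils/bytes";

import { messageText, runJSNodes, TestEncoder, TestRoutingInfo } from "./utils.js";

const [waku1, waku2] = await runJSNodes();
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes(messageText) });
// Both sides agree on the derived topic:
console.log(TestEncoder.pubsubTopic === TestRoutingInfo.pubsubTopic); // true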

@ -51,10 +53,14 @@ export async function waitForAllRemotePeers(

export const runRelayNodes = (
context: Context,
networkConfig: NetworkConfig
networkConfig: NetworkConfig,
relayShards?: ShardId[], // Only for static sharding
contentTopics?: ContentTopic[] // Only for auto sharding
): Promise<[ServiceNode, RelayNode]> =>
runNodes<RelayNode>({
networkConfig,
relayShards,
contentTopics,
context,
protocols: RELAY_PROTOCOLS,
createNode: createRelayNode
@ -64,12 +70,14 @@ export async function runJSNodes(): Promise<[RelayNode, RelayNode]> {
log.info("Starting JS Waku instances");
const [waku1, waku2] = await Promise.all([
createRelayNode({
routingInfos: [TestRoutingInfo],
staticNoiseKey: NOISE_KEY_1,
networkConfig: TestShardInfo
networkConfig: TestNetworkConfig
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
routingInfos: [TestRoutingInfo],
staticNoiseKey: NOISE_KEY_2,
networkConfig: TestShardInfo,
networkConfig: TestNetworkConfig,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku))
]);

@ -1,9 +1,6 @@
import { LightNode } from "@waku/interfaces";
import { AutoSharding, LightNode } from "@waku/interfaces";
import { createEncoder, utf8ToBytes } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
contentTopicToShardIndex
} from "@waku/utils";
import { contentTopicToPubsubTopic, createRoutingInfo } from "@waku/utils";
import { expect } from "chai";

import {
@ -33,10 +30,14 @@ describe("Autosharding: Running Nodes", function () {
// js-waku allows autosharding for cluster IDs different than 1
it("Cluster ID 0 - Default/Global Cluster", async function () {
const clusterId = 0;
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic] },
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -45,10 +46,7 @@ describe("Autosharding: Running Nodes", function () {

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});

const request = await waku.lightPush.send(encoder, {
@ -56,19 +54,19 @@ describe("Autosharding: Running Nodes", function () {
});

expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});
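The derivation behind this test, assuming the updated three-argument helper used elsewhere in this diff (content topic, cluster id, shard count); the resulting topic follows the "/waku/2/rs/<clusterId>/<shardId>" shape with the shard index computed from the content topic.

import { contentTopicToPubsubTopic } from "@waku/utils";

const topic = contentTopicToPubsubTopic("/myapp/1/latest/proto", 0, 8);
console.log(topic); // "/waku/2/rs/0/<shard in 0..7>"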

it("Non TWN Cluster", async function () {
const clusterId = 5;
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 10 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic] },
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -77,10 +75,7 @@ describe("Autosharding: Running Nodes", function () {

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});

const request = await waku.lightPush.send(encoder, {
@ -88,11 +83,7 @@ describe("Autosharding: Running Nodes", function () {
});

expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});

const numTest = 10;
@ -109,9 +100,14 @@ describe("Autosharding: Running Nodes", function () {
it(`random auto sharding ${
i + 1
} - Cluster ID: ${clusterId}, Content Topic: ${ContentTopic}`, async function () {
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic] },
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -120,10 +116,7 @@ describe("Autosharding: Running Nodes", function () {

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});

const request = await waku.lightPush.send(encoder, {
@ -133,7 +126,7 @@ describe("Autosharding: Running Nodes", function () {
expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
contentTopic: ContentTopic
})
).to.eq(true);
});
@ -143,7 +136,7 @@ describe("Autosharding: Running Nodes", function () {
it("Wrong topic", async function () {
const wrongTopic = "wrong_format";
try {
contentTopicToPubsubTopic(wrongTopic, clusterId);
contentTopicToPubsubTopic(wrongTopic, clusterId, 8);
throw new Error("Wrong topic should've thrown an error");
} catch (err) {
if (
@ -156,10 +149,19 @@ describe("Autosharding: Running Nodes", function () {
});
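The validation this test relies on, stated as a sketch: content topics must parse as "/{application}/{version}/{topic-name}/{encoding}" per the Waku topic conventions before a shard can be derived, so a bare string throws.

import { contentTopicToPubsubTopic } from "@waku/utils";

contentTopicToPubsubTopic("/myapp/1/latest/proto", 0, 8); // ok
contentTopicToPubsubTopic("wrong_format", 0, 8); // throws before sharding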

it("configure the node with multiple content topics", async function () {
const networkConfig: AutoSharding = { clusterId, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: ContentTopic
});

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, contentTopics: [ContentTopic, ContentTopic2] },
{ lightpush: true, filter: true },
routingInfo,
{
lightpush: true,
filter: true,
contentTopic: [ContentTopic, ContentTopic2]
},
false,
numServiceNodes,
true
@ -167,18 +169,14 @@ describe("Autosharding: Running Nodes", function () {

const encoder1 = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic)
}
routingInfo
});

const encoder2 = createEncoder({
contentTopic: ContentTopic2,
pubsubTopicShardInfo: {
clusterId: clusterId,
shard: contentTopicToShardIndex(ContentTopic2)
}
routingInfo: createRoutingInfo(networkConfig, {
contentTopic: ContentTopic2
})
});

const request1 = await waku.lightPush.send(encoder1, {
@ -187,7 +185,7 @@ describe("Autosharding: Running Nodes", function () {
expect(request1.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder1.pubsubTopic
contentTopic: ContentTopic
})
).to.eq(true);

const request2 = await waku.lightPush.send(encoder2, {
@ -197,7 +195,7 @@ describe("Autosharding: Running Nodes", function () {
expect(request2.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder2.pubsubTopic
contentTopic: ContentTopic2
})
).to.eq(true);
});

@ -1,13 +1,8 @@
import { bootstrap } from "@libp2p/bootstrap";
import type { PeerId } from "@libp2p/interface";
import { wakuPeerExchangeDiscovery } from "@waku/discovery";
import {
ContentTopicInfo,
createLightNode,
LightNode,
ShardInfo,
Tags
} from "@waku/sdk";
import type { AutoSharding, StaticSharding } from "@waku/interfaces";
import { createLightNode, LightNode, Tags } from "@waku/sdk";
import { contentTopicToShardIndex } from "@waku/utils";
import chai, { expect } from "chai";
import chaiAsPromised from "chai-as-promised";
@ -48,14 +43,17 @@ describe("Static Sharding: Peer Management", function () {
it("all px service nodes subscribed to the shard topic should be dialed", async function () {
this.timeout(100_000);

const shardInfo: ShardInfo = { clusterId: clusterId, shards: [2] };
const shard = 2;
const numShardsInCluster = 8;
const networkConfig: StaticSharding = { clusterId };

await nwaku1.start({
discv5Discovery: true,
peerExchange: true,
relay: true,
clusterId: clusterId,
shard: [2]
shard: [shard],
numShardsInNetwork: numShardsInCluster
});

const enr1 = (await nwaku1.info()).enrUri;
@ -66,7 +64,8 @@ describe("Static Sharding: Peer Management", function () {
discv5BootstrapNode: enr1,
relay: true,
clusterId: clusterId,
shard: [2]
shard: [shard],
numShardsInNetwork: numShardsInCluster
});

const enr2 = (await nwaku2.info()).enrUri;
@ -77,12 +76,13 @@ describe("Static Sharding: Peer Management", function () {
discv5BootstrapNode: enr2,
relay: true,
clusterId: clusterId,
shard: [2]
shard: [shard],
numShardsInNetwork: numShardsInCluster
});
const nwaku3Ma = await nwaku3.getMultiaddrWithId();

waku = await createLightNode({
networkConfig: shardInfo,
networkConfig: networkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
@ -118,9 +118,11 @@ describe("Static Sharding: Peer Management", function () {
expect(dialPeerSpy.callCount).to.equal(3);
});

it("px service nodes not subscribed to the shard should not be dialed", async function () {
it("px service nodes in same cluster, no matter the shard, should be dialed", async function () {
this.timeout(100_000);
const shardInfoToDial: ShardInfo = { clusterId: clusterId, shards: [2] };

const numShardsInCluster = 8;
const networkConfig: StaticSharding = { clusterId };

// this service node is not subscribed to the shard
await nwaku1.start({
@ -128,7 +130,8 @@ describe("Static Sharding: Peer Management", function () {
discv5Discovery: true,
peerExchange: true,
clusterId: clusterId,
shard: [1]
shard: [1],
numShardsInNetwork: numShardsInCluster
});

const enr1 = (await nwaku1.info()).enrUri;
@ -139,7 +142,8 @@ describe("Static Sharding: Peer Management", function () {
peerExchange: true,
discv5BootstrapNode: enr1,
clusterId: clusterId,
shard: [2]
shard: [2],
numShardsInNetwork: numShardsInCluster
});

const enr2 = (await nwaku2.info()).enrUri;
@ -150,12 +154,13 @@ describe("Static Sharding: Peer Management", function () {
peerExchange: true,
discv5BootstrapNode: enr2,
clusterId: clusterId,
shard: [2]
shard: [2],
numShardsInNetwork: numShardsInCluster
});
const nwaku3Ma = await nwaku3.getMultiaddrWithId();

waku = await createLightNode({
networkConfig: shardInfoToDial,
networkConfig: networkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
@ -178,7 +183,7 @@ describe("Static Sharding: Peer Management", function () {
const tags = Array.from(peer.tags.keys());
if (tags.includes(Tags.PEER_EXCHANGE)) {
pxPeersDiscovered.add(peerId);
if (pxPeersDiscovered.size === 1) {
if (pxPeersDiscovered.size === 2) {
resolve();
}
}
@ -187,7 +192,7 @@ describe("Static Sharding: Peer Management", function () {
});

await delay(1000);
expect(dialPeerSpy.callCount).to.equal(2);
expect(dialPeerSpy.callCount).to.equal(3);
});
});
});
@ -195,7 +200,8 @@ describe("Static Sharding: Peer Management", function () {
describe("Autosharding: Peer Management", function () {
const ContentTopic = "/myapp/1/latest/proto";
const clusterId = 8;
const Shard = [contentTopicToShardIndex(ContentTopic)];
const numShardsInCluster = 8;
const Shard = [contentTopicToShardIndex(ContentTopic, numShardsInCluster)];

describe("Peer Exchange", function () {
let waku: LightNode;
@ -219,9 +225,9 @@ describe("Autosharding: Peer Management", function () {
it("all px service nodes subscribed to the shard topic should be dialed", async function () {
this.timeout(100_000);

const contentTopicInfo: ContentTopicInfo = {
const networkConfig: AutoSharding = {
clusterId: clusterId,
contentTopics: [ContentTopic]
numShardsInCluster: 8
};

await nwaku1.start({
@ -259,7 +265,7 @@ describe("Autosharding: Peer Management", function () {
const nwaku3Ma = await nwaku3.getMultiaddrWithId();

waku = await createLightNode({
networkConfig: contentTopicInfo,
networkConfig: networkConfig,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
@ -294,82 +300,5 @@ describe("Autosharding: Peer Management", function () {

expect(dialPeerSpy.callCount).to.equal(3);
});

it("px service nodes not subscribed to the shard should not be dialed", async function () {
this.timeout(100_000);
const contentTopicInfoToDial: ContentTopicInfo = {
clusterId: clusterId,
contentTopics: [ContentTopic]
};

// this service node is not subscribed to the shard
await nwaku1.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
clusterId: 3,
shard: Shard
});

const enr1 = (await nwaku1.info()).enrUri;

await nwaku2.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
discv5BootstrapNode: enr1,
clusterId: clusterId,
shard: Shard,
contentTopic: [ContentTopic]
});

const enr2 = (await nwaku2.info()).enrUri;

await nwaku3.start({
relay: true,
discv5Discovery: true,
peerExchange: true,
discv5BootstrapNode: enr2,
clusterId: clusterId,
shard: Shard,
contentTopic: [ContentTopic]
});
const nwaku3Ma = await nwaku3.getMultiaddrWithId();

waku = await createLightNode({
networkConfig: contentTopicInfoToDial,
libp2p: {
peerDiscovery: [
bootstrap({ list: [nwaku3Ma.toString()] }),
wakuPeerExchangeDiscovery()
]
}
});

dialPeerSpy = Sinon.spy((waku as any).libp2p, "dial");

await waku.start();

const pxPeersDiscovered = new Set<PeerId>();

await new Promise<void>((resolve) => {
waku.libp2p.addEventListener("peer:discovery", (evt) => {
return void (async () => {
const peerId = evt.detail.id;
const peer = await waku.libp2p.peerStore.get(peerId);
const tags = Array.from(peer.tags.keys());
if (tags.includes(Tags.PEER_EXCHANGE)) {
pxPeersDiscovered.add(peerId);
if (pxPeersDiscovered.size === 1) {
resolve();
}
}
})();
});
});

await delay(1000);
expect(dialPeerSpy.callCount).to.equal(2);
});
});
});

@ -1,15 +1,10 @@
import { LightNode, SingleShardInfo } from "@waku/interfaces";
import { LightNode, StaticSharding } from "@waku/interfaces";
import { createEncoder, utf8ToBytes } from "@waku/sdk";
import {
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
} from "@waku/utils";
import { createRoutingInfo } from "@waku/utils";
import { expect } from "chai";

import {
afterEachCustom,
beforeEachCustom,
runMultipleNodes,
ServiceNodesFleet,
teardownNodesWithRedundancy
@ -30,13 +25,15 @@ describe("Static Sharding: Running Nodes", function () {
}
});

it("shard 0", async function () {
const singleShardInfo = { clusterId: 0, shard: 0 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
it("Cluster id 0, shard 0", async function () {
const clusterId = 0;
const shardId = 0;
const networkConfig: StaticSharding = { clusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId });

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
shardInfo,
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -45,32 +42,27 @@ describe("Static Sharding: Running Nodes", function () {

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
routingInfo
});
expect(encoder.pubsubTopic).to.eq(
singleShardInfoToPubsubTopic(singleShardInfo)
);

const request = await waku.lightPush.send(encoder, {
payload: utf8ToBytes("Hello World")
});

expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: encoder.pubsubTopic
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});
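A sketch of the topic check above, assuming StaticSharding routing info exposes the derived pubsub topic and using a hypothetical content topic; "/waku/2/rs/0/0" follows the standard Waku convention.

import { createEncoder } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";

const routingInfo = createRoutingInfo({ clusterId: 0 }, { shardId: 0 });
const encoder = createEncoder({
  contentTopic: "/test/1/waku-light-push/utf8",
  routingInfo
});
console.log(encoder.pubsubTopic); // "/waku/2/rs/0/0"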

// dedicated test for Default Cluster ID 0
it("Cluster ID 0 - Default/Global Cluster", async function () {
const singleShardInfo = { clusterId: 0, shard: 1 };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);
it("Cluster ID 0, shard 1", async function () {
const clusterId = 0;
const shardId = 1;
const networkConfig: StaticSharding = { clusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId });

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
shardInfo,
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -79,7 +71,7 @@ describe("Static Sharding: Running Nodes", function () {

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
routingInfo
});

const request = await waku.lightPush.send(encoder, {
@ -87,11 +79,7 @@ describe("Static Sharding: Running Nodes", function () {
});

expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
})
).to.eq(true);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
});

const numTest = 10;
@ -102,15 +90,15 @@ describe("Static Sharding: Running Nodes", function () {
// Random shardId between 1 and 1000
const shardId = Math.floor(Math.random() * 1000) + 1;

const networkConfig: StaticSharding = { clusterId };
const routingInfo = createRoutingInfo(networkConfig, { shardId });

it(`random static sharding ${
i + 1
} - Cluster ID: ${clusterId}, Shard ID: ${shardId}`, async function () {
const singleShardInfo = { clusterId: clusterId, shard: shardId };
const shardInfo = singleShardInfosToShardInfo([singleShardInfo]);

[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
shardInfo,
routingInfo,
{ lightpush: true, filter: true },
false,
numServiceNodes,
@ -119,7 +107,7 @@ describe("Static Sharding: Running Nodes", function () {

const encoder = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo
routingInfo
});

const request = await waku.lightPush.send(encoder, {
@ -127,75 +115,9 @@ describe("Static Sharding: Running Nodes", function () {
});

expect(request.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes.messageCollector.waitForMessages(1, {
pubsubTopic: shardInfoToPubsubTopics(shardInfo)[0]
})
).to.eq(true);
});
}

describe("Others", function () {
const clusterId = 2;

const singleShardInfo1: SingleShardInfo = {
clusterId: clusterId,
shard: 2
};
const singleShardInfo2: SingleShardInfo = {
clusterId: clusterId,
shard: 3
};

beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
{ clusterId, shards: [2, 3] },
{ lightpush: true, filter: true },
false,
numServiceNodes,
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
true
);
});

afterEachCustom(this, async () => {
if (serviceNodes) {
await teardownNodesWithRedundancy(serviceNodes, waku ?? []);
}
});

it("configure the node with multiple pubsub topics", async function () {
const encoder1 = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo1
});

const encoder2 = createEncoder({
contentTopic: ContentTopic,
pubsubTopicShardInfo: singleShardInfo2
});

const request1 = await waku?.lightPush.send(encoder1, {
payload: utf8ToBytes("Hello World2")
});

expect(request1?.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes?.messageCollector.waitForMessages(1, {
pubsubTopic: encoder1.pubsubTopic
})
).to.eq(true);

const request2 = await waku?.lightPush.send(encoder2, {
payload: utf8ToBytes("Hello World3")
});

expect(request2?.successes.length).to.eq(numServiceNodes);
expect(
await serviceNodes?.messageCollector.waitForMessages(1, {
pubsubTopic: encoder2.pubsubTopic
})
).to.eq(true);
});
});
}
});

@ -14,9 +14,10 @@ import {
runStoreNodes,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
TestDecoder2,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";

@ -27,7 +28,12 @@ describe("Waku Store, cursor", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(
this.ctx,
TestNetworkConfig,
[],
[TestContentTopic]
);
});

afterEachCustom(this, async () => {
@ -47,7 +53,7 @@ describe("Waku Store, cursor", function () {
nwaku,
messageCount,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

// messages in reversed order (first message at last index)
@ -95,9 +101,9 @@ describe("Waku Store, cursor", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);
waku2 = await startAndConnectLightNode(nwaku, TestShardInfo);
waku2 = await startAndConnectLightNode(nwaku, TestNetworkConfig);

// messages in reversed order (first message at last index)
const messages: DecodedMessage[] = [];
@ -132,17 +138,8 @@ describe("Waku Store, cursor", function () {
).to.be.eq(bytesToUtf8(messages[messages.length - 1].payload));
});
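A hedged sketch of cursor resumption as exercised here, assuming the store node, `waku` and TestDecoder from this spec's setup: a cursor built from a previously received message restarts the query just after that message.

const received: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
  for await (const msg of page) {
    received.push(msg as DecodedMessage);
  }
}

const cursor = waku.store.createCursor(received[2]);
for await (const page of waku.store.queryGenerator([TestDecoder], {
  paginationCursor: cursor
})) {
  // Pages resume at received[3] onwards.
  void page;
}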

it("Passing invalid cursor for nwaku > 0.35.1", async function () {
if (nwaku.version && nwaku.version.minor < 36) {
this.skip();
}

await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
it("Passing invalid cursor", async function () {
await sendMessages(nwaku, totalMsgs, TestContentTopic, TestRoutingInfo);

const messages: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
@ -169,49 +166,7 @@ describe("Waku Store, cursor", function () {
} catch (err) {
if (
!(err instanceof Error) ||
!err.message.includes(
"Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DRIVER_ERROR: cursor not found"
)
) {
throw err;
}
}
});

it("Passing cursor with wrong pubsubTopic for nwaku > 0.35.1", async function () {
if (nwaku.version && nwaku.version.minor < 36) {
this.skip();
}

await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);

const messages: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
for await (const msg of page) {
messages.push(msg as DecodedMessage);
}
}
messages[5].pubsubTopic = TestDecoder2.pubsubTopic;
const cursor = waku.store.createCursor(messages[5]);

try {
for await (const page of waku.store.queryGenerator([TestDecoder], {
paginationCursor: cursor
})) {
void page;
}
throw new Error("Cursor with wrong pubsubtopic was accepted");
} catch (err) {
if (
!(err instanceof Error) ||
!err.message.includes(
"Store query failed with status code: 300, description: BAD_RESPONSE: archive error: DRIVER_ERROR: cursor not found"
)
!err.message.includes("cursor not found")
) {
throw err;
}

packages/tests/tests/store/different_static_shards.spec.ts (new file, 192 lines)
@ -0,0 +1,192 @@
import { createDecoder } from "@waku/core";
import { IMessage, LightNode, ShardId, StaticSharding } from "@waku/interfaces";
import { Protocols } from "@waku/sdk";
import { createRoutingInfo, RoutingInfo } from "@waku/utils";
import { expect } from "chai";

import {
afterEachCustom,
beforeEachCustom,
makeLogFileName,
ServiceNode,
tearDownNodes
} from "../../src/index.js";

import {
processQueriedMessages,
runStoreNodes,
sendMessages,
totalMsgs
} from "./utils.js";

const StaticTestClusterId = 2;
const StaticTestRelayShards = [1, 2];
const StaticTestNetworkConfig: StaticSharding = {
clusterId: StaticTestClusterId
};

const TestShardOne: ShardId = 1;
const TestContentTopicOne = "/test/0/one/proto";
const TestRoutingInfoOne = createRoutingInfo(StaticTestNetworkConfig, {
shardId: TestShardOne
});

const TestDecoderShardOne = createDecoder(
TestContentTopicOne,
TestRoutingInfoOne
);

const TestShardTwo: ShardId = 2;
const TestContentTopicTwo = "/test/0/two/proto";
const TestRoutingInfoTwo = createRoutingInfo(StaticTestNetworkConfig, {
shardId: TestShardTwo
});

const TestDecoderShardTwo = createDecoder(
TestContentTopicTwo,
TestRoutingInfoTwo
);
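A small sketch tying these fixtures together, assuming a LightNode `waku` connected to a store node on cluster 2: each decoder binds its content topic to one shard, so store queries stay shard-scoped.

import { createDecoder } from "@waku/core";
import { createRoutingInfo } from "@waku/utils";

const routingInfo = createRoutingInfo({ clusterId: 2 }, { shardId: 1 });
const decoder = createDecoder("/test/0/one/proto", routingInfo);

// Pages yield only messages stored on this decoder's shard.
for await (const page of waku.store.queryGenerator([decoder])) {
  for await (const msg of page) {
    console.log(msg?.contentTopic);
  }
}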

// TODO: Same tests but with auto-sharding
describe("Waku Store, different static shards", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(
this.ctx,
StaticTestNetworkConfig,
StaticTestRelayShards
);
});

afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});

it("Generator, one shard", async function () {
await sendMessages(
nwaku,
totalMsgs,
TestContentTopicOne,
TestRoutingInfoOne
);

const messages = await processQueriedMessages(
waku,
[TestDecoderShardOne],
TestDecoderShardOne.routingInfo.pubsubTopic
);

expect(messages?.length).eq(totalMsgs);
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});

it("Generator, 2 different shards", async function () {
this.timeout(10000);

const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestContentTopicOne,
TestRoutingInfoOne
);
await sendMessages(
nwaku,
totalMsgs,
TestContentTopicTwo,
TestRoutingInfoTwo
);

const customMessages = await processQueriedMessages(
waku,
[TestDecoderShardOne],
TestDecoderShardOne.routingInfo.pubsubTopic
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);

const testMessages = await processQueriedMessages(
waku,
[TestDecoderShardTwo],
TestDecoderShardTwo.routingInfo.pubsubTopic
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});

it("Generator, 2 nwaku nodes each with different shards", async function () {
this.timeout(10000);

await tearDownNodes([nwaku], []);

// make sure each nwaku node operates on dedicated shard only
nwaku = new ServiceNode(makeLogFileName(this) + "1");
await nwaku.start({
store: true,
clusterId: StaticTestClusterId,
shard: [1],
relay: true,
numShardsInNetwork: 0 // static sharding
});

// Set up and start a new nwaku node with Default Pubsubtopic
|
||||
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
|
||||
await nwaku2.start({
|
||||
store: true,
|
||||
clusterId: StaticTestClusterId,
|
||||
shard: [2],
|
||||
relay: true,
|
||||
numShardsInNetwork: 0 // static sharding
|
||||
});
|
||||
|
||||
const totalMsgs = 10;
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoderShardOne.contentTopic,
|
||||
TestDecoderShardOne.routingInfo as RoutingInfo
|
||||
);
|
||||
await sendMessages(
|
||||
nwaku2,
|
||||
totalMsgs,
|
||||
TestDecoderShardTwo.contentTopic,
|
||||
TestDecoderShardTwo.routingInfo as RoutingInfo
|
||||
);
|
||||
|
||||
await waku.dial(await nwaku.getMultiaddrWithId());
|
||||
await waku.dial(await nwaku2.getMultiaddrWithId());
|
||||
await waku.waitForPeers([Protocols.Store]);
|
||||
|
||||
let customMessages: IMessage[] = [];
|
||||
let testMessages: IMessage[] = [];
|
||||
|
||||
while (
|
||||
customMessages.length != totalMsgs ||
|
||||
testMessages.length != totalMsgs
|
||||
) {
|
||||
customMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[TestDecoderShardOne],
|
||||
TestDecoderShardOne.routingInfo.pubsubTopic
|
||||
);
|
||||
testMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[TestDecoderShardTwo],
|
||||
TestDecoderShardTwo.routingInfo.pubsubTopic
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
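The suite above exercises the new static-sharding path end to end. For context, a minimal sketch of what it relies on; the `/waku/2/rs/<clusterId>/<shardId>` topic format is inferred from topics appearing elsewhere in this change set, not stated in this file.

import { createDecoder, createEncoder } from "@waku/core";
import { StaticSharding } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";

// Static sharding addresses shards by id, so routing info is derived from
// the cluster id plus an explicit shardId rather than from a content topic.
const networkConfig: StaticSharding = { clusterId: 2 };
const routingInfo = createRoutingInfo(networkConfig, { shardId: 1 });

// Assumed to resolve to "/waku/2/rs/2/1", following the relay sharding
// topic format used in the tests in this diff.
const topic = routingInfo.pubsubTopic;

// One routing info object now feeds both sides, replacing the positional
// pubsubTopic string arguments used before this change.
const encoder = createEncoder({ contentTopic: "/test/0/one/proto", routingInfo });
const decoder = createDecoder("/test/0/one/proto", routingInfo);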
@ -1,5 +1,5 @@
import { IMessage, type LightNode } from "@waku/interfaces";
import { determinePubsubTopic } from "@waku/utils";
import { formatPubsubTopic } from "@waku/utils";
import { expect } from "chai";

import {
@ -14,7 +14,7 @@ import {
runStoreNodes,
TestDecoder,
TestDecoder2,
TestShardInfo
TestNetworkConfig
} from "./utils.js";

describe("Waku Store, error handling", function () {
@ -23,7 +23,7 @@ describe("Waku Store, error handling", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -68,7 +68,7 @@ describe("Waku Store, error handling", function () {
});

it("Query Generator, No message returned", async function () {
const WrongTestPubsubTopic = determinePubsubTopic("/test/1/wrong/utf8");
const WrongTestPubsubTopic = formatPubsubTopic(43, 53);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
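For reference, a sketch of the helper swap above; the returned string format is an assumption based on the shard topics used throughout this change.

import { formatPubsubTopic } from "@waku/utils";

// The test now fabricates a deliberately wrong topic from raw cluster and
// shard ids instead of deriving one from a content topic. formatPubsubTopic(43, 53)
// is assumed to yield "/waku/2/rs/43/53", a shard no node in the fixture serves.
const wrongTopic = formatPubsubTopic(43, 53);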
@ -14,6 +14,7 @@ import {
createDecoder as createSymDecoder,
createEncoder as createSymEncoder
} from "@waku/message-encryption/symmetric";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { equals } from "uint8arrays/equals";
@ -35,12 +36,11 @@ import {
runStoreNodes,
sendMessages,
startAndConnectLightNode,
TestContentTopic1,
TestContentTopic,
TestDecoder,
TestDecoder2,
TestEncoder,
TestPubsubTopic1,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";

@ -51,7 +51,7 @@ describe("Waku Store, general", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -63,13 +63,13 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
TestRoutingInfo.pubsubTopic
);

expect(messages?.length).eq(totalMsgs);
@ -89,7 +89,7 @@ describe("Waku Store, general", function () {
payload: utf8ToBytes(testItem["value"]),
contentTopic: TestDecoder.contentTopic
}),
TestDecoder.pubsubTopic
TestRoutingInfo
)
).to.eq(true);
await delay(1); // to ensure each timestamp is unique.
@ -99,7 +99,7 @@ describe("Waku Store, general", function () {
messageCollector.list = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
TestRoutingInfo.pubsubTopic
);

// checking that all message sent were retrieved
@ -111,57 +111,69 @@ describe("Waku Store, general", function () {
});

it("Query generator for multiple messages with multiple decoders", async function () {
const SecondDecoder = createDecoder(
TestDecoder2.contentTopic,
TestDecoder.pubsubTopic
);
const secondContentTopic = "/test/1/waku-store-two/utf8";
const secondRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: secondContentTopic
});
const secondDecoder = createDecoder(secondContentTopic, secondRoutingInfo);

await nwaku.sendMessage(
ServiceNode.toMessageRpcQuery({
payload: utf8ToBytes("M1"),
contentTopic: TestDecoder.contentTopic
contentTopic: TestContentTopic
}),
TestDecoder.pubsubTopic
TestRoutingInfo
);
await nwaku.sendMessage(
ServiceNode.toMessageRpcQuery({
payload: utf8ToBytes("M2"),
contentTopic: SecondDecoder.contentTopic
contentTopic: secondContentTopic
}),
SecondDecoder.pubsubTopic
secondRoutingInfo
);

const messageCollector = new MessageCollector(nwaku);
messageCollector.list = await processQueriedMessages(
waku,
[TestDecoder, SecondDecoder],
TestDecoder.pubsubTopic
[TestDecoder, secondDecoder],
TestRoutingInfo.pubsubTopic
);
expect(messageCollector.hasMessage(TestDecoder.contentTopic, "M1")).to.eq(
true
);
expect(messageCollector.hasMessage(SecondDecoder.contentTopic, "M2")).to.eq(
true
);
expect(messageCollector.hasMessage(secondContentTopic, "M2")).to.eq(true);
});

it("Query generator for multiple messages with different content topic format", async function () {
for (const testItem of TEST_STRING) {
if (testItem.invalidContentTopic) continue;

const contentTopic = `/test/1/${testItem.value}/proto`;
const routingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic
});
expect(
await nwaku.sendMessage(
ServiceNode.toMessageRpcQuery({
payload: utf8ToBytes(messageText),
contentTopic: testItem["value"]
contentTopic
}),
TestDecoder.pubsubTopic
routingInfo
)
).to.eq(true);
await delay(1); // to ensure each timestamp is unique.
}

for (const testItem of TEST_STRING) {
if (testItem.invalidContentTopic) continue;

const contentTopic = `/test/1/${testItem.value}/proto`;
const routingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic
});

for await (const query of waku.store.queryGenerator([
createDecoder(testItem["value"], TestDecoder.pubsubTopic)
createDecoder(contentTopic, routingInfo)
])) {
for await (const msg of query) {
expect(equals(msg!.payload, utf8ToBytes(messageText))).to.eq(true);
@ -175,7 +187,7 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const messages: IMessage[] = [];
@ -201,7 +213,7 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const desiredMsgs = 14;
@ -254,32 +266,28 @@ describe("Waku Store, general", function () {
const eciesEncoder = createEciesEncoder({
contentTopic: asymTopic,
publicKey,
pubsubTopic: TestPubsubTopic1
routingInfo: TestRoutingInfo
});
const symEncoder = createSymEncoder({
contentTopic: symTopic,
symKey,
pubsubTopic: TestPubsubTopic1
routingInfo: TestRoutingInfo
});

const otherEncoder = createEciesEncoder({
contentTopic: TestContentTopic1,
pubsubTopic: TestPubsubTopic1,
contentTopic: TestContentTopic,
routingInfo: TestRoutingInfo,
publicKey: getPublicKey(generatePrivateKey())
});

const eciesDecoder = createEciesDecoder(
asymTopic,
privateKey,
TestDecoder.pubsubTopic
);
const symDecoder = createSymDecoder(
symTopic,
symKey,
TestDecoder.pubsubTopic
TestRoutingInfo,
privateKey
);
const symDecoder = createSymDecoder(symTopic, TestRoutingInfo, symKey);

waku2 = await startAndConnectLightNode(nwaku, TestShardInfo);
waku2 = await startAndConnectLightNode(nwaku, TestNetworkConfig);
const nimWakuMultiaddr = await nwaku.getMultiaddrWithId();
await waku2.dial(nimWakuMultiaddr);

@ -320,7 +328,7 @@ describe("Waku Store, general", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const desiredMsgs = 14;
@ -339,17 +347,12 @@ describe("Waku Store, general", function () {

it("Query generator for 2000 messages", async function () {
this.timeout(40000);
await sendMessages(
nwaku,
2000,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(nwaku, 2000, TestDecoder.contentTopic, TestRoutingInfo);

const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
TestRoutingInfo.pubsubTopic
);

expect(messages?.length).eq(2000);
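The encrypted encoder/decoder changes above all follow one pattern: the positional pubsubTopic string becomes a routingInfo object, and key material moves after it in the decoder signatures. A hedged sketch of the new shape; the import path for the key helpers is an assumption.

import {
createDecoder as createEciesDecoder,
createEncoder as createEciesEncoder
} from "@waku/message-encryption/ecies";
import { createDecoder as createSymDecoder } from "@waku/message-encryption/symmetric";
import { generatePrivateKey, generateSymmetricKey, getPublicKey } from "@waku/message-encryption";
import { AutoSharding } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";

// AutoSharding config mirroring the store test utils (clusterId 5, 8 shards).
const networkConfig: AutoSharding = { clusterId: 5, numShardsInCluster: 8 };
const contentTopic = "/test/1/waku-store/utf8";
const routingInfo = createRoutingInfo(networkConfig, { contentTopic });

const privateKey = generatePrivateKey();
const symKey = generateSymmetricKey();

// New signatures, as used in the diff: routing info second, keys last.
const eciesDecoder = createEciesDecoder(contentTopic, routingInfo, privateKey);
const symDecoder = createSymDecoder(contentTopic, routingInfo, symKey);
const eciesEncoder = createEciesEncoder({
contentTopic,
routingInfo,
publicKey: getPublicKey(privateKey)
});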
@ -1,5 +1,6 @@
import { messageHash } from "@waku/core";
import type { IDecodedMessage, LightNode } from "@waku/interfaces";
import { RoutingInfo } from "@waku/utils";
import { expect } from "chai";

import {
@ -13,7 +14,9 @@ import {
runStoreNodes,
sendMessages,
TestDecoder,
TestShardInfo,
TestNetworkConfig,
TestPubsubTopic,
TestRoutingInfo,
totalMsgs
} from "./utils.js";

@ -23,7 +26,7 @@ describe("Waku Store, message hash query", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -35,7 +38,7 @@ describe("Waku Store, message hash query", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic,
TestDecoder.routingInfo as RoutingInfo,
true
);

@ -54,11 +57,11 @@ describe("Waku Store, message hash query", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic,
TestRoutingInfo,
true
);
const messageHashes = sentMessages.map((msg) =>
messageHash(TestDecoder.pubsubTopic, {
messageHash(TestRoutingInfo.pubsubTopic, {
payload: Buffer.from(msg.payload, "base64"),
contentTopic: msg.contentTopic || TestDecoder.contentTopic,
timestamp: msg.timestamp || undefined,
@ -72,7 +75,7 @@ describe("Waku Store, message hash query", function () {
const messages: IDecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder], {
messageHashes,
pubsubTopic: TestDecoder.pubsubTopic
pubsubTopic: TestPubsubTopic
})) {
for await (const msg of page) {
messages.push(msg as IDecodedMessage);
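A sketch of the hash computation the query above depends on. The deterministic message hash covers the pubsub topic plus the message fields, and the routing info now supplies that topic; fields beyond those visible in the diff are set to undefined on the assumption the proto message type requires them.

import { messageHash } from "@waku/core";
import { utf8ToBytes } from "@waku/utils/bytes";

// e.g. TestRoutingInfo.pubsubTopic; illustrative value only.
const pubsubTopic = "/waku/2/rs/5/3";
const hash = messageHash(pubsubTopic, {
payload: utf8ToBytes("M1"),
contentTopic: "/test/1/waku-store/utf8",
timestamp: undefined,
meta: undefined,
rateLimitProof: undefined,
ephemeral: undefined,
version: undefined
});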
@ -1,438 +0,0 @@
import { createDecoder } from "@waku/core";
import type { ContentTopicInfo, IMessage, LightNode } from "@waku/interfaces";
import { createLightNode, Protocols } from "@waku/sdk";
import {
contentTopicToPubsubTopic,
pubsubTopicToSingleShardInfo
} from "@waku/utils";
import { expect } from "chai";

import {
afterEachCustom,
beforeEachCustom,
makeLogFileName,
NOISE_KEY_1,
ServiceNode,
tearDownNodes
} from "../../src/index.js";

import {
processQueriedMessages,
runStoreNodes,
sendMessages,
sendMessagesAutosharding,
TestDecoder,
TestDecoder2,
TestShardInfo,
totalMsgs
} from "./utils.js";

describe("Waku Store, custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
});

afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});

it("Generator, custom pubsub topic", async function () {
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);

const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);

expect(messages?.length).eq(totalMsgs);
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});

it("Generator, 2 different pubsubtopics", async function () {
this.timeout(10000);

const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);

const customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);

const testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});

it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
this.timeout(10000);

await tearDownNodes([nwaku], []);

// make sure each nwaku node operates on dedicated shard only
nwaku = new ServiceNode(makeLogFileName(this) + "1");
await nwaku.start({
store: true,
clusterId: TestShardInfo.clusterId,
shard: [TestShardInfo.shards[0]],
relay: true
});

// Set up and start a new nwaku node with Default Pubsubtopic
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
clusterId: TestShardInfo.clusterId,
shard: [TestShardInfo.shards[1]],
relay: true
});

const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku2,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);

await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);

let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];

while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
}
});
});

// TODO: blocked by https://github.com/waku-org/nwaku/issues/3362
describe.skip("Waku Store (Autosharding), custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;

const customContentTopic1 = "/waku/2/content/utf8";
const customContentTopic2 = "/myapp/1/latest/proto";
const clusterId = 5;
const Shard2 = [1];
const autoshardingPubsubTopic1 = contentTopicToPubsubTopic(
customContentTopic1,
clusterId
);
const autoshardingPubsubTopic2 = contentTopicToPubsubTopic(
customContentTopic2,
clusterId
);
const customDecoder1 = createDecoder(
customContentTopic1,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
);
const customDecoder2 = createDecoder(
customContentTopic2,
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
);
const contentTopicInfoBothShards: ContentTopicInfo = {
clusterId,
contentTopics: [customContentTopic1, customContentTopic2]
};

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, contentTopicInfoBothShards);
});

afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});

it("Generator, custom pubsub topic", async function () {
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);

const messages = await processQueriedMessages(
waku,
[customDecoder1],
autoshardingPubsubTopic1
);

expect(messages?.length).eq(totalMsgs);
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});

it("Generator, 2 different pubsubtopics", async function () {
this.timeout(10000);

const totalMsgs = 10;
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic2);

const customMessages = await processQueriedMessages(
waku,
[customDecoder1],
autoshardingPubsubTopic1
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);

const testMessages = await processQueriedMessages(
waku,
[customDecoder2],
autoshardingPubsubTopic2
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});

it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
this.timeout(10000);

// Set up and start a new nwaku node with Default Pubsubtopic
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
contentTopic: [customContentTopic2],
relay: true,
clusterId,
shard: Shard2
});
await nwaku2.ensureSubscriptionsAutosharding([customContentTopic2]);

const totalMsgs = 10;
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
await sendMessagesAutosharding(nwaku2, totalMsgs, customContentTopic2);

waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: contentTopicInfoBothShards
});
await waku.start();

await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);

let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];

while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[customDecoder1],
autoshardingPubsubTopic1
);
testMessages = await processQueriedMessages(
waku,
[customDecoder2],
autoshardingPubsubTopic2
);
}
});
});

describe("Waku Store (named sharding), custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: ServiceNode;
let nwaku2: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
});

afterEachCustom(this, async () => {
await tearDownNodes([nwaku, nwaku2], waku);
});

it("Generator, custom pubsub topic", async function () {
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);

const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);

expect(messages?.length).eq(totalMsgs);
const result = messages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result).to.not.eq(-1);
});

it("Generator, 2 different pubsubtopics", async function () {
this.timeout(10000);

const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);

const customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
expect(customMessages?.length).eq(totalMsgs);
const result1 = customMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result1).to.not.eq(-1);

const testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
expect(testMessages?.length).eq(totalMsgs);
const result2 = testMessages?.findIndex((msg) => {
return msg.payload![0]! === 0;
});
expect(result2).to.not.eq(-1);
});

it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
this.timeout(10000);

await tearDownNodes([nwaku], []);

// make sure each nwaku node operates on dedicated shard only
nwaku = new ServiceNode(makeLogFileName(this) + "1");
await nwaku.start({
store: true,
clusterId: TestShardInfo.clusterId,
shard: [TestShardInfo.shards[0]],
relay: true
});

// Set up and start a new nwaku node with Default Pubsubtopic
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
await nwaku2.start({
store: true,
relay: true,
clusterId: TestShardInfo.clusterId,
shard: TestShardInfo.shards
});
await nwaku2.ensureSubscriptions([TestDecoder2.pubsubTopic]);

const totalMsgs = 10;
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(
nwaku2,
totalMsgs,
TestDecoder2.contentTopic,
TestDecoder2.pubsubTopic
);

await waku.dial(await nwaku.getMultiaddrWithId());
await waku.dial(await nwaku2.getMultiaddrWithId());
await waku.waitForPeers([Protocols.Store]);

let customMessages: IMessage[] = [];
let testMessages: IMessage[] = [];

while (
customMessages.length != totalMsgs ||
testMessages.length != totalMsgs
) {
customMessages = await processQueriedMessages(
waku,
[TestDecoder],
TestDecoder.pubsubTopic
);
testMessages = await processQueriedMessages(
waku,
[TestDecoder2],
TestDecoder2.pubsubTopic
);
}
});
});
@ -13,7 +13,8 @@ import {
runStoreNodes,
sendMessages,
TestDecoder,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";

@ -23,7 +24,7 @@ describe("Waku Store, order", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -36,7 +37,7 @@ describe("Waku Store, order", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const messages: IMessage[] = [];
@ -64,7 +65,7 @@ describe("Waku Store, order", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const messages: IMessage[] = [];
@ -95,7 +96,7 @@ describe("Waku Store, order", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const messages: IMessage[] = [];
@ -12,7 +12,8 @@ import {
runStoreNodes,
sendMessages,
TestDecoder,
TestShardInfo
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";

describe("Waku Store, page size", function () {
@ -21,7 +22,7 @@ describe("Waku Store, page size", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -42,7 +43,7 @@ describe("Waku Store, page size", function () {
nwaku,
messageCount,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

// Determine effectivePageSize for test expectations
@ -77,12 +78,7 @@ describe("Waku Store, page size", function () {

// Possible issue here because pageSize differs across implementations
it("Default pageSize", async function () {
await sendMessages(
nwaku,
20,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
await sendMessages(nwaku, 20, TestDecoder.contentTopic, TestRoutingInfo);

let messagesRetrieved = 0;
for await (const query of waku.store.queryGenerator([TestDecoder])) {
@ -12,7 +12,8 @@ import {
runStoreNodes,
sendMessages,
TestDecoder,
TestShardInfo,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";

@ -22,7 +23,7 @@ describe("Waku Store, sorting", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -35,7 +36,7 @@ describe("Waku Store, sorting", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const pages: IMessage[][] = [];
@ -96,7 +97,7 @@ describe("Waku Store, sorting", function () {
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
TestRoutingInfo
);

const messages: IMessage[] = [];
@ -12,7 +12,8 @@ import {
adjustDate,
runStoreNodes,
TestDecoder,
TestShardInfo
TestNetworkConfig,
TestRoutingInfo
} from "./utils.js";

describe("Waku Store, time filter", function () {
@ -21,7 +22,7 @@ describe("Waku Store, time filter", function () {
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
@ -49,7 +50,8 @@ describe("Waku Store, time filter", function () {
payload: new Uint8Array([0]),
contentTopic: TestDecoder.contentTopic,
timestamp: msgTimestamp
})
}),
TestRoutingInfo
)
).to.eq(true);

@ -90,7 +92,8 @@ describe("Waku Store, time filter", function () {
payload: new Uint8Array([0]),
contentTopic: TestDecoder.contentTopic,
timestamp: msgTimestamp
})
}),
TestRoutingInfo
)
).to.eq(true);
@ -5,14 +5,15 @@ import {
Decoder
} from "@waku/core";
import {
type AutoSharding,
ContentTopic,
LightNode,
NetworkConfig,
type NetworkConfig,
Protocols,
ShardInfo,
type SingleShardInfo
ShardId
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { Logger, singleShardInfoToPubsubTopic } from "@waku/utils";
import { createRoutingInfo, Logger, RoutingInfo } from "@waku/utils";
import { expect } from "chai";
import { Context } from "mocha";

@ -21,27 +22,31 @@ import { MessageRpcQuery } from "../../src/types.js";

export const log = new Logger("test:store");

export const TestClusterId = 3;
export const TestShardInfo: ShardInfo = {
export const TestClusterId = 5;
export const TestNetworkConfig: AutoSharding = {
clusterId: TestClusterId,
shards: [1, 2]
numShardsInCluster: 8
};

export const TestShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 };
export const TestPubsubTopic1 = singleShardInfoToPubsubTopic(TestShardInfo1);

export const TestShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 };
export const TestPubsubTopic2 = singleShardInfoToPubsubTopic(TestShardInfo2);

export const TestContentTopic1 = "/test/1/waku-store/utf8";
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic1,
pubsubTopicShardInfo: TestShardInfo1
export const TestContentTopic = "/test/1/waku-store/utf8";
export const TestRoutingInfo = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic
});
export const TestDecoder = createDecoder(TestContentTopic1, TestPubsubTopic1);

export const TestContentTopic2 = "/test/3/waku-store/utf8";
export const TestDecoder2 = createDecoder(TestContentTopic2, TestPubsubTopic2);
export const TestPubsubTopic = TestRoutingInfo.pubsubTopic;

export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
routingInfo: TestRoutingInfo
});
export const TestDecoder = createDecoder(TestContentTopic, TestRoutingInfo);

export const TestContentTopic2 = "/test/12/waku-store/utf8";
export const TestRoutingInfo2 = createRoutingInfo(TestNetworkConfig, {
contentTopic: TestContentTopic2
});

export const TestDecoder2 = createDecoder(TestContentTopic2, TestRoutingInfo2);

export const totalMsgs = 20;
export const messageText = "Store Push works!";
@ -50,7 +55,7 @@ export async function sendMessages(
instance: ServiceNode,
numMessages: number,
contentTopic: string,
pubsubTopic: string,
routingInfo: RoutingInfo,
timestamp: boolean = false
): Promise<MessageRpcQuery[]> {
const messages: MessageRpcQuery[] = new Array<MessageRpcQuery>(numMessages);
@ -60,30 +65,12 @@ export async function sendMessages(
contentTopic: contentTopic,
timestamp: timestamp ? new Date() : undefined
});
expect(await instance.sendMessage(messages[i], pubsubTopic)).to.eq(true);
expect(await instance.sendMessage(messages[i], routingInfo)).to.eq(true);
await delay(1); // to ensure each timestamp is unique.
}
return messages;
}

export async function sendMessagesAutosharding(
instance: ServiceNode,
numMessages: number,
contentTopic: string
): Promise<void> {
for (let i = 0; i < numMessages; i++) {
expect(
await instance.sendMessageAutosharding(
ServiceNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: contentTopic
})
)
).to.eq(true);
await delay(1); // to ensure each timestamp is unique.
}
}

export async function processQueriedMessages(
instance: LightNode,
decoders: Array<Decoder>,
@ -126,17 +113,6 @@ export async function startAndConnectLightNode(
return waku;
}

export function chunkAndReverseArray(
arr: number[],
chunkSize: number
): number[] {
const result: number[] = [];
for (let i = 0; i < arr.length; i += chunkSize) {
result.push(...arr.slice(i, i + chunkSize).reverse());
}
return result.reverse();
}

export const adjustDate = (baseDate: Date, adjustMs: number): Date => {
const adjusted = new Date(baseDate);
adjusted.setTime(adjusted.getTime() + adjustMs);
@ -145,11 +121,15 @@ export const adjustDate = (baseDate: Date, adjustMs: number): Date => {

export const runStoreNodes = (
context: Context,
networkConfig: NetworkConfig
networkConfig: NetworkConfig,
shardIds?: ShardId[],
contentTopics?: ContentTopic[]
): Promise<[ServiceNode, LightNode]> =>
runNodes({
context,
networkConfig,
createNode: createLightNode,
relayShards: shardIds,
contentTopics,
protocols: [Protocols.Store]
});
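Putting the reworked helpers above together, a sketch of the pattern the store suites in this diff now share; all names are exports visible in this change.

import { LightNode } from "@waku/interfaces";
import { expect } from "chai";

import { afterEachCustom, beforeEachCustom, ServiceNode, tearDownNodes } from "../../src/index.js";
import {
processQueriedMessages,
runStoreNodes,
sendMessages,
TestContentTopic,
TestDecoder,
TestNetworkConfig,
TestRoutingInfo,
totalMsgs
} from "./utils.js";

describe("Waku Store, routing info round trip (sketch)", function () {
let waku: LightNode;
let nwaku: ServiceNode;

beforeEachCustom(this, async () => {
// The network config drives sharding; TestRoutingInfo belongs to it.
[nwaku, waku] = await runStoreNodes(this.ctx, TestNetworkConfig);
});

afterEachCustom(this, async () => {
await tearDownNodes([nwaku], waku);
});

it("retrieves what was sent", async function () {
await sendMessages(nwaku, totalMsgs, TestContentTopic, TestRoutingInfo);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
TestRoutingInfo.pubsubTopic
);
expect(messages?.length).to.eq(totalMsgs);
});
});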
@ -2,12 +2,16 @@ import type { LightNode, RelayNode } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import { createLightNode } from "@waku/sdk";
import { formatPubsubTopic } from "@waku/utils";
import { expect } from "chai";

import {
afterEachCustom,
DefaultTestPubsubTopic,
DefaultTestShardInfo,
DefaultTestClusterId,
DefaultTestContentTopic,
DefaultTestNetworkConfig,
DefaultTestNumShardsInCluster,
DefaultTestRoutingInfo,
delay,
makeLogFileName,
NOISE_KEY_1,
@ -15,11 +19,7 @@ import {
tearDownNodes
} from "../src/index.js";

import {
runRelayNodes,
TestPubsubTopic,
TestShardInfo
} from "./relay/utils.js";
import { runRelayNodes } from "./relay/utils.js";

describe("Wait for remote peer", function () {
let waku1: RelayNode;
@ -32,10 +32,15 @@ describe("Wait for remote peer", function () {

it("Relay - dialed first", async function () {
this.timeout(20_000);
[nwaku, waku1] = await runRelayNodes(this, TestShardInfo);
[nwaku, waku1] = await runRelayNodes(
this,
DefaultTestNetworkConfig,
undefined,
[DefaultTestContentTopic]
);
const multiAddrWithId = await nwaku.getMultiaddrWithId();

const peers = waku1.relay.getMeshPeers(TestPubsubTopic);
const peers = waku1.relay.getMeshPeers(DefaultTestRoutingInfo.pubsubTopic);
const nimPeerId = multiAddrWithId.getPeerId();

expect(nimPeerId).to.not.be.undefined;
@ -50,14 +55,16 @@ describe("Wait for remote peer", function () {
store: false,
filter: false,
lightpush: false,
clusterId: DefaultTestShardInfo.clusterId,
shard: DefaultTestShardInfo.shards
clusterId: DefaultTestClusterId,
numShardsInNetwork: DefaultTestNumShardsInCluster,
contentTopic: [DefaultTestContentTopic]
});
const multiAddrWithId = await nwaku.getMultiaddrWithId();

waku1 = await createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
});
await waku1.start();

@ -66,7 +73,7 @@ describe("Wait for remote peer", function () {
await waku1.dial(multiAddrWithId);
await waitPromise;

const peers = waku1.relay.getMeshPeers(DefaultTestPubsubTopic);
const peers = waku1.relay.getMeshPeers(DefaultTestRoutingInfo.pubsubTopic);
const nimPeerId = multiAddrWithId.getPeerId();

expect(nimPeerId).to.not.be.undefined;
@ -77,7 +84,8 @@ describe("Wait for remote peer", function () {
this.timeout(5000);
createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
})
.then((waku1) => waku1.start().then(() => waku1))
.then((waku1) => {
@ -109,7 +117,7 @@ describe("Wait for remote peer", function () {

waku2 = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku2.start();
await waku2.dial(multiAddrWithId);
@ -138,7 +146,7 @@ describe("Wait for remote peer", function () {

waku2 = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku2.start();
const waitPromise = waku2.waitForPeers([Protocols.Store], 2000);
@ -169,7 +177,7 @@ describe("Wait for remote peer", function () {

waku2 = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku2.start();
await waku2.dial(multiAddrWithId);
@ -198,7 +206,7 @@ describe("Wait for remote peer", function () {

waku2 = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku2.start();
await waku2.dial(multiAddrWithId);
@ -228,7 +236,7 @@ describe("Wait for remote peer", function () {

waku2 = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku2.start();
await waku2.dial(multiAddrWithId);
@ -248,12 +256,12 @@ describe("Wait for remote peer", function () {
expect(peers.includes(nimPeerId as string)).to.be.true;
});

it("Privacy Node - default protocol", async function () {
it("Relay Node - default protocol", async function () {
this.timeout(20_000);
[nwaku, waku1] = await runRelayNodes(this, TestShardInfo);
[nwaku, waku1] = await runRelayNodes(this, { clusterId: 0 }, [0]);
const multiAddrWithId = await nwaku.getMultiaddrWithId();

const peers = waku1.relay.getMeshPeers(TestPubsubTopic);
const peers = waku1.relay.getMeshPeers(formatPubsubTopic(0, 0));

const nimPeerId = multiAddrWithId.getPeerId();
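The relay changes above introduce a routingInfos option: relay nodes now declare up front which shards they join, and mesh peers are looked up by the pubsub topic the routing info derives. A sketch of the new construction; the config values are illustrative.

import { AutoSharding } from "@waku/interfaces";
import { createRelayNode } from "@waku/relay";
import { createRoutingInfo } from "@waku/utils";

const networkConfig: AutoSharding = { clusterId: 0, numShardsInCluster: 8 };
const routingInfo = createRoutingInfo(networkConfig, {
contentTopic: "/test/0/waku/proto"
});

const waku = await createRelayNode({
networkConfig,
routingInfos: [routingInfo]
});
await waku.start();
// Mesh membership is keyed by the derived shard topic.
const meshPeers = waku.relay.getMeshPeers(routingInfo.pubsubTopic);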
@ -17,14 +17,15 @@ import {
createLightNode,
createEncoder as createPlainEncoder
} from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

import {
afterEachCustom,
beforeEachCustom,
DefaultTestShardInfo,
DefaultTestSingleShardInfo,
DefaultTestNetworkConfig,
DefaultTestRoutingInfo,
makeLogFileName,
NOISE_KEY_1,
NOISE_KEY_2,
@ -33,8 +34,13 @@ import {
} from "../src/index.js";

const TestContentTopic = "/test/1/waku/utf8";

const TestEncoder = createPlainEncoder({ contentTopic: TestContentTopic });
const TestRoutingInfo = createRoutingInfo(DefaultTestNetworkConfig, {
contentTopic: TestContentTopic
});
const TestEncoder = createPlainEncoder({
contentTopic: TestContentTopic,
routingInfo: TestRoutingInfo
});

describe("Waku Dial [node only]", function () {
describe("Interop: ServiceNode", function () {
@ -57,7 +63,7 @@ describe("Waku Dial [node only]", function () {

waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku.start();
await waku.dial(multiAddrWithId);
@ -91,7 +97,7 @@ describe("Waku Dial [node only]", function () {

waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig
});
await waku.start();
await waku.dial(multiAddrWithId);
@ -119,7 +125,7 @@ describe("Waku Dial [node only]", function () {
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo,
networkConfig: DefaultTestNetworkConfig,
libp2p: {
peerDiscovery: [bootstrap({ list: [multiAddrWithId.toString()] })]
}
@ -145,7 +151,7 @@ describe("Waku Dial [node only]", function () {

waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo,
networkConfig: DefaultTestNetworkConfig,
libp2p: {
peerDiscovery: [bootstrap({ list: [nwakuMa.toString()] })]
}
@ -177,11 +183,13 @@ describe("Decryption Keys", function () {
[waku1, waku2] = await Promise.all([
createRelayNode({
staticNoiseKey: NOISE_KEY_1,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
staticNoiseKey: NOISE_KEY_2,
networkConfig: DefaultTestShardInfo,
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo],
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku))
]);
@ -205,15 +213,11 @@ describe("Decryption Keys", function () {
this.timeout(10000);

const symKey = generateSymmetricKey();
const decoder = createDecoder(
TestContentTopic,
symKey,
DefaultTestSingleShardInfo
);
const decoder = createDecoder(TestContentTopic, TestRoutingInfo, symKey);

const encoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopicShardInfo: DefaultTestSingleShardInfo,
routingInfo: TestRoutingInfo,
symKey
});

@ -257,11 +261,13 @@ describe("User Agent", function () {
createRelayNode({
staticNoiseKey: NOISE_KEY_1,
userAgent: waku1UserAgent,
networkConfig: DefaultTestShardInfo
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo]
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
staticNoiseKey: NOISE_KEY_2,
networkConfig: DefaultTestShardInfo,
networkConfig: DefaultTestNetworkConfig,
routingInfos: [DefaultTestRoutingInfo],
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku))
]);
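In the Decryption Keys test above, encoder and decoder share both the key and the routing info, with the key now coming after the routing info in the decoder signature. A minimal sketch; the import path for generateSymmetricKey is assumed.

import {
createDecoder,
createEncoder,
generateSymmetricKey
} from "@waku/message-encryption/symmetric";
import { AutoSharding } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";

const networkConfig: AutoSharding = { clusterId: 0, numShardsInCluster: 8 };
const contentTopic = "/test/1/waku/utf8";
const routingInfo = createRoutingInfo(networkConfig, { contentTopic });

const symKey = generateSymmetricKey();

// Both sides are bound to the same shard via routingInfo.
const encoder = createEncoder({ contentTopic, routingInfo, symKey });
const decoder = createDecoder(contentTopic, routingInfo, symKey);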
@ -1,19 +1,17 @@
import { DEFAULT_CLUSTER_ID, NetworkConfig } from "@waku/interfaces";
import { DEFAULT_CLUSTER_ID } from "@waku/interfaces";
import { expect } from "chai";

import {
contentTopicsByPubsubTopic,
contentTopicToPubsubTopic,
contentTopicToShardIndex,
determinePubsubTopic,
ensureShardingConfigured,
ensureValidContentTopic,
pubsubTopicToSingleShardInfo,
shardInfoToPubsubTopics,
singleShardInfosToShardInfo,
singleShardInfoToPubsubTopic
pubsubTopicToSingleShardInfo
} from "./index.js";

const ClusterId = 0;
const NumShardsInCluster = 8;

const testInvalidCases = (
contentTopics: string[],
expectedError: string
@ -117,7 +115,9 @@ describe("contentTopicToShardIndex", () => {
];
contentTopicsWithExpectedShards.forEach(([topic, expectedShard]) => {
it(`should correctly map ${topic} to shard index ${expectedShard}`, () => {
expect(contentTopicToShardIndex(topic)).to.eq(expectedShard);
expect(contentTopicToShardIndex(topic, NumShardsInCluster)).to.eq(
expectedShard
);
});
});

@ -142,8 +142,8 @@ describe("contentTopicToShardIndex", () => {
["/waku/2/content/test.js", "/waku/2/users/proto"]
];
for (const [topic1, topic2] of contentTopics) {
expect(contentTopicToShardIndex(topic1)).to.eq(
contentTopicToShardIndex(topic2)
expect(contentTopicToShardIndex(topic1, NumShardsInCluster)).to.eq(
contentTopicToShardIndex(topic2, NumShardsInCluster)
);
}
});

@ -152,9 +152,15 @@
describe("contentTopicsByPubsubTopic", () => {
it("groups content topics by expected pubsub topic", () => {
const contentTopics = ["/toychat/2/huilong/proto", "/myapp/1/latest/proto"];
const grouped = contentTopicsByPubsubTopic(contentTopics);
const grouped = contentTopicsByPubsubTopic(
contentTopics,
ClusterId,
NumShardsInCluster
);

for (const contentTopic of contentTopics) {
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
const pubsubTopic = contentTopicToPubsubTopic(contentTopic, 0, 8);

expect(grouped.get(pubsubTopic)?.includes(contentTopic)).to.be.true;
}
});
@ -164,25 +170,39 @@ describe("contentTopicsByPubsubTopic", () => {
"/app/22/sometopic/someencoding",
"/app/22/anothertopic/otherencoding"
];
const grouped = contentTopicsByPubsubTopic(contentTopics);
const grouped = contentTopicsByPubsubTopic(
contentTopics,
ClusterId,
NumShardsInCluster
);
expect(grouped.size).to.eq(1); // Only one pubsub topic expected
const pubsubTopic = contentTopicToPubsubTopic(contentTopics[0]);
const pubsubTopic = contentTopicToPubsubTopic(contentTopics[0], 0, 8);
expect(grouped.get(pubsubTopic)?.length).to.eq(2); // Both topics should be grouped under the same pubsub topic
});

it("handles different clusterIds correctly", () => {
const contentTopics = ["/app/22/sometopic/someencoding"];
const clusterId1 = 1;
const clusterId1 = 3;
const clusterId2 = 2;
const grouped1 = contentTopicsByPubsubTopic(contentTopics, clusterId1);
const grouped2 = contentTopicsByPubsubTopic(contentTopics, clusterId2);
const grouped1 = contentTopicsByPubsubTopic(
contentTopics,
clusterId1,
NumShardsInCluster
);
const grouped2 = contentTopicsByPubsubTopic(
contentTopics,
clusterId2,
NumShardsInCluster
);
const pubsubTopic1 = contentTopicToPubsubTopic(
contentTopics[0],
clusterId1
clusterId1,
8
);
const pubsubTopic2 = contentTopicToPubsubTopic(
contentTopics[0],
clusterId2
clusterId2,
8
);
expect(pubsubTopic1).not.to.equal(pubsubTopic2);
expect(grouped1.has(pubsubTopic1)).to.be.true;
@ -224,96 +244,13 @@ describe("contentTopicsByPubsubTopic", () => {

it("throws an error for improperly formatted content topics", () => {
const invalidContentTopics = ["/invalid/format"];
expect(() => contentTopicsByPubsubTopic(invalidContentTopics)).to.throw();
});
});

describe("singleShardInfoToPubsubTopic", () => {
it("should convert a SingleShardInfo object to the correct PubsubTopic", () => {
const singleShardInfo = { clusterId: 2, shard: 2 };
const expectedTopic = "/waku/2/rs/2/2";
expect(singleShardInfoToPubsubTopic(singleShardInfo)).to.equal(
expectedTopic
);
});
});

describe("singleShardInfosToShardInfo", () => {
it("should aggregate SingleShardInfos into a ShardInfo", () => {
const singleShardInfos = [
{ clusterId: 1, shard: 2 },
{ clusterId: 1, shard: 3 },
{ clusterId: 1, shard: 5 }
];
const expectedShardInfo = { clusterId: 1, shards: [2, 3, 5] };
expect(singleShardInfosToShardInfo(singleShardInfos)).to.deep.equal(
expectedShardInfo
);
});

it("should throw an error for empty SingleShardInfos array", () => {
expect(() => singleShardInfosToShardInfo([])).to.throw("Invalid shard");
});

it("should throw an error for SingleShardInfos with different clusterIds", () => {
const invalidShardInfos = [
{ clusterId: 1, shard: 2 },
{ clusterId: 2, shard: 3 }
];
expect(() => singleShardInfosToShardInfo(invalidShardInfos)).to.throw(
"Passed shard infos have different clusterIds"
);
});
});

describe("shardInfoToPubsubTopics", () => {
it("should convert content topics to PubsubTopics for autosharding", () => {
const shardInfo = {
contentTopics: ["/app/v1/topic1/proto", "/app/v1/topic2/proto"]
};
const topics = shardInfoToPubsubTopics(shardInfo);
expect(topics).to.be.an("array").that.includes("/waku/2/rs/1/4");
expect(topics.length).to.equal(1);
});

it("should return unique PubsubTopics for static sharding", () => {
const shardInfo = { clusterId: 1, shards: [0, 1, 0] }; // Duplicate shard to test uniqueness
const topics = shardInfoToPubsubTopics(shardInfo);
expect(topics).to.have.members(["/waku/2/rs/1/0", "/waku/2/rs/1/1"]);
expect(topics.length).to.equal(2);
});

[0, 1, 6].forEach((clusterId) => {
it(`should handle clusterId, application and version for autosharding with cluster iD ${clusterId}`, () => {
const shardInfo = {
clusterId: clusterId,
application: "app",
version: "v1"
};
const topics = shardInfoToPubsubTopics(shardInfo);
expect(topics)
.to.be.an("array")
.that.includes(`/waku/2/rs/${clusterId}/4`);
expect(topics.length).to.equal(1);
});
});

it("should return empty list for no shard", () => {
const shardInfo = { clusterId: 1, shards: [] };
const topics = shardInfoToPubsubTopics(shardInfo);
expect(topics.length).to.equal(0);
});

it("should throw an error if shards are undefined for static sharding", () => {
const shardInfo = { clusterId: 1, shards: undefined };
expect(() => shardInfoToPubsubTopics(shardInfo)).to.throw("Invalid shard");
});

it("should throw an error for missing required configuration", () => {
const shardInfo = {};
expect(() => shardInfoToPubsubTopics(shardInfo)).to.throw(
"Missing required configuration in shard parameters"
);
expect(() =>
contentTopicsByPubsubTopic(
invalidContentTopics,
ClusterId,
NumShardsInCluster
)
).to.throw();
});
});
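The round trip these sharding tests pin down, sketched below; treating formatPubsubTopic as the inverse of pubsubTopicToSingleShardInfo is an assumption consistent with the expectations in this file.

import { formatPubsubTopic, pubsubTopicToSingleShardInfo } from "@waku/utils";

// "/waku/2/rs/2/2" parses back to its cluster and shard ids.
const topic = formatPubsubTopic(2, 2); // assumed "/waku/2/rs/2/2"
const info = pubsubTopicToSingleShardInfo(topic); // { clusterId: 2, shard: 2 }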
@ -327,8 +264,8 @@ describe("pubsubTopicToSingleShardInfo with various invalid formats", () => {
|
||||
];
|
||||
|
||||
it("should extract SingleShardInfo from a valid PubsubTopic", () => {
|
||||
const topic = "/waku/2/rs/1/2";
|
||||
const expectedInfo = { clusterId: 1, shard: 2 };
|
||||
const topic = "/waku/2/rs/2/2";
|
||||
const expectedInfo = { clusterId: 2, shard: 2 };
|
||||
expect(pubsubTopicToSingleShardInfo(topic)).to.deep.equal(expectedInfo);
|
||||
});
|
||||
|
||||
@ -356,114 +293,77 @@ describe("pubsubTopicToSingleShardInfo with various invalid formats", () => {
|
||||
});
|
||||
});

describe("determinePubsubTopic", () => {
  const contentTopic = "/app/46/sometopic/someencoding";
  it("should return the pubsub topic directly if a string is provided", () => {
    const topic = "/waku/2/rs/1/3";
    expect(determinePubsubTopic(contentTopic, topic)).to.equal(topic);
  });

  it("should return a calculated topic if SingleShardInfo is provided", () => {
    const info = { clusterId: 1, shard: 2 };
    const expectedTopic = "/waku/2/rs/1/2";
    expect(determinePubsubTopic(contentTopic, info)).to.equal(expectedTopic);
  });

  it("should fall back to default pubsub topic when pubsubTopicShardInfo is not provided", () => {
    expect(determinePubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/6");
  });

  it("should process correctly when SingleShardInfo has no clusterId but has a shard", () => {
    const info = { shard: 0 };
    const expectedTopic = `/waku/2/rs/${DEFAULT_CLUSTER_ID}/0`;
    expect(determinePubsubTopic(contentTopic, info as any)).to.equal(
      expectedTopic
    );
  });

  it("should derive a pubsub topic using contentTopic when SingleShardInfo only contains clusterId", () => {
    const info = { clusterId: 2 };
    const expectedTopic = contentTopicToPubsubTopic(
      contentTopic,
      info.clusterId
    );
    expect(determinePubsubTopic(contentTopic, info as any)).to.equal(
      expectedTopic
    );
  });
});

describe("ensureShardingConfigured", () => {
  it("should return valid sharding parameters for static sharding", () => {
    const shardInfo = { clusterId: 1, shards: [0, 1] };
    const result = ensureShardingConfigured(shardInfo);
    expect(result.shardInfo).to.deep.include({
      clusterId: 1,
      shards: [0, 1]
    });
    expect(result.shardInfo).to.deep.include({ clusterId: 1, shards: [0, 1] });
    expect(result.pubsubTopics).to.have.members([
      "/waku/2/rs/1/0",
      "/waku/2/rs/1/1"
    ]);
  });

  it("should return valid sharding parameters for content topics autosharding", () => {
    const contentTopicInfo = { contentTopics: ["/app/v1/topic1/proto"] };
    const result = ensureShardingConfigured(contentTopicInfo);
    const expectedPubsubTopic = contentTopicToPubsubTopic(
      "/app/v1/topic1/proto",
      DEFAULT_CLUSTER_ID
    );
    expect(result.shardInfo.shards).to.include(
      contentTopicToShardIndex("/app/v1/topic1/proto")
    );
    expect(result.pubsubTopics).to.include(expectedPubsubTopic);
  });

  it("should throw an error for missing sharding configuration", () => {
    const shardInfo = {} as any as NetworkConfig;
    expect(() => ensureShardingConfigured(shardInfo)).to.throw();
  });

  it("handles empty shards array correctly", () => {
    const shardInfo = { clusterId: 1, shards: [] };
    expect(() => ensureShardingConfigured(shardInfo)).to.throw();
  });

  it("handles empty contentTopics array correctly", () => {
    const shardInfo = { contentTopics: [] };
    expect(() => ensureShardingConfigured(shardInfo)).to.throw();
  });
});

describe("contentTopicToPubsubTopic", () => {
  it("should correctly map a content topic to a pubsub topic", () => {
    const contentTopic = "/app/v1/topic1/proto";
    expect(contentTopicToPubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/4");
  });

  it("should map different content topics to different pubsub topics based on shard index", () => {
    const contentTopic1 = "/app/v1/topic1/proto";
    const contentTopic2 = "/app/v2/topic2/proto";
    const pubsubTopic1 = contentTopicToPubsubTopic(contentTopic1);
    const pubsubTopic2 = contentTopicToPubsubTopic(contentTopic2);
    expect(pubsubTopic1).not.to.equal(pubsubTopic2);
  });

  it("should use the provided clusterId for the pubsub topic", () => {
    const contentTopic = "/app/v1/topic1/proto";
    const clusterId = 2;
    expect(contentTopicToPubsubTopic(contentTopic, clusterId)).to.equal(
      "/waku/2/rs/2/4"
    );
  });

  it("should correctly map a content topic to a pubsub topic for different network shard sizes", () => {
    const contentTopic = "/app/v1/topic1/proto";
    const networkShards = 16;
    expect(contentTopicToPubsubTopic(contentTopic, 1, networkShards)).to.equal(
      "/waku/2/rs/1/4"
    );
  });
});
// describe("ensureShardingConfigured", () => {
//   it("should return valid sharding parameters for static sharding", () => {
//     const shardInfo = { clusterId: 1, shards: [0, 1] };
//     const result = ensureShardingConfigured(shardInfo);
//     expect(result.shardInfo).to.deep.include({
//       clusterId: 1,
//       shards: [0, 1]
//     });
//     expect(result.shardInfo).to.deep.include({ clusterId: 1, shards: [0, 1] });
//     expect(result.pubsubTopics).to.have.members([
//       "/waku/2/rs/1/0",
//       "/waku/2/rs/1/1"
//     ]);
//   });
//
//   it("should return valid sharding parameters for content topics autosharding", () => {
//     const contentTopicInfo = { contentTopics: ["/app/v1/topic1/proto"] };
//     const result = ensureShardingConfigured(contentTopicInfo);
//     const expectedPubsubTopic = contentTopicToPubsubTopic(
//       "/app/v1/topic1/proto",
//       DEFAULT_CLUSTER_ID
//     );
//     expect(result.shardInfo.shards).to.include(
//       contentTopicToShardIndex("/app/v1/topic1/proto")
//     );
//     expect(result.pubsubTopics).to.include(expectedPubsubTopic);
//   });
//
//   it("should throw an error for missing sharding configuration", () => {
//     const shardInfo = {} as any as NetworkConfig;
//     expect(() => ensureShardingConfigured(shardInfo)).to.throw();
//   });
//
//   it("handles empty shards array correctly", () => {
//     const shardInfo = { clusterId: 1, shards: [] };
//     expect(() => ensureShardingConfigured(shardInfo)).to.throw();
//   });
//
//   it("handles empty contentTopics array correctly", () => {
//     const shardInfo = { contentTopics: [] };
//     expect(() => ensureShardingConfigured(shardInfo)).to.throw();
//   });
// });
//
// describe("contentTopicToPubsubTopic", () => {
//   it("should correctly map a content topic to a pubsub topic", () => {
//     const contentTopic = "/app/v1/topic1/proto";
//     expect(contentTopicToPubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/4");
//   });
//
//   it("should map different content topics to different pubsub topics based on shard index", () => {
//     const contentTopic1 = "/app/v1/topic1/proto";
//     const contentTopic2 = "/app/v2/topic2/proto";
//     const pubsubTopic1 = contentTopicToPubsubTopic(contentTopic1);
//     const pubsubTopic2 = contentTopicToPubsubTopic(contentTopic2);
//     expect(pubsubTopic1).not.to.equal(pubsubTopic2);
//   });
//
//   it("should use the provided clusterId for the pubsub topic", () => {
//     const contentTopic = "/app/v1/topic1/proto";
//     const clusterId = 2;
//     expect(contentTopicToPubsubTopic(contentTopic, clusterId)).to.equal(
//       "/waku/2/rs/2/4"
//     );
//   });
//
//   it("should correctly map a content topic to a pubsub topic for different network shard sizes", () => {
//     const contentTopic = "/app/v1/topic1/proto";
//     const networkShards = 16;
//     expect(contentTopicToPubsubTopic(contentTopic, 1, networkShards)).to.equal(
//       "/waku/2/rs/1/4"
//     );
//   });
// });

@ -1,109 +1,21 @@
import { sha256 } from "@noble/hashes/sha256";
import {
  DEFAULT_CLUSTER_ID,
  NetworkConfig,
  type ClusterId,
  ContentTopic,
  PubsubTopic,
  ShardInfo,
  SingleShardInfo
  type ShardId
} from "@waku/interfaces";

import { concat, utf8ToBytes } from "../../bytes/index.js";

import { isAutoSharding, isStaticSharding } from "./type_guards.js";

export * from "./type_guards.js";
export * from "./routing_info.js";

export function derivePubsubTopicsFromNetworkConfig(
  networkConfig: NetworkConfig
): PubsubTopic[] {
  if (isStaticSharding(networkConfig)) {
    if (networkConfig.shards.length === 0) {
      throw new Error(
        "Invalid shards configuration: please provide at least one shard"
      );
    }
    return shardInfoToPubsubTopics(networkConfig);
  } else if (isAutoSharding(networkConfig)) {
    if (networkConfig.contentTopics.length === 0) {
      throw new Error(
        "Invalid content topics configuration: please provide at least one content topic"
      );
    }
    return networkConfig.contentTopics.map((contentTopic) =>
      contentTopicToPubsubTopic(contentTopic, networkConfig.clusterId)
    );
  } else {
    throw new Error(
      "Unknown shard config. Please use ShardInfo or ContentTopicInfo"
    );
  }
}

export const singleShardInfoToPubsubTopic = (
  shardInfo: SingleShardInfo
export const formatPubsubTopic = (
  clusterId: ClusterId,
  shard: ShardId
): PubsubTopic => {
  if (shardInfo.shard === undefined) throw new Error("Invalid shard");

  return `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${shardInfo.shard}`;
};

export const singleShardInfosToShardInfo = (
  singleShardInfos: SingleShardInfo[]
): ShardInfo => {
  if (singleShardInfos.length === 0) throw new Error("Invalid shard");

  const clusterIds = singleShardInfos.map((shardInfo) => shardInfo.clusterId);
  if (new Set(clusterIds).size !== 1) {
    throw new Error("Passed shard infos have different clusterIds");
  }

  const shards = singleShardInfos
    .map((shardInfo) => shardInfo.shard)
    .filter((shard): shard is number => shard !== undefined);

  return {
    clusterId: singleShardInfos[0].clusterId,
    shards
  };
};

/**
 * @deprecated will be removed, use cluster and shard comparison directly
 */
export const shardInfoToPubsubTopics = (
  shardInfo: Partial<NetworkConfig>
): PubsubTopic[] => {
  if ("contentTopics" in shardInfo && shardInfo.contentTopics) {
    // Autosharding: explicitly defined content topics
    return Array.from(
      new Set(
        shardInfo.contentTopics.map((contentTopic) =>
          contentTopicToPubsubTopic(contentTopic, shardInfo.clusterId)
        )
      )
    );
  } else if ("shards" in shardInfo) {
    // Static sharding
    if (shardInfo.shards === undefined) throw new Error("Invalid shard");
    return Array.from(
      new Set(
        shardInfo.shards.map(
          (index) =>
            `/waku/2/rs/${shardInfo.clusterId ?? DEFAULT_CLUSTER_ID}/${index}`
        )
      )
    );
  } else if ("application" in shardInfo && "version" in shardInfo) {
    // Autosharding: single shard from application and version
    return [
      contentTopicToPubsubTopic(
        `/${shardInfo.application}/${shardInfo.version}/default/default`,
        shardInfo.clusterId
      )
    ];
  } else {
    throw new Error("Missing required configuration in shard parameters");
  }
  return `/waku/2/rs/${clusterId}/${shard}`;
};
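
For illustration, a minimal sketch of the new helper's behavior (assuming formatPubsubTopic is re-exported via the sharding index above; the import path is an assumption):

// formatPubsubTopic composes the routing topic directly from a cluster id
// and a shard id, with no defaulting.
import { formatPubsubTopic } from "@waku/utils";

const topic = formatPubsubTopic(2, 3);
// topic === "/waku/2/rs/2/3"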

/**
@ -111,7 +23,7 @@ export const shardInfoToPubsubTopics = (
 */
export const pubsubTopicToSingleShardInfo = (
  pubsubTopics: PubsubTopic
): SingleShardInfo => {
): { clusterId: ClusterId; shard: ShardId } => {
  const parts = pubsubTopics.split("/");

  if (
@ -134,40 +46,7 @@ export const pubsubTopicToSingleShardInfo = (
  };
};
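
Going the other way, a sketch of parsing a routing topic back into its parts (the values mirror the updated test earlier in this diff):

// pubsubTopicToSingleShardInfo splits "/waku/2/rs/<clusterId>/<shard>"
// and returns the numeric cluster id and shard; malformed topics throw.
const info = pubsubTopicToSingleShardInfo("/waku/2/rs/2/2");
// info deep-equals { clusterId: 2, shard: 2 }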

export const pubsubTopicsToShardInfo = (
  pubsubTopics: PubsubTopic[]
): ShardInfo => {
  const shardInfoSet = new Set<string>();
  const clusterIds = new Set<number>();

  for (const topic of pubsubTopics) {
    const { clusterId, shard } = pubsubTopicToSingleShardInfo(topic);
    shardInfoSet.add(`${clusterId}:${shard}`);
    clusterIds.add(clusterId);
  }

  if (shardInfoSet.size === 0) {
    throw new Error("No valid pubsub topics provided");
  }

  if (clusterIds.size > 1) {
    throw new Error(
      "Pubsub topics from multiple cluster IDs are not supported"
    );
  }

  const clusterId = clusterIds.values().next().value!;
  const shards = Array.from(shardInfoSet).map((info) =>
    parseInt(info.split(":")[1])
  );

  return {
    clusterId,
    shards
  };
};

interface ContentTopic {
interface ParsedContentTopic {
  generation: number;
  application: string;
  version: string;
@ -180,39 +59,45 @@ interface ContentTopic {
 * @param contentTopic String to validate
 * @returns Object with each content topic field as an attribute
 */
export function ensureValidContentTopic(contentTopic: string): ContentTopic {
  const parts = contentTopic.split("/");
export function ensureValidContentTopic(
  contentTopic: ContentTopic
): ParsedContentTopic {
  const parts = (contentTopic as string).split("/");
  if (parts.length < 5 || parts.length > 6) {
    throw Error("Content topic format is invalid");
    throw Error(`Content topic format is invalid: ${contentTopic}`);
  }
  // Validate generation field if present
  let generation = 0;
  if (parts.length == 6) {
    generation = parseInt(parts[1]);
    if (isNaN(generation)) {
      throw new Error("Invalid generation field in content topic");
      throw new Error(
        `Invalid generation field in content topic: ${contentTopic}`
      );
    }
    if (generation > 0) {
      throw new Error("Generation greater than 0 is not supported");
      throw new Error(
        `Generation greater than 0 is not supported: ${contentTopic}`
      );
    }
  }
  // Validate remaining fields
  const fields = parts.splice(-4);
  // Validate application field
  if (fields[0].length == 0) {
    throw new Error("Application field cannot be empty");
    throw new Error(`Application field cannot be empty: ${contentTopic}`);
  }
  // Validate version field
  if (fields[1].length == 0) {
    throw new Error("Version field cannot be empty");
    throw new Error(`Version field cannot be empty: ${contentTopic}`);
  }
  // Validate topic name field
  if (fields[2].length == 0) {
    throw new Error("Topic name field cannot be empty");
    throw new Error(`Topic name field cannot be empty: ${contentTopic}`);
  }
  // Validate encoding field
  if (fields[3].length == 0) {
    throw new Error("Encoding field cannot be empty");
    throw new Error(`Encoding field cannot be empty: ${contentTopic}`);
  }

  return {
@ -229,27 +114,27 @@ export function ensureValidContentTopic(contentTopic: string): ContentTopic {
 * Based on the algorithm described in the RFC: https://rfc.vac.dev/spec/51//#algorithm
 */
export function contentTopicToShardIndex(
  contentTopic: string,
  networkShards: number = 8
  contentTopic: ContentTopic,
  numShardsInCluster: number
): number {
  const { application, version } = ensureValidContentTopic(contentTopic);
  const digest = sha256(
    concat([utf8ToBytes(application), utf8ToBytes(version)])
  );
  const dataview = new DataView(digest.buffer.slice(-8));
  return Number(dataview.getBigUint64(0, false) % BigInt(networkShards));
  return Number(dataview.getBigUint64(0, false) % BigInt(numShardsInCluster));
}
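
A worked illustration of the RFC 51 hashing step above (the resulting shard index 4 comes from the tests earlier in this diff):

// Only the application and version fields feed the hash: sha256("app" + "v1")
// is computed, the last 8 bytes of the digest are read as a big-endian
// uint64, and the result is reduced modulo the shard count.
const shardIndex = contentTopicToShardIndex("/app/v1/topic1/proto", 8);
// shardIndex === 4, hence pubsub topics of the form "/waku/2/rs/<clusterId>/4"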

export function contentTopicToPubsubTopic(
  contentTopic: string,
  clusterId: number = DEFAULT_CLUSTER_ID,
  networkShards: number = 8
  contentTopic: ContentTopic,
  clusterId: number,
  numShardsInCluster: number
): string {
  if (!contentTopic) {
    throw Error("Content topic must be specified");
  }

  const shardIndex = contentTopicToShardIndex(contentTopic, networkShards);
  const shardIndex = contentTopicToShardIndex(contentTopic, numShardsInCluster);
  return `/waku/2/rs/${clusterId}/${shardIndex}`;
}
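
End to end, with the cluster id and shard count now explicit rather than defaulted (expected values taken from the tests above):

const pubsubTopic = contentTopicToPubsubTopic("/app/v1/topic1/proto", 2, 8);
// pubsubTopic === "/waku/2/rs/2/4"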

@ -258,9 +143,9 @@ export function contentTopicToPubsubTopic(
 * If any of the content topics are not properly formatted, the function will throw an error.
 */
export function contentTopicsByPubsubTopic(
  contentTopics: string[],
  clusterId: number = DEFAULT_CLUSTER_ID,
  networkShards: number = 8
  contentTopics: ContentTopic[],
  clusterId: number,
  networkShards: number
): Map<string, Array<string>> {
  const groupedContentTopics = new Map();
  for (const contentTopic of contentTopics) {
@ -278,70 +163,3 @@ export function contentTopicsByPubsubTopic(
  }
  return groupedContentTopics;
}
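
A short grouping sketch (the shard for the second topic is not asserted anywhere in this diff, so it is left unstated):

const grouped = contentTopicsByPubsubTopic(
  ["/app/v1/topic1/proto", "/app/v2/topic2/proto"],
  1,
  8
);
// Map from pubsub topic to the content topics routed over it, e.g.
// grouped.get("/waku/2/rs/1/4") contains "/app/v1/topic1/proto"; the second
// topic lands on whichever shard its application/version pair hashes to.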

/**
 * Used when creating encoders/decoders to determine which pubsub topic to use
 */
export function determinePubsubTopic(
  contentTopic: string,
  // TODO: make it accept ShardInfo https://github.com/waku-org/js-waku/issues/2086
  pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
): string {
  if (typeof pubsubTopicShardInfo == "string") {
    return pubsubTopicShardInfo;
  }

  return pubsubTopicShardInfo?.shard !== undefined
    ? singleShardInfoToPubsubTopic(pubsubTopicShardInfo)
    : contentTopicToPubsubTopic(
        contentTopic,
        pubsubTopicShardInfo?.clusterId ?? DEFAULT_CLUSTER_ID
      );
}
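
The resolution order, recapped from the determinePubsubTopic tests earlier in this diff:

const contentTopic = "/app/46/sometopic/someencoding";
// 1. An explicit pubsub topic string is returned as-is.
determinePubsubTopic(contentTopic, "/waku/2/rs/1/3"); // "/waku/2/rs/1/3"
// 2. A SingleShardInfo carrying a shard is formatted directly.
determinePubsubTopic(contentTopic, { clusterId: 1, shard: 2 }); // "/waku/2/rs/1/2"
// 3. Otherwise the topic is derived from the content topic via autosharding.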

/**
 * Validates sharding configuration and sets defaults where possible.
 * @returns Validated sharding parameters, with any missing values set to defaults
 */
export const ensureShardingConfigured = (
  networkConfig: NetworkConfig
): {
  shardInfo: ShardInfo;
  pubsubTopics: PubsubTopic[];
} => {
  const clusterId = networkConfig.clusterId ?? DEFAULT_CLUSTER_ID;
  const shards = "shards" in networkConfig ? networkConfig.shards : [];
  const contentTopics =
    "contentTopics" in networkConfig ? networkConfig.contentTopics : [];

  const isShardsConfigured = shards && shards.length > 0;
  const isContentTopicsConfigured = contentTopics && contentTopics.length > 0;

  if (isShardsConfigured) {
    return {
      shardInfo: { clusterId, shards },
      pubsubTopics: shardInfoToPubsubTopics({ clusterId, shards })
    };
  }

  if (isContentTopicsConfigured) {
    const pubsubTopics = Array.from(
      new Set(
        contentTopics.map((topic) =>
          contentTopicToPubsubTopic(topic, clusterId)
        )
      )
    );
    const shards = Array.from(
      new Set(contentTopics.map((topic) => contentTopicToShardIndex(topic)))
    );
    return {
      shardInfo: { clusterId, shards },
      pubsubTopics
    };
  }

  throw new Error(
    "Missing minimum required configuration options for static sharding or autosharding."
  );
};

191 packages/utils/src/common/sharding/routing_info.ts Normal file
@ -0,0 +1,191 @@
import type {
  AutoSharding,
  ClusterId,
  ContentTopic,
  IRoutingInfo,
  NetworkConfig,
  PubsubTopic,
  ShardId,
  StaticSharding
} from "@waku/interfaces";

import {
  contentTopicToShardIndex,
  ensureValidContentTopic,
  formatPubsubTopic,
  isAutoSharding,
  pubsubTopicToSingleShardInfo
} from "./index.js";

export type RoutingInfo = AutoShardingRoutingInfo | StaticShardingRoutingInfo;

export abstract class BaseRoutingInfo {
  protected constructor(
    public networkConfig: NetworkConfig,
    public pubsubTopic: PubsubTopic,
    public shardId: ShardId
  ) {}

  public abstract get isAutoSharding(): boolean;
  public abstract get isStaticSharding(): boolean;
}

export class AutoShardingRoutingInfo
  extends BaseRoutingInfo
  implements IRoutingInfo
{
  public static fromContentTopic(
    contentTopic: ContentTopic,
    networkConfig: AutoSharding
  ): AutoShardingRoutingInfo {
    ensureValidContentTopic(contentTopic);

    const shardId = contentTopicToShardIndex(
      contentTopic,
      networkConfig.numShardsInCluster
    );
    const pubsubTopic = formatPubsubTopic(networkConfig.clusterId, shardId);

    return new AutoShardingRoutingInfo(
      networkConfig,
      pubsubTopic,
      shardId,
      contentTopic
    );
  }

  /**
   * No checks are done by this constructor;
   * be sure the network config (auto vs static)
   * matches the other parameters.
   */
  private constructor(
    public networkConfig: AutoSharding,
    public pubsubTopic: PubsubTopic,
    public shardId: ShardId,
    public contentTopic: string
  ) {
    super(networkConfig, pubsubTopic, shardId);
  }

  public get clusterId(): number {
    return this.networkConfig.clusterId;
  }

  public get isAutoSharding(): boolean {
    return true;
  }

  public get isStaticSharding(): boolean {
    return false;
  }
}
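
A minimal usage sketch of the auto-sharding path (the network config literal is illustrative; the shard value follows from the "/app/v1/topic1/proto" tests earlier in this diff):

const autoRouting = AutoShardingRoutingInfo.fromContentTopic(
  "/app/v1/topic1/proto",
  { clusterId: 1, numShardsInCluster: 8 }
);
// autoRouting.pubsubTopic === "/waku/2/rs/1/4"; autoRouting.shardId === 4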

export class StaticShardingRoutingInfo
  extends BaseRoutingInfo
  implements IRoutingInfo
{
  /**
   * Creates routing info for a static sharding network, from a shard id.
   *
   * @param shardId
   * @param networkConfig
   */
  public static fromShard(
    shardId: ShardId,
    networkConfig: StaticSharding
  ): StaticShardingRoutingInfo {
    const pubsubTopic = formatPubsubTopic(networkConfig.clusterId, shardId);

    return new StaticShardingRoutingInfo(networkConfig, pubsubTopic, shardId);
  }

  /**
   * Creates routing info for a static sharding network, from a pubsub topic.
   *
   * @param pubsubTopic
   * @param networkConfig
   *
   * @throws if the pubsub topic is malformed, or does not match the network config
   */
  public static fromPubsubTopic(
    pubsubTopic: PubsubTopic,
    networkConfig: StaticSharding
  ): StaticShardingRoutingInfo {
    const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic);

    if (clusterId != networkConfig.clusterId) {
      throw new Error(
        "Pubsub topic does not match network config's cluster id"
      );
    }

    return new StaticShardingRoutingInfo(networkConfig, pubsubTopic, shard);
  }

  /**
   * No checks are done by this constructor;
   * be sure the network config (auto vs static)
   * matches the other parameters.
   */
  private constructor(
    public networkConfig: StaticSharding,
    public pubsubTopic: PubsubTopic,
    public shardId: ShardId
  ) {
    super(networkConfig, pubsubTopic, shardId);
  }

  public get clusterId(): ClusterId {
    return this.networkConfig.clusterId;
  }

  public get isAutoSharding(): boolean {
    return false;
  }

  public get isStaticSharding(): boolean {
    return true;
  }
}
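
And the static-sharding path, built either from a shard id or from an existing pubsub topic (a sketch; the StaticSharding literals are illustrative):

const byShard = StaticShardingRoutingInfo.fromShard(3, { clusterId: 2 });
// byShard.pubsubTopic === "/waku/2/rs/2/3"

const byTopic = StaticShardingRoutingInfo.fromPubsubTopic(
  "/waku/2/rs/2/3",
  { clusterId: 2 }
);
// byTopic.shardId === 3; a topic with a mismatched cluster id throws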

export function isAutoShardingRoutingInfo(
  routingInfo: BaseRoutingInfo
): routingInfo is AutoShardingRoutingInfo {
  return routingInfo.isAutoSharding;
}

export function isStaticShardingRoutingInfo(
  routingInfo: BaseRoutingInfo
): routingInfo is StaticShardingRoutingInfo {
  return routingInfo.isStaticSharding;
}

export function createRoutingInfo(
  networkConfig: NetworkConfig,
  options: {
    contentTopic?: ContentTopic;
    shardId?: ShardId;
    pubsubTopic?: PubsubTopic;
  }
): AutoShardingRoutingInfo | StaticShardingRoutingInfo {
  if (isAutoSharding(networkConfig)) {
    if (options.contentTopic) {
      return AutoShardingRoutingInfo.fromContentTopic(
        options.contentTopic,
        networkConfig
      );
    }
    throw new Error("AutoSharding requires contentTopic");
  } else {
    if (options.shardId !== undefined) {
      return StaticShardingRoutingInfo.fromShard(
        options.shardId,
        networkConfig
      );
    } else if (options.pubsubTopic) {
      return StaticShardingRoutingInfo.fromPubsubTopic(
        options.pubsubTopic,
        networkConfig
      );
    }
    throw new Error("StaticSharding requires shardId or pubsubTopic");
  }
}
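
createRoutingInfo ties the two paths together: the shape of the network config selects the sharding mode, and the options must supply the matching input (a sketch with illustrative config literals):

// Auto-sharding configs require a content topic...
createRoutingInfo(
  { clusterId: 1, numShardsInCluster: 8 },
  { contentTopic: "/app/v1/topic1/proto" }
);
// ...while static-sharding configs take a shard id or a pubsub topic.
createRoutingInfo({ clusterId: 2 }, { shardId: 3 });
createRoutingInfo({ clusterId: 2 }, { pubsubTopic: "/waku/2/rs/2/3" });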

@ -1,5 +1,5 @@
import type {
  ContentTopicInfo,
  AutoSharding,
  CreateNodeOptions,
  StaticSharding
} from "@waku/interfaces";
@ -7,13 +7,11 @@ import type {
export function isStaticSharding(
  config: NonNullable<CreateNodeOptions["networkConfig"]>
): config is StaticSharding {
  return (
    "clusterId" in config && "shards" in config && !("contentTopics" in config)
  );
  return "clusterId" in config && !("numShardsInCluster" in config);
}

export function isAutoSharding(
  config: NonNullable<CreateNodeOptions["networkConfig"]>
): config is ContentTopicInfo {
  return "contentTopics" in config;
): config is AutoSharding {
  return "clusterId" in config && "numShardsInCluster" in config;
}
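
Under the reworked guards, the presence of numShardsInCluster alone now distinguishes the two modes (a sketch with illustrative config literals):

isStaticSharding({ clusterId: 2 }); // true: clusterId without numShardsInCluster
isAutoSharding({ clusterId: 2 }); // false
isAutoSharding({ clusterId: 1, numShardsInCluster: 8 }); // true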