mirror of https://github.com/waku-org/js-waku.git
feat!: use ShardingParams on subscriptions, make Decoder/Encoder auto sharding friendly by default (#1958)
* fix: use pubsubTopic from current ones if not set
* fix: improve type on dial method
* enforce same pubsub topic on filter.subscribe, make content topic to pubsub mapping the default for decoder / encoder
* fix mapping problem
* update tests
* add error handling
* fix typo
* up lock
* rm lock
* up lock
* remove only
* fix content topic
* fix ephemeral test
* fix filter unsubscribe test
* up utils
* fix subscribe test
* up interfaces and filter api
* remove only
* up ping test
* fix subscribe test
* fix push test
* fix lightPush
* fix multiple pubsub
* remove only, fix subscribe filter test
* remove only
* fix cluster ID selection and named sharding subscription test
* fix unsubscribe test
* fix light push test
* fix light push test
* fix push test
* fix relay publish
* create runNode and fix relay tests
* generalize runNodes, fix some tests
* fix store tests
* fix toAsyncIterator tests
* remove only
* fix lightPush
* use generics
* try fix test
* run failing tests
* remove only
* address failed tests, remove DefaultPubsubTopic dependency in some tests
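This change makes auto sharding the default content-topic-to-pubsub-topic mapping for encoders and decoders. A minimal sketch of the resulting usage, based on the updated tests in this diff (the `contentTopicToPubsubTopic` helper comes from `@waku/utils` as used there):

```ts
import { createDecoder, createEncoder } from "@waku/core";
import { contentTopicToPubsubTopic } from "@waku/utils";

const contentTopic = "/js-waku/1/tests/bytes";

// No pubsub topic is passed: with auto sharding as the default, the
// encoder and decoder derive it from the content topic.
const encoder = createEncoder({ contentTopic });
const decoder = createDecoder(contentTopic);

// The derived topic matches the auto-sharding mapping used in the tests.
const expected = contentTopicToPubsubTopic(contentTopic);
console.log(encoder.pubsubTopic === expected); // expected to be true
console.log(decoder.pubsubTopic === expected); // expected to be true
```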
This commit is contained in:
parent 86249dfe29
commit f3627c46a4
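Filter subscriptions now take a `ShardingParams` (or a pubsub topic string) instead of a `SingleShardInfo` plus peer id, and the one-shot `filter.subscribe` requires all decoders to map to the same pubsub topic. A rough sketch of the updated call pattern, assuming a light node created with a matching `shardInfo`; the content topic and cluster id below are illustrative only:

```ts
import { createDecoder } from "@waku/core";
import { createLightNode } from "@waku/sdk";

// Illustrative values only.
const contentTopic = "/my-app/1/chat/proto";
const shardInfo = { clusterId: 1, contentTopics: [contentTopic] };

const waku = await createLightNode({ shardInfo });
await waku.start();

const decoder = createDecoder(contentTopic);

// One-shot subscribe: every decoder must resolve to the same pubsub topic,
// otherwise the SDK throws and suggests createSubscription instead.
const unsubscribe = await waku.filter.subscribe([decoder], (msg) => {
  console.log(msg.payload);
});

// Explicit subscription: createSubscription now takes ShardingParams
// (or a pubsub topic string) rather than SingleShardInfo plus a peer id.
const subscription = await waku.filter.createSubscription(shardInfo);
await subscription.subscribe([decoder], (msg) => {
  console.log(msg.payload);
});
```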
@ -1,108 +1,97 @@
|
|||
import type { IProtoMessage } from "@waku/interfaces";
|
||||
import { contentTopicToPubsubTopic } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { createDecoder, createEncoder, DecodedMessage } from "./version_0.js";
|
||||
|
||||
const contentTopic = "/js-waku/1/tests/bytes";
|
||||
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
|
||||
|
||||
describe("Waku Message version 0", function () {
|
||||
it("Round trip binary serialization", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
async (contentTopic, pubsubTopic, payload) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
|
||||
expect(result.contentTopic).to.eq(contentTopic);
|
||||
expect(result.pubsubTopic).to.eq(pubsubTopic);
|
||||
expect(result.version).to.eq(0);
|
||||
expect(result.ephemeral).to.be.false;
|
||||
expect(result.payload).to.deep.eq(payload);
|
||||
expect(result.timestamp).to.not.be.undefined;
|
||||
}
|
||||
)
|
||||
expect(result.contentTopic).to.eq(contentTopic);
|
||||
expect(result.pubsubTopic).to.eq(pubsubTopic);
|
||||
expect(result.version).to.eq(0);
|
||||
expect(result.ephemeral).to.be.false;
|
||||
expect(result.payload).to.deep.eq(payload);
|
||||
expect(result.timestamp).to.not.be.undefined;
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("Ephemeral field set to true", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
async (contentTopic, pubsubTopic, payload) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
ephemeral: true
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
ephemeral: true
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
|
||||
expect(result.ephemeral).to.be.true;
|
||||
}
|
||||
)
|
||||
expect(result.ephemeral).to.be.true;
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("Meta field set when metaSetter is specified", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
async (contentTopic, pubsubTopic, payload) => {
|
||||
// Encode the length of the payload
|
||||
// Not a relevant real life example
|
||||
const metaSetter = (
|
||||
msg: IProtoMessage & { meta: undefined }
|
||||
): Uint8Array => {
|
||||
const buffer = new ArrayBuffer(4);
|
||||
const view = new DataView(buffer);
|
||||
view.setUint32(0, msg.payload.length, false);
|
||||
return new Uint8Array(buffer);
|
||||
};
|
||||
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
|
||||
// Encode the length of the payload
|
||||
// Not a relevant real life example
|
||||
const metaSetter = (
|
||||
msg: IProtoMessage & { meta: undefined }
|
||||
): Uint8Array => {
|
||||
const buffer = new ArrayBuffer(4);
|
||||
const view = new DataView(buffer);
|
||||
view.setUint32(0, msg.payload.length, false);
|
||||
return new Uint8Array(buffer);
|
||||
};
|
||||
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
ephemeral: true,
|
||||
metaSetter
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
ephemeral: true,
|
||||
metaSetter
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const decoder = createDecoder(contentTopic);
|
||||
const protoResult = await decoder.fromWireToProtoObj(bytes);
|
||||
const result = (await decoder.fromProtoObj(
|
||||
pubsubTopic,
|
||||
protoResult!
|
||||
)) as DecodedMessage;
|
||||
|
||||
const expectedMeta = metaSetter({
|
||||
payload,
|
||||
timestamp: undefined,
|
||||
contentTopic: "",
|
||||
ephemeral: undefined,
|
||||
meta: undefined,
|
||||
rateLimitProof: undefined,
|
||||
version: undefined
|
||||
});
|
||||
const expectedMeta = metaSetter({
|
||||
payload,
|
||||
timestamp: undefined,
|
||||
contentTopic: "",
|
||||
ephemeral: undefined,
|
||||
meta: undefined,
|
||||
rateLimitProof: undefined,
|
||||
version: undefined
|
||||
});
|
||||
|
||||
expect(result.meta).to.deep.eq(expectedMeta);
|
||||
}
|
||||
)
|
||||
expect(result.meta).to.deep.eq(expectedMeta);
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -1,11 +1,10 @@
import type { PeerId } from "@libp2p/interface";

import type { IDecodedMessage, IDecoder, SingleShardInfo } from "./message.js";
import type { IDecodedMessage, IDecoder } from "./message.js";
import type { ContentTopic, PubsubTopic } from "./misc.js";
import type {
  Callback,
  IBaseProtocolCore,
  IBaseProtocolSDK
  IBaseProtocolSDK,
  ShardingParams
} from "./protocols.js";
import type { IReceiver } from "./receiver.js";

@@ -31,7 +30,6 @@ export type IFilter = IReceiver & IBaseProtocolCore;
export type IFilterSDK = IReceiver &
  IBaseProtocolSDK & { protocol: IBaseProtocolCore } & {
    createSubscription(
      pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic,
      peerId?: PeerId
      pubsubTopicShardInfo?: ShardingParams | PubsubTopic
    ): Promise<IFilterSubscription>;
  };

@@ -29,7 +29,7 @@ export type IBaseProtocolSDK = {
};

export type ContentTopicInfo = {
  clusterId: number;
  clusterId?: number;
  contentTopics: string[];
};
@@ -1,5 +1,5 @@
import type { PeerId, Stream } from "@libp2p/interface";
import type { Multiaddr } from "@multiformats/multiaddr";
import type { MultiaddrInput } from "@multiformats/multiaddr";

import { IConnectionManager } from "./connection_manager.js";
import type { IFilterSDK } from "./filter.js";

@@ -18,7 +18,7 @@ export interface Waku {
  connectionManager: IConnectionManager;

  dial(peer: PeerId | Multiaddr, protocols?: Protocols[]): Promise<Stream>;
  dial(peer: PeerId | MultiaddrInput, protocols?: Protocols[]): Promise<Stream>;

  start(): Promise<void>;
@ -1,20 +1,22 @@
|
|||
import { IProtoMessage } from "@waku/interfaces";
|
||||
import { contentTopicToPubsubTopic } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { getPublicKey } from "./crypto/index.js";
|
||||
import { createDecoder, createEncoder } from "./ecies.js";
|
||||
|
||||
const contentTopic = "/js-waku/1/tests/bytes";
|
||||
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
|
||||
|
||||
describe("Ecies Encryption", function () {
|
||||
this.timeout(20000);
|
||||
it("Round trip binary encryption [ecies, no signature]", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
async (pubsubTopic, contentTopic, payload, privateKey) => {
|
||||
async (payload, privateKey) => {
|
||||
const publicKey = getPublicKey(privateKey);
|
||||
|
||||
const encoder = createEncoder({
|
||||
|
@ -46,18 +48,10 @@ describe("Ecies Encryption", function () {
|
|||
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
async (
|
||||
pubsubTopic,
|
||||
contentTopic,
|
||||
payload,
|
||||
alicePrivateKey,
|
||||
bobPrivateKey
|
||||
) => {
|
||||
async (payload, alicePrivateKey, bobPrivateKey) => {
|
||||
const alicePublicKey = getPublicKey(alicePrivateKey);
|
||||
const bobPublicKey = getPublicKey(bobPrivateKey);
|
||||
|
||||
|
@ -89,11 +83,9 @@ describe("Ecies Encryption", function () {
|
|||
it("Check meta is set [ecies]", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
async (pubsubTopic, contentTopic, payload, privateKey) => {
|
||||
async (payload, privateKey) => {
|
||||
const publicKey = getPublicKey(privateKey);
|
||||
const metaSetter = (
|
||||
msg: IProtoMessage & { meta: undefined }
|
||||
|
|
|
@@ -1,7 +1,6 @@
import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0";
import {
  type EncoderOptions as BaseEncoderOptions,
  DefaultPubsubTopic,
  type IDecoder,
  type IEncoder,
  type IMessage,

@@ -200,7 +199,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
export function createDecoder(
  contentTopic: string,
  privateKey: Uint8Array,
  pubsubTopicShardInfo: SingleShardInfo | PubsubTopic = DefaultPubsubTopic
  pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
): Decoder {
  return new Decoder(
    determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
@ -1,19 +1,21 @@
|
|||
import { IProtoMessage } from "@waku/interfaces";
|
||||
import { contentTopicToPubsubTopic } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { getPublicKey } from "./crypto/index.js";
|
||||
import { createDecoder, createEncoder } from "./symmetric.js";
|
||||
|
||||
const contentTopic = "/js-waku/1/tests/bytes";
|
||||
const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
|
||||
|
||||
describe("Symmetric Encryption", function () {
|
||||
it("Round trip binary encryption [symmetric, no signature]", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
async (pubsubTopic, contentTopic, payload, symKey) => {
|
||||
async (payload, symKey) => {
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
symKey
|
||||
|
@ -41,12 +43,10 @@ describe("Symmetric Encryption", function () {
|
|||
it("Round trip binary encryption [symmetric, signature]", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
async (pubsubTopic, contentTopic, payload, sigPrivKey, symKey) => {
|
||||
async (payload, sigPrivKey, symKey) => {
|
||||
const sigPubKey = getPublicKey(sigPrivKey);
|
||||
|
||||
const encoder = createEncoder({
|
||||
|
@ -77,11 +77,9 @@ describe("Symmetric Encryption", function () {
|
|||
it("Check meta is set [symmetric]", async function () {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
|
||||
async (pubsubTopic, contentTopic, payload, symKey) => {
|
||||
async (payload, symKey) => {
|
||||
const metaSetter = (
|
||||
msg: IProtoMessage & { meta: undefined }
|
||||
): Uint8Array => {
|
||||
|
|
|
@ -2,57 +2,55 @@ import { TopicValidatorResult } from "@libp2p/interface";
|
|||
import type { UnsignedMessage } from "@libp2p/interface";
|
||||
import { createSecp256k1PeerId } from "@libp2p/peer-id-factory";
|
||||
import { createEncoder } from "@waku/core";
|
||||
import { determinePubsubTopic } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
import fc from "fast-check";
|
||||
|
||||
import { messageValidator } from "./message_validator.js";
|
||||
|
||||
const TestContentTopic = "/app/1/topic/utf8";
|
||||
const TestPubsubTopic = determinePubsubTopic(TestContentTopic);
|
||||
|
||||
describe("Message Validator", () => {
|
||||
it("Accepts a valid Waku Message", async () => {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.uint8Array({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
fc.string({ minLength: 1 }),
|
||||
async (payload, pubsubTopic, contentTopic) => {
|
||||
const peerId = await createSecp256k1PeerId();
|
||||
fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
|
||||
const peerId = await createSecp256k1PeerId();
|
||||
|
||||
const encoder = createEncoder({ contentTopic });
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
const encoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const bytes = await encoder.toWire({ payload });
|
||||
|
||||
const message: UnsignedMessage = {
|
||||
type: "unsigned",
|
||||
topic: pubsubTopic,
|
||||
data: bytes
|
||||
};
|
||||
const message: UnsignedMessage = {
|
||||
type: "unsigned",
|
||||
topic: TestPubsubTopic,
|
||||
data: bytes
|
||||
};
|
||||
|
||||
const result = messageValidator(peerId, message);
|
||||
const result = messageValidator(peerId, message);
|
||||
|
||||
expect(result).to.eq(TopicValidatorResult.Accept);
|
||||
}
|
||||
)
|
||||
expect(result).to.eq(TopicValidatorResult.Accept);
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("Rejects garbage", async () => {
|
||||
await fc.assert(
|
||||
fc.asyncProperty(
|
||||
fc.uint8Array(),
|
||||
fc.string(),
|
||||
async (data, pubsubTopic) => {
|
||||
const peerId = await createSecp256k1PeerId();
|
||||
fc.asyncProperty(fc.uint8Array(), async (data) => {
|
||||
const peerId = await createSecp256k1PeerId();
|
||||
|
||||
const message: UnsignedMessage = {
|
||||
type: "unsigned",
|
||||
topic: pubsubTopic,
|
||||
data
|
||||
};
|
||||
const message: UnsignedMessage = {
|
||||
type: "unsigned",
|
||||
topic: TestPubsubTopic,
|
||||
data
|
||||
};
|
||||
|
||||
const result = messageValidator(peerId, message);
|
||||
const result = messageValidator(peerId, message);
|
||||
|
||||
expect(result).to.eq(TopicValidatorResult.Reject);
|
||||
}
|
||||
)
|
||||
expect(result).to.eq(TopicValidatorResult.Reject);
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -1,19 +1,18 @@
import type { Peer } from "@libp2p/interface";
import { FilterCore } from "@waku/core";
import {
  type Callback,
import type {
  Callback,
  ContentTopic,
  DefaultPubsubTopic,
  type IAsyncIterator,
  type IDecodedMessage,
  type IDecoder,
  type IFilterSDK,
  IAsyncIterator,
  IDecodedMessage,
  IDecoder,
  IFilterSDK,
  IProtoMessage,
  type Libp2p,
  type ProtocolCreateOptions,
  type PubsubTopic,
  type SingleShardInfo,
  type Unsubscribe
  Libp2p,
  ProtocolCreateOptions,
  PubsubTopic,
  ShardingParams,
  Unsubscribe
} from "@waku/interfaces";
import { messageHashStr } from "@waku/message-hash";
import { WakuMessage } from "@waku/proto";

@@ -21,7 +20,7 @@ import {
  ensurePubsubTopicIsConfigured,
  groupByContentTopic,
  Logger,
  singleShardInfoToPubsubTopic,
  shardInfoToPubsubTopics,
  toAsyncIterator
} from "@waku/utils";

@@ -245,12 +244,12 @@ class FilterSDK extends BaseProtocolSDK implements IFilterSDK {
   * @returns The subscription object.
   */
  async createSubscription(
    pubsubTopicShardInfo: SingleShardInfo | PubsubTopic = DefaultPubsubTopic
    pubsubTopicShardInfo: ShardingParams | PubsubTopic
  ): Promise<SubscriptionManager> {
    const pubsubTopic =
      typeof pubsubTopicShardInfo == "string"
        ? pubsubTopicShardInfo
        : singleShardInfoToPubsubTopic(pubsubTopicShardInfo);
        : shardInfoToPubsubTopics(pubsubTopicShardInfo)?.[0];

    ensurePubsubTopicIsConfigured(pubsubTopic, this.protocol.pubsubTopics);

@@ -294,7 +293,21 @@ class FilterSDK extends BaseProtocolSDK implements IFilterSDK {
    decoders: IDecoder<T> | IDecoder<T>[],
    callback: Callback<T>
  ): Promise<Unsubscribe> {
    const subscription = await this.createSubscription();
    const pubsubTopics = this.getPubsubTopics<T>(decoders);

    if (pubsubTopics.length === 0) {
      throw Error(
        "Failed to subscribe: no pubsubTopic found on decoders provided."
      );
    }

    if (pubsubTopics.length > 1) {
      throw Error(
        "Failed to subscribe: all decoders should have the same pubsub topic. Use createSubscription to be more agile."
      );
    }

    const subscription = await this.createSubscription(pubsubTopics[0]);

    await subscription.subscribe(decoders, callback);

@@ -314,6 +327,22 @@ class FilterSDK extends BaseProtocolSDK implements IFilterSDK {
  ): Promise<IAsyncIterator<T>> {
    return toAsyncIterator(this, decoders);
  }

  private getPubsubTopics<T extends IDecodedMessage>(
    decoders: IDecoder<T> | IDecoder<T>[]
  ): string[] {
    if (!Array.isArray(decoders)) {
      return [decoders.pubsubTopic];
    }

    if (decoders.length === 0) {
      return [];
    }

    const pubsubTopics = new Set(decoders.map((d) => d.pubsubTopic));

    return [...pubsubTopics];
  }
}

export function wakuFilter(
@ -2,7 +2,6 @@ import { DecodedMessage } from "@waku/core";
|
|||
import {
|
||||
DefaultPubsubTopic,
|
||||
PubsubTopic,
|
||||
ShardInfo,
|
||||
ShardingParams
|
||||
} from "@waku/interfaces";
|
||||
import { ensureShardingConfigured, Logger } from "@waku/utils";
|
||||
|
@ -12,9 +11,11 @@ import { Args, MessageRpcQuery, MessageRpcResponse } from "../types";
|
|||
import { delay, makeLogFileName } from "../utils/index.js";
|
||||
|
||||
import { MessageCollector } from "./message_collector.js";
|
||||
import { runNodes } from "./runNodes.js";
|
||||
import { defaultArgs, ServiceNode } from "./service_node.js";
|
||||
|
||||
export { ServiceNode, MessageCollector, defaultArgs };
|
||||
export { runNodes };
|
||||
|
||||
const log = new Logger("test:message-collector");
|
||||
|
||||
|
@ -28,7 +29,7 @@ export class ServiceNodesFleet {
|
|||
pubsubTopics: PubsubTopic[],
|
||||
nodesToCreate: number = 3,
|
||||
strictChecking: boolean = false,
|
||||
shardInfo?: ShardInfo,
|
||||
shardInfo?: ShardingParams,
|
||||
_args?: Args,
|
||||
withoutFilter = false
|
||||
): Promise<ServiceNodesFleet> {
|
||||
|
|
|
@ -73,7 +73,7 @@ export class MessageCollector {
|
|||
}
|
||||
): Promise<boolean> {
|
||||
const startTime = Date.now();
|
||||
const pubsubTopic = options?.pubsubTopic || DefaultPubsubTopic;
|
||||
const pubsubTopic = this.getPubsubTopicToUse(options?.pubsubTopic);
|
||||
const timeoutDuration = options?.timeoutDuration || 400;
|
||||
const exact = options?.exact || false;
|
||||
|
||||
|
@ -237,12 +237,13 @@ export class MessageCollector {
|
|||
`Message text mismatch. Expected: ${options.expectedMessageText}. Got: ${receivedMessageText}`
|
||||
);
|
||||
} else {
|
||||
const pubsubTopicToUse = this.getPubsubTopicToUse(
|
||||
options.expectedPubsubTopic
|
||||
);
|
||||
// js-waku message specific assertions
|
||||
expect(message.pubsubTopic).to.eq(
|
||||
options.expectedPubsubTopic || DefaultPubsubTopic,
|
||||
`Message pub/sub topic mismatch. Expected: ${
|
||||
options.expectedPubsubTopic || DefaultPubsubTopic
|
||||
}. Got: ${message.pubsubTopic}`
|
||||
pubsubTopicToUse,
|
||||
`Message pub/sub topic mismatch. Expected: ${pubsubTopicToUse}. Got: ${message.pubsubTopic}`
|
||||
);
|
||||
|
||||
expect(bytesToUtf8(message.payload)).to.eq(
|
||||
|
@ -266,4 +267,8 @@ export class MessageCollector {
|
|||
);
|
||||
}
|
||||
}
|
||||
|
||||
private getPubsubTopicToUse(pubsubTopic: string | undefined): string {
|
||||
return pubsubTopic || this.nwaku?.pubsubTopics?.[0] || DefaultPubsubTopic;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,78 @@
|
|||
import { waitForRemotePeer } from "@waku/core";
|
||||
import {
|
||||
ContentTopicInfo,
|
||||
ProtocolCreateOptions,
|
||||
Protocols,
|
||||
ShardingParams
|
||||
} from "@waku/interfaces";
|
||||
import { createLightNode, WakuNode } from "@waku/sdk";
|
||||
import { createRelayNode } from "@waku/sdk/relay";
|
||||
import { Logger, shardInfoToPubsubTopics } from "@waku/utils";
|
||||
import { Context } from "mocha";
|
||||
|
||||
import { NOISE_KEY_1 } from "../constants.js";
|
||||
import { makeLogFileName } from "../utils/index.js";
|
||||
|
||||
import { ServiceNode } from "./service_node.js";
|
||||
|
||||
export const log = new Logger("test:runNodes");
|
||||
|
||||
type RunNodesOptions = {
|
||||
context: Context;
|
||||
shardInfo: ShardingParams;
|
||||
protocols: Protocols[];
|
||||
createNode: typeof createLightNode | typeof createRelayNode;
|
||||
};
|
||||
|
||||
export async function runNodes<T>(
|
||||
options: RunNodesOptions
|
||||
): Promise<[ServiceNode, T]> {
|
||||
const { context, shardInfo, createNode, protocols } = options;
|
||||
|
||||
const nwaku = new ServiceNode(makeLogFileName(context));
|
||||
const pubsubTopics = shardInfoToPubsubTopics(shardInfo);
|
||||
|
||||
function isContentTopicInfo(info: ShardingParams): info is ContentTopicInfo {
|
||||
return (info as ContentTopicInfo).contentTopics !== undefined;
|
||||
}
|
||||
|
||||
await nwaku.start(
|
||||
{
|
||||
filter: true,
|
||||
lightpush: true,
|
||||
relay: true,
|
||||
store: true,
|
||||
pubsubTopic: pubsubTopics,
|
||||
// Conditionally include clusterId if shardInfo exists
|
||||
...(shardInfo && { clusterId: shardInfo.clusterId }),
|
||||
// Conditionally include contentTopic if shardInfo exists and clusterId is 1
|
||||
...(shardInfo &&
|
||||
isContentTopicInfo(shardInfo) &&
|
||||
shardInfo.clusterId === 1 && { contentTopic: shardInfo.contentTopics })
|
||||
},
|
||||
{ retries: 3 }
|
||||
);
|
||||
const waku_options: ProtocolCreateOptions = {
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
|
||||
shardInfo
|
||||
};
|
||||
|
||||
log.info("Starting js waku node with :", JSON.stringify(waku_options));
|
||||
let waku: WakuNode | undefined;
|
||||
try {
|
||||
waku = (await createNode(waku_options)) as WakuNode;
|
||||
await waku.start();
|
||||
} catch (error) {
|
||||
log.error("jswaku node failed to start:", error);
|
||||
}
|
||||
|
||||
if (waku) {
|
||||
await waku.dial(await nwaku.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, protocols);
|
||||
await nwaku.ensureSubscriptions(pubsubTopics);
|
||||
return [nwaku, waku as T];
|
||||
} else {
|
||||
throw new Error("Failed to initialize waku");
|
||||
}
|
||||
}
|
|
@ -48,6 +48,7 @@ export class ServiceNode {
|
|||
private websocketPort?: number;
|
||||
private readonly logPath: string;
|
||||
private restPort?: number;
|
||||
private args?: Args;
|
||||
|
||||
/**
|
||||
* Convert a [[WakuMessage]] to a [[WakuRelayMessage]]. The latter is used
|
||||
|
@ -166,6 +167,8 @@ export class ServiceNode {
|
|||
this.logPath,
|
||||
WAKU_SERVICE_NODE_PARAMS
|
||||
);
|
||||
|
||||
this.args = mergedArgs;
|
||||
} catch (error) {
|
||||
log.error("Nwaku node failed to start:", error);
|
||||
await this.stop();
|
||||
|
@ -237,11 +240,9 @@ export class ServiceNode {
|
|||
);
|
||||
}
|
||||
|
||||
async messages(
|
||||
pubsubTopic: string = DefaultPubsubTopic
|
||||
): Promise<MessageRpcResponse[]> {
|
||||
async messages(pubsubTopic?: string): Promise<MessageRpcResponse[]> {
|
||||
return this.restCall<MessageRpcResponse[]>(
|
||||
`/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(pubsubTopic || this?.args?.pubsubTopic?.[0] || DefaultPubsubTopic)}`,
|
||||
"GET",
|
||||
null,
|
||||
async (response) => {
|
||||
|
@ -266,7 +267,7 @@ export class ServiceNode {
|
|||
|
||||
async sendMessage(
|
||||
message: MessageRpcQuery,
|
||||
pubsubTopic: string = DefaultPubsubTopic
|
||||
pubsubTopic?: string
|
||||
): Promise<boolean> {
|
||||
this.checkProcess();
|
||||
|
||||
|
@ -275,7 +276,7 @@ export class ServiceNode {
|
|||
}
|
||||
|
||||
return this.restCall<boolean>(
|
||||
`/relay/v1/messages/${encodeURIComponent(pubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(pubsubTopic || this.args?.pubsubTopic?.[0] || DefaultPubsubTopic)}`,
|
||||
"POST",
|
||||
message,
|
||||
async (response) => response.status === 200
|
||||
|
@ -350,6 +351,10 @@ export class ServiceNode {
|
|||
return `http://127.0.0.1:${this.restPort}`;
|
||||
}
|
||||
|
||||
get pubsubTopics(): string[] {
|
||||
return this.args?.pubsubTopic ?? [];
|
||||
}
|
||||
|
||||
async restCall<T>(
|
||||
endpoint: string,
|
||||
method: "GET" | "POST",
|
||||
|
|
|
@ -1,19 +1,28 @@
|
|||
import { createDecoder, createEncoder, Decoder, Encoder } from "@waku/core";
|
||||
|
||||
type TestDataOptions = {
|
||||
pubsubTopic: string;
|
||||
};
|
||||
|
||||
// Utility to generate test data for multiple topics tests.
|
||||
export function generateTestData(topicCount: number): {
|
||||
export function generateTestData(
|
||||
topicCount: number,
|
||||
options?: TestDataOptions
|
||||
): {
|
||||
contentTopics: string[];
|
||||
encoders: Encoder[];
|
||||
decoders: Decoder[];
|
||||
} {
|
||||
const contentTopics = Array.from(
|
||||
{ length: topicCount },
|
||||
(_, i) => `/test/${i + 1}/waku-multi`
|
||||
(_, i) => `/test/${i + 1}/waku-multi/default`
|
||||
);
|
||||
const encoders = contentTopics.map((topic) =>
|
||||
createEncoder({ contentTopic: topic })
|
||||
createEncoder({ contentTopic: topic, pubsubTopic: options?.pubsubTopic })
|
||||
);
|
||||
const decoders = contentTopics.map((topic) =>
|
||||
createDecoder(topic, options?.pubsubTopic)
|
||||
);
|
||||
const decoders = contentTopics.map((topic) => createDecoder(topic));
|
||||
return {
|
||||
contentTopics,
|
||||
encoders,
|
||||
|
|
|
@ -87,7 +87,7 @@ describe("Connection state", function () {
|
|||
expect(eventCount).to.be.eq(1);
|
||||
});
|
||||
|
||||
it("`waku:online` bwtween 2 js-waku relay nodes", async function () {
|
||||
it("`waku:online` between 2 js-waku relay nodes", async function () {
|
||||
const waku1 = await createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1
|
||||
});
|
||||
|
@ -159,7 +159,7 @@ describe("Connection state", function () {
|
|||
expect(waku.isConnected()).to.be.false;
|
||||
});
|
||||
|
||||
it("isConnected bwtween 2 js-waku relay nodes", async function () {
|
||||
it("isConnected between 2 js-waku relay nodes", async function () {
|
||||
const waku1 = await createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1
|
||||
});
|
||||
|
|
|
@ -12,15 +12,15 @@ import {
|
|||
getPublicKey
|
||||
} from "@waku/message-encryption";
|
||||
import {
|
||||
createDecoder as eciesDecoder,
|
||||
createEncoder as eciesEncoder
|
||||
createDecoder as createEciesDecoder,
|
||||
createEncoder as createEciesEncoder
|
||||
} from "@waku/message-encryption/ecies";
|
||||
import {
|
||||
createDecoder as symDecoder,
|
||||
createEncoder as symEncoder
|
||||
createDecoder as createSymDecoder,
|
||||
createEncoder as createSymEncoder
|
||||
} from "@waku/message-encryption/symmetric";
|
||||
import { createLightNode } from "@waku/sdk";
|
||||
import { Logger } from "@waku/utils";
|
||||
import { contentTopicToPubsubTopic, Logger } from "@waku/utils";
|
||||
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -37,11 +37,47 @@ import {
|
|||
|
||||
const log = new Logger("test:ephemeral");
|
||||
|
||||
const ClusterId = 2;
|
||||
const TestContentTopic = "/test/1/ephemeral/utf8";
|
||||
const PubsubTopic = contentTopicToPubsubTopic(TestContentTopic, ClusterId);
|
||||
|
||||
const TestEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic
|
||||
contentTopic: TestContentTopic,
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
const TestDecoder = createDecoder(TestContentTopic);
|
||||
const TestDecoder = createDecoder(TestContentTopic, PubsubTopic);
|
||||
|
||||
const privateKey = generatePrivateKey();
|
||||
const symKey = generateSymmetricKey();
|
||||
const publicKey = getPublicKey(privateKey);
|
||||
|
||||
const AsymContentTopic = "/test/1/ephemeral-asym/utf8";
|
||||
const SymContentTopic = "/test/1/ephemeral-sym/utf8";
|
||||
|
||||
const AsymEncoder = createEciesEncoder({
|
||||
contentTopic: AsymContentTopic,
|
||||
publicKey,
|
||||
ephemeral: true,
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
const SymEncoder = createSymEncoder({
|
||||
contentTopic: SymContentTopic,
|
||||
symKey,
|
||||
ephemeral: true,
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
const ClearEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
ephemeral: true,
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
|
||||
const AsymDecoder = createEciesDecoder(
|
||||
AsymContentTopic,
|
||||
privateKey,
|
||||
PubsubTopic
|
||||
);
|
||||
const SymDecoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic);
|
||||
|
||||
describe("Waku Message Ephemeral field", function () {
|
||||
let waku: LightNode;
|
||||
|
@ -59,11 +95,24 @@ describe("Waku Message Ephemeral field", function () {
|
|||
filter: true,
|
||||
lightpush: true,
|
||||
store: true,
|
||||
relay: true
|
||||
relay: true,
|
||||
pubsubTopic: [PubsubTopic],
|
||||
contentTopic: [TestContentTopic, AsymContentTopic, SymContentTopic],
|
||||
clusterId: ClusterId
|
||||
});
|
||||
await nwaku.ensureSubscriptionsAutosharding([
|
||||
TestContentTopic,
|
||||
AsymContentTopic,
|
||||
SymContentTopic
|
||||
]);
|
||||
|
||||
waku = await createLightNode({
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
|
||||
shardInfo: {
|
||||
contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
|
||||
clusterId: ClusterId
|
||||
}
|
||||
});
|
||||
await waku.start();
|
||||
await waku.dial(await nwaku.getMultiaddrWithId());
|
||||
|
@ -74,7 +123,9 @@ describe("Waku Message Ephemeral field", function () {
|
|||
Protocols.Store
|
||||
]);
|
||||
|
||||
subscription = await waku.filter.createSubscription();
|
||||
subscription = await waku.filter.createSubscription(
|
||||
TestEncoder.pubsubTopic
|
||||
);
|
||||
});
|
||||
|
||||
it("Ephemeral messages are not stored", async function () {
|
||||
|
@ -94,37 +145,20 @@ describe("Waku Message Ephemeral field", function () {
|
|||
payload: utf8ToBytes(clearText)
|
||||
};
|
||||
|
||||
const privateKey = generatePrivateKey();
|
||||
const symKey = generateSymmetricKey();
|
||||
const publicKey = getPublicKey(privateKey);
|
||||
|
||||
const AsymContentTopic = "/test/1/ephemeral-asym/utf8";
|
||||
const SymContentTopic = "/test/1/ephemeral-sym/utf8";
|
||||
|
||||
const asymEncoder = eciesEncoder({
|
||||
contentTopic: AsymContentTopic,
|
||||
publicKey,
|
||||
ephemeral: true
|
||||
});
|
||||
const symEncoder = eciesEncoder({
|
||||
contentTopic: SymContentTopic,
|
||||
publicKey: symKey,
|
||||
ephemeral: true
|
||||
});
|
||||
const clearEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
ephemeral: true
|
||||
});
|
||||
|
||||
const asymDecoder = eciesDecoder(AsymContentTopic, privateKey);
|
||||
const symDecoder = eciesDecoder(SymContentTopic, symKey);
|
||||
|
||||
const [waku1, waku2, nimWakuMultiaddr] = await Promise.all([
|
||||
createLightNode({
|
||||
staticNoiseKey: NOISE_KEY_1
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
shardInfo: {
|
||||
contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
|
||||
clusterId: ClusterId
|
||||
}
|
||||
}).then((waku) => waku.start().then(() => waku)),
|
||||
createLightNode({
|
||||
staticNoiseKey: NOISE_KEY_2
|
||||
staticNoiseKey: NOISE_KEY_2,
|
||||
shardInfo: {
|
||||
contentTopics: [TestContentTopic, AsymContentTopic, SymContentTopic],
|
||||
clusterId: ClusterId
|
||||
}
|
||||
}).then((waku) => waku.start().then(() => waku)),
|
||||
nwaku.getMultiaddrWithId()
|
||||
]);
|
||||
|
@ -142,9 +176,9 @@ describe("Waku Message Ephemeral field", function () {
|
|||
|
||||
log.info("Sending messages using light push");
|
||||
await Promise.all([
|
||||
waku1.lightPush.send(asymEncoder, asymMsg),
|
||||
waku1.lightPush.send(symEncoder, symMsg),
|
||||
waku1.lightPush.send(clearEncoder, clearMsg)
|
||||
waku1.lightPush.send(AsymEncoder, asymMsg),
|
||||
waku1.lightPush.send(SymEncoder, symMsg),
|
||||
waku1.lightPush.send(ClearEncoder, clearMsg)
|
||||
]);
|
||||
|
||||
await waitForRemotePeer(waku2, [Protocols.Store]);
|
||||
|
@ -153,8 +187,8 @@ describe("Waku Message Ephemeral field", function () {
|
|||
|
||||
log.info("Retrieving messages from store");
|
||||
for await (const msgPromises of waku2.store.queryGenerator([
|
||||
asymDecoder,
|
||||
symDecoder,
|
||||
AsymDecoder,
|
||||
SymDecoder,
|
||||
TestDecoder
|
||||
])) {
|
||||
for (const promise of msgPromises) {
|
||||
|
@ -175,7 +209,8 @@ describe("Waku Message Ephemeral field", function () {
|
|||
|
||||
const ephemeralEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
ephemeral: true
|
||||
ephemeral: true,
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
|
||||
const messages: DecodedMessage[] = [];
|
||||
|
@ -187,12 +222,16 @@ describe("Waku Message Ephemeral field", function () {
|
|||
await delay(200);
|
||||
const normalTxt = "Normal message";
|
||||
const ephemeralTxt = "Ephemeral Message";
|
||||
await waku.lightPush.send(TestEncoder, {
|
||||
payload: utf8ToBytes(normalTxt)
|
||||
});
|
||||
await waku.lightPush.send(ephemeralEncoder, {
|
||||
payload: utf8ToBytes(ephemeralTxt)
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
waku.lightPush.send(TestEncoder, {
|
||||
payload: utf8ToBytes(normalTxt)
|
||||
}),
|
||||
waku.lightPush.send(ephemeralEncoder, {
|
||||
payload: utf8ToBytes(ephemeralTxt)
|
||||
})
|
||||
]);
|
||||
|
||||
while (messages.length < 2) {
|
||||
await delay(250);
|
||||
}
|
||||
|
@ -214,18 +253,12 @@ describe("Waku Message Ephemeral field", function () {
|
|||
it("Ephemeral field is preserved - symmetric encryption", async function () {
|
||||
this.timeout(10000);
|
||||
|
||||
const symKey = generateSymmetricKey();
|
||||
|
||||
const ephemeralEncoder = symEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
const encoder = createSymEncoder({
|
||||
contentTopic: SymContentTopic,
|
||||
symKey,
|
||||
ephemeral: true
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
const encoder = symEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
symKey
|
||||
});
|
||||
const decoder = symDecoder(TestContentTopic, symKey);
|
||||
const decoder = createSymDecoder(SymContentTopic, symKey, PubsubTopic);
|
||||
|
||||
const messages: DecodedMessage[] = [];
|
||||
const callback = (msg: DecodedMessage): void => {
|
||||
|
@ -236,12 +269,16 @@ describe("Waku Message Ephemeral field", function () {
|
|||
await delay(200);
|
||||
const normalTxt = "Normal message";
|
||||
const ephemeralTxt = "Ephemeral Message";
|
||||
await waku.lightPush.send(encoder, {
|
||||
payload: utf8ToBytes(normalTxt)
|
||||
});
|
||||
await waku.lightPush.send(ephemeralEncoder, {
|
||||
payload: utf8ToBytes(ephemeralTxt)
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
waku.lightPush.send(encoder, {
|
||||
payload: utf8ToBytes(normalTxt)
|
||||
}),
|
||||
waku.lightPush.send(SymEncoder, {
|
||||
payload: utf8ToBytes(ephemeralTxt)
|
||||
})
|
||||
]);
|
||||
|
||||
while (messages.length < 2) {
|
||||
await delay(250);
|
||||
}
|
||||
|
@ -263,19 +300,16 @@ describe("Waku Message Ephemeral field", function () {
|
|||
it("Ephemeral field is preserved - asymmetric encryption", async function () {
|
||||
this.timeout(10000);
|
||||
|
||||
const privKey = generatePrivateKey();
|
||||
const pubKey = getPublicKey(privKey);
|
||||
|
||||
const ephemeralEncoder = eciesEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
publicKey: pubKey,
|
||||
ephemeral: true
|
||||
const encoder = createEciesEncoder({
|
||||
contentTopic: AsymContentTopic,
|
||||
publicKey: publicKey,
|
||||
pubsubTopic: PubsubTopic
|
||||
});
|
||||
const encoder = eciesEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
publicKey: pubKey
|
||||
});
|
||||
const decoder = eciesDecoder(TestContentTopic, privKey);
|
||||
const decoder = createEciesDecoder(
|
||||
AsymContentTopic,
|
||||
privateKey,
|
||||
PubsubTopic
|
||||
);
|
||||
|
||||
const messages: DecodedMessage[] = [];
|
||||
const callback = (msg: DecodedMessage): void => {
|
||||
|
@ -286,12 +320,16 @@ describe("Waku Message Ephemeral field", function () {
|
|||
await delay(200);
|
||||
const normalTxt = "Normal message";
|
||||
const ephemeralTxt = "Ephemeral Message";
|
||||
await waku.lightPush.send(encoder, {
|
||||
payload: utf8ToBytes(normalTxt)
|
||||
});
|
||||
await waku.lightPush.send(ephemeralEncoder, {
|
||||
payload: utf8ToBytes(ephemeralTxt)
|
||||
});
|
||||
|
||||
await Promise.all([
|
||||
waku.lightPush.send(encoder, {
|
||||
payload: utf8ToBytes(normalTxt)
|
||||
}),
|
||||
waku.lightPush.send(AsymEncoder, {
|
||||
payload: utf8ToBytes(ephemeralTxt)
|
||||
})
|
||||
]);
|
||||
|
||||
while (messages.length < 2) {
|
||||
await delay(250);
|
||||
}
|
||||
|
|
|
@ -1,8 +1,4 @@
|
|||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -18,6 +14,7 @@ import {
|
|||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder,
|
||||
TestShardInfo,
|
||||
validatePingError
|
||||
} from "./utils";
|
||||
|
||||
|
@ -30,10 +27,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
let subscription: IFilterSubscription;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[serviceNodes, waku] = await runMultipleNodes(this.ctx, [
|
||||
DefaultPubsubTopic
|
||||
]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
[serviceNodes, waku] = await runMultipleNodes(this.ctx, TestShardInfo);
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
|
|
@ -1,10 +1,5 @@
|
|||
import { waitForRemotePeer } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode,
|
||||
Protocols
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode, Protocols } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -23,7 +18,9 @@ import {
|
|||
teardownNodesWithRedundancy,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
|
||||
const runTests = (strictCheckNodes: boolean): void => {
|
||||
|
@ -35,10 +32,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
let subscription: IFilterSubscription;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[serviceNodes, waku] = await runMultipleNodes(this.ctx, [
|
||||
DefaultPubsubTopic
|
||||
]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
[serviceNodes, waku] = await runMultipleNodes(this.ctx, TestShardInfo);
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -60,7 +55,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: testItem.value,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -79,7 +75,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
|
||||
timestamp: testItem as any
|
||||
},
|
||||
DefaultPubsubTopic
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
|
@ -88,7 +84,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
checkTimestamp: false,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Check if the timestamp matches
|
||||
|
@ -117,7 +114,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
|
||||
timestamp: "2023-09-06T12:05:38.609Z" as any
|
||||
},
|
||||
DefaultPubsubTopic
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
// Verify that no message was received
|
||||
|
@ -139,12 +136,14 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
|
||||
timestamp: BigInt(Date.now()) * BigInt(1000000)
|
||||
},
|
||||
"DefaultPubsubTopic"
|
||||
"WrongContentTopic"
|
||||
);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
false
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
});
|
||||
|
||||
it("Check message with no content topic is not received", async function () {
|
||||
|
@ -159,7 +158,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
|
||||
timestamp: BigInt(Date.now()) * BigInt(1000000)
|
||||
},
|
||||
DefaultPubsubTopic
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
|
@ -180,7 +179,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
timestamp: BigInt(Date.now()) * BigInt(1000000),
|
||||
payload: undefined as any
|
||||
},
|
||||
DefaultPubsubTopic
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
// For go-waku the message is received (it is possible to send a message with no payload)
|
||||
|
@ -208,7 +207,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
payload: 12345 as unknown as string,
|
||||
timestamp: BigInt(Date.now()) * BigInt(1000000)
|
||||
},
|
||||
DefaultPubsubTopic
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
|
@ -239,7 +238,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
await waku.dial(await node.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
|
||||
}
|
||||
subscription = await waku.filter.createSubscription();
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
await subscription.subscribe(
|
||||
[TestDecoder],
|
||||
serviceNodes.messageCollector.callback
|
||||
|
@ -253,11 +252,13 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(1, {
|
||||
expectedMessageText: "M2",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -285,11 +286,13 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(1, {
|
||||
expectedMessageText: "M2",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -10,7 +10,6 @@ import { Protocols } from "@waku/interfaces";
|
|||
import {
|
||||
contentTopicToPubsubTopic,
|
||||
contentTopicToShardIndex,
|
||||
pubsubTopicToSingleShardInfo,
|
||||
singleShardInfoToPubsubTopic
|
||||
} from "@waku/utils";
|
||||
import { utf8ToBytes } from "@waku/utils/bytes";
|
||||
|
@ -61,14 +60,8 @@ describe("Waku Filter V2: Multiple PubsubTopics", function () {
|
|||
const customDecoder2 = createDecoder(customContentTopic2, singleShardInfo2);
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(
|
||||
this.ctx,
|
||||
[customPubsubTopic1, customPubsubTopic2],
|
||||
shardInfo
|
||||
);
|
||||
subscription = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(customPubsubTopic1)
|
||||
);
|
||||
[nwaku, waku] = await runNodes(this.ctx, shardInfo);
|
||||
subscription = await waku.filter.createSubscription(shardInfo);
|
||||
messageCollector = new MessageCollector();
|
||||
});
|
||||
|
||||
|
@ -91,9 +84,8 @@ describe("Waku Filter V2: Multiple PubsubTopics", function () {
|
|||
await subscription.subscribe([customDecoder1], messageCollector.callback);
|
||||
|
||||
// Subscribe from the same lightnode to the 2nd pubsubtopic
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(customPubsubTopic2)
|
||||
);
|
||||
const subscription2 =
|
||||
await waku.filter.createSubscription(customPubsubTopic2);
|
||||
|
||||
const messageCollector2 = new MessageCollector();
|
||||
|
||||
|
@ -134,10 +126,8 @@ describe("Waku Filter V2: Multiple PubsubTopics", function () {
|
|||
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
|
||||
|
||||
// Subscribe from the same lightnode to the new nwaku on the new pubsubtopic
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(customPubsubTopic2),
|
||||
await nwaku2.getPeerId()
|
||||
);
|
||||
const subscription2 =
|
||||
await waku.filter.createSubscription(customPubsubTopic2);
|
||||
await nwaku2.ensureSubscriptions([customPubsubTopic2]);
|
||||
|
||||
const messageCollector2 = new MessageCollector();
|
||||
|
@ -172,7 +162,7 @@ describe("Waku Filter V2: Multiple PubsubTopics", function () {
|
|||
});
|
||||
|
||||
it("Should fail to subscribe with decoder with wrong pubsubTopic", async function () {
|
||||
// this subscription object is set up with the `customPubsubTopic` but we're passing it a Decoder with the `DefaultPubsubTopic`
|
||||
// this subscription object is set up with the `customPubsubTopic1` but we're passing it a Decoder with the `customPubsubTopic2`
|
||||
try {
|
||||
await subscription.subscribe([customDecoder2], messageCollector.callback);
|
||||
} catch (error) {
|
||||
|
@ -231,13 +221,9 @@ describe("Waku Filter V2 (Autosharding): Multiple PubsubTopics", function () {
|
|||
});
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(
|
||||
this.ctx,
|
||||
[autoshardingPubsubTopic1, autoshardingPubsubTopic2],
|
||||
contentTopicInfo
|
||||
);
|
||||
[nwaku, waku] = await runNodes(this.ctx, contentTopicInfo);
|
||||
subscription = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
|
||||
autoshardingPubsubTopic1
|
||||
);
|
||||
messageCollector = new MessageCollector();
|
||||
});
|
||||
|
@ -266,7 +252,7 @@ describe("Waku Filter V2 (Autosharding): Multiple PubsubTopics", function () {
|
|||
|
||||
// Subscribe from the same lightnode to the 2nd pubsubtopic
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2)
|
||||
autoshardingPubsubTopic2
|
||||
);
|
||||
|
||||
const messageCollector2 = new MessageCollector();
|
||||
|
@ -318,8 +304,7 @@ describe("Waku Filter V2 (Autosharding): Multiple PubsubTopics", function () {
|
|||
|
||||
// Subscribe from the same lightnode to the new nwaku on the new pubsubtopic
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic2),
|
||||
await nwaku2.getPeerId()
|
||||
autoshardingPubsubTopic2
|
||||
);
|
||||
await nwaku2.ensureSubscriptionsAutosharding([customContentTopic2]);
|
||||
|
||||
|
@ -355,7 +340,7 @@ describe("Waku Filter V2 (Autosharding): Multiple PubsubTopics", function () {
|
|||
});
|
||||
|
||||
it("Should fail to subscribe with decoder with wrong pubsubTopic", async function () {
|
||||
// this subscription object is set up with the `customPubsubTopic` but we're passing it a Decoder with the `DefaultPubsubTopic`
|
||||
// this subscription object is set up with the `customPubsubTopic1` but we're passing it a Decoder with the `customPubsubTopic2`
|
||||
try {
|
||||
await subscription.subscribe([customDecoder2], messageCollector.callback);
|
||||
} catch (error) {
|
||||
|
@ -383,6 +368,10 @@ describe("Waku Filter V2 (Named sharding): Multiple PubsubTopics", function () {
|
|||
clusterId: 3,
|
||||
shard: 2
|
||||
});
|
||||
const shardInfo = {
|
||||
clusterId: 3,
|
||||
shards: [1, 2]
|
||||
};
|
||||
const customContentTopic1 = "/test/2/waku-filter";
|
||||
const customContentTopic2 = "/test/3/waku-filter";
|
||||
const customEncoder1 = createEncoder({
|
||||
|
@ -397,14 +386,7 @@ describe("Waku Filter V2 (Named sharding): Multiple PubsubTopics", function () {
|
|||
const customDecoder2 = createDecoder(customContentTopic2, customPubsubTopic2);
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(
|
||||
this.ctx,
|
||||
[customPubsubTopic1, customPubsubTopic2],
|
||||
{
|
||||
clusterId: 3,
|
||||
shards: [1, 2]
|
||||
}
|
||||
);
|
||||
[nwaku, waku] = await runNodes(this.ctx, shardInfo);
|
||||
subscription = await waku.filter.createSubscription(customPubsubTopic1);
|
||||
messageCollector = new MessageCollector();
|
||||
});
|
||||
|
@ -428,9 +410,8 @@ describe("Waku Filter V2 (Named sharding): Multiple PubsubTopics", function () {
|
|||
await subscription.subscribe([customDecoder1], messageCollector.callback);
|
||||
|
||||
// Subscribe from the same lightnode to the 2nd pubsubtopic
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(customPubsubTopic2)
|
||||
);
|
||||
const subscription2 =
|
||||
await waku.filter.createSubscription(customPubsubTopic2);
|
||||
|
||||
const messageCollector2 = new MessageCollector();
|
||||
|
||||
|
@ -471,10 +452,8 @@ describe("Waku Filter V2 (Named sharding): Multiple PubsubTopics", function () {
|
|||
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
|
||||
|
||||
// Subscribe from the same lightnode to the new nwaku on the new pubsubtopic
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
pubsubTopicToSingleShardInfo(customPubsubTopic2),
|
||||
await nwaku2.getPeerId()
|
||||
);
|
||||
const subscription2 =
|
||||
await waku.filter.createSubscription(customPubsubTopic2);
|
||||
await nwaku2.ensureSubscriptions([customPubsubTopic2]);
|
||||
|
||||
const messageCollector2 = new MessageCollector();
|
||||
|
@ -509,7 +488,7 @@ describe("Waku Filter V2 (Named sharding): Multiple PubsubTopics", function () {
|
|||
});
|
||||
|
||||
it("Should fail to subscribe with decoder with wrong pubsubTopic", async function () {
|
||||
// this subscription object is set up with the `customPubsubTopic` but we're passing it a Decoder with the `DefaultPubsubTopic`
|
||||
// this subscription object is set up with the `customPubsubTopic1` but we're passing it a Decoder with the `customPubsubTopic2`
|
||||
try {
|
||||
await subscription.subscribe([customDecoder2], messageCollector.callback);
|
||||
} catch (error) {
|
||||
|
|
|
@ -1,8 +1,4 @@
|
|||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -17,6 +13,7 @@ import {
|
|||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder,
|
||||
TestShardInfo,
|
||||
validatePingError
|
||||
} from "../utils.js";
|
||||
|
||||
|
@ -31,8 +28,8 @@ describe("Waku Filter V2: Ping", function () {
|
|||
let messageCollector: MessageCollector;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(this.ctx, [DefaultPubsubTopic]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
[nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
messageCollector = new MessageCollector();
|
||||
});
|
||||
|
||||
|
|
|
@ -1,10 +1,5 @@
|
|||
import { waitForRemotePeer } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode,
|
||||
Protocols
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode, Protocols } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -23,7 +18,9 @@ import {
|
|||
messageText,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "../utils.js";
|
||||
|
||||
describe("Waku Filter V2: FilterPush", function () {
|
||||
|
@ -35,9 +32,9 @@ describe("Waku Filter V2: FilterPush", function () {
|
|||
let messageCollector: MessageCollector;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(this.ctx, [DefaultPubsubTopic]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
messageCollector = new MessageCollector();
|
||||
[nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
messageCollector = new MessageCollector(nwaku);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -65,7 +62,7 @@ describe("Waku Filter V2: FilterPush", function () {
|
|||
await delay(400);
|
||||
|
||||
await nwaku.restCall<boolean>(
|
||||
`/relay/v1/messages/${encodeURIComponent(DefaultPubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(TestPubsubTopic)}`,
|
||||
"POST",
|
||||
{
|
||||
contentTopic: TestContentTopic,
|
||||
|
@ -99,7 +96,7 @@ describe("Waku Filter V2: FilterPush", function () {
|
|||
await delay(400);
|
||||
|
||||
await nwaku.restCall<boolean>(
|
||||
`/relay/v1/messages/${encodeURIComponent(DefaultPubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(TestPubsubTopic)}`,
|
||||
"POST",
|
||||
{
|
||||
contentTopic: TestContentTopic,
|
||||
|
@ -154,7 +151,7 @@ describe("Waku Filter V2: FilterPush", function () {
|
|||
await delay(400);
|
||||
|
||||
await nwaku.restCall<boolean>(
|
||||
`/relay/v1/messages/${encodeURIComponent(DefaultPubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(TestPubsubTopic)}`,
|
||||
"POST",
|
||||
{
|
||||
payload: Buffer.from(utf8ToBytes(messageText)).toString("base64"),
|
||||
|
@ -171,7 +168,7 @@ describe("Waku Filter V2: FilterPush", function () {
|
|||
await delay(400);
|
||||
|
||||
await nwaku.restCall<boolean>(
|
||||
`/relay/v1/messages/${encodeURIComponent(DefaultPubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(TestPubsubTopic)}`,
|
||||
"POST",
|
||||
{
|
||||
contentTopic: TestContentTopic,
|
||||
|
@ -193,7 +190,7 @@ describe("Waku Filter V2: FilterPush", function () {
|
|||
await delay(400);
|
||||
|
||||
await nwaku.restCall<boolean>(
|
||||
`/relay/v1/messages/${encodeURIComponent(DefaultPubsubTopic)}`,
|
||||
`/relay/v1/messages/${encodeURIComponent(TestPubsubTopic)}`,
|
||||
"POST",
|
||||
{
|
||||
contentTopic: TestContentTopic,
|
||||
|
|
|
@ -1,10 +1,5 @@
|
|||
import { createDecoder, createEncoder, waitForRemotePeer } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode,
|
||||
Protocols
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode, Protocols } from "@waku/interfaces";
|
||||
import {
|
||||
ecies,
|
||||
generatePrivateKey,
|
||||
|
@ -14,13 +9,13 @@ import {
|
|||
} from "@waku/message-encryption";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
import type { Context } from "mocha";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
delay,
|
||||
generateTestData,
|
||||
makeLogFileName,
|
||||
MessageCollector,
|
||||
ServiceNode,
|
||||
tearDownNodes,
|
||||
|
@ -31,7 +26,9 @@ import {
|
|||
messageText,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "../utils.js";
|
||||
|
||||
import { runNodes } from "./utils.js";
|
||||
|
@ -40,20 +37,23 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
// Set the timeout for all tests in this suite. Can be overwritten at test level
|
||||
this.timeout(10000);
|
||||
let waku: LightNode;
|
||||
let waku2: LightNode;
|
||||
let nwaku: ServiceNode;
|
||||
let nwaku2: ServiceNode;
|
||||
let subscription: IFilterSubscription;
|
||||
let messageCollector: MessageCollector;
|
||||
let ctx: Context;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(this.ctx, [DefaultPubsubTopic]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
ctx = this.ctx;
|
||||
[nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
messageCollector = new MessageCollector();
|
||||
await nwaku.ensureSubscriptions();
|
||||
await nwaku.ensureSubscriptions([TestPubsubTopic]);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
await tearDownNodes([nwaku, nwaku2], waku);
|
||||
await tearDownNodes([nwaku, nwaku2], [waku, waku2]);
|
||||
});
|
||||
|
||||
it("Subscribe and receive messages via lightPush", async function () {
|
||||
|
@ -64,7 +64,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect((await nwaku.messages()).length).to.eq(1);
|
||||
});
|
||||
|
@ -74,9 +75,14 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
const publicKey = getPublicKey(privateKey);
|
||||
const encoder = ecies.createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
publicKey
|
||||
publicKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const decoder = ecies.createDecoder(TestContentTopic, privateKey);
|
||||
const decoder = ecies.createDecoder(
|
||||
TestContentTopic,
|
||||
privateKey,
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
await subscription.subscribe([decoder], messageCollector.callback);
|
||||
|
||||
|
@ -86,7 +92,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedVersion: 1
|
||||
expectedVersion: 1,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect((await nwaku.messages()).length).to.eq(1);
|
||||
});
|
||||
|
@ -95,9 +102,14 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
const symKey = generateSymmetricKey();
|
||||
const encoder = symmetric.createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
symKey
|
||||
symKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const decoder = symmetric.createDecoder(TestContentTopic, symKey);
|
||||
const decoder = symmetric.createDecoder(
|
||||
TestContentTopic,
|
||||
symKey,
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
await subscription.subscribe([decoder], messageCollector.callback);
|
||||
|
||||
|
@ -107,7 +119,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedVersion: 1
|
||||
expectedVersion: 1,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect((await nwaku.messages()).length).to.eq(1);
|
||||
});
|
||||
|
@ -128,7 +141,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect((await nwaku.messages()).length).to.eq(1);
|
||||
});
|
||||
|
@ -141,7 +155,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Send another message on the same topic.
|
||||
|
@ -154,7 +169,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(2)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(1, {
|
||||
expectedMessageText: newMessageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect((await nwaku.messages()).length).to.eq(2);
|
||||
});
|
||||
|
@ -166,15 +182,19 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Modify subscription to include a new content topic and send a message.
|
||||
const newMessageText = "Filtering still works!";
|
||||
const newMessagePayload = { payload: utf8ToBytes(newMessageText) };
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription.subscribe([newDecoder], messageCollector.callback);
|
||||
await waku.lightPush.send(newEncoder, {
|
||||
payload: utf8ToBytes(newMessageText)
|
||||
|
@ -182,7 +202,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(2)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(1, {
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedMessageText: newMessageText
|
||||
expectedMessageText: newMessageText,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Send another message on the initial content topic to verify it still works.
|
||||
|
@ -190,14 +211,15 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(3)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(2, {
|
||||
expectedMessageText: newMessageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect((await nwaku.messages()).length).to.eq(3);
|
||||
});
|
||||
|
||||
it("Subscribe and receives messages on 20 topics", async function () {
|
||||
const topicCount = 20;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
// Subscribe to all 20 topics.
|
||||
for (let i = 0; i < topicCount; i++) {
|
||||
|
@ -216,7 +238,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
td.contentTopics.forEach((topic, index) => {
|
||||
messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -224,7 +247,7 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
it("Subscribe to 100 topics (new limit) at once and receives messages", async function () {
|
||||
this.timeout(50000);
|
||||
const topicCount = 100;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
await subscription.subscribe(td.decoders, messageCollector.callback);
|
||||
|
||||
|
@ -243,7 +266,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
td.contentTopics.forEach((topic, index) => {
|
||||
messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
|
@ -255,7 +279,7 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
|
||||
it("Error when try to subscribe to more than 101 topics (new limit)", async function () {
|
||||
const topicCount = 101;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
try {
|
||||
await subscription.subscribe(td.decoders, messageCollector.callback);
|
||||
|
@ -279,9 +303,9 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
it("Overlapping topic subscription", async function () {
|
||||
// Define two sets of test data with overlapping topics.
|
||||
const topicCount1 = 2;
|
||||
const td1 = generateTestData(topicCount1);
|
||||
const td1 = generateTestData(topicCount1, { pubsubTopic: TestPubsubTopic });
|
||||
const topicCount2 = 4;
|
||||
const td2 = generateTestData(topicCount2);
|
||||
const td2 = generateTestData(topicCount2, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
// Subscribe to the first set of topics.
|
||||
await subscription.subscribe(td1.decoders, messageCollector.callback);
|
||||
|
@ -327,19 +351,24 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
messageCollector.verifyReceivedMessage(1, {
|
||||
expectedMessageText: "M2",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
TEST_STRING.forEach((testItem) => {
|
||||
it(`Subscribe to topic containing ${testItem.description} and receive message`, async function () {
|
||||
const newContentTopic = testItem.value;
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
|
||||
await subscription.subscribe([newDecoder], messageCollector.callback);
|
||||
await waku.lightPush.send(newEncoder, messagePayload);
|
||||
|
@ -347,7 +376,8 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: newContentTopic
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -357,10 +387,13 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
|
||||
|
||||
// Create a second subscription on a different topic
|
||||
const subscription2 = await waku.filter.createSubscription();
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const subscription2 = await waku.filter.createSubscription(TestShardInfo);
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription2.subscribe([newDecoder], messageCollector.callback);
|
||||
|
||||
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M2") });
|
||||
|
@ -369,11 +402,13 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
expect(await messageCollector.waitForMessages(2)).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
messageCollector.verifyReceivedMessage(1, {
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedMessageText: "M2"
|
||||
expectedMessageText: "M2",
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -381,23 +416,18 @@ describe("Waku Filter V2: Subscribe: Single Service Node", function () {
|
|||
await subscription.subscribe([TestDecoder], messageCollector.callback);
|
||||
|
||||
// Set up and start a new nwaku node
|
||||
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
|
||||
await nwaku2.start({
|
||||
filter: true,
|
||||
lightpush: true,
|
||||
relay: true
|
||||
});
|
||||
[nwaku2, waku2] = await runNodes(ctx, TestShardInfo);
|
||||
await waku.dial(await nwaku2.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
|
||||
const subscription2 = await waku.filter.createSubscription(
|
||||
undefined,
|
||||
await nwaku2.getPeerId()
|
||||
);
|
||||
await nwaku2.ensureSubscriptions([DefaultPubsubTopic]);
|
||||
const subscription2 = await waku.filter.createSubscription(TestShardInfo);
|
||||
await nwaku2.ensureSubscriptions([TestPubsubTopic]);
|
||||
// Send a message using the new subscription
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription2.subscribe([newDecoder], messageCollector.callback);
|
||||
|
||||
// Making sure that messages are sent and received for both subscriptions
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { createDecoder, createEncoder } from "@waku/core";
|
||||
import { DefaultPubsubTopic, IFilterSubscription } from "@waku/interfaces";
|
||||
import { IFilterSubscription } from "@waku/interfaces";
|
||||
import { LightNode } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
@ -18,7 +18,9 @@ import {
|
|||
messageText,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "../utils.js";
|
||||
|
||||
describe("Waku Filter V2: Unsubscribe", function () {
|
||||
|
@ -30,12 +32,10 @@ describe("Waku Filter V2: Unsubscribe", function () {
|
|||
let messageCollector: MessageCollector;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(this.ctx, [DefaultPubsubTopic]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
[nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
messageCollector = new MessageCollector();
|
||||
|
||||
// Nwaku subscribe to the default pubsub topic
|
||||
await nwaku.ensureSubscriptions();
|
||||
await nwaku.ensureSubscriptions([TestPubsubTopic]);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -55,7 +55,8 @@ describe("Waku Filter V2: Unsubscribe", function () {
|
|||
// Check that from 2 messages send only the 1st was received
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
expect(messageCollector.count).to.eq(1);
|
||||
expect((await nwaku.messages()).length).to.eq(2);
|
||||
|
@ -65,8 +66,11 @@ describe("Waku Filter V2: Unsubscribe", function () {
|
|||
// Subscribe to 2 topics and send messages
|
||||
await subscription.subscribe([TestDecoder], messageCollector.callback);
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription.subscribe([newDecoder], messageCollector.callback);
|
||||
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
|
||||
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M2") });
|
||||
|
@ -86,9 +90,12 @@ describe("Waku Filter V2: Unsubscribe", function () {
|
|||
it("Unsubscribe 2 topics - node subscribed to 2 topics", async function () {
|
||||
// Subscribe to 2 topics and send messages
|
||||
await subscription.subscribe([TestDecoder], messageCollector.callback);
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription.subscribe([newDecoder], messageCollector.callback);
|
||||
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
|
||||
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M2") });
|
||||
|
@ -115,7 +122,7 @@ describe("Waku Filter V2: Unsubscribe", function () {
|
|||
|
||||
// Unsubscribe from topics that the node is not subscribed to and send again
|
||||
await subscription.unsubscribe([]);
|
||||
await subscription.unsubscribe(["/test/2/waku-filter"]);
|
||||
await subscription.unsubscribe(["/test/2/waku-filter/default"]);
|
||||
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M2") });
|
||||
expect(await messageCollector.waitForMessages(2)).to.eq(true);
|
||||
|
||||
|
@ -143,7 +150,7 @@ describe("Waku Filter V2: Unsubscribe", function () {
|
|||
it("Unsubscribes all - node subscribed to 10 topics", async function () {
|
||||
// Subscribe to 10 topics and send message
|
||||
const topicCount = 10;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
await subscription.subscribe(td.decoders, messageCollector.callback);
|
||||
for (let i = 0; i < topicCount; i++) {
|
||||
await waku.lightPush.send(td.encoders[i], {
|
||||
|
|
|
@ -1,76 +1,22 @@
|
|||
import { waitForRemotePeer } from "@waku/core";
|
||||
import {
|
||||
ContentTopicInfo,
|
||||
DefaultPubsubTopic,
|
||||
LightNode,
|
||||
ProtocolCreateOptions,
|
||||
Protocols,
|
||||
ShardingParams
|
||||
} from "@waku/interfaces";
|
||||
import { LightNode, Protocols, ShardingParams } from "@waku/interfaces";
|
||||
import { createLightNode } from "@waku/sdk";
|
||||
import { Logger } from "@waku/utils";
|
||||
import { Context } from "mocha";
|
||||
|
||||
import {
|
||||
makeLogFileName,
|
||||
NOISE_KEY_1,
|
||||
runNodes as runNodesBuilder,
|
||||
ServiceNode
|
||||
} from "../../../src/index.js";
|
||||
|
||||
export const log = new Logger("test:filter:single_node");
|
||||
|
||||
export async function runNodes(
|
||||
export const runNodes = (
|
||||
context: Context,
|
||||
//TODO: change this to use `ShardInfo` instead of `string[]`
|
||||
pubsubTopics: string[],
|
||||
shardInfo?: ShardingParams
|
||||
): Promise<[ServiceNode, LightNode]> {
|
||||
const nwaku = new ServiceNode(makeLogFileName(context));
|
||||
|
||||
function isContentTopicInfo(info: ShardingParams): info is ContentTopicInfo {
|
||||
return (info as ContentTopicInfo).contentTopics !== undefined;
|
||||
}
|
||||
|
||||
await nwaku.start(
|
||||
{
|
||||
filter: true,
|
||||
lightpush: true,
|
||||
relay: true,
|
||||
pubsubTopic: pubsubTopics,
|
||||
// Conditionally include clusterId if shardInfo exists
|
||||
...(shardInfo && { clusterId: shardInfo.clusterId }),
|
||||
// Conditionally include contentTopic if shardInfo exists and clusterId is 1
|
||||
...(shardInfo &&
|
||||
isContentTopicInfo(shardInfo) &&
|
||||
shardInfo.clusterId === 1 && { contentTopic: shardInfo.contentTopics })
|
||||
},
|
||||
{ retries: 3 }
|
||||
);
|
||||
const waku_options: ProtocolCreateOptions = {
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
|
||||
pubsubTopics: shardInfo ? undefined : pubsubTopics,
|
||||
...((pubsubTopics.length !== 1 ||
|
||||
pubsubTopics[0] !== DefaultPubsubTopic) && {
|
||||
shardInfo: shardInfo
|
||||
})
|
||||
};
|
||||
|
||||
log.info("Starting js waku node with :", JSON.stringify(waku_options));
|
||||
let waku: LightNode | undefined;
|
||||
try {
|
||||
waku = await createLightNode(waku_options);
|
||||
await waku.start();
|
||||
} catch (error) {
|
||||
log.error("jswaku node failed to start:", error);
|
||||
}
|
||||
|
||||
if (waku) {
|
||||
await waku.dial(await nwaku.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
|
||||
await nwaku.ensureSubscriptions(pubsubTopics);
|
||||
return [nwaku, waku];
|
||||
} else {
|
||||
throw new Error("Failed to initialize waku");
|
||||
}
|
||||
}
|
||||
shardInfo: ShardingParams
|
||||
): Promise<[ServiceNode, LightNode]> =>
|
||||
runNodesBuilder<LightNode>({
|
||||
context,
|
||||
createNode: createLightNode,
|
||||
protocols: [Protocols.LightPush, Protocols.Filter],
|
||||
shardInfo
|
||||
});
|
||||
|
|
|
@ -1,9 +1,5 @@
|
|||
import { createDecoder, createEncoder } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode } from "@waku/interfaces";
|
||||
import {
|
||||
ecies,
|
||||
generatePrivateKey,
|
||||
|
@ -30,7 +26,9 @@ import {
|
|||
teardownNodesWithRedundancy,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
|
||||
const runTests = (strictCheckNodes: boolean): void => {
|
||||
|
@ -43,10 +41,10 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
beforeEachCustom(this, async () => {
|
||||
[serviceNodes, waku] = await runMultipleNodes(
|
||||
this.ctx,
|
||||
[DefaultPubsubTopic],
|
||||
TestShardInfo,
|
||||
strictCheckNodes
|
||||
);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
subscription = await waku.filter.createSubscription(TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -79,9 +77,14 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
const publicKey = getPublicKey(privateKey);
|
||||
const encoder = ecies.createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
publicKey
|
||||
publicKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const decoder = ecies.createDecoder(TestContentTopic, privateKey);
|
||||
const decoder = ecies.createDecoder(
|
||||
TestContentTopic,
|
||||
privateKey,
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
await subscription.subscribe(
|
||||
[decoder],
|
||||
|
@ -96,7 +99,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedVersion: 1
|
||||
expectedVersion: 1,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(1);
|
||||
|
@ -106,9 +110,14 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
const symKey = generateSymmetricKey();
|
||||
const encoder = symmetric.createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
symKey
|
||||
symKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const decoder = symmetric.createDecoder(TestContentTopic, symKey);
|
||||
const decoder = symmetric.createDecoder(
|
||||
TestContentTopic,
|
||||
symKey,
|
||||
TestPubsubTopic
|
||||
);
|
||||
|
||||
await subscription.subscribe(
|
||||
[decoder],
|
||||
|
@ -123,7 +132,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedVersion: 1
|
||||
expectedVersion: 1,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(1);
|
||||
|
@ -142,14 +152,15 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
contentTopic: TestContentTopic,
|
||||
payload: utf8ToBytes(messageText)
|
||||
});
|
||||
await serviceNodes.sendRelayMessage(relayMessage);
|
||||
await serviceNodes.sendRelayMessage(relayMessage, TestPubsubTopic);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(1);
|
||||
|
@ -201,15 +212,19 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Modify subscription to include a new content topic and send a message.
|
||||
const newMessageText = "Filtering still works!";
|
||||
const newMessagePayload = { payload: utf8ToBytes(newMessageText) };
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription.subscribe(
|
||||
[newDecoder],
|
||||
serviceNodes.messageCollector.callback
|
||||
|
@ -222,7 +237,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(1, {
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedMessageText: newMessageText
|
||||
expectedMessageText: newMessageText,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Send another message on the initial content topic to verify it still works.
|
||||
|
@ -232,7 +248,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(2, {
|
||||
expectedMessageText: newMessageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
await serviceNodes.confirmMessageLength(3);
|
||||
|
@ -240,7 +257,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
|
||||
it("Subscribe and receives messages on 20 topics", async function () {
|
||||
const topicCount = 20;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
// Subscribe to all 20 topics.
|
||||
for (let i = 0; i < topicCount; i++) {
|
||||
|
@ -264,7 +281,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
td.contentTopics.forEach((topic, index) => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -272,7 +290,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
it("Subscribe to 100 topics (new limit) at once and receives messages", async function () {
|
||||
this.timeout(50000);
|
||||
const topicCount = 100;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
await subscription.subscribe(
|
||||
td.decoders,
|
||||
|
@ -296,7 +314,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
td.contentTopics.forEach((topic, index) => {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(index, {
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
|
@ -308,7 +327,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
|
||||
it("Error when try to subscribe to more than 101 topics (new limit)", async function () {
|
||||
const topicCount = 101;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
|
||||
try {
|
||||
await subscription.subscribe(
|
||||
|
@ -335,9 +354,13 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
it("Overlapping topic subscription", async function () {
|
||||
// Define two sets of test data with overlapping topics.
|
||||
const topicCount1 = 2;
|
||||
const td1 = generateTestData(topicCount1);
|
||||
const td1 = generateTestData(topicCount1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const topicCount2 = 4;
|
||||
const td2 = generateTestData(topicCount2);
|
||||
const td2 = generateTestData(topicCount2, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
// Subscribe to the first set of topics.
|
||||
await subscription.subscribe(
|
||||
|
@ -394,19 +417,24 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(1, {
|
||||
expectedMessageText: "M2",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
TEST_STRING.forEach((testItem) => {
|
||||
it(`Subscribe to topic containing ${testItem.description} and receive message`, async function () {
|
||||
const newContentTopic = testItem.value;
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
|
||||
await subscription.subscribe(
|
||||
[newDecoder],
|
||||
|
@ -419,7 +447,8 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: newContentTopic
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -432,10 +461,13 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
|
||||
|
||||
// Create a second subscription on a different topic
|
||||
const subscription2 = await waku.filter.createSubscription();
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const subscription2 = await waku.filter.createSubscription(TestShardInfo);
|
||||
const newContentTopic = "/test/2/waku-filter/default";
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription2.subscribe(
|
||||
[newDecoder],
|
||||
serviceNodes.messageCollector.callback
|
||||
|
@ -449,11 +481,13 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(1, {
|
||||
expectedContentTopic: newContentTopic,
|
||||
expectedMessageText: "M2"
|
||||
expectedMessageText: "M2",
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,9 +1,5 @@
|
|||
import { createDecoder, createEncoder } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IFilterSubscription,
|
||||
LightNode
|
||||
} from "@waku/interfaces";
|
||||
import { IFilterSubscription, LightNode } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -15,13 +11,15 @@ import {
|
|||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
ClusterId,
|
||||
messagePayload,
|
||||
messageText,
|
||||
runMultipleNodes,
|
||||
teardownNodesWithRedundancy,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic
|
||||
} from "./utils.js";
|
||||
|
||||
const runTests = (strictCheckNodes: boolean): void => {
|
||||
|
@ -33,10 +31,15 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
let subscription: IFilterSubscription;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[serviceNodes, waku] = await runMultipleNodes(this.ctx, [
|
||||
DefaultPubsubTopic
|
||||
]);
|
||||
subscription = await waku.filter.createSubscription();
|
||||
[serviceNodes, waku] = await runMultipleNodes(this.ctx, {
|
||||
contentTopics: [TestContentTopic],
|
||||
clusterId: ClusterId
|
||||
});
|
||||
|
||||
subscription = await waku.filter.createSubscription({
|
||||
contentTopics: [TestContentTopic],
|
||||
clusterId: ClusterId
|
||||
});
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -77,8 +80,11 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
serviceNodes.messageCollector.callback
|
||||
);
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription.subscribe(
|
||||
[newDecoder],
|
||||
serviceNodes.messageCollector.callback
|
||||
|
@ -109,8 +115,11 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
serviceNodes.messageCollector.callback
|
||||
);
|
||||
const newContentTopic = "/test/2/waku-filter";
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
await subscription.subscribe(
|
||||
[newDecoder],
|
||||
serviceNodes.messageCollector.callback
|
||||
|
@ -186,7 +195,7 @@ const runTests = (strictCheckNodes: boolean): void => {
|
|||
it("Unsubscribes all - node subscribed to 10 topics", async function () {
|
||||
// Subscribe to 10 topics and send message
|
||||
const topicCount = 10;
|
||||
const td = generateTestData(topicCount);
|
||||
const td = generateTestData(topicCount, { pubsubTopic: TestPubsubTopic });
|
||||
await subscription.subscribe(
|
||||
td.decoders,
|
||||
serviceNodes.messageCollector.callback
|
||||
|
|
|
@ -1,6 +1,5 @@
import { createDecoder, createEncoder, waitForRemotePeer } from "@waku/core";
import {
DefaultPubsubTopic,
IFilterSubscription,
LightNode,
ProtocolCreateOptions,
@ -10,7 +9,7 @@ import {
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import {
ensureShardingConfigured,
contentTopicToPubsubTopic,
Logger,
shardInfoToPubsubTopics
} from "@waku/utils";
@ -26,9 +25,21 @@ import {

// Constants for test configuration.
export const log = new Logger("test:filter");
export const TestContentTopic = "/test/1/waku-filter";
export const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
export const TestDecoder = createDecoder(TestContentTopic);
export const TestContentTopic = "/test/1/waku-filter/default";
export const ClusterId = 2;
export const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: ClusterId
};
export const TestPubsubTopic = contentTopicToPubsubTopic(
TestContentTopic,
ClusterId
);
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopic: TestPubsubTopic
});
export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic);
export const messageText = "Filtering works!";
export const messagePayload = { payload: utf8ToBytes(messageText) };
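The constants above are the crux of the change: instead of defaulting everything to DefaultPubsubTopic, the suite derives its pubsub topic from the content topic via auto sharding. A small sketch of that relationship, assuming the contentTopicToPubsubTopic signature used above; the exact shard index is computed internally by hashing the content topic, and the topic string format shown in the comment is an assumption:

// Illustrative sketch only, not part of the commit.
import { createDecoder, createEncoder } from "@waku/core";
import { contentTopicToPubsubTopic } from "@waku/utils";

const contentTopic = "/test/1/waku-filter/default";
const clusterId = 2;

// Auto sharding: the content topic is mapped onto a shard of the cluster,
// yielding a pubsub topic roughly of the form "/waku/2/rs/<clusterId>/<shard>".
const pubsubTopic = contentTopicToPubsubTopic(contentTopic, clusterId);

// Encoder and decoder are pinned to that derived topic so publish and
// subscribe paths agree without any DefaultPubsubTopic fallback.
const encoder = createEncoder({ contentTopic, pubsubTopic });
const decoder = createDecoder(contentTopic, pubsubTopic);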
@ -55,20 +66,19 @@ export async function validatePingError(

export async function runMultipleNodes(
context: Context,
//TODO: change this to use `ShardInfo` instead of `string[]`
pubsubTopics: string[],
shardInfo: ShardingParams,
strictChecking: boolean = false,
shardInfo?: ShardingParams,
numServiceNodes = 3,
withoutFilter = false
): Promise<[ServiceNodesFleet, LightNode]> {
const pubsubTopics = shardInfoToPubsubTopics(shardInfo);
// create numServiceNodes nodes
const serviceNodes = await ServiceNodesFleet.createAndRun(
context,
pubsubTopics,
numServiceNodes,
strictChecking,
shardInfo ? ensureShardingConfigured(shardInfo).shardInfo : shardInfo,
shardInfo,
undefined,
withoutFilter
);
@ -78,11 +88,8 @@ export async function runMultipleNodes(
libp2p: {
addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] }
},
pubsubTopics: shardInfo ? shardInfoToPubsubTopics(shardInfo) : pubsubTopics,
...((pubsubTopics.length !== 1 ||
pubsubTopics[0] !== DefaultPubsubTopic) && {
shardInfo: shardInfo
})
pubsubTopics,
shardInfo
};

log.info("Starting js waku node with :", JSON.stringify(waku_options));
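With the signature change above, callers no longer hand runMultipleNodes a topic list; they pass the sharding description and the helper derives the pubsub topics itself. A hedged sketch of the call shapes that appear elsewhere in this diff, using the TestContentTopic and ClusterId constants defined above:

// Illustrative sketch only, not part of the commit.
// Content-topic (auto) sharding, as used by the filter test suites:
const [serviceNodes, waku] = await runMultipleNodes(
  this.ctx,
  { contentTopics: [TestContentTopic], clusterId: ClusterId },
  strictCheckNodes
);

// The same ShardingParams object is then reused for the subscription:
const subscription = await waku.filter.createSubscription({
  contentTopics: [TestContentTopic],
  clusterId: ClusterId
});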
@ -1,10 +1,5 @@
|
|||
import { createEncoder } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IRateLimitProof,
|
||||
LightNode,
|
||||
ProtocolError
|
||||
} from "@waku/interfaces";
|
||||
import { IRateLimitProof, LightNode, ProtocolError } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -24,8 +19,10 @@ import {
|
|||
messagePayload,
|
||||
messageText,
|
||||
TestContentTopic,
|
||||
TestEncoder
|
||||
} from "./utils";
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
|
||||
const runTests = (strictNodeCheck: boolean): void => {
|
||||
const numServiceNodes = 3;
|
||||
|
@ -38,9 +35,8 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
beforeEachCustom(this, async () => {
|
||||
[serviceNodes, waku] = await runMultipleNodes(
|
||||
this.ctx,
|
||||
[DefaultPubsubTopic],
|
||||
TestShardInfo,
|
||||
strictNodeCheck,
|
||||
undefined,
|
||||
numServiceNodes,
|
||||
true
|
||||
);
|
||||
|
@ -57,12 +53,15 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: testItem.value,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -77,14 +76,17 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
}
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(30)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(30, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
|
||||
for (let i = 0; i < 30; i++) {
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(i, {
|
||||
expectedMessageText: generateMessageText(i),
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
}
|
||||
});
|
||||
|
@ -95,21 +97,23 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
});
|
||||
|
||||
expect(pushResponse.successes.length).to.eq(0);
|
||||
console.log("validated 1");
|
||||
|
||||
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
||||
ProtocolError.EMPTY_PAYLOAD
|
||||
);
|
||||
console.log("validated 2");
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
false
|
||||
);
|
||||
console.log("validated 3");
|
||||
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
});
|
||||
|
||||
TEST_STRING.forEach((testItem) => {
|
||||
it(`Push message with content topic containing ${testItem.description}`, async function () {
|
||||
const customEncoder = createEncoder({
|
||||
contentTopic: testItem.value
|
||||
contentTopic: testItem.value,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const pushResponse = await waku.lightPush.send(
|
||||
customEncoder,
|
||||
|
@ -117,12 +121,15 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
);
|
||||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: testItem.value
|
||||
expectedContentTopic: testItem.value,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -141,7 +148,8 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
it("Push message with meta", async function () {
|
||||
const customTestEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
metaSetter: () => new Uint8Array(10)
|
||||
metaSetter: () => new Uint8Array(10),
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
const pushResponse = await waku.lightPush.send(
|
||||
|
@ -150,18 +158,22 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
);
|
||||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
it("Fails to push message with large meta", async function () {
|
||||
const customTestEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
pubsubTopic: TestPubsubTopic,
|
||||
metaSetter: () => new Uint8Array(105024) // see the note below ***
|
||||
});
|
||||
|
||||
|
@ -179,21 +191,26 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
|
||||
if (serviceNodes.type == "go-waku") {
|
||||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
} else {
|
||||
expect(pushResponse.successes.length).to.eq(0);
|
||||
expect(
|
||||
pushResponse.failures?.map((failure) => failure.error)
|
||||
).to.include(ProtocolError.REMOTE_PEER_REJECTED);
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
false
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -214,12 +231,15 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -235,13 +255,16 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
||||
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedTimestamp: testItem,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -264,9 +287,11 @@ const runTests = (strictNodeCheck: boolean): void => {
|
|||
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
||||
ProtocolError.SIZE_TOO_BIG
|
||||
);
|
||||
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
||||
false
|
||||
);
|
||||
expect(
|
||||
await serviceNodes.messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
|
|
@ -1,10 +1,5 @@
|
|||
import { createEncoder } from "@waku/core";
|
||||
import {
|
||||
DefaultPubsubTopic,
|
||||
IRateLimitProof,
|
||||
LightNode,
|
||||
ProtocolError
|
||||
} from "@waku/interfaces";
|
||||
import { IRateLimitProof, LightNode, ProtocolError } from "@waku/interfaces";
|
||||
import { utf8ToBytes } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -22,7 +17,9 @@ import {
|
|||
messageText,
|
||||
runNodes,
|
||||
TestContentTopic,
|
||||
TestEncoder
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "../utils.js";
|
||||
|
||||
describe("Waku Light Push: Single Node", function () {
|
||||
|
@ -33,10 +30,10 @@ describe("Waku Light Push: Single Node", function () {
|
|||
let messageCollector: MessageCollector;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(this.ctx, [DefaultPubsubTopic]);
|
||||
[nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
|
||||
messageCollector = new MessageCollector(nwaku);
|
||||
|
||||
await nwaku.ensureSubscriptions();
|
||||
await nwaku.ensureSubscriptions([TestPubsubTopic]);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -50,10 +47,15 @@ describe("Waku Light Push: Single Node", function () {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: testItem.value,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -68,12 +70,17 @@ describe("Waku Light Push: Single Node", function () {
|
|||
expect(pushResponse.successes.length).to.eq(1);
|
||||
}
|
||||
|
||||
expect(await messageCollector.waitForMessages(30)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(30, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
|
||||
for (let i = 0; i < 30; i++) {
|
||||
messageCollector.verifyReceivedMessage(i, {
|
||||
expectedMessageText: generateMessageText(i),
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
}
|
||||
});
|
||||
|
@ -87,13 +94,18 @@ describe("Waku Light Push: Single Node", function () {
|
|||
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
||||
ProtocolError.EMPTY_PAYLOAD
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
});
|
||||
|
||||
TEST_STRING.forEach((testItem) => {
|
||||
it(`Push message with content topic containing ${testItem.description}`, async function () {
|
||||
const customEncoder = createEncoder({
|
||||
contentTopic: testItem.value
|
||||
contentTopic: testItem.value,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const pushResponse = await waku.lightPush.send(
|
||||
customEncoder,
|
||||
|
@ -101,10 +113,15 @@ describe("Waku Light Push: Single Node", function () {
|
|||
);
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: testItem.value
|
||||
expectedContentTopic: testItem.value,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -123,7 +140,8 @@ describe("Waku Light Push: Single Node", function () {
|
|||
it("Push message with meta", async function () {
|
||||
const customTestEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
metaSetter: () => new Uint8Array(10)
|
||||
metaSetter: () => new Uint8Array(10),
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
const pushResponse = await waku.lightPush.send(
|
||||
|
@ -132,16 +150,22 @@ describe("Waku Light Push: Single Node", function () {
|
|||
);
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
it("Fails to push message with large meta", async function () {
|
||||
const customTestEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
pubsubTopic: TestPubsubTopic,
|
||||
metaSetter: () => new Uint8Array(105024) // see the note below ***
|
||||
});
|
||||
|
||||
|
@ -159,17 +183,26 @@ describe("Waku Light Push: Single Node", function () {
|
|||
|
||||
if (nwaku.type == "go-waku") {
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
} else {
|
||||
expect(pushResponse.successes.length).to.eq(0);
|
||||
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
||||
ProtocolError.REMOTE_PEER_REJECTED
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -190,10 +223,15 @@ describe("Waku Light Push: Single Node", function () {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -209,11 +247,16 @@ describe("Waku Light Push: Single Node", function () {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedTimestamp: testItem,
|
||||
expectedContentTopic: TestContentTopic
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedPubsubTopic: TestPubsubTopic
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -236,6 +279,10 @@ describe("Waku Light Push: Single Node", function () {
|
|||
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
||||
ProtocolError.SIZE_TOO_BIG
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
pubsubTopic: TestPubsubTopic
|
||||
})
|
||||
).to.eq(false);
|
||||
});
|
||||
});
|
@ -5,6 +5,7 @@ import {
|
|||
LightNode,
|
||||
Protocols,
|
||||
ShardInfo,
|
||||
ShardingParams,
|
||||
SingleShardInfo
|
||||
} from "@waku/interfaces";
|
||||
import {
|
||||
|
@ -15,6 +16,7 @@ import {
|
|||
} from "@waku/utils";
|
||||
import { utf8ToBytes } from "@waku/utils/bytes";
|
||||
import { expect } from "chai";
|
||||
import { Context } from "mocha";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
|
@ -32,14 +34,13 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
|
|||
let nwaku: ServiceNode;
|
||||
let nwaku2: ServiceNode;
|
||||
let messageCollector: MessageCollector;
|
||||
const customPubsubTopic1 = singleShardInfoToPubsubTopic({
|
||||
clusterId: 3,
|
||||
shard: 1
|
||||
});
|
||||
|
||||
const shardInfo: ShardInfo = { clusterId: 3, shards: [1, 2] };
|
||||
const singleShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 };
|
||||
const singleShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 };
|
||||
|
||||
const customPubsubTopic1 = singleShardInfoToPubsubTopic(singleShardInfo1);
|
||||
const customPubsubTopic2 = singleShardInfoToPubsubTopic(singleShardInfo2);
|
||||
const customContentTopic1 = "/test/2/waku-light-push/utf8";
|
||||
const customContentTopic2 = "/test/3/waku-light-push/utf8";
|
||||
const customEncoder1 = createEncoder({
|
||||
|
@ -54,14 +55,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
|
|||
let nimPeerId: PeerId;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(
|
||||
this.ctx,
|
||||
[
|
||||
singleShardInfoToPubsubTopic(singleShardInfo1),
|
||||
singleShardInfoToPubsubTopic(singleShardInfo2)
|
||||
],
|
||||
shardInfo
|
||||
);
|
||||
[nwaku, waku] = await runNodes(this.ctx, shardInfo);
|
||||
messageCollector = new MessageCollector(nwaku);
|
||||
nimPeerId = await nwaku.getPeerId();
|
||||
});
|
||||
|
@ -108,7 +102,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
|
|||
|
||||
expect(
|
||||
await messageCollector2.waitForMessages(1, {
|
||||
pubsubTopic: singleShardInfoToPubsubTopic(singleShardInfo2)
|
||||
pubsubTopic: customPubsubTopic2
|
||||
})
|
||||
).to.eq(true);
|
||||
|
||||
|
@ -120,7 +114,7 @@ describe("Waku Light Push : Multiple PubsubTopics", function () {
|
|||
messageCollector2.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M2",
|
||||
expectedContentTopic: customContentTopic2,
|
||||
expectedPubsubTopic: customPubsubTopic1
|
||||
expectedPubsubTopic: customPubsubTopic2
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -204,11 +198,7 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
|
|||
let nimPeerId: PeerId;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(
|
||||
this.ctx,
|
||||
[autoshardingPubsubTopic1, autoshardingPubsubTopic2],
|
||||
shardInfo
|
||||
);
|
||||
[nwaku, waku] = await runNodes(this.ctx, shardInfo);
|
||||
messageCollector = new MessageCollector(nwaku);
|
||||
nimPeerId = await nwaku.getPeerId();
|
||||
});
|
||||
|
@ -318,11 +308,13 @@ describe("Waku Light Push (Autosharding): Multiple PubsubTopics", function () {
|
|||
describe("Waku Light Push (named sharding): Multiple PubsubTopics", function () {
|
||||
this.timeout(30000);
|
||||
let waku: LightNode;
|
||||
let waku2: LightNode;
|
||||
let nwaku: ServiceNode;
|
||||
let nwaku2: ServiceNode;
|
||||
let messageCollector: MessageCollector;
|
||||
let ctx: Context;
|
||||
|
||||
const clusterId = 0;
|
||||
const clusterId = 3;
|
||||
const customContentTopic1 = "/waku/2/content/utf8";
|
||||
const customContentTopic2 = "/myapp/1/latest/proto";
|
||||
const autoshardingPubsubTopic1 = contentTopicToPubsubTopic(
|
||||
|
@ -333,34 +325,44 @@ describe("Waku Light Push (named sharding): Multiple PubsubTopics", function ()
|
|||
customContentTopic2,
|
||||
clusterId
|
||||
);
|
||||
|
||||
const shardInfo1 = {
|
||||
clusterId,
|
||||
shards: [contentTopicToShardIndex(customContentTopic1)]
|
||||
};
|
||||
const customEncoder1 = createEncoder({
|
||||
contentTopic: customContentTopic1,
|
||||
pubsubTopicShardInfo: {
|
||||
clusterId,
|
||||
shard: contentTopicToShardIndex(customContentTopic1)
|
||||
}
|
||||
pubsubTopicShardInfo: shardInfo1
|
||||
});
|
||||
|
||||
const shardInfo2 = {
|
||||
clusterId,
|
||||
shards: [contentTopicToShardIndex(customContentTopic2)]
|
||||
};
|
||||
const customEncoder2 = createEncoder({
|
||||
contentTopic: customContentTopic2,
|
||||
pubsubTopicShardInfo: {
|
||||
clusterId,
|
||||
shard: contentTopicToShardIndex(customContentTopic2)
|
||||
}
|
||||
pubsubTopicShardInfo: shardInfo2
|
||||
});
|
||||
|
||||
const testShardInfo: ShardingParams = {
|
||||
clusterId,
|
||||
shards: [
|
||||
contentTopicToShardIndex(customContentTopic1),
|
||||
contentTopicToShardIndex(customContentTopic2)
|
||||
]
|
||||
};
|
||||
|
||||
let nimPeerId: PeerId;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
[nwaku, waku] = await runNodes(this.ctx, [
|
||||
autoshardingPubsubTopic1,
|
||||
autoshardingPubsubTopic2
|
||||
]);
|
||||
ctx = this.ctx;
|
||||
[nwaku, waku] = await runNodes(ctx, testShardInfo);
|
||||
messageCollector = new MessageCollector(nwaku);
|
||||
nimPeerId = await nwaku.getPeerId();
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
await tearDownNodes([nwaku, nwaku2], waku);
|
||||
await tearDownNodes([nwaku, nwaku2], [waku, waku2]);
|
||||
});
|
||||
|
||||
it("Push message on custom pubsubTopic", async function () {
|
||||
|
@ -419,13 +421,7 @@ describe("Waku Light Push (named sharding): Multiple PubsubTopics", function ()
|
|||
|
||||
it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () {
|
||||
// Set up and start a new nwaku node with Default PubsubTopic
|
||||
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
|
||||
await nwaku2.start({
|
||||
filter: true,
|
||||
lightpush: true,
|
||||
relay: true,
|
||||
pubsubTopic: [autoshardingPubsubTopic2]
|
||||
});
|
||||
[nwaku2, waku2] = await runNodes(ctx, shardInfo2);
|
||||
await nwaku2.ensureSubscriptions([autoshardingPubsubTopic2]);
|
||||
await waku.dial(await nwaku2.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, [Protocols.LightPush]);
|
||||
@@ -1,78 +1,26 @@
-import { createEncoder, waitForRemotePeer } from "@waku/core";
-import {
-  ContentTopicInfo,
-  DefaultPubsubTopic,
-  LightNode,
-  Protocols,
-  ShardingParams
-} from "@waku/interfaces";
-import { createLightNode, utf8ToBytes } from "@waku/sdk";
-import { Logger } from "@waku/utils";
+import { createEncoder } from "@waku/core";
+import { utf8ToBytes } from "@waku/sdk";
+import { contentTopicToPubsubTopic, Logger } from "@waku/utils";
 
-import { makeLogFileName, NOISE_KEY_1, ServiceNode } from "../../src/index.js";
+import { runNodes } from "../filter/single_node/utils.js";
 
 // Constants for test configuration.
 export const log = new Logger("test:lightpush");
 export const TestContentTopic = "/test/1/waku-light-push/utf8";
-export const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
+export const ClusterId = 3;
+export const TestPubsubTopic = contentTopicToPubsubTopic(
+  TestContentTopic,
+  ClusterId
+);
+export const TestShardInfo = {
+  contentTopics: [TestContentTopic],
+  clusterId: ClusterId
+};
+export const TestEncoder = createEncoder({
+  contentTopic: TestContentTopic,
+  pubsubTopic: TestPubsubTopic
+});
 export const messageText = "Light Push works!";
 export const messagePayload = { payload: utf8ToBytes(messageText) };
 
-export async function runNodes(
-  context: Mocha.Context,
-  pubsubTopics: string[],
-  shardInfo?: ShardingParams
-): Promise<[ServiceNode, LightNode]> {
-  const nwaku = new ServiceNode(makeLogFileName(context));
-
-  function isContentTopicInfo(info: ShardingParams): info is ContentTopicInfo {
-    return (info as ContentTopicInfo).contentTopics !== undefined;
-  }
-
-  await nwaku.start(
-    {
-      lightpush: true,
-      filter: true,
-      relay: true,
-      pubsubTopic: pubsubTopics,
-      // Conditionally include clusterId if shardInfo exists
-      ...(shardInfo && { clusterId: shardInfo.clusterId }),
-      // Conditionally include contentTopic if shardInfo exists and clusterId is 1
-      ...(shardInfo &&
-        isContentTopicInfo(shardInfo) &&
-        shardInfo.clusterId === 1 && { contentTopic: shardInfo.contentTopics })
-    },
-    { retries: 3 }
-  );
-
-  let waku: LightNode | undefined;
-  try {
-    waku = await createLightNode({
-      ...((pubsubTopics.length !== 1 ||
-        pubsubTopics[0] !== DefaultPubsubTopic) && {
-        shardInfo: shardInfo
-      }),
-      pubsubTopics: shardInfo ? undefined : pubsubTopics,
-      staticNoiseKey: NOISE_KEY_1
-    });
-    await waku.start();
-  } catch (error) {
-    log.error("jswaku node failed to start:", error);
-  }
-
-  if (waku) {
-    await waku.dial(await nwaku.getMultiaddrWithId());
-    await waitForRemotePeer(waku, [Protocols.LightPush]);
-    if (
-      shardInfo &&
-      "contentTopics" in shardInfo &&
-      shardInfo.contentTopics.length > 0
-    ) {
-      await nwaku.ensureSubscriptionsAutosharding(shardInfo.contentTopics);
-    }
-    await nwaku.ensureSubscriptions(pubsubTopics);
-    return [nwaku, waku];
-  } else {
-    throw new Error("Failed to initialize waku");
-  }
-}
+export { runNodes };
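// For reference, a minimal sketch (assuming the exports above plus the shared
// MessageCollector / beforeEachCustom helpers from ../../src/index.js) of how a
// spec consumes these helpers after the switch to ShardingParams:
import { LightNode } from "@waku/interfaces";
import { expect } from "chai";

import {
  beforeEachCustom,
  MessageCollector,
  ServiceNode
} from "../../src/index.js";

import {
  messagePayload,
  runNodes,
  TestEncoder,
  TestPubsubTopic,
  TestShardInfo
} from "../utils.js";

describe("Example: light push with auto sharding", function () {
  let nwaku: ServiceNode;
  let waku: LightNode;
  let messageCollector: MessageCollector;

  beforeEachCustom(this, async () => {
    // runNodes now takes ShardingParams (the content-topic based TestShardInfo)
    // instead of a list of pubsub topics.
    [nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
    messageCollector = new MessageCollector(nwaku);
    await nwaku.ensureSubscriptions([TestPubsubTopic]);
  });

  it("pushes on the pubsub topic derived from the content topic", async function () {
    // TestEncoder already carries the matching pubsubTopic, so the message
    // lands on the auto-sharded topic the service node subscribed to.
    const pushResponse = await waku.lightPush.send(TestEncoder, messagePayload);
    expect(pushResponse.successes.length).to.eq(1);
    expect(
      await messageCollector.waitForMessages(1, { pubsubTopic: TestPubsubTopic })
    ).to.eq(true);
  });
});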
@@ -13,7 +13,6 @@ import {
   createDecoder as createSymDecoder,
   createEncoder as createSymEncoder
 } from "@waku/message-encryption/symmetric";
-import { createRelayNode } from "@waku/sdk/relay";
 import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
 import { expect } from "chai";

@@ -21,12 +20,10 @@ import {
   afterEachCustom,
   beforeEachCustom,
   delay,
-  NOISE_KEY_1,
-  NOISE_KEY_2,
   tearDownNodes
 } from "../../src/index.js";
 
-import { log, waitForAllRemotePeers } from "./utils.js";
+import { runJSNodes, TestPubsubTopic } from "./utils.js";
 
||||
describe("Waku Relay", function () {
|
||||
this.timeout(15000);
|
||||
|
@ -34,24 +31,7 @@ describe("Waku Relay", function () {
|
|||
let waku2: RelayNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
log.info("Starting JS Waku instances");
|
||||
[waku1, waku2] = await Promise.all([
|
||||
createRelayNode({ staticNoiseKey: NOISE_KEY_1 }).then((waku) =>
|
||||
waku.start().then(() => waku)
|
||||
),
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_2,
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
|
||||
}).then((waku) => waku.start().then(() => waku))
|
||||
]);
|
||||
log.info("Instances started, adding waku2 to waku1's address book");
|
||||
await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
|
||||
multiaddrs: waku2.libp2p.getMultiaddrs()
|
||||
});
|
||||
await waku1.dial(waku2.libp2p.peerId);
|
||||
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
log.info("before each hook done");
|
||||
[waku1, waku2] = await runJSNodes();
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -70,15 +50,21 @@ describe("Waku Relay", function () {
|
|||
|
||||
const eciesEncoder = createEciesEncoder({
|
||||
contentTopic: asymTopic,
|
||||
publicKey
|
||||
publicKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const symEncoder = createSymEncoder({
|
||||
contentTopic: symTopic,
|
||||
symKey
|
||||
symKey,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
|
||||
const eciesDecoder = createEciesDecoder(asymTopic, privateKey);
|
||||
const symDecoder = createSymDecoder(symTopic, symKey);
|
||||
const eciesDecoder = createEciesDecoder(
|
||||
asymTopic,
|
||||
privateKey,
|
||||
TestPubsubTopic
|
||||
);
|
||||
const symDecoder = createSymDecoder(symTopic, symKey, TestPubsubTopic);
|
||||
|
||||
const msgs: DecodedMessage[] = [];
|
||||
void waku2.relay.subscribe([eciesDecoder], (wakuMsg) => {
|
||||
|
@ -106,7 +92,7 @@ describe("Waku Relay", function () {
|
|||
const messageText =
|
||||
"Published on content topic with added then deleted observer";
|
||||
|
||||
const contentTopic = "added-then-deleted-observer";
|
||||
const contentTopic = "/test/1/observer/proto";
|
||||
|
||||
// The promise **fails** if we receive a message on this observer.
|
||||
const receivedMsgPromise: Promise<DecodedMessage> = new Promise(
|
@ -1,6 +1,6 @@
|
|||
import type { PeerId } from "@libp2p/interface";
|
||||
import { DecodedMessage, waitForRemotePeer } from "@waku/core";
|
||||
import { DefaultPubsubTopic, Protocols, RelayNode } from "@waku/interfaces";
|
||||
import { Protocols, RelayNode } from "@waku/interfaces";
|
||||
import { createRelayNode } from "@waku/sdk/relay";
|
||||
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
|
||||
import { expect } from "chai";
|
||||
|
@ -10,15 +10,20 @@ import {
|
|||
base64ToUtf8,
|
||||
beforeEachCustom,
|
||||
delay,
|
||||
makeLogFileName,
|
||||
NOISE_KEY_1,
|
||||
NOISE_KEY_2,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
import { MessageRpcResponse } from "../../src/types.js";
|
||||
|
||||
import { TestContentTopic, TestDecoder, TestEncoder } from "./utils.js";
|
||||
import {
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
import { runRelayNodes } from "./utils.js";
|
||||
|
||||
describe("Waku Relay, Interop", function () {
|
||||
this.timeout(15000);
|
||||
|
@ -26,19 +31,7 @@ describe("Waku Relay, Interop", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
waku = await createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1
|
||||
});
|
||||
await waku.start();
|
||||
|
||||
nwaku = new ServiceNode(this.ctx.test?.ctx?.currentTest?.title + "");
|
||||
await nwaku.start({ relay: true });
|
||||
|
||||
await waku.dial(await nwaku.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, [Protocols.Relay]);
|
||||
|
||||
// Nwaku subscribe to the default pubsub topic
|
||||
await nwaku.ensureSubscriptions();
|
||||
[nwaku, waku] = await runRelayNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -51,7 +44,7 @@ describe("Waku Relay, Interop", function () {
|
|||
while (subscribers.length === 0) {
|
||||
await delay(200);
|
||||
subscribers =
|
||||
waku.libp2p.services.pubsub!.getSubscribers(DefaultPubsubTopic);
|
||||
waku.libp2p.services.pubsub!.getSubscribers(TestPubsubTopic);
|
||||
}
|
||||
|
||||
const nimPeerId = await nwaku.getPeerId();
|
||||
|
@ -103,63 +96,38 @@ describe("Waku Relay, Interop", function () {
|
|||
expect(bytesToUtf8(receivedMsg.payload!)).to.eq(messageText);
|
||||
});
|
||||
|
||||
describe("Two nodes connected to nwaku", function () {
|
||||
let waku1: RelayNode;
|
||||
let waku2: RelayNode;
|
||||
let nwaku: ServiceNode;
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
await tearDownNodes(nwaku, [waku1, waku2]);
|
||||
it("Js publishes, other Js receives", async function () {
|
||||
const waku2 = await createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_2,
|
||||
emitSelf: true,
|
||||
shardInfo: TestShardInfo
|
||||
});
|
||||
await waku2.start();
|
||||
|
||||
it("Js publishes, other Js receives", async function () {
|
||||
[waku1, waku2] = await Promise.all([
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
emitSelf: true
|
||||
}).then((waku) => waku.start().then(() => waku)),
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_2
|
||||
}).then((waku) => waku.start().then(() => waku))
|
||||
]);
|
||||
const nwakuMultiaddr = await nwaku.getMultiaddrWithId();
|
||||
await waku2.dial(nwakuMultiaddr);
|
||||
|
||||
nwaku = new ServiceNode(makeLogFileName(this));
|
||||
await nwaku.start({ relay: true });
|
||||
await waitForRemotePeer(waku2, [Protocols.Relay]);
|
||||
|
||||
const nwakuMultiaddr = await nwaku.getMultiaddrWithId();
|
||||
await Promise.all([
|
||||
waku1.dial(nwakuMultiaddr),
|
||||
waku2.dial(nwakuMultiaddr)
|
||||
]);
|
||||
await delay(2000);
|
||||
// Check that the two JS peers are NOT directly connected
|
||||
expect(await waku.libp2p.peerStore.has(waku2.libp2p.peerId)).to.eq(false);
|
||||
expect(await waku2.libp2p.peerStore.has(waku.libp2p.peerId)).to.eq(false);
|
||||
|
||||
// Wait for identify protocol to finish
|
||||
await Promise.all([
|
||||
waitForRemotePeer(waku1, [Protocols.Relay]),
|
||||
waitForRemotePeer(waku2, [Protocols.Relay])
|
||||
]);
|
||||
const msgStr = "Hello there!";
|
||||
const message = { payload: utf8ToBytes(msgStr) };
|
||||
|
||||
await delay(2000);
|
||||
// Check that the two JS peers are NOT directly connected
|
||||
expect(await waku1.libp2p.peerStore.has(waku2.libp2p.peerId)).to.eq(
|
||||
false
|
||||
);
|
||||
expect(await waku2.libp2p.peerStore.has(waku1.libp2p.peerId)).to.eq(
|
||||
false
|
||||
);
|
||||
const waku2ReceivedMsgPromise: Promise<DecodedMessage> = new Promise(
|
||||
(resolve) => {
|
||||
void waku2.relay.subscribe(TestDecoder, resolve);
|
||||
}
|
||||
);
|
||||
|
||||
const msgStr = "Hello there!";
|
||||
const message = { payload: utf8ToBytes(msgStr) };
|
||||
await waku.relay.send(TestEncoder, message);
|
||||
const waku2ReceivedMsg = await waku2ReceivedMsgPromise;
|
||||
|
||||
const waku2ReceivedMsgPromise: Promise<DecodedMessage> = new Promise(
|
||||
(resolve) => {
|
||||
void waku2.relay.subscribe(TestDecoder, resolve);
|
||||
}
|
||||
);
|
||||
expect(bytesToUtf8(waku2ReceivedMsg.payload)).to.eq(msgStr);
|
||||
|
||||
await waku1.relay.send(TestEncoder, message);
|
||||
const waku2ReceivedMsg = await waku2ReceivedMsgPromise;
|
||||
|
||||
expect(bytesToUtf8(waku2ReceivedMsg.payload)).to.eq(msgStr);
|
||||
});
|
||||
await tearDownNodes([], waku);
|
||||
});
|
||||
});
|
|
@ -1,6 +1,5 @@
|
|||
import { createEncoder } from "@waku/core";
|
||||
import { IRateLimitProof, ProtocolError, RelayNode } from "@waku/interfaces";
|
||||
import { createRelayNode } from "@waku/sdk/relay";
|
||||
import { utf8ToBytes } from "@waku/utils/bytes";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -10,18 +9,20 @@ import {
|
|||
delay,
|
||||
generateRandomUint8Array,
|
||||
MessageCollector,
|
||||
NOISE_KEY_1,
|
||||
NOISE_KEY_2,
|
||||
tearDownNodes,
|
||||
TEST_STRING
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
log,
|
||||
messageText,
|
||||
runJSNodes,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestEncoder,
|
||||
TestExpectOptions,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo,
|
||||
TestWaitMessageOptions,
|
||||
waitForAllRemotePeers
|
||||
} from "./utils.js";
|
||||
|
||||
|
@ -32,23 +33,7 @@ describe("Waku Relay, Publish", function () {
|
|||
let messageCollector: MessageCollector;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
log.info("Starting JS Waku instances");
|
||||
[waku1, waku2] = await Promise.all([
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1
|
||||
}).then((waku) => waku.start().then(() => waku)),
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_2,
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
|
||||
}).then((waku) => waku.start().then(() => waku))
|
||||
]);
|
||||
log.info("Instances started, adding waku2 to waku1's address book");
|
||||
await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
|
||||
multiaddrs: waku2.libp2p.getMultiaddrs()
|
||||
});
|
||||
await waku1.dial(waku2.libp2p.peerId);
|
||||
log.info("before each hook done");
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
[waku1, waku2] = await runJSNodes();
|
||||
messageCollector = new MessageCollector();
|
||||
await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
|
||||
});
|
||||
|
@ -66,10 +51,12 @@ describe("Waku Relay, Publish", function () {
|
|||
expect(pushResponse.successes[0].toString()).to.eq(
|
||||
waku2.libp2p.peerId.toString()
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: testItem.value,
|
||||
expectedContentTopic: TestContentTopic
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: testItem.value
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -91,11 +78,13 @@ describe("Waku Relay, Publish", function () {
|
|||
waku2.libp2p.peerId.toString()
|
||||
);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
expectedTimestamp: testItem.valueOf()
|
||||
});
|
||||
});
|
||||
|
@ -115,20 +104,30 @@ describe("Waku Relay, Publish", function () {
|
|||
it("Fails to publish message with empty text", async function () {
|
||||
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("") });
|
||||
await delay(400);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(false);
|
||||
});
|
||||
|
||||
it("Fails to publish message with wrong content topic", async function () {
|
||||
const wrong_encoder = createEncoder({ contentTopic: "wrong" });
|
||||
const wrong_encoder = createEncoder({
|
||||
contentTopic: "/test/1/wrong/utf8",
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
await waku1.relay.send(wrong_encoder, {
|
||||
payload: utf8ToBytes("")
|
||||
});
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(false);
|
||||
});
|
||||
|
||||
it("Fails to publish message with wrong pubsubtopic", async function () {
|
||||
const wrong_encoder = createEncoder({
|
||||
pubsubTopicShardInfo: { clusterId: 3, shard: 1 },
|
||||
pubsubTopicShardInfo: {
|
||||
clusterId: TestShardInfo.clusterId,
|
||||
shard: TestShardInfo.shards[0] + 1
|
||||
},
|
||||
contentTopic: TestContentTopic
|
||||
});
|
||||
const pushResponse = await waku1.relay.send(wrong_encoder, {
|
||||
|
@ -138,7 +137,9 @@ describe("Waku Relay, Publish", function () {
|
|||
ProtocolError.TOPIC_NOT_CONFIGURED
|
||||
);
|
||||
await delay(400);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(false);
|
||||
});
|
||||
|
||||
[1024 ** 2 + 65536, 2 * 1024 ** 2].forEach((testItem) => {
|
||||
|
@ -151,7 +152,9 @@ describe("Waku Relay, Publish", function () {
|
|||
ProtocolError.SIZE_TOO_BIG
|
||||
);
|
||||
await delay(400);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(false);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(false);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -177,7 +180,9 @@ describe("Waku Relay, Publish", function () {
|
|||
expect(pushResponse.successes[0].toString()).to.eq(
|
||||
waku2.libp2p.peerId.toString()
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(2)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(2, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
});
|
||||
|
||||
// Will be skipped until https://github.com/waku-org/js-waku/issues/1464 is done
|
||||
|
@ -202,12 +207,15 @@ describe("Waku Relay, Publish", function () {
|
|||
expect(pushResponse.successes[0].toString()).to.eq(
|
||||
waku2.libp2p.peerId.toString()
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(2)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(2, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
});
|
||||
|
||||
it("Publish message with large meta", async function () {
|
||||
const customTestEncoder = createEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
pubsubTopic: TestPubsubTopic,
|
||||
metaSetter: () => new Uint8Array(10 ** 6)
|
||||
});
|
||||
|
||||
|
@ -218,7 +226,9 @@ describe("Waku Relay, Publish", function () {
|
|||
expect(pushResponse.successes[0].toString()).to.eq(
|
||||
waku2.libp2p.peerId.toString()
|
||||
);
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
});
|
||||
|
||||
it("Publish message with rate limit", async function () {
|
||||
|
@ -238,10 +248,12 @@ describe("Waku Relay, Publish", function () {
|
|||
});
|
||||
expect(pushResponse.successes.length).to.eq(1);
|
||||
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: messageText
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { createDecoder, createEncoder } from "@waku/core";
|
||||
import { DefaultPubsubTopic, RelayNode } from "@waku/interfaces";
|
||||
import { RelayNode } from "@waku/interfaces";
|
||||
import { createRelayNode } from "@waku/sdk/relay";
|
||||
import { utf8ToBytes } from "@waku/utils/bytes";
|
||||
import { expect } from "chai";
|
||||
|
@ -10,17 +10,19 @@ import {
|
|||
generateTestData,
|
||||
MessageCollector,
|
||||
NOISE_KEY_1,
|
||||
NOISE_KEY_2,
|
||||
tearDownNodes,
|
||||
TEST_STRING
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
log,
|
||||
messageText,
|
||||
TestContentTopic,
|
||||
runJSNodes,
|
||||
TestDecoder,
|
||||
TestEncoder,
|
||||
TestExpectOptions,
|
||||
TestPubsubTopic,
|
||||
TestShardInfo,
|
||||
TestWaitMessageOptions,
|
||||
waitForAllRemotePeers
|
||||
} from "./utils.js";
|
||||
|
||||
|
@ -31,22 +33,7 @@ describe("Waku Relay, Subscribe", function () {
|
|||
let messageCollector: MessageCollector;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
log.info("Starting JS Waku instances");
|
||||
[waku1, waku2] = await Promise.all([
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1
|
||||
}).then((waku) => waku.start().then(() => waku)),
|
||||
createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_2,
|
||||
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
|
||||
}).then((waku) => waku.start().then(() => waku))
|
||||
]);
|
||||
log.info("Instances started, adding waku2 to waku1's address book");
|
||||
await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
|
||||
multiaddrs: waku2.libp2p.getMultiaddrs()
|
||||
});
|
||||
await waku1.dial(waku2.libp2p.peerId);
|
||||
log.info("before each hook done");
|
||||
[waku1, waku2] = await runJSNodes();
|
||||
messageCollector = new MessageCollector(this.ctx.nwaku);
|
||||
});
|
||||
|
||||
|
@ -57,10 +44,10 @@ describe("Waku Relay, Subscribe", function () {
|
|||
it("Mutual subscription", async function () {
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
const subscribers1 = waku1.libp2p.services
|
||||
.pubsub!.getSubscribers(DefaultPubsubTopic)
|
||||
.pubsub!.getSubscribers(TestPubsubTopic)
|
||||
.map((p) => p.toString());
|
||||
const subscribers2 = waku2.libp2p.services
|
||||
.pubsub!.getSubscribers(DefaultPubsubTopic)
|
||||
.pubsub!.getSubscribers(TestPubsubTopic)
|
||||
.map((p) => p.toString());
|
||||
|
||||
expect(subscribers1).to.contain(waku2.libp2p.peerId.toString());
|
||||
|
@ -76,9 +63,16 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
it("Publish without waiting for remote peer", async function () {
|
||||
try {
|
||||
await waku1.relay.send(TestEncoder, {
|
||||
const waku = await createRelayNode({
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
shardInfo: TestShardInfo
|
||||
});
|
||||
await waku.start();
|
||||
|
||||
await waku.relay.send(TestEncoder, {
|
||||
payload: utf8ToBytes(messageText)
|
||||
});
|
||||
|
||||
throw new Error("Publish was successful but was expected to fail");
|
||||
} catch (err) {
|
||||
if (
|
||||
|
@ -91,19 +85,19 @@ describe("Waku Relay, Subscribe", function () {
|
|||
});
|
||||
|
||||
it("Subscribe and publish message", async function () {
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
|
||||
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes(messageText) });
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: TestContentTopic
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: messageText
|
||||
});
|
||||
});
|
||||
|
||||
it("Subscribe and publish 10000 messages on the same topic", async function () {
|
||||
const messageCount = 10000;
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
|
||||
// Send a unique message on each topic.
|
||||
for (let i = 0; i < messageCount; i++) {
|
||||
|
@ -114,13 +108,16 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
// Verify that each message was received on the corresponding topic.
|
||||
expect(
|
||||
await messageCollector.waitForMessages(messageCount, { exact: true })
|
||||
await messageCollector.waitForMessages(messageCount, {
|
||||
...TestWaitMessageOptions,
|
||||
exact: true
|
||||
})
|
||||
).to.eq(true);
|
||||
|
||||
for (let i = 0; i < messageCount; i++) {
|
||||
messageCollector.verifyReceivedMessage(i, {
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: `M${i + 1}`,
|
||||
expectedContentTopic: TestContentTopic,
|
||||
checkTimestamp: false
|
||||
});
|
||||
}
|
||||
|
@ -128,31 +125,36 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
it("Subscribe and publish messages on 2 different content topics", async function () {
|
||||
const secondContentTopic = "/test/2/waku-relay/utf8";
|
||||
const secondEncoder = createEncoder({ contentTopic: secondContentTopic });
|
||||
const secondDecoder = createDecoder(secondContentTopic);
|
||||
const secondEncoder = createEncoder({
|
||||
contentTopic: secondContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const secondDecoder = createDecoder(secondContentTopic, TestPubsubTopic);
|
||||
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
|
||||
await waku2.relay.subscribe([secondDecoder], messageCollector.callback);
|
||||
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("M1") });
|
||||
await waku1.relay.send(secondEncoder, { payload: utf8ToBytes("M2") });
|
||||
expect(await messageCollector.waitForMessages(2, { exact: true })).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(2, {
|
||||
...TestWaitMessageOptions,
|
||||
exact: true
|
||||
})
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
expectedMessageText: "M1",
|
||||
expectedContentTopic: TestContentTopic
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: "M1"
|
||||
});
|
||||
messageCollector.verifyReceivedMessage(1, {
|
||||
expectedMessageText: "M2",
|
||||
expectedContentTopic: secondContentTopic
|
||||
...TestExpectOptions,
|
||||
expectedContentTopic: secondEncoder.contentTopic,
|
||||
expectedMessageText: "M2"
|
||||
});
|
||||
});
|
||||
|
||||
it("Subscribe one by one to 100 topics and publish messages", async function () {
|
||||
const topicCount = 100;
|
||||
const td = generateTestData(topicCount);
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
const td = generateTestData(topicCount, TestWaitMessageOptions);
|
||||
|
||||
// Subscribe to topics one by one
|
||||
for (let i = 0; i < topicCount; i++) {
|
||||
|
@ -168,10 +170,14 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
// Verify that each message was received on the corresponding topic.
|
||||
expect(
|
||||
await messageCollector.waitForMessages(topicCount, { exact: true })
|
||||
await messageCollector.waitForMessages(topicCount, {
|
||||
...TestWaitMessageOptions,
|
||||
exact: true
|
||||
})
|
||||
).to.eq(true);
|
||||
td.contentTopics.forEach((topic, index) => {
|
||||
messageCollector.verifyReceivedMessage(index, {
|
||||
...TestExpectOptions,
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`
|
||||
});
|
||||
|
@ -180,8 +186,7 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
it("Subscribe at once to 10000 topics and publish messages", async function () {
|
||||
const topicCount = 10000;
|
||||
const td = generateTestData(topicCount);
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
const td = generateTestData(topicCount, TestWaitMessageOptions);
|
||||
|
||||
// Subscribe to all topics at once
|
||||
await waku2.relay.subscribe(td.decoders, messageCollector.callback);
|
||||
|
@ -195,10 +200,14 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
// Verify that each message was received on the corresponding topic.
|
||||
expect(
|
||||
await messageCollector.waitForMessages(topicCount, { exact: true })
|
||||
await messageCollector.waitForMessages(topicCount, {
|
||||
...TestWaitMessageOptions,
|
||||
exact: true
|
||||
})
|
||||
).to.eq(true);
|
||||
td.contentTopics.forEach((topic, index) => {
|
||||
messageCollector.verifyReceivedMessage(index, {
|
||||
...TestExpectOptions,
|
||||
expectedContentTopic: topic,
|
||||
expectedMessageText: `Message for Topic ${index + 1}`,
|
||||
checkTimestamp: false
|
||||
|
@ -208,26 +217,26 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
// Will be skipped until https://github.com/waku-org/js-waku/issues/1678 is fixed
|
||||
it.skip("Refresh subscription", async function () {
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
|
||||
await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
|
||||
await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
|
||||
|
||||
await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("M1") });
|
||||
|
||||
expect(await messageCollector.waitForMessages(1, { exact: true })).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, {
|
||||
...TestWaitMessageOptions,
|
||||
exact: true
|
||||
})
|
||||
).to.eq(true);
|
||||
});
|
||||
|
||||
// Will be skipped until https://github.com/waku-org/js-waku/issues/1678 is fixed
|
||||
it.skip("Overlapping topic subscription", async function () {
|
||||
// Define two sets of test data with overlapping topics.
|
||||
const topicCount1 = 2;
|
||||
const td1 = generateTestData(topicCount1);
|
||||
const td1 = generateTestData(topicCount1, TestWaitMessageOptions);
|
||||
const topicCount2 = 4;
|
||||
const td2 = generateTestData(topicCount2);
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
const td2 = generateTestData(topicCount2, TestWaitMessageOptions);
|
||||
|
||||
// Subscribe to the first set of topics.
|
||||
await waku2.relay.subscribe(td1.decoders, messageCollector.callback);
|
||||
|
@ -252,23 +261,33 @@ describe("Waku Relay, Subscribe", function () {
|
|||
|
||||
// Check if all messages were received.
|
||||
// Since there are overlapping topics, there should be 6 messages in total (2 from the first set + 4 from the second set).
|
||||
expect(await messageCollector.waitForMessages(6, { exact: true })).to.eq(
|
||||
true
|
||||
);
|
||||
expect(
|
||||
await messageCollector.waitForMessages(6, {
|
||||
...TestWaitMessageOptions,
|
||||
exact: true
|
||||
})
|
||||
).to.eq(true);
|
||||
});
|
||||
|
||||
TEST_STRING.forEach((testItem) => {
|
||||
it(`Subscribe to topic containing ${testItem.description} and publish message`, async function () {
|
||||
const newContentTopic = testItem.value;
|
||||
const newEncoder = createEncoder({ contentTopic: newContentTopic });
|
||||
const newDecoder = createDecoder(newContentTopic);
|
||||
await waitForAllRemotePeers(waku1, waku2);
|
||||
const newEncoder = createEncoder({
|
||||
contentTopic: newContentTopic,
|
||||
pubsubTopic: TestPubsubTopic
|
||||
});
|
||||
const newDecoder = createDecoder(newContentTopic, TestPubsubTopic);
|
||||
|
||||
await waku2.relay.subscribe([newDecoder], messageCollector.callback);
|
||||
await waku1.relay.send(newEncoder, {
|
||||
payload: utf8ToBytes(messageText)
|
||||
});
|
||||
expect(await messageCollector.waitForMessages(1)).to.eq(true);
|
||||
|
||||
expect(
|
||||
await messageCollector.waitForMessages(1, TestWaitMessageOptions)
|
||||
).to.eq(true);
|
||||
messageCollector.verifyReceivedMessage(0, {
|
||||
...TestExpectOptions,
|
||||
expectedMessageText: messageText,
|
||||
expectedContentTopic: newContentTopic
|
||||
});
|
||||
@@ -1,18 +1,85 @@
 import { createDecoder, createEncoder, waitForRemotePeer } from "@waku/core";
-import { Protocols, RelayNode } from "@waku/interfaces";
-import { Logger } from "@waku/utils";
+import {
+  Protocols,
+  RelayNode,
+  ShardInfo,
+  ShardingParams
+} from "@waku/interfaces";
+import { createRelayNode } from "@waku/sdk/relay";
+import { contentTopicToPubsubTopic, Logger } from "@waku/utils";
+import { Context } from "mocha";
+
+import {
+  NOISE_KEY_1,
+  NOISE_KEY_2,
+  runNodes,
+  ServiceNode
+} from "../../src/index.js";
 
 export const messageText = "Relay works!";
 export const TestContentTopic = "/test/1/waku-relay/utf8";
-export const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
-export const TestDecoder = createDecoder(TestContentTopic);
+export const TestShardInfo: ShardInfo = {
+  clusterId: 2,
+  shards: [4]
+};
+export const TestPubsubTopic = contentTopicToPubsubTopic(
+  TestContentTopic,
+  TestShardInfo.clusterId
+);
+export const TestEncoder = createEncoder({
+  contentTopic: TestContentTopic,
+  pubsubTopic: TestPubsubTopic
+});
+export const TestDecoder = createDecoder(TestContentTopic, TestPubsubTopic);
+export const TestWaitMessageOptions = { pubsubTopic: TestPubsubTopic };
+export const TestExpectOptions = {
+  expectedContentTopic: TestContentTopic,
+  expectedPubsubTopic: TestPubsubTopic
+};
 export const log = new Logger("test:relay");
 
+const RELAY_PROTOCOLS = [Protocols.Relay];
+
 export async function waitForAllRemotePeers(
   ...nodes: RelayNode[]
 ): Promise<void> {
   log.info("Wait for mutual pubsub subscription");
   await Promise.all(
-    nodes.map((node) => waitForRemotePeer(node, [Protocols.Relay]))
+    nodes.map((node): Promise<void> => waitForRemotePeer(node, RELAY_PROTOCOLS))
   );
 }
+
+export const runRelayNodes = (
+  context: Context,
+  shardInfo: ShardingParams
+): Promise<[ServiceNode, RelayNode]> =>
+  runNodes<RelayNode>({
+    shardInfo,
+    context,
+    protocols: RELAY_PROTOCOLS,
+    createNode: createRelayNode
+  });
+
+export async function runJSNodes(): Promise<[RelayNode, RelayNode]> {
+  log.info("Starting JS Waku instances");
+  const [waku1, waku2] = await Promise.all([
+    createRelayNode({
+      staticNoiseKey: NOISE_KEY_1,
+      shardInfo: TestShardInfo
+    }).then((waku) => waku.start().then(() => waku)),
+    createRelayNode({
+      staticNoiseKey: NOISE_KEY_2,
+      shardInfo: TestShardInfo,
+      libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
+    }).then((waku) => waku.start().then(() => waku))
+  ]);
+  log.info("Instances started, adding waku2 to waku1's address book");
+  await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
+    multiaddrs: waku2.libp2p.getMultiaddrs()
+  });
+  await waku1.dial(waku2.libp2p.peerId);
+  log.info("before each hook done");
+  await waitForAllRemotePeers(waku1, waku2);
+
+  return [waku1, waku2];
+}
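// For reference, a minimal sketch (assuming the exports above and the shared
// MessageCollector helper) of the publish/subscribe pattern the relay specs
// now follow on the sharded pubsub topic:
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";

import { MessageCollector } from "../../src/index.js";

import {
  messageText,
  runJSNodes,
  TestDecoder,
  TestEncoder,
  TestExpectOptions,
  TestWaitMessageOptions
} from "./utils.js";

it("relays a message between two js-waku nodes on the sharded topic", async function () {
  // Both nodes are created with the same TestShardInfo inside runJSNodes, so
  // they meet on TestPubsubTopic rather than on the old default pubsub topic.
  const [waku1, waku2] = await runJSNodes();
  const messageCollector = new MessageCollector();

  await waku2.relay.subscribe([TestDecoder], messageCollector.callback);
  await waku1.relay.send(TestEncoder, { payload: utf8ToBytes(messageText) });

  expect(
    await messageCollector.waitForMessages(1, TestWaitMessageOptions)
  ).to.eq(true);
  messageCollector.verifyReceivedMessage(0, {
    ...TestExpectOptions,
    expectedMessageText: messageText
  });
});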
|
@ -1,23 +1,22 @@
|
|||
import { DecodedMessage } from "@waku/core";
|
||||
import type { LightNode } from "@waku/interfaces";
|
||||
import { DefaultPubsubTopic } from "@waku/interfaces";
|
||||
import { bytesToUtf8 } from "@waku/utils/bytes";
|
||||
import { expect } from "chai";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
makeLogFileName,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
customShardedPubsubTopic1,
|
||||
runStoreNodes,
|
||||
sendMessages,
|
||||
startAndConnectLightNode,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestDecoder2,
|
||||
TestShardInfo,
|
||||
totalMsgs
|
||||
} from "./utils.js";
|
||||
|
||||
|
@ -28,9 +27,7 @@ describe("Waku Store, cursor", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({ store: true, lightpush: true, relay: true });
|
||||
await nwaku.ensureSubscriptions();
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -49,10 +46,9 @@ describe("Waku Store, cursor", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
messageCount,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
// messages in reversed order (first message at last index)
|
||||
const messages: DecodedMessage[] = [];
|
||||
|
@ -95,9 +91,13 @@ describe("Waku Store, cursor", function () {
|
|||
});
|
||||
|
||||
it("Reusing cursor across nodes", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
waku2 = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku2 = await startAndConnectLightNode(nwaku, TestShardInfo);
|
||||
|
||||
// messages in reversed order (first message at last index)
|
||||
const messages: DecodedMessage[] = [];
|
||||
|
@ -133,8 +133,12 @@ describe("Waku Store, cursor", function () {
|
|||
});
|
||||
|
||||
it("Passing cursor with wrong message digest", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const messages: DecodedMessage[] = [];
|
||||
for await (const page of waku.store.queryGenerator([TestDecoder])) {
|
||||
|
@ -175,8 +179,12 @@ describe("Waku Store, cursor", function () {
|
|||
});
|
||||
|
||||
it("Passing cursor with wrong pubsubTopic", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const messages: DecodedMessage[] = [];
|
||||
for await (const page of waku.store.queryGenerator([TestDecoder])) {
|
||||
|
@ -184,7 +192,7 @@ describe("Waku Store, cursor", function () {
|
|||
messages.push(msg as DecodedMessage);
|
||||
}
|
||||
}
|
||||
messages[5].pubsubTopic = customShardedPubsubTopic1;
|
||||
messages[5].pubsubTopic = TestDecoder2.pubsubTopic;
|
||||
const cursor = waku.store.createCursor(messages[5]);
|
||||
|
||||
try {
|
||||
|
@ -198,7 +206,7 @@ describe("Waku Store, cursor", function () {
|
|||
if (
|
||||
!(err instanceof Error) ||
|
||||
!err.message.includes(
|
||||
`Cursor pubsub topic (${customShardedPubsubTopic1}) does not match decoder pubsub topic (${DefaultPubsubTopic})`
|
||||
`Cursor pubsub topic (${TestDecoder2.pubsubTopic}) does not match decoder pubsub topic (${TestDecoder.pubsubTopic})`
|
||||
)
|
||||
) {
|
||||
throw err;
|
|
@ -1,21 +1,22 @@
|
|||
import { DefaultPubsubTopic } from "@waku/interfaces";
|
||||
import { createDecoder } from "@waku/core";
|
||||
import { IMessage, type LightNode } from "@waku/interfaces";
|
||||
import { determinePubsubTopic } from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
makeLogFileName,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
customDecoder1,
|
||||
customShardedPubsubTopic1,
|
||||
processQueriedMessages,
|
||||
startAndConnectLightNode,
|
||||
TestDecoder
|
||||
runStoreNodes,
|
||||
TestContentTopic1,
|
||||
TestDecoder,
|
||||
TestDecoder2,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
|
||||
describe("Waku Store, error handling", function () {
|
||||
|
@ -24,10 +25,7 @@ describe("Waku Store, error handling", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({ store: true, lightpush: true, relay: true });
|
||||
await nwaku.ensureSubscriptions();
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -35,9 +33,11 @@ describe("Waku Store, error handling", function () {
|
|||
});
|
||||
|
||||
it("Query Generator, Wrong PubsubTopic", async function () {
|
||||
const wrongDecoder = createDecoder(TestContentTopic1, "WrongPubsubTopic");
|
||||
|
||||
try {
|
||||
for await (const msgPromises of waku.store.queryGenerator([
|
||||
customDecoder1
|
||||
wrongDecoder
|
||||
])) {
|
||||
void msgPromises;
|
||||
}
|
||||
|
@ -46,7 +46,7 @@ describe("Waku Store, error handling", function () {
|
|||
if (
|
||||
!(err instanceof Error) ||
|
||||
!err.message.includes(
|
||||
`Pubsub topic ${customShardedPubsubTopic1} has not been configured on this instance. Configured topics are: ${DefaultPubsubTopic}`
|
||||
`Pubsub topic ${wrongDecoder.pubsubTopic} has not been configured on this instance. Configured topics are: ${TestDecoder.pubsubTopic}`
|
||||
)
|
||||
) {
|
||||
throw err;
|
||||
|
@ -58,7 +58,7 @@ describe("Waku Store, error handling", function () {
|
|||
try {
|
||||
for await (const msgPromises of waku.store.queryGenerator([
|
||||
TestDecoder,
|
||||
customDecoder1
|
||||
TestDecoder2
|
||||
])) {
|
||||
void msgPromises;
|
||||
}
|
||||
|
@ -92,26 +92,25 @@ describe("Waku Store, error handling", function () {
|
|||
});
|
||||
|
||||
it("Query Generator, No message returned", async function () {
|
||||
const WrongTestPubsubTopic = determinePubsubTopic("/test/1/wrong/utf8");
|
||||
const messages = await processQueriedMessages(
|
||||
waku,
|
||||
[TestDecoder],
|
||||
DefaultPubsubTopic
|
||||
WrongTestPubsubTopic
|
||||
);
|
||||
expect(messages?.length).eq(0);
|
||||
});
|
||||
|
||||
it("Query with Ordered Callback, Wrong PubsubTopic", async function () {
|
||||
const wrongDecoder = createDecoder(TestContentTopic1, "WrongPubsubTopic");
|
||||
try {
|
||||
await waku.store.queryWithOrderedCallback(
|
||||
[customDecoder1],
|
||||
async () => {}
|
||||
);
|
||||
await waku.store.queryWithOrderedCallback([wrongDecoder], async () => {});
|
||||
throw new Error("QueryGenerator was successful but was expected to fail");
|
||||
} catch (err) {
|
||||
if (
|
||||
!(err instanceof Error) ||
|
||||
!err.message.includes(
|
||||
`Pubsub topic ${customShardedPubsubTopic1} has not been configured on this instance. Configured topics are: ${DefaultPubsubTopic}`
|
||||
`Pubsub topic ${wrongDecoder.pubsubTopic} has not been configured on this instance. Configured topics are: ${TestDecoder.pubsubTopic}`
|
||||
)
|
||||
) {
|
||||
throw err;
|
||||
|
@ -122,7 +121,7 @@ describe("Waku Store, error handling", function () {
|
|||
it("Query with Ordered Callback, Multiple PubsubTopics", async function () {
|
||||
try {
|
||||
await waku.store.queryWithOrderedCallback(
|
||||
[TestDecoder, customDecoder1],
|
||||
[TestDecoder, TestDecoder2],
|
||||
async () => {}
|
||||
);
|
||||
throw new Error("QueryGenerator was successful but was expected to fail");
|
||||
|
@ -161,17 +160,15 @@ describe("Waku Store, error handling", function () {
|
|||
});
|
||||
|
||||
it("Query with Promise Callback, Wrong PubsubTopic", async function () {
|
||||
const wrongDecoder = createDecoder(TestContentTopic1, "WrongPubsubTopic");
|
||||
try {
|
||||
await waku.store.queryWithPromiseCallback(
|
||||
[customDecoder1],
|
||||
async () => {}
|
||||
);
|
||||
await waku.store.queryWithPromiseCallback([wrongDecoder], async () => {});
|
||||
throw new Error("QueryGenerator was successful but was expected to fail");
|
||||
} catch (err) {
|
||||
if (
|
||||
!(err instanceof Error) ||
|
||||
!err.message.includes(
|
||||
`Pubsub topic ${customShardedPubsubTopic1} has not been configured on this instance. Configured topics are: ${DefaultPubsubTopic}`
|
||||
`Pubsub topic ${wrongDecoder.pubsubTopic} has not been configured on this instance. Configured topics are: ${TestDecoder.pubsubTopic}`
|
||||
)
|
||||
) {
|
||||
throw err;
|
||||
|
@ -182,7 +179,7 @@ describe("Waku Store, error handling", function () {
|
|||
it("Query with Promise Callback, Multiple PubsubTopics", async function () {
|
||||
try {
|
||||
await waku.store.queryWithPromiseCallback(
|
||||
[TestDecoder, customDecoder1],
|
||||
[TestDecoder, TestDecoder2],
|
||||
async () => {}
|
||||
);
|
||||
throw new Error("QueryGenerator was successful but was expected to fail");
|
|
@ -1,6 +1,6 @@
import { createDecoder, DecodedMessage, waitForRemotePeer } from "@waku/core";
import type { IMessage, LightNode } from "@waku/interfaces";
import { DefaultPubsubTopic, Protocols } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import {
generatePrivateKey,
generateSymmetricKey,
@ -22,7 +22,6 @@ import {
afterEachCustom,
beforeEachCustom,
delay,
makeLogFileName,
MessageCollector,
ServiceNode,
tearDownNodes,
@ -30,20 +29,21 @@ import {
} from "../../src/index.js";
import {
customContentTopic1,
log,
messageText,
processQueriedMessages,
runStoreNodes,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestContentTopic1,
TestDecoder,
TestDecoder2,
TestEncoder,
TestPubsubTopic1,
TestShardInfo,
totalMsgs
} from "./utils.js";
const secondDecoder = createDecoder(customContentTopic1);
describe("Waku Store, general", function () {
this.timeout(15000);
let waku: LightNode;
@ -51,9 +51,7 @@ describe("Waku Store, general", function () {
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
nwaku = new ServiceNode(makeLogFileName(this.ctx));
await nwaku.start({ store: true, lightpush: true, relay: true });
await nwaku.ensureSubscriptions();
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
});
afterEachCustom(this, async () => {
@ -61,12 +59,17 @@ describe("Waku Store, general", function () {
});
it("Query generator for multiple messages", async function () {
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
waku = await startAndConnectLightNode(nwaku);
await sendMessages(
nwaku,
totalMsgs,
TestDecoder.contentTopic,
TestDecoder.pubsubTopic
);
const messages = await processQueriedMessages(
waku,
[TestDecoder],
DefaultPubsubTopic
TestDecoder.pubsubTopic
);
expect(messages?.length).eq(totalMsgs);
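With the decoder carrying both topics, the query side needs nothing beyond the decoder list. A hedged sketch of the equivalent flow through queryGenerator, assuming the TestDecoder helper from ./utils.js and a light node already connected to a store peer:

```typescript
import type { LightNode } from "@waku/interfaces";

import { TestDecoder } from "./utils.js"; // test helper assumed from this suite

// Sketch: the decoder is the single source of truth for its topics, so the
// query takes only the decoder list; no separate DefaultPubsubTopic argument remains.
async function countStoredMessages(waku: LightNode): Promise<number> {
  let count = 0;
  for await (const page of waku.store.queryGenerator([TestDecoder])) {
    for await (const msg of page) {
      if (msg) count += 1;
    }
  }
  return count; // the test above expects this to equal totalMsgs
}
```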
@ -84,55 +87,62 @@ describe("Waku Store, general", function () {
|
|||
await nwaku.sendMessage(
|
||||
ServiceNode.toMessageRpcQuery({
|
||||
payload: utf8ToBytes(testItem["value"]),
|
||||
contentTopic: TestContentTopic
|
||||
contentTopic: TestDecoder.contentTopic
|
||||
}),
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.pubsubTopic
|
||||
)
|
||||
).to.eq(true);
|
||||
await delay(1); // to ensure each timestamp is unique.
|
||||
}
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
const messageCollector = new MessageCollector(nwaku);
|
||||
messageCollector.list = await processQueriedMessages(
|
||||
waku,
|
||||
[TestDecoder],
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
// checking that all message sent were retrieved
|
||||
TEST_STRING.forEach((testItem) => {
|
||||
expect(
|
||||
messageCollector.hasMessage(TestContentTopic, testItem["value"])
|
||||
messageCollector.hasMessage(TestDecoder.contentTopic, testItem["value"])
|
||||
).to.eq(true);
|
||||
});
|
||||
});
|
||||
|
||||
it("Query generator for multiple messages with multiple decoders", async function () {
|
||||
const SecondDecoder = createDecoder(
|
||||
TestDecoder2.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
await nwaku.sendMessage(
|
||||
ServiceNode.toMessageRpcQuery({
|
||||
payload: utf8ToBytes("M1"),
|
||||
contentTopic: TestContentTopic
|
||||
contentTopic: TestDecoder.contentTopic
|
||||
}),
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
await nwaku.sendMessage(
|
||||
ServiceNode.toMessageRpcQuery({
|
||||
payload: utf8ToBytes("M2"),
|
||||
contentTopic: customContentTopic1
|
||||
contentTopic: SecondDecoder.contentTopic
|
||||
}),
|
||||
DefaultPubsubTopic
|
||||
SecondDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messageCollector = new MessageCollector(nwaku);
|
||||
messageCollector.list = await processQueriedMessages(
|
||||
waku,
|
||||
[TestDecoder, secondDecoder],
|
||||
DefaultPubsubTopic
|
||||
[TestDecoder, SecondDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
expect(messageCollector.hasMessage(TestDecoder.contentTopic, "M1")).to.eq(
|
||||
true
|
||||
);
|
||||
expect(messageCollector.hasMessage(SecondDecoder.contentTopic, "M2")).to.eq(
|
||||
true
|
||||
);
|
||||
expect(messageCollector.hasMessage(TestContentTopic, "M1")).to.eq(true);
|
||||
expect(messageCollector.hasMessage(customContentTopic1, "M2")).to.eq(true);
|
||||
});
|
||||
|
||||
it("Query generator for multiple messages with different content topic format", async function () {
|
||||
|
@ -143,17 +153,15 @@ describe("Waku Store, general", function () {
|
|||
payload: utf8ToBytes(messageText),
|
||||
contentTopic: testItem["value"]
|
||||
}),
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.pubsubTopic
|
||||
)
|
||||
).to.eq(true);
|
||||
await delay(1); // to ensure each timestamp is unique.
|
||||
}
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
for (const testItem of TEST_STRING) {
|
||||
for await (const query of waku.store.queryGenerator([
|
||||
createDecoder(testItem["value"])
|
||||
createDecoder(testItem["value"], TestDecoder.pubsubTopic)
|
||||
])) {
|
||||
for await (const msg of query) {
|
||||
expect(equals(msg!.payload, utf8ToBytes(messageText))).to.eq(true);
|
||||
|
@ -163,8 +171,12 @@ describe("Waku Store, general", function () {
|
|||
});
|
||||
|
||||
it("Callback on promise", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
await waku.store.queryWithPromiseCallback(
|
||||
|
@ -185,8 +197,12 @@ describe("Waku Store, general", function () {
|
|||
});
|
||||
|
||||
it("Callback on promise, aborts when callback returns true", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const desiredMsgs = 14;
|
||||
const messages: IMessage[] = [];
|
||||
|
@ -237,33 +253,35 @@ describe("Waku Store, general", function () {
|
|||
|
||||
const eciesEncoder = createEciesEncoder({
|
||||
contentTopic: asymTopic,
|
||||
publicKey
|
||||
publicKey,
|
||||
pubsubTopic: TestPubsubTopic1
|
||||
});
|
||||
const symEncoder = createSymEncoder({
|
||||
contentTopic: symTopic,
|
||||
symKey
|
||||
symKey,
|
||||
pubsubTopic: TestPubsubTopic1
|
||||
});
|
||||
|
||||
const otherEncoder = createEciesEncoder({
|
||||
contentTopic: TestContentTopic,
|
||||
contentTopic: TestContentTopic1,
|
||||
pubsubTopic: TestPubsubTopic1,
|
||||
publicKey: getPublicKey(generatePrivateKey())
|
||||
});
|
||||
|
||||
const eciesDecoder = createEciesDecoder(asymTopic, privateKey);
|
||||
const symDecoder = createSymDecoder(symTopic, symKey);
|
||||
const eciesDecoder = createEciesDecoder(
|
||||
asymTopic,
|
||||
privateKey,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
const symDecoder = createSymDecoder(
|
||||
symTopic,
|
||||
symKey,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
waku2 = await startAndConnectLightNode(nwaku);
|
||||
waku2 = await startAndConnectLightNode(nwaku, TestShardInfo);
|
||||
const nimWakuMultiaddr = await nwaku.getMultiaddrWithId();
|
||||
|
||||
await Promise.all([
|
||||
waku.dial(nimWakuMultiaddr),
|
||||
waku2.dial(nimWakuMultiaddr)
|
||||
]);
|
||||
|
||||
log.info("Waku nodes connected to nwaku");
|
||||
|
||||
await waitForRemotePeer(waku, [Protocols.LightPush]);
|
||||
await waku2.dial(nimWakuMultiaddr);
|
||||
|
||||
log.info("Sending messages using light push");
|
||||
await Promise.all([
|
||||
|
@ -298,8 +316,12 @@ describe("Waku Store, general", function () {
|
|||
});
|
||||
|
||||
it("Ordered callback, aborts when callback returns true", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const desiredMsgs = 14;
|
||||
const messages: IMessage[] = [];
|
||||
|
@ -317,12 +339,17 @@ describe("Waku Store, general", function () {
|
|||
|
||||
it("Query generator for 2000 messages", async function () {
|
||||
this.timeout(40000);
|
||||
await sendMessages(nwaku, 2000, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
2000,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const messages = await processQueriedMessages(
|
||||
waku,
|
||||
[TestDecoder],
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
expect(messages?.length).eq(2000);
|
||||
|
|
|
@ -3,8 +3,7 @@ import type { ContentTopicInfo, IMessage, LightNode } from "@waku/interfaces";
|
|||
import { createLightNode, Protocols } from "@waku/sdk";
|
||||
import {
|
||||
contentTopicToPubsubTopic,
|
||||
pubsubTopicToSingleShardInfo,
|
||||
singleShardInfosToShardInfo
|
||||
pubsubTopicToSingleShardInfo
|
||||
} from "@waku/utils";
|
||||
import { expect } from "chai";
|
||||
|
||||
|
@ -18,20 +17,13 @@ import {
|
|||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
customContentTopic1,
|
||||
customContentTopic2,
|
||||
customDecoder1,
|
||||
customDecoder2,
|
||||
customShardedPubsubTopic1,
|
||||
customShardedPubsubTopic2,
|
||||
customShardInfo1,
|
||||
customShardInfo2,
|
||||
processQueriedMessages,
|
||||
runStoreNodes,
|
||||
sendMessages,
|
||||
sendMessagesAutosharding,
|
||||
shardInfo1,
|
||||
shardInfoBothShards,
|
||||
startAndConnectLightNode,
|
||||
TestDecoder,
|
||||
TestDecoder2,
|
||||
TestShardInfo,
|
||||
totalMsgs
|
||||
} from "./utils.js";
|
||||
|
||||
|
@ -42,17 +34,7 @@ describe("Waku Store, custom pubsub topic", function () {
|
|||
let nwaku2: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({
|
||||
store: true,
|
||||
pubsubTopic: [customShardedPubsubTopic1, customShardedPubsubTopic2],
|
||||
clusterId: customShardInfo1.clusterId,
|
||||
relay: true
|
||||
});
|
||||
await nwaku.ensureSubscriptions([
|
||||
customShardedPubsubTopic1,
|
||||
customShardedPubsubTopic2
|
||||
]);
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -63,14 +45,14 @@ describe("Waku Store, custom pubsub topic", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku, [], shardInfo1);
|
||||
|
||||
const messages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
customShardedPubsubTopic1
|
||||
[TestDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
expect(messages?.length).eq(totalMsgs);
|
||||
|
@ -87,22 +69,20 @@ describe("Waku Store, custom pubsub topic", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic2,
|
||||
customShardedPubsubTopic2
|
||||
TestDecoder2.contentTopic,
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku, [], shardInfoBothShards);
|
||||
|
||||
const customMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
customShardedPubsubTopic1
|
||||
[TestDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
expect(customMessages?.length).eq(totalMsgs);
|
||||
const result1 = customMessages?.findIndex((msg) => {
|
||||
|
@ -112,8 +92,8 @@ describe("Waku Store, custom pubsub topic", function () {
|
|||
|
||||
const testMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder2],
|
||||
customShardedPubsubTopic2
|
||||
[TestDecoder2],
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
expect(testMessages?.length).eq(totalMsgs);
|
||||
const result2 = testMessages?.findIndex((msg) => {
|
||||
|
@ -129,33 +109,26 @@ describe("Waku Store, custom pubsub topic", function () {
|
|||
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
|
||||
await nwaku2.start({
|
||||
store: true,
|
||||
pubsubTopic: [customShardedPubsubTopic2],
|
||||
clusterId: customShardInfo2.clusterId,
|
||||
pubsubTopic: [TestDecoder2.pubsubTopic],
|
||||
clusterId: TestShardInfo.clusterId,
|
||||
relay: true
|
||||
});
|
||||
await nwaku2.ensureSubscriptions([customShardedPubsubTopic2]);
|
||||
await nwaku2.ensureSubscriptions([TestDecoder2.pubsubTopic]);
|
||||
|
||||
const totalMsgs = 10;
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
await sendMessages(
|
||||
nwaku2,
|
||||
totalMsgs,
|
||||
customContentTopic2,
|
||||
customShardedPubsubTopic2
|
||||
TestDecoder2.contentTopic,
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
|
||||
waku = await createLightNode({
|
||||
staticNoiseKey: NOISE_KEY_1,
|
||||
shardInfo: shardInfoBothShards
|
||||
});
|
||||
await waku.start();
|
||||
|
||||
await waku.dial(await nwaku.getMultiaddrWithId());
|
||||
await waku.dial(await nwaku2.getMultiaddrWithId());
|
||||
await waitForRemotePeer(waku, [Protocols.Store]);
|
||||
|
||||
|
@ -168,13 +141,13 @@ describe("Waku Store, custom pubsub topic", function () {
|
|||
) {
|
||||
customMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
customShardedPubsubTopic1
|
||||
[TestDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
testMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder2],
|
||||
customShardedPubsubTopic2
|
||||
[TestDecoder2],
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
}
|
||||
});
|
||||
|
@ -197,10 +170,6 @@ describe("Waku Store (Autosharding), custom pubsub topic", function () {
|
|||
customContentTopic2,
|
||||
clusterId
|
||||
);
|
||||
const contentTopicInfo1: ContentTopicInfo = {
|
||||
clusterId,
|
||||
contentTopics: [customContentTopic1]
|
||||
};
|
||||
const customDecoder1 = createDecoder(
|
||||
customContentTopic1,
|
||||
pubsubTopicToSingleShardInfo(autoshardingPubsubTopic1)
|
||||
|
@ -215,18 +184,7 @@ describe("Waku Store (Autosharding), custom pubsub topic", function () {
|
|||
};
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({
|
||||
store: true,
|
||||
pubsubTopic: [autoshardingPubsubTopic1, autoshardingPubsubTopic2],
|
||||
contentTopic: [customContentTopic1, customContentTopic2],
|
||||
relay: true,
|
||||
clusterId
|
||||
});
|
||||
await nwaku.ensureSubscriptionsAutosharding([
|
||||
customContentTopic1,
|
||||
customContentTopic2
|
||||
]);
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, contentTopicInfoBothShards);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -235,7 +193,7 @@ describe("Waku Store (Autosharding), custom pubsub topic", function () {
|
|||
|
||||
it("Generator, custom pubsub topic", async function () {
|
||||
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
|
||||
waku = await startAndConnectLightNode(nwaku, [], contentTopicInfo1);
|
||||
|
||||
const messages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
|
@ -256,12 +214,6 @@ describe("Waku Store (Autosharding), custom pubsub topic", function () {
|
|||
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic1);
|
||||
await sendMessagesAutosharding(nwaku, totalMsgs, customContentTopic2);
|
||||
|
||||
waku = await startAndConnectLightNode(
|
||||
nwaku,
|
||||
[],
|
||||
contentTopicInfoBothShards
|
||||
);
|
||||
|
||||
const customMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
|
@ -340,38 +292,8 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
|
|||
let nwaku: ServiceNode;
|
||||
let nwaku2: ServiceNode;
|
||||
|
||||
const customDecoder1 = createDecoder(
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
);
|
||||
const customDecoder2 = createDecoder(
|
||||
customContentTopic2,
|
||||
customShardedPubsubTopic2
|
||||
);
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
const shardInfo = singleShardInfosToShardInfo([
|
||||
customShardInfo1,
|
||||
customShardInfo2
|
||||
]);
|
||||
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({
|
||||
store: true,
|
||||
relay: true,
|
||||
pubsubTopic: [customShardedPubsubTopic1, customShardedPubsubTopic2],
|
||||
clusterId: shardInfo.clusterId
|
||||
});
|
||||
await nwaku.ensureSubscriptions([
|
||||
customShardedPubsubTopic1,
|
||||
customShardedPubsubTopic2
|
||||
]);
|
||||
|
||||
waku = await startAndConnectLightNode(
|
||||
nwaku,
|
||||
[customShardedPubsubTopic1, customShardedPubsubTopic2],
|
||||
shardInfo
|
||||
);
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -382,14 +304,14 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
const messages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
customShardedPubsubTopic1
|
||||
[TestDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
expect(messages?.length).eq(totalMsgs);
|
||||
|
@ -406,20 +328,20 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic2,
|
||||
customShardedPubsubTopic2
|
||||
TestDecoder2.contentTopic,
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
|
||||
const customMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
customShardedPubsubTopic1
|
||||
[TestDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
expect(customMessages?.length).eq(totalMsgs);
|
||||
const result1 = customMessages?.findIndex((msg) => {
|
||||
|
@ -429,8 +351,8 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
|
|||
|
||||
const testMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder2],
|
||||
customShardedPubsubTopic2
|
||||
[TestDecoder2],
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
expect(testMessages?.length).eq(totalMsgs);
|
||||
const result2 = testMessages?.findIndex((msg) => {
|
||||
|
@ -446,24 +368,24 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
|
|||
nwaku2 = new ServiceNode(makeLogFileName(this) + "2");
|
||||
await nwaku2.start({
|
||||
store: true,
|
||||
pubsubTopic: [customShardedPubsubTopic2],
|
||||
pubsubTopic: [TestDecoder2.pubsubTopic],
|
||||
relay: true,
|
||||
clusterId: customShardInfo2.clusterId
|
||||
clusterId: TestShardInfo.clusterId
|
||||
});
|
||||
await nwaku2.ensureSubscriptions([customShardedPubsubTopic2]);
|
||||
await nwaku2.ensureSubscriptions([TestDecoder2.pubsubTopic]);
|
||||
|
||||
const totalMsgs = 10;
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
customContentTopic1,
|
||||
customShardedPubsubTopic1
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
await sendMessages(
|
||||
nwaku2,
|
||||
totalMsgs,
|
||||
customContentTopic2,
|
||||
customShardedPubsubTopic2
|
||||
TestDecoder2.contentTopic,
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
|
||||
await waku.dial(await nwaku2.getMultiaddrWithId());
|
||||
|
@ -478,13 +400,13 @@ describe("Waku Store (named sharding), custom pubsub topic", function () {
|
|||
) {
|
||||
customMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder1],
|
||||
customShardedPubsubTopic1
|
||||
[TestDecoder],
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
testMessages = await processQueriedMessages(
|
||||
waku,
|
||||
[customDecoder2],
|
||||
customShardedPubsubTopic2
|
||||
[TestDecoder2],
|
||||
TestDecoder2.pubsubTopic
|
||||
);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -1,22 +1,20 @@
|
|||
import { DecodedMessage, PageDirection } from "@waku/core";
|
||||
import type { IMessage, LightNode } from "@waku/interfaces";
|
||||
import { DefaultPubsubTopic } from "@waku/interfaces";
|
||||
import { expect } from "chai";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
makeLogFileName,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
chunkAndReverseArray,
|
||||
runStoreNodes,
|
||||
sendMessages,
|
||||
startAndConnectLightNode,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestShardInfo,
|
||||
totalMsgs
|
||||
} from "./utils.js";
|
||||
|
||||
|
@ -26,9 +24,7 @@ describe("Waku Store, order", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({ store: true, lightpush: true, relay: true });
|
||||
await nwaku.ensureSubscriptions();
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -40,10 +36,9 @@ describe("Waku Store, order", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
for await (const query of waku.store.queryGenerator([TestDecoder], {
|
||||
|
@ -72,10 +67,9 @@ describe("Waku Store, order", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
await waku.store.queryWithPromiseCallback(
|
||||
|
@ -107,10 +101,9 @@ describe("Waku Store, order", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
await waku.store.queryWithOrderedCallback(
|
||||
|
|
|
@ -1,20 +1,18 @@
|
|||
import { DefaultPubsubTopic } from "@waku/interfaces";
|
||||
import type { LightNode } from "@waku/interfaces";
|
||||
import { expect } from "chai";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
makeLogFileName,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
runStoreNodes,
|
||||
sendMessages,
|
||||
startAndConnectLightNode,
|
||||
TestContentTopic,
|
||||
TestDecoder
|
||||
TestDecoder,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
|
||||
describe("Waku Store, page size", function () {
|
||||
|
@ -23,9 +21,7 @@ describe("Waku Store, page size", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({ store: true, lightpush: true, relay: true });
|
||||
await nwaku.ensureSubscriptions();
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -45,8 +41,8 @@ describe("Waku Store, page size", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
messageCount,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
// Determine effectivePageSize for test expectations
|
||||
|
@ -61,7 +57,6 @@ describe("Waku Store, page size", function () {
|
|||
}
|
||||
}
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
let messagesRetrieved = 0;
|
||||
for await (const query of waku.store.queryGenerator([TestDecoder], {
|
||||
pageSize: pageSize
|
||||
|
@ -86,8 +81,12 @@ describe("Waku Store, page size", function () {
|
|||
|
||||
// Possible issue here because pageSize differs across implementations
|
||||
it("Default pageSize", async function () {
|
||||
await sendMessages(nwaku, 20, TestContentTopic, DefaultPubsubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
await sendMessages(
|
||||
nwaku,
|
||||
20,
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
|
||||
let messagesRetrieved = 0;
|
||||
for await (const query of waku.store.queryGenerator([TestDecoder])) {
|
||||
|
|
|
@ -1,20 +1,18 @@
|
|||
import { DecodedMessage, PageDirection } from "@waku/core";
|
||||
import type { IMessage, LightNode } from "@waku/interfaces";
|
||||
import { DefaultPubsubTopic } from "@waku/interfaces";
|
||||
|
||||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
makeLogFileName,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
runStoreNodes,
|
||||
sendMessages,
|
||||
startAndConnectLightNode,
|
||||
TestContentTopic,
|
||||
TestDecoder,
|
||||
TestShardInfo,
|
||||
totalMsgs
|
||||
} from "./utils.js";
|
||||
|
||||
|
@ -24,9 +22,7 @@ describe("Waku Store, sorting", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({ store: true, lightpush: true, relay: true });
|
||||
await nwaku.ensureSubscriptions();
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -38,10 +34,9 @@ describe("Waku Store, sorting", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
for await (const query of waku.store.queryGenerator([TestDecoder], {
|
||||
pageDirection: PageDirection.FORWARD
|
||||
|
@ -73,10 +68,9 @@ describe("Waku Store, sorting", function () {
|
|||
await sendMessages(
|
||||
nwaku,
|
||||
totalMsgs,
|
||||
TestContentTopic,
|
||||
DefaultPubsubTopic
|
||||
TestDecoder.contentTopic,
|
||||
TestDecoder.pubsubTopic
|
||||
);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
await waku.store.queryWithOrderedCallback(
|
||||
|
|
|
@ -4,16 +4,15 @@ import { expect } from "chai";
|
|||
import {
|
||||
afterEachCustom,
|
||||
beforeEachCustom,
|
||||
makeLogFileName,
|
||||
ServiceNode,
|
||||
tearDownNodes
|
||||
} from "../../src/index.js";
|
||||
|
||||
import {
|
||||
adjustDate,
|
||||
startAndConnectLightNode,
|
||||
TestContentTopic,
|
||||
TestDecoder
|
||||
runStoreNodes,
|
||||
TestDecoder,
|
||||
TestShardInfo
|
||||
} from "./utils.js";
|
||||
|
||||
describe("Waku Store, time filter", function () {
|
||||
|
@ -22,9 +21,7 @@ describe("Waku Store, time filter", function () {
|
|||
let nwaku: ServiceNode;
|
||||
|
||||
beforeEachCustom(this, async () => {
|
||||
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
||||
await nwaku.start({ store: true, lightpush: true, relay: true });
|
||||
await nwaku.ensureSubscriptions();
|
||||
[nwaku, waku] = await runStoreNodes(this.ctx, TestShardInfo);
|
||||
});
|
||||
|
||||
afterEachCustom(this, async () => {
|
||||
|
@ -48,14 +45,12 @@ describe("Waku Store, time filter", function () {
|
|||
await nwaku.sendMessage(
|
||||
ServiceNode.toMessageRpcQuery({
|
||||
payload: new Uint8Array([0]),
|
||||
contentTopic: TestContentTopic,
|
||||
contentTopic: TestDecoder.contentTopic,
|
||||
timestamp: msgTimestamp
|
||||
})
|
||||
)
|
||||
).to.eq(true);
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
await waku.store.queryWithOrderedCallback(
|
||||
[TestDecoder],
|
||||
|
@ -93,14 +88,12 @@ describe("Waku Store, time filter", function () {
|
|||
await nwaku.sendMessage(
|
||||
ServiceNode.toMessageRpcQuery({
|
||||
payload: new Uint8Array([0]),
|
||||
contentTopic: TestContentTopic,
|
||||
contentTopic: TestDecoder.contentTopic,
|
||||
timestamp: msgTimestamp
|
||||
})
|
||||
)
|
||||
).to.eq(true);
|
||||
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
||||
const messages: IMessage[] = [];
|
||||
await waku.store.queryWithOrderedCallback(
|
||||
[TestDecoder],
|
||||
|
|
|
@ -2,47 +2,46 @@ import {
createDecoder,
createEncoder,
DecodedMessage,
Decoder,
waitForRemotePeer
Decoder
} from "@waku/core";
import {
DefaultPubsubTopic,
LightNode,
Protocols,
ShardInfo,
ShardingParams,
type SingleShardInfo
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { createLightNode, waitForRemotePeer } from "@waku/sdk";
import { Logger, singleShardInfoToPubsubTopic } from "@waku/utils";
import { expect } from "chai";
import { Context } from "mocha";
import { delay, NOISE_KEY_1, ServiceNode } from "../../src";
import { delay, NOISE_KEY_1, runNodes, ServiceNode } from "../../src";
export const log = new Logger("test:store");
export const TestContentTopic = "/test/1/waku-store/utf8";
export const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
export const TestDecoder = createDecoder(TestContentTopic);
export const customShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 };
export const customShardedPubsubTopic1 =
singleShardInfoToPubsubTopic(customShardInfo1);
export const TestClusterId = 3;
export const TestShardInfo: ShardInfo = {
clusterId: TestClusterId,
shards: [1, 2]
};
export const customShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 };
export const customShardedPubsubTopic2 =
singleShardInfoToPubsubTopic(customShardInfo2);
export const shardInfo1: ShardInfo = { clusterId: 3, shards: [1] };
export const customContentTopic1 = "/test/2/waku-store/utf8";
export const customContentTopic2 = "/test/3/waku-store/utf8";
export const customDecoder1 = createDecoder(customContentTopic1, {
clusterId: 3,
shard: 1
export const TestShardInfo1: SingleShardInfo = { clusterId: 3, shard: 1 };
export const TestPubsubTopic1 = singleShardInfoToPubsubTopic(TestShardInfo1);
export const TestShardInfo2: SingleShardInfo = { clusterId: 3, shard: 2 };
export const TestPubsubTopic2 = singleShardInfoToPubsubTopic(TestShardInfo2);
export const TestContentTopic1 = "/test/1/waku-store/utf8";
export const TestEncoder = createEncoder({
contentTopic: TestContentTopic1,
pubsubTopicShardInfo: TestShardInfo1
});
export const customDecoder2 = createDecoder(customContentTopic2, {
clusterId: 3,
shard: 2
});
export const shardInfoBothShards: ShardInfo = { clusterId: 3, shards: [1, 2] };
export const TestDecoder = createDecoder(TestContentTopic1, TestPubsubTopic1);
export const TestContentTopic2 = "/test/3/waku-store/utf8";
export const TestDecoder2 = createDecoder(TestContentTopic2, TestPubsubTopic2);
export const totalMsgs = 20;
export const messageText = "Store Push works!";
@ -103,17 +102,12 @@ export async function processQueriedMessages(
export async function startAndConnectLightNode(
instance: ServiceNode,
pubsubTopics: string[] = [DefaultPubsubTopic],
shardInfo?: ShardingParams
shardInfo: ShardingParams
): Promise<LightNode> {
const waku = await createLightNode({
pubsubTopics: shardInfo ? undefined : pubsubTopics,
staticNoiseKey: NOISE_KEY_1,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } },
...((pubsubTopics.length !== 1 ||
pubsubTopics[0] !== DefaultPubsubTopic) && {
shardInfo: shardInfo
})
shardInfo: shardInfo
});
await waku.start();
await waku.dial(await instance.getMultiaddrWithId());
@ -148,3 +142,14 @@ export const adjustDate = (baseDate: Date, adjustMs: number): Date => {
adjusted.setTime(adjusted.getTime() + adjustMs);
return adjusted;
};
export const runStoreNodes = (
context: Context,
shardInfo: ShardingParams
): Promise<[ServiceNode, LightNode]> =>
runNodes({
context,
shardInfo,
createNode: createLightNode,
protocols: [Protocols.Store]
});
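runStoreNodes collapses the per-suite nwaku + light-node boilerplate into one call driven by a single ShardingParams value. A usage sketch of the resulting setup/teardown pattern, assuming tearDownNodes accepts a single service node and waku node as it does elsewhere in these tests:

```typescript
import type { LightNode } from "@waku/interfaces";
import { Context } from "mocha";

import { ServiceNode, tearDownNodes } from "../../src";

import { runStoreNodes, TestShardInfo } from "./utils.js";

// Sketch of the setup/teardown pattern the suites above converge on.
// `ctx` is the Mocha context that beforeEachCustom passes as `this.ctx`.
async function withStoreNodes(
  ctx: Context,
  run: (nwaku: ServiceNode, waku: LightNode) => Promise<void>
): Promise<void> {
  // One ShardingParams value drives nwaku's pubsub topics and the light node's shard info.
  const [nwaku, waku] = await runStoreNodes(ctx, TestShardInfo);
  try {
    // TestDecoder already carries the matching pubsub topic (cluster 3, shard 1),
    // so individual tests never pass a topic separately.
    await run(nwaku, waku);
  } finally {
    await tearDownNodes(nwaku, waku);
  }
}
```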
@ -1,10 +1,5 @@
import { createDecoder, createEncoder, waitForRemotePeer } from "@waku/core";
import {
DefaultPubsubTopic,
type LightNode,
Protocols
} from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { createDecoder, createEncoder } from "@waku/core";
import { type LightNode } from "@waku/interfaces";
import { toAsyncIterator } from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import chai, { expect } from "chai";
@ -14,36 +9,32 @@ import {
afterEachCustom,
beforeEachCustom,
delay,
makeLogFileName,
NOISE_KEY_1,
ServiceNode,
tearDownNodes
} from "../src/index.js";
import { runNodes } from "./filter/single_node/utils.js";
chai.use(chaiAsPromised);
const TestContentTopic = "/test/1/waku-filter";
const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
const TestDecoder = createDecoder(TestContentTopic);
const TestContentTopic = "/test/1/waku-filter/default";
const TestShardInfo = {
contentTopics: [TestContentTopic],
clusterId: 3
};
const TestEncoder = createEncoder({
contentTopic: TestContentTopic,
pubsubTopicShardInfo: TestShardInfo
});
const TestDecoder = createDecoder(TestContentTopic, TestShardInfo);
describe("Util: toAsyncIterator: Filter", function () {
let waku: LightNode;
let nwaku: ServiceNode;
beforeEachCustom(this, async () => {
nwaku = new ServiceNode(makeLogFileName(this.ctx));
await nwaku.start({
filter: true,
lightpush: true,
relay: true
});
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
[nwaku, waku] = await runNodes(this.ctx, TestShardInfo);
});
afterEachCustom(this, async () => {
@ -63,7 +54,7 @@ describe("Util: toAsyncIterator: Filter", function () {
const { value } = await iterator.next();
expect(value.contentTopic).to.eq(TestContentTopic);
expect(value.pubsubTopic).to.eq(DefaultPubsubTopic);
expect(value.pubsubTopic).to.eq(TestDecoder.pubsubTopic);
expect(bytesToUtf8(value.payload)).to.eq(messageText);
});
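The rewritten setup derives everything from one TestShardInfo object carrying contentTopics and clusterId, so encoder and decoder agree on the autosharded pubsub topic without naming it. A small sketch of that pairing; the equality with contentTopicToPubsubTopic is an assumption about how autosharding resolves a ContentTopicInfo that has no explicit shard:

```typescript
import { createDecoder, createEncoder } from "@waku/core";
import { contentTopicToPubsubTopic } from "@waku/utils";

const contentTopic = "/test/1/waku-filter/default";
const shardInfo = { contentTopics: [contentTopic], clusterId: 3 };

// Encoder and decoder derive their pubsub topic from the same shard info...
const encoder = createEncoder({
  contentTopic,
  pubsubTopicShardInfo: shardInfo
});
const decoder = createDecoder(contentTopic, shardInfo);

// ...so both should agree with the autosharding mapping for cluster 3.
console.log(encoder.pubsubTopic === decoder.pubsubTopic); // expected: true
console.log(decoder.pubsubTopic === contentTopicToPubsubTopic(contentTopic, 3)); // expected: true
```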
@ -1,6 +1,6 @@
import { waitForRemotePeer } from "@waku/core";
import type { LightNode, RelayNode } from "@waku/interfaces";
import { DefaultPubsubTopic, Protocols } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { createRelayNode } from "@waku/sdk/relay";
import { expect } from "chai";
@ -14,6 +14,12 @@ import {
tearDownNodes
} from "../src/index.js";
import {
runRelayNodes,
TestPubsubTopic,
TestShardInfo
} from "./relay/utils.js";
describe("Wait for remote peer", function () {
let waku1: RelayNode;
let waku2: LightNode;
@ -25,23 +31,10 @@ describe("Wait for remote peer", function () {
it("Relay - dialed first", async function () {
this.timeout(20_000);
nwaku = new ServiceNode(makeLogFileName(this));
await nwaku.start({
relay: true,
store: false,
filter: false,
lightpush: false
});
[nwaku, waku1] = await runRelayNodes(this, TestShardInfo);
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku1 = await createRelayNode({
staticNoiseKey: NOISE_KEY_1
});
await waku1.start();
await waku1.dial(multiAddrWithId);
await delay(1000);
await waitForRemotePeer(waku1, [Protocols.Relay]);
const peers = waku1.relay.getMeshPeers(DefaultPubsubTopic);
const peers = waku1.relay.getMeshPeers(TestPubsubTopic);
const nimPeerId = multiAddrWithId.getPeerId();
expect(nimPeerId).to.not.be.undefined;
@ -252,23 +245,10 @@ describe("Wait for remote peer", function () {
it("Privacy Node - default protocol", async function () {
this.timeout(20_000);
nwaku = new ServiceNode(makeLogFileName(this));
await nwaku.start({
filter: false,
lightpush: false,
relay: true,
store: false
});
[nwaku, waku1] = await runRelayNodes(this, TestShardInfo);
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku1 = await createRelayNode({
staticNoiseKey: NOISE_KEY_1
});
await waku1.start();
await waku1.dial(multiAddrWithId);
await waitForRemotePeer(waku1);
const peers = waku1.relay.getMeshPeers(DefaultPubsubTopic);
const peers = waku1.relay.getMeshPeers(TestPubsubTopic);
const nimPeerId = multiAddrWithId.getPeerId();
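The relay tests now obtain both nodes from runRelayNodes and read mesh peers for the shared TestPubsubTopic rather than DefaultPubsubTopic. A condensed sketch of the assertion flow, assuming runRelayNodes starts nwaku, creates the relay node and dials it (the extra waitForRemotePeer may be redundant if the helper already waits):

```typescript
import { waitForRemotePeer } from "@waku/core";
import { Protocols } from "@waku/interfaces";
import { Context } from "mocha";

import { runRelayNodes, TestPubsubTopic, TestShardInfo } from "./relay/utils.js";

// Sketch of the "Relay - dialed first" flow after the refactor.
async function relayMeshContainsServiceNode(ctx: Context): Promise<boolean> {
  const [nwaku, waku1] = await runRelayNodes(ctx, TestShardInfo);
  const multiAddrWithId = await nwaku.getMultiaddrWithId();

  await waitForRemotePeer(waku1, [Protocols.Relay]);

  // Mesh peers are tracked per pubsub topic, so the shard-specific topic is queried.
  const peers = waku1.relay.getMeshPeers(TestPubsubTopic);
  const nimPeerId = multiAddrWithId.getPeerId();
  return nimPeerId !== null && peers.includes(nimPeerId);
}
```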
@ -1,4 +1,4 @@
import { DEFAULT_CLUSTER_ID, DefaultPubsubTopic } from "@waku/interfaces";
import { DEFAULT_CLUSTER_ID } from "@waku/interfaces";
import { expect } from "chai";
import {
@ -404,7 +404,7 @@ describe("determinePubsubTopic", () => {
});
it("should fall back to default pubsub topic when pubsubTopicShardInfo is not provided", () => {
expect(determinePubsubTopic(contentTopic)).to.equal(DefaultPubsubTopic);
expect(determinePubsubTopic(contentTopic)).to.equal("/waku/2/rs/1/6");
});
it("should process correctly when SingleShardInfo has no clusterId but has a shard", () => {
@ -1,7 +1,6 @@
import { sha256 } from "@noble/hashes/sha256";
import {
DEFAULT_CLUSTER_ID,
DefaultPubsubTopic,
PubsubTopic,
ShardInfo,
ShardingParams,
@ -190,6 +189,10 @@ export function contentTopicToPubsubTopic(
clusterId: number = DEFAULT_CLUSTER_ID,
networkShards: number = 8
): string {
if (!contentTopic) {
throw Error("Content topic must be specified");
}
const shardIndex = contentTopicToShardIndex(contentTopic, networkShards);
return `/waku/2/rs/${clusterId}/${shardIndex}`;
}
@ -225,20 +228,18 @@ export function contentTopicsByPubsubTopic(
*/
export function determinePubsubTopic(
contentTopic: string,
pubsubTopicShardInfo: SingleShardInfo | PubsubTopic = DefaultPubsubTopic
pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
): string {
if (typeof pubsubTopicShardInfo == "string") {
return pubsubTopicShardInfo;
} else {
return pubsubTopicShardInfo
? pubsubTopicShardInfo.shard !== undefined
? singleShardInfoToPubsubTopic(pubsubTopicShardInfo)
: contentTopicToPubsubTopic(
contentTopic,
pubsubTopicShardInfo.clusterId
)
: DefaultPubsubTopic;
}
return pubsubTopicShardInfo?.shard !== undefined
? singleShardInfoToPubsubTopic(pubsubTopicShardInfo)
: contentTopicToPubsubTopic(
contentTopic,
pubsubTopicShardInfo?.clusterId ?? DEFAULT_CLUSTER_ID
);
}
/**
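With the DefaultPubsubTopic default removed, determinePubsubTopic now always resolves a topic from the content topic and DEFAULT_CLUSTER_ID when no shard info is given. A sketch of the three branches as they read after this change, assuming the function is exported alongside contentTopicToPubsubTopic from @waku/utils and that DEFAULT_CLUSTER_ID is 1 with 8 network shards (which the updated "/waku/2/rs/1/6" expectation implies):

```typescript
import { determinePubsubTopic } from "@waku/utils";

const contentTopic = "/toychat/2/huilong/proto"; // hypothetical example topic

// 1. An explicit pubsub topic string is returned untouched.
determinePubsubTopic(contentTopic, "/waku/2/rs/3/1"); // "/waku/2/rs/3/1"

// 2. A SingleShardInfo carrying a shard resolves via static sharding.
determinePubsubTopic(contentTopic, { clusterId: 3, shard: 2 }); // "/waku/2/rs/3/2"

// 3. No shard info at all (or shard info without a shard) now falls back to
//    autosharding on the given cluster, or DEFAULT_CLUSTER_ID when omitted.
determinePubsubTopic(contentTopic); // "/waku/2/rs/1/<computed shard index>"
```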