mirror of
https://github.com/logos-messaging/js-waku.git
synced 2026-05-10 04:09:42 +00:00
Concepts are being mixed up between the global network config (static vs auto sharding), which needs to be the same for all nodes in the network, individual node configuration (e.g. a relay node subscribing to a given shard), and the routing characteristics of a specific message (e.g. pubsub topic, shard). This stops proper configuration of nwaku post 0.36.0 because we now need to be deliberate about whether nwaku nodes are running with auto or static sharding. It also involved various back-and-forth conversions between shards, pubsub topics, etc. With this change, we tidy up the network configuration and make it explicit whether it is static or auto sharded. We also introduce the concept of routing info, which is specific to a message and tied to the overall network configuration. Routing info abstracts pubsub topic, shard, and autosharding needs, which should lead to an easier tidy-up of the pubsub concept at a later stage. # Conflicts: # packages/core/src/lib/connection_manager/connection_manager.ts # packages/core/src/lib/metadata/metadata.ts # packages/interfaces/src/metadata.ts # packages/interfaces/src/sharding.ts # packages/relay/src/create.ts # packages/sdk/src/filter/filter.ts # packages/sdk/src/filter/types.ts # packages/sdk/src/light_push/light_push.spec.ts # packages/tests/tests/sharding/auto_sharding.spec.ts # packages/tests/tests/sharding/static_sharding.spec.ts # Conflicts: # packages/sdk/src/store/store.ts
259 lines
8.7 KiB
TypeScript
259 lines
8.7 KiB
TypeScript
import { createEncoder } from "@waku/core";
|
|
import { IRateLimitProof, LightNode, ProtocolError } from "@waku/interfaces";
|
|
import { utf8ToBytes } from "@waku/sdk";
|
|
import { expect } from "chai";
|
|
|
|
import {
|
|
afterEachCustom,
|
|
beforeEachCustom,
|
|
generateRandomUint8Array,
|
|
runMultipleNodes,
|
|
ServiceNodesFleet,
|
|
teardownNodesWithRedundancy,
|
|
TEST_STRING
|
|
} from "../../src/index.js";
|
|
|
|
import {
|
|
messagePayload,
|
|
messageText,
|
|
TestContentTopic,
|
|
TestEncoder,
|
|
TestRoutingInfo
|
|
} from "./utils.js";
|
|
|
|
const runTests = (strictNodeCheck: boolean): void => {
|
|
const numServiceNodes = 2;
|
|
describe(`Waku Light Push: Multiple Nodes: Strict Check: ${strictNodeCheck}`, function () {
|
|
// Set the timeout for all tests in this suite. Can be overwritten at test level
|
|
this.timeout(15000);
|
|
let waku: LightNode;
|
|
let serviceNodes: ServiceNodesFleet;
|
|
|
|
beforeEachCustom(this, async () => {
|
|
[serviceNodes, waku] = await runMultipleNodes(
|
|
this.ctx,
|
|
TestRoutingInfo,
|
|
{ lightpush: true, filter: true },
|
|
strictNodeCheck,
|
|
numServiceNodes,
|
|
true
|
|
);
|
|
});
|
|
|
|
afterEachCustom(this, async () => {
|
|
await teardownNodesWithRedundancy(serviceNodes, waku);
|
|
});
|
|
|
|
TEST_STRING.forEach((testItem) => {
|
|
it(`Push message with ${testItem.description} payload`, async function () {
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: utf8ToBytes(testItem.value)
|
|
});
|
|
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
|
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
true
|
|
);
|
|
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
|
expectedMessageText: testItem.value,
|
|
expectedContentTopic: TestContentTopic,
|
|
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
|
});
|
|
});
|
|
});
|
|
|
|
// TODO: skipped till https://github.com/waku-org/nwaku/issues/3369 resolved
|
|
it.skip("Push 30 different messages", async function () {
|
|
const generateMessageText = (index: number): string => `M${index}`;
|
|
|
|
for (let i = 0; i < 30; i++) {
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: utf8ToBytes(generateMessageText(i))
|
|
});
|
|
|
|
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
|
}
|
|
|
|
expect(await serviceNodes.messageCollector.waitForMessages(30)).to.eq(
|
|
true
|
|
);
|
|
|
|
for (let i = 0; i < 30; i++) {
|
|
serviceNodes.messageCollector.verifyReceivedMessage(i, {
|
|
expectedMessageText: generateMessageText(i),
|
|
expectedContentTopic: TestContentTopic,
|
|
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
|
});
|
|
}
|
|
});
|
|
|
|
it("Throws when trying to push message with empty payload", async function () {
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: new Uint8Array()
|
|
});
|
|
|
|
expect(pushResponse.successes.length).to.eq(0);
|
|
|
|
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
|
ProtocolError.EMPTY_PAYLOAD
|
|
);
|
|
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
false
|
|
);
|
|
});
|
|
|
|
[{ description: "short", value: "hi" }].forEach((testItem) => {
|
|
it(`Push message with content topic containing ${testItem.description}`, async function () {
|
|
const contentTopic = `/test/1/${testItem.value}/proto`;
|
|
const customEncoder = waku.createEncoder({
|
|
contentTopic
|
|
});
|
|
const pushResponse = await waku.lightPush.send(
|
|
customEncoder,
|
|
messagePayload
|
|
);
|
|
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
|
|
|
expect(
|
|
await serviceNodes.messageCollector.waitForMessages(1, {
|
|
contentTopic
|
|
})
|
|
).to.eq(true);
|
|
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
|
expectedMessageText: messageText,
|
|
expectedContentTopic: contentTopic,
|
|
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
|
});
|
|
});
|
|
});
|
|
|
|
it("Push message with meta", async function () {
|
|
const customTestEncoder = createEncoder({
|
|
contentTopic: TestContentTopic,
|
|
metaSetter: () => new Uint8Array(10),
|
|
routingInfo: TestRoutingInfo
|
|
});
|
|
|
|
const pushResponse = await waku.lightPush.send(
|
|
customTestEncoder,
|
|
messagePayload
|
|
);
|
|
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
|
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
true
|
|
);
|
|
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
|
expectedMessageText: messageText,
|
|
expectedContentTopic: TestContentTopic,
|
|
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
|
});
|
|
});
|
|
|
|
it("Fails to push message with large meta", async function () {
|
|
const customTestEncoder = createEncoder({
|
|
contentTopic: TestContentTopic,
|
|
routingInfo: TestRoutingInfo,
|
|
metaSetter: () => new Uint8Array(105024) // see the note below ***
|
|
});
|
|
|
|
// *** note: this test used 10 ** 6 when `nwaku` node had MaxWakuMessageSize == 1MiB ( 1*2^20 .)
|
|
// `nwaku` establishes the max lightpush msg size as `const MaxRpcSize* = MaxWakuMessageSize + 64 * 1024`
|
|
// see: https://github.com/waku-org/nwaku/blob/07beea02095035f4f4c234ec2dec1f365e6955b8/waku/waku_lightpush/rpc_codec.nim#L15
|
|
// In the PR https://github.com/waku-org/nwaku/pull/2298 we reduced the MaxWakuMessageSize
|
|
// from 1MiB to 150KiB. Therefore, the 105024 number comes from subtracting ( 1*2^20 - 150*2^10 )
|
|
// to the original 10^6 that this test had when MaxWakuMessageSize == 1*2^20
|
|
|
|
const pushResponse = await waku.lightPush.send(
|
|
customTestEncoder,
|
|
messagePayload
|
|
);
|
|
|
|
expect(pushResponse.successes.length).to.eq(0);
|
|
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
|
ProtocolError.REMOTE_PEER_REJECTED
|
|
);
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
false
|
|
);
|
|
});
|
|
|
|
it("Push message with rate limit", async function () {
|
|
const rateLimitProof: IRateLimitProof = {
|
|
proof: utf8ToBytes("proofData"),
|
|
merkleRoot: utf8ToBytes("merkleRootData"),
|
|
epoch: utf8ToBytes("epochData"),
|
|
shareX: utf8ToBytes("shareXData"),
|
|
shareY: utf8ToBytes("shareYData"),
|
|
nullifier: utf8ToBytes("nullifierData"),
|
|
rlnIdentifier: utf8ToBytes("rlnIdentifierData")
|
|
};
|
|
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: utf8ToBytes(messageText),
|
|
rateLimitProof: rateLimitProof
|
|
});
|
|
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
|
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
true
|
|
);
|
|
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
|
expectedMessageText: messageText,
|
|
expectedContentTopic: TestContentTopic,
|
|
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
|
});
|
|
});
|
|
|
|
[
|
|
Date.now() - 3600000 * 24 * 356,
|
|
Date.now() - 3600000,
|
|
Date.now() + 3600000
|
|
].forEach((testItem) => {
|
|
it(`Push message with custom timestamp: ${testItem}`, async function () {
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: utf8ToBytes(messageText),
|
|
timestamp: new Date(testItem)
|
|
});
|
|
expect(pushResponse.successes.length).to.eq(numServiceNodes);
|
|
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
true
|
|
);
|
|
serviceNodes.messageCollector.verifyReceivedMessage(0, {
|
|
expectedMessageText: messageText,
|
|
expectedTimestamp: testItem,
|
|
expectedContentTopic: TestContentTopic,
|
|
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
|
|
});
|
|
});
|
|
});
|
|
|
|
it("Push message equal or less that 1MB", async function () {
|
|
const bigPayload = generateRandomUint8Array(65536);
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: bigPayload
|
|
});
|
|
expect(pushResponse.successes.length).to.greaterThan(0);
|
|
});
|
|
|
|
it("Fails to push message bigger that 1MB", async function () {
|
|
const MB = 1024 ** 2;
|
|
|
|
const pushResponse = await waku.lightPush.send(TestEncoder, {
|
|
payload: generateRandomUint8Array(MB + 65536)
|
|
});
|
|
expect(pushResponse.successes.length).to.eq(0);
|
|
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
|
|
ProtocolError.SIZE_TOO_BIG
|
|
);
|
|
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
|
|
false
|
|
);
|
|
});
|
|
});
|
|
};
|
|
|
|
// Register the suite once per strict-check mode. `forEach` (not `map`):
// runTests is called purely for its side effect of registering tests, so
// building and discarding a result array is misleading.
[true, false].forEach(runTests);
|