import { LightNode, Protocols } from "@waku/interfaces";
import { createDecoder, createLightNode, utf8ToBytes } from "@waku/sdk";
import {
  contentTopicToPubsubTopic,
  createRoutingInfo,
  delay
} from "@waku/utils";
import { expect } from "chai";

import {
  afterEachCustom,
  beforeEachCustom,
  makeLogFileName,
  MessageCollector,
  ServiceNode,
  tearDownNodes
} from "../../tests/src/index.js";

const ContentTopic = "/waku/2/content/test.throughput-sizes.js";
|
|
|
|
|
|
|
|
|
|
function generateRandomString(size: number): string {
|
|
|
|
|
const chars =
|
|
|
|
|
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
|
|
|
|
|
|
|
|
|
let result = "";
|
|
|
|
|
for (let i = 0; i < size; i++) {
|
|
|
|
|
result += chars.charAt(Math.floor(Math.random() * chars.length));
|
|
|
|
|
}
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
describe("Throughput Sanity Checks - Different Message Sizes", function () {
|
|
|
|
|
const testDurationMs = 20 * 60 * 1000; // 20 minute
|
|
|
|
|
this.timeout(testDurationMs * 1.1);
|
|
|
|
|
let waku: LightNode;
|
|
|
|
|
let nwaku: ServiceNode;
|
|
|
|
|
let messageCollector: MessageCollector;
|
|
|
|
|
|
|
|
|
|
beforeEachCustom(this, async () => {
|
|
|
|
|
nwaku = new ServiceNode(makeLogFileName(this.ctx));
|
|
|
|
|
messageCollector = new MessageCollector(nwaku);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
afterEachCustom(this, async () => {
|
|
|
|
|
await tearDownNodes(nwaku, waku);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it("Send/Receive messages of varying sizes", async function () {
|
feat!: Introduce routing info concept
Concepts are being mixed up between the global network config (static vs auto sharding), that needs to be the same of all nodes in the network, individual node configuration (eg relay node subscribing to a given shard), and the routing characteristic of a specific message (eg pubsub topic, shard).
This stops proper configuration of nwaku post 0.36.0 because we know need to be deliberate on whether nwaku nodes are running with auto or static sharding.
It also included various back and forth conversions between shards, pubsub topics, etc.
With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded.
We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration.
Routing info abstract pubsub topic, shard, and autosharding needs. Which should lead to easier tidy up of the pubsub concept at a later stage.
# Conflicts:
# packages/core/src/lib/connection_manager/connection_manager.ts
# packages/core/src/lib/metadata/metadata.ts
# packages/interfaces/src/metadata.ts
# packages/interfaces/src/sharding.ts
# packages/relay/src/create.ts
# packages/sdk/src/filter/filter.ts
# packages/sdk/src/filter/types.ts
# packages/sdk/src/light_push/light_push.spec.ts
# packages/tests/tests/sharding/auto_sharding.spec.ts
# packages/tests/tests/sharding/static_sharding.spec.ts
# Conflicts:
# packages/sdk/src/store/store.ts
2025-07-11 13:33:45 +10:00
|
|
|
const networkConfig = { clusterId: 0, numShardsInCluster: 8 };
|
2025-07-14 11:38:42 +03:00
|
|
|
|
|
|
|
|
const testStart = new Date();
|
|
|
|
|
const testEnd = Date.now() + testDurationMs;
|
|
|
|
|
|
|
|
|
|
const sizes = [10, 100, 1000, 10_000, 100_000]; // bytes
|
|
|
|
|
|
|
|
|
|
await nwaku.start(
|
|
|
|
|
{
|
|
|
|
|
store: true,
|
|
|
|
|
filter: true,
|
|
|
|
|
relay: true,
|
|
|
|
|
clusterId: 0,
|
|
|
|
|
shard: [0],
|
|
|
|
|
contentTopic: [ContentTopic]
|
|
|
|
|
},
|
|
|
|
|
{ retries: 3 }
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
await delay(1000);
|
|
|
|
|
|
2025-07-19 14:24:30 +10:00
|
|
|
await nwaku.ensureSubscriptions([
|
|
|
|
|
contentTopicToPubsubTopic(
|
|
|
|
|
ContentTopic,
|
|
|
|
|
networkConfig.clusterId,
|
|
|
|
|
networkConfig.numShardsInCluster
|
|
|
|
|
)
|
|
|
|
|
]);
|
2025-07-14 11:38:42 +03:00
|
|
|
|
feat!: Introduce routing info concept
Concepts are being mixed up between the global network config (static vs auto sharding), that needs to be the same of all nodes in the network, individual node configuration (eg relay node subscribing to a given shard), and the routing characteristic of a specific message (eg pubsub topic, shard).
This stops proper configuration of nwaku post 0.36.0 because we know need to be deliberate on whether nwaku nodes are running with auto or static sharding.
It also included various back and forth conversions between shards, pubsub topics, etc.
With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded.
We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration.
Routing info abstract pubsub topic, shard, and autosharding needs. Which should lead to easier tidy up of the pubsub concept at a later stage.
# Conflicts:
# packages/core/src/lib/connection_manager/connection_manager.ts
# packages/core/src/lib/metadata/metadata.ts
# packages/interfaces/src/metadata.ts
# packages/interfaces/src/sharding.ts
# packages/relay/src/create.ts
# packages/sdk/src/filter/filter.ts
# packages/sdk/src/filter/types.ts
# packages/sdk/src/light_push/light_push.spec.ts
# packages/tests/tests/sharding/auto_sharding.spec.ts
# packages/tests/tests/sharding/static_sharding.spec.ts
# Conflicts:
# packages/sdk/src/store/store.ts
2025-07-11 13:33:45 +10:00
|
|
|
waku = await createLightNode({ networkConfig });
|
2025-07-14 11:38:42 +03:00
|
|
|
await waku.start();
|
|
|
|
|
await waku.dial(await nwaku.getMultiaddrWithId());
|
|
|
|
|
await waku.waitForPeers([Protocols.Filter]);
|
|
|
|
|
|
feat!: Introduce routing info concept
Concepts are being mixed up between the global network config (static vs auto sharding), that needs to be the same of all nodes in the network, individual node configuration (eg relay node subscribing to a given shard), and the routing characteristic of a specific message (eg pubsub topic, shard).
This stops proper configuration of nwaku post 0.36.0 because we know need to be deliberate on whether nwaku nodes are running with auto or static sharding.
It also included various back and forth conversions between shards, pubsub topics, etc.
With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded.
We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration.
Routing info abstract pubsub topic, shard, and autosharding needs. Which should lead to easier tidy up of the pubsub concept at a later stage.
# Conflicts:
# packages/core/src/lib/connection_manager/connection_manager.ts
# packages/core/src/lib/metadata/metadata.ts
# packages/interfaces/src/metadata.ts
# packages/interfaces/src/sharding.ts
# packages/relay/src/create.ts
# packages/sdk/src/filter/filter.ts
# packages/sdk/src/filter/types.ts
# packages/sdk/src/light_push/light_push.spec.ts
# packages/tests/tests/sharding/auto_sharding.spec.ts
# packages/tests/tests/sharding/static_sharding.spec.ts
# Conflicts:
# packages/sdk/src/store/store.ts
2025-07-11 13:33:45 +10:00
|
|
|
const routingInfo = createRoutingInfo(networkConfig, {
|
|
|
|
|
contentTopic: ContentTopic
|
|
|
|
|
});
|
|
|
|
|
const decoder = createDecoder(ContentTopic, routingInfo);
|
2025-07-14 11:38:42 +03:00
|
|
|
const hasSubscribed = await waku.filter.subscribe(
|
|
|
|
|
[decoder],
|
|
|
|
|
messageCollector.callback
|
|
|
|
|
);
|
|
|
|
|
if (!hasSubscribed) throw new Error("Failed to subscribe from the start.");
|
|
|
|
|
|
|
|
|
|
let messageId = 0;
|
|
|
|
|
const report: {
|
|
|
|
|
messageId: number;
|
|
|
|
|
size: number;
|
|
|
|
|
timestamp: string;
|
|
|
|
|
sent: boolean;
|
|
|
|
|
received: boolean;
|
|
|
|
|
error?: string;
|
|
|
|
|
}[] = [];
|
|
|
|
|
|
|
|
|
|
while (Date.now() < testEnd) {
|
|
|
|
|
const now = new Date();
|
|
|
|
|
// Pick a random size from sizes array
|
|
|
|
|
const size = sizes[Math.floor(Math.random() * sizes.length)];
|
|
|
|
|
const message = generateRandomString(size);
|
|
|
|
|
let sent = false;
|
|
|
|
|
let received = false;
|
|
|
|
|
let err: string | undefined;
|
|
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
await nwaku.sendMessage(
|
|
|
|
|
ServiceNode.toMessageRpcQuery({
|
|
|
|
|
contentTopic: ContentTopic,
|
|
|
|
|
payload: utf8ToBytes(message)
|
feat!: Introduce routing info concept
Concepts are being mixed up between the global network config (static vs auto sharding), that needs to be the same of all nodes in the network, individual node configuration (eg relay node subscribing to a given shard), and the routing characteristic of a specific message (eg pubsub topic, shard).
This stops proper configuration of nwaku post 0.36.0 because we know need to be deliberate on whether nwaku nodes are running with auto or static sharding.
It also included various back and forth conversions between shards, pubsub topics, etc.
With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded.
We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration.
Routing info abstract pubsub topic, shard, and autosharding needs. Which should lead to easier tidy up of the pubsub concept at a later stage.
# Conflicts:
# packages/core/src/lib/connection_manager/connection_manager.ts
# packages/core/src/lib/metadata/metadata.ts
# packages/interfaces/src/metadata.ts
# packages/interfaces/src/sharding.ts
# packages/relay/src/create.ts
# packages/sdk/src/filter/filter.ts
# packages/sdk/src/filter/types.ts
# packages/sdk/src/light_push/light_push.spec.ts
# packages/tests/tests/sharding/auto_sharding.spec.ts
# packages/tests/tests/sharding/static_sharding.spec.ts
# Conflicts:
# packages/sdk/src/store/store.ts
2025-07-11 13:33:45 +10:00
|
|
|
}),
|
|
|
|
|
routingInfo
|
2025-07-14 11:38:42 +03:00
|
|
|
);
|
|
|
|
|
sent = true;
|
|
|
|
|
|
|
|
|
|
received = await messageCollector.waitForMessages(1, {
|
|
|
|
|
timeoutDuration: 3000
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (received) {
|
|
|
|
|
messageCollector.verifyReceivedMessage(0, {
|
|
|
|
|
expectedMessageText: message,
|
|
|
|
|
expectedContentTopic: ContentTopic,
|
feat!: Introduce routing info concept
Concepts are being mixed up between the global network config (static vs auto sharding), that needs to be the same of all nodes in the network, individual node configuration (eg relay node subscribing to a given shard), and the routing characteristic of a specific message (eg pubsub topic, shard).
This stops proper configuration of nwaku post 0.36.0 because we know need to be deliberate on whether nwaku nodes are running with auto or static sharding.
It also included various back and forth conversions between shards, pubsub topics, etc.
With this change, we tidy up the network configuration, and make it explicit whether it is static or auto sharded.
We also introduce the concept of routing info, which is specific to a message, and tied to the overall network configuration.
Routing info abstract pubsub topic, shard, and autosharding needs. Which should lead to easier tidy up of the pubsub concept at a later stage.
# Conflicts:
# packages/core/src/lib/connection_manager/connection_manager.ts
# packages/core/src/lib/metadata/metadata.ts
# packages/interfaces/src/metadata.ts
# packages/interfaces/src/sharding.ts
# packages/relay/src/create.ts
# packages/sdk/src/filter/filter.ts
# packages/sdk/src/filter/types.ts
# packages/sdk/src/light_push/light_push.spec.ts
# packages/tests/tests/sharding/auto_sharding.spec.ts
# packages/tests/tests/sharding/static_sharding.spec.ts
# Conflicts:
# packages/sdk/src/store/store.ts
2025-07-11 13:33:45 +10:00
|
|
|
expectedPubsubTopic: routingInfo.pubsubTopic
|
2025-07-14 11:38:42 +03:00
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
} catch (e: any) {
|
|
|
|
|
err = e.message || String(e);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
report.push({
|
|
|
|
|
messageId,
|
|
|
|
|
size,
|
|
|
|
|
timestamp: now.toISOString(),
|
|
|
|
|
sent,
|
|
|
|
|
received,
|
|
|
|
|
error: err
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
messageId++;
|
|
|
|
|
messageCollector.list = []; // clearing the message collector
|
|
|
|
|
await delay(400);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const failedMessages = report.filter(
|
|
|
|
|
(m) => !m.sent || !m.received || m.error
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
console.log("\n=== Throughput Sizes Test Summary ===");
|
|
|
|
|
console.log("Start time:", testStart.toISOString());
|
|
|
|
|
console.log("End time:", new Date().toISOString());
|
|
|
|
|
console.log("Total messages:", report.length);
|
|
|
|
|
console.log("Failures:", failedMessages.length);
|
|
|
|
|
|
|
|
|
|
// Additional size info
|
|
|
|
|
const sizeCounts: Record<number, number> = {};
|
|
|
|
|
for (const entry of report) {
|
|
|
|
|
sizeCounts[entry.size] = (sizeCounts[entry.size] || 0) + 1;
|
|
|
|
|
}
|
|
|
|
|
console.log("\nMessage size distribution:");
|
|
|
|
|
for (const size of Object.keys(sizeCounts).sort(
|
|
|
|
|
(a, b) => Number(a) - Number(b)
|
|
|
|
|
)) {
|
|
|
|
|
console.log(`Size ${size} bytes: ${sizeCounts[Number(size)]} messages`);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (failedMessages.length > 0) {
|
|
|
|
|
console.log("\n--- Failed Messages ---");
|
|
|
|
|
for (const fail of failedMessages) {
|
|
|
|
|
console.log(
|
|
|
|
|
`#${fail.messageId} (size: ${fail.size} bytes) @ ${fail.timestamp} | sent: ${fail.sent} | received: ${fail.received} | error: ${fail.error || "N/A"}`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
expect(
|
|
|
|
|
failedMessages.length,
|
|
|
|
|
`Some messages failed: ${failedMessages.length}`
|
|
|
|
|
).to.eq(0);
|
|
|
|
|
});
|
|
|
|
|
});
|