feat: use docker image for rln encoder e2e test

Arseniy Klempner 2025-12-17 18:17:16 -08:00
parent cbf99f27b5
commit 804144daba
9 changed files with 634 additions and 245 deletions

View File

@ -13,7 +13,8 @@ module.exports = function (config) {
files: [
{
pattern: "src/**/*.spec.ts",
// Exclude node tests - they require Docker nwaku nodes and are run via test:node
pattern: "src/**/!(*.node).spec.ts",
type: "js"
},
{
@ -50,7 +51,7 @@ module.exports = function (config) {
],
preprocessors: {
"src/**/*.spec.ts": ["webpack"]
"src/**/!(*.node).spec.ts": ["webpack"]
},
client: {
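
The extglob split keeps the two suites disjoint: the default browser run now matches every spec except *.node.spec.ts files, which only the new karma.node.conf.cjs picks up. Illustrative file names (not part of this commit):

// src/codec.spec.ts       matches "src/**/!(*.node).spec.ts"  -> npm run test:browser
// src/codec.node.spec.ts  matches "src/**/*.node.spec.ts"     -> npm run test:node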

View File

@ -0,0 +1,198 @@
/* eslint-disable @typescript-eslint/no-var-requires */
/**
* Karma configuration for node integration tests, which connect to pre-started Docker nwaku nodes.
*
* Usage: npm run test:node
* (This will start the nwaku fleet, run these tests, and clean up)
*/
const path = require("path");
const webpack = require("webpack");
const rootConfig = require("../../karma.conf.cjs");
module.exports = function (config) {
rootConfig(config);
const configuration = {
frameworks: ["mocha", "webpack"],
files: [
{
// Only run node integration tests (requires Docker nwaku nodes)
pattern: "src/**/*.node.spec.ts",
type: "js"
},
{
pattern: "src/resources/**/*.wasm",
included: false,
served: true,
watched: false,
type: "wasm",
nocache: true
},
{
pattern: "src/resources/**/*.zkey",
included: false,
served: true,
watched: false,
nocache: true
},
{
pattern: "../../node_modules/@waku/zerokit-rln-wasm/*.wasm",
included: false,
served: true,
watched: false,
type: "wasm",
nocache: true
},
{
pattern: "../../node_modules/@waku/zerokit-rln-wasm-utils/*.wasm",
included: false,
served: true,
watched: false,
type: "wasm",
nocache: true
},
{
// Fleet info is written by the integration test runner
pattern: "fleet-info.json",
included: false,
served: true,
watched: false,
nocache: true
}
],
preprocessors: {
"src/**/*.node.spec.ts": ["webpack"]
},
client: {
mocha: {
timeout: 300000 // 5 minutes
}
},
browserDisconnectTimeout: 300000, // 5 minutes
browserDisconnectTolerance: 3, // Number of disconnections tolerated before failing
browserNoActivityTimeout: 300000, // 5 minutes
captureTimeout: 300000, // 5 minutes
pingTimeout: 300000, // 5 minutes
mime: {
"application/wasm": ["wasm"],
"application/octet-stream": ["zkey"]
},
customHeaders: [
{
match: ".*\\.wasm$",
name: "Content-Type",
value: "application/wasm"
},
{
match: ".*\\.zkey$",
name: "Content-Type",
value: "application/octet-stream"
}
],
proxies: {
"/base/rln_wasm_bg.wasm":
"/absolute" +
path.resolve(
__dirname,
"../../node_modules/@waku/zerokit-rln-wasm/rln_wasm_bg.wasm"
),
"/base/rln_wasm_utils_bg.wasm":
"/absolute" +
path.resolve(
__dirname,
"../../node_modules/@waku/zerokit-rln-wasm-utils/rln_wasm_utils_bg.wasm"
),
"/base/rln.wasm":
"/absolute" + path.resolve(__dirname, "src/resources/rln.wasm"),
"/base/rln_final.zkey":
"/absolute" + path.resolve(__dirname, "src/resources/rln_final.zkey")
},
webpack: {
mode: "development",
experiments: {
asyncWebAssembly: true,
syncWebAssembly: true,
topLevelAwait: true
},
output: {
wasmLoading: "fetch",
path: path.resolve(__dirname, "dist"),
publicPath: "/base/",
clean: true
},
module: {
rules: [
{
test: /\.ts$/,
use: "ts-loader",
exclude: /node_modules/
},
{
test: /\.wasm$/,
type: "asset/resource",
generator: {
filename: "[name][ext]"
}
},
{
test: /\.zkey$/,
type: "asset/resource",
generator: {
filename: "[name][ext]"
}
}
]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.DISPLAY": "Browser"
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".js", ".wasm"],
modules: ["node_modules", "../../node_modules"],
alias: {
"@waku/zerokit-rln-wasm": path.resolve(
__dirname,
"../../node_modules/@waku/zerokit-rln-wasm/rln_wasm.js"
)
},
fallback: {
crypto: false,
fs: false,
path: false,
stream: false
}
},
stats: { warnings: false },
devtool: "inline-source-map"
},
reporters: ["progress"],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: process.env.CI ? ["ChromeHeadlessCI"] : ["ChromeHeadless"],
singleRun: true,
concurrency: 1, // Reduce concurrency to avoid memory pressure
browserSocketTimeout: 180000 // 3 minutes
};
config.set(configuration);
};
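
The node config also serves fleet-info.json next to the WASM assets so the browser tests can discover the Docker nodes. The file is written by the integration runner and follows the NodeInfo/FleetInfo shape used in the new spec; a sketch with placeholder addresses and ports:

{
  "nodes": [
    {
      "multiaddr": "/ip4/127.0.0.1/tcp/60001/ws/p2p/16Uiu2HAm...",
      "restPort": "http://127.0.0.1:8645",
      "peerId": "16Uiu2HAm..."
    }
  ]
}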

View File

@ -38,8 +38,10 @@
"check:tsc": "tsc -p tsconfig.dev.json",
"check:lint": "eslint \"src/!(resources)/**/*.{ts,js}\" *.js",
"check:spelling": "cspell \"{README.md,src/**/*.ts}\"",
"test": "NODE_ENV=test run-s test:*",
"test": "NODE_ENV=test run-s test:browser",
"test:browser": "karma start karma.conf.cjs",
"test:node": "node src/test-utils/run-integration-tests.js",
"nwaku:start": "npx ts-node --esm src/test-utils/start-nwaku-fleet.ts start 2",
"watch:build": "tsc -p tsconfig.json -w",
"watch:test": "mocha --watch",
"prepublish": "npm run build",
@ -62,6 +64,7 @@
"@waku/build-utils": "^1.0.0",
"@waku/message-encryption": "^0.0.37",
"@waku/sdk": "^0.0.36",
"@waku/tests": "*",
"deep-equal-in-any-order": "^2.0.6",
"fast-check": "^3.23.2",
"rollup-plugin-copy": "^3.5.0"

View File

@ -22,12 +22,10 @@ export class RLNEncoder implements IEncoder {
private readonly encoder: IEncoder,
private readonly rlnInstance: RLNInstance,
private readonly rateLimit: number,
public index: number,
public pathElements: Uint8Array[],
public identityPathIndex: Uint8Array[],
identityCredential: IdentityCredential
) {
if (index < 0) throw new Error("Invalid membership index");
this.idSecretHash = identityCredential.IDSecretHash;
}
@ -48,7 +46,6 @@ export class RLNEncoder implements IEncoder {
if (!message.rateLimitProof) {
message.rateLimitProof = await this.generateProof(
message,
this.index,
this.pathElements,
this.identityPathIndex
);
@ -67,7 +64,6 @@ export class RLNEncoder implements IEncoder {
if (!message.rateLimitProof) {
protoMessage.rateLimitProof = await this.generateProof(
message,
this.index,
this.pathElements,
this.identityPathIndex
);
@ -80,7 +76,6 @@ export class RLNEncoder implements IEncoder {
private async generateProof(
message: IMessage,
leafIndex: number,
pathElements: Uint8Array[],
identityPathIndex: Uint8Array[]
): Promise<IRateLimitProof> {
@ -90,7 +85,6 @@ export class RLNEncoder implements IEncoder {
const { proof, epoch, rlnIdentifier } =
await this.rlnInstance.zerokit.generateRLNProof(
signal,
leafIndex,
message.timestamp,
this.idSecretHash,
pathElements,
@ -122,7 +116,6 @@ export class RLNEncoder implements IEncoder {
type RLNEncoderOptions = {
encoder: IEncoder;
rlnInstance: RLNInstance;
index: number;
credential: IdentityCredential;
pathElements: Uint8Array[];
identityPathIndex: Uint8Array[];
@ -134,7 +127,6 @@ export const createRLNEncoder = (options: RLNEncoderOptions): RLNEncoder => {
options.encoder,
options.rlnInstance,
options.rateLimit,
options.index,
options.pathElements,
options.identityPathIndex,
options.credential
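
With the membership index removed, callers construct the encoder from the credential, the Merkle path, and the rate limit alone. This mirrors how the new node spec wires it up (same variable names as that spec):

const rlnEncoder = createRLNEncoder({
  encoder: baseEncoder,            // plain Waku encoder for the content topic
  rlnInstance,                     // from RLNInstance.create()
  credential: credential.identity, // unlocked keystore credential
  pathElements,                    // Uint8Array[] of 32-byte Merkle sibling hashes
  identityPathIndex,               // Uint8Array[] of single-byte 0/1 path directions
  rateLimit
});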

View File

@ -0,0 +1,120 @@
import { multiaddr } from "@multiformats/multiaddr";
import { createLightNode, Protocols } from "@waku/sdk";
import { expect } from "chai";
import { createRLNEncoder } from "./codec.js";
import { Keystore } from "./keystore/index.js";
import { RLNInstance } from "./rln.js";
import { BytesUtils } from "./utils/index.js";
import { getPathDirectionsFromIndex } from "./utils/merkle.js";
import { TEST_KEYSTORE_DATA } from "./utils/test_keystore.js";
interface NodeInfo {
multiaddr: string;
restPort: string;
peerId: string;
}
interface FleetInfo {
nodes: NodeInfo[];
}
async function getFleetInfo(): Promise<FleetInfo> {
const response = await fetch("/base/fleet-info.json");
if (!response.ok) {
throw new Error(
`Failed to fetch fleet info: ${response.status} ${response.statusText}. ` +
"Make sure to start the nwaku fleet before running tests."
);
}
return response.json();
}
describe("RLN Proof Integration Tests", function () {
this.timeout(30000);
it("sends a message with a proof", async function () {
// Get fleet info from the pre-started nwaku nodes
const fleetInfo = await getFleetInfo();
expect(fleetInfo.nodes.length).to.be.greaterThanOrEqual(2);
const waku = await createLightNode({
networkConfig: {
clusterId: 0,
numShardsInCluster: 1
},
defaultBootstrap: false,
libp2p: {
filterMultiaddrs: false
}
});
// Create RLN instance
const rlnInstance = await RLNInstance.create();
// Load credential from test keystore
const keystore = Keystore.fromString(TEST_KEYSTORE_DATA.keystoreJson);
if (!keystore) {
throw new Error("Failed to load test keystore");
}
const credential = await keystore.readCredential(
TEST_KEYSTORE_DATA.credentialHash,
TEST_KEYSTORE_DATA.password
);
if (!credential) {
throw new Error("Failed to unlock credential with provided password");
}
// Prepare merkle proof data
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
const membershipIndex = Number(TEST_KEYSTORE_DATA.membershipIndex);
const rateLimit = Number(TEST_KEYSTORE_DATA.rateLimit);
const proofElementIndexes = getPathDirectionsFromIndex(
BigInt(membershipIndex)
);
// Convert merkle proof to bytes format
const pathElements = merkleProof.map((proof) =>
BytesUtils.bytes32FromBigInt(proof)
);
const identityPathIndex = proofElementIndexes.map((index) =>
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
);
// Create base encoder
const contentTopic = "/rln/1/test/proto";
const baseEncoder = waku.createEncoder({
contentTopic
});
// Create RLN encoder
const rlnEncoder = createRLNEncoder({
encoder: baseEncoder,
rlnInstance,
credential: credential.identity,
pathElements,
identityPathIndex,
rateLimit
});
// Connect to all nodes in the fleet
for (const nodeInfo of fleetInfo.nodes) {
const nwakuMultiaddr = multiaddr(nodeInfo.multiaddr);
await waku.dial(nwakuMultiaddr, [Protocols.LightPush]);
}
await waku.waitForPeers([Protocols.LightPush]);
// Create message
const messageTimestamp = new Date();
const message = {
payload: new TextEncoder().encode("Hello RLN!"),
timestamp: messageTimestamp
};
// Send message with proof
const result = await waku.lightPush.send(rlnEncoder, message);
expect(result.successes.length).to.be.greaterThan(0);
});
});
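
Unlike the old test, this spec derives the path directions from the membership index via getPathDirectionsFromIndex rather than reconstructing them from the proof hashes. A rough sketch of what such a helper computes, assuming a binary Merkle tree where bit i of the leaf index gives the direction at level i (illustration only, not the implementation in utils/merkle.js; the default depth of 20 is a placeholder):

function pathDirectionsFromIndexSketch(index: bigint, depth: number = 20): number[] {
  const directions: number[] = [];
  for (let level = 0; level < depth; level++) {
    // 0 = the node is a left child at this level, 1 = a right child
    directions.push(Number((index >> BigInt(level)) & 1n));
  }
  return directions;
}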

View File

@ -1,12 +1,8 @@
import { multiaddr } from "@multiformats/multiaddr";
import { createLightNode, IMessage, Protocols } from "@waku/sdk";
import { expect } from "chai";
import { createRLNEncoder } from "./codec.js";
import { Keystore } from "./keystore/index.js";
import { Proof, proofToBytes } from "./proof.js";
import { RLNInstance } from "./rln.js";
// import { epochBytesToInt } from "./utils/epoch.js";
import { BytesUtils } from "./utils/index.js";
import {
calculateRateCommitment,
@ -16,7 +12,7 @@ import {
} from "./utils/merkle.js";
import { TEST_KEYSTORE_DATA } from "./utils/test_keystore.js";
describe.only("RLN Proof Integration Tests", function () {
describe("RLN Proof Unit Tests", function () {
this.timeout(30000);
it("validate stored merkle proof data", function () {
@ -74,7 +70,7 @@ describe.only("RLN Proof Integration Tests", function () {
const testMessage = new TextEncoder().encode("test");
const proof = await rlnInstance.zerokit.generateRLNProof(
const { proof } = await rlnInstance.zerokit.generateRLNProof(
testMessage,
new Date(),
credential.identity.IDSecretHash,
@ -97,10 +93,6 @@ describe.only("RLN Proof Integration Tests", function () {
expect(isValid).to.be.true;
});
const nwakuNode3 = multiaddr(
"/ip4/192.168.0.216/tcp/8002/ws/p2p/16Uiu2HAm4YTSbqhsa6xHfuqvo11T1oX4JgD5fMuDujsd1qojkfPi"
);
it("should parse proof bytes into Proof class", async function () {
const rlnInstance = await RLNInstance.create();
@ -117,21 +109,12 @@ describe.only("RLN Proof Integration Tests", function () {
throw new Error("Failed to unlock credential with provided password");
}
const idCommitment = credential.identity.IDCommitmentBigInt;
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
const merkleRoot = BigInt(TEST_KEYSTORE_DATA.merkleRoot);
const membershipIndex = BigInt(TEST_KEYSTORE_DATA.membershipIndex);
const rateLimit = BigInt(TEST_KEYSTORE_DATA.rateLimit);
const rateCommitment = calculateRateCommitment(idCommitment, rateLimit);
const proofElementIndexes = extractPathDirectionsFromProof(
merkleProof,
rateCommitment,
merkleRoot
);
if (!proofElementIndexes) {
throw new Error("Failed to extract proof element indexes");
}
const proofElementIndexes = getPathDirectionsFromIndex(membershipIndex);
const testMessage = new TextEncoder().encode("test");
@ -139,10 +122,9 @@ describe.only("RLN Proof Integration Tests", function () {
const { proof, epoch, rlnIdentifier } =
await rlnInstance.zerokit.generateRLNProof(
testMessage,
Number(membershipIndex),
new Date(),
credential.identity.IDSecretHash,
merkleProof.map((proof) => BytesUtils.fromBigInt(proof, 32, "little")),
merkleProof.map((proof) => BytesUtils.bytes32FromBigInt(proof)),
proofElementIndexes.map((index) =>
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
),
@ -185,215 +167,4 @@ describe.only("RLN Proof Integration Tests", function () {
parsedProof.externalNullifier
);
});
it.only("sends a message with a proof", async function () {
const waku = await createLightNode({
networkConfig: {
clusterId: 0,
numShardsInCluster: 1
},
defaultBootstrap: false,
libp2p: {
filterMultiaddrs: false
}
});
// Create RLN instance
const rlnInstance = await RLNInstance.create();
// Load credential from test keystore
const keystore = Keystore.fromString(TEST_KEYSTORE_DATA.keystoreJson);
if (!keystore) {
throw new Error("Failed to load test keystore");
}
const credential = await keystore.readCredential(
TEST_KEYSTORE_DATA.credentialHash,
TEST_KEYSTORE_DATA.password
);
if (!credential) {
throw new Error("Failed to unlock credential with provided password");
}
// Prepare merkle proof data
const idCommitment = credential.identity.IDCommitmentBigInt;
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
const merkleRoot = BigInt(TEST_KEYSTORE_DATA.merkleRoot);
const membershipIndex = Number(TEST_KEYSTORE_DATA.membershipIndex);
const rateLimit = Number(TEST_KEYSTORE_DATA.rateLimit);
const rateCommitment = calculateRateCommitment(
idCommitment,
BigInt(rateLimit)
);
const proofElementIndexes = extractPathDirectionsFromProof(
merkleProof,
rateCommitment,
merkleRoot
);
if (!proofElementIndexes) {
throw new Error("Failed to extract proof element indexes");
}
// Convert merkle proof to bytes format
const pathElements = merkleProof.map((proof) =>
BytesUtils.fromBigInt(proof, 32, "little")
);
const identityPathIndex = proofElementIndexes.map((index) =>
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
);
// Create base encoder
const contentTopic = "/rln/1/test/proto";
// const pubsubTopic = "/waku/2/rs/1/0";
const baseEncoder = waku.createEncoder({
contentTopic
});
// Create RLN encoder
const rlnEncoder = createRLNEncoder({
encoder: baseEncoder,
rlnInstance,
index: membershipIndex,
credential: credential.identity,
pathElements,
identityPathIndex,
rateLimit
});
await waku.dial(nwakuNode3, [Protocols.LightPush]);
await waku.waitForPeers([Protocols.LightPush]);
// Create message
const messageTimestamp = new Date();
const message = {
payload: new TextEncoder().encode("Hello RLN!"),
timestamp: messageTimestamp
};
// Send message with proof
const result = await waku.lightPush.send(rlnEncoder, message);
console.log("LightPush result:", result);
if (result.failures) {
console.log(result.failures.map((f) => f.error));
}
expect(result.successes.length).to.be.greaterThan(0);
});
it("send many messages, track which succeed or fail", async function () {
this.timeout(50000);
const waku = await createLightNode({
networkConfig: {
clusterId: 0,
numShardsInCluster: 1
},
defaultBootstrap: false,
libp2p: {
filterMultiaddrs: false
}
});
console.log("node created");
// Create RLN instance
const rlnInstance = await RLNInstance.create();
// Load credential from test keystore
const keystore = Keystore.fromString(TEST_KEYSTORE_DATA.keystoreJson);
if (!keystore) {
throw new Error("Failed to load test keystore");
}
const credential = await keystore.readCredential(
TEST_KEYSTORE_DATA.credentialHash,
TEST_KEYSTORE_DATA.password
);
if (!credential) {
throw new Error("Failed to unlock credential with provided password");
}
// Prepare merkle proof data
const idCommitment = credential.identity.IDCommitmentBigInt;
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
const merkleRoot = BigInt(TEST_KEYSTORE_DATA.merkleRoot);
const membershipIndex = Number(TEST_KEYSTORE_DATA.membershipIndex);
const rateLimit = Number(TEST_KEYSTORE_DATA.rateLimit);
const rateCommitment = calculateRateCommitment(
idCommitment,
BigInt(rateLimit)
);
const proofElementIndexes = extractPathDirectionsFromProof(
merkleProof,
rateCommitment,
merkleRoot
);
if (!proofElementIndexes) {
throw new Error("Failed to extract proof element indexes");
}
// Convert merkle proof to bytes format
const pathElements = merkleProof.map((proof) =>
BytesUtils.fromBigInt(proof, 32, "little")
);
const identityPathIndex = proofElementIndexes.map((index) =>
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
);
// Create base encoder
const contentTopic = "/rln/1/test/proto";
// const pubsubTopic = "/waku/2/rs/1/0";
const baseEncoder = waku.createEncoder({
contentTopic
});
// Create RLN encoder
const rlnEncoder = createRLNEncoder({
encoder: baseEncoder,
rlnInstance,
index: membershipIndex,
credential: credential.identity,
pathElements,
identityPathIndex,
rateLimit
});
// connect to node
await waku.dial(nwakuNode3, [Protocols.LightPush]);
console.log("node dialed");
await waku.waitForPeers([Protocols.LightPush]);
console.log("peers waited");
const messagesToSend = 20;
const results: {
success: boolean;
epoch: number;
}[] = [];
for (let i = 0; i < messagesToSend; i++) {
// Create message
const messageTimestamp = new Date();
const message = {
payload: new TextEncoder().encode("Hello RLN!"),
timestamp: messageTimestamp
};
// Send message with proof
console.log("sending message", i);
const result = await waku.lightPush.send(rlnEncoder, message, {
autoRetry: false
});
const success = result.successes.length > 0;
console.log("success:", success);
const timestampSeconds = Math.floor(message.timestamp!.getTime() / 1000);
results.push({
success,
epoch: timestampSeconds
});
await new Promise((resolve) => setTimeout(resolve, 2500));
}
});
});

View File

@ -0,0 +1,149 @@
/* eslint-env node */
/**
* Integration test runner for RLN package.
*
* This script:
* 1. Pulls the specific nwaku Docker image
* 2. Starts a fleet of nwaku nodes
* 3. Runs the Karma browser tests
* 4. Stops the fleet (cleanup)
*
* Usage: node src/test-utils/run-integration-tests.js
*/
import { exec, spawn } from "child_process";
import * as fs from "fs";
import * as path from "path";
import { fileURLToPath } from "url";
import { promisify } from "util";
const execAsync = promisify(exec);
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Use WAKUNODE_IMAGE from environment (set by CI) or fall back to our RLN-specific image
const NWAKU_IMAGE = process.env.WAKUNODE_IMAGE || "quay.io/wakuorg/nwaku-pr:3660";
const FLEET_INFO_PATH = path.join(__dirname, "../../fleet-info.json");
const NUM_NODES = 2;
// Ensure the environment variable is set for ServiceNode
process.env.WAKUNODE_IMAGE = NWAKU_IMAGE;
async function pullImage() {
try {
await execAsync(`docker inspect ${NWAKU_IMAGE}`);
} catch {
await execAsync(`docker pull ${NWAKU_IMAGE}`);
}
}
async function startFleet() {
const { ServiceNode } = await import("@waku/tests");
const nodes = [];
const nodeInfos = [];
for (let i = 0; i < NUM_NODES; i++) {
const node = new ServiceNode(`rln_integration_${i}_${Date.now()}`);
const args = {
relay: true,
lightpush: true,
filter: true,
store: true,
clusterId: 0,
shard: [0]
};
// Connect subsequent nodes to the first node
if (i > 0 && nodes[0]) {
const firstNodeAddr = await nodes[0].getExternalMultiaddr();
if (firstNodeAddr) {
args.staticnode = firstNodeAddr;
}
}
await node.start(args, { retries: 3 });
nodes.push(node);
const multiaddr = await node.getMultiaddrWithId();
const peerId = await node.getPeerId();
nodeInfos.push({
multiaddr: multiaddr.toString(),
restPort: node.httpUrl,
peerId: peerId.toString()
});
}
// Write fleet info to file
const fleetInfo = { nodes: nodeInfos };
fs.writeFileSync(FLEET_INFO_PATH, JSON.stringify(fleetInfo, null, 2));
return nodes;
}
async function runKarmaTests() {
return new Promise((resolve, reject) => {
const karma = spawn("npx", ["karma", "start", "karma.node.conf.cjs"], {
stdio: "inherit",
env: {
...process.env,
NODE_ENV: "test"
}
});
karma.on("error", (error) => {
reject(new Error(`Karma failed to start: ${error.message}`));
});
karma.on("exit", (code) => {
if (code === 0) {
resolve();
} else {
reject(new Error(`Karma tests failed with exit code ${code}`));
}
});
});
}
async function stopFleet(nodes) {
for (let i = 0; i < nodes.length; i++) {
await nodes[i].stop();
}
// Clean up fleet info file
if (fs.existsSync(FLEET_INFO_PATH)) {
fs.unlinkSync(FLEET_INFO_PATH);
}
}
async function main() {
let nodes = [];
let exitCode = 0;
try {
// Pull the Docker image
await pullImage();
// Start the fleet
nodes = await startFleet();
// Run the tests
await runKarmaTests();
} catch (error) {
console.error("Integration tests failed:", error);
exitCode = 1;
} finally {
if (nodes.length > 0) {
await stopFleet(nodes);
}
}
process.exit(exitCode);
}
main().catch((error) => {
console.error("Unexpected error:", error);
process.exit(1);
});
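
Both this runner and start-nwaku-fleet.ts set WAKUNODE_IMAGE before any ServiceNode is created, so (assuming the environment fallback above) the fleet image can be swapped from the environment when a different nwaku build needs testing; an illustrative invocation:

WAKUNODE_IMAGE=<registry/image:tag> npm run test:node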

View File

@ -0,0 +1,155 @@
/* eslint-disable no-console */
/* eslint-disable import/no-extraneous-dependencies */
/**
* Script to start a fleet of nwaku nodes for RLN integration tests.
* Reuses the tests package infrastructure.
*
* Usage:
* npx ts-node --esm src/test-utils/start-nwaku-fleet.ts start [numNodes]
* npx ts-node --esm src/test-utils/start-nwaku-fleet.ts stop
*/
import * as fs from "fs";
import * as path from "path";
import { fileURLToPath } from "url";
import { ServiceNode } from "@waku/tests";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Use WAKUNODE_IMAGE from environment or fall back to RLN-specific image
const NWAKU_IMAGE =
process.env.WAKUNODE_IMAGE || "quay.io/wakuorg/nwaku-pr:3660";
process.env.WAKUNODE_IMAGE = NWAKU_IMAGE;
interface NodeInfo {
multiaddr: string;
restPort: string;
peerId: string;
}
interface FleetInfo {
nodes: NodeInfo[];
}
const FLEET_INFO_PATH = path.join(__dirname, "../../fleet-info.json");
const activeNodes: ServiceNode[] = [];
async function startFleet(numNodes: number = 2): Promise<FleetInfo> {
console.log(
`Starting fleet of ${numNodes} nwaku nodes with image: ${process.env.WAKUNODE_IMAGE}`
);
const nodes: NodeInfo[] = [];
for (let i = 0; i < numNodes; i++) {
const node = new ServiceNode(`rln_test_node_${i}_${Date.now()}`);
const args: Record<string, unknown> = {
relay: true,
lightpush: true,
filter: true,
store: true,
clusterId: 0,
shard: [0]
};
// Connect subsequent nodes to the first node
if (i > 0 && activeNodes[0]) {
const firstNodeAddr = await activeNodes[0].getExternalMultiaddr();
if (firstNodeAddr) {
args.staticnode = firstNodeAddr;
}
}
await node.start(args, { retries: 3 });
activeNodes.push(node);
const multiaddr = await node.getMultiaddrWithId();
const nodeInfo: NodeInfo = {
multiaddr: multiaddr.toString(),
restPort: node.httpUrl,
peerId: (await node.getPeerId()).toString()
};
nodes.push(nodeInfo);
console.log(`Node ${i} started: ${nodeInfo.multiaddr}`);
}
const fleetInfo: FleetInfo = { nodes };
// Write fleet info to file for the browser test to read
fs.writeFileSync(FLEET_INFO_PATH, JSON.stringify(fleetInfo, null, 2));
console.log(`Fleet info written to ${FLEET_INFO_PATH}`);
return fleetInfo;
}
async function stopFleet(): Promise<void> {
console.log("Stopping all nwaku nodes...");
// ServiceNode handles only exist in this process, so stop the nodes we started directly
for (const node of activeNodes) {
try {
await node.stop();
console.log("Node stopped");
} catch (err) {
console.log(`Error stopping node: ${err}`);
}
}
// Clean up the fleet info file
if (fs.existsSync(FLEET_INFO_PATH)) {
fs.unlinkSync(FLEET_INFO_PATH);
console.log("Fleet info file removed");
}
console.log("Fleet stopped");
}
// Keep the process running after start
async function startAndWait(numNodes: number): Promise<void> {
await startFleet(numNodes);
console.log("\nFleet is running. Press Ctrl+C to stop.\n");
// Handle graceful shutdown
process.on("SIGINT", () => {
console.log("\nReceived SIGINT, stopping fleet...");
void stopFleet().then(() => process.exit(0));
});
process.on("SIGTERM", () => {
console.log("\nReceived SIGTERM, stopping fleet...");
void stopFleet().then(() => process.exit(0));
});
// Keep process alive
await new Promise(() => {});
}
// CLI interface
const command = process.argv[2];
if (command === "start") {
const numNodes = parseInt(process.argv[3] || "2", 10);
startAndWait(numNodes).catch((err) => {
console.error("Failed to start fleet:", err);
process.exit(1);
});
} else if (command === "stop") {
// Note: stop command won't work well since nodes are in-memory
// The recommended way is to use Ctrl+C on the start command
console.log("Use Ctrl+C on the running start command to stop the fleet");
process.exit(0);
} else {
console.log("Usage:");
console.log(
" npx ts-node --esm src/test-utils/start-nwaku-fleet.ts start [numNodes]"
);
console.log(" # Press Ctrl+C to stop");
process.exit(1);
}

View File

@ -31,5 +31,5 @@ export function dateToEpochBytes(timestamp: Date): Uint8Array {
export function dateToNanosecondBytes(timestamp: Date): Uint8Array {
const nanoseconds = BigInt(timestamp.getTime()) * 1000000n;
return BytesUtils.fromBigInt(nanoseconds, 8, "little");
return BytesUtils.bytes32FromBigInt(nanoseconds, "little");
}
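
For reference, the conversion above turns the millisecond timestamp into nanoseconds by multiplying by one million; for example:

// new Date(1000).getTime()  -> 1000 (ms since the Unix epoch)
// BigInt(1000) * 1000000n   -> 1000000000n (ns)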