Mirror of https://github.com/logos-messaging/logos-messaging-js.git, synced 2026-01-25 11:23:11 +00:00
Merge bdbe6181e2f26ab909ca9ea7c5d140b9cc98423b into f2ad23ad4354fb3440ca369ed91ba4d882bbacf6
This commit is contained in: commit b76370ae1f
73
package-lock.json
generated
@ -6872,9 +6872,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@wagmi/cli": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@wagmi/cli/-/cli-2.7.0.tgz",
|
||||
"integrity": "sha512-M0FDVK2/mQSOJne3nG7GiZrecw069GYFY6YGQZbG9IyxPgfOHRgVBvGkeXzGXmb3ezFlzn5jCCIQ2q/9lYh07g==",
|
||||
"version": "2.8.0",
|
||||
"resolved": "https://registry.npmjs.org/@wagmi/cli/-/cli-2.8.0.tgz",
|
||||
"integrity": "sha512-2VhDj8u8vwLZwMZ8CX4pTuO0Qm28Z9uH9qOEWgF/xXUCeVV+4e4YsknEyGcoxYwEmTkdlCmuCvMj4up2XK6vxQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@ -7138,10 +7138,10 @@
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@waku/zerokit-rln-wasm": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@waku/zerokit-rln-wasm/-/zerokit-rln-wasm-0.2.1.tgz",
|
||||
"integrity": "sha512-2Xp7e92y4qZpsiTPGBSVr4gVJ9mJTLaudlo0DQxNpxJUBtoJKpxdH5xDCQDiorbkWZC2j9EId+ohhxHO/xC1QQ==",
|
||||
"license": "MIT or Apache2"
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@waku/zerokit-rln-wasm/-/zerokit-rln-wasm-1.0.0.tgz",
|
||||
"integrity": "sha512-kRAeUePAY3++i5XXniCx+tqDH+3rdfPKED/lFRrbQ8ZiNWpu059fKxtPQqqvd8jNZQUOWDc7HRTpq2TVbWd8yQ==",
|
||||
"license": "MIT OR Apache-2.0"
|
||||
},
|
||||
"node_modules/@webassemblyjs/ast": {
|
||||
"version": "1.14.1",
|
||||
@ -17851,12 +17851,6 @@
|
||||
"jiti": "lib/jiti-cli.mjs"
|
||||
}
|
||||
},
|
||||
"node_modules/js-sha3": {
|
||||
"version": "0.9.3",
|
||||
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.9.3.tgz",
|
||||
"integrity": "sha512-BcJPCQeLg6WjEx3FE591wVAevlli8lxsxm9/FzV4HXkV49TmBH38Yvrpce6fjbADGMKFrBMGTqrVz3qPIZ88Gg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/js-tokens": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
|
||||
@ -32504,9 +32498,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/viem": {
|
||||
"version": "2.38.4",
|
||||
"resolved": "https://registry.npmjs.org/viem/-/viem-2.38.4.tgz",
|
||||
"integrity": "sha512-qnyPNg6Lz1EEC86si/1dq7GlOyZVFHSgAW+p8Q31R5idnAYCOdTM2q5KLE4/ykMeMXzY0bnp5MWTtR/wjCtWmQ==",
|
||||
"version": "2.39.0",
|
||||
"resolved": "https://registry.npmjs.org/viem/-/viem-2.39.0.tgz",
|
||||
"integrity": "sha512-rCN+IfnMESlrg/iPyyVL+M9NS/BHzyyNy72470tFmbTuscY3iPaZGMtJDcHKKV8TC6HV9DjWk0zWX6cpu0juyA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
@ -32569,27 +32563,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/viem/node_modules/ws": {
|
||||
"version": "8.18.3",
|
||||
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
|
||||
"integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"bufferutil": "^4.0.1",
|
||||
"utf-8-validate": ">=5.0.2"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bufferutil": {
|
||||
"optional": true
|
||||
},
|
||||
"utf-8-validate": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/void-elements": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/void-elements/-/void-elements-2.0.1.tgz",
|
||||
@ -34031,6 +34004,12 @@
|
||||
"@esbuild/win32-x64": "0.21.5"
|
||||
}
|
||||
},
|
||||
"packages/browser-tests/node_modules/js-sha3": {
|
||||
"version": "0.9.3",
|
||||
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.9.3.tgz",
|
||||
"integrity": "sha512-BcJPCQeLg6WjEx3FE591wVAevlli8lxsxm9/FzV4HXkV49TmBH38Yvrpce6fjbADGMKFrBMGTqrVz3qPIZ88Gg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"packages/browser-tests/node_modules/undici-types": {
|
||||
"version": "6.19.8",
|
||||
"dev": true,
|
||||
@ -34201,6 +34180,10 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"packages/enr/node_modules/js-sha3": {
|
||||
"version": "0.9.3",
|
||||
"license": "MIT"
|
||||
},
|
||||
"packages/headless-tests": {
|
||||
"name": "@waku/headless-tests",
|
||||
"version": "0.1.0",
|
||||
@ -34269,6 +34252,10 @@
|
||||
"node": ">=22"
|
||||
}
|
||||
},
|
||||
"packages/message-encryption/node_modules/js-sha3": {
|
||||
"version": "0.9.3",
|
||||
"license": "MIT"
|
||||
},
|
||||
"packages/proto": {
|
||||
"name": "@waku/proto",
|
||||
"version": "0.0.15",
|
||||
@ -34735,7 +34722,7 @@
|
||||
"@wagmi/core": "^2.22.1",
|
||||
"@waku/core": "^0.0.40",
|
||||
"@waku/utils": "^0.0.27",
|
||||
"@waku/zerokit-rln-wasm": "^0.2.1",
|
||||
"@waku/zerokit-rln-wasm": "^1.0.0",
|
||||
"chai": "^5.1.2",
|
||||
"chai-as-promised": "^8.0.1",
|
||||
"chai-spies": "^1.1.0",
|
||||
@ -34757,8 +34744,9 @@
|
||||
"@types/sinon": "^17.0.3",
|
||||
"@wagmi/cli": "^2.7.0",
|
||||
"@waku/build-utils": "^1.0.0",
|
||||
"@waku/interfaces": "0.0.34",
|
||||
"@waku/message-encryption": "^0.0.37",
|
||||
"@waku/sdk": "^0.0.36",
|
||||
"@waku/tests": "*",
|
||||
"deep-equal-in-any-order": "^2.0.6",
|
||||
"fast-check": "^3.23.2",
|
||||
"rollup-plugin-copy": "^3.5.0"
|
||||
@ -34917,6 +34905,13 @@
|
||||
"node": ">=0.3.1"
|
||||
}
|
||||
},
|
||||
"packages/rln/node_modules/js-sha3": {
|
||||
"version": "0.9.3",
|
||||
"resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.9.3.tgz",
|
||||
"integrity": "sha512-BcJPCQeLg6WjEx3FE591wVAevlli8lxsxm9/FzV4HXkV49TmBH38Yvrpce6fjbADGMKFrBMGTqrVz3qPIZ88Gg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"packages/rln/node_modules/loupe": {
|
||||
"version": "3.1.3",
|
||||
"license": "MIT"
|
||||
|
||||
@ -24,4 +24,4 @@ if (process.env.CI) {
|
||||
console.log("Running tests serially. To enable parallel execution update mocha config");
|
||||
}
|
||||
|
||||
module.exports = config;
|
||||
module.exports = config;
|
||||
|
||||
@ -13,7 +13,8 @@ module.exports = function (config) {
|
||||
|
||||
files: [
|
||||
{
|
||||
pattern: "src/**/*.spec.ts",
|
||||
// Exclude node tests - they require Docker nwaku nodes and are run via test:node
|
||||
pattern: "src/**/!(*.node).spec.ts",
|
||||
type: "js"
|
||||
},
|
||||
{
|
||||
@ -25,7 +26,7 @@ module.exports = function (config) {
|
||||
nocache: true
|
||||
},
|
||||
{
|
||||
pattern: "src/resources/**/*.zkey",
|
||||
pattern: "src/resources/**/*.arkzkey",
|
||||
included: false,
|
||||
served: true,
|
||||
watched: false,
|
||||
@ -42,7 +43,7 @@ module.exports = function (config) {
|
||||
],
|
||||
|
||||
preprocessors: {
|
||||
"src/**/*.spec.ts": ["webpack"]
|
||||
"src/**/!(*.node).spec.ts": ["webpack"]
|
||||
},
|
||||
|
||||
client: {
|
||||
@ -59,7 +60,7 @@ module.exports = function (config) {
|
||||
|
||||
mime: {
|
||||
"application/wasm": ["wasm"],
|
||||
"application/octet-stream": ["zkey"]
|
||||
"application/octet-stream": ["arkzkey"]
|
||||
},
|
||||
|
||||
customHeaders: [
|
||||
@ -69,7 +70,7 @@ module.exports = function (config) {
|
||||
value: "application/wasm"
|
||||
},
|
||||
{
|
||||
match: ".*\\.zkey$",
|
||||
match: ".*\\.arkzkey$",
|
||||
name: "Content-Type",
|
||||
value: "application/octet-stream"
|
||||
}
|
||||
@ -84,8 +85,8 @@ module.exports = function (config) {
|
||||
),
|
||||
"/base/rln.wasm":
|
||||
"/absolute" + path.resolve(__dirname, "src/resources/rln.wasm"),
|
||||
"/base/rln_final.zkey":
|
||||
"/absolute" + path.resolve(__dirname, "src/resources/rln_final.zkey")
|
||||
"/base/rln_final.arkzkey":
|
||||
"/absolute" + path.resolve(__dirname, "src/resources/rln_final.arkzkey")
|
||||
},
|
||||
|
||||
webpack: {
|
||||
@ -116,7 +117,7 @@ module.exports = function (config) {
|
||||
}
|
||||
},
|
||||
{
|
||||
test: /\.zkey$/,
|
||||
test: /\.arkzkey$/,
|
||||
type: "asset/resource",
|
||||
generator: {
|
||||
filename: "[name][ext]"
|
||||
|
||||
184
packages/rln/karma.node.conf.cjs
Normal file
@ -0,0 +1,184 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
/**
|
||||
* Karma configuration for node integration tests that require nwaku Docker nodes.
|
||||
* These tests connect to pre-started Docker nwaku nodes.
|
||||
*
|
||||
* Usage: npm run test:node
|
||||
* (This will start the nwaku fleet, run these tests, and clean up)
|
||||
*/
|
||||
const path = require("path");
|
||||
|
||||
const webpack = require("webpack");
|
||||
|
||||
const rootConfig = require("../../karma.conf.cjs");
|
||||
|
||||
module.exports = function (config) {
|
||||
rootConfig(config);
|
||||
|
||||
const configuration = {
|
||||
frameworks: ["mocha", "webpack"],
|
||||
|
||||
files: [
|
||||
{
|
||||
// Only run node integration tests (requires Docker nwaku nodes)
|
||||
pattern: "src/**/*.node.spec.ts",
|
||||
type: "js"
|
||||
},
|
||||
{
|
||||
pattern: "src/resources/**/*.wasm",
|
||||
included: false,
|
||||
served: true,
|
||||
watched: false,
|
||||
type: "wasm",
|
||||
nocache: true
|
||||
},
|
||||
{
|
||||
pattern: "src/resources/**/*.arkzkey",
|
||||
included: false,
|
||||
served: true,
|
||||
watched: false,
|
||||
nocache: true
|
||||
},
|
||||
{
|
||||
pattern: "../../node_modules/@waku/zerokit-rln-wasm/*.wasm",
|
||||
included: false,
|
||||
served: true,
|
||||
watched: false,
|
||||
type: "wasm",
|
||||
nocache: true
|
||||
},
|
||||
{
|
||||
// Fleet info is written by the integration test runner
|
||||
pattern: "fleet-info.json",
|
||||
included: false,
|
||||
served: true,
|
||||
watched: false,
|
||||
nocache: true
|
||||
}
|
||||
],
|
||||
|
||||
preprocessors: {
|
||||
"src/**/*.node.spec.ts": ["webpack"]
|
||||
},
|
||||
|
||||
client: {
|
||||
mocha: {
|
||||
timeout: 300000 // 5 minutes
|
||||
}
|
||||
},
|
||||
|
||||
browserDisconnectTimeout: 300000, // 5 minutes
|
||||
browserDisconnectTolerance: 3, // Number of tries before failing
|
||||
browserNoActivityTimeout: 300000, // 5 minutes
|
||||
captureTimeout: 300000, // 5 minutes
|
||||
pingTimeout: 300000, // 5 minutes
|
||||
|
||||
mime: {
|
||||
"application/wasm": ["wasm"],
|
||||
"application/octet-stream": ["arkzkey"]
|
||||
},
|
||||
|
||||
customHeaders: [
|
||||
{
|
||||
match: ".*\\.wasm$",
|
||||
name: "Content-Type",
|
||||
value: "application/wasm"
|
||||
},
|
||||
{
|
||||
match: ".*\\.arkzkey$",
|
||||
name: "Content-Type",
|
||||
value: "application/octet-stream"
|
||||
}
|
||||
],
|
||||
|
||||
proxies: {
|
||||
"/base/rln_wasm_bg.wasm":
|
||||
"/absolute" +
|
||||
path.resolve(
|
||||
__dirname,
|
||||
"../../node_modules/@waku/zerokit-rln-wasm/rln_wasm_bg.wasm"
|
||||
),
|
||||
"/base/rln.wasm":
|
||||
"/absolute" + path.resolve(__dirname, "src/resources/rln.wasm"),
|
||||
"/base/rln_final.arkzkey":
|
||||
"/absolute" + path.resolve(__dirname, "src/resources/rln_final.arkzkey")
|
||||
},
|
||||
|
||||
webpack: {
|
||||
mode: "development",
|
||||
experiments: {
|
||||
asyncWebAssembly: true,
|
||||
syncWebAssembly: true,
|
||||
topLevelAwait: true
|
||||
},
|
||||
output: {
|
||||
wasmLoading: "fetch",
|
||||
path: path.resolve(__dirname, "dist"),
|
||||
publicPath: "/base/",
|
||||
clean: true
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.ts$/,
|
||||
use: "ts-loader",
|
||||
exclude: /node_modules/
|
||||
},
|
||||
{
|
||||
test: /\.wasm$/,
|
||||
type: "asset/resource",
|
||||
generator: {
|
||||
filename: "[name][ext]"
|
||||
}
|
||||
},
|
||||
{
|
||||
test: /\.arkzkey$/,
|
||||
type: "asset/resource",
|
||||
generator: {
|
||||
filename: "[name][ext]"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
plugins: [
|
||||
new webpack.DefinePlugin({
|
||||
"process.env.CI": process.env.CI || false,
|
||||
"process.env.DISPLAY": "Browser"
|
||||
}),
|
||||
new webpack.ProvidePlugin({
|
||||
process: "process/browser.js"
|
||||
})
|
||||
],
|
||||
resolve: {
|
||||
extensions: [".ts", ".js", ".wasm"],
|
||||
modules: ["node_modules", "../../node_modules"],
|
||||
alias: {
|
||||
"@waku/zerokit-rln-wasm": path.resolve(
|
||||
__dirname,
|
||||
"../../node_modules/@waku/zerokit-rln-wasm/rln_wasm.js"
|
||||
)
|
||||
},
|
||||
fallback: {
|
||||
crypto: false,
|
||||
fs: false,
|
||||
path: false,
|
||||
stream: false
|
||||
}
|
||||
},
|
||||
stats: { warnings: false },
|
||||
devtool: "inline-source-map"
|
||||
},
|
||||
|
||||
reporters: ["progress"],
|
||||
port: 9876,
|
||||
colors: true,
|
||||
logLevel: config.LOG_INFO,
|
||||
autoWatch: true,
|
||||
browsers: process.env.CI ? ["ChromeHeadlessCI"] : ["ChromeHeadless"],
|
||||
singleRun: true,
|
||||
concurrency: 1, // Reduce concurrency to avoid memory pressure
|
||||
browserSocketTimeout: 180000 // 3 minutes
|
||||
};
|
||||
|
||||
config.set(configuration);
|
||||
};
|
||||
@ -38,8 +38,10 @@
|
||||
"check:tsc": "tsc -p tsconfig.dev.json",
|
||||
"check:lint": "eslint \"src/!(resources)/**/*.{ts,js}\" *.js",
|
||||
"check:spelling": "cspell \"{README.md,src/**/*.ts}\"",
|
||||
"test": "NODE_ENV=test run-s test:*",
|
||||
"test": "NODE_ENV=test run-s test:browser",
|
||||
"test:browser": "karma start karma.conf.cjs",
|
||||
"test:node": "node src/test-utils/run-integration-tests.js",
|
||||
"nwaku:start": "npx ts-node --esm src/test-utils/start-nwaku-fleet.ts start 2",
|
||||
"watch:build": "tsc -p tsconfig.json -w",
|
||||
"watch:test": "mocha --watch",
|
||||
"prepublish": "npm run build",
|
||||
@ -60,8 +62,9 @@
|
||||
"@types/sinon": "^17.0.3",
|
||||
"@wagmi/cli": "^2.7.0",
|
||||
"@waku/build-utils": "^1.0.0",
|
||||
"@waku/interfaces": "0.0.34",
|
||||
"@waku/message-encryption": "^0.0.37",
|
||||
"@waku/sdk": "^0.0.36",
|
||||
"@waku/tests": "*",
|
||||
"deep-equal-in-any-order": "^2.0.6",
|
||||
"fast-check": "^3.23.2",
|
||||
"rollup-plugin-copy": "^3.5.0"
|
||||
@ -82,7 +85,7 @@
|
||||
"@wagmi/core": "^2.22.1",
|
||||
"@waku/core": "^0.0.40",
|
||||
"@waku/utils": "^0.0.27",
|
||||
"@waku/zerokit-rln-wasm": "^0.2.1",
|
||||
"@waku/zerokit-rln-wasm": "^1.0.0",
|
||||
"chai": "^5.1.2",
|
||||
"chai-as-promised": "^8.0.1",
|
||||
"chai-spies": "^1.1.0",
|
||||
|
||||
121
packages/rln/src/codec.ts
Normal file
@ -0,0 +1,121 @@
|
||||
import type {
|
||||
IEncoder,
|
||||
IMessage,
|
||||
IProtoMessage,
|
||||
IRateLimitProof,
|
||||
IRoutingInfo
|
||||
} from "@waku/interfaces";
|
||||
import { Logger } from "@waku/utils";
|
||||
|
||||
import { RLNCredentialsManager } from "./credentials_manager.js";
|
||||
import { Proof } from "./proof.js";
|
||||
import { RLNInstance } from "./rln.js";
|
||||
import { BytesUtils } from "./utils/bytes.js";
|
||||
import { dateToNanosecondBytes } from "./utils/epoch.js";
|
||||
|
||||
const log = new Logger("waku:rln:encoder");
|
||||
|
||||
export class RLNEncoder implements IEncoder {
|
||||
public constructor(
|
||||
private readonly encoder: IEncoder,
|
||||
private readonly rlnInstance: RLNInstance,
|
||||
private readonly rateLimit: number,
|
||||
private readonly credentialsManager: RLNCredentialsManager
|
||||
) {}
|
||||
|
||||
private toRlnSignal(message: IMessage): Uint8Array {
|
||||
if (!message.timestamp)
|
||||
throw new Error("RLNEncoder: message must have a timestamp set");
|
||||
const contentTopicBytes = new TextEncoder().encode(this.contentTopic);
|
||||
const timestampBytes = dateToNanosecondBytes(message.timestamp);
|
||||
|
||||
return BytesUtils.concatenate(
|
||||
message.payload,
|
||||
contentTopicBytes,
|
||||
timestampBytes
|
||||
);
|
||||
}
|
||||
|
||||
public async toWire(message: IMessage): Promise<Uint8Array | undefined> {
|
||||
if (!message.rateLimitProof) {
|
||||
message.rateLimitProof = await this.generateProof(message);
|
||||
log.info("Proof generated", message.rateLimitProof);
|
||||
}
|
||||
return this.encoder.toWire(message);
|
||||
}
|
||||
|
||||
public async toProtoObj(
|
||||
message: IMessage
|
||||
): Promise<IProtoMessage | undefined> {
|
||||
const protoMessage = await this.encoder.toProtoObj(message);
|
||||
if (!protoMessage) return;
|
||||
|
||||
protoMessage.contentTopic = this.contentTopic;
|
||||
if (!message.rateLimitProof) {
|
||||
protoMessage.rateLimitProof = await this.generateProof(message);
|
||||
log.info("Proof generated", protoMessage.rateLimitProof);
|
||||
} else {
|
||||
protoMessage.rateLimitProof = message.rateLimitProof;
|
||||
}
|
||||
return protoMessage;
|
||||
}
|
||||
|
||||
private async generateProof(message: IMessage): Promise<IRateLimitProof> {
|
||||
if (!message.timestamp)
|
||||
throw new Error("RLNEncoder: message must have a timestamp set");
|
||||
if (!this.credentialsManager.credentials) {
|
||||
throw new Error("RLNEncoder: credentials not set");
|
||||
}
|
||||
if (
|
||||
!this.credentialsManager.pathElements ||
|
||||
!this.credentialsManager.identityPathIndex
|
||||
) {
|
||||
throw new Error("RLNEncoder: merkle proof not set");
|
||||
}
|
||||
const signal = this.toRlnSignal(message);
|
||||
const { proof, epoch, rlnIdentifier } =
|
||||
await this.rlnInstance.zerokit.generateRLNProof(
|
||||
signal,
|
||||
message.timestamp,
|
||||
this.credentialsManager.credentials.identity.IDSecretHash,
|
||||
this.credentialsManager.pathElements,
|
||||
this.credentialsManager.identityPathIndex,
|
||||
this.rateLimit,
|
||||
0 // TODO: need to track messages sent per epoch
|
||||
);
|
||||
|
||||
return new Proof(proof.toBytesLE(), epoch, rlnIdentifier);
|
||||
}
|
||||
|
||||
public get pubsubTopic(): string {
|
||||
return this.encoder.pubsubTopic;
|
||||
}
|
||||
|
||||
public get routingInfo(): IRoutingInfo {
|
||||
return this.encoder.routingInfo;
|
||||
}
|
||||
|
||||
public get contentTopic(): string {
|
||||
return this.encoder.contentTopic;
|
||||
}
|
||||
|
||||
public get ephemeral(): boolean {
|
||||
return this.encoder.ephemeral;
|
||||
}
|
||||
}
|
||||
|
||||
type RLNEncoderOptions = {
|
||||
encoder: IEncoder;
|
||||
rlnInstance: RLNInstance;
|
||||
credentialsManager: RLNCredentialsManager;
|
||||
rateLimit: number;
|
||||
};
|
||||
|
||||
export const createRLNEncoder = (options: RLNEncoderOptions): RLNEncoder => {
|
||||
return new RLNEncoder(
|
||||
options.encoder,
|
||||
options.rlnInstance,
|
||||
options.rateLimit,
|
||||
options.credentialsManager
|
||||
);
|
||||
};
|
||||
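For orientation, a minimal sketch (not part of this change set) of wiring the factory above to an existing encoder. The helper name, rate limit, and payload are illustrative; the RLN instance and credentials manager are assumed to be already started, with credentials and Merkle proof data loaded, and the base encoder is assumed to come from the SDK (e.g. `waku.createEncoder(...)`, as in the integration test further below).

```typescript
import type { IEncoder } from "@waku/interfaces";

import { createRLNEncoder } from "./codec.js";
import { RLNCredentialsManager } from "./credentials_manager.js";
import { RLNInstance } from "./rln.js";

// Hypothetical helper: wraps an existing encoder so every outgoing message
// with a timestamp gets a rate limit proof attached before serialization.
export async function sendableBytesWithProof(
  baseEncoder: IEncoder,
  rlnInstance: RLNInstance,
  credentialsManager: RLNCredentialsManager
): Promise<Uint8Array | undefined> {
  const rlnEncoder = createRLNEncoder({
    encoder: baseEncoder,
    rlnInstance,
    credentialsManager,
    rateLimit: 20 // illustrative rate limit
  });

  const message = {
    payload: new TextEncoder().encode("hello"),
    timestamp: new Date() // required: toRlnSignal() throws without a timestamp
  };

  // generateProof() runs under the hood before delegating to the wrapped encoder.
  return rlnEncoder.toWire(message);
}
```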
@ -1,2 +1,3 @@
|
||||
export * from "./constants.js";
|
||||
export * from "./types.js";
|
||||
export { RLNBaseContract } from "./rln_base_contract.js";
|
||||
|
||||
@ -579,4 +579,47 @@ export class RLNBaseContract {
|
||||
}
|
||||
return { token, price };
|
||||
}
|
||||
|
||||
/**
* Watches for RootStored events emitted by the contract
* @param callback Callback invoked whenever new RootStored events are detected
* @param pollingInterval Optional polling interval (in milliseconds) for the underlying event watcher
* @returns A function that can be invoked to stop watching for events
*
* @example
* ```typescript
* const unwatch = await contract.watchRootStoredEvent(() => {
*   console.log("RootStored event detected, merkle root changed");
* }, 5000);
*
* // Later, to stop watching:
* unwatch();
* ```
*/
|
||||
public async watchRootStoredEvent(
|
||||
callback: () => void,
|
||||
pollingInterval?: number
|
||||
): Promise<() => void> {
|
||||
log.info("Starting to watch RootStored events", {
|
||||
address: this.contract.address,
|
||||
pollingInterval
|
||||
});
|
||||
|
||||
const fromBlock = await this.rpcClient.getBlockNumber();
|
||||
|
||||
return this.contract.watchEvent.RootStored({
|
||||
onLogs: (_) => {
|
||||
callback();
|
||||
},
|
||||
onError: (error) => log.error("Error watching RootStored events:", error),
|
||||
pollingInterval,
|
||||
fromBlock,
|
||||
batch: false
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -3,6 +3,7 @@ import { publicActions } from "viem";
|
||||
|
||||
import { RLN_CONTRACT } from "./contract/constants.js";
|
||||
import { RLNBaseContract } from "./contract/rln_base_contract.js";
|
||||
import { IdentityCredential } from "./identity.js";
|
||||
import { Keystore } from "./keystore/index.js";
|
||||
import type {
|
||||
DecryptedCredentials,
|
||||
@ -10,7 +11,12 @@ import type {
|
||||
} from "./keystore/index.js";
|
||||
import { KeystoreEntity, Password } from "./keystore/types.js";
|
||||
import { RegisterMembershipOptions, StartRLNOptions } from "./types.js";
|
||||
import { createViemClientFromWindow, RpcClient } from "./utils/index.js";
|
||||
import {
|
||||
BytesUtils,
|
||||
createViemClientFromWindow,
|
||||
getPathDirectionsFromIndex,
|
||||
RpcClient
|
||||
} from "./utils/index.js";
|
||||
import { Zerokit } from "./zerokit.js";
|
||||
|
||||
const log = new Logger("rln:credentials");
|
||||
@ -28,9 +34,14 @@ export class RLNCredentialsManager {
|
||||
|
||||
protected keystore = Keystore.create();
|
||||
public credentials: undefined | DecryptedCredentials;
|
||||
public pathElements: undefined | Uint8Array[];
|
||||
public identityPathIndex: undefined | Uint8Array[];
|
||||
|
||||
public zerokit: Zerokit;
|
||||
|
||||
private unwatchRootStored?: () => void;
|
||||
private rootPollingInterval?: number = 5000;
|
||||
|
||||
public constructor(zerokit: Zerokit) {
|
||||
log.info("RLNCredentialsManager initialized");
|
||||
this.zerokit = zerokit;
|
||||
@ -73,6 +84,11 @@ export class RLNCredentialsManager {
|
||||
rateLimit: rateLimit ?? this.zerokit.rateLimit
|
||||
});
|
||||
|
||||
if (this.credentials) {
|
||||
await this.updateMerkleProof();
|
||||
await this.startWatchingRootStored();
|
||||
}
|
||||
|
||||
log.info("RLNCredentialsManager successfully started");
|
||||
this.started = true;
|
||||
} catch (error) {
|
||||
@ -96,9 +112,10 @@ export class RLNCredentialsManager {
|
||||
|
||||
if ("signature" in options) {
|
||||
log.info("Using Zerokit to generate identity");
|
||||
identity = this.zerokit.generateSeededIdentityCredential(
|
||||
const extendedIdentity = this.zerokit.generateSeededIdentityCredential(
|
||||
options.signature
|
||||
);
|
||||
identity = IdentityCredential.fromBytes(extendedIdentity.toBytesLE());
|
||||
}
|
||||
|
||||
if (!identity) {
|
||||
@ -225,4 +242,80 @@ export class RLNCredentialsManager {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the Merkle proof for the current credentials
|
||||
* Fetches the latest proof from the contract and updates pathElements and identityPathIndex
|
||||
*/
|
||||
private async updateMerkleProof(): Promise<void> {
|
||||
if (!this.contract || !this.credentials) {
|
||||
log.warn("Cannot update merkle proof: contract or credentials not set");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const treeIndex = this.credentials.membership.treeIndex;
|
||||
log.info(`Updating merkle proof for tree index: ${treeIndex}`);
|
||||
|
||||
// Get the merkle proof from the contract
|
||||
const proof = await this.contract.getMerkleProof(treeIndex);
|
||||
|
||||
// Convert bigint[] to Uint8Array[] for pathElements
|
||||
this.pathElements = proof.map((element) =>
|
||||
BytesUtils.bytes32FromBigInt(element, "little")
|
||||
);
|
||||
|
||||
// Get path directions from the tree index
|
||||
const pathDirections = getPathDirectionsFromIndex(BigInt(treeIndex));
|
||||
|
||||
// Convert path directions to Uint8Array[] for identityPathIndex
|
||||
this.identityPathIndex = pathDirections.map((direction: number) =>
|
||||
Uint8Array.from([direction])
|
||||
);
|
||||
|
||||
log.info("Successfully updated merkle proof", {
|
||||
pathElementsCount: this.pathElements.length,
|
||||
pathIndexCount: this.identityPathIndex!.length
|
||||
});
|
||||
} catch (error) {
|
||||
log.error("Failed to update merkle proof:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts watching for RootStored events and updates merkle proof when detected
|
||||
*/
|
||||
private async startWatchingRootStored(): Promise<void> {
|
||||
if (!this.contract) {
|
||||
log.warn("Cannot watch for RootStored events: contract not set");
|
||||
return;
|
||||
}
|
||||
|
||||
// Stop any existing watcher
|
||||
this.stopWatchingRootStored();
|
||||
|
||||
log.info("Starting to watch for RootStored events");
|
||||
|
||||
this.unwatchRootStored = await this.contract.watchRootStoredEvent(() => {
|
||||
// Update the merkle proof when root changes (fire-and-forget)
|
||||
this.updateMerkleProof().catch((error) => {
|
||||
log.error(
|
||||
"Failed to update merkle proof after RootStored event:",
|
||||
error
|
||||
);
|
||||
});
|
||||
}, this.rootPollingInterval);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops watching for RootStored events
|
||||
*/
|
||||
private stopWatchingRootStored(): void {
|
||||
if (this.unwatchRootStored) {
|
||||
log.info("Stopping RootStored event watcher");
|
||||
this.unwatchRootStored();
|
||||
this.unwatchRootStored = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
140
packages/rln/src/encoder.node.spec.ts
Normal file
@ -0,0 +1,140 @@
|
||||
import { multiaddr } from "@multiformats/multiaddr";
|
||||
import { createLightNode, Protocols } from "@waku/sdk";
|
||||
import { expect } from "chai";
|
||||
import Sinon from "sinon";
|
||||
|
||||
import { createRLNEncoder } from "./codec.js";
|
||||
import { RLNCredentialsManager } from "./credentials_manager.js";
|
||||
import { Keystore } from "./keystore/index.js";
|
||||
import { RLNInstance } from "./rln.js";
|
||||
import { BytesUtils } from "./utils/index.js";
|
||||
import { getPathDirectionsFromIndex } from "./utils/merkle.js";
|
||||
import { TEST_KEYSTORE_DATA } from "./utils/test_keystore.js";
|
||||
|
||||
interface NodeInfo {
|
||||
multiaddr: string;
|
||||
restPort: string;
|
||||
peerId: string;
|
||||
}
|
||||
|
||||
interface FleetInfo {
|
||||
nodes: NodeInfo[];
|
||||
}
|
||||
|
||||
async function getFleetInfo(): Promise<FleetInfo> {
|
||||
const response = await fetch("/base/fleet-info.json");
|
||||
if (!response.ok) {
|
||||
throw new Error(
|
||||
`Failed to fetch fleet info: ${response.status} ${response.statusText}. ` +
|
||||
"Make sure to start the nwaku fleet before running tests."
|
||||
);
|
||||
}
|
||||
return response.json();
|
||||
}
|
||||
|
||||
describe("RLN Proof Integration Tests", function () {
|
||||
this.timeout(30000);
|
||||
|
||||
it("sends a message with a proof", async function () {
|
||||
// Get fleet info from the pre-started nwaku nodes
|
||||
const fleetInfo = await getFleetInfo();
|
||||
expect(fleetInfo.nodes.length).to.be.greaterThanOrEqual(2);
|
||||
|
||||
const waku = await createLightNode({
|
||||
networkConfig: {
|
||||
clusterId: 0,
|
||||
numShardsInCluster: 1
|
||||
},
|
||||
defaultBootstrap: false,
|
||||
libp2p: {
|
||||
filterMultiaddrs: false
|
||||
}
|
||||
});
|
||||
|
||||
// Create RLN instance
|
||||
const rlnInstance = await RLNInstance.create();
|
||||
|
||||
// Load credential from test keystore
|
||||
const keystore = Keystore.fromString(TEST_KEYSTORE_DATA.keystoreJson);
|
||||
if (!keystore) {
|
||||
throw new Error("Failed to load test keystore");
|
||||
}
|
||||
const credential = await keystore.readCredential(
|
||||
TEST_KEYSTORE_DATA.credentialHash,
|
||||
TEST_KEYSTORE_DATA.password
|
||||
);
|
||||
if (!credential) {
|
||||
throw new Error("Failed to unlock credential with provided password");
|
||||
}
|
||||
|
||||
// Prepare merkle proof data
|
||||
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
|
||||
const membershipIndex = Number(TEST_KEYSTORE_DATA.membershipIndex);
|
||||
const rateLimit = Number(TEST_KEYSTORE_DATA.rateLimit);
|
||||
|
||||
const proofElementIndexes = getPathDirectionsFromIndex(
|
||||
BigInt(membershipIndex)
|
||||
);
|
||||
|
||||
// Convert merkle proof to bytes format
|
||||
const pathElements = merkleProof.map((proof) =>
|
||||
BytesUtils.bytes32FromBigInt(proof)
|
||||
);
|
||||
const identityPathIndex = proofElementIndexes.map((index) =>
|
||||
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
|
||||
);
|
||||
|
||||
// Create mock credentials manager
|
||||
const mockCredentialsManager = Sinon.createStubInstance(
|
||||
RLNCredentialsManager
|
||||
);
|
||||
|
||||
// Set up the mock to return test values
|
||||
Object.defineProperty(mockCredentialsManager, "credentials", {
|
||||
get: () => credential,
|
||||
configurable: true
|
||||
});
|
||||
Object.defineProperty(mockCredentialsManager, "pathElements", {
|
||||
get: () => pathElements,
|
||||
configurable: true
|
||||
});
|
||||
Object.defineProperty(mockCredentialsManager, "identityPathIndex", {
|
||||
get: () => identityPathIndex,
|
||||
configurable: true
|
||||
});
|
||||
|
||||
// Create base encoder
|
||||
const contentTopic = "/rln/1/test/proto";
|
||||
const baseEncoder = waku.createEncoder({
|
||||
contentTopic
|
||||
});
|
||||
|
||||
// Create RLN encoder
|
||||
const rlnEncoder = createRLNEncoder({
|
||||
encoder: baseEncoder,
|
||||
rlnInstance,
|
||||
credentialsManager:
|
||||
mockCredentialsManager as unknown as RLNCredentialsManager,
|
||||
rateLimit
|
||||
});
|
||||
|
||||
// Connect to all nodes in the fleet
|
||||
for (const nodeInfo of fleetInfo.nodes) {
|
||||
const nwakuMultiaddr = multiaddr(nodeInfo.multiaddr);
|
||||
await waku.dial(nwakuMultiaddr, [Protocols.LightPush]);
|
||||
}
|
||||
|
||||
await waku.waitForPeers([Protocols.LightPush]);
|
||||
|
||||
// Create message
|
||||
const messageTimestamp = new Date();
|
||||
const message = {
|
||||
payload: new TextEncoder().encode("Hello RLN!"),
|
||||
timestamp: messageTimestamp
|
||||
};
|
||||
|
||||
// Send message with proof
|
||||
const result = await waku.lightPush.send(rlnEncoder, message);
|
||||
expect(result.successes.length).to.be.greaterThan(0);
|
||||
});
|
||||
});
|
||||
@ -29,6 +29,16 @@ import type {
|
||||
|
||||
const log = new Logger("rln:keystore");
|
||||
|
||||
/**
|
||||
* Custom replacer function to handle BigInt serialization in JSON.stringify
|
||||
*/
|
||||
const bigIntReplacer = (_key: string, value: unknown): unknown => {
|
||||
if (typeof value === "bigint") {
|
||||
return value.toString();
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
type NwakuCredential = {
|
||||
crypto: {
|
||||
cipher: ICipherModule["function"];
|
||||
@ -160,7 +170,7 @@ export class Keystore {
|
||||
}
|
||||
|
||||
public toString(): string {
|
||||
return JSON.stringify(this.data);
|
||||
return JSON.stringify(this.data, bigIntReplacer);
|
||||
}
|
||||
|
||||
public toObject(): NwakuKeystore {
|
||||
@ -328,20 +338,23 @@ export class Keystore {
|
||||
options.identity;
|
||||
|
||||
return utf8ToBytes(
|
||||
JSON.stringify({
|
||||
treeIndex: options.membership.treeIndex,
|
||||
identityCredential: {
|
||||
idCommitment: Array.from(IDCommitment),
|
||||
idNullifier: Array.from(IDNullifier),
|
||||
idSecretHash: Array.from(IDSecretHash),
|
||||
idTrapdoor: Array.from(IDTrapdoor)
|
||||
JSON.stringify(
|
||||
{
|
||||
treeIndex: options.membership.treeIndex,
|
||||
identityCredential: {
|
||||
idCommitment: Array.from(IDCommitment),
|
||||
idNullifier: Array.from(IDNullifier),
|
||||
idSecretHash: Array.from(IDSecretHash),
|
||||
idTrapdoor: Array.from(IDTrapdoor)
|
||||
},
|
||||
membershipContract: {
|
||||
chainId: options.membership.chainId,
|
||||
address: options.membership.address
|
||||
},
|
||||
userMessageLimit: options.membership.rateLimit
|
||||
},
|
||||
membershipContract: {
|
||||
chainId: options.membership.chainId,
|
||||
address: options.membership.address
|
||||
},
|
||||
userMessageLimit: options.membership.rateLimit
|
||||
})
|
||||
bigIntReplacer
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
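Returning to the `bigIntReplacer` introduced at the top of this file: a brief standalone illustration of why it is needed (plain `JSON.stringify` throws a `TypeError` on `bigint` values). The sample object is arbitrary.

```typescript
// Standalone sketch mirroring the replacer above.
const replacer = (_key: string, value: unknown): unknown =>
  typeof value === "bigint" ? value.toString() : value;

const data = { treeIndex: 42, userMessageLimit: 20n };

// JSON.stringify(data) alone would throw a TypeError on the bigint field.
const json = JSON.stringify(data, replacer);
// json === '{"treeIndex":42,"userMessageLimit":"20"}'
```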
170
packages/rln/src/proof.spec.ts
Normal file
@ -0,0 +1,170 @@
|
||||
import { expect } from "chai";
|
||||
|
||||
import { Keystore } from "./keystore/index.js";
|
||||
import { Proof, proofToBytes } from "./proof.js";
|
||||
import { RLNInstance } from "./rln.js";
|
||||
import { BytesUtils } from "./utils/index.js";
|
||||
import {
|
||||
calculateRateCommitment,
|
||||
getPathDirectionsFromIndex,
|
||||
MERKLE_TREE_DEPTH,
|
||||
reconstructMerkleRoot
|
||||
} from "./utils/merkle.js";
|
||||
import { TEST_KEYSTORE_DATA } from "./utils/test_keystore.js";
|
||||
|
||||
describe("RLN Proof Unit Tests", function () {
|
||||
this.timeout(30000);
|
||||
|
||||
it("validate stored merkle proof data", function () {
|
||||
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
|
||||
|
||||
expect(merkleProof).to.be.an("array");
|
||||
expect(merkleProof).to.have.lengthOf(MERKLE_TREE_DEPTH);
|
||||
|
||||
for (let i = 0; i < merkleProof.length; i++) {
|
||||
const element = merkleProof[i];
|
||||
expect(element).to.be.a(
|
||||
"bigint",
|
||||
`Proof element ${i} should be a bigint`
|
||||
);
|
||||
expect(element).to.not.equal(0n, `Proof element ${i} should not be zero`);
|
||||
}
|
||||
});
|
||||
|
||||
it("should generate a valid RLN proof", async function () {
|
||||
const rlnInstance = await RLNInstance.create();
|
||||
const keystore = Keystore.fromString(TEST_KEYSTORE_DATA.keystoreJson);
|
||||
if (!keystore) {
|
||||
throw new Error("Failed to load test keystore");
|
||||
}
|
||||
const credentialHash = TEST_KEYSTORE_DATA.credentialHash;
|
||||
const password = TEST_KEYSTORE_DATA.password;
|
||||
const credential = await keystore.readCredential(credentialHash, password);
|
||||
if (!credential) {
|
||||
throw new Error("Failed to unlock credential with provided password");
|
||||
}
|
||||
|
||||
const idCommitment = credential.identity.IDCommitmentBigInt;
|
||||
|
||||
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
|
||||
const merkleRoot = BigInt(TEST_KEYSTORE_DATA.merkleRoot);
|
||||
const membershipIndex = BigInt(TEST_KEYSTORE_DATA.membershipIndex);
|
||||
const rateLimit = BigInt(TEST_KEYSTORE_DATA.rateLimit);
|
||||
|
||||
const rateCommitment = calculateRateCommitment(idCommitment, rateLimit);
|
||||
|
||||
const proofElementIndexes = getPathDirectionsFromIndex(membershipIndex);
|
||||
|
||||
expect(proofElementIndexes).to.have.lengthOf(MERKLE_TREE_DEPTH);
|
||||
|
||||
const reconstructedRoot = reconstructMerkleRoot(
|
||||
merkleProof,
|
||||
membershipIndex,
|
||||
rateCommitment
|
||||
);
|
||||
|
||||
expect(reconstructedRoot).to.equal(
|
||||
merkleRoot,
|
||||
"Reconstructed root should match stored root"
|
||||
);
|
||||
|
||||
const testMessage = new TextEncoder().encode("test");
|
||||
|
||||
const { proof } = await rlnInstance.zerokit.generateRLNProof(
|
||||
testMessage,
|
||||
new Date(),
|
||||
credential.identity.IDSecretHash,
|
||||
merkleProof.map((element) =>
|
||||
BytesUtils.bytes32FromBigInt(element, "little")
|
||||
),
|
||||
proofElementIndexes.map((index) =>
|
||||
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
|
||||
),
|
||||
Number(rateLimit),
|
||||
0
|
||||
);
|
||||
|
||||
const isValid = rlnInstance.zerokit.verifyRLNProof(
|
||||
BytesUtils.writeUIntLE(new Uint8Array(8), testMessage.length, 0, 8),
|
||||
testMessage,
|
||||
proof,
|
||||
[BytesUtils.bytes32FromBigInt(merkleRoot, "little")]
|
||||
);
|
||||
expect(isValid).to.be.true;
|
||||
});
|
||||
|
||||
it("should parse proof bytes into Proof class", async function () {
|
||||
const rlnInstance = await RLNInstance.create();
|
||||
|
||||
// Load credential from test keystore
|
||||
const keystore = Keystore.fromString(TEST_KEYSTORE_DATA.keystoreJson);
|
||||
if (!keystore) {
|
||||
throw new Error("Failed to load test keystore");
|
||||
}
|
||||
const credential = await keystore.readCredential(
|
||||
TEST_KEYSTORE_DATA.credentialHash,
|
||||
TEST_KEYSTORE_DATA.password
|
||||
);
|
||||
if (!credential) {
|
||||
throw new Error("Failed to unlock credential with provided password");
|
||||
}
|
||||
|
||||
const merkleProof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p));
|
||||
const merkleRoot = BigInt(TEST_KEYSTORE_DATA.merkleRoot);
|
||||
const membershipIndex = BigInt(TEST_KEYSTORE_DATA.membershipIndex);
|
||||
const rateLimit = BigInt(TEST_KEYSTORE_DATA.rateLimit);
|
||||
|
||||
const proofElementIndexes = getPathDirectionsFromIndex(membershipIndex);
|
||||
|
||||
const testMessage = new TextEncoder().encode("test");
|
||||
|
||||
// Generate the proof
|
||||
const { proof, epoch, rlnIdentifier } =
|
||||
await rlnInstance.zerokit.generateRLNProof(
|
||||
testMessage,
|
||||
new Date(),
|
||||
credential.identity.IDSecretHash,
|
||||
merkleProof.map((proof) => BytesUtils.bytes32FromBigInt(proof)),
|
||||
proofElementIndexes.map((index) =>
|
||||
BytesUtils.writeUIntLE(new Uint8Array(1), index, 0, 1)
|
||||
),
|
||||
Number(rateLimit),
|
||||
0
|
||||
);
|
||||
|
||||
// Parse proof bytes into Proof class
|
||||
const parsedProof = new Proof(proof.toBytesLE(), epoch, rlnIdentifier);
|
||||
|
||||
// Verify all fields have correct lengths according to Nim format:
|
||||
// proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32>
|
||||
expect(parsedProof.proof).to.have.lengthOf(128);
|
||||
expect(parsedProof.merkleRoot).to.have.lengthOf(32);
|
||||
expect(parsedProof.externalNullifier).to.have.lengthOf(32);
|
||||
expect(parsedProof.shareX).to.have.lengthOf(32);
|
||||
expect(parsedProof.shareY).to.have.lengthOf(32);
|
||||
expect(parsedProof.nullifier).to.have.lengthOf(32);
|
||||
|
||||
// Verify merkle root matches expected
|
||||
const parsedMerkleRoot = BytesUtils.toBigInt(parsedProof.merkleRoot);
|
||||
expect(parsedMerkleRoot).to.equal(
|
||||
merkleRoot,
|
||||
"Parsed merkle root should match expected"
|
||||
);
|
||||
|
||||
// Verify round-trip: proofToBytes should reconstruct original bytes
|
||||
const reconstructedBytes = proofToBytes(parsedProof);
|
||||
expect(reconstructedBytes).to.deep.equal(
|
||||
proof.toBytesLE(),
|
||||
"Reconstructed bytes should match original"
|
||||
);
|
||||
|
||||
// Verify extractMetadata works
|
||||
const metadata = parsedProof.extractMetadata();
|
||||
expect(metadata.nullifier).to.deep.equal(parsedProof.nullifier);
|
||||
expect(metadata.shareX).to.deep.equal(parsedProof.shareX);
|
||||
expect(metadata.shareY).to.deep.equal(parsedProof.shareY);
|
||||
expect(metadata.externalNullifier).to.deep.equal(
|
||||
parsedProof.externalNullifier
|
||||
);
|
||||
});
|
||||
});
|
||||
81
packages/rln/src/proof.ts
Normal file
@ -0,0 +1,81 @@
|
||||
import type { IRateLimitProof } from "@waku/interfaces";
|
||||
|
||||
import { BytesUtils } from "./utils/index.js";
|
||||
|
||||
// Offsets for parsing proof bytes
|
||||
// Format: proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32>
|
||||
const proofOffset = 128;
|
||||
const rootOffset = proofOffset + 32;
|
||||
const externalNullifierOffset = rootOffset + 32;
|
||||
const shareXOffset = externalNullifierOffset + 32;
|
||||
const shareYOffset = shareXOffset + 32;
|
||||
const nullifierOffset = shareYOffset + 32;
|
||||
|
||||
class ProofMetadata {
|
||||
public constructor(
|
||||
public readonly nullifier: Uint8Array,
|
||||
public readonly shareX: Uint8Array,
|
||||
public readonly shareY: Uint8Array,
|
||||
public readonly externalNullifier: Uint8Array
|
||||
) {}
|
||||
}
|
||||
|
||||
export class Proof implements IRateLimitProof {
|
||||
public readonly proof: Uint8Array;
|
||||
public readonly merkleRoot: Uint8Array;
|
||||
public readonly externalNullifier: Uint8Array;
|
||||
public readonly shareX: Uint8Array;
|
||||
public readonly shareY: Uint8Array;
|
||||
public readonly nullifier: Uint8Array;
|
||||
public readonly epoch: Uint8Array;
|
||||
public readonly rlnIdentifier: Uint8Array;
|
||||
|
||||
public constructor(
|
||||
proofBytes: Uint8Array,
|
||||
epoch: Uint8Array,
|
||||
rlnIdentifier: Uint8Array
|
||||
) {
|
||||
if (proofBytes.length < nullifierOffset) {
|
||||
throw new Error("invalid proof");
|
||||
}
|
||||
// parse the proof as proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32>
|
||||
this.proof = proofBytes.subarray(0, proofOffset);
|
||||
this.merkleRoot = proofBytes.subarray(proofOffset, rootOffset);
|
||||
this.externalNullifier = proofBytes.subarray(
|
||||
rootOffset,
|
||||
externalNullifierOffset
|
||||
);
|
||||
this.shareX = proofBytes.subarray(externalNullifierOffset, shareXOffset);
|
||||
this.shareY = proofBytes.subarray(shareXOffset, shareYOffset);
|
||||
this.nullifier = proofBytes.subarray(shareYOffset, nullifierOffset);
|
||||
|
||||
if (epoch.length !== 32) {
|
||||
throw new Error("invalid epoch");
|
||||
}
|
||||
if (rlnIdentifier.length !== 32) {
|
||||
throw new Error("invalid rlnIdentifier");
|
||||
}
|
||||
this.epoch = epoch;
|
||||
this.rlnIdentifier = rlnIdentifier;
|
||||
}
|
||||
|
||||
public extractMetadata(): ProofMetadata {
|
||||
return new ProofMetadata(
|
||||
this.nullifier,
|
||||
this.shareX,
|
||||
this.shareY,
|
||||
this.externalNullifier
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function proofToBytes(p: Proof): Uint8Array {
|
||||
return BytesUtils.concatenate(
|
||||
p.proof,
|
||||
p.merkleRoot,
|
||||
p.externalNullifier,
|
||||
p.shareX,
|
||||
p.shareY,
|
||||
p.nullifier
|
||||
);
|
||||
}
|
||||
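A small sketch of the byte layout the `Proof` class expects, using zero-filled buffers for illustration only: 288 proof bytes laid out as proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32>, plus a 32-byte epoch and rlnIdentifier.

```typescript
import { Proof, proofToBytes } from "./proof.js";

// Illustrative, zero-filled buffers; real values come from Zerokit proof generation.
const proofBytes = new Uint8Array(128 + 32 * 5); // 288 bytes total
const epoch = new Uint8Array(32);
const rlnIdentifier = new Uint8Array(32);

const proof = new Proof(proofBytes, epoch, rlnIdentifier);

// Each field is a view over the corresponding slice of the input.
console.assert(proof.proof.length === 128);
console.assert(proof.merkleRoot.length === 32);
console.assert(proof.nullifier.length === 32);

// proofToBytes() re-concatenates the slices back into the 288-byte wire form.
console.assert(proofToBytes(proof).length === proofBytes.length);
```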
BIN
packages/rln/src/resources/rln_final.arkzkey
Normal file
Binary file not shown.
@ -1,5 +1,5 @@
|
||||
import { Logger } from "@waku/utils";
|
||||
import init, * as zerokitRLN from "@waku/zerokit-rln-wasm";
|
||||
import init, { WasmRLN } from "@waku/zerokit-rln-wasm";
|
||||
|
||||
import { DEFAULT_RATE_LIMIT } from "./contract/constants.js";
|
||||
import { RLNCredentialsManager } from "./credentials_manager.js";
|
||||
@ -17,12 +17,11 @@ export class RLNInstance extends RLNCredentialsManager {
|
||||
public static async create(): Promise<RLNInstance> {
|
||||
try {
|
||||
await init();
|
||||
zerokitRLN.initPanicHook();
|
||||
|
||||
const witnessCalculator = await RLNInstance.loadWitnessCalculator();
|
||||
const zkey = await RLNInstance.loadZkey();
|
||||
|
||||
const zkRLN = zerokitRLN.newRLN(zkey);
|
||||
const zkRLN = new WasmRLN(zkey);
|
||||
const zerokit = new Zerokit(zkRLN, witnessCalculator, DEFAULT_RATE_LIMIT);
|
||||
|
||||
return new RLNInstance(zerokit);
|
||||
@ -61,7 +60,7 @@ export class RLNInstance extends RLNCredentialsManager {
|
||||
|
||||
public static async loadZkey(): Promise<Uint8Array> {
|
||||
try {
|
||||
const url = new URL("./resources/rln_final.zkey", import.meta.url);
|
||||
const url = new URL("./resources/rln_final.arkzkey", import.meta.url);
|
||||
const response = await fetch(url);
|
||||
|
||||
if (!response.ok) {
|
||||
|
||||
69
packages/rln/src/scripts/update_merkle_proof.ts
Normal file
@ -0,0 +1,69 @@
|
||||
import { type Address, createWalletClient, http, publicActions } from "viem";
|
||||
import { lineaSepolia } from "viem/chains";
|
||||
|
||||
import { RLN_CONTRACT } from "../contract/constants.js";
|
||||
import { RLNBaseContract } from "../contract/rln_base_contract.js";
|
||||
import { TEST_KEYSTORE_DATA } from "../utils/test_keystore.js";
|
||||
|
||||
async function updateMerkleProof(): Promise<void> {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log("Connecting to Linea Sepolia RPC...");
|
||||
|
||||
// Create RPC client (read-only, no account needed)
|
||||
const rpcClient = createWalletClient({
|
||||
chain: lineaSepolia,
|
||||
transport: http("https://rpc.sepolia.linea.build")
|
||||
}).extend(publicActions);
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
console.log("Initializing RLN contract...");
|
||||
const contract = await RLNBaseContract.create({
|
||||
address: RLN_CONTRACT.address as Address,
|
||||
rpcClient
|
||||
});
|
||||
|
||||
const membershipIndex = Number(TEST_KEYSTORE_DATA.membershipIndex);
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Fetching merkle proof for index ${membershipIndex}...`);
|
||||
|
||||
// Get current merkle root
|
||||
const merkleRoot = await contract.getMerkleRoot();
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Current merkle root: ${merkleRoot}`);
|
||||
|
||||
// Get merkle proof for the membership index
|
||||
const merkleProof = await contract.getMerkleProof(membershipIndex);
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Merkle proof (${merkleProof.length} elements):`);
|
||||
merkleProof.forEach((element, i) => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(` [${i}]: ${element}`);
|
||||
});
|
||||
|
||||
// Format the output for updating test_keystore.ts
|
||||
// eslint-disable-next-line no-console
|
||||
console.log("\n=== Update test_keystore.ts with these values ===\n");
|
||||
// eslint-disable-next-line no-console
|
||||
console.log("merkleProof: [");
|
||||
merkleProof.forEach((element, i) => {
|
||||
const comma = i < merkleProof.length - 1 ? "," : "";
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(` "${element}"${comma}`);
|
||||
});
|
||||
// eslint-disable-next-line no-console
|
||||
console.log("],");
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`merkleRoot: "${merkleRoot}",`);
|
||||
}
|
||||
|
||||
updateMerkleProof()
|
||||
.then(() => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log("\nScript completed successfully!");
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((error) => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error("Error updating merkle proof:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
149
packages/rln/src/test-utils/run-integration-tests.js
Normal file
@ -0,0 +1,149 @@
|
||||
/* eslint-env node */
|
||||
|
||||
/**
|
||||
* Integration test runner for RLN package.
|
||||
*
|
||||
* This script:
|
||||
* 1. Pulls the specific nwaku Docker image
|
||||
* 2. Starts a fleet of nwaku nodes
|
||||
* 3. Runs the Karma browser tests
|
||||
* 4. Stops the fleet (cleanup)
|
||||
*
|
||||
* Usage: node src/test-utils/run-integration-tests.js
|
||||
*/
|
||||
|
||||
import { exec, spawn } from "child_process";
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import { promisify } from "util";
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Use WAKUNODE_IMAGE from environment (set by CI) or fall back to our RLN-specific image
const NWAKU_IMAGE = process.env.WAKUNODE_IMAGE || "quay.io/wakuorg/nwaku-pr:3660";
|
||||
const FLEET_INFO_PATH = path.join(__dirname, "../../fleet-info.json");
|
||||
const NUM_NODES = 2;
|
||||
|
||||
// Ensure the environment variable is set for ServiceNode
|
||||
process.env.WAKUNODE_IMAGE = NWAKU_IMAGE;
|
||||
|
||||
async function pullImage() {
|
||||
try {
|
||||
await execAsync(`docker inspect ${NWAKU_IMAGE}`);
|
||||
} catch {
|
||||
await execAsync(`docker pull ${NWAKU_IMAGE}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function startFleet() {
|
||||
const { ServiceNode } = await import("@waku/tests");
|
||||
|
||||
const nodes = [];
|
||||
const nodeInfos = [];
|
||||
|
||||
for (let i = 0; i < NUM_NODES; i++) {
|
||||
const node = new ServiceNode(`rln_integration_${i}_${Date.now()}`);
|
||||
|
||||
const args = {
|
||||
relay: true,
|
||||
lightpush: true,
|
||||
filter: true,
|
||||
store: true,
|
||||
clusterId: 0,
|
||||
shard: [0]
|
||||
};
|
||||
|
||||
// Connect subsequent nodes to the first node
|
||||
if (i > 0 && nodes[0]) {
|
||||
const firstNodeAddr = await nodes[0].getExternalMultiaddr();
|
||||
if (firstNodeAddr) {
|
||||
args.staticnode = firstNodeAddr;
|
||||
}
|
||||
}
|
||||
|
||||
await node.start(args, { retries: 3 });
|
||||
nodes.push(node);
|
||||
|
||||
const multiaddr = await node.getMultiaddrWithId();
|
||||
const peerId = await node.getPeerId();
|
||||
|
||||
nodeInfos.push({
|
||||
multiaddr: multiaddr.toString(),
|
||||
restPort: node.httpUrl,
|
||||
peerId: peerId.toString()
|
||||
});
|
||||
}
|
||||
|
||||
// Write fleet info to file
|
||||
const fleetInfo = { nodes: nodeInfos };
|
||||
fs.writeFileSync(FLEET_INFO_PATH, JSON.stringify(fleetInfo, null, 2));
|
||||
return nodes;
|
||||
}
|
||||
|
||||
async function runKarmaTests() {
|
||||
return new Promise((resolve, reject) => {
|
||||
const karma = spawn("npx", ["karma", "start", "karma.node.conf.cjs"], {
|
||||
stdio: "inherit",
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_ENV: "test"
|
||||
}
|
||||
});
|
||||
|
||||
karma.on("error", (error) => {
|
||||
reject(new Error(`Karma failed to start: ${error.message}`));
|
||||
});
|
||||
|
||||
karma.on("exit", (code) => {
|
||||
if (code === 0) {
|
||||
resolve();
|
||||
} else {
|
||||
reject(new Error(`Karma tests failed with exit code ${code}`));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function stopFleet(nodes) {
|
||||
for (let i = 0; i < nodes.length; i++) {
|
||||
await nodes[i].stop();
|
||||
}
|
||||
|
||||
// Clean up fleet info file
|
||||
if (fs.existsSync(FLEET_INFO_PATH)) {
|
||||
fs.unlinkSync(FLEET_INFO_PATH);
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
let nodes = [];
|
||||
let exitCode = 0;
|
||||
|
||||
try {
|
||||
// Pull the Docker image
|
||||
await pullImage();
|
||||
|
||||
// Start the fleet
|
||||
nodes = await startFleet();
|
||||
|
||||
// Run the tests
|
||||
await runKarmaTests();
|
||||
} catch (error) {
|
||||
exitCode = 1;
|
||||
} finally {
|
||||
if (nodes.length > 0) {
|
||||
await stopFleet(nodes);
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(exitCode);
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error("Unexpected error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
155
packages/rln/src/test-utils/start-nwaku-fleet.ts
Normal file
@ -0,0 +1,155 @@
|
||||
/* eslint-disable no-console */
|
||||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
/**
|
||||
* Script to start a fleet of nwaku nodes for RLN integration tests.
|
||||
* Reuses the tests package infrastructure.
|
||||
*
|
||||
* Usage:
|
||||
* npx ts-node --esm src/test-utils/start-nwaku-fleet.ts start [numNodes]
|
||||
* npx ts-node --esm src/test-utils/start-nwaku-fleet.ts stop
|
||||
*/
|
||||
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
import { ServiceNode } from "@waku/tests";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Use WAKUNODE_IMAGE from environment or fall back to RLN-specific image
|
||||
const NWAKU_IMAGE =
|
||||
process.env.WAKUNODE_IMAGE || "quay.io/wakuorg/nwaku-pr:3660";
|
||||
process.env.WAKUNODE_IMAGE = NWAKU_IMAGE;
|
||||
|
||||
interface NodeInfo {
|
||||
multiaddr: string;
|
||||
restPort: string;
|
||||
peerId: string;
|
||||
}
|
||||
|
||||
interface FleetInfo {
|
||||
nodes: NodeInfo[];
|
||||
}
|
||||
|
||||
const FLEET_INFO_PATH = path.join(__dirname, "../../fleet-info.json");
|
||||
const activeNodes: ServiceNode[] = [];
|
||||
|
||||
async function startFleet(numNodes: number = 2): Promise<FleetInfo> {
|
||||
console.log(
|
||||
`Starting fleet of ${numNodes} nwaku nodes with image: ${process.env.WAKUNODE_IMAGE}`
|
||||
);
|
||||
|
||||
const nodes: NodeInfo[] = [];
|
||||
|
||||
for (let i = 0; i < numNodes; i++) {
|
||||
const node = new ServiceNode(`rln_test_node_${i}_${Date.now()}`);
|
||||
|
||||
const args: Record<string, unknown> = {
|
||||
relay: true,
|
||||
lightpush: true,
|
||||
filter: true,
|
||||
store: true,
|
||||
clusterId: 0,
|
||||
shard: [0]
|
||||
};
|
||||
|
||||
// Connect subsequent nodes to the first node
|
||||
if (i > 0 && activeNodes[0]) {
|
||||
const firstNodeAddr = await activeNodes[0].getExternalMultiaddr();
|
||||
if (firstNodeAddr) {
|
||||
args.staticnode = firstNodeAddr;
|
||||
}
|
||||
}
|
||||
|
||||
await node.start(args, { retries: 3 });
|
||||
activeNodes.push(node);
|
||||
|
||||
const multiaddr = await node.getMultiaddrWithId();
|
||||
|
||||
const nodeInfo: NodeInfo = {
|
||||
multiaddr: multiaddr.toString(),
|
||||
restPort: node.httpUrl,
|
||||
peerId: (await node.getPeerId()).toString()
|
||||
};
|
||||
|
||||
nodes.push(nodeInfo);
|
||||
console.log(`Node ${i} started: ${nodeInfo.multiaddr}`);
|
||||
}
|
||||
|
||||
const fleetInfo: FleetInfo = { nodes };
|
||||
|
||||
// Write fleet info to file for the browser test to read
|
||||
fs.writeFileSync(FLEET_INFO_PATH, JSON.stringify(fleetInfo, null, 2));
|
||||
console.log(`Fleet info written to ${FLEET_INFO_PATH}`);
|
||||
|
||||
return fleetInfo;
|
||||
}
|
||||
|
||||
async function stopFleet(): Promise<void> {
|
||||
console.log("Stopping all nwaku nodes...");
|
||||
|
||||
// ServiceNode instances live in this process's memory, so stop the tracked nodes directly
|
||||
for (const node of activeNodes) {
|
||||
try {
|
||||
await node.stop();
|
||||
console.log("Node stopped");
|
||||
} catch (err) {
|
||||
console.log(`Error stopping node: ${err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up the fleet info file
|
||||
if (fs.existsSync(FLEET_INFO_PATH)) {
|
||||
fs.unlinkSync(FLEET_INFO_PATH);
|
||||
console.log("Fleet info file removed");
|
||||
}
|
||||
|
||||
console.log("Fleet stopped");
|
||||
}
|
||||
|
||||
// Keep the process running after start
|
||||
async function startAndWait(numNodes: number): Promise<void> {
|
||||
await startFleet(numNodes);
|
||||
|
||||
console.log("\nFleet is running. Press Ctrl+C to stop.\n");
|
||||
|
||||
// Handle graceful shutdown
|
||||
process.on("SIGINT", () => {
|
||||
console.log("\nReceived SIGINT, stopping fleet...");
|
||||
void stopFleet().then(() => process.exit(0));
|
||||
});
|
||||
|
||||
process.on("SIGTERM", () => {
|
||||
console.log("\nReceived SIGTERM, stopping fleet...");
|
||||
void stopFleet().then(() => process.exit(0));
|
||||
});
|
||||
|
||||
// Keep process alive
|
||||
await new Promise(() => {});
|
||||
}
|
||||
|
||||
// CLI interface
|
||||
const command = process.argv[2];
|
||||
|
||||
if (command === "start") {
|
||||
const numNodes = parseInt(process.argv[3] || "2", 10);
|
||||
startAndWait(numNodes).catch((err) => {
|
||||
console.error("Failed to start fleet:", err);
|
||||
process.exit(1);
|
||||
});
|
||||
} else if (command === "stop") {
|
||||
// Note: stop command won't work well since nodes are in-memory
|
||||
// The recommended way is to use Ctrl+C on the start command
|
||||
console.log("Use Ctrl+C on the running start command to stop the fleet");
|
||||
process.exit(0);
|
||||
} else {
|
||||
console.log("Usage:");
|
||||
console.log(
|
||||
" npx ts-node --esm src/test-utils/start-nwaku-fleet.ts start [numNodes]"
|
||||
);
|
||||
console.log(" # Press Ctrl+C to stop");
|
||||
process.exit(1);
|
||||
}
|
||||
@ -49,6 +49,47 @@ export class BytesUtils {
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a BigInt to a bytes32 (32-byte Uint8Array)
|
||||
* @param value - The BigInt to convert (must fit in 32 bytes)
|
||||
* @param outputEndianness - Endianness of the output bytes ('big' or 'little')
|
||||
* @returns 32-byte Uint8Array representation of the BigInt
|
||||
*/
|
||||
public static bytes32FromBigInt(
|
||||
value: bigint,
|
||||
outputEndianness: "big" | "little" = "little"
|
||||
): Uint8Array {
|
||||
if (value < 0n) {
|
||||
throw new Error("Cannot convert negative BigInt to bytes");
|
||||
}
|
||||
|
||||
if (value >> 256n !== 0n) {
|
||||
throw new Error(
|
||||
`BigInt value is too large to fit in 32 bytes (max bit length: 256)`
|
||||
);
|
||||
}
|
||||
|
||||
if (value === 0n) {
|
||||
return new Uint8Array(32);
|
||||
}
|
||||
|
||||
const result = new Uint8Array(32);
|
||||
let workingValue = value;
|
||||
|
||||
// Extract bytes in big-endian order
|
||||
for (let i = 31; i >= 0; i--) {
|
||||
result[i] = Number(workingValue & 0xffn);
|
||||
workingValue = workingValue >> 8n;
|
||||
}
|
||||
|
||||
// If we need little-endian output, reverse the array
|
||||
if (outputEndianness === "little") {
|
||||
result.reverse();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes an unsigned integer to a buffer in little-endian format
|
||||
*/
|
||||
|
||||
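A short illustration of the endianness behaviour of `bytes32FromBigInt` above (the value is arbitrary).

```typescript
import { BytesUtils } from "./bytes.js";

// 0x0102 encoded into a fixed 32-byte buffer.
const be = BytesUtils.bytes32FromBigInt(0x0102n, "big");
// be[30] === 0x01, be[31] === 0x02, all other bytes are zero.

const le = BytesUtils.bytes32FromBigInt(0x0102n); // default is little-endian
// le[0] === 0x02, le[1] === 0x01, all other bytes are zero.
```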
@@ -1,30 +1,35 @@
import { Logger } from "@waku/utils";
import { BytesUtils } from "./bytes.js";

const DefaultEpochUnitSeconds = 10; // the rln-relay epoch length in seconds

const log = new Logger("rln:epoch");

export function dateToEpoch(
  timestamp: Date,
  epochUnitSeconds: number = DefaultEpochUnitSeconds
): number {
  const time = timestamp.getTime();
  const epoch = Math.floor(time / 1000 / epochUnitSeconds);
  log.info("generated epoch", epoch);
  return epoch;
}

export function epochIntToBytes(epoch: number): Uint8Array {
  const bytes = new Uint8Array(32);
  const db = new DataView(bytes.buffer);
  db.setUint32(0, epoch, true);
  log.info("encoded epoch", epoch, bytes);
  return bytes;
  return BytesUtils.writeUIntLE(new Uint8Array(32), epoch, 0, 32);
}

export function epochBytesToInt(bytes: Uint8Array): number {
  const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const epoch = dv.getUint32(0, true);
  log.info("decoded epoch", epoch, bytes);
  return epoch;
}

export function dateToEpochSeconds(timestamp: Date): number {
  return Math.floor(timestamp.getTime() / 1000);
}

export function dateToEpochBytes(timestamp: Date): Uint8Array {
  return epochIntToBytes(dateToEpochSeconds(timestamp));
}

export function dateToNanosecondBytes(timestamp: Date): Uint8Array {
  const nanoseconds = BigInt(timestamp.getTime()) * 1000000n;
  return BytesUtils.bytes32FromBigInt(nanoseconds, "little");
}
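A worked example for the epoch helpers above (illustrative, not part of the diff; values assume the default 10-second epoch unit):

// Hypothetical example
const t = new Date(1_700_000_000_000); // 2023-11-14T22:13:20.000Z
dateToEpoch(t);           // 170_000_000 = floor(1_700_000_000 s / 10 s)
dateToEpochSeconds(t);    // 1_700_000_000
dateToEpochBytes(t);      // 32-byte little-endian encoding of 1_700_000_000
dateToNanosecondBytes(t); // 32-byte little-endian encoding of 1_700_000_000_000 * 1_000_000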
@@ -1,25 +0,0 @@
import * as zerokitRLN from "@waku/zerokit-rln-wasm";

import { BytesUtils } from "./bytes.js";

export function poseidonHash(...input: Array<Uint8Array>): Uint8Array {
  const inputLen = BytesUtils.writeUIntLE(
    new Uint8Array(8),
    input.length,
    0,
    8
  );
  const lenPrefixedData = BytesUtils.concatenate(inputLen, ...input);
  return zerokitRLN.poseidonHash(lenPrefixedData);
}

export function sha256(input: Uint8Array): Uint8Array {
  const inputLen = BytesUtils.writeUIntLE(
    new Uint8Array(8),
    input.length,
    0,
    8
  );
  const lenPrefixedData = BytesUtils.concatenate(inputLen, input);
  return zerokitRLN.hash(lenPrefixedData);
}
@@ -1,4 +1,16 @@
export { createViemClientFromWindow, RpcClient } from "./rpcClient.js";
export { BytesUtils } from "./bytes.js";
export { sha256, poseidonHash } from "./hash.js";
export { dateToEpoch, epochIntToBytes, epochBytesToInt } from "./epoch.js";
export {
  dateToEpoch,
  epochIntToBytes,
  epochBytesToInt,
  dateToEpochSeconds,
  dateToEpochBytes,
  dateToNanosecondBytes
} from "./epoch.js";
export {
  getPathDirectionsFromIndex,
  calculateRateCommitment,
  reconstructMerkleRoot,
  MERKLE_TREE_DEPTH
} from "./merkle.js";
packages/rln/src/utils/merkle.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
import { Hasher, WasmFr } from "@waku/zerokit-rln-wasm";

import { BytesUtils } from "./bytes.js";

/**
 * The fixed depth of the Merkle tree used in the RLN contract
 * This is a constant that will never change for the on-chain implementation
 */
export const MERKLE_TREE_DEPTH = 20;

/**
 * Reconstructs a Merkle tree root from a proof and leaf information
 *
 * @param proof - Array of MERKLE_TREE_DEPTH bigint elements representing the Merkle proof
 * @param leafIndex - The index of the leaf in the tree (used to determine left/right positioning)
 * @param leafValue - The value of the leaf (typically the rate commitment)
 * @returns The reconstructed root as a bigint
 */
export function reconstructMerkleRoot(
  proof: readonly bigint[],
  leafIndex: bigint,
  leafValue: bigint
): bigint {
  if (proof.length !== MERKLE_TREE_DEPTH) {
    throw new Error(
      `Expected proof of length ${MERKLE_TREE_DEPTH}, got ${proof.length}`
    );
  }

  let currentValue = WasmFr.fromBytesLE(
    BytesUtils.bytes32FromBigInt(leafValue)
  );

  for (let level = 0; level < MERKLE_TREE_DEPTH; level++) {
    const bit = (leafIndex >> BigInt(level)) & 1n;

    const proofFr = WasmFr.fromBytesLE(
      BytesUtils.bytes32FromBigInt(proof[level])
    );

    if (bit === 0n) {
      // Current node is a left child: hash(current, proof[level])
      currentValue = Hasher.poseidonHashPair(currentValue, proofFr);
    } else {
      // Current node is a right child: hash(proof[level], current)
      currentValue = Hasher.poseidonHashPair(proofFr, currentValue);
    }
  }

  return BytesUtils.toBigInt(currentValue.toBytesLE(), "little");
}

/**
 * Calculates the rate commitment from an ID commitment and rate limit
 * This matches the contract's calculation: PoseidonT3.hash([idCommitment, rateLimit])
 *
 * @param idCommitment - The identity commitment as a bigint
 * @param rateLimit - The rate limit as a bigint
 * @returns The rate commitment as a bigint
 */
export function calculateRateCommitment(
  idCommitment: bigint,
  rateLimit: bigint
): bigint {
  const idBytes = BytesUtils.bytes32FromBigInt(idCommitment);
  const rateLimitBytes = BytesUtils.bytes32FromBigInt(rateLimit);

  const hashResult = Hasher.poseidonHashPair(
    WasmFr.fromBytesLE(idBytes),
    WasmFr.fromBytesLE(rateLimitBytes)
  );
  return BytesUtils.toBigInt(hashResult.toBytesLE(), "little");
}

/**
 * Converts a leaf index to an array of path direction bits
 *
 * @param leafIndex - The index of the leaf in the tree
 * @returns Array of MERKLE_TREE_DEPTH numbers (0 or 1) representing path directions
 * - 0 means the node is a left child (hash order: current, sibling)
 * - 1 means the node is a right child (hash order: sibling, current)
 */
export function getPathDirectionsFromIndex(leafIndex: bigint): number[] {
  const pathDirections: number[] = [];

  // For each level (0 to MERKLE_TREE_DEPTH-1), extract the bit that determines left/right
  for (let level = 0; level < MERKLE_TREE_DEPTH; level++) {
    // Check if bit `level` is set in the leaf index
    const bit = (leafIndex >> BigInt(level)) & 1n;
    pathDirections.push(Number(bit));
  }

  return pathDirections;
}
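An illustrative sketch of the path-direction convention above (not part of the commit; proof, idCommitment and rateLimit are placeholders):

// Hypothetical example
// Leaf index 5 = 0b101: right child at level 0, left at level 1, right at level 2, left above that.
const directions = getPathDirectionsFromIndex(5n);
// directions => [1, 0, 1, 0, 0, ...] with MERKLE_TREE_DEPTH (20) entries

// Given the 20 sibling values and the leaf's rate commitment, the root can be recomputed:
// const root = reconstructMerkleRoot(proof, 5n, calculateRateCommitment(idCommitment, rateLimit));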
packages/rln/src/utils/test_keystore.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
export const TEST_KEYSTORE_DATA = {
keystoreJson:
'{"application":"waku-rln-relay","appIdentifier":"01234567890abcdef","version":"0.2","credentials":{"E0A8AC077B95F64C1B2C4B116468B22EFA3B1CFF250069AE07422F645BAA555E":{"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"96aff104d7bb23cefb57a4c5e816a3b9"},"ciphertext":"1ae2c7a47274d12d6a4b439da48abfa89be29e4ba3308d153e2e808d3e120cc85da472ab1e0278c945231092162d31d753ecb48484ac0c3a7efe6380d08f5dedecc9cda26bd156a30d232b9da4313c5ec92b21cd3dc3ca03cff68afde94a063799b658cc3e4a5c648e620d584a8a184d2d473e3e94c897e21e0de7580639dcf40c0133f36896ac5bee2dd5fe8810a5441e31e1938ecc4b195db57c1b6d320a374508406dfb7a4879081b70100140515b4c6c551f25f9b4c9a7214ac2dc222410bf74666407343dfd4af477c85cf2f316bb7a512a88948d88f5474374563d51d02c13eede6b6cf64fab7991e529157d7de39033099d26f323d9710159b47d2511695b4fb428e3b02c760e1470a3ece712c6a03692d067e0e17930bc25ce7dc4ad2634e07ef51fa7369de6b4d495c7ae1d8ad8dccdd2fa12802db4203c527887adf5eb42e2551e120b8a455892d0ac9369faf708465a983c03c7c8f77c268f85cacc7b718a1e9e2800b160ca1f7a78f2c160cbc97396f5dfe0e0f3b35addb4f8d667021c79eec5248122d8c983075b9e8ca20679e90a12bdbeefb33df21523b4e1ea7ab57ddc706b43bf4827fbc3530d20cb906468af5c5c31ac08815f3ed1d00341be7e287a3fb7ef67aecf2e56f694c51ba6db8641ac873e26659c92a8527c42df2d5ac15ff6201bdfa8a5ee34b6a90ff864fba89370a8c51efcb4ed1b69f3ed0e37ee97c66eb84763f107e1214e088e3149b2433a8da595293343b2290b0a84b7f796b70005d1672446d98d45da7c89c3eb8d91ece94ee41099f9f43c6810ce71d9f75ac3dffe1de0c79e40baad486ecaefbd0cc0e89aed7e0a16ea271a371d3f5927a1c7b813608de5715692e58322260a4bcd4ccba4b2376df01f58645c16a7b37c8473b94c7577ae774e5c72132ed15507ab2027ddabf137aa417b134b653eda247314","kdf":"pbkdf2","kdfparams":{"dklen":32,"c":262144,"prf":"hmac-sha256","salt":"5f2081f089e9e277873bf1f538c60d714749a2bb910d8f1ed119d8d403235a8c"},"mac":"8d0667893b7d3b5f0b37c43edef616a8d295dc58292c98655eec8b5fe2ad69c3"}}}}',
credentialHash:
"E0A8AC077B95F64C1B2C4B116468B22EFA3B1CFF250069AE07422F645BAA555E",
password: "12345678",
merkleProof: [
"21837427992620339064281119305700224965155897361776876451171527491637273262703",
"2849928341676773476316761863425436901389023422598778907382563142042850204484",
"21699429914184421678079077958020273488709892845081201722564329942861605328226",
"8522396354694062508299995669286882048091268903835874022564768254605186873188",
"4967828252976847302563643214799688359334626491919847999565033460501719790119",
"985039452502497454598906195897243897432778848314526706136284672198477696437",
"19922236706682864826758848301828373105737204541535252785791101041288809679484",
"1241870589869015758600129850815671823696180350556207862318506998039540071293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"16870197621778677478951480138572599814910741341994641594346262317677658226992",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524",
"15819538789928229930262697811477882737253464456578333862691129291651619515538",
"19217088683336594659449020493828377907203207941212636669271704950158751593251",
"21035245323335827719745544373081896983162834604456827698288649288827293579666",
"6939770416153240137322503476966641397417391950902474480970945462551409848591",
"10941962436777715901943463195175331263348098796018438960955633645115732864202"
],
merkleRoot:
"2736078608533319394386474878088665333284588969678017122712404976506399404519",
membershipIndex: "703",
rateLimit: "300"
};
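A sketch of how this fixture could drive the merkle helpers (illustrative; the rate commitment is not part of the fixture, so it appears below as a hypothetical placeholder):

// Hypothetical example
import { reconstructMerkleRoot } from "./merkle.js";
import { TEST_KEYSTORE_DATA } from "./test_keystore.js";

const proof = TEST_KEYSTORE_DATA.merkleProof.map((p) => BigInt(p)); // 20 siblings
const leafIndex = BigInt(TEST_KEYSTORE_DATA.membershipIndex);       // 703n
const expectedRoot = BigInt(TEST_KEYSTORE_DATA.merkleRoot);

// With the decrypted credential's rate commitment (hypotheticalRateCommitment),
// the recomputed root should equal expectedRoot:
// reconstructMerkleRoot(proof, leafIndex, hypotheticalRateCommitment) === expectedRoot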
packages/rln/src/zerokit.browser.spec.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { expect } from "chai";

import { RLNInstance } from "./rln.js";

describe("@waku/rln", () => {
  it("should generate the same membership key if the same seed is provided", async function () {
    const rlnInstance = await RLNInstance.create();

    const seed = "This is a test seed";
    const memKeys1 = rlnInstance.zerokit.generateSeededIdentityCredential(seed);
    const memKeys2 = rlnInstance.zerokit.generateSeededIdentityCredential(seed);

    memKeys1
      .getCommitment()
      .toBytesLE()
      .forEach((element, index) => {
        expect(element).to.equal(memKeys2.getCommitment().toBytesLE()[index]);
      });
    memKeys1
      .getNullifier()
      .toBytesLE()
      .forEach((element, index) => {
        expect(element).to.equal(memKeys2.getNullifier().toBytesLE()[index]);
      });
    memKeys1
      .getSecretHash()
      .toBytesLE()
      .forEach((element, index) => {
        expect(element).to.equal(memKeys2.getSecretHash().toBytesLE()[index]);
      });
    memKeys1
      .getTrapdoor()
      .toBytesLE()
      .forEach((element, index) => {
        expect(element).to.equal(memKeys2.getTrapdoor().toBytesLE()[index]);
      });
  });
});
@@ -1,26 +0,0 @@
import { expect } from "chai";

import { RLNInstance } from "./rln.js";

describe("@waku/rln", () => {
  it("should generate the same membership key if the same seed is provided", async function () {
    const rlnInstance = await RLNInstance.create();

    const seed = "This is a test seed";
    const memKeys1 = rlnInstance.zerokit.generateSeededIdentityCredential(seed);
    const memKeys2 = rlnInstance.zerokit.generateSeededIdentityCredential(seed);

    memKeys1.IDCommitment.forEach((element, index) => {
      expect(element).to.equal(memKeys2.IDCommitment[index]);
    });
    memKeys1.IDNullifier.forEach((element, index) => {
      expect(element).to.equal(memKeys2.IDNullifier[index]);
    });
    memKeys1.IDSecretHash.forEach((element, index) => {
      expect(element).to.equal(memKeys2.IDSecretHash[index]);
    });
    memKeys1.IDTrapdoor.forEach((element, index) => {
      expect(element).to.equal(memKeys2.IDTrapdoor[index]);
    });
  });
});
@@ -1,35 +1,137 @@
import * as zerokitRLN from "@waku/zerokit-rln-wasm";
import {
  ExtendedIdentity,
  Hasher,
  VecWasmFr,
  WasmFr,
  WasmRLN,
  WasmRLNProof,
  WasmRLNWitnessInput
} from "@waku/zerokit-rln-wasm";

import { DEFAULT_RATE_LIMIT } from "./contract/constants.js";
import { IdentityCredential } from "./identity.js";
import { DEFAULT_RATE_LIMIT, RATE_LIMIT_PARAMS } from "./contract/constants.js";
import { WitnessCalculator } from "./resources/witness_calculator";
import { dateToEpochBytes } from "./utils/epoch.js";
import { MERKLE_TREE_DEPTH } from "./utils/merkle.js";

export class Zerokit {
  public constructor(
    private readonly zkRLN: number,
    private readonly zkRLN: WasmRLN,
    private readonly witnessCalculator: WitnessCalculator,
    private readonly _rateLimit: number = DEFAULT_RATE_LIMIT
    public readonly rateLimit: number = DEFAULT_RATE_LIMIT,
    public readonly rlnIdentifier: Uint8Array = (() => {
      const encoded = new TextEncoder().encode("rln/waku-rln-relay/v2.0.0");
      const padded = new Uint8Array(32);
      padded.set(encoded);
      return padded;
    })()
  ) {}

  public get getZkRLN(): number {
    return this.zkRLN;
  }

  public get getWitnessCalculator(): WitnessCalculator {
    return this.witnessCalculator;
  }

  public get rateLimit(): number {
    return this._rateLimit;
  }

  public generateSeededIdentityCredential(seed: string): IdentityCredential {
  public generateSeededIdentityCredential(seed: string): ExtendedIdentity {
    const stringEncoder = new TextEncoder();
    const seedBytes = stringEncoder.encode(seed);
    const memKeys = zerokitRLN.generateSeededExtendedMembershipKey(
      this.zkRLN,
      seedBytes
    return ExtendedIdentity.generateSeeded(seedBytes);
  }

  public async generateRLNProof(
    msg: Uint8Array,
    timestamp: Date,
    idSecretHash: Uint8Array,
    pathElements: Uint8Array[],
    identityPathIndex: Uint8Array[],
    rateLimit: number,
    messageId: number // number of messages sent by the user in this epoch
  ): Promise<{
    proof: WasmRLNProof;
    epoch: Uint8Array;
    rlnIdentifier: Uint8Array;
  }> {
    const epoch = dateToEpochBytes(timestamp);

    if (epoch.length !== 32)
      throw new Error(`Epoch must be 32 bytes, got ${epoch.length}`);
    if (idSecretHash.length !== 32)
      throw new Error(
        `ID secret hash must be 32 bytes, got ${idSecretHash.length}`
      );
    if (pathElements.length !== MERKLE_TREE_DEPTH)
      throw new Error(`Path elements must have ${MERKLE_TREE_DEPTH} entries`);
    if (identityPathIndex.length !== MERKLE_TREE_DEPTH)
      throw new Error(`Identity path index must have ${MERKLE_TREE_DEPTH} entries`);
    if (
      rateLimit < RATE_LIMIT_PARAMS.MIN_RATE ||
      rateLimit > RATE_LIMIT_PARAMS.MAX_RATE
    ) {
      throw new Error(
        `Rate limit must be between ${RATE_LIMIT_PARAMS.MIN_RATE} and ${RATE_LIMIT_PARAMS.MAX_RATE}`
      );
    }

    if (messageId < 0 || messageId >= rateLimit) {
      throw new Error(
        `messageId must be an integer between 0 and ${rateLimit - 1}, got ${messageId}`
      );
    }
    const pathElementsVec = new VecWasmFr();
    for (const element of pathElements) {
      pathElementsVec.push(WasmFr.fromBytesLE(element));
    }
    const identityPathIndexBytes = new Uint8Array(identityPathIndex.length);
    for (let i = 0; i < identityPathIndex.length; i++) {
      // We assume that each identity path index is already in little-endian format
      identityPathIndexBytes.set(identityPathIndex[i], i);
    }
    const x = Hasher.hashToFieldLE(msg);
    const externalNullifier = Hasher.poseidonHashPair(
      Hasher.hashToFieldLE(epoch),
      Hasher.hashToFieldLE(this.rlnIdentifier)
    );
    return IdentityCredential.fromBytes(memKeys);
    const witness = new WasmRLNWitnessInput(
      WasmFr.fromBytesLE(idSecretHash),
      WasmFr.fromUint(rateLimit),
      WasmFr.fromUint(messageId),
      pathElementsVec,
      identityPathIndexBytes,
      x,
      externalNullifier
    );

    const calculatedWitness: bigint[] =
      await this.witnessCalculator.calculateWitness(
        witness.toBigIntJson() as Record<string, unknown>
      );
    const proof = this.zkRLN.generateRLNProofWithWitness(
      calculatedWitness,
      witness
    );
    return {
      proof,
      epoch,
      rlnIdentifier: this.rlnIdentifier
    };
  }

  public verifyRLNProof(
    signalLength: Uint8Array,
    signal: Uint8Array,
    proof: WasmRLNProof,
    roots: Uint8Array[]
  ): boolean {
    if (signalLength.length !== 8)
      throw new Error("signalLength must be 8 bytes");
    if (roots.length == 0) throw new Error("roots array is empty");
    if (roots.find((root) => root.length !== 32)) {
      throw new Error("All roots must be 32 bytes");
    }

    const rootsVec = new VecWasmFr();
    for (const root of roots) {
      rootsVec.push(WasmFr.fromBytesLE(root));
    }
    const x = Hasher.hashToFieldLE(signal);
    return this.zkRLN.verifyWithRoots(proof, rootsVec, x);
  }
}
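An end-to-end sketch of the reworked proof API (illustrative only; rlnInstance construction, the credential secret and the Merkle path inputs are placeholders, not part of this diff):

// Hypothetical example
// const rlnInstance = await RLNInstance.create();
const zerokit = rlnInstance.zerokit;

const message = new TextEncoder().encode("hello");
const { proof, epoch, rlnIdentifier } = await zerokit.generateRLNProof(
  message,
  new Date(),
  idSecretHash,      // 32-byte secret hash from the credential (placeholder)
  pathElements,      // MERKLE_TREE_DEPTH x 32-byte little-endian siblings (placeholder)
  identityPathIndex, // MERKLE_TREE_DEPTH path-direction bytes (placeholder)
  300,               // rate limit, assumed to sit within RATE_LIMIT_PARAMS bounds
  0                  // first message sent in this epoch
);
// epoch and rlnIdentifier would travel with the published message so receivers can verify it.

// Verification takes the 8-byte signal length, the signal, the proof and the known roots.
const signalLength = BytesUtils.writeUIntLE(new Uint8Array(8), message.length, 0, 8);
const ok = zerokit.verifyRLNProof(signalLength, message, proof, [knownRootBytes]);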