diff --git a/.cspell.json b/.cspell.json index e8582e38b0..dd57575bc2 100644 --- a/.cspell.json +++ b/.cspell.json @@ -24,9 +24,11 @@ "cipherparams", "ciphertext", "circleci", + "circom", "codecov", "codegen", "commitlint", + "cooldown", "dependabot", "dialable", "dingpu", @@ -41,9 +43,7 @@ "Encrypters", "enr", "enrs", - "unsubscription", "enrtree", - "unhandle", "ephem", "esnext", "ethersproject", @@ -62,7 +62,6 @@ "ineed", "IPAM", "ipfs", - "cooldown", "iwant", "jdev", "jswaku", @@ -122,9 +121,11 @@ "typedoc", "undialable", "unencrypted", + "unhandle", "unmarshal", "unmount", "unmounts", + "unsubscription", "untracked", "upgrader", "vacp", @@ -139,6 +140,7 @@ "weboko", "websockets", "wifi", + "WTNS", "xsalsa20", "zerokit", "Привет", diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 170e2df8d1..7ef6042c11 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,13 +1,13 @@ { - "packages/utils": "0.0.26", - "packages/proto": "0.0.13", - "packages/interfaces": "0.0.33", - "packages/enr": "0.0.32", - "packages/core": "0.0.38", - "packages/message-encryption": "0.0.36", - "packages/relay": "0.0.21", - "packages/sdk": "0.0.34", - "packages/discovery": "0.0.11", - "packages/sds": "0.0.6", - "packages/rln": "0.1.8" + "packages/utils": "0.0.27", + "packages/proto": "0.0.14", + "packages/interfaces": "0.0.34", + "packages/enr": "0.0.33", + "packages/core": "0.0.39", + "packages/message-encryption": "0.0.37", + "packages/relay": "0.0.22", + "packages/sdk": "0.0.35", + "packages/discovery": "0.0.12", + "packages/sds": "0.0.7", + "packages/rln": "0.1.9" } diff --git a/package-lock.json b/package-lock.json index e50a8d7164..f5eebd8d44 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,10 +13,10 @@ "packages/core", "packages/discovery", "packages/message-encryption", - "packages/sdk", - "packages/relay", "packages/sds", "packages/rln", + "packages/sdk", + "packages/relay", "packages/tests", 
"packages/reliability-tests", "packages/headless-tests", @@ -7536,9 +7536,17 @@ "version": "4.17.18", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.18.tgz", "integrity": "sha512-KJ65INaxqxmU6EoCiJmRPZC9H9RVWCRd349tXM2M3O5NA7cY6YL7c0bHAHQ93NOfTObEQ004kd2QVHs/r0+m4g==", - "dev": true, "license": "MIT" }, + "node_modules/@types/lodash.debounce": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/lodash.debounce/-/lodash.debounce-4.0.9.tgz", + "integrity": "sha512-Ma5JcgTREwpLRwMM+XwBR7DaWe96nC38uCBDFKZWbNKD+osjVzdpnUSwBcqCptrp16sSOLBAUb50Car5I0TCsQ==", + "license": "MIT", + "dependencies": { + "@types/lodash": "*" + } + }, "node_modules/@types/markdown-it": { "version": "14.1.2", "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz", @@ -8406,9 +8414,9 @@ "link": true }, "node_modules/@waku/zerokit-rln-wasm": { - "version": "0.0.13", - "resolved": "https://registry.npmjs.org/@waku/zerokit-rln-wasm/-/zerokit-rln-wasm-0.0.13.tgz", - "integrity": "sha512-x7CRIIslmfCmTZc7yVp3dhLlKeLUs8ILIm9kv7+wVJ23H4pPw0Z+uH0ueLIYYfwODI6fDiwJj3S1vdFzM8D1zA==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@waku/zerokit-rln-wasm/-/zerokit-rln-wasm-0.2.1.tgz", + "integrity": "sha512-2Xp7e92y4qZpsiTPGBSVr4gVJ9mJTLaudlo0DQxNpxJUBtoJKpxdH5xDCQDiorbkWZC2j9EId+ohhxHO/xC1QQ==", "license": "MIT or Apache2" }, "node_modules/@webassemblyjs/ast": { @@ -36650,15 +36658,15 @@ }, "packages/core": { "name": "@waku/core", - "version": "0.0.38", + "version": "0.0.39", "license": "MIT OR Apache-2.0", "dependencies": { "@libp2p/ping": "2.0.35", "@noble/hashes": "^1.3.2", - "@waku/enr": "^0.0.32", - "@waku/interfaces": "0.0.33", - "@waku/proto": "0.0.13", - "@waku/utils": "0.0.26", + "@waku/enr": "^0.0.33", + "@waku/interfaces": "0.0.34", + "@waku/proto": "0.0.14", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "it-all": "^3.0.4", "it-length-prefixed": "^9.0.4", @@ -36718,14 +36726,14 @@ }, "packages/discovery": { 
"name": "@waku/discovery", - "version": "0.0.11", + "version": "0.0.12", "license": "MIT OR Apache-2.0", "dependencies": { - "@waku/core": "0.0.38", - "@waku/enr": "0.0.32", - "@waku/interfaces": "0.0.33", - "@waku/proto": "^0.0.13", - "@waku/utils": "0.0.26", + "@waku/core": "0.0.39", + "@waku/enr": "0.0.33", + "@waku/interfaces": "0.0.34", + "@waku/proto": "^0.0.14", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "dns-over-http-resolver": "^3.0.8", "hi-base32": "^0.5.1", @@ -36754,7 +36762,7 @@ }, "packages/enr": { "name": "@waku/enr", - "version": "0.0.32", + "version": "0.0.33", "license": "MIT OR Apache-2.0", "dependencies": { "@ethersproject/rlp": "^5.7.0", @@ -36762,7 +36770,7 @@ "@libp2p/peer-id": "5.1.7", "@multiformats/multiaddr": "^12.0.0", "@noble/secp256k1": "^1.7.1", - "@waku/utils": "0.0.26", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "js-sha3": "^0.9.2" }, @@ -36773,7 +36781,7 @@ "@types/chai": "^4.3.11", "@types/mocha": "^10.0.6", "@waku/build-utils": "*", - "@waku/interfaces": "0.0.33", + "@waku/interfaces": "0.0.34", "chai": "^4.3.10", "cspell": "^8.6.1", "fast-check": "^3.19.0", @@ -37293,7 +37301,7 @@ }, "packages/interfaces": { "name": "@waku/interfaces", - "version": "0.0.33", + "version": "0.0.34", "license": "MIT OR Apache-2.0", "devDependencies": { "@chainsafe/libp2p-gossipsub": "14.1.1", @@ -37308,14 +37316,14 @@ }, "packages/message-encryption": { "name": "@waku/message-encryption", - "version": "0.0.36", + "version": "0.0.37", "license": "MIT OR Apache-2.0", "dependencies": { "@noble/secp256k1": "^1.7.1", - "@waku/core": "0.0.38", - "@waku/interfaces": "0.0.33", - "@waku/proto": "0.0.13", - "@waku/utils": "0.0.26", + "@waku/core": "0.0.39", + "@waku/interfaces": "0.0.34", + "@waku/proto": "0.0.14", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "js-sha3": "^0.9.2", "uint8arrays": "^5.0.1" @@ -37345,7 +37353,7 @@ }, "packages/proto": { "name": "@waku/proto", - "version": "0.0.13", + "version": "0.0.14", "license": "MIT OR Apache-2.0", 
"dependencies": { "protons-runtime": "^5.4.0" @@ -37367,16 +37375,16 @@ }, "packages/relay": { "name": "@waku/relay", - "version": "0.0.21", + "version": "0.0.22", "license": "MIT OR Apache-2.0", "dependencies": { "@chainsafe/libp2p-gossipsub": "14.1.1", "@noble/hashes": "^1.3.2", - "@waku/core": "0.0.38", - "@waku/interfaces": "0.0.33", - "@waku/proto": "0.0.13", - "@waku/sdk": "0.0.34", - "@waku/utils": "0.0.26", + "@waku/core": "0.0.39", + "@waku/interfaces": "0.0.34", + "@waku/proto": "0.0.14", + "@waku/sdk": "0.0.35", + "@waku/utils": "0.0.27", "chai": "^4.3.10", "debug": "^4.3.4", "fast-check": "^3.19.0", @@ -37453,14 +37461,14 @@ }, "packages/rln": { "name": "@waku/rln", - "version": "0.1.8", + "version": "0.1.9", "license": "MIT OR Apache-2.0", "dependencies": { "@chainsafe/bls-keystore": "3.0.0", "@noble/hashes": "^1.2.0", - "@waku/core": "^0.0.38", - "@waku/utils": "^0.0.26", - "@waku/zerokit-rln-wasm": "^0.0.13", + "@waku/core": "^0.0.39", + "@waku/utils": "^0.0.27", + "@waku/zerokit-rln-wasm": "^0.2.1", "chai": "^5.1.2", "chai-as-promised": "^8.0.1", "chai-spies": "^1.1.0", @@ -37481,8 +37489,8 @@ "@types/lodash": "^4.17.15", "@types/sinon": "^17.0.3", "@waku/build-utils": "^1.0.0", - "@waku/interfaces": "0.0.33", - "@waku/message-encryption": "^0.0.36", + "@waku/interfaces": "0.0.34", + "@waku/message-encryption": "^0.0.37", "deep-equal-in-any-order": "^2.0.6", "fast-check": "^3.23.2", "rollup-plugin-copy": "^3.5.0" @@ -37598,7 +37606,7 @@ }, "packages/sdk": { "name": "@waku/sdk", - "version": "0.0.34", + "version": "0.0.35", "license": "MIT OR Apache-2.0", "dependencies": { "@chainsafe/libp2p-noise": "16.1.3", @@ -37608,12 +37616,15 @@ "@libp2p/ping": "2.0.35", "@libp2p/websockets": "9.2.16", "@noble/hashes": "^1.3.3", - "@waku/core": "0.0.38", - "@waku/discovery": "0.0.11", - "@waku/interfaces": "0.0.33", - "@waku/proto": "^0.0.13", - "@waku/utils": "0.0.26", - "libp2p": "2.8.11" + "@types/lodash.debounce": "^4.0.9", + "@waku/core": "0.0.39", + 
"@waku/discovery": "0.0.12", + "@waku/interfaces": "0.0.34", + "@waku/proto": "^0.0.14", + "@waku/sds": "^0.0.7", + "@waku/utils": "0.0.27", + "libp2p": "2.8.11", + "lodash.debounce": "^4.0.8" }, "devDependencies": { "@libp2p/interface": "2.10.4", @@ -37624,6 +37635,7 @@ "@types/chai": "^4.3.11", "@types/mocha": "^10.0.9", "@waku/build-utils": "*", + "@waku/message-encryption": "^0.0.37", "chai": "^5.1.1", "cspell": "^8.6.1", "interface-datastore": "8.3.2", @@ -37644,6 +37656,102 @@ "@sinonjs/commons": "^3.0.1" } }, + "packages/sdk/node_modules/@waku/sds/node_modules/@waku/interfaces": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/@waku/interfaces/-/interfaces-0.0.32.tgz", + "integrity": "sha512-4MNfc7ZzQCyQZR1GQQKPgHaWTuPTIvE2wo/b7iokjdeOT+ZSKyJFSetcV07cqnBwyzUv1gc53bJdzyHwVIa5Vw==", + "extraneous": true, + "license": "MIT OR Apache-2.0", + "engines": { + "node": ">=22" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/@waku/proto": { + "version": "0.0.12", + "resolved": "https://registry.npmjs.org/@waku/proto/-/proto-0.0.12.tgz", + "integrity": "sha512-JR7wiy3Di628Ywo9qKIi7rhfdC2K7ABoaWa9WX4ZQKieYDs+YwOK+syE53VNwXrtponNeLDI0JIOFzRDalUm1A==", + "extraneous": true, + "license": "MIT OR Apache-2.0", + "dependencies": { + "protons-runtime": "^5.4.0" + }, + "engines": { + "node": ">=22" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/@waku/utils": { + "version": "0.0.25", + "resolved": "https://registry.npmjs.org/@waku/utils/-/utils-0.0.25.tgz", + "integrity": "sha512-yCbfQ3uqByGNUvCNTj6oHi8fJ6BdVvg+Rj0y2YKrZDSNn73uTMF856lCJdsE86eqDZNCDaRaawTs3ZNEXyWaXw==", + "extraneous": true, + "license": "MIT OR Apache-2.0", + "dependencies": { + "@noble/hashes": "^1.3.2", + "@waku/interfaces": "0.0.32", + "chai": "^4.3.10", + "debug": "^4.3.4", + "uint8arrays": "^5.0.1" + }, + "engines": { + "node": ">=22" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/assertion-error": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "extraneous": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "extraneous": true, + "license": "MIT", + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/deep-eql": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", + "extraneous": true, + "license": "MIT", + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "extraneous": true, + "license": "MIT", + "dependencies": { + "get-func-name": "^2.0.1" + } + }, + "packages/sdk/node_modules/@waku/sds/node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "extraneous": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, "packages/sdk/node_modules/assertion-error": { "version": "2.0.1", "dev": true, @@ -37723,13 +37831,13 @@ }, "packages/sds": { "name": "@waku/sds", - "version": "0.0.6", + "version": 
"0.0.7", "license": "MIT OR Apache-2.0", "dependencies": { "@libp2p/interface": "2.10.4", "@noble/hashes": "^1.7.1", - "@waku/proto": "^0.0.13", - "@waku/utils": "^0.0.26", + "@waku/proto": "^0.0.14", + "@waku/utils": "^0.0.27", "chai": "^5.1.2", "lodash": "^4.17.21" }, @@ -37849,11 +37957,11 @@ }, "packages/utils": { "name": "@waku/utils", - "version": "0.0.26", + "version": "0.0.27", "license": "MIT OR Apache-2.0", "dependencies": { "@noble/hashes": "^1.3.2", - "@waku/interfaces": "0.0.33", + "@waku/interfaces": "0.0.34", "chai": "^4.3.10", "debug": "^4.3.4", "uint8arrays": "^5.0.1" diff --git a/package.json b/package.json index 53baf7ee6b..b4551f0b68 100644 --- a/package.json +++ b/package.json @@ -10,10 +10,10 @@ "packages/core", "packages/discovery", "packages/message-encryption", - "packages/sdk", - "packages/relay", "packages/sds", "packages/rln", + "packages/sdk", + "packages/relay", "packages/tests", "packages/reliability-tests", "packages/headless-tests", diff --git a/packages/browser-tests/tests/headless.spec.ts b/packages/browser-tests/tests/headless.spec.ts index 0817d1c1cf..dd4bcaee25 100644 --- a/packages/browser-tests/tests/headless.spec.ts +++ b/packages/browser-tests/tests/headless.spec.ts @@ -69,7 +69,8 @@ test.describe("waku", () => { console.log("Debug:", debug); }); - test("can dial peers", async ({ page }) => { + // TODO: https://github.com/waku-org/js-waku/issues/2619 + test.skip("can dial peers", async ({ page }) => { const result = await page.evaluate((peerAddrs) => { return window.wakuAPI.dialPeers(window.waku, peerAddrs); }, ACTIVE_PEERS); diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md index 2edf76eee3..adc7cac31d 100644 --- a/packages/core/CHANGELOG.md +++ b/packages/core/CHANGELOG.md @@ -5,6 +5,27 @@ All notable changes to this project will be documented in this file. 
The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification, and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.0.39](https://github.com/waku-org/js-waku/compare/core-v0.0.38...core-v0.0.39) (2025-09-20) + + +### Features + +* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448)) +* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b)) +* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8)) +* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f)) +* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397)) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/enr bumped from ^0.0.32 to ^0.0.33 + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + * @waku/proto bumped from 0.0.13 to 0.0.14 + * @waku/utils bumped from 0.0.26 to 0.0.27 + ## [0.0.38](https://github.com/waku-org/js-waku/compare/core-v0.0.37...core-v0.0.38) (2025-08-14) diff --git a/packages/core/package.json b/packages/core/package.json index 3d740a2869..ff2fb1899d 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@waku/core", - "version": "0.0.38", + "version": "0.0.39", 
"description": "TypeScript implementation of the Waku v2 protocol", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -64,11 +64,11 @@ "node": ">=22" }, "dependencies": { - "@waku/enr": "^0.0.32", - "@waku/interfaces": "0.0.33", + "@waku/enr": "^0.0.33", + "@waku/interfaces": "0.0.34", "@libp2p/ping": "2.0.35", - "@waku/proto": "0.0.13", - "@waku/utils": "0.0.26", + "@waku/proto": "0.0.14", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "@noble/hashes": "^1.3.2", "it-all": "^3.0.4", diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index c8ac89ffe5..8021ac0a91 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -10,7 +10,11 @@ export * as waku_filter from "./lib/filter/index.js"; export { FilterCore, FilterCodecs } from "./lib/filter/index.js"; export * as waku_light_push from "./lib/light_push/index.js"; -export { LightPushCodec, LightPushCore } from "./lib/light_push/index.js"; +export { + LightPushCore, + LightPushCodec, + LightPushCodecV2 +} from "./lib/light_push/index.js"; export * as waku_store from "./lib/store/index.js"; export { StoreCore, StoreCodec } from "./lib/store/index.js"; diff --git a/packages/core/src/lib/connection_manager/connection_limiter.spec.ts b/packages/core/src/lib/connection_manager/connection_limiter.spec.ts index db1d650e60..57d068b904 100644 --- a/packages/core/src/lib/connection_manager/connection_limiter.spec.ts +++ b/packages/core/src/lib/connection_manager/connection_limiter.spec.ts @@ -87,6 +87,12 @@ describe("ConnectionLimiter", () => { mockPeer2 = createMockPeer("12D3KooWTest2", [Tags.BOOTSTRAP]); // Ensure mockPeer2 is prioritized and dialed mockConnection = createMockConnection(mockPeerId, [Tags.BOOTSTRAP]); + dialer = { + start: sinon.stub(), + stop: sinon.stub(), + dial: sinon.stub().resolves() + } as unknown as sinon.SinonStubbedInstance; + libp2p = { addEventListener: sinon.stub(), removeEventListener: sinon.stub(), @@ -95,7 +101,11 @@ describe("ConnectionLimiter", 
() => { getConnections: sinon.stub().returns([]), peerStore: { all: sinon.stub().resolves([]), - get: sinon.stub().resolves(mockPeer) + get: sinon.stub().resolves(mockPeer), + merge: sinon.stub().resolves() + }, + components: { + components: {} } }; @@ -112,6 +122,20 @@ describe("ConnectionLimiter", () => { isConnected: sinon.stub().returns(true), isP2PConnected: sinon.stub().returns(true) } as unknown as sinon.SinonStubbedInstance; + + // Mock the libp2p components needed by isAddressesSupported + libp2p.components = { + components: {}, + transportManager: { + getTransports: sinon.stub().returns([ + { + dialFilter: sinon + .stub() + .returns([multiaddr("/dns4/test/tcp/443/wss")]) + } + ]) + } + }; }); afterEach(() => { @@ -274,11 +298,6 @@ describe("ConnectionLimiter", () => { describe("dialPeersFromStore", () => { beforeEach(() => { - dialer = { - start: sinon.stub(), - stop: sinon.stub(), - dial: sinon.stub().resolves() - } as unknown as sinon.SinonStubbedInstance; libp2p.hangUp = sinon.stub().resolves(); connectionLimiter = createLimiter(); mockPeer.addresses = [ @@ -404,11 +423,6 @@ describe("ConnectionLimiter", () => { describe("maintainConnectionsCount", () => { beforeEach(() => { - dialer = { - start: sinon.stub(), - stop: sinon.stub(), - dial: sinon.stub().resolves() - } as unknown as sinon.SinonStubbedInstance; libp2p.hangUp = sinon.stub().resolves(); connectionLimiter = createLimiter({ maxConnections: 2 }); mockPeer.addresses = [ @@ -515,6 +529,7 @@ describe("ConnectionLimiter", () => { ]; libp2p.peerStore.all.resolves([bootstrapPeer, pxPeer, localPeer]); libp2p.getConnections.returns([]); + connectionLimiter = createLimiter(); const peers = await (connectionLimiter as any).getPrioritizedPeers(); expect(peers[0].id.toString()).to.equal("b"); expect(peers[1].id.toString()).to.equal("px"); diff --git a/packages/core/src/lib/connection_manager/connection_limiter.ts b/packages/core/src/lib/connection_manager/connection_limiter.ts index 3b59c5f286..fae83b2828 
100644 --- a/packages/core/src/lib/connection_manager/connection_limiter.ts +++ b/packages/core/src/lib/connection_manager/connection_limiter.ts @@ -12,6 +12,7 @@ import { Logger } from "@waku/utils"; import { Dialer } from "./dialer.js"; import { NetworkMonitor } from "./network_monitor.js"; +import { isAddressesSupported } from "./utils.js"; const log = new Logger("connection-limiter"); @@ -145,13 +146,15 @@ export class ConnectionLimiter implements IConnectionLimiter { const peers = await this.getPrioritizedPeers(); if (peers.length === 0) { - log.info(`No peers to dial, node is utilizing all known peers`); + log.info(`No peers to dial, skipping`); + await this.triggerBootstrap(); return; } const promises = peers .slice(0, this.options.maxConnections - connections.length) .map((p) => this.dialer.dial(p.id)); + await Promise.all(promises); return; @@ -218,6 +221,7 @@ export class ConnectionLimiter implements IConnectionLimiter { if (peers.length === 0) { log.info(`No peers to dial, skipping`); + await this.triggerBootstrap(); return; } @@ -240,6 +244,9 @@ export class ConnectionLimiter implements IConnectionLimiter { private async getPrioritizedPeers(): Promise { const allPeers = await this.libp2p.peerStore.all(); const allConnections = this.libp2p.getConnections(); + const allConnectionsSet = new Set( + allConnections.map((c) => c.remotePeer.toString()) + ); log.info( `Found ${allPeers.length} peers in store, and found ${allConnections.length} connections` @@ -247,11 +254,10 @@ export class ConnectionLimiter implements IConnectionLimiter { const notConnectedPeers = allPeers.filter( (p) => - !allConnections.some((c) => c.remotePeer.equals(p.id)) && - p.addresses.some( - (a) => - a.multiaddr.toString().includes("wss") || - a.multiaddr.toString().includes("ws") + !allConnectionsSet.has(p.id.toString()) && + isAddressesSupported( + this.libp2p, + p.addresses.map((a) => a.multiaddr) ) ); @@ -267,7 +273,19 @@ export class ConnectionLimiter implements 
IConnectionLimiter { p.tags.has(Tags.PEER_CACHE) ); - return [...bootstrapPeers, ...peerExchangePeers, ...localStorePeers]; + const restPeers = notConnectedPeers.filter( + (p) => + !p.tags.has(Tags.BOOTSTRAP) && + !p.tags.has(Tags.PEER_EXCHANGE) && + !p.tags.has(Tags.PEER_CACHE) + ); + + return [ + ...bootstrapPeers, + ...peerExchangePeers, + ...localStorePeers, + ...restPeers + ]; } private async getBootstrapPeers(): Promise { @@ -291,4 +309,41 @@ export class ConnectionLimiter implements IConnectionLimiter { return null; } } + + /** + * Triggers the bootstrap or peer cache discovery if they are mounted. + * @returns void + */ + private async triggerBootstrap(): Promise { + log.info("Triggering bootstrap discovery"); + + const bootstrapComponents = Object.values(this.libp2p.components.components) + .filter((c) => !!c) + .filter((c: unknown) => + [`@waku/${Tags.BOOTSTRAP}`, `@waku/${Tags.PEER_CACHE}`].includes( + (c as { [Symbol.toStringTag]: string })?.[Symbol.toStringTag] + ) + ); + + if (bootstrapComponents.length === 0) { + log.warn("No bootstrap components found to trigger"); + return; + } + + log.info( + `Found ${bootstrapComponents.length} bootstrap components, starting them` + ); + + const promises = bootstrapComponents.map(async (component) => { + try { + await (component as { stop: () => Promise })?.stop?.(); + await (component as { start: () => Promise })?.start?.(); + log.info("Successfully started bootstrap component"); + } catch (error) { + log.error("Failed to start bootstrap component", error); + } + }); + + await Promise.all(promises); + } } diff --git a/packages/core/src/lib/connection_manager/connection_manager.spec.ts b/packages/core/src/lib/connection_manager/connection_manager.spec.ts index 45d64781f6..d8a108625a 100644 --- a/packages/core/src/lib/connection_manager/connection_manager.spec.ts +++ b/packages/core/src/lib/connection_manager/connection_manager.spec.ts @@ -52,6 +52,12 @@ describe("ConnectionManager", () => { dialProtocol: 
sinon.stub().resolves({} as Stream), hangUp: sinon.stub().resolves(), getPeers: sinon.stub().returns([]), + getConnections: sinon.stub().returns([]), + addEventListener: sinon.stub(), + removeEventListener: sinon.stub(), + components: { + components: {} + }, peerStore: { get: sinon.stub().resolves(null), merge: sinon.stub().resolves() diff --git a/packages/core/src/lib/connection_manager/utils.ts b/packages/core/src/lib/connection_manager/utils.ts index 02fa68a2b6..ded582d80f 100644 --- a/packages/core/src/lib/connection_manager/utils.ts +++ b/packages/core/src/lib/connection_manager/utils.ts @@ -1,6 +1,7 @@ import { isPeerId, type Peer, type PeerId } from "@libp2p/interface"; import { peerIdFromString } from "@libp2p/peer-id"; import { Multiaddr, multiaddr, MultiaddrInput } from "@multiformats/multiaddr"; +import { Libp2p } from "@waku/interfaces"; import { bytesToUtf8 } from "@waku/utils/bytes"; /** @@ -49,3 +50,25 @@ export const mapToPeerId = (input: PeerId | MultiaddrInput): PeerId => { ? input : peerIdFromString(multiaddr(input).getPeerId()!); }; + +/** + * Checks if the address is supported by the libp2p instance. + * @param libp2p - The libp2p instance. + * @param addresses - The addresses to check. + * @returns True if the addresses are supported, false otherwise. 
+ */ +export const isAddressesSupported = ( + libp2p: Libp2p, + addresses: Multiaddr[] +): boolean => { + const transports = + libp2p?.components?.transportManager?.getTransports() || []; + + if (transports.length === 0) { + return false; + } + + return transports + .map((transport) => transport.dialFilter(addresses)) + .some((supportedAddresses) => supportedAddresses.length > 0); +}; diff --git a/packages/core/src/lib/filter/filter.ts b/packages/core/src/lib/filter/filter.ts index 146fcd73c2..0b8a32b92e 100644 --- a/packages/core/src/lib/filter/filter.ts +++ b/packages/core/src/lib/filter/filter.ts @@ -2,9 +2,9 @@ import type { PeerId } from "@libp2p/interface"; import type { IncomingStreamData } from "@libp2p/interface-internal"; import { type ContentTopic, - type CoreProtocolResult, + type FilterCoreResult, + FilterError, type Libp2p, - ProtocolError, type PubsubTopic } from "@waku/interfaces"; import { WakuMessage } from "@waku/proto"; @@ -42,34 +42,44 @@ export class FilterCore { public constructor( private handleIncomingMessage: IncomingMessageHandler, - libp2p: Libp2p + private libp2p: Libp2p ) { this.streamManager = new StreamManager( FilterCodecs.SUBSCRIBE, libp2p.components ); + } - libp2p - .handle(FilterCodecs.PUSH, this.onRequest.bind(this), { + public async start(): Promise { + try { + await this.libp2p.handle(FilterCodecs.PUSH, this.onRequest.bind(this), { maxInboundStreams: 100 - }) - .catch((e) => { - log.error("Failed to register ", FilterCodecs.PUSH, e); }); + } catch (e) { + log.error("Failed to register ", FilterCodecs.PUSH, e); + } + } + + public async stop(): Promise { + try { + await this.libp2p.unhandle(FilterCodecs.PUSH); + } catch (e) { + log.error("Failed to unregister ", FilterCodecs.PUSH, e); + } } public async subscribe( pubsubTopic: PubsubTopic, peerId: PeerId, contentTopics: ContentTopic[] - ): Promise { + ): Promise { const stream = await this.streamManager.getStream(peerId); if (!stream) { return { success: null, failure: { - 
error: ProtocolError.NO_STREAM_AVAILABLE, + error: FilterError.NO_STREAM_AVAILABLE, peerId: peerId } }; @@ -98,7 +108,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.GENERIC_FAIL, + error: FilterError.GENERIC_FAIL, peerId: peerId } }; @@ -113,7 +123,7 @@ export class FilterCore { ); return { failure: { - error: ProtocolError.REMOTE_PEER_REJECTED, + error: FilterError.REMOTE_PEER_REJECTED, peerId: peerId }, success: null @@ -130,7 +140,7 @@ export class FilterCore { pubsubTopic: PubsubTopic, peerId: PeerId, contentTopics: ContentTopic[] - ): Promise { + ): Promise { const stream = await this.streamManager.getStream(peerId); if (!stream) { @@ -138,7 +148,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.NO_STREAM_AVAILABLE, + error: FilterError.NO_STREAM_AVAILABLE, peerId: peerId } }; @@ -156,7 +166,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.GENERIC_FAIL, + error: FilterError.GENERIC_FAIL, peerId: peerId } }; @@ -171,7 +181,7 @@ export class FilterCore { public async unsubscribeAll( pubsubTopic: PubsubTopic, peerId: PeerId - ): Promise { + ): Promise { const stream = await this.streamManager.getStream(peerId); if (!stream) { @@ -179,7 +189,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.NO_STREAM_AVAILABLE, + error: FilterError.NO_STREAM_AVAILABLE, peerId: peerId } }; @@ -198,7 +208,7 @@ export class FilterCore { if (!res || !res.length) { return { failure: { - error: ProtocolError.NO_RESPONSE, + error: FilterError.NO_RESPONSE, peerId: peerId }, success: null @@ -214,7 +224,7 @@ export class FilterCore { ); return { failure: { - error: ProtocolError.REMOTE_PEER_REJECTED, + error: FilterError.REMOTE_PEER_REJECTED, peerId: peerId }, success: null @@ -227,7 +237,7 @@ export class FilterCore { }; } - public async ping(peerId: PeerId): Promise { + public async ping(peerId: PeerId): Promise { const stream = 
await this.streamManager.getStream(peerId); if (!stream) { @@ -235,7 +245,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.NO_STREAM_AVAILABLE, + error: FilterError.NO_STREAM_AVAILABLE, peerId: peerId } }; @@ -257,7 +267,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.GENERIC_FAIL, + error: FilterError.GENERIC_FAIL, peerId: peerId } }; @@ -267,7 +277,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.NO_RESPONSE, + error: FilterError.NO_RESPONSE, peerId: peerId } }; @@ -283,7 +293,7 @@ export class FilterCore { return { success: null, failure: { - error: ProtocolError.REMOTE_PEER_REJECTED, + error: FilterError.REMOTE_PEER_REJECTED, peerId: peerId } }; diff --git a/packages/core/src/lib/light_push/constants.ts b/packages/core/src/lib/light_push/constants.ts new file mode 100644 index 0000000000..339e5f9090 --- /dev/null +++ b/packages/core/src/lib/light_push/constants.ts @@ -0,0 +1,7 @@ +export const CODECS = { + v2: "/vac/waku/lightpush/2.0.0-beta1", + v3: "/vac/waku/lightpush/3.0.0" +} as const; + +export const LightPushCodecV2 = CODECS.v2; +export const LightPushCodec = CODECS.v3; diff --git a/packages/core/src/lib/light_push/index.ts b/packages/core/src/lib/light_push/index.ts index 4c5c37dccb..87655dbb35 100644 --- a/packages/core/src/lib/light_push/index.ts +++ b/packages/core/src/lib/light_push/index.ts @@ -1 +1,2 @@ -export { LightPushCore, LightPushCodec, PushResponse } from "./light_push.js"; +export { LightPushCore } from "./light_push.js"; +export { LightPushCodec, LightPushCodecV2 } from "./constants.js"; diff --git a/packages/core/src/lib/light_push/light_push.ts b/packages/core/src/lib/light_push/light_push.ts index 6de027121b..eb3b517eeb 100644 --- a/packages/core/src/lib/light_push/light_push.ts +++ b/packages/core/src/lib/light_push/light_push.ts @@ -1,14 +1,11 @@ -import type { PeerId } from "@libp2p/interface"; +import type { 
PeerId, Stream } from "@libp2p/interface"; import { - type CoreProtocolResult, type IEncoder, type IMessage, type Libp2p, - ProtocolError, - type ThisOrThat + type LightPushCoreResult, + LightPushError } from "@waku/interfaces"; -import { PushResponse } from "@waku/proto"; -import { isMessageSizeUnderCap } from "@waku/utils"; import { Logger } from "@waku/utils"; import all from "it-all"; import * as lp from "it-length-prefixed"; @@ -17,92 +14,71 @@ import { Uint8ArrayList } from "uint8arraylist"; import { StreamManager } from "../stream_manager/index.js"; -import { PushRpc } from "./push_rpc.js"; -import { isRLNResponseError } from "./utils.js"; +import { CODECS } from "./constants.js"; +import { ProtocolHandler } from "./protocol_handler.js"; const log = new Logger("light-push"); -export const LightPushCodec = "/vac/waku/lightpush/2.0.0-beta1"; -export { PushResponse }; - -type PreparePushMessageResult = ThisOrThat<"query", PushRpc>; - /** * Implements the [Waku v2 Light Push protocol](https://rfc.vac.dev/spec/19/). 
*/ export class LightPushCore { private readonly streamManager: StreamManager; + private readonly streamManagerV2: StreamManager; - public readonly multicodec = LightPushCodec; + public readonly multicodec = [CODECS.v3, CODECS.v2]; - public constructor(libp2p: Libp2p) { - this.streamManager = new StreamManager(LightPushCodec, libp2p.components); - } - - private async preparePushMessage( - encoder: IEncoder, - message: IMessage - ): Promise { - try { - if (!message.payload || message.payload.length === 0) { - log.error("Failed to send waku light push: payload is empty"); - return { query: null, error: ProtocolError.EMPTY_PAYLOAD }; - } - - if (!(await isMessageSizeUnderCap(encoder, message))) { - log.error("Failed to send waku light push: message is bigger than 1MB"); - return { query: null, error: ProtocolError.SIZE_TOO_BIG }; - } - - const protoMessage = await encoder.toProtoObj(message); - if (!protoMessage) { - log.error("Failed to encode to protoMessage, aborting push"); - return { - query: null, - error: ProtocolError.ENCODE_FAILED - }; - } - - const query = PushRpc.createRequest(protoMessage, encoder.pubsubTopic); - return { query, error: null }; - } catch (error) { - log.error("Failed to prepare push message", error); - - return { - query: null, - error: ProtocolError.GENERIC_FAIL - }; - } + public constructor(private libp2p: Libp2p) { + this.streamManagerV2 = new StreamManager(CODECS.v2, libp2p.components); + this.streamManager = new StreamManager(CODECS.v3, libp2p.components); } public async send( encoder: IEncoder, message: IMessage, - peerId: PeerId - ): Promise { - const { query, error: preparationError } = await this.preparePushMessage( - encoder, - message + peerId: PeerId, + useLegacy: boolean = false + ): Promise { + const protocol = await this.getProtocol(peerId, useLegacy); + + log.info( + `Sending light push request to peer:${peerId.toString()}, protocol:${protocol}` ); - if (preparationError || !query) { + if (!protocol) { return { success: 
null, failure: { - error: preparationError, + error: LightPushError.GENERIC_FAIL, peerId } }; } - const stream = await this.streamManager.getStream(peerId); + const { rpc, error: prepError } = await ProtocolHandler.preparePushMessage( + encoder, + message, + protocol + ); + + if (prepError) { + return { + success: null, + failure: { + error: prepError, + peerId + } + }; + } + + const stream = await this.getStream(peerId, protocol); if (!stream) { log.error(`Failed to get a stream for remote peer:${peerId.toString()}`); return { success: null, failure: { - error: ProtocolError.NO_STREAM_AVAILABLE, + error: LightPushError.NO_STREAM_AVAILABLE, peerId: peerId } }; @@ -111,76 +87,74 @@ export class LightPushCore { let res: Uint8ArrayList[] | undefined; try { res = await pipe( - [query.encode()], + [rpc.encode()], lp.encode, stream, lp.decode, async (source) => await all(source) ); } catch (err) { - // can fail only because of `stream` abortion log.error("Failed to send waku light push request", err); return { success: null, failure: { - error: ProtocolError.STREAM_ABORTED, + error: LightPushError.STREAM_ABORTED, peerId: peerId } }; } const bytes = new Uint8ArrayList(); - res.forEach((chunk) => { - bytes.append(chunk); - }); + res.forEach((chunk) => bytes.append(chunk)); - let response: PushResponse | undefined; + if (bytes.length === 0) { + return { + success: null, + failure: { + error: LightPushError.NO_RESPONSE, + peerId: peerId + } + }; + } + + return ProtocolHandler.handleResponse(bytes, protocol, peerId); + } + + private async getProtocol( + peerId: PeerId, + useLegacy: boolean + ): Promise { try { - response = PushRpc.decode(bytes).response; - } catch (err) { - log.error("Failed to decode push reply", err); - return { - success: null, - failure: { - error: ProtocolError.DECODE_FAILED, - peerId: peerId - } - }; - } + const peer = await this.libp2p.peerStore.get(peerId); - if (!response) { - log.error("Remote peer fault: No response in PushRPC"); - return { - 
success: null, - failure: { - error: ProtocolError.NO_RESPONSE, - peerId: peerId - } - }; + if ( + useLegacy || + (!peer.protocols.includes(CODECS.v3) && + peer.protocols.includes(CODECS.v2)) + ) { + return CODECS.v2; + } else if (peer.protocols.includes(CODECS.v3)) { + return CODECS.v3; + } else { + throw new Error("No supported protocol found"); + } + } catch (error) { + log.error("Failed to get protocol", error); + return undefined; } + } - if (isRLNResponseError(response.info)) { - log.error("Remote peer fault: RLN generation"); - return { - success: null, - failure: { - error: ProtocolError.RLN_PROOF_GENERATION, - peerId: peerId - } - }; + private async getStream( + peerId: PeerId, + protocol: string + ): Promise { + switch (protocol) { + case CODECS.v2: + return this.streamManagerV2.getStream(peerId); + case CODECS.v3: + return this.streamManager.getStream(peerId); + default: + return undefined; } - - if (!response.isSuccess) { - log.error("Remote peer rejected the message: ", response.info); - return { - success: null, - failure: { - error: ProtocolError.REMOTE_PEER_REJECTED, - peerId: peerId - } - }; - } - - return { success: peerId, failure: null }; } } diff --git a/packages/core/src/lib/light_push/protocol_handler.ts b/packages/core/src/lib/light_push/protocol_handler.ts new file mode 100644 index 0000000000..429664f32d --- /dev/null +++ b/packages/core/src/lib/light_push/protocol_handler.ts @@ -0,0 +1,191 @@ +import type { PeerId } from "@libp2p/interface"; +import type { IEncoder, IMessage, LightPushCoreResult } from "@waku/interfaces"; +import { LightPushError, LightPushStatusCode } from "@waku/interfaces"; +import { PushResponse, WakuMessage } from "@waku/proto"; +import { isMessageSizeUnderCap, Logger } from "@waku/utils"; +import { Uint8ArrayList } from "uint8arraylist"; + +import { CODECS } from "./constants.js"; +import { PushRpcV2 } from "./push_rpc.js"; +import { PushRpc } from "./push_rpc_v3.js"; +import { isRLNResponseError } from 
"./utils.js"; + +type VersionedPushRpc = + | ({ version: "v2" } & PushRpcV2) + | ({ version: "v3" } & PushRpc); + +type PreparePushMessageResult = + | { rpc: VersionedPushRpc; error: null } + | { rpc: null; error: LightPushError }; + +const log = new Logger("light-push:protocol-handler"); + +export class ProtocolHandler { + public static async preparePushMessage( + encoder: IEncoder, + message: IMessage, + protocol: string + ): Promise { + try { + if (!message.payload || message.payload.length === 0) { + log.error("Failed to send waku light push: payload is empty"); + return { rpc: null, error: LightPushError.EMPTY_PAYLOAD }; + } + + if (!(await isMessageSizeUnderCap(encoder, message))) { + log.error("Failed to send waku light push: message is bigger than 1MB"); + return { rpc: null, error: LightPushError.SIZE_TOO_BIG }; + } + + const protoMessage = await encoder.toProtoObj(message); + if (!protoMessage) { + log.error("Failed to encode to protoMessage, aborting push"); + return { rpc: null, error: LightPushError.ENCODE_FAILED }; + } + + if (protocol === CODECS.v3) { + log.info("Creating v3 RPC message"); + return { + rpc: ProtocolHandler.createV3Rpc(protoMessage, encoder.pubsubTopic), + error: null + }; + } + + log.info("Creating v2 RPC message"); + return { + rpc: ProtocolHandler.createV2Rpc(protoMessage, encoder.pubsubTopic), + error: null + }; + } catch (err) { + log.error("Failed to prepare push message", err); + return { rpc: null, error: LightPushError.GENERIC_FAIL }; + } + } + + /** + * Decode and evaluate a LightPush response according to the protocol version + */ + public static handleResponse( + bytes: Uint8ArrayList, + protocol: string, + peerId: PeerId + ): LightPushCoreResult { + if (protocol === CODECS.v3) { + return ProtocolHandler.handleV3Response(bytes, peerId); + } + + return ProtocolHandler.handleV2Response(bytes, peerId); + } + + private static handleV3Response( + bytes: Uint8ArrayList, + peerId: PeerId + ): LightPushCoreResult { + try { + const 
decodedRpcV3 = PushRpc.decodeResponse(bytes); + const statusCode = decodedRpcV3.statusCode; + const statusDesc = decodedRpcV3.statusDesc; + + if (statusCode !== LightPushStatusCode.SUCCESS) { + const error = LightPushError.REMOTE_PEER_REJECTED; + log.error( + `Remote peer rejected with v3 status code ${statusCode}: ${statusDesc}` + ); + return { + success: null, + failure: { + error, + peerId: peerId + } + }; + } + + if (decodedRpcV3.relayPeerCount !== undefined) { + log.info(`Message relayed to ${decodedRpcV3.relayPeerCount} peers`); + } + + return { success: peerId, failure: null }; + } catch (err) { + return { + success: null, + failure: { + error: LightPushError.DECODE_FAILED, + peerId: peerId + } + }; + } + } + + private static handleV2Response( + bytes: Uint8ArrayList, + peerId: PeerId + ): LightPushCoreResult { + let response: PushResponse | undefined; + try { + const decodedRpc = PushRpcV2.decode(bytes); + response = decodedRpc.response; + } catch (err) { + return { + success: null, + failure: { + error: LightPushError.DECODE_FAILED, + peerId: peerId + } + }; + } + + if (!response) { + return { + success: null, + failure: { + error: LightPushError.NO_RESPONSE, + peerId: peerId + } + }; + } + + if (isRLNResponseError(response.info)) { + log.error("Remote peer fault: RLN generation"); + return { + success: null, + failure: { + error: LightPushError.RLN_PROOF_GENERATION, + peerId: peerId + } + }; + } + + if (!response.isSuccess) { + log.error("Remote peer rejected the message: ", response.info); + return { + success: null, + failure: { + error: LightPushError.REMOTE_PEER_REJECTED, + peerId: peerId + } + }; + } + + return { success: peerId, failure: null }; + } + + private static createV2Rpc( + message: WakuMessage, + pubsubTopic: string + ): VersionedPushRpc { + const v2Rpc = PushRpcV2.createRequest(message, pubsubTopic); + return Object.assign(v2Rpc, { version: "v2" as const }); + } + + private static createV3Rpc( + message: WakuMessage, + pubsubTopic: string 
+ ): VersionedPushRpc { + if (!message.timestamp) { + message.timestamp = BigInt(Date.now()) * BigInt(1_000_000); + } + + const v3Rpc = PushRpc.createRequest(message, pubsubTopic); + return Object.assign(v3Rpc, { version: "v3" as const }); + } +} diff --git a/packages/core/src/lib/light_push/push_rpc.ts b/packages/core/src/lib/light_push/push_rpc.ts index 7b726e3e49..71fadde1fa 100644 --- a/packages/core/src/lib/light_push/push_rpc.ts +++ b/packages/core/src/lib/light_push/push_rpc.ts @@ -2,14 +2,14 @@ import { proto_lightpush as proto } from "@waku/proto"; import type { Uint8ArrayList } from "uint8arraylist"; import { v4 as uuid } from "uuid"; -export class PushRpc { +export class PushRpcV2 { public constructor(public proto: proto.PushRpc) {} public static createRequest( message: proto.WakuMessage, pubsubTopic: string - ): PushRpc { - return new PushRpc({ + ): PushRpcV2 { + return new PushRpcV2({ requestId: uuid(), request: { message: message, @@ -19,9 +19,9 @@ export class PushRpc { }); } - public static decode(bytes: Uint8ArrayList): PushRpc { + public static decode(bytes: Uint8ArrayList): PushRpcV2 { const res = proto.PushRpc.decode(bytes); - return new PushRpc(res); + return new PushRpcV2(res); } public encode(): Uint8Array { diff --git a/packages/core/src/lib/light_push/push_rpc_v3.ts b/packages/core/src/lib/light_push/push_rpc_v3.ts new file mode 100644 index 0000000000..d4a42b42f4 --- /dev/null +++ b/packages/core/src/lib/light_push/push_rpc_v3.ts @@ -0,0 +1,162 @@ +import { proto_lightpush as proto } from "@waku/proto"; +import type { Uint8ArrayList } from "uint8arraylist"; +import { v4 as uuid } from "uuid"; + +/** + * LightPush v3 protocol RPC handler. 
+ * Implements the v3 message format with correct field numbers: + * - requestId: 1 + * - pubsubTopic: 20 + * - message: 21 + */ +export class PushRpc { + public constructor( + public proto: proto.LightPushRequestV3 | proto.LightPushResponseV3 + ) {} + + /** + * Create a v3 request message with proper field numbering + */ + public static createRequest( + message: proto.WakuMessage, + pubsubTopic: string + ): PushRpc { + return new PushRpc({ + requestId: uuid(), + pubsubTopic: pubsubTopic, + message: message + }); + } + + /** + * Create a v3 response message with status code handling + */ + public static createResponse( + requestId: string, + statusCode: number, + statusDesc?: string, + relayPeerCount?: number + ): PushRpc { + return new PushRpc({ + requestId, + statusCode, + statusDesc, + relayPeerCount + }); + } + + /** + * Decode v3 request message + */ + public static decodeRequest(bytes: Uint8ArrayList): PushRpc { + const res = proto.LightPushRequestV3.decode(bytes); + return new PushRpc(res); + } + + /** + * Decode v3 response message + */ + public static decodeResponse(bytes: Uint8ArrayList): PushRpc { + const res = proto.LightPushResponseV3.decode(bytes); + return new PushRpc(res); + } + + /** + * Encode message to bytes + */ + public encode(): Uint8Array { + if (this.isRequest()) { + return proto.LightPushRequestV3.encode( + this.proto as proto.LightPushRequestV3 + ); + } else { + return proto.LightPushResponseV3.encode( + this.proto as proto.LightPushResponseV3 + ); + } + } + + /** + * Get request data (if this is a request message) + */ + public get request(): proto.LightPushRequestV3 | undefined { + return this.isRequest() + ? (this.proto as proto.LightPushRequestV3) + : undefined; + } + + /** + * Get response data (if this is a response message) + */ + public get response(): proto.LightPushResponseV3 | undefined { + return this.isResponse() + ? 
(this.proto as proto.LightPushResponseV3) + : undefined; + } + + /** + * Get the request ID + */ + public get requestId(): string { + return this.proto.requestId; + } + + /** + * Get the pubsub topic (only available in requests) + */ + public get pubsubTopic(): string | undefined { + return this.isRequest() + ? (this.proto as proto.LightPushRequestV3).pubsubTopic + : undefined; + } + + /** + * Get the message (only available in requests) + */ + public get message(): proto.WakuMessage | undefined { + return this.isRequest() + ? (this.proto as proto.LightPushRequestV3).message + : undefined; + } + + /** + * Get the status code (only available in responses) + */ + public get statusCode(): number | undefined { + return this.isResponse() + ? (this.proto as proto.LightPushResponseV3).statusCode + : undefined; + } + + /** + * Get the status description (only available in responses) + */ + public get statusDesc(): string | undefined { + return this.isResponse() + ? (this.proto as proto.LightPushResponseV3).statusDesc + : undefined; + } + + /** + * Get the relay peer count (only available in responses) + */ + public get relayPeerCount(): number | undefined { + return this.isResponse() + ? 
(this.proto as proto.LightPushResponseV3).relayPeerCount + : undefined; + } + + /** + * Check if this is a request message + */ + private isRequest(): boolean { + return "pubsubTopic" in this.proto && "message" in this.proto; + } + + /** + * Check if this is a response message + */ + private isResponse(): boolean { + return "statusCode" in this.proto; + } +} diff --git a/packages/core/src/lib/stream_manager/stream_manager.ts b/packages/core/src/lib/stream_manager/stream_manager.ts index 20ac373ac8..63584c5086 100644 --- a/packages/core/src/lib/stream_manager/stream_manager.ts +++ b/packages/core/src/lib/stream_manager/stream_manager.ts @@ -13,7 +13,7 @@ export class StreamManager { private streamPool: Map> = new Map(); public constructor( - private multicodec: string, + private readonly multicodec: string, private readonly libp2p: Libp2pComponents ) { this.log = new Logger(`stream-manager:${multicodec}`); diff --git a/packages/discovery/CHANGELOG.md b/packages/discovery/CHANGELOG.md index 51a7b39459..928877ffc3 100644 --- a/packages/discovery/CHANGELOG.md +++ b/packages/discovery/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.0.12](https://github.com/waku-org/js-waku/compare/discovery-v0.0.11...discovery-v0.0.12) (2025-09-20) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/core bumped from 0.0.38 to 0.0.39 + * @waku/enr bumped from 0.0.32 to 0.0.33 + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + * @waku/proto bumped from ^0.0.13 to ^0.0.14 + * @waku/utils bumped from 0.0.26 to 0.0.27 + ## [0.0.11](https://github.com/waku-org/js-waku/compare/discovery-v0.0.10...discovery-v0.0.11) (2025-08-14) diff --git a/packages/discovery/package.json b/packages/discovery/package.json index e8196283f8..6082f191c2 100644 --- a/packages/discovery/package.json +++ b/packages/discovery/package.json @@ -1,6 +1,6 @@ { "name": "@waku/discovery", - "version": "0.0.11", + "version": "0.0.12", "description": "Contains various 
discovery mechanisms: DNS Discovery (EIP-1459, Peer Exchange, Local Peer Cache Discovery.", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -51,11 +51,11 @@ "node": ">=22" }, "dependencies": { - "@waku/core": "0.0.38", - "@waku/enr": "0.0.32", - "@waku/interfaces": "0.0.33", - "@waku/proto": "^0.0.13", - "@waku/utils": "0.0.26", + "@waku/core": "0.0.39", + "@waku/enr": "0.0.33", + "@waku/interfaces": "0.0.34", + "@waku/proto": "^0.0.14", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "dns-over-http-resolver": "^3.0.8", "hi-base32": "^0.5.1", diff --git a/packages/enr/CHANGELOG.md b/packages/enr/CHANGELOG.md index cc79e407ec..da19ed4550 100644 --- a/packages/enr/CHANGELOG.md +++ b/packages/enr/CHANGELOG.md @@ -99,6 +99,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * devDependencies * @waku/interfaces bumped from 0.0.27 to 0.0.28 +## [0.0.33](https://github.com/waku-org/js-waku/compare/enr-v0.0.32...enr-v0.0.33) (2025-09-20) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/utils bumped from 0.0.26 to 0.0.27 + * devDependencies + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + ## [0.0.32](https://github.com/waku-org/js-waku/compare/enr-v0.0.31...enr-v0.0.32) (2025-08-14) diff --git a/packages/enr/package.json b/packages/enr/package.json index 10f621e493..80144ddc32 100644 --- a/packages/enr/package.json +++ b/packages/enr/package.json @@ -1,6 +1,6 @@ { "name": "@waku/enr", - "version": "0.0.32", + "version": "0.0.33", "description": "ENR (EIP-778) for Waku", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -56,7 +56,7 @@ "@libp2p/peer-id": "5.1.7", "@multiformats/multiaddr": "^12.0.0", "@noble/secp256k1": "^1.7.1", - "@waku/utils": "0.0.26", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "js-sha3": "^0.9.2" }, @@ -67,7 +67,7 @@ "@types/chai": "^4.3.11", "@types/mocha": "^10.0.6", "@waku/build-utils": "*", - "@waku/interfaces": "0.0.33", + 
"@waku/interfaces": "0.0.34", "chai": "^4.3.10", "cspell": "^8.6.1", "fast-check": "^3.19.0", diff --git a/packages/interfaces/CHANGELOG.md b/packages/interfaces/CHANGELOG.md index cd2166888f..f551033845 100644 --- a/packages/interfaces/CHANGELOG.md +++ b/packages/interfaces/CHANGELOG.md @@ -5,6 +5,17 @@ All notable changes to this project will be documented in this file. The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification, and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.0.34](https://github.com/waku-org/js-waku/compare/interfaces-v0.0.33...interfaces-v0.0.34) (2025-09-20) + + +### Features + +* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448)) +* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b)) +* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8)) +* Query on connect ([#2602](https://github.com/waku-org/js-waku/issues/2602)) ([8542d04](https://github.com/waku-org/js-waku/commit/8542d04bf5c9472f955ef8c9e5bc9e89c70f4738)) +* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397)) + ## [0.0.33](https://github.com/waku-org/js-waku/compare/interfaces-v0.0.32...interfaces-v0.0.33) (2025-08-14) diff --git a/packages/interfaces/package.json b/packages/interfaces/package.json index ae3304ffd0..9387df9f1b 100644 --- a/packages/interfaces/package.json +++ 
b/packages/interfaces/package.json @@ -1,6 +1,6 @@ { "name": "@waku/interfaces", - "version": "0.0.33", + "version": "0.0.34", "description": "Definition of Waku interfaces", "types": "./dist/index.d.ts", "module": "./dist/index.js", diff --git a/packages/interfaces/src/filter.ts b/packages/interfaces/src/filter.ts index 1eff38e21b..cf383fcf4e 100644 --- a/packages/interfaces/src/filter.ts +++ b/packages/interfaces/src/filter.ts @@ -4,6 +4,16 @@ import type { Callback } from "./protocols.js"; export type IFilter = { readonly multicodec: string; + /** + * Starts the filter protocol. + */ + start(): Promise; + + /** + * Stops the filter protocol. + */ + stop(): Promise; + /** * Subscribes to messages that match the filtering criteria defined in the specified decoders. * Executes a callback upon receiving each message. diff --git a/packages/interfaces/src/light_push.ts b/packages/interfaces/src/light_push.ts index e58d48f092..4a8790d90a 100644 --- a/packages/interfaces/src/light_push.ts +++ b/packages/interfaces/src/light_push.ts @@ -1,4 +1,6 @@ -import type { ISender, ISendOptions } from "./sender.js"; +import { IEncoder, IMessage } from "./message.js"; +import { LightPushSDKResult } from "./protocols.js"; +import type { ISendOptions } from "./sender.js"; export type LightPushProtocolOptions = ISendOptions & { /** @@ -15,8 +17,40 @@ export type LightPushProtocolOptions = ISendOptions & { numPeersToUse?: number; }; -export type ILightPush = ISender & { - readonly multicodec: string; +export type ILightPush = { + readonly multicodec: string[]; start: () => void; stop: () => void; + send: ( + encoder: IEncoder, + message: IMessage, + options?: ISendOptions + ) => Promise; +}; + +export enum LightPushStatusCode { + SUCCESS = 200, + BAD_REQUEST = 400, + PAYLOAD_TOO_LARGE = 413, + INVALID_MESSAGE = 420, + UNSUPPORTED_TOPIC = 421, + TOO_MANY_REQUESTS = 429, + INTERNAL_ERROR = 500, + UNAVAILABLE = 503, + NO_RLN_PROOF = 504, + NO_PEERS = 505 +} + +export const 
StatusDescriptions: Record = { + [LightPushStatusCode.SUCCESS]: "Message sent successfully", + [LightPushStatusCode.BAD_REQUEST]: "Bad request format", + [LightPushStatusCode.PAYLOAD_TOO_LARGE]: + "Message payload exceeds maximum size", + [LightPushStatusCode.INVALID_MESSAGE]: "Message validation failed", + [LightPushStatusCode.UNSUPPORTED_TOPIC]: "Unsupported pubsub topic", + [LightPushStatusCode.TOO_MANY_REQUESTS]: "Rate limit exceeded", + [LightPushStatusCode.INTERNAL_ERROR]: "Internal server error", + [LightPushStatusCode.UNAVAILABLE]: "Service temporarily unavailable", + [LightPushStatusCode.NO_RLN_PROOF]: "RLN proof generation failed", + [LightPushStatusCode.NO_PEERS]: "No relay peers available" }; diff --git a/packages/interfaces/src/protocols.ts b/packages/interfaces/src/protocols.ts index 6cbea0a51b..0fb60c182f 100644 --- a/packages/interfaces/src/protocols.ts +++ b/packages/interfaces/src/protocols.ts @@ -130,117 +130,123 @@ export type Callback = ( msg: T ) => void | Promise; -export enum ProtocolError { - // - // GENERAL ERRORS SECTION - // - /** - * Could not determine the origin of the fault. Best to check connectivity and try again - * */ +export enum LightPushError { GENERIC_FAIL = "Generic error", - - /** - * The remote peer rejected the message. Information provided by the remote peer - * is logged. Review message validity, or mitigation for `NO_PEER_AVAILABLE` - * or `DECODE_FAILED` can be used. - */ - REMOTE_PEER_REJECTED = "Remote peer rejected", - - /** - * Failure to protobuf decode the message. May be due to a remote peer issue, - * ensuring that messages are sent via several peer enable mitigation of this error. - */ DECODE_FAILED = "Failed to decode", - - /** - * Failure to find a peer with suitable protocols. This may due to a connection issue. 
- * Mitigation can be: retrying after a given time period, display connectivity issue - * to user or listening for `peer:connected:bootstrap` or `peer:connected:peer-exchange` - * on the connection manager before retrying. - */ NO_PEER_AVAILABLE = "No peer available", - - /** - * Failure to find a stream to the peer. This may be because the connection with the peer is not still alive. - * Mitigation can be: retrying after a given time period, or mitigation for `NO_PEER_AVAILABLE` can be used. - */ NO_STREAM_AVAILABLE = "No stream available", - - /** - * The remote peer did not behave as expected. Mitigation for `NO_PEER_AVAILABLE` - * or `DECODE_FAILED` can be used. - */ NO_RESPONSE = "No response received", - - // - // SEND ERRORS SECTION - // - /** - * Failure to protobuf encode the message. This is not recoverable and needs - * further investigation. - */ - ENCODE_FAILED = "Failed to encode", - - /** - * The message payload is empty, making the message invalid. Ensure that a non-empty - * payload is set on the outgoing message. - */ - EMPTY_PAYLOAD = "Payload is empty", - - /** - * The message size is above the maximum message size allowed on the Waku Network. - * Compressing the message or using an alternative strategy for large messages is recommended. - */ - SIZE_TOO_BIG = "Size is too big", - - /** - * The PubsubTopic passed to the send function is not configured on the Waku node. - * Please ensure that the PubsubTopic is used when initializing the Waku node. - */ - TOPIC_NOT_CONFIGURED = "Topic not configured", - - /** - * Fails when - */ STREAM_ABORTED = "Stream aborted", - /** - * General proof generation error message. 
- * nwaku: https://github.com/waku-org/nwaku/blob/c3cb06ac6c03f0f382d3941ea53b330f6a8dd127/waku/waku_rln_relay/group_manager/group_manager_base.nim#L201C19-L201C42 - */ + ENCODE_FAILED = "Failed to encode", + EMPTY_PAYLOAD = "Payload is empty", + SIZE_TOO_BIG = "Size is too big", + TOPIC_NOT_CONFIGURED = "Topic not configured", RLN_PROOF_GENERATION = "Proof generation failed", + REMOTE_PEER_REJECTED = "Remote peer rejected", - // - // RECEIVE ERRORS SECTION - // - /** - * The pubsub topic configured on the decoder does not match the pubsub topic setup on the protocol. - * Ensure that the pubsub topic used for decoder creation is the same as the one used for protocol. - */ - TOPIC_DECODER_MISMATCH = "Topic decoder mismatch", - - /** - * The topics passed in the decoders do not match each other, or don't exist at all. - * Ensure that all the pubsub topics used in the decoders are valid and match each other. - */ - INVALID_DECODER_TOPICS = "Invalid decoder topics" + BAD_REQUEST = "Bad request format", + PAYLOAD_TOO_LARGE = "Message payload exceeds maximum size", + INVALID_MESSAGE = "Message validation failed", + UNSUPPORTED_TOPIC = "Unsupported pubsub topic", + TOO_MANY_REQUESTS = "Rate limit exceeded", + INTERNAL_ERROR = "Internal server error", + UNAVAILABLE = "Service temporarily unavailable", + NO_RLN_PROOF = "RLN proof generation failed", + NO_PEERS = "No relay peers available" } -export interface Failure { - error: ProtocolError; +export enum FilterError { + // General errors + GENERIC_FAIL = "Generic error", + DECODE_FAILED = "Failed to decode", + NO_PEER_AVAILABLE = "No peer available", + NO_STREAM_AVAILABLE = "No stream available", + NO_RESPONSE = "No response received", + STREAM_ABORTED = "Stream aborted", + + // Filter specific errors + REMOTE_PEER_REJECTED = "Remote peer rejected", + TOPIC_NOT_CONFIGURED = "Topic not configured", + SUBSCRIPTION_FAILED = "Subscription failed", + UNSUBSCRIBE_FAILED = "Unsubscribe failed", + PING_FAILED = "Ping failed", + 
TOPIC_DECODER_MISMATCH = "Topic decoder mismatch", + INVALID_DECODER_TOPICS = "Invalid decoder topics", + SUBSCRIPTION_LIMIT_EXCEEDED = "Subscription limit exceeded", + INVALID_CONTENT_TOPIC = "Invalid content topic", + PUSH_MESSAGE_FAILED = "Push message failed", + EMPTY_MESSAGE = "Empty message received", + MISSING_PUBSUB_TOPIC = "Pubsub topic missing from push message" +} + +export interface LightPushFailure { + error: LightPushError; peerId?: PeerId; } -export type CoreProtocolResult = ThisOrThat< +export interface FilterFailure { + error: FilterError; + peerId?: PeerId; +} + +export type LightPushCoreResult = ThisOrThat< "success", PeerId, "failure", - Failure + LightPushFailure >; +export type FilterCoreResult = ThisOrThat< + "success", + PeerId, + "failure", + FilterFailure +>; + +export type LightPushSDKResult = ThisAndThat< + "successes", + PeerId[], + "failures", + LightPushFailure[] +>; + +export type FilterSDKResult = ThisAndThat< + "successes", + PeerId[], + "failures", + FilterFailure[] +>; + +/** + * @deprecated replace usage by specific result types + */ export type SDKProtocolResult = ThisAndThat< "successes", PeerId[], "failures", - Failure[] + Array<{ + error: ProtocolError; + peerId?: PeerId; + }> >; + +/** + * @deprecated replace usage by specific result types + */ +export enum ProtocolError { + GENERIC_FAIL = "Generic error", + REMOTE_PEER_REJECTED = "Remote peer rejected", + DECODE_FAILED = "Failed to decode", + NO_PEER_AVAILABLE = "No peer available", + NO_STREAM_AVAILABLE = "No stream available", + NO_RESPONSE = "No response received", + ENCODE_FAILED = "Failed to encode", + EMPTY_PAYLOAD = "Payload is empty", + SIZE_TOO_BIG = "Size is too big", + TOPIC_NOT_CONFIGURED = "Topic not configured", + STREAM_ABORTED = "Stream aborted", + RLN_PROOF_GENERATION = "Proof generation failed", + TOPIC_DECODER_MISMATCH = "Topic decoder mismatch", + INVALID_DECODER_TOPICS = "Invalid decoder topics" +} diff --git a/packages/interfaces/src/sender.ts 
b/packages/interfaces/src/sender.ts index 0c924b3f3f..da4fc5f003 100644 --- a/packages/interfaces/src/sender.ts +++ b/packages/interfaces/src/sender.ts @@ -1,5 +1,5 @@ import type { IEncoder, IMessage } from "./message.js"; -import { SDKProtocolResult } from "./protocols.js"; +import { LightPushSDKResult } from "./protocols.js"; export type ISendOptions = { /** @@ -13,6 +13,13 @@ export type ISendOptions = { * @default 3 */ maxAttempts?: number; + + /** + * Use v2 of the light push protocol. + * This parameter will be removed in the future. + * @default false + */ + useLegacy?: boolean; }; export interface ISender { @@ -20,5 +27,5 @@ export interface ISender { encoder: IEncoder, message: IMessage, sendOptions?: ISendOptions - ) => Promise; + ) => Promise; } diff --git a/packages/message-encryption/CHANGELOG.md b/packages/message-encryption/CHANGELOG.md index a59ca1bee2..4bf6ed0264 100644 --- a/packages/message-encryption/CHANGELOG.md +++ b/packages/message-encryption/CHANGELOG.md @@ -101,6 +101,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * @waku/interfaces bumped from 0.0.27 to 0.0.28 * @waku/utils bumped from 0.0.20 to 0.0.21 +## [0.0.37](https://github.com/waku-org/js-waku/compare/message-encryption-v0.0.36...message-encryption-v0.0.37) (2025-09-20) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/core bumped from 0.0.38 to 0.0.39 + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + * @waku/proto bumped from 0.0.13 to 0.0.14 + * @waku/utils bumped from 0.0.26 to 0.0.27 + ## [0.0.36](https://github.com/waku-org/js-waku/compare/message-encryption-v0.0.35...message-encryption-v0.0.36) (2025-08-14) diff --git a/packages/message-encryption/package.json b/packages/message-encryption/package.json index 98fd35c4dd..b9a4901875 100644 --- a/packages/message-encryption/package.json +++ b/packages/message-encryption/package.json @@ -1,6 +1,6 @@ { "name": 
"@waku/message-encryption", - "version": "0.0.36", + "version": "0.0.37", "description": "Waku Message Payload Encryption", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -76,10 +76,10 @@ }, "dependencies": { "@noble/secp256k1": "^1.7.1", - "@waku/core": "0.0.38", - "@waku/interfaces": "0.0.33", - "@waku/proto": "0.0.13", - "@waku/utils": "0.0.26", + "@waku/core": "0.0.39", + "@waku/interfaces": "0.0.34", + "@waku/proto": "0.0.14", + "@waku/utils": "0.0.27", "debug": "^4.3.4", "js-sha3": "^0.9.2", "uint8arrays": "^5.0.1" diff --git a/packages/proto/CHANGELOG.md b/packages/proto/CHANGELOG.md index 4ba26b9177..87a88c1a97 100644 --- a/packages/proto/CHANGELOG.md +++ b/packages/proto/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.0.14](https://github.com/waku-org/js-waku/compare/proto-v0.0.13...proto-v0.0.14) (2025-09-20) + + +### Features + +* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8)) + ## [0.0.13](https://github.com/waku-org/js-waku/compare/proto-v0.0.12...proto-v0.0.13) (2025-08-14) diff --git a/packages/proto/package.json b/packages/proto/package.json index 2ad1f253cd..eb46635413 100644 --- a/packages/proto/package.json +++ b/packages/proto/package.json @@ -1,6 +1,6 @@ { "name": "@waku/proto", - "version": "0.0.13", + "version": "0.0.14", "description": "Protobuf definitions for Waku", "types": "./dist/index.d.ts", "module": "./dist/index.js", diff --git a/packages/proto/src/generated/sds_message.ts b/packages/proto/src/generated/sds_message.ts index 20ef8746ae..eba12d4acd 100644 --- a/packages/proto/src/generated/sds_message.ts +++ b/packages/proto/src/generated/sds_message.ts @@ -84,7 +84,7 @@ export interface SdsMessage { senderId: string messageId: string channelId: string - lamportTimestamp?: number + lamportTimestamp?: bigint causalHistory: HistoryEntry[] bloomFilter?: 
Uint8Array content?: Uint8Array @@ -117,7 +117,7 @@ export namespace SdsMessage { if (obj.lamportTimestamp != null) { w.uint32(80) - w.int32(obj.lamportTimestamp) + w.uint64(obj.lamportTimestamp) } if (obj.causalHistory != null) { @@ -167,7 +167,7 @@ export namespace SdsMessage { break } case 10: { - obj.lamportTimestamp = reader.int32() + obj.lamportTimestamp = reader.uint64() break } case 11: { diff --git a/packages/proto/src/lib/light_push.proto b/packages/proto/src/lib/light_push.proto index b980115ab9..9ceba1ab2f 100644 --- a/packages/proto/src/lib/light_push.proto +++ b/packages/proto/src/lib/light_push.proto @@ -39,4 +39,4 @@ message LightPushResponseV3 { uint32 status_code = 10; optional string status_desc = 11; optional uint32 relay_peer_count = 12; -} \ No newline at end of file +} diff --git a/packages/proto/src/lib/sds_message.proto b/packages/proto/src/lib/sds_message.proto index 5344a0d33a..c38e99b084 100644 --- a/packages/proto/src/lib/sds_message.proto +++ b/packages/proto/src/lib/sds_message.proto @@ -9,7 +9,7 @@ message SdsMessage { string sender_id = 1; // Participant ID of the message sender string message_id = 2; // Unique identifier of the message string channel_id = 3; // Identifier of the channel to which the message belongs - optional int32 lamport_timestamp = 10; // Logical timestamp for causal ordering in channel + optional uint64 lamport_timestamp = 10; // Logical timestamp for causal ordering in channel repeated HistoryEntry causal_history = 11; // List of preceding message IDs that this message causally depends on. Generally 2 or 3 message IDs are included. 
optional bytes bloom_filter = 12; // Bloom filter representing received message IDs in channel optional bytes content = 20; // Actual content of the message diff --git a/packages/relay/CHANGELOG.md b/packages/relay/CHANGELOG.md index 50cc0da8d5..381558687b 100644 --- a/packages/relay/CHANGELOG.md +++ b/packages/relay/CHANGELOG.md @@ -25,6 +25,25 @@ * @waku/interfaces bumped from 0.0.16 to 0.0.17 * @waku/utils bumped from 0.0.9 to 0.0.10 +## [0.0.22](https://github.com/waku-org/js-waku/compare/relay-v0.0.21...relay-v0.0.22) (2025-09-20) + + +### Features + +* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b)) +* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8)) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/core bumped from 0.0.38 to 0.0.39 + * @waku/sdk bumped from 0.0.34 to 0.0.35 + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + * @waku/proto bumped from 0.0.13 to 0.0.14 + * @waku/utils bumped from 0.0.26 to 0.0.27 + ## [0.0.21](https://github.com/waku-org/js-waku/compare/relay-v0.0.20...relay-v0.0.21) (2025-08-14) diff --git a/packages/relay/package.json b/packages/relay/package.json index 44a3e6990b..a3f9bec26a 100644 --- a/packages/relay/package.json +++ b/packages/relay/package.json @@ -1,6 +1,6 @@ { "name": "@waku/relay", - "version": "0.0.21", + "version": "0.0.22", "description": "Relay Protocol for Waku", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -51,11 +51,11 @@ "dependencies": { "@chainsafe/libp2p-gossipsub": "14.1.1", "@noble/hashes": "^1.3.2", - "@waku/core": "0.0.38", - "@waku/sdk": "0.0.34", - "@waku/interfaces": "0.0.33", - "@waku/proto": "0.0.13", - "@waku/utils": 
"0.0.26", + "@waku/core": "0.0.39", + "@waku/sdk": "0.0.35", + "@waku/interfaces": "0.0.34", + "@waku/proto": "0.0.14", + "@waku/utils": "0.0.27", "chai": "^4.3.10", "debug": "^4.3.4", "fast-check": "^3.19.0", diff --git a/packages/relay/src/relay.ts b/packages/relay/src/relay.ts index 6f6dd98fa7..cd1336ef72 100644 --- a/packages/relay/src/relay.ts +++ b/packages/relay/src/relay.ts @@ -19,9 +19,9 @@ import { IRelay, type IRoutingInfo, Libp2p, - ProtocolError, - PubsubTopic, - SDKProtocolResult + LightPushError, + LightPushSDKResult, + PubsubTopic } from "@waku/interfaces"; import { isWireSizeUnderCap, toAsyncIterator } from "@waku/utils"; import { pushOrInitMapSet } from "@waku/utils"; @@ -127,7 +127,7 @@ export class Relay implements IRelay { public async send( encoder: IEncoder, message: IMessage - ): Promise { + ): Promise { const { pubsubTopic } = encoder; if (!this.pubsubTopics.has(pubsubTopic)) { log.error("Failed to send waku relay: topic not configured"); @@ -135,7 +135,7 @@ export class Relay implements IRelay { successes: [], failures: [ { - error: ProtocolError.TOPIC_NOT_CONFIGURED + error: LightPushError.TOPIC_NOT_CONFIGURED } ] }; @@ -148,7 +148,7 @@ export class Relay implements IRelay { successes: [], failures: [ { - error: ProtocolError.ENCODE_FAILED + error: LightPushError.ENCODE_FAILED } ] }; @@ -160,7 +160,7 @@ export class Relay implements IRelay { successes: [], failures: [ { - error: ProtocolError.SIZE_TOO_BIG + error: LightPushError.SIZE_TOO_BIG } ] }; diff --git a/packages/rln/CHANGELOG.md b/packages/rln/CHANGELOG.md index e808d9076b..064d78fa9f 100644 --- a/packages/rln/CHANGELOG.md +++ b/packages/rln/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [0.1.9](https://github.com/waku-org/js-waku/compare/rln-v0.1.8...rln-v0.1.9) (2025-09-20) + + +### Features + +* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) 
([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b)) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/core bumped from ^0.0.38 to ^0.0.39 + * @waku/utils bumped from ^0.0.26 to ^0.0.27 + * devDependencies + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + * @waku/message-encryption bumped from ^0.0.36 to ^0.0.37 + ## [0.1.8](https://github.com/waku-org/js-waku/compare/rln-v0.1.7...rln-v0.1.8) (2025-08-14) diff --git a/packages/rln/package.json b/packages/rln/package.json index 2ddd3e9311..4ea0199115 100644 --- a/packages/rln/package.json +++ b/packages/rln/package.json @@ -1,6 +1,6 @@ { "name": "@waku/rln", - "version": "0.1.8", + "version": "0.1.9", "description": "RLN (Rate Limiting Nullifier) implementation for Waku", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -54,12 +54,12 @@ "@rollup/plugin-node-resolve": "^15.2.3", "@types/chai": "^5.0.1", "@types/chai-spies": "^1.0.6", - "@waku/interfaces": "0.0.33", + "@waku/interfaces": "0.0.34", "@types/deep-equal-in-any-order": "^1.0.4", "@types/lodash": "^4.17.15", "@types/sinon": "^17.0.3", "@waku/build-utils": "^1.0.0", - "@waku/message-encryption": "^0.0.36", + "@waku/message-encryption": "^0.0.37", "deep-equal-in-any-order": "^2.0.6", "fast-check": "^3.23.2", "rollup-plugin-copy": "^3.5.0" @@ -76,10 +76,10 @@ ], "dependencies": { "@chainsafe/bls-keystore": "3.0.0", - "@waku/core": "^0.0.38", - "@waku/utils": "^0.0.26", + "@waku/core": "^0.0.39", + "@waku/utils": "^0.0.27", "@noble/hashes": "^1.2.0", - "@waku/zerokit-rln-wasm": "^0.0.13", + "@waku/zerokit-rln-wasm": "^0.2.1", "ethereum-cryptography": "^3.1.0", "ethers": "^5.7.2", "lodash": "^4.17.21", diff --git a/packages/rln/src/codec.spec.ts b/packages/rln/src/codec.spec.ts deleted file mode 100644 index dfb6c80f1b..0000000000 --- a/packages/rln/src/codec.spec.ts +++ /dev/null @@ -1,363 +0,0 @@ -import { createDecoder, createEncoder } from 
"@waku/core/lib/message/version_0"; -import { IDecodedMessage } from "@waku/interfaces"; -import { - generatePrivateKey, - generateSymmetricKey, - getPublicKey -} from "@waku/message-encryption"; -import { - createDecoder as createAsymDecoder, - createEncoder as createAsymEncoder -} from "@waku/message-encryption/ecies"; -import { - createDecoder as createSymDecoder, - createEncoder as createSymEncoder -} from "@waku/message-encryption/symmetric"; -import { expect } from "chai"; - -import { - createRLNDecoder, - createRLNEncoder, - RLNDecoder, - RLNEncoder -} from "./codec.js"; -import { - createTestMetaSetter, - createTestRLNCodecSetup, - EMPTY_PROTO_MESSAGE, - TEST_CONSTANTS, - verifyRLNMessage -} from "./codec.test-utils.js"; -import { RlnMessage } from "./message.js"; -import { epochBytesToInt } from "./utils/index.js"; - -describe("RLN codec with version 0", () => { - it("toWire", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - - const rlnEncoder = createRLNEncoder({ - encoder: createEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo - }), - rlnInstance, - index, - credential - }); - const rlnDecoder = createRLNDecoder({ - rlnInstance, - decoder: createDecoder( - TEST_CONSTANTS.contentTopic, - TEST_CONSTANTS.routingInfo - ) - }); - - const bytes = await rlnEncoder.toWire({ payload }); - - expect(bytes).to.not.be.undefined; - const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); - expect(protoResult).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - protoResult! 
- ))!; - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); - }); - - it("toProtoObj", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - - const rlnEncoder = new RLNEncoder( - createEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo) - ); - - const proto = await rlnEncoder.toProtoObj({ payload }); - - expect(proto).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - proto! - )) as RlnMessage; - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); - }); -}); - -describe("RLN codec with version 1", () => { - it("Symmetric, toWire", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - const symKey = generateSymmetricKey(); - - const rlnEncoder = new RLNEncoder( - createSymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo, - symKey - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createSymDecoder( - TEST_CONSTANTS.contentTopic, - TEST_CONSTANTS.routingInfo, - symKey - ) - ); - - const bytes = await rlnEncoder.toWire({ payload }); - - expect(bytes).to.not.be.undefined; - const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); - expect(protoResult).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - protoResult! 
- ))!; - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); - }); - - it("Symmetric, toProtoObj", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - const symKey = generateSymmetricKey(); - - const rlnEncoder = new RLNEncoder( - createSymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo, - symKey - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createSymDecoder( - TEST_CONSTANTS.contentTopic, - TEST_CONSTANTS.routingInfo, - symKey - ) - ); - - const proto = await rlnEncoder.toProtoObj({ payload }); - - expect(proto).to.not.be.undefined; - const msg = await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - proto! - ); - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); - }); - - it("Asymmetric, toWire", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - const privateKey = generatePrivateKey(); - const publicKey = getPublicKey(privateKey); - - const rlnEncoder = new RLNEncoder( - createAsymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo, - publicKey - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createAsymDecoder( - TEST_CONSTANTS.contentTopic, - TEST_CONSTANTS.routingInfo, - privateKey - ) - ); - - const bytes = await rlnEncoder.toWire({ payload }); - - expect(bytes).to.not.be.undefined; - const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); - expect(protoResult).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - protoResult! 
- ))!; - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); - }); - - it("Asymmetric, toProtoObj", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - const privateKey = generatePrivateKey(); - const publicKey = getPublicKey(privateKey); - - const rlnEncoder = new RLNEncoder( - createAsymEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo, - publicKey - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createAsymDecoder( - TEST_CONSTANTS.contentTopic, - TEST_CONSTANTS.routingInfo, - privateKey - ) - ); - - const proto = await rlnEncoder.toProtoObj({ payload }); - - expect(proto).to.not.be.undefined; - const msg = await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - proto! - ); - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 1, rlnInstance); - }); -}); - -describe("RLN Codec - epoch", () => { - it("toProtoObj", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - - const rlnEncoder = new RLNEncoder( - createEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo) - ); - - const proto = await rlnEncoder.toProtoObj({ payload }); - - expect(proto).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - proto! 
- )) as RlnMessage; - - const epochBytes = proto!.rateLimitProof!.epoch; - const epoch = epochBytesToInt(epochBytes); - - expect(msg.epoch!.toString(10).length).to.eq(9); - expect(msg.epoch).to.eq(epoch); - - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); - }); -}); - -describe("RLN codec with version 0 and meta setter", () => { - it("toWire", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - const metaSetter = createTestMetaSetter(); - - const rlnEncoder = createRLNEncoder({ - encoder: createEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo, - metaSetter - }), - rlnInstance, - index, - credential - }); - const rlnDecoder = createRLNDecoder({ - rlnInstance, - decoder: createDecoder( - TEST_CONSTANTS.contentTopic, - TEST_CONSTANTS.routingInfo - ) - }); - - const bytes = await rlnEncoder.toWire({ payload }); - - expect(bytes).to.not.be.undefined; - const protoResult = await rlnDecoder.fromWireToProtoObj(bytes!); - expect(protoResult).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - protoResult! 
- ))!; - - const expectedMeta = metaSetter({ - ...EMPTY_PROTO_MESSAGE, - payload: protoResult!.payload - }); - - expect(msg!.meta).to.deep.eq(expectedMeta); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); - }); - - it("toProtoObj", async function () { - const { rlnInstance, credential, index, payload } = - await createTestRLNCodecSetup(); - const metaSetter = createTestMetaSetter(); - - const rlnEncoder = new RLNEncoder( - createEncoder({ - contentTopic: TEST_CONSTANTS.contentTopic, - routingInfo: TEST_CONSTANTS.routingInfo, - metaSetter - }), - rlnInstance, - index, - credential - ); - const rlnDecoder = new RLNDecoder( - rlnInstance, - createDecoder(TEST_CONSTANTS.contentTopic, TEST_CONSTANTS.routingInfo) - ); - - const proto = await rlnEncoder.toProtoObj({ payload }); - - expect(proto).to.not.be.undefined; - const msg = (await rlnDecoder.fromProtoObj( - TEST_CONSTANTS.emptyPubsubTopic, - proto! - )) as RlnMessage; - - const expectedMeta = metaSetter({ - ...EMPTY_PROTO_MESSAGE, - payload: msg!.payload - }); - - expect(msg!.meta).to.deep.eq(expectedMeta); - verifyRLNMessage(msg, payload, TEST_CONSTANTS.contentTopic, 0, rlnInstance); - }); -}); diff --git a/packages/rln/src/codec.test-utils.ts b/packages/rln/src/codec.test-utils.ts deleted file mode 100644 index 4b16ae7131..0000000000 --- a/packages/rln/src/codec.test-utils.ts +++ /dev/null @@ -1,88 +0,0 @@ -import type { IProtoMessage } from "@waku/interfaces"; -import { createRoutingInfo } from "@waku/utils"; -import { expect } from "chai"; - -import { createRLN } from "./create.js"; -import type { IdentityCredential } from "./identity.js"; - -export interface TestRLNCodecSetup { - rlnInstance: any; - credential: IdentityCredential; - index: number; - payload: Uint8Array; -} - -export const TEST_CONSTANTS = { - contentTopic: "/test/1/waku-message/utf8", - emptyPubsubTopic: "", - defaultIndex: 0, - defaultPayload: new Uint8Array([1, 2, 3, 4, 5]), - routingInfo: createRoutingInfo( - { 
- clusterId: 0, - numShardsInCluster: 2 - }, - { contentTopic: "/test/1/waku-message/utf8" } - ) -} as const; - -export const EMPTY_PROTO_MESSAGE = { - timestamp: undefined, - contentTopic: "", - ephemeral: undefined, - meta: undefined, - rateLimitProof: undefined, - version: undefined -} as const; - -/** - * Creates a basic RLN setup for codec tests - */ -export async function createTestRLNCodecSetup(): Promise { - const rlnInstance = await createRLN(); - const credential = rlnInstance.zerokit.generateIdentityCredentials(); - rlnInstance.zerokit.insertMember(credential.IDCommitment); - - return { - rlnInstance, - credential, - index: TEST_CONSTANTS.defaultIndex, - payload: TEST_CONSTANTS.defaultPayload - }; -} - -/** - * Creates a meta setter function for testing - */ -export function createTestMetaSetter(): ( - msg: IProtoMessage & { meta: undefined } -) => Uint8Array { - return (msg: IProtoMessage & { meta: undefined }): Uint8Array => { - const buffer = new ArrayBuffer(4); - const view = new DataView(buffer); - view.setUint32(0, msg.payload.length, false); - return new Uint8Array(buffer); - }; -} - -/** - * Verifies common RLN message properties - */ -export function verifyRLNMessage( - msg: any, - payload: Uint8Array, - contentTopic: string, - version: number, - rlnInstance: any -): void { - expect(msg.rateLimitProof).to.not.be.undefined; - expect(msg.verify([rlnInstance.zerokit.getMerkleRoot()])).to.be.true; - expect(msg.verifyNoRoot()).to.be.true; - expect(msg.epoch).to.not.be.undefined; - expect(msg.epoch).to.be.gt(0); - - expect(msg.contentTopic).to.eq(contentTopic); - expect(msg.msg.version).to.eq(version); - expect(msg.payload).to.deep.eq(payload); - expect(msg.timestamp).to.not.be.undefined; -} diff --git a/packages/rln/src/codec.ts b/packages/rln/src/codec.ts deleted file mode 100644 index 067e2f382a..0000000000 --- a/packages/rln/src/codec.ts +++ /dev/null @@ -1,138 +0,0 @@ -import type { - IDecodedMessage, - IDecoder, - IEncoder, - IMessage, - 
IProtoMessage, - IRateLimitProof, - IRoutingInfo -} from "@waku/interfaces"; -import { Logger } from "@waku/utils"; - -import type { IdentityCredential } from "./identity.js"; -import { RlnMessage, toRLNSignal } from "./message.js"; -import { RLNInstance } from "./rln.js"; - -const log = new Logger("rln:encoder"); - -export class RLNEncoder implements IEncoder { - private readonly idSecretHash: Uint8Array; - - public constructor( - private readonly encoder: IEncoder, - private readonly rlnInstance: RLNInstance, - private readonly index: number, - identityCredential: IdentityCredential - ) { - if (index < 0) throw new Error("Invalid membership index"); - this.idSecretHash = identityCredential.IDSecretHash; - } - - public async toWire(message: IMessage): Promise { - message.rateLimitProof = await this.generateProof(message); - log.info("Proof generated", message.rateLimitProof); - return this.encoder.toWire(message); - } - - public async toProtoObj( - message: IMessage - ): Promise { - const protoMessage = await this.encoder.toProtoObj(message); - if (!protoMessage) return; - - protoMessage.contentTopic = this.contentTopic; - protoMessage.rateLimitProof = await this.generateProof(message); - log.info("Proof generated", protoMessage.rateLimitProof); - return protoMessage; - } - - private async generateProof(message: IMessage): Promise { - const signal = toRLNSignal(this.contentTopic, message); - return this.rlnInstance.zerokit.generateRLNProof( - signal, - this.index, - message.timestamp, - this.idSecretHash - ); - } - - public get pubsubTopic(): string { - return this.encoder.pubsubTopic; - } - - public get routingInfo(): IRoutingInfo { - return this.encoder.routingInfo; - } - - public get contentTopic(): string { - return this.encoder.contentTopic; - } - - public get ephemeral(): boolean { - return this.encoder.ephemeral; - } -} - -type RLNEncoderOptions = { - encoder: IEncoder; - rlnInstance: RLNInstance; - index: number; - credential: IdentityCredential; -}; - 
-export const createRLNEncoder = (options: RLNEncoderOptions): RLNEncoder => { - return new RLNEncoder( - options.encoder, - options.rlnInstance, - options.index, - options.credential - ); -}; - -export class RLNDecoder - implements IDecoder> -{ - public constructor( - private readonly rlnInstance: RLNInstance, - private readonly decoder: IDecoder - ) {} - - public get pubsubTopic(): string { - return this.decoder.pubsubTopic; - } - - public get contentTopic(): string { - return this.decoder.contentTopic; - } - - public fromWireToProtoObj( - bytes: Uint8Array - ): Promise { - const protoMessage = this.decoder.fromWireToProtoObj(bytes); - log.info("Message decoded", protoMessage); - return Promise.resolve(protoMessage); - } - - public async fromProtoObj( - pubsubTopic: string, - proto: IProtoMessage - ): Promise | undefined> { - const msg: T | undefined = await this.decoder.fromProtoObj( - pubsubTopic, - proto - ); - if (!msg) return; - return new RlnMessage(this.rlnInstance, msg, proto.rateLimitProof); - } -} - -type RLNDecoderOptions = { - decoder: IDecoder; - rlnInstance: RLNInstance; -}; - -export const createRLNDecoder = ( - options: RLNDecoderOptions -): RLNDecoder => { - return new RLNDecoder(options.rlnInstance, options.decoder); -}; diff --git a/packages/rln/src/contract/constants.ts b/packages/rln/src/contract/constants.ts index 4808a88a14..7869e65bc6 100644 --- a/packages/rln/src/contract/constants.ts +++ b/packages/rln/src/contract/constants.ts @@ -19,26 +19,16 @@ export const PRICE_CALCULATOR_CONTRACT = { * @see https://github.com/waku-org/specs/blob/master/standards/core/rln-contract.md#implementation-suggestions */ export const RATE_LIMIT_TIERS = { - LOW: 20, // Suggested minimum rate - 20 messages per epoch - MEDIUM: 200, - HIGH: 600 // Suggested maximum rate - 600 messages per epoch + STANDARD: 300, + MAX: 600 } as const; // Global rate limit parameters export const RATE_LIMIT_PARAMS = { - MIN_RATE: RATE_LIMIT_TIERS.LOW, - MAX_RATE: 
RATE_LIMIT_TIERS.HIGH, - MAX_TOTAL_RATE: 160_000, // Maximum total rate limit across all memberships - EPOCH_LENGTH: 600 // Epoch length in seconds (10 minutes) + MIN_RATE: RATE_LIMIT_TIERS.STANDARD, + MAX_RATE: RATE_LIMIT_TIERS.MAX, + MAX_TOTAL_RATE: 160_000, + EPOCH_LENGTH: 600 } as const; -/** - * Default Q value for the RLN contract - * This is the upper bound for the ID commitment - * @see https://github.com/waku-org/specs/blob/master/standards/core/rln-contract.md#implementation-suggestions - */ -export const RLN_Q = BigInt( - "21888242871839275222246405745257275088548364400416034343698204186575808495617" -); - export const DEFAULT_RATE_LIMIT = RATE_LIMIT_PARAMS.MAX_RATE; diff --git a/packages/rln/src/contract/index.ts b/packages/rln/src/contract/index.ts index 7f96557dc9..5d4d612733 100644 --- a/packages/rln/src/contract/index.ts +++ b/packages/rln/src/contract/index.ts @@ -1,3 +1,2 @@ -export { RLNContract } from "./rln_contract.js"; export * from "./constants.js"; export * from "./types.js"; diff --git a/packages/rln/src/contract/rln_base_contract.ts b/packages/rln/src/contract/rln_base_contract.ts index 7546596cd0..9934360be3 100644 --- a/packages/rln/src/contract/rln_base_contract.ts +++ b/packages/rln/src/contract/rln_base_contract.ts @@ -3,7 +3,6 @@ import { ethers } from "ethers"; import { IdentityCredential } from "../identity.js"; import { DecryptedCredentials } from "../keystore/types.js"; -import { BytesUtils } from "../utils/bytes.js"; import { RLN_ABI } from "./abi/rln.js"; import { @@ -632,7 +631,7 @@ export class RLNBaseContract { permit.v, permit.r, permit.s, - BytesUtils.buildBigIntFromUint8ArrayBE(identity.IDCommitment), + identity.IDCommitmentBigInt, this.rateLimit, idCommitmentsToErase.map((id) => ethers.BigNumber.from(id)) ); diff --git a/packages/rln/src/contract/rln_contract.spec.ts b/packages/rln/src/contract/rln_contract.spec.ts deleted file mode 100644 index 7603e7cc2e..0000000000 --- a/packages/rln/src/contract/rln_contract.spec.ts 
+++ /dev/null @@ -1,90 +0,0 @@ -import { hexToBytes } from "@waku/utils/bytes"; -import { expect, use } from "chai"; -import chaiAsPromised from "chai-as-promised"; -import * as ethers from "ethers"; -import sinon, { SinonSandbox } from "sinon"; - -import { createTestRLNInstance, initializeRLNContract } from "./test_setup.js"; -import { - createMockRegistryContract, - createRegisterStub, - mockRLNRegisteredEvent, - verifyRegistration -} from "./test_utils.js"; - -use(chaiAsPromised); - -describe("RLN Contract abstraction - RLN", () => { - let sandbox: SinonSandbox; - - beforeEach(async () => { - sandbox = sinon.createSandbox(); - }); - - afterEach(() => { - sandbox.restore(); - }); - - describe("Member Registration", () => { - it("should fetch members from events and store them in the RLN instance", async () => { - const { rlnInstance, insertMemberSpy } = await createTestRLNInstance(); - const membershipRegisteredEvent = mockRLNRegisteredEvent(); - const queryFilterStub = sinon.stub().returns([membershipRegisteredEvent]); - - const mockedRegistryContract = createMockRegistryContract({ - queryFilter: queryFilterStub - }); - - const rlnContract = await initializeRLNContract( - rlnInstance, - mockedRegistryContract - ); - - await rlnContract.fetchMembers({ - fromBlock: 0, - fetchRange: 1000, - fetchChunks: 2 - }); - - expect( - insertMemberSpy.calledWith( - ethers.utils.zeroPad( - hexToBytes(membershipRegisteredEvent.args!.idCommitment), - 32 - ) - ) - ).to.be.true; - expect(queryFilterStub.called).to.be.true; - }); - - it("should register a member", async () => { - const { rlnInstance, identity, insertMemberSpy } = - await createTestRLNInstance(); - - const registerStub = createRegisterStub(identity); - const mockedRegistryContract = createMockRegistryContract({ - register: registerStub, - queryFilter: () => [] - }); - - const rlnContract = await initializeRLNContract( - rlnInstance, - mockedRegistryContract - ); - - const decryptedCredentials = - await 
rlnContract.registerWithIdentity(identity); - - if (!decryptedCredentials) { - throw new Error("Failed to retrieve credentials"); - } - - verifyRegistration( - decryptedCredentials, - identity, - registerStub, - insertMemberSpy - ); - }); - }); -}); diff --git a/packages/rln/src/contract/rln_contract.ts b/packages/rln/src/contract/rln_contract.ts deleted file mode 100644 index eae91b323e..0000000000 --- a/packages/rln/src/contract/rln_contract.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { Logger } from "@waku/utils"; -import { hexToBytes } from "@waku/utils/bytes"; -import { ethers } from "ethers"; - -import type { RLNInstance } from "../rln.js"; -import { MerkleRootTracker } from "../root_tracker.js"; -import { BytesUtils } from "../utils/bytes.js"; - -import { RLNBaseContract } from "./rln_base_contract.js"; -import { RLNContractInitOptions } from "./types.js"; - -const log = new Logger("rln:contract"); - -export class RLNContract extends RLNBaseContract { - private instance: RLNInstance; - private merkleRootTracker: MerkleRootTracker; - - /** - * Asynchronous initializer for RLNContract. - * Allows injecting a mocked contract for testing purposes. 
- */ - public static async init( - rlnInstance: RLNInstance, - options: RLNContractInitOptions - ): Promise { - const rlnContract = new RLNContract(rlnInstance, options); - - return rlnContract; - } - - private constructor( - rlnInstance: RLNInstance, - options: RLNContractInitOptions - ) { - super(options); - - this.instance = rlnInstance; - - const initialRoot = rlnInstance.zerokit.getMerkleRoot(); - this.merkleRootTracker = new MerkleRootTracker(5, initialRoot); - } - - public override processEvents(events: ethers.Event[]): void { - const toRemoveTable = new Map(); - const toInsertTable = new Map(); - - events.forEach((evt) => { - if (!evt.args) { - return; - } - - if ( - evt.event === "MembershipErased" || - evt.event === "MembershipExpired" - ) { - let index = evt.args.index; - - if (!index) { - return; - } - - if (typeof index === "number" || typeof index === "string") { - index = ethers.BigNumber.from(index); - } else { - log.error("Index is not a number or string", { - index, - event: evt - }); - return; - } - - const toRemoveVal = toRemoveTable.get(evt.blockNumber); - if (toRemoveVal != undefined) { - toRemoveVal.push(index.toNumber()); - toRemoveTable.set(evt.blockNumber, toRemoveVal); - } else { - toRemoveTable.set(evt.blockNumber, [index.toNumber()]); - } - } else if (evt.event === "MembershipRegistered") { - let eventsPerBlock = toInsertTable.get(evt.blockNumber); - if (eventsPerBlock == undefined) { - eventsPerBlock = []; - } - - eventsPerBlock.push(evt); - toInsertTable.set(evt.blockNumber, eventsPerBlock); - } - }); - - this.removeMembers(this.instance, toRemoveTable); - this.insertMembers(this.instance, toInsertTable); - } - - private insertMembers( - rlnInstance: RLNInstance, - toInsert: Map - ): void { - toInsert.forEach((events: ethers.Event[], blockNumber: number) => { - events.forEach((evt) => { - if (!evt.args) return; - - const _idCommitment = evt.args.idCommitment as string; - let index = evt.args.index; - - if (!_idCommitment || !index) { 
- return; - } - - if (typeof index === "number" || typeof index === "string") { - index = ethers.BigNumber.from(index); - } - - const idCommitment = BytesUtils.zeroPadLE( - hexToBytes(_idCommitment), - 32 - ); - rlnInstance.zerokit.insertMember(idCommitment); - - const numericIndex = index.toNumber(); - this._members.set(numericIndex, { - index, - idCommitment: _idCommitment - }); - }); - - const currentRoot = rlnInstance.zerokit.getMerkleRoot(); - this.merkleRootTracker.pushRoot(blockNumber, currentRoot); - }); - } - - private removeMembers( - rlnInstance: RLNInstance, - toRemove: Map - ): void { - const removeDescending = new Map([...toRemove].reverse()); - removeDescending.forEach((indexes: number[], blockNumber: number) => { - indexes.forEach((index) => { - if (this._members.has(index)) { - this._members.delete(index); - rlnInstance.zerokit.deleteMember(index); - } - }); - - this.merkleRootTracker.backFill(blockNumber); - }); - } -} diff --git a/packages/rln/src/contract/test_setup.ts b/packages/rln/src/contract/test_setup.ts deleted file mode 100644 index b5da3f6af6..0000000000 --- a/packages/rln/src/contract/test_setup.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { hexToBytes } from "@waku/utils/bytes"; -import { ethers } from "ethers"; -import sinon from "sinon"; - -import { createRLN } from "../create.js"; -import type { IdentityCredential } from "../identity.js"; - -import { DEFAULT_RATE_LIMIT, RLN_CONTRACT } from "./constants.js"; -import { RLNContract } from "./rln_contract.js"; - -export interface TestRLNInstance { - rlnInstance: any; - identity: IdentityCredential; - insertMemberSpy: sinon.SinonStub; -} - -/** - * Creates a test RLN instance with basic setup - */ -export async function createTestRLNInstance(): Promise { - const rlnInstance = await createRLN(); - const insertMemberSpy = sinon.stub(); - rlnInstance.zerokit.insertMember = insertMemberSpy; - - const mockSignature = - 
"0xdeb8a6b00a8e404deb1f52d3aa72ed7f60a2ff4484c737eedaef18a0aacb2dfb4d5d74ac39bb71fa358cf2eb390565a35b026cc6272f2010d4351e17670311c21c"; - const identity = - rlnInstance.zerokit.generateSeededIdentityCredential(mockSignature); - - return { - rlnInstance, - identity, - insertMemberSpy - }; -} - -/** - * Initializes an RLN contract with the given registry contract - */ -export async function initializeRLNContract( - rlnInstance: any, - mockedRegistryContract: ethers.Contract -): Promise { - const provider = new ethers.providers.JsonRpcProvider(); - const voidSigner = new ethers.VoidSigner(RLN_CONTRACT.address, provider); - - const originalRegister = mockedRegistryContract.register; - (mockedRegistryContract as any).register = function (...args: any[]) { - const result = originalRegister.apply(this, args); - - if (args[0] && rlnInstance.zerokit) { - const idCommitmentBigInt = args[0]; - const idCommitmentHex = - "0x" + idCommitmentBigInt.toString(16).padStart(64, "0"); - const idCommitment = ethers.utils.zeroPad( - hexToBytes(idCommitmentHex), - 32 - ); - rlnInstance.zerokit.insertMember(idCommitment); - } - - return result; - }; - - const contract = await RLNContract.init(rlnInstance, { - address: RLN_CONTRACT.address, - signer: voidSigner, - rateLimit: DEFAULT_RATE_LIMIT, - contract: mockedRegistryContract - }); - - return contract; -} - -/** - * Common test message data - */ -export const TEST_DATA = { - contentTopic: "/test/1/waku-message/utf8", - emptyPubsubTopic: "", - testMessage: Uint8Array.from( - "Hello World".split("").map((x) => x.charCodeAt(0)) - ), - mockSignature: - "0xdeb8a6b00a8e404deb1f52d3aa72ed7f60a2ff4484c737eedaef18a0aacb2dfb4d5d74ac39bb71fa358cf2eb390565a35b026cc6272f2010d4351e17670311c21c" -}; diff --git a/packages/rln/src/contract/test_utils.ts b/packages/rln/src/contract/test_utils.ts deleted file mode 100644 index a2ac8bc403..0000000000 --- a/packages/rln/src/contract/test_utils.ts +++ /dev/null @@ -1,179 +0,0 @@ -import { hexToBytes } from 
"@waku/utils/bytes"; -import { expect } from "chai"; -import * as ethers from "ethers"; -import sinon from "sinon"; - -import type { IdentityCredential } from "../identity.js"; - -import { DEFAULT_RATE_LIMIT, RLN_CONTRACT } from "./constants.js"; - -export const mockRateLimits = { - minRate: 20, - maxRate: 600, - maxTotalRate: 1200, - currentTotalRate: 500 -}; - -type MockProvider = { - getLogs: () => never[]; - getBlockNumber: () => Promise; - getNetwork: () => Promise<{ chainId: number }>; -}; - -type MockFilters = { - MembershipRegistered: () => { address: string }; - MembershipErased: () => { address: string }; - MembershipExpired: () => { address: string }; -}; - -export function createMockProvider(): MockProvider { - return { - getLogs: () => [], - getBlockNumber: () => Promise.resolve(1000), - getNetwork: () => Promise.resolve({ chainId: 11155111 }) - }; -} - -export function createMockFilters(): MockFilters { - return { - MembershipRegistered: () => ({ address: RLN_CONTRACT.address }), - MembershipErased: () => ({ address: RLN_CONTRACT.address }), - MembershipExpired: () => ({ address: RLN_CONTRACT.address }) - }; -} - -type ContractOverrides = Partial<{ - filters: Record; - [key: string]: unknown; -}>; - -export function createMockRegistryContract( - overrides: ContractOverrides = {} -): ethers.Contract { - const filters = { - MembershipRegistered: () => ({ address: RLN_CONTRACT.address }), - MembershipErased: () => ({ address: RLN_CONTRACT.address }), - MembershipExpired: () => ({ address: RLN_CONTRACT.address }) - }; - - const baseContract = { - minMembershipRateLimit: () => - Promise.resolve(ethers.BigNumber.from(mockRateLimits.minRate)), - maxMembershipRateLimit: () => - Promise.resolve(ethers.BigNumber.from(mockRateLimits.maxRate)), - maxTotalRateLimit: () => - Promise.resolve(ethers.BigNumber.from(mockRateLimits.maxTotalRate)), - currentTotalRateLimit: () => - Promise.resolve(ethers.BigNumber.from(mockRateLimits.currentTotalRate)), - queryFilter: () 
=> [], - provider: createMockProvider(), - filters, - on: () => ({}), - removeAllListeners: () => ({}), - register: () => ({ - wait: () => - Promise.resolve({ - events: [mockRLNRegisteredEvent()] - }) - }), - estimateGas: { - register: () => Promise.resolve(ethers.BigNumber.from(100000)) - }, - functions: { - register: () => Promise.resolve() - }, - getMemberIndex: () => Promise.resolve(null), - interface: { - getEvent: (eventName: string) => ({ - name: eventName, - format: () => {} - }) - }, - address: RLN_CONTRACT.address - }; - - // Merge overrides while preserving filters - const merged = { - ...baseContract, - ...overrides, - filters: { ...filters, ...(overrides.filters || {}) } - }; - - return merged as unknown as ethers.Contract; -} - -export function mockRLNRegisteredEvent(idCommitment?: string): ethers.Event { - return { - args: { - idCommitment: - idCommitment || - "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", - membershipRateLimit: ethers.BigNumber.from(DEFAULT_RATE_LIMIT), - index: ethers.BigNumber.from(1) - }, - event: "MembershipRegistered" - } as unknown as ethers.Event; -} - -export function formatIdCommitment(idCommitmentBigInt: bigint): string { - return "0x" + idCommitmentBigInt.toString(16).padStart(64, "0"); -} - -export function createRegisterStub( - identity: IdentityCredential -): sinon.SinonStub { - return sinon.stub().callsFake(() => ({ - wait: () => - Promise.resolve({ - events: [ - { - event: "MembershipRegistered", - args: { - idCommitment: formatIdCommitment(identity.IDCommitmentBigInt), - membershipRateLimit: ethers.BigNumber.from(DEFAULT_RATE_LIMIT), - index: ethers.BigNumber.from(1) - } - } - ] - }) - })); -} - -export function verifyRegistration( - decryptedCredentials: any, - identity: IdentityCredential, - registerStub: sinon.SinonStub, - insertMemberSpy: sinon.SinonStub -): void { - if (!decryptedCredentials) { - throw new Error("Decrypted credentials should not be undefined"); - } - - // Verify 
registration call - expect( - registerStub.calledWith( - sinon.match.same(identity.IDCommitmentBigInt), - sinon.match.same(DEFAULT_RATE_LIMIT), - sinon.match.array, - sinon.match.object - ) - ).to.be.true; - - // Verify credential properties - expect(decryptedCredentials).to.have.property("identity"); - expect(decryptedCredentials).to.have.property("membership"); - expect(decryptedCredentials.membership).to.include({ - address: RLN_CONTRACT.address, - treeIndex: 1 - }); - - // Verify member insertion - const expectedIdCommitment = ethers.utils.zeroPad( - hexToBytes(formatIdCommitment(identity.IDCommitmentBigInt)), - 32 - ); - expect(insertMemberSpy.callCount).to.equal(1); - expect(insertMemberSpy.getCall(0).args[0]).to.deep.equal( - expectedIdCommitment - ); -} diff --git a/packages/rln/src/create.spec.ts b/packages/rln/src/create.spec.ts deleted file mode 100644 index 7dc2e4fd5e..0000000000 --- a/packages/rln/src/create.spec.ts +++ /dev/null @@ -1,137 +0,0 @@ -import { assert, expect } from "chai"; - -import { createRLN } from "./create.js"; - -describe("js-rln", () => { - it("should verify a proof", async function () { - const rlnInstance = await createRLN(); - - const credential = rlnInstance.zerokit.generateIdentityCredentials(); - - //peer's index in the Merkle Tree - const index = 5; - - // Create a Merkle tree with random members - for (let i = 0; i < 10; i++) { - if (i == index) { - // insert the current peer's pk - rlnInstance.zerokit.insertMember(credential.IDCommitment); - } else { - // create a new key pair - rlnInstance.zerokit.insertMember( - rlnInstance.zerokit.generateIdentityCredentials().IDCommitment - ); - } - } - - // prepare the message - const uint8Msg = Uint8Array.from( - "Hello World".split("").map((x) => x.charCodeAt(0)) - ); - - // setting up the epoch - const epoch = new Date(); - - // generating proof - const proof = await rlnInstance.zerokit.generateRLNProof( - uint8Msg, - index, - epoch, - credential.IDSecretHash - ); - - try { - // 
verify the proof - const verifResult = rlnInstance.zerokit.verifyRLNProof(proof, uint8Msg); - expect(verifResult).to.be.true; - } catch (err) { - assert.fail(0, 1, "should not have failed proof verification"); - } - - try { - // Modifying the signal so it's invalid - uint8Msg[4] = 4; - // verify the proof - const verifResult = rlnInstance.zerokit.verifyRLNProof(proof, uint8Msg); - expect(verifResult).to.be.false; - } catch (err) { - console.log(err); - } - }); - it("should verify a proof with a seeded membership key generation", async function () { - const rlnInstance = await createRLN(); - const seed = "This is a test seed"; - const credential = - rlnInstance.zerokit.generateSeededIdentityCredential(seed); - - //peer's index in the Merkle Tree - const index = 5; - - // Create a Merkle tree with random members - for (let i = 0; i < 10; i++) { - if (i == index) { - // insert the current peer's pk - rlnInstance.zerokit.insertMember(credential.IDCommitment); - } else { - // create a new key pair - rlnInstance.zerokit.insertMember( - rlnInstance.zerokit.generateIdentityCredentials().IDCommitment - ); - } - } - - // prepare the message - const uint8Msg = Uint8Array.from( - "Hello World".split("").map((x) => x.charCodeAt(0)) - ); - - // setting up the epoch - const epoch = new Date(); - - // generating proof - const proof = await rlnInstance.zerokit.generateRLNProof( - uint8Msg, - index, - epoch, - credential.IDSecretHash - ); - - try { - // verify the proof - const verifResult = rlnInstance.zerokit.verifyRLNProof(proof, uint8Msg); - expect(verifResult).to.be.true; - } catch (err) { - assert.fail(0, 1, "should not have failed proof verification"); - } - - try { - // Modifying the signal so it's invalid - uint8Msg[4] = 4; - // verify the proof - const verifResult = rlnInstance.zerokit.verifyRLNProof(proof, uint8Msg); - expect(verifResult).to.be.false; - } catch (err) { - console.log(err); - } - }); - - it("should generate the same membership key if the same seed is 
provided", async function () { - const rlnInstance = await createRLN(); - const seed = "This is a test seed"; - const memKeys1 = rlnInstance.zerokit.generateSeededIdentityCredential(seed); - const memKeys2 = rlnInstance.zerokit.generateSeededIdentityCredential(seed); - - memKeys1.IDCommitment.forEach((element, index) => { - expect(element).to.equal(memKeys2.IDCommitment[index]); - }); - memKeys1.IDNullifier.forEach((element, index) => { - expect(element).to.equal(memKeys2.IDNullifier[index]); - }); - memKeys1.IDSecretHash.forEach((element, index) => { - expect(element).to.equal(memKeys2.IDSecretHash[index]); - }); - memKeys1.IDTrapdoor.forEach((element, index) => { - expect(element).to.equal(memKeys2.IDTrapdoor[index]); - }); - }); -}); diff --git a/packages/rln/src/credentials_manager.ts b/packages/rln/src/credentials_manager.ts index c4fdceec70..b82d9421f1 100644 --- a/packages/rln/src/credentials_manager.ts +++ b/packages/rln/src/credentials_manager.ts @@ -1,11 +1,8 @@ -import { hmac } from "@noble/hashes/hmac"; -import { sha256 } from "@noble/hashes/sha2"; import { Logger } from "@waku/utils"; import { ethers } from "ethers"; -import { RLN_CONTRACT, RLN_Q } from "./contract/constants.js"; +import { RLN_CONTRACT } from "./contract/constants.js"; import { RLNBaseContract } from "./contract/rln_base_contract.js"; -import { IdentityCredential } from "./identity.js"; import { Keystore } from "./keystore/index.js"; import type { DecryptedCredentials, @@ -13,7 +10,6 @@ import type { } from "./keystore/index.js"; import { KeystoreEntity, Password } from "./keystore/types.js"; import { RegisterMembershipOptions, StartRLNOptions } from "./types.js"; -import { BytesUtils } from "./utils/bytes.js"; import { extractMetaMaskSigner } from "./utils/index.js"; import { Zerokit } from "./zerokit.js"; @@ -21,7 +17,6 @@ const log = new Logger("rln:credentials"); /** * Manages credentials for RLN - * This is a lightweight implementation of the RLN contract that doesn't require 
Zerokit * It is used to register membership and generate identity credentials */ export class RLNCredentialsManager { @@ -34,9 +29,9 @@ export class RLNCredentialsManager { protected keystore = Keystore.create(); public credentials: undefined | DecryptedCredentials; - public zerokit: undefined | Zerokit; + public zerokit: Zerokit; - public constructor(zerokit?: Zerokit) { + public constructor(zerokit: Zerokit) { log.info("RLNCredentialsManager initialized"); this.zerokit = zerokit; } @@ -81,7 +76,7 @@ export class RLNCredentialsManager { this.contract = await RLNBaseContract.create({ address: address!, signer: signer!, - rateLimit: rateLimit ?? this.zerokit?.rateLimit + rateLimit: rateLimit ?? this.zerokit.rateLimit }); log.info("RLNCredentialsManager successfully started"); @@ -106,18 +101,10 @@ export class RLNCredentialsManager { let identity = "identity" in options && options.identity; if ("signature" in options) { - log.info("Generating identity from signature"); - if (this.zerokit) { - log.info("Using Zerokit to generate identity"); - identity = this.zerokit.generateSeededIdentityCredential( - options.signature - ); - } else { - log.info("Using local implementation to generate identity"); - identity = await this.generateSeededIdentityCredential( - options.signature - ); - } + log.info("Using Zerokit to generate identity"); + identity = this.zerokit.generateSeededIdentityCredential( + options.signature + ); } if (!identity) { @@ -242,55 +229,4 @@ export class RLNCredentialsManager { ); } } - - /** - * Generates an identity credential from a seed string - * This is a pure implementation that doesn't rely on Zerokit - * @param seed A string seed to generate the identity from - * @returns IdentityCredential - */ - private async generateSeededIdentityCredential( - seed: string - ): Promise { - log.info("Generating seeded identity credential"); - // Convert the seed to bytes - const encoder = new TextEncoder(); - const seedBytes = encoder.encode(seed); - - // 
Generate deterministic values using HMAC-SHA256 - // We use different context strings for each component to ensure they're different - const idTrapdoorBE = hmac(sha256, seedBytes, encoder.encode("IDTrapdoor")); - const idNullifierBE = hmac( - sha256, - seedBytes, - encoder.encode("IDNullifier") - ); - - const combinedBytes = new Uint8Array([...idTrapdoorBE, ...idNullifierBE]); - const idSecretHashBE = sha256(combinedBytes); - - const idCommitmentRawBE = sha256(idSecretHashBE); - const idCommitmentBE = this.reduceIdCommitment(idCommitmentRawBE); - - log.info( - "Successfully generated identity credential, storing in Big Endian format" - ); - return new IdentityCredential( - idTrapdoorBE, - idNullifierBE, - idSecretHashBE, - idCommitmentBE - ); - } - - /** - * Helper: take 32-byte BE, reduce mod Q, return 32-byte BE - */ - private reduceIdCommitment( - bytesBE: Uint8Array, - limit: bigint = RLN_Q - ): Uint8Array { - const nBE = BytesUtils.buildBigIntFromUint8ArrayBE(bytesBE); - return BytesUtils.bigIntToUint8Array32BE(nBE % limit); - } } diff --git a/packages/rln/src/identity.ts b/packages/rln/src/identity.ts index 87167165ba..873463ea6e 100644 --- a/packages/rln/src/identity.ts +++ b/packages/rln/src/identity.ts @@ -11,8 +11,7 @@ export class IdentityCredential { public readonly IDSecretHash: Uint8Array, public readonly IDCommitment: Uint8Array ) { - this.IDCommitmentBigInt = - BytesUtils.buildBigIntFromUint8ArrayBE(IDCommitment); + this.IDCommitmentBigInt = BytesUtils.toBigInt(IDCommitment); } public static fromBytes(memKeys: Uint8Array): IdentityCredential { diff --git a/packages/rln/src/index.ts b/packages/rln/src/index.ts index 0a07db7810..3348e370f7 100644 --- a/packages/rln/src/index.ts +++ b/packages/rln/src/index.ts @@ -1,28 +1,18 @@ -import { RLNDecoder, RLNEncoder } from "./codec.js"; import { RLN_ABI } from "./contract/abi/rln.js"; -import { RLN_CONTRACT, RLNContract } from "./contract/index.js"; +import { RLN_CONTRACT } from "./contract/index.js"; import 
{ RLNBaseContract } from "./contract/rln_base_contract.js"; import { createRLN } from "./create.js"; -import { RLNCredentialsManager } from "./credentials_manager.js"; import { IdentityCredential } from "./identity.js"; import { Keystore } from "./keystore/index.js"; -import { Proof } from "./proof.js"; import { RLNInstance } from "./rln.js"; -import { MerkleRootTracker } from "./root_tracker.js"; import { extractMetaMaskSigner } from "./utils/index.js"; export { - RLNCredentialsManager, RLNBaseContract, createRLN, Keystore, RLNInstance, IdentityCredential, - Proof, - RLNEncoder, - RLNDecoder, - MerkleRootTracker, - RLNContract, RLN_CONTRACT, extractMetaMaskSigner, RLN_ABI diff --git a/packages/rln/src/keystore/keystore.spec.ts b/packages/rln/src/keystore/keystore.spec.ts index 02f2f20bd3..95543e8535 100644 --- a/packages/rln/src/keystore/keystore.spec.ts +++ b/packages/rln/src/keystore/keystore.spec.ts @@ -222,9 +222,7 @@ describe("Keystore", () => { ]) } as unknown as IdentityCredential; // Add the missing property for test correctness - identity.IDCommitmentBigInt = BytesUtils.buildBigIntFromUint8ArrayBE( - identity.IDCommitment - ); + identity.IDCommitmentBigInt = BytesUtils.toBigInt(identity.IDCommitment); const membership = { chainId: "0xAA36A7", treeIndex: 8, @@ -276,9 +274,7 @@ describe("Keystore", () => { 58, 94, 20, 246, 8, 33, 65, 238, 37, 112, 97, 65, 241, 255, 93, 171, 15 ] } as unknown as IdentityCredential; - identity.IDCommitmentBigInt = BytesUtils.buildBigIntFromUint8ArrayBE( - identity.IDCommitment - ); + identity.IDCommitmentBigInt = BytesUtils.toBigInt(identity.IDCommitment); const membership = { chainId: "0xAA36A7", treeIndex: 8, diff --git a/packages/rln/src/keystore/keystore.ts b/packages/rln/src/keystore/keystore.ts index 880d707bf3..f001f2938d 100644 --- a/packages/rln/src/keystore/keystore.ts +++ b/packages/rln/src/keystore/keystore.ts @@ -264,20 +264,14 @@ export class Keystore { _.get(obj, "identityCredential.idSecretHash", []) ); - // 
Big Endian - const idCommitmentBE = BytesUtils.switchEndianness(idCommitmentLE); - const idTrapdoorBE = BytesUtils.switchEndianness(idTrapdoorLE); - const idNullifierBE = BytesUtils.switchEndianness(idNullifierLE); - const idSecretHashBE = BytesUtils.switchEndianness(idSecretHashLE); - const idCommitmentBigInt = - BytesUtils.buildBigIntFromUint8ArrayBE(idCommitmentBE); + const idCommitmentBigInt = BytesUtils.toBigInt(idCommitmentLE); return { identity: { - IDCommitment: idCommitmentBE, - IDTrapdoor: idTrapdoorBE, - IDNullifier: idNullifierBE, - IDSecretHash: idSecretHashBE, + IDCommitment: idCommitmentLE, + IDTrapdoor: idTrapdoorLE, + IDNullifier: idNullifierLE, + IDSecretHash: idSecretHashLE, IDCommitmentBigInt: idCommitmentBigInt }, membership: { @@ -329,35 +323,18 @@ export class Keystore { // follows nwaku implementation // https://github.com/waku-org/nwaku/blob/f05528d4be3d3c876a8b07f9bb7dfaae8aa8ec6e/waku/waku_keystore/protocol_types.nim#L98 - // IdentityCredential is stored in Big Endian format => switch to Little Endian private static fromIdentityToBytes(options: KeystoreEntity): Uint8Array { const { IDCommitment, IDNullifier, IDSecretHash, IDTrapdoor } = options.identity; - const idCommitmentLE = BytesUtils.switchEndianness(IDCommitment); - const idNullifierLE = BytesUtils.switchEndianness(IDNullifier); - const idSecretHashLE = BytesUtils.switchEndianness(IDSecretHash); - const idTrapdoorLE = BytesUtils.switchEndianness(IDTrapdoor); - - // eslint-disable-next-line no-console - console.log({ - idCommitmentBE: IDCommitment, - idCommitmentLE, - idNullifierBE: IDNullifier, - idNullifierLE, - idSecretHashBE: IDSecretHash, - idSecretHashLE, - idTrapdoorBE: IDTrapdoor, - idTrapdoorLE - }); return utf8ToBytes( JSON.stringify({ treeIndex: options.membership.treeIndex, identityCredential: { - idCommitment: Array.from(idCommitmentLE), - idNullifier: Array.from(idNullifierLE), - idSecretHash: Array.from(idSecretHashLE), - idTrapdoor: Array.from(idTrapdoorLE) + 
idCommitment: Array.from(IDCommitment), + idNullifier: Array.from(IDNullifier), + idSecretHash: Array.from(IDSecretHash), + idTrapdoor: Array.from(IDTrapdoor) }, membershipContract: { chainId: options.membership.chainId, diff --git a/packages/rln/src/message.ts b/packages/rln/src/message.ts deleted file mode 100644 index 1cca8a4ed2..0000000000 --- a/packages/rln/src/message.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { message } from "@waku/core"; -import type { - IDecodedMessage, - IMessage, - IRateLimitProof, - IRlnMessage -} from "@waku/interfaces"; -import * as utils from "@waku/utils/bytes"; - -import { RLNInstance } from "./rln.js"; -import { epochBytesToInt } from "./utils/index.js"; - -export function toRLNSignal(contentTopic: string, msg: IMessage): Uint8Array { - const contentTopicBytes = utils.utf8ToBytes(contentTopic ?? ""); - return new Uint8Array([...(msg.payload ?? []), ...contentTopicBytes]); -} - -export class RlnMessage implements IRlnMessage { - public pubsubTopic = ""; - public version = message.version_0.Version; - - public constructor( - private rlnInstance: RLNInstance, - private msg: T, - public rateLimitProof: IRateLimitProof | undefined - ) {} - - public verify(roots: Uint8Array[]): boolean | undefined { - return this.rateLimitProof - ? this.rlnInstance.zerokit.verifyWithRoots( - this.rateLimitProof, - toRLNSignal(this.msg.contentTopic, this.msg), - roots - ) // this.rlnInstance.verifyRLNProof once issue status-im/nwaku#1248 is fixed - : undefined; - } - - public verifyNoRoot(): boolean | undefined { - return this.rateLimitProof - ? 
this.rlnInstance.zerokit.verifyWithNoRoot( - this.rateLimitProof, - toRLNSignal(this.msg.contentTopic, this.msg) - ) // this.rlnInstance.verifyRLNProof once issue status-im/nwaku#1248 is fixed - : undefined; - } - - public get payload(): Uint8Array { - return this.msg.payload; - } - - public get hash(): Uint8Array { - return this.msg.hash; - } - - public get hashStr(): string { - return this.msg.hashStr; - } - - public get contentTopic(): string { - return this.msg.contentTopic; - } - - public get timestamp(): Date | undefined { - return this.msg.timestamp; - } - - public get ephemeral(): boolean | undefined { - return this.msg.ephemeral; - } - - public get meta(): Uint8Array | undefined { - return this.msg.meta; - } - - public get epoch(): number | undefined { - const bytes = this.rateLimitProof?.epoch; - if (!bytes) return undefined; - - return epochBytesToInt(bytes); - } -} diff --git a/packages/rln/src/proof.ts b/packages/rln/src/proof.ts deleted file mode 100644 index ce3ad6f26d..0000000000 --- a/packages/rln/src/proof.ts +++ /dev/null @@ -1,69 +0,0 @@ -import type { IRateLimitProof } from "@waku/interfaces"; - -import { BytesUtils, poseidonHash } from "./utils/index.js"; - -const proofOffset = 128; -const rootOffset = proofOffset + 32; -const epochOffset = rootOffset + 32; -const shareXOffset = epochOffset + 32; -const shareYOffset = shareXOffset + 32; -const nullifierOffset = shareYOffset + 32; -const rlnIdentifierOffset = nullifierOffset + 32; - -class ProofMetadata { - public constructor( - public readonly nullifier: Uint8Array, - public readonly shareX: Uint8Array, - public readonly shareY: Uint8Array, - public readonly externalNullifier: Uint8Array - ) {} -} - -export class Proof implements IRateLimitProof { - public readonly proof: Uint8Array; - public readonly merkleRoot: Uint8Array; - public readonly epoch: Uint8Array; - public readonly shareX: Uint8Array; - public readonly shareY: Uint8Array; - public readonly nullifier: Uint8Array; - public readonly 
rlnIdentifier: Uint8Array; - - public constructor(proofBytes: Uint8Array) { - if (proofBytes.length < rlnIdentifierOffset) { - throw new Error("invalid proof"); - } - // parse the proof as proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> - this.proof = proofBytes.subarray(0, proofOffset); - this.merkleRoot = proofBytes.subarray(proofOffset, rootOffset); - this.epoch = proofBytes.subarray(rootOffset, epochOffset); - this.shareX = proofBytes.subarray(epochOffset, shareXOffset); - this.shareY = proofBytes.subarray(shareXOffset, shareYOffset); - this.nullifier = proofBytes.subarray(shareYOffset, nullifierOffset); - this.rlnIdentifier = proofBytes.subarray( - nullifierOffset, - rlnIdentifierOffset - ); - } - - public extractMetadata(): ProofMetadata { - const externalNullifier = poseidonHash(this.epoch, this.rlnIdentifier); - return new ProofMetadata( - this.nullifier, - this.shareX, - this.shareY, - externalNullifier - ); - } -} - -export function proofToBytes(p: IRateLimitProof): Uint8Array { - return BytesUtils.concatenate( - p.proof, - p.merkleRoot, - p.epoch, - p.shareX, - p.shareY, - p.nullifier, - p.rlnIdentifier - ); -} diff --git a/packages/rln/src/resources/rln.wasm b/packages/rln/src/resources/rln.wasm index 04aaeef783..ddca70ce2e 100644 Binary files a/packages/rln/src/resources/rln.wasm and b/packages/rln/src/resources/rln.wasm differ diff --git a/packages/rln/src/resources/rln_final.zkey b/packages/rln/src/resources/rln_final.zkey index c6cc7d491a..46489a11c0 100644 Binary files a/packages/rln/src/resources/rln_final.zkey and b/packages/rln/src/resources/rln_final.zkey differ diff --git a/packages/rln/src/resources/verification_key.d.ts b/packages/rln/src/resources/verification_key.d.ts deleted file mode 100644 index 99ec4f67d1..0000000000 --- a/packages/rln/src/resources/verification_key.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -declare const verificationKey: { - protocol: string; - curve: string; - nPublic: 
number; - vk_alpha_1: string[]; - vk_beta_2: string[][]; - vk_gamma_2: string[][]; - vk_delta_2: string[][]; - vk_alphabeta_12: string[][][]; - IC: string[][]; -}; - -export default verificationKey; diff --git a/packages/rln/src/resources/verification_key.js b/packages/rln/src/resources/verification_key.js deleted file mode 100644 index 15425bef8c..0000000000 --- a/packages/rln/src/resources/verification_key.js +++ /dev/null @@ -1,112 +0,0 @@ -const verificationKey = { - protocol: "groth16", - curve: "bn128", - nPublic: 6, - vk_alpha_1: [ - "20124996762962216725442980738609010303800849578410091356605067053491763969391", - "9118593021526896828671519912099489027245924097793322973632351264852174143923", - "1" - ], - vk_beta_2: [ - [ - "4693952934005375501364248788849686435240706020501681709396105298107971354382", - "14346958885444710485362620645446987998958218205939139994511461437152241966681" - ], - [ - "16851772916911573982706166384196538392731905827088356034885868448550849804972", - "823612331030938060799959717749043047845343400798220427319188951998582076532" - ], - ["1", "0"] - ], - vk_gamma_2: [ - [ - "10857046999023057135944570762232829481370756359578518086990519993285655852781", - "11559732032986387107991004021392285783925812861821192530917403151452391805634" - ], - [ - "8495653923123431417604973247489272438418190587263600148770280649306958101930", - "4082367875863433681332203403145435568316851327593401208105741076214120093531" - ], - ["1", "0"] - ], - vk_delta_2: [ - [ - "8353516066399360694538747105302262515182301251524941126222712285088022964076", - "9329524012539638256356482961742014315122377605267454801030953882967973561832" - ], - [ - "16805391589556134376869247619848130874761233086443465978238468412168162326401", - "10111259694977636294287802909665108497237922060047080343914303287629927847739" - ], - ["1", "0"] - ], - vk_alphabeta_12: [ - [ - [ - "12608968655665301215455851857466367636344427685631271961542642719683786103711", - 
"9849575605876329747382930567422916152871921500826003490242628251047652318086" - ], - [ - "6322029441245076030714726551623552073612922718416871603535535085523083939021", - "8700115492541474338049149013125102281865518624059015445617546140629435818912" - ], - [ - "10674973475340072635573101639867487770811074181475255667220644196793546640210", - "2926286967251299230490668407790788696102889214647256022788211245826267484824" - ] - ], - [ - [ - "9660441540778523475944706619139394922744328902833875392144658911530830074820", - "19548113127774514328631808547691096362144426239827206966690021428110281506546" - ], - [ - "1870837942477655969123169532603615788122896469891695773961478956740992497097", - "12536105729661705698805725105036536744930776470051238187456307227425796690780" - ], - [ - "21811903352654147452884857281720047789720483752548991551595462057142824037334", - "19021616763967199151052893283384285352200445499680068407023236283004353578353" - ] - ] - ], - IC: [ - [ - "11992897507809711711025355300535923222599547639134311050809253678876341466909", - "17181525095924075896332561978747020491074338784673526378866503154966799128110", - "1" - ], - [ - "17018665030246167677911144513385572506766200776123272044534328594850561667818", - "18601114175490465275436712413925513066546725461375425769709566180981674884464", - "1" - ], - [ - "18799470100699658367834559797874857804183288553462108031963980039244731716542", - "13064227487174191981628537974951887429496059857753101852163607049188825592007", - "1" - ], - [ - "17432501889058124609368103715904104425610382063762621017593209214189134571156", - "13406815149699834788256141097399354592751313348962590382887503595131085938635", - "1" - ], - [ - "10320964835612716439094703312987075811498239445882526576970512041988148264481", - "9024164961646353611176283204118089412001502110138072989569118393359029324867", - "1" - ], - [ - "718355081067365548229685160476620267257521491773976402837645005858953849298", - 
"14635482993933988261008156660773180150752190597753512086153001683711587601974", - "1" - ], - [ - "11777720285956632126519898515392071627539405001940313098390150593689568177535", - "8483603647274280691250972408211651407952870456587066148445913156086740744515", - "1" - ] - ] -}; - -export default verificationKey; diff --git a/packages/rln/src/resources/witness_calculator.d.ts b/packages/rln/src/resources/witness_calculator.d.ts index eb6f86aab9..a54b1d62c8 100644 --- a/packages/rln/src/resources/witness_calculator.d.ts +++ b/packages/rln/src/resources/witness_calculator.d.ts @@ -1,11 +1,25 @@ -export async function builder( +export const builder: ( code: Uint8Array, - sanityCheck: boolean -): Promise; + sanityCheck?: boolean +) => Promise; export class WitnessCalculator { - public calculateWitness( - input: unknown, - sanityCheck: boolean - ): Promise>; + constructor(instance: any, sanityCheck?: boolean); + + circom_version(): number; + + calculateWitness( + input: Record, + sanityCheck?: boolean + ): Promise; + + calculateBinWitness( + input: Record, + sanityCheck?: boolean + ): Promise; + + calculateWTNSBin( + input: Record, + sanityCheck?: boolean + ): Promise; } diff --git a/packages/rln/src/resources/witness_calculator.js b/packages/rln/src/resources/witness_calculator.js index 47b218d881..1befc32e98 100644 --- a/packages/rln/src/resources/witness_calculator.js +++ b/packages/rln/src/resources/witness_calculator.js @@ -1,6 +1,6 @@ // File generated with https://github.com/iden3/circom // following the instructions from: -// https://github.com/vacp2p/zerokit/tree/master/rln#compiling-circuits +// https://github.com/vacp2p/zerokit/tree/master/rln#advanced-custom-circuit-compilation export async function builder(code, options) { options = options || {}; diff --git a/packages/rln/src/rln.ts b/packages/rln/src/rln.ts index 00e7eaa249..491fd14d44 100644 --- a/packages/rln/src/rln.ts +++ b/packages/rln/src/rln.ts @@ -1,37 +1,14 @@ -import { createDecoder, 
createEncoder } from "@waku/core"; -import type { - ContentTopic, - IDecodedMessage, - IRoutingInfo, - EncoderOptions as WakuEncoderOptions -} from "@waku/interfaces"; import { Logger } from "@waku/utils"; -import init from "@waku/zerokit-rln-wasm"; -import * as zerokitRLN from "@waku/zerokit-rln-wasm"; +import init, * as zerokitRLN from "@waku/zerokit-rln-wasm"; -import { - createRLNDecoder, - createRLNEncoder, - type RLNDecoder, - type RLNEncoder -} from "./codec.js"; import { DEFAULT_RATE_LIMIT } from "./contract/constants.js"; import { RLNCredentialsManager } from "./credentials_manager.js"; -import type { - DecryptedCredentials, - EncryptedCredentials -} from "./keystore/index.js"; -import verificationKey from "./resources/verification_key"; import * as wc from "./resources/witness_calculator"; import { WitnessCalculator } from "./resources/witness_calculator"; import { Zerokit } from "./zerokit.js"; const log = new Logger("rln"); -type WakuRLNEncoderOptions = WakuEncoderOptions & { - credentials: EncryptedCredentials | DecryptedCredentials; -}; - export class RLNInstance extends RLNCredentialsManager { /** * Create an instance of RLN @@ -39,18 +16,13 @@ export class RLNInstance extends RLNCredentialsManager { */ public static async create(): Promise { try { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - await (init as any)?.(); - zerokitRLN.init_panic_hook(); + await init(); + zerokitRLN.initPanicHook(); const witnessCalculator = await RLNInstance.loadWitnessCalculator(); const zkey = await RLNInstance.loadZkey(); - const stringEncoder = new TextEncoder(); - const vkey = stringEncoder.encode(JSON.stringify(verificationKey)); - - const DEPTH = 20; - const zkRLN = zerokitRLN.newRLN(DEPTH, zkey, vkey); + const zkRLN = zerokitRLN.newRLN(zkey); const zerokit = new Zerokit(zkRLN, witnessCalculator, DEFAULT_RATE_LIMIT); return new RLNInstance(zerokit); @@ -64,39 +36,6 @@ export class RLNInstance extends RLNCredentialsManager { super(zerokit); } - 
public async createEncoder( - options: WakuRLNEncoderOptions - ): Promise { - const { credentials: decryptedCredentials } = - await RLNInstance.decryptCredentialsIfNeeded(options.credentials); - const credentials = decryptedCredentials || this.credentials; - - if (!credentials) { - throw Error( - "Failed to create Encoder: missing RLN credentials. Use createRLNEncoder directly." - ); - } - - await this.verifyCredentialsAgainstContract(credentials); - - return createRLNEncoder({ - encoder: createEncoder(options), - rlnInstance: this, - index: credentials.membership.treeIndex, - credential: credentials.identity - }); - } - - public createDecoder( - contentTopic: ContentTopic, - routingInfo: IRoutingInfo - ): RLNDecoder { - return createRLNDecoder({ - rlnInstance: this, - decoder: createDecoder(contentTopic, routingInfo) - }); - } - public static async loadWitnessCalculator(): Promise { try { const url = new URL("./resources/rln.wasm", import.meta.url); diff --git a/packages/rln/src/root_tracker.spec.ts b/packages/rln/src/root_tracker.spec.ts deleted file mode 100644 index 8e3c03efe0..0000000000 --- a/packages/rln/src/root_tracker.spec.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { assert, expect } from "chai"; - -import { MerkleRootTracker } from "./root_tracker.js"; - -describe("js-rln", () => { - it("should track merkle roots and backfill from block number", async function () { - const acceptableRootWindow = 3; - - const tracker = new MerkleRootTracker( - acceptableRootWindow, - new Uint8Array([0, 0, 0, 0]) - ); - expect(tracker.roots()).to.have.length(1); - expect(tracker.buffer()).to.have.length(0); - expect(tracker.roots()[0]).to.deep.equal(new Uint8Array([0, 0, 0, 0])); - - for (let i = 1; i <= 30; i++) { - tracker.pushRoot(i, new Uint8Array([0, 0, 0, i])); - } - - expect(tracker.roots()).to.have.length(acceptableRootWindow); - expect(tracker.buffer()).to.have.length(20); - assert.sameDeepMembers(tracker.roots(), [ - new Uint8Array([0, 0, 0, 30]), - new 
Uint8Array([0, 0, 0, 29]), - new Uint8Array([0, 0, 0, 28]) - ]); - - // Buffer should keep track of 20 blocks previous to the current valid merkle root window - expect(tracker.buffer()[0]).to.be.eql(new Uint8Array([0, 0, 0, 8])); - expect(tracker.buffer()[19]).to.be.eql(new Uint8Array([0, 0, 0, 27])); - - // Remove roots 29 and 30 - tracker.backFill(29); - assert.sameDeepMembers(tracker.roots(), [ - new Uint8Array([0, 0, 0, 28]), - new Uint8Array([0, 0, 0, 27]), - new Uint8Array([0, 0, 0, 26]) - ]); - - expect(tracker.buffer()).to.have.length(18); - expect(tracker.buffer()[0]).to.be.eql(new Uint8Array([0, 0, 0, 8])); - expect(tracker.buffer()[17]).to.be.eql(new Uint8Array([0, 0, 0, 25])); - - // Remove roots from block 15 onwards. These blocks exists within the buffer - tracker.backFill(15); - assert.sameDeepMembers(tracker.roots(), [ - new Uint8Array([0, 0, 0, 14]), - new Uint8Array([0, 0, 0, 13]), - new Uint8Array([0, 0, 0, 12]) - ]); - expect(tracker.buffer()).to.have.length(4); - expect(tracker.buffer()[0]).to.be.eql(new Uint8Array([0, 0, 0, 8])); - expect(tracker.buffer()[3]).to.be.eql(new Uint8Array([0, 0, 0, 11])); - }); -}); diff --git a/packages/rln/src/root_tracker.ts b/packages/rln/src/root_tracker.ts deleted file mode 100644 index 79b71845c1..0000000000 --- a/packages/rln/src/root_tracker.ts +++ /dev/null @@ -1,92 +0,0 @@ -class RootPerBlock { - public constructor( - public root: Uint8Array, - public blockNumber: number - ) {} -} - -const maxBufferSize = 20; - -export class MerkleRootTracker { - private validMerkleRoots: Array = new Array(); - private merkleRootBuffer: Array = new Array(); - - public constructor( - private acceptableRootWindowSize: number, - initialRoot: Uint8Array - ) { - this.pushRoot(0, initialRoot); - } - - public backFill(fromBlockNumber: number): void { - if (this.validMerkleRoots.length == 0) return; - - let numBlocks = 0; - for (let i = this.validMerkleRoots.length - 1; i >= 0; i--) { - if (this.validMerkleRoots[i].blockNumber 
>= fromBlockNumber) { - numBlocks++; - } - } - - if (numBlocks == 0) return; - - const olderBlock = fromBlockNumber < this.validMerkleRoots[0].blockNumber; - - // Remove last roots - let rootsToPop = numBlocks; - if (this.validMerkleRoots.length < rootsToPop) { - rootsToPop = this.validMerkleRoots.length; - } - - this.validMerkleRoots = this.validMerkleRoots.slice( - 0, - this.validMerkleRoots.length - rootsToPop - ); - - if (this.merkleRootBuffer.length == 0) return; - - if (olderBlock) { - const idx = this.merkleRootBuffer.findIndex( - (x) => x.blockNumber == fromBlockNumber - ); - if (idx > -1) { - this.merkleRootBuffer = this.merkleRootBuffer.slice(0, idx); - } - } - - // Backfill the tree's acceptable roots - let rootsToRestore = - this.acceptableRootWindowSize - this.validMerkleRoots.length; - if (this.merkleRootBuffer.length < rootsToRestore) { - rootsToRestore = this.merkleRootBuffer.length; - } - - for (let i = 0; i < rootsToRestore; i++) { - const x = this.merkleRootBuffer.pop(); - if (x) this.validMerkleRoots.unshift(x); - } - } - - public pushRoot(blockNumber: number, root: Uint8Array): void { - this.validMerkleRoots.push(new RootPerBlock(root, blockNumber)); - - // Maintain valid merkle root window - if (this.validMerkleRoots.length > this.acceptableRootWindowSize) { - const x = this.validMerkleRoots.shift(); - if (x) this.merkleRootBuffer.push(x); - } - - // Maintain merkle root buffer - if (this.merkleRootBuffer.length > maxBufferSize) { - this.merkleRootBuffer.shift(); - } - } - - public roots(): Array { - return this.validMerkleRoots.map((x) => x.root); - } - - public buffer(): Array { - return this.merkleRootBuffer.map((x) => x.root); - } -} diff --git a/packages/rln/src/utils/bytes.ts b/packages/rln/src/utils/bytes.ts index d871e8f564..4df17bd380 100644 --- a/packages/rln/src/utils/bytes.ts +++ b/packages/rln/src/utils/bytes.ts @@ -1,56 +1,52 @@ export class BytesUtils { /** - * Switches endianness of a byte array + * Concatenate Uint8Arrays + * 
@param input + * @returns concatenation of all Uint8Array received as input */ - public static switchEndianness(bytes: Uint8Array): Uint8Array { - return new Uint8Array([...bytes].reverse()); - } - - /** - * Builds a BigInt from a big-endian Uint8Array - * @param bytes The big-endian bytes to convert - * @returns The resulting BigInt in big-endian format - */ - public static buildBigIntFromUint8ArrayBE(bytes: Uint8Array): bigint { - let result = 0n; - for (let i = 0; i < bytes.length; i++) { - result = (result << 8n) + BigInt(bytes[i]); + public static concatenate(...input: Uint8Array[]): Uint8Array { + let totalLength = 0; + for (const arr of input) { + totalLength += arr.length; + } + const result = new Uint8Array(totalLength); + let offset = 0; + for (const arr of input) { + result.set(arr, offset); + offset += arr.length; } return result; } /** - * Switches endianness of a bigint value - * @param value The bigint value to switch endianness for - * @returns The bigint value with reversed endianness + * Convert a Uint8Array to a BigInt with configurable input endianness + * @param bytes - The byte array to convert + * @param inputEndianness - Endianness of the input bytes ('big' or 'little') + * @returns BigInt representation of the bytes */ - public static switchEndiannessBigInt(value: bigint): bigint { - // Convert bigint to byte array - const bytes = []; - let tempValue = value; - while (tempValue > 0n) { - bytes.push(Number(tempValue & 0xffn)); - tempValue >>= 8n; + public static toBigInt( + bytes: Uint8Array, + inputEndianness: "big" | "little" = "little" + ): bigint { + if (bytes.length === 0) { + return 0n; } - // Reverse bytes and convert back to bigint - return bytes - .reverse() - .reduce((acc, byte) => (acc << 8n) + BigInt(byte), 0n); - } + // Create a copy to avoid modifying the original array + const workingBytes = new Uint8Array(bytes); - /** - * Converts a big-endian bigint to a 32-byte big-endian Uint8Array - * @param value The big-endian bigint 
to convert - * @returns A 32-byte big-endian Uint8Array - */ - public static bigIntToUint8Array32BE(value: bigint): Uint8Array { - const bytes = new Uint8Array(32); - for (let i = 31; i >= 0; i--) { - bytes[i] = Number(value & 0xffn); - value >>= 8n; + // Reverse bytes if input is little-endian to work with big-endian internally + if (inputEndianness === "little") { + workingBytes.reverse(); } - return bytes; + + // Convert to BigInt + let result = 0n; + for (let i = 0; i < workingBytes.length; i++) { + result = (result << 8n) | BigInt(workingBytes[i]); + } + + return result; } /** @@ -81,20 +77,6 @@ export class BytesUtils { return buf; } - /** - * Fills with zeros to set length - * @param array little endian Uint8Array - * @param length amount to pad - * @returns little endian Uint8Array padded with zeros to set length - */ - public static zeroPadLE(array: Uint8Array, length: number): Uint8Array { - const result = new Uint8Array(length); - for (let i = 0; i < length; i++) { - result[i] = array[i] || 0; - } - return result; - } - // Adapted from https://github.com/feross/buffer public static checkInt( buf: Uint8Array, @@ -108,23 +90,4 @@ export class BytesUtils { throw new RangeError('"value" argument is out of bounds'); if (offset + ext > buf.length) throw new RangeError("Index out of range"); } - - /** - * Concatenate Uint8Arrays - * @param input - * @returns concatenation of all Uint8Array received as input - */ - public static concatenate(...input: Uint8Array[]): Uint8Array { - let totalLength = 0; - for (const arr of input) { - totalLength += arr.length; - } - const result = new Uint8Array(totalLength); - let offset = 0; - for (const arr of input) { - result.set(arr, offset); - offset += arr.length; - } - return result; - } } diff --git a/packages/rln/src/zerokit.spec.ts b/packages/rln/src/zerokit.spec.ts new file mode 100644 index 0000000000..6126c7c6ac --- /dev/null +++ b/packages/rln/src/zerokit.spec.ts @@ -0,0 +1,26 @@ +import { expect } from "chai"; + 
+import { RLNInstance } from "./rln.js"; + +describe("@waku/rln", () => { + it("should generate the same membership key if the same seed is provided", async function () { + const rlnInstance = await RLNInstance.create(); + + const seed = "This is a test seed"; + const memKeys1 = rlnInstance.zerokit.generateSeededIdentityCredential(seed); + const memKeys2 = rlnInstance.zerokit.generateSeededIdentityCredential(seed); + + memKeys1.IDCommitment.forEach((element, index) => { + expect(element).to.equal(memKeys2.IDCommitment[index]); + }); + memKeys1.IDNullifier.forEach((element, index) => { + expect(element).to.equal(memKeys2.IDNullifier[index]); + }); + memKeys1.IDSecretHash.forEach((element, index) => { + expect(element).to.equal(memKeys2.IDSecretHash[index]); + }); + memKeys1.IDTrapdoor.forEach((element, index) => { + expect(element).to.equal(memKeys2.IDTrapdoor[index]); + }); + }); +}); diff --git a/packages/rln/src/zerokit.ts b/packages/rln/src/zerokit.ts index a7e7e628f4..47df182f00 100644 --- a/packages/rln/src/zerokit.ts +++ b/packages/rln/src/zerokit.ts @@ -1,11 +1,8 @@ -import type { IRateLimitProof } from "@waku/interfaces"; import * as zerokitRLN from "@waku/zerokit-rln-wasm"; -import { DEFAULT_RATE_LIMIT, RATE_LIMIT_PARAMS } from "./contract/constants.js"; +import { DEFAULT_RATE_LIMIT } from "./contract/constants.js"; import { IdentityCredential } from "./identity.js"; -import { Proof, proofToBytes } from "./proof.js"; import { WitnessCalculator } from "./resources/witness_calculator"; -import { BytesUtils, dateToEpoch, epochIntToBytes } from "./utils/index.js"; export class Zerokit { public constructor( @@ -26,226 +23,13 @@ export class Zerokit { return this._rateLimit; } - public generateIdentityCredentials(): IdentityCredential { - const memKeys = zerokitRLN.generateExtendedMembershipKey(this.zkRLN); // TODO: rename this function in zerokit rln-wasm - return IdentityCredential.fromBytes(memKeys); - } - public generateSeededIdentityCredential(seed: 
string): IdentityCredential { const stringEncoder = new TextEncoder(); const seedBytes = stringEncoder.encode(seed); - // TODO: rename this function in zerokit rln-wasm const memKeys = zerokitRLN.generateSeededExtendedMembershipKey( this.zkRLN, seedBytes ); return IdentityCredential.fromBytes(memKeys); } - - public insertMember(idCommitment: Uint8Array): void { - zerokitRLN.insertMember(this.zkRLN, idCommitment); - } - - public insertMembers( - index: number, - ...idCommitments: Array - ): void { - // serializes a seq of IDCommitments to a byte seq - // the order of serialization is |id_commitment_len<8>|id_commitment| - const idCommitmentLen = BytesUtils.writeUIntLE( - new Uint8Array(8), - idCommitments.length, - 0, - 8 - ); - const idCommitmentBytes = BytesUtils.concatenate( - idCommitmentLen, - ...idCommitments - ); - zerokitRLN.setLeavesFrom(this.zkRLN, index, idCommitmentBytes); - } - - public deleteMember(index: number): void { - zerokitRLN.deleteLeaf(this.zkRLN, index); - } - - public getMerkleRoot(): Uint8Array { - return zerokitRLN.getRoot(this.zkRLN); - } - - public serializeMessage( - uint8Msg: Uint8Array, - memIndex: number, - epoch: Uint8Array, - idKey: Uint8Array, - rateLimit?: number - ): Uint8Array { - // calculate message length - const msgLen = BytesUtils.writeUIntLE( - new Uint8Array(8), - uint8Msg.length, - 0, - 8 - ); - const memIndexBytes = BytesUtils.writeUIntLE( - new Uint8Array(8), - memIndex, - 0, - 8 - ); - const rateLimitBytes = BytesUtils.writeUIntLE( - new Uint8Array(8), - rateLimit ?? 
this.rateLimit, - 0, - 8 - ); - - // [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal | rate_limit<8> ] - return BytesUtils.concatenate( - idKey, - memIndexBytes, - epoch, - msgLen, - uint8Msg, - rateLimitBytes - ); - } - - public async generateRLNProof( - msg: Uint8Array, - index: number, - epoch: Uint8Array | Date | undefined, - idSecretHash: Uint8Array, - rateLimit?: number - ): Promise { - if (epoch === undefined) { - epoch = epochIntToBytes(dateToEpoch(new Date())); - } else if (epoch instanceof Date) { - epoch = epochIntToBytes(dateToEpoch(epoch)); - } - - const effectiveRateLimit = rateLimit ?? this.rateLimit; - - if (epoch.length !== 32) throw new Error("invalid epoch"); - if (idSecretHash.length !== 32) throw new Error("invalid id secret hash"); - if (index < 0) throw new Error("index must be >= 0"); - if ( - effectiveRateLimit < RATE_LIMIT_PARAMS.MIN_RATE || - effectiveRateLimit > RATE_LIMIT_PARAMS.MAX_RATE - ) { - throw new Error( - `Rate limit must be between ${RATE_LIMIT_PARAMS.MIN_RATE} and ${RATE_LIMIT_PARAMS.MAX_RATE}` - ); - } - - const serialized_msg = this.serializeMessage( - msg, - index, - epoch, - idSecretHash, - effectiveRateLimit - ); - const rlnWitness = zerokitRLN.getSerializedRLNWitness( - this.zkRLN, - serialized_msg - ); - const inputs = zerokitRLN.RLNWitnessToJson(this.zkRLN, rlnWitness); - const calculatedWitness = await this.witnessCalculator.calculateWitness( - inputs, - false - ); - - const proofBytes = zerokitRLN.generate_rln_proof_with_witness( - this.zkRLN, - calculatedWitness, - rlnWitness - ); - - return new Proof(proofBytes); - } - - public verifyRLNProof( - proof: IRateLimitProof | Uint8Array, - msg: Uint8Array, - rateLimit?: number - ): boolean { - let pBytes: Uint8Array; - if (proof instanceof Uint8Array) { - pBytes = proof; - } else { - pBytes = proofToBytes(proof); - } - - // calculate message length - const msgLen = BytesUtils.writeUIntLE(new Uint8Array(8), msg.length, 0, 8); - const rateLimitBytes = 
BytesUtils.writeUIntLE( - new Uint8Array(8), - rateLimit ?? this.rateLimit, - 0, - 8 - ); - - return zerokitRLN.verifyRLNProof( - this.zkRLN, - BytesUtils.concatenate(pBytes, msgLen, msg, rateLimitBytes) - ); - } - - public verifyWithRoots( - proof: IRateLimitProof | Uint8Array, - msg: Uint8Array, - roots: Array, - rateLimit?: number - ): boolean { - let pBytes: Uint8Array; - if (proof instanceof Uint8Array) { - pBytes = proof; - } else { - pBytes = proofToBytes(proof); - } - // calculate message length - const msgLen = BytesUtils.writeUIntLE(new Uint8Array(8), msg.length, 0, 8); - const rateLimitBytes = BytesUtils.writeUIntLE( - new Uint8Array(8), - rateLimit ?? this.rateLimit, - 0, - 8 - ); - - const rootsBytes = BytesUtils.concatenate(...roots); - - return zerokitRLN.verifyWithRoots( - this.zkRLN, - BytesUtils.concatenate(pBytes, msgLen, msg, rateLimitBytes), - rootsBytes - ); - } - - public verifyWithNoRoot( - proof: IRateLimitProof | Uint8Array, - msg: Uint8Array, - rateLimit?: number - ): boolean { - let pBytes: Uint8Array; - if (proof instanceof Uint8Array) { - pBytes = proof; - } else { - pBytes = proofToBytes(proof); - } - - // calculate message length - const msgLen = BytesUtils.writeUIntLE(new Uint8Array(8), msg.length, 0, 8); - const rateLimitBytes = BytesUtils.writeUIntLE( - new Uint8Array(8), - rateLimit ?? 
this.rateLimit, - 0, - 8 - ); - - return zerokitRLN.verifyWithRoots( - this.zkRLN, - BytesUtils.concatenate(pBytes, msgLen, msg, rateLimitBytes), - new Uint8Array() - ); - } } diff --git a/packages/sdk/CHANGELOG.md b/packages/sdk/CHANGELOG.md index 72183a85d3..c4bc621f58 100644 --- a/packages/sdk/CHANGELOG.md +++ b/packages/sdk/CHANGELOG.md @@ -47,6 +47,40 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * @waku/interfaces bumped from 0.0.19 to 0.0.20 * @waku/peer-exchange bumped from ^0.0.17 to ^0.0.18 +## [0.0.35](https://github.com/waku-org/js-waku/compare/sdk-v0.0.34...sdk-v0.0.35) (2025-09-20) + + +### Features + +* Add debounce to health indicator ([#2594](https://github.com/waku-org/js-waku/issues/2594)) ([a7f30b1](https://github.com/waku-org/js-waku/commit/a7f30b121143454340aa7b3aeb4f55470905c54d)) +* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448)) +* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b)) +* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8)) +* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f)) +* Introduce reliable channels ([#2526](https://github.com/waku-org/js-waku/issues/2526)) ([4d5c152](https://github.com/waku-org/js-waku/commit/4d5c152f5b1b1c241bbe7bb96d13d927a6f7550e)) +* Query on connect ([#2602](https://github.com/waku-org/js-waku/issues/2602)) 
([8542d04](https://github.com/waku-org/js-waku/commit/8542d04bf5c9472f955ef8c9e5bc9e89c70f4738)) +* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397)) + + +### Bug Fixes + +* (sds) ensure incoming messages have their retrieval hint stored ([#2604](https://github.com/waku-org/js-waku/issues/2604)) ([914beb6](https://github.com/waku-org/js-waku/commit/914beb6531a84f8c11ca951721225d47f9e6c285)) +* Make health events emission consistent ([#2570](https://github.com/waku-org/js-waku/issues/2570)) ([c8dfdb1](https://github.com/waku-org/js-waku/commit/c8dfdb1ace8f0f8f668d8f2bb6e0eaed90041782)) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/core bumped from 0.0.38 to 0.0.39 + * @waku/discovery bumped from 0.0.11 to 0.0.12 + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + * @waku/proto bumped from ^0.0.13 to ^0.0.14 + * @waku/sds bumped from ^0.0.6 to ^0.0.7 + * @waku/utils bumped from 0.0.26 to 0.0.27 + * devDependencies + * @waku/message-encryption bumped from ^0.0.36 to ^0.0.37 + ## [0.0.34](https://github.com/waku-org/js-waku/compare/sdk-v0.0.33...sdk-v0.0.34) (2025-08-14) diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 01a819032d..a72b4025d7 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -1,6 +1,6 @@ { "name": "@waku/sdk", - "version": "0.0.34", + "version": "0.0.35", "description": "A unified SDK for easy creation and management of js-waku nodes.", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -67,22 +67,26 @@ "@libp2p/ping": "2.0.35", "@libp2p/websockets": "9.2.16", "@noble/hashes": "^1.3.3", - "@waku/core": "0.0.38", - "@waku/discovery": "0.0.11", - "@waku/interfaces": "0.0.33", - "@waku/proto": "^0.0.13", - "@waku/utils": "0.0.26", - "libp2p": "2.8.11" + "@types/lodash.debounce": "^4.0.9", + "@waku/core": "0.0.39", + 
"@waku/discovery": "0.0.12", + "@waku/interfaces": "0.0.34", + "@waku/proto": "^0.0.14", + "@waku/sds": "^0.0.7", + "@waku/utils": "0.0.27", + "libp2p": "2.8.11", + "lodash.debounce": "^4.0.8" }, "devDependencies": { "@libp2p/interface": "2.10.4", - "@types/chai": "^4.3.11", "@rollup/plugin-commonjs": "^25.0.7", "@rollup/plugin-json": "^6.0.0", "@rollup/plugin-node-resolve": "^15.2.3", "@rollup/plugin-replace": "^5.0.5", + "@types/chai": "^4.3.11", "@types/mocha": "^10.0.9", "@waku/build-utils": "*", + "@waku/message-encryption": "^0.0.37", "chai": "^5.1.1", "cspell": "^8.6.1", "interface-datastore": "8.3.2", @@ -104,4 +108,4 @@ "LICENSE", "README.md" ] -} \ No newline at end of file +} diff --git a/packages/sdk/src/filter/filter.ts b/packages/sdk/src/filter/filter.ts index 43895fab7c..4d12f8d32d 100644 --- a/packages/sdk/src/filter/filter.ts +++ b/packages/sdk/src/filter/filter.ts @@ -45,6 +45,14 @@ export class Filter implements IFilter { return this.protocol.multicodec; } + public async start(): Promise { + await this.protocol.start(); + } + + public async stop(): Promise { + await this.protocol.stop(); + } + public unsubscribeAll(): void { for (const subscription of this.subscriptions.values()) { subscription.stop(); diff --git a/packages/sdk/src/health_indicator/health_indicator.ts b/packages/sdk/src/health_indicator/health_indicator.ts index 297d4ba652..1e7888fba6 100644 --- a/packages/sdk/src/health_indicator/health_indicator.ts +++ b/packages/sdk/src/health_indicator/health_indicator.ts @@ -7,6 +7,7 @@ import { WakuEvent } from "@waku/interfaces"; import { Logger } from "@waku/utils"; +import debounce from "lodash.debounce"; type PeerEvent = (_event: CustomEvent) => void; @@ -24,10 +25,13 @@ interface IHealthIndicator { } export class HealthIndicator implements IHealthIndicator { + private isStarted = false; + private readonly libp2p: Libp2p; private readonly events: IWakuEventEmitter; private value: HealthStatus = HealthStatus.Unhealthy; + private readonly 
debouncedAssessHealth: ReturnType; public constructor(params: HealthIndicatorParams) { this.libp2p = params.libp2p; @@ -35,9 +39,18 @@ export class HealthIndicator implements IHealthIndicator { this.onPeerIdentify = this.onPeerIdentify.bind(this); this.onPeerDisconnected = this.onPeerDisconnected.bind(this); + + this.debouncedAssessHealth = debounce(() => { + void this.assessHealth(); + }, 100); } public start(): void { + if (this.isStarted) { + return; + } + + this.isStarted = true; log.info("start: adding listeners to libp2p"); this.libp2p.addEventListener( @@ -49,10 +62,15 @@ export class HealthIndicator implements IHealthIndicator { this.onPeerDisconnected as PeerEvent ); - void this.assessHealth(); + this.debouncedAssessHealth(); } public stop(): void { + if (!this.isStarted) { + return; + } + + this.isStarted = false; log.info("stop: removing listeners to libp2p"); this.libp2p.removeEventListener( @@ -63,22 +81,22 @@ export class HealthIndicator implements IHealthIndicator { "peer:disconnect", this.onPeerDisconnected as PeerEvent ); + + this.debouncedAssessHealth.cancel(); } public toValue(): HealthStatus { return this.value; } - private async onPeerDisconnected(_event: CustomEvent): Promise { + private onPeerDisconnected(_event: CustomEvent): void { log.info(`onPeerDisconnected: received libp2p event`); - await this.assessHealth(); + this.debouncedAssessHealth(); } - private async onPeerIdentify( - _event: CustomEvent - ): Promise { + private onPeerIdentify(_event: CustomEvent): void { log.info(`onPeerIdentify: received libp2p event`); - await this.assessHealth(); + this.debouncedAssessHealth(); } private async assessHealth(): Promise { diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 441cba30ea..8c0546b16c 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -17,6 +17,7 @@ export { export { LightPush } from "./light_push/index.js"; export { Filter } from "./filter/index.js"; export { Store } from 
"./store/index.js"; +export * from "./reliable_channel/index.js"; export * as waku from "@waku/core"; export * as utils from "@waku/utils"; diff --git a/packages/sdk/src/light_push/light_push.spec.ts b/packages/sdk/src/light_push/light_push.spec.ts index faabc16459..c0a9d0848d 100644 --- a/packages/sdk/src/light_push/light_push.spec.ts +++ b/packages/sdk/src/light_push/light_push.spec.ts @@ -1,6 +1,11 @@ import { Peer, PeerId } from "@libp2p/interface"; -import { createEncoder, Encoder, LightPushCodec } from "@waku/core"; -import { Libp2p, ProtocolError } from "@waku/interfaces"; +import { + createEncoder, + Encoder, + LightPushCodec, + LightPushCodecV2 +} from "@waku/core"; +import { Libp2p, LightPushError, LightPushStatusCode } from "@waku/interfaces"; import { createRoutingInfo } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -40,8 +45,8 @@ describe("LightPush SDK", () => { const failures = result.failures ?? []; expect(failures.length).to.be.eq(1); - expect(failures.some((v) => v.error === ProtocolError.NO_PEER_AVAILABLE)).to - .be.true; + expect(failures.some((v) => v.error === LightPushError.NO_PEER_AVAILABLE)) + .to.be.true; }); it("should send to specified number of peers of used peers", async () => { @@ -127,6 +132,45 @@ describe("LightPush SDK", () => { expect(result.successes?.length).to.be.eq(1); expect(result.failures?.length).to.be.eq(1); }); + + describe("v3 protocol support", () => { + it("should work with v3 peers", async () => { + libp2p = mockLibp2p({ + peers: [mockV3Peer("1"), mockV3Peer("2")] + }); + }); + + it("should work with mixed v2 and v3 peers", async () => { + libp2p = mockLibp2p({ + peers: [mockV2AndV3Peer("1"), mockPeer("2"), mockV3Peer("3")] + }); + + // Mock responses for different protocol versions + const v3Response = mockV3SuccessResponse(5); + const v2Response = mockV2SuccessResponse(); + const v3ErrorResponse = mockV3ErrorResponse( + LightPushStatusCode.PAYLOAD_TOO_LARGE + 
); + const v2ErrorResponse = mockV2ErrorResponse("Message too large"); + + expect(v3Response.statusCode).to.eq(LightPushStatusCode.SUCCESS); + expect(v3Response.relayPeerCount).to.eq(5); + expect(v2Response.isSuccess).to.be.true; + expect(v3ErrorResponse.statusCode).to.eq( + LightPushStatusCode.PAYLOAD_TOO_LARGE + ); + expect(v2ErrorResponse.isSuccess).to.be.false; + }); + + it("should handle v3 RLN errors", async () => { + const v3RLNError = mockV3RLNErrorResponse(); + const v2RLNError = mockV2RLNErrorResponse(); + + expect(v3RLNError.statusCode).to.eq(LightPushStatusCode.NO_RLN_PROOF); + expect(v3RLNError.statusDesc).to.include("RLN proof generation failed"); + expect(v2RLNError.info).to.include("RLN proof generation failed"); + }); + }); }); type MockLibp2pOptions = { @@ -136,7 +180,16 @@ type MockLibp2pOptions = { function mockLibp2p(options?: MockLibp2pOptions): Libp2p { const peers = options?.peers || []; const peerStore = { - get: (id: any) => Promise.resolve(peers.find((p) => p.id === id)) + get: (id: any) => { + const peer = peers.find((p) => p.id === id); + if (peer) { + return Promise.resolve({ + ...peer, + protocols: peer.protocols || [LightPushCodec] + }); + } + return Promise.resolve(undefined); + } }; return { @@ -179,9 +232,92 @@ function mockLightPush(options: MockLightPushOptions): LightPush { return lightPush; } -function mockPeer(id: string): Peer { +function mockPeer(id: string, protocols: string[] = [LightPushCodec]): Peer { return { - id, - protocols: [LightPushCodec] - } as unknown as Peer; + id: { toString: () => id } as PeerId, + protocols: protocols, + metadata: new Map(), + addresses: [], + tags: new Map() + }; +} + +// V3-specific mock functions +function mockV3Peer(id: string): Peer { + return mockPeer(id, [LightPushCodec]); +} + +function mockV2AndV3Peer(id: string): Peer { + return mockPeer(id, [LightPushCodec, LightPushCodecV2]); +} + +function mockV3SuccessResponse(relayPeerCount?: number): { + statusCode: LightPushStatusCode; + 
statusDesc: string; + relayPeerCount?: number; + isSuccess: boolean; +} { + return { + statusCode: LightPushStatusCode.SUCCESS, + statusDesc: "Message sent successfully", + relayPeerCount, + isSuccess: true + }; +} + +function mockV3ErrorResponse( + statusCode: LightPushStatusCode, + statusDesc?: string +): { + statusCode: LightPushStatusCode; + statusDesc: string; + isSuccess: boolean; +} { + return { + statusCode, + statusDesc: statusDesc || "Error occurred", + isSuccess: false + }; +} + +function mockV2SuccessResponse(): { + isSuccess: boolean; + info: string; +} { + return { + isSuccess: true, + info: "Message sent successfully" + }; +} + +function mockV2ErrorResponse(info?: string): { + isSuccess: boolean; + info: string; +} { + return { + isSuccess: false, + info: info || "Error occurred" + }; +} + +function mockV3RLNErrorResponse(): { + statusCode: LightPushStatusCode; + statusDesc: string; + isSuccess: boolean; +} { + return { + statusCode: LightPushStatusCode.NO_RLN_PROOF, + statusDesc: "RLN proof generation failed", + isSuccess: false + }; +} + +function mockV2RLNErrorResponse(): { + isSuccess: boolean; + info: string; +} { + return { + isSuccess: false, + info: "RLN proof generation failed" + }; } diff --git a/packages/sdk/src/light_push/light_push.ts b/packages/sdk/src/light_push/light_push.ts index 947ce1528b..669c77e38c 100644 --- a/packages/sdk/src/light_push/light_push.ts +++ b/packages/sdk/src/light_push/light_push.ts @@ -1,17 +1,17 @@ import type { PeerId } from "@libp2p/interface"; import { LightPushCore } from "@waku/core"; import { - type CoreProtocolResult, - Failure, type IEncoder, ILightPush, type IMessage, type ISendOptions, type Libp2p, + LightPushCoreResult, + LightPushError, + LightPushFailure, type LightPushProtocolOptions, - ProtocolError, - Protocols, - SDKProtocolResult + LightPushSDKResult, + Protocols } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -55,7 +55,7 @@ export class LightPush implements ILightPush { 
}); } - public get multicodec(): string { + public get multicodec(): string[] { return this.protocol.multicodec; } @@ -71,8 +71,9 @@ export class LightPush implements ILightPush { encoder: IEncoder, message: IMessage, options: ISendOptions = {} - ): Promise { + ): Promise { options = { + useLegacy: false, ...this.config, ...options }; @@ -82,45 +83,48 @@ export class LightPush implements ILightPush { log.info("send: attempting to send a message to pubsubTopic:", pubsubTopic); const peerIds = await this.peerManager.getPeers({ - protocol: Protocols.LightPush, + protocol: options.useLegacy ? "light-push-v2" : Protocols.LightPush, pubsubTopic: encoder.pubsubTopic }); - const coreResults: CoreProtocolResult[] = + const coreResults = peerIds?.length > 0 ? await Promise.all( peerIds.map((peerId) => - this.protocol.send(encoder, message, peerId).catch((_e) => ({ - success: null, - failure: { - error: ProtocolError.GENERIC_FAIL - } - })) + this.protocol + .send(encoder, message, peerId, options.useLegacy) + .catch((_e) => ({ + success: null, + failure: { + error: LightPushError.GENERIC_FAIL + } + })) ) ) : []; - const results: SDKProtocolResult = coreResults.length + const results: LightPushSDKResult = coreResults.length ? 
{ successes: coreResults .filter((v) => v.success) .map((v) => v.success) as PeerId[], failures: coreResults .filter((v) => v.failure) - .map((v) => v.failure) as Failure[] + .map((v) => v.failure) as LightPushFailure[] } : { successes: [], failures: [ { - error: ProtocolError.NO_PEER_AVAILABLE + error: LightPushError.NO_PEER_AVAILABLE } ] }; if (options.autoRetry && results.successes.length === 0) { - const sendCallback = (peerId: PeerId): Promise => - this.protocol.send(encoder, message, peerId); + const sendCallback = (peerId: PeerId): Promise => + this.protocol.send(encoder, message, peerId, options.useLegacy); + this.retryManager.push( sendCallback.bind(this), options.maxAttempts || DEFAULT_MAX_ATTEMPTS, diff --git a/packages/sdk/src/light_push/retry_manager.spec.ts b/packages/sdk/src/light_push/retry_manager.spec.ts index c9eb95eea8..4ac5f9972e 100644 --- a/packages/sdk/src/light_push/retry_manager.spec.ts +++ b/packages/sdk/src/light_push/retry_manager.spec.ts @@ -1,6 +1,7 @@ import type { PeerId } from "@libp2p/interface"; import { - type CoreProtocolResult, + type LightPushCoreResult, + LightPushError, ProtocolError, Protocols } from "@waku/interfaces"; @@ -59,7 +60,7 @@ describe("RetryManager", () => { it("should process tasks in queue", async () => { const successCallback = sinon.spy( - async (peerId: PeerId): Promise => ({ + async (peerId: PeerId): Promise => ({ success: peerId, failure: null }) @@ -112,9 +113,9 @@ describe("RetryManager", () => { it("should retry failed tasks", async () => { const failingCallback = sinon.spy( - async (): Promise => ({ + async (): Promise => ({ success: null, - failure: { error: "test error" as any } + failure: { error: LightPushError.GENERIC_FAIL } }) ); @@ -135,7 +136,7 @@ describe("RetryManager", () => { }); it("should request peer renewal on specific errors", async () => { - const errorCallback = sinon.spy(async (): Promise => { + const errorCallback = sinon.spy(async (): Promise => { throw new 
Error(ProtocolError.NO_PEER_AVAILABLE); }); @@ -155,7 +156,7 @@ describe("RetryManager", () => { }); it("should handle task timeouts", async () => { - const slowCallback = sinon.spy(async (): Promise => { + const slowCallback = sinon.spy(async (): Promise => { await new Promise((resolve) => setTimeout(resolve, 15000)); return { success: mockPeerId, failure: null }; }); @@ -174,9 +175,11 @@ describe("RetryManager", () => { }); it("should not execute task if max attempts is 0", async () => { - const failingCallback = sinon.spy(async (): Promise => { - throw new Error("test error" as any); - }); + const failingCallback = sinon.spy( + async (): Promise => { + throw new Error("test error" as any); + } + ); const task = { callback: failingCallback, @@ -209,7 +212,7 @@ describe("RetryManager", () => { called++; return Promise.resolve({ success: null, - failure: { error: ProtocolError.GENERIC_FAIL } + failure: { error: LightPushError.GENERIC_FAIL } }); }); retryManager.push(failCallback, 2, TestRoutingInfo); diff --git a/packages/sdk/src/light_push/retry_manager.ts b/packages/sdk/src/light_push/retry_manager.ts index 0fc156efe4..3363149670 100644 --- a/packages/sdk/src/light_push/retry_manager.ts +++ b/packages/sdk/src/light_push/retry_manager.ts @@ -1,7 +1,7 @@ import type { PeerId } from "@libp2p/interface"; import { - type CoreProtocolResult, type IRoutingInfo, + type LightPushCoreResult, Protocols } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -15,7 +15,7 @@ type RetryManagerConfig = { peerManager: PeerManager; }; -type AttemptCallback = (peerId: PeerId) => Promise; +type AttemptCallback = (peerId: PeerId) => Promise; export type ScheduledTask = { maxAttempts: number; @@ -123,7 +123,13 @@ export class RetryManager { task.callback(peerId) ]); - if (response?.failure) { + // If timeout resolves first, response will be void (undefined) + // In this case, we should treat it as a timeout error + if (response === undefined) { + throw new Error("Task 
timeout"); + } + + if (response.failure) { throw Error(response.failure.error); } diff --git a/packages/sdk/src/light_push/utils.ts b/packages/sdk/src/light_push/utils.ts index 85afd07a39..a011c84a4f 100644 --- a/packages/sdk/src/light_push/utils.ts +++ b/packages/sdk/src/light_push/utils.ts @@ -1,13 +1,13 @@ -import { ProtocolError } from "@waku/interfaces"; +import { LightPushError } from "@waku/interfaces"; export const shouldPeerBeChanged = ( - failure: string | ProtocolError + failure: string | LightPushError ): boolean => { const toBeChanged = - failure === ProtocolError.REMOTE_PEER_REJECTED || - failure === ProtocolError.NO_RESPONSE || - failure === ProtocolError.RLN_PROOF_GENERATION || - failure === ProtocolError.NO_PEER_AVAILABLE; + failure === LightPushError.REMOTE_PEER_REJECTED || + failure === LightPushError.NO_RESPONSE || + failure === LightPushError.RLN_PROOF_GENERATION || + failure === LightPushError.NO_PEER_AVAILABLE; if (toBeChanged) { return true; diff --git a/packages/sdk/src/peer_manager/peer_manager.spec.ts b/packages/sdk/src/peer_manager/peer_manager.spec.ts index 60174a9078..f4d92301db 100644 --- a/packages/sdk/src/peer_manager/peer_manager.spec.ts +++ b/packages/sdk/src/peer_manager/peer_manager.spec.ts @@ -85,7 +85,8 @@ describe("PeerManager", () => { _clusterId: ClusterId, _shardId: ShardId ) => true, - isPeerOnTopic: async (_id: PeerId, _topic: string) => true + isPeerOnTopic: async (_id: PeerId, _topic: string) => true, + hasShardInfo: async (_id: PeerId) => true } as unknown as IConnectionManager; peerManager = new PeerManager({ libp2p, diff --git a/packages/sdk/src/peer_manager/peer_manager.ts b/packages/sdk/src/peer_manager/peer_manager.ts index 5b62292b24..cc378558b3 100644 --- a/packages/sdk/src/peer_manager/peer_manager.ts +++ b/packages/sdk/src/peer_manager/peer_manager.ts @@ -4,7 +4,12 @@ import { PeerId, TypedEventEmitter } from "@libp2p/interface"; -import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core"; +import 
{ + FilterCodecs, + LightPushCodec, + LightPushCodecV2, + StoreCodec +} from "@waku/core"; import { CONNECTION_LOCKED_TAG, type IConnectionManager, @@ -28,8 +33,10 @@ type PeerManagerParams = { connectionManager: IConnectionManager; }; +type SupportedProtocols = Protocols | "light-push-v2"; + type GetPeersParams = { - protocol: Protocols; + protocol: SupportedProtocols; pubsubTopic: string; }; @@ -119,7 +126,7 @@ export class PeerManager { for (const peer of connectedPeers) { const hasProtocol = this.hasPeerProtocol(peer, params.protocol); - const hasSamePubsub = await this.connectionManager.isPeerOnTopic( + const hasSamePubsub = await this.isPeerOnPubsub( peer.id, params.pubsubTopic ); @@ -204,12 +211,19 @@ export class PeerManager { private async onConnected(event: CustomEvent): Promise { const result = event.detail; - if ( - result.protocols.includes(this.matchProtocolToCodec(Protocols.Filter)) - ) { + + const isFilterPeer = result.protocols.includes( + this.getProtocolCodecs(Protocols.Filter) + ); + const isStorePeer = result.protocols.includes( + this.getProtocolCodecs(Protocols.Store) + ); + + if (isFilterPeer) { this.dispatchFilterPeerConnect(result.peerId); } - if (result.protocols.includes(this.matchProtocolToCodec(Protocols.Store))) { + + if (isStorePeer) { this.dispatchStorePeerConnect(result.peerId); } } @@ -230,8 +244,8 @@ export class PeerManager { } } - private hasPeerProtocol(peer: Peer, protocol: Protocols): boolean { - return peer.protocols.includes(this.matchProtocolToCodec(protocol)); + private hasPeerProtocol(peer: Peer, protocol: SupportedProtocols): boolean { + return peer.protocols.includes(this.getProtocolCodecs(protocol)); } private lockPeer(id: PeerId): void { @@ -289,14 +303,18 @@ export class PeerManager { ); } - private matchProtocolToCodec(protocol: Protocols): string { - const protocolToCodec = { + private getProtocolCodecs(protocol: SupportedProtocols): string { + if (protocol === Protocols.Relay) { + throw new Error("Relay protocol 
is not supported"); + } + + const protocolToCodecs = { [Protocols.Filter]: FilterCodecs.SUBSCRIBE, [Protocols.LightPush]: LightPushCodec, [Protocols.Store]: StoreCodec, - [Protocols.Relay]: "" + "light-push-v2": LightPushCodecV2 }; - return protocolToCodec[protocol]; + return protocolToCodecs[protocol]; } } diff --git a/packages/sdk/src/query_on_connect/query_on_connect.spec.ts b/packages/sdk/src/query_on_connect/query_on_connect.spec.ts index cdbc6f2da6..b87caa5ce7 100644 --- a/packages/sdk/src/query_on_connect/query_on_connect.spec.ts +++ b/packages/sdk/src/query_on_connect/query_on_connect.spec.ts @@ -95,6 +95,7 @@ describe("QueryOnConnect", () => { it("should create QueryOnConnect instance with all required parameters", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -108,6 +109,7 @@ describe("QueryOnConnect", () => { it("should create QueryOnConnect instance without options", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator @@ -120,6 +122,7 @@ describe("QueryOnConnect", () => { it("should accept empty decoders array", () => { queryOnConnect = new QueryOnConnect( [], + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -134,6 +137,7 @@ describe("QueryOnConnect", () => { beforeEach(() => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -173,6 +177,7 @@ describe("QueryOnConnect", () => { beforeEach(() => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -224,6 +229,7 @@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -276,6 +282,7 
@@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -298,6 +305,7 @@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -320,6 +328,7 @@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -391,6 +400,7 @@ describe("QueryOnConnect", () => { const queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -418,6 +428,7 @@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -443,6 +454,7 @@ describe("QueryOnConnect", () => { let resolveMessageEvent: (messages: IDecodedMessage[]) => void; let rejectMessageEvent: (reason: string) => void; let connectStoreEvent: CustomEvent; + let timeoutId: NodeJS.Timeout; beforeEach(() => { // Create a promise that resolves when a message event is emitted @@ -472,6 +484,7 @@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -482,6 +495,7 @@ describe("QueryOnConnect", () => { queryOnConnect.addEventListener( QueryOnConnectEvent.MessagesRetrieved, (event: CustomEvent) => { + clearTimeout(timeoutId); resolveMessageEvent(event.detail); } ); @@ -491,12 +505,16 @@ describe("QueryOnConnect", () => { }); // Set a timeout to reject if no message is received - setTimeout( + timeoutId = setTimeout( () => rejectMessageEvent("No message received within timeout"), 500 ); }); + afterEach(() => { + clearTimeout(timeoutId); + }); + it("should emit message when we 
just started and store connect event occurs", async () => { const mockMessage: IDecodedMessage = { hash: utf8ToBytes("1234"), @@ -599,6 +617,7 @@ describe("QueryOnConnect", () => { queryOnConnect = new QueryOnConnect( mockDecoders, + () => false, mockPeerManagerEventEmitter, mockWakuEventEmitter, mockQueryGenerator, @@ -744,6 +763,248 @@ describe("QueryOnConnect", () => { expect(mockQueryGenerator.calledTwice).to.be.true; }); }); + + describe("stopIfTrue predicate", () => { + beforeEach(() => { + mockPeerManagerEventEmitter.addEventListener = sinon.stub(); + mockWakuEventEmitter.addEventListener = sinon.stub(); + }); + + it("should stop query iteration when stopIfTrue returns true", async () => { + const messages = [ + { + hash: new Uint8Array(), + hashStr: "msg1", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([1]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: new Uint8Array(), + hashStr: "stop-hash", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([2]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: new Uint8Array(), + hashStr: "msg3", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([3]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + } + ]; + + // Setup generator to yield 3 pages, stop should occur on page 2 + const mockAsyncGenerator = async function* (): AsyncGenerator< + Promise[] + > { + yield [Promise.resolve(messages[0])]; + yield [Promise.resolve(messages[1])]; + yield [Promise.resolve(messages[2])]; + }; + mockQueryGenerator.returns(mockAsyncGenerator()); + + const stopPredicate = (msg: IDecodedMessage): boolean => + msg.hashStr === "stop-hash"; + + queryOnConnect = new 
QueryOnConnect( + mockDecoders, + stopPredicate, + mockPeerManagerEventEmitter, + mockWakuEventEmitter, + mockQueryGenerator, + options + ); + + const receivedMessages: IDecodedMessage[] = []; + queryOnConnect.addEventListener( + QueryOnConnectEvent.MessagesRetrieved, + (event: CustomEvent) => { + receivedMessages.push(...event.detail); + } + ); + + queryOnConnect.start(); + await queryOnConnect["maybeQuery"](mockPeerId); + + // Should have received messages from first 2 pages only + expect(receivedMessages).to.have.length(2); + expect(receivedMessages[0].hashStr).to.equal("msg1"); + expect(receivedMessages[1].hashStr).to.equal("stop-hash"); + }); + + it("should process all pages when stopIfTrue never returns true", async () => { + const messages = [ + { + hash: new Uint8Array(), + hashStr: "msg1", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([1]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: new Uint8Array(), + hashStr: "msg2", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([2]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: new Uint8Array(), + hashStr: "msg3", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([3]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + } + ]; + + const mockAsyncGenerator = async function* (): AsyncGenerator< + Promise[] + > { + yield [Promise.resolve(messages[0])]; + yield [Promise.resolve(messages[1])]; + yield [Promise.resolve(messages[2])]; + }; + mockQueryGenerator.returns(mockAsyncGenerator()); + + const stopPredicate = (): boolean => false; + + queryOnConnect = new QueryOnConnect( + mockDecoders, + stopPredicate, + mockPeerManagerEventEmitter, + 
mockWakuEventEmitter, + mockQueryGenerator, + options + ); + + const receivedMessages: IDecodedMessage[] = []; + queryOnConnect.addEventListener( + QueryOnConnectEvent.MessagesRetrieved, + (event: CustomEvent) => { + receivedMessages.push(...event.detail); + } + ); + + queryOnConnect.start(); + await queryOnConnect["maybeQuery"](mockPeerId); + + // Should have received all 3 messages + expect(receivedMessages).to.have.length(3); + }); + + it("should stop on first message of a page if stopIfTrue matches", async () => { + const messages = [ + { + hash: new Uint8Array(), + hashStr: "stop-hash", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([1]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: new Uint8Array(), + hashStr: "msg2", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([2]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: new Uint8Array(), + hashStr: "msg3", + version: 1, + timestamp: new Date(), + contentTopic: "/test/1/content", + pubsubTopic: "/waku/2/default-waku/proto", + payload: new Uint8Array([3]), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + } + ]; + + const mockAsyncGenerator = async function* (): AsyncGenerator< + Promise[] + > { + yield [ + Promise.resolve(messages[0]), + Promise.resolve(messages[1]), + Promise.resolve(messages[2]) + ]; + }; + mockQueryGenerator.returns(mockAsyncGenerator()); + + const stopPredicate = (msg: IDecodedMessage): boolean => + msg.hashStr === "stop-hash"; + + queryOnConnect = new QueryOnConnect( + mockDecoders, + stopPredicate, + mockPeerManagerEventEmitter, + mockWakuEventEmitter, + mockQueryGenerator, + options + ); + + const receivedMessages: IDecodedMessage[] = []; + queryOnConnect.addEventListener( + 
QueryOnConnectEvent.MessagesRetrieved, + (event: CustomEvent) => { + receivedMessages.push(...event.detail); + } + ); + + queryOnConnect.start(); + await queryOnConnect["maybeQuery"](mockPeerId); + + // Should have received all 3 messages from the page, even though first matched + expect(receivedMessages).to.have.length(3); + expect(receivedMessages[0].hashStr).to.equal("stop-hash"); + expect(receivedMessages[1].hashStr).to.equal("msg2"); + expect(receivedMessages[2].hashStr).to.equal("msg3"); + }); + }); }); describe("calculateTimeRange", () => { diff --git a/packages/sdk/src/query_on_connect/query_on_connect.ts b/packages/sdk/src/query_on_connect/query_on_connect.ts index f42c2ada91..da9e78a763 100644 --- a/packages/sdk/src/query_on_connect/query_on_connect.ts +++ b/packages/sdk/src/query_on_connect/query_on_connect.ts @@ -17,7 +17,7 @@ import { const log = new Logger("sdk:query-on-connect"); export const DEFAULT_FORCE_QUERY_THRESHOLD_MS = 5 * 60 * 1000; // 5 minutes -export const MAX_TIME_RANGE_QUERY_MS = 24 * 60 * 60 * 1000; // 24 hours +export const MAX_TIME_RANGE_QUERY_MS = 30 * 24 * 60 * 60 * 1000; // 30 days (queries are split) export interface QueryOnConnectOptions { /** @@ -54,6 +54,7 @@ export class QueryOnConnect< public constructor( public decoders: IDecoder[], + public stopIfTrue: (msg: T) => boolean, private readonly peerManagerEventEmitter: TypedEventEmitter, private readonly wakuEventEmitter: IWakuEventEmitter, private readonly _queryGenerator: ( @@ -125,8 +126,13 @@ export class QueryOnConnect< const messages = (await Promise.all(page)).filter( (m) => m !== undefined ); + const stop = messages.some((msg: T) => this.stopIfTrue(msg)); // Bundle the messages to help batch process by sds this.dispatchMessages(messages); + + if (stop) { + break; + } } // Didn't throw, so it didn't fail diff --git a/packages/sdk/src/reliable_channel/events.ts b/packages/sdk/src/reliable_channel/events.ts new file mode 100644 index 0000000000..c79c2c0c0f --- /dev/null 
+++ b/packages/sdk/src/reliable_channel/events.ts @@ -0,0 +1,66 @@ +import { IDecodedMessage, ProtocolError } from "@waku/interfaces"; +import type { HistoryEntry, MessageId } from "@waku/sds"; + +export const ReliableChannelEvent = { + /** + * The message is being sent over the wire. + * + * This event may be emitted several times if the retry mechanism kicks in. + */ + SendingMessage: "sending-message", + /** + * The message has been sent over the wire but has not been acknowledged by + * any other party yet. + * + * We are now waiting for acknowledgements. + * + * This event may be emitted several times if the + * retry mechanism kicks in. + */ + MessageSent: "message-sent", + /** + * A received bloom filter seems to indicate that the message was received + * by another party. + * + * However, this is probabilistic. The retry mechanism will wait a bit longer + * before trying to send the message again. + */ + MessagePossiblyAcknowledged: "message-possibly-acknowledged", + /** + * The message was fully acknowledged by other members of the channel + */ + MessageAcknowledged: "message-acknowledged", + /** + * It was not possible to send the message due to a non-recoverable error, + * most likely an internal error for a developer to resolve. + */ + SendingMessageIrrecoverableError: "sending-message-irrecoverable-error", + /** + * A new message has been received. + */ + MessageReceived: "message-received", + /** + * We are aware of a missing message but failed to retrieve it successfully. + */ + IrretrievableMessage: "irretrievable-message" +}; + +export type ReliableChannelEvent = + (typeof ReliableChannelEvent)[keyof typeof ReliableChannelEvent]; + +export interface ReliableChannelEvents { + "sending-message": CustomEvent; + "message-sent": CustomEvent; + "message-possibly-acknowledged": CustomEvent<{ + messageId: MessageId; + possibleAckCount: number; + }>; + "message-acknowledged": CustomEvent; + // TODO probably T extends IDecodedMessage?
+ "message-received": CustomEvent; + "irretrievable-message": CustomEvent; + "sending-message-irrecoverable-error": CustomEvent<{ + messageId: MessageId; + error: ProtocolError; + }>; +} diff --git a/packages/sdk/src/reliable_channel/index.ts b/packages/sdk/src/reliable_channel/index.ts new file mode 100644 index 0000000000..60622414bf --- /dev/null +++ b/packages/sdk/src/reliable_channel/index.ts @@ -0,0 +1,2 @@ +export { ReliableChannel, ReliableChannelOptions } from "./reliable_channel.js"; +export { ReliableChannelEvents, ReliableChannelEvent } from "./events.js"; diff --git a/packages/sdk/src/reliable_channel/missing_message_retriever.ts b/packages/sdk/src/reliable_channel/missing_message_retriever.ts new file mode 100644 index 0000000000..f5f1cb3503 --- /dev/null +++ b/packages/sdk/src/reliable_channel/missing_message_retriever.ts @@ -0,0 +1,78 @@ +import type { + IDecodedMessage, + IDecoder, + QueryRequestParams +} from "@waku/interfaces"; +import type { MessageId } from "@waku/sds"; +import { Logger } from "@waku/utils"; + +const log = new Logger("sdk:missing-message-retriever"); + +const DEFAULT_RETRIEVE_FREQUENCY_MS = 10 * 1000; // 10 seconds + +export class MissingMessageRetriever { + private retrieveInterval: ReturnType | undefined; + private missingMessages: Map>; // Waku Message Ids + + public constructor( + private readonly decoder: IDecoder, + private readonly retrieveFrequencyMs: number = DEFAULT_RETRIEVE_FREQUENCY_MS, + private readonly _retrieve: ( + decoders: IDecoder[], + options?: Partial + ) => AsyncGenerator[]>, + private readonly onMessageRetrieved?: (message: T) => Promise + ) { + this.missingMessages = new Map(); + } + + public start(): void { + if (this.retrieveInterval) { + clearInterval(this.retrieveInterval); + } + if (this.retrieveFrequencyMs !== 0) { + log.info(`start retrieve loop every ${this.retrieveFrequencyMs}ms`); + this.retrieveInterval = setInterval(() => { + void this.retrieveMissingMessage(); + }, 
this.retrieveFrequencyMs); + } + } + + public stop(): void { + if (this.retrieveInterval) { + clearInterval(this.retrieveInterval); + } + } + + public addMissingMessage( + messageId: MessageId, + retrievalHint: Uint8Array + ): void { + if (!this.missingMessages.has(messageId)) { + log.info("missing message notice", messageId, retrievalHint); + this.missingMessages.set(messageId, retrievalHint); + } + } + + public removeMissingMessage(messageId: MessageId): void { + if (this.missingMessages.has(messageId)) { + this.missingMessages.delete(messageId); + } + } + + private async retrieveMissingMessage(): Promise { + if (this.missingMessages.size) { + const messageHashes = Array.from(this.missingMessages.values()); + log.info("attempting to retrieve missing message", messageHashes.length); + for await (const page of this._retrieve([this.decoder], { + messageHashes + })) { + for await (const msg of page) { + if (msg && this.onMessageRetrieved) { + await this.onMessageRetrieved(msg); + } + } + } + } + } +} diff --git a/packages/sdk/src/reliable_channel/reliable_channel.spec.ts b/packages/sdk/src/reliable_channel/reliable_channel.spec.ts new file mode 100644 index 0000000000..ad69d35009 --- /dev/null +++ b/packages/sdk/src/reliable_channel/reliable_channel.spec.ts @@ -0,0 +1,1133 @@ +import { PeerId, TypedEventEmitter } from "@libp2p/interface"; +import { createDecoder, createEncoder } from "@waku/core"; +import { + AutoSharding, + HealthStatus, + IDecodedMessage, + IDecoder, + IEncoder, + type IMessage, + ISendOptions, + IWaku, + LightPushError, + LightPushSDKResult, + QueryRequestParams +} from "@waku/interfaces"; +import { ContentMessage, SyncMessage } from "@waku/sds"; +import { + createRoutingInfo, + delay, + MockWakuEvents, + MockWakuNode +} from "@waku/utils"; +import { bytesToUtf8, hexToBytes, utf8ToBytes } from "@waku/utils/bytes"; +import { expect } from "chai"; +import { beforeEach, describe } from "mocha"; +import sinon from "sinon"; + +import { ReliableChannel 
} from "./index.js"; + +const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto"; +const TEST_NETWORK_CONFIG: AutoSharding = { + clusterId: 0, + numShardsInCluster: 1 +}; +const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, { + contentTopic: TEST_CONTENT_TOPIC +}); + +describe("Reliable Channel", () => { + let mockWakuNode: IWaku; + let encoder: IEncoder; + let decoder: IDecoder; + + beforeEach(async () => { + mockWakuNode = new MockWakuNode(); + encoder = createEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO + }); + decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO); + }); + + it("Outgoing message is emitted as sending", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("message in channel"); + + // Setting up message tracking + const messageId = reliableChannel.send(message); + let messageSending = false; + reliableChannel.addEventListener("sending-message", (event) => { + if (event.detail === messageId) { + messageSending = true; + } + }); + + while (!messageSending) { + await delay(50); + } + + expect(messageSending).to.be.true; + }); + + it("Outgoing message is emitted as sent", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("message in channel"); + + const messageId = reliableChannel.send(message); + + // Setting up message tracking + let messageSent = false; + reliableChannel.addEventListener("message-sent", (event) => { + if (event.detail === messageId) { + messageSent = true; + } + }); + + while (!messageSent) { + await delay(50); + } + + expect(messageSent).to.be.true; + }); + + it("Encoder error raises irrecoverable error", async () => { + mockWakuNode.lightPush!.send = ( + _encoder: IEncoder, + _message: IMessage, + _sendOptions?: ISendOptions + ): Promise => 
{ + return Promise.resolve({ + failures: [{ error: LightPushError.EMPTY_PAYLOAD }], + successes: [] + }); + }; + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("payload doesnt matter"); + + encoder.contentTopic = "..."; + const messageId = reliableChannel.send(message); + + // Setting up message tracking + let irrecoverableError = false; + reliableChannel.addEventListener( + "sending-message-irrecoverable-error", + (event) => { + if (event.detail.messageId === messageId) { + irrecoverableError = true; + } + } + ); + + while (!irrecoverableError) { + await delay(50); + } + + expect(irrecoverableError).to.be.true; + }); + + it("Outgoing message is not emitted as acknowledged from own outgoing messages", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("first message in channel"); + + // Setting up message tracking + const messageId = ReliableChannel.getMessageId(message); + let messageAcknowledged = false; + reliableChannel.addEventListener("message-acknowledged", (event) => { + if (event.detail === messageId) { + messageAcknowledged = true; + } + }); + + reliableChannel.send(message); + + // Sending a second message from the same node should not acknowledge the first one + reliableChannel.send(utf8ToBytes("second message in channel")); + + expect(messageAcknowledged).to.be.false; + }); + + // TODO: https://github.com/waku-org/js-waku/issues/2648 + it.skip("Outgoing message is possibly acknowledged", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder + ); + const 
reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + // Bob only includes one message in causal history + { causalHistorySize: 1 } + ); + + const messages = ["first", "second", "third"].map((m) => { + return utf8ToBytes(m); + }); + + // Alice sets up message tracking for first message + const firstMessageId = ReliableChannel.getMessageId(messages[0]); + let firstMessagePossiblyAcknowledged = false; + reliableChannelAlice.addEventListener( + "message-possibly-acknowledged", + (event) => { + if (event.detail.messageId === firstMessageId) { + firstMessagePossiblyAcknowledged = true; + } + } + ); + + let messageReceived = false; + reliableChannelBob.addEventListener("message-received", (event) => { + if (bytesToUtf8(event.detail.payload) === "third") { + messageReceived = true; + } + }); + + for (const m of messages) { + reliableChannelAlice.send(m); + } + + // Wait for Bob to receive last message to ensure it is all included in filter + while (!messageReceived) { + await delay(50); + } + + // Bobs sends a message now, it should include first one in bloom filter + reliableChannelBob.send(utf8ToBytes("message back")); + while (!firstMessagePossiblyAcknowledged) { + await delay(50); + } + + expect(firstMessagePossiblyAcknowledged).to.be.true; + }); + + it("Outgoing message is acknowledged", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder + ); + + const message = utf8ToBytes("first message in channel"); + + const messageId = reliableChannelAlice.send(message); + + // Alice sets up message tracking + let 
messageAcknowledged = false; + reliableChannelAlice.addEventListener("message-acknowledged", (event) => { + if (event.detail === messageId) { + messageAcknowledged = true; + } + }); + + let bobReceivedMessage = false; + reliableChannelBob.addEventListener("message-received", () => { + bobReceivedMessage = true; + }); + + // Wait for bob to receive the message to ensure it's included in causal history + while (!bobReceivedMessage) { + await delay(50); + } + + // Bobs sends a message now, it should include first one in causal history + reliableChannelBob.send(utf8ToBytes("second message in channel")); + while (!messageAcknowledged) { + await delay(50); + } + + expect(messageAcknowledged).to.be.true; + }); + + it("Incoming message is emitted as received", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + let receivedMessage: IDecodedMessage; + reliableChannel.addEventListener("message-received", (event) => { + receivedMessage = event.detail; + }); + + const message = utf8ToBytes("message in channel"); + + reliableChannel.send(message); + while (!receivedMessage!) 
{ + await delay(50); + } + + expect(bytesToUtf8(receivedMessage!.payload)).to.eq(bytesToUtf8(message)); + }); + + describe("Retries", () => { + it("Outgoing message is retried until acknowledged", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder, + { + retryIntervalMs: 200, // faster for a quick test, + processTaskMinElapseMs: 10 // faster so it process message as soon as they arrive + } + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + { + syncMinIntervalMs: 0, // do not send sync messages automatically + maxRetryAttempts: 0 // This one does not perform retries + } + ); + + const msgTxt = "first message in channel"; + const message = utf8ToBytes(msgTxt); + + // Let's count how many times Bob receives Alice's message + let messageCount = 0; + reliableChannelBob.addEventListener("message-received", (event) => { + if (bytesToUtf8(event.detail.payload) === msgTxt) { + messageCount++; + } + }); + + reliableChannelAlice.send(message); + + while (messageCount < 1) { + await delay(10); + } + expect(messageCount).to.equal(1, "Bob received Alice's message once"); + + // No response from Bob should trigger a retry from Alice + while (messageCount < 2) { + await delay(10); + } + expect(messageCount).to.equal(2, "retried once"); + + // Bobs sends a message now, it should include first one in causal history + reliableChannelBob.send(utf8ToBytes("second message in channel")); + + // Wait long enough to confirm no retry is executed + await delay(300); + + // Alice should have stopped sending + expect(messageCount).to.equal(2, "hasn't retried since it's acked"); + }); + }); + + // the test is failing when run with all tests in 
sdk package + // no clear reason why, skipping for now + // TODO: fix this test https://github.com/waku-org/js-waku/issues/2648 + describe.skip("Missing Message Retrieval", () => { + it("Automatically retrieves missing message", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + + // Setup, Alice first + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder, + { + // disable any automation to better control the test + retryIntervalMs: 0, + syncMinIntervalMs: 0, + retrieveFrequencyMs: 0, + processTaskMinElapseMs: 10 + } + ); + + // Bob is offline, Alice sends a message, this is the message we want + // Bob to receive in this test. + const message = utf8ToBytes("missing message"); + reliableChannelAlice.send(message); + // Wait to be sent + await new Promise((resolve) => { + reliableChannelAlice.addEventListener("message-sent", resolve, { + once: true + }); + }); + + const sdsMessage = new ContentMessage( + ReliableChannel.getMessageId(message), + "MyChannel", + "alice", + [], + 1n, + undefined, + message + ); + + // Now Bob goes online + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + // Stub store.queryGenerator to return a message + const mockMessage = { + payload: sdsMessage.encode() + }; + const queryGeneratorStub = sinon.stub().callsFake(async function* ( + _decoders: IDecoder[], + _options?: Partial + ) { + yield [Promise.resolve(mockMessage as IDecodedMessage)]; + }); + + (mockWakuNodeBob.store as any) = { + queryGenerator: queryGeneratorStub + }; + + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + { + retryIntervalMs: 0, // disable any automation to better control the test + syncMinIntervalMs: 0, + processTaskMinElapseMs: 10, + retrieveFrequencyMs: 100 // quick loop so the test go fast + } + ); + + const 
waitForMessageRetrieved = new Promise((resolve) => { + reliableChannelBob.addEventListener("message-received", (event) => { + if (bytesToUtf8(event.detail.payload) === "missing message") { + resolve(true); + } + }); + + setTimeout(() => { + resolve(false); + }, 1000); + }); + + // Alice sends a sync message, Bob should learn about missing message + // and retrieve it + await reliableChannelAlice["sendSyncMessage"](); + + const messageRetrieved = await waitForMessageRetrieved; + expect(messageRetrieved, "message retrieved").to.be.true; + + // Verify the stub was called once with the right messageHash info + expect(queryGeneratorStub.calledOnce, "query generator called once").to.be + .true; + const callArgs = queryGeneratorStub.getCall(0).args; + expect(callArgs[1]).to.have.property("messageHashes"); + expect(callArgs[1].messageHashes).to.be.an("array"); + }); + }); + + describe("Query On Connect Integration E2E Tests", () => { + let mockWakuNode: MockWakuNode; + let reliableChannel: ReliableChannel; + let encoder: IEncoder; + let decoder: IDecoder; + let mockPeerManagerEvents: TypedEventEmitter; + let queryGeneratorStub: sinon.SinonStub; + let mockPeerId: PeerId; + + beforeEach(async () => { + // Setup mock waku node with store capability + mockWakuNode = new MockWakuNode(); + + // Setup mock peer manager events for QueryOnConnect + mockPeerManagerEvents = new TypedEventEmitter(); + (mockWakuNode as any).peerManager = { + events: mockPeerManagerEvents + }; + + // Setup encoder and decoder + encoder = createEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO + }); + + decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO); + + // Setup store with queryGenerator for QueryOnConnect + queryGeneratorStub = sinon.stub(); + mockWakuNode.store = { + queryGenerator: queryGeneratorStub + } as any; + + mockPeerId = { + toString: () => "QmTestPeerId" + } as unknown as PeerId; + }); + + it("should trigger QueryOnConnect when going offline and 
store peer reconnects", async () => { + // Create a message that will be auto-retrieved + const messageText = "Auto-retrieved message"; + const messagePayload = utf8ToBytes(messageText); + + const sdsMessage = new ContentMessage( + ReliableChannel.getMessageId(messagePayload), + "testChannel", + "testSender", + [], + 1n, + undefined, + messagePayload + ); + + const autoRetrievedMessage: IDecodedMessage = { + hash: hexToBytes("1234"), + hashStr: "1234", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessage.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }; + + // Setup queryGenerator to return the auto-retrieved message + queryGeneratorStub.callsFake(async function* () { + yield [Promise.resolve(autoRetrievedMessage)]; + }); + + // Create ReliableChannel with queryOnConnect enabled + reliableChannel = await ReliableChannel.create( + mockWakuNode, + "testChannel", + "testSender", + encoder, + decoder + ); + + // Wait for initial setup + await delay(50); + + // Setup complete - focus on testing QueryOnConnect trigger + + // Simulate going offline (change health status) + mockWakuNode.events.dispatchEvent( + new CustomEvent("health", { detail: HealthStatus.Unhealthy }) + ); + + await delay(10); + + // Simulate store peer reconnection which should trigger QueryOnConnect + mockPeerManagerEvents.dispatchEvent( + new CustomEvent("store:connect", { detail: mockPeerId }) + ); + + // Wait for store query to be triggered + await delay(200); + + // Verify that QueryOnConnect was triggered by the conditions + expect(queryGeneratorStub.called).to.be.true; + }); + + it("should trigger QueryOnConnect when time threshold is exceeded", async () => { + // Create multiple messages that will be auto-retrieved + const message1Text = "First auto-retrieved message"; + const message2Text = "Second auto-retrieved message"; + const message1Payload = utf8ToBytes(message1Text); + const 
message2Payload = utf8ToBytes(message2Text); + + const sdsMessage1 = new ContentMessage( + ReliableChannel.getMessageId(message1Payload), + "testChannel", + "testSender", + [], + 1n, + undefined, + message1Payload + ); + + const sdsMessage2 = new ContentMessage( + ReliableChannel.getMessageId(message2Payload), + "testChannel", + "testSender", + [], + 2n, + undefined, + message2Payload + ); + + const autoRetrievedMessage1: IDecodedMessage = { + hash: hexToBytes("5678"), + hashStr: "5678", + version: 1, + timestamp: new Date(Date.now() - 1000), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessage1.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }; + + const autoRetrievedMessage2: IDecodedMessage = { + hash: hexToBytes("9abc"), + hashStr: "9abc", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessage2.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }; + + // Setup queryGenerator to return multiple messages + queryGeneratorStub.callsFake(async function* () { + yield [Promise.resolve(autoRetrievedMessage1)]; + yield [Promise.resolve(autoRetrievedMessage2)]; + }); + + // Create ReliableChannel with queryOnConnect enabled + reliableChannel = await ReliableChannel.create( + mockWakuNode, + "testChannel", + "testSender", + encoder, + decoder, + { queryOnConnect: true } + ); + + await delay(50); + + // Simulate old last successful query by accessing QueryOnConnect internals + // The default threshold is 5 minutes, so we'll set it to an old time + if ((reliableChannel as any).queryOnConnect) { + ((reliableChannel as any).queryOnConnect as any).lastSuccessfulQuery = + Date.now() - 6 * 60 * 1000; // 6 minutes ago + } + + // Simulate store peer connection which should trigger retrieval due to time threshold + mockPeerManagerEvents.dispatchEvent( + new CustomEvent("store:connect", { detail: mockPeerId 
}) + ); + + // Wait for store query to be triggered + await delay(200); + + // Verify that QueryOnConnect was triggered due to time threshold + expect(queryGeneratorStub.called).to.be.true; + }); + }); + + describe("stopIfTrue Integration with QueryOnConnect", () => { + let mockWakuNode: MockWakuNode; + let encoder: IEncoder; + let decoder: IDecoder; + let mockPeerManagerEvents: TypedEventEmitter; + let queryGeneratorStub: sinon.SinonStub; + let mockPeerId: PeerId; + + beforeEach(async () => { + mockWakuNode = new MockWakuNode(); + mockPeerManagerEvents = new TypedEventEmitter(); + (mockWakuNode as any).peerManager = { + events: mockPeerManagerEvents + }; + + encoder = createEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO + }); + + decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO); + + queryGeneratorStub = sinon.stub(); + mockWakuNode.store = { + queryGenerator: queryGeneratorStub + } as any; + + mockPeerId = { + toString: () => "QmTestPeerId" + } as unknown as PeerId; + }); + + it("should stop query when sync message from same channel is found", async () => { + const channelId = "testChannel"; + const senderId = "testSender"; + + // Create messages: one from different channel, one sync from same channel, one more + const sdsMessageDifferentChannel = new ContentMessage( + "msg1", + "differentChannel", + senderId, + [], + 1n, + undefined, + utf8ToBytes("different channel") + ); + + const sdsSyncMessage = new SyncMessage( + "sync-msg-id", + channelId, + senderId, + [], + 2n, + undefined, + undefined + ); + + const sdsMessageAfterSync = new ContentMessage( + "msg3", + channelId, + senderId, + [], + 3n, + undefined, + utf8ToBytes("after sync") + ); + + const messages: IDecodedMessage[] = [ + { + hash: hexToBytes("1111"), + hashStr: "1111", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessageDifferentChannel.encode(), + rateLimitProof: undefined, + 
ephemeral: false, + meta: undefined + }, + { + hash: hexToBytes("2222"), + hashStr: "2222", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsSyncMessage.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: hexToBytes("3333"), + hashStr: "3333", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessageAfterSync.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + } + ]; + + // Setup generator to yield 3 messages, but should stop after 2nd + queryGeneratorStub.callsFake(async function* () { + yield [Promise.resolve(messages[0])]; + yield [Promise.resolve(messages[1])]; + yield [Promise.resolve(messages[2])]; + }); + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + channelId, + senderId, + encoder, + decoder + ); + + await delay(50); + + // Trigger query on connect + mockPeerManagerEvents.dispatchEvent( + new CustomEvent("store:connect", { detail: mockPeerId }) + ); + + await delay(200); + + // queryGenerator should have been called + expect(queryGeneratorStub.called).to.be.true; + // The query should have stopped after finding sync message from same channel + expect(reliableChannel).to.not.be.undefined; + }); + + it("should stop query on content message from same channel", async () => { + const channelId = "testChannel"; + const senderId = "testSender"; + + const sdsContentMessage = new ContentMessage( + "msg1", + channelId, + senderId, + [{ messageId: "previous-msg-id" }], + 1n, + undefined, + utf8ToBytes("content message") + ); + + const sdsMessageAfter = new ContentMessage( + "msg2", + channelId, + senderId, + [], + 2n, + undefined, + utf8ToBytes("after content") + ); + + const messages: IDecodedMessage[] = [ + { + hash: hexToBytes("1111"), + hashStr: "1111", + version: 1, + timestamp: new Date(), + contentTopic: 
TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsContentMessage.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: hexToBytes("2222"), + hashStr: "2222", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessageAfter.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + } + ]; + + let pagesYielded = 0; + queryGeneratorStub.callsFake(async function* () { + pagesYielded++; + yield [Promise.resolve(messages[0])]; + pagesYielded++; + yield [Promise.resolve(messages[1])]; + }); + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + channelId, + senderId, + encoder, + decoder + ); + + await delay(50); + + mockPeerManagerEvents.dispatchEvent( + new CustomEvent("store:connect", { detail: mockPeerId }) + ); + + await delay(200); + + expect(queryGeneratorStub.called).to.be.true; + expect(reliableChannel).to.not.be.undefined; + // Should have stopped after first page with content message + expect(pagesYielded).to.equal(1); + }); + + it("should continue query when messages are from different channels", async () => { + const channelId = "testChannel"; + const senderId = "testSender"; + + const sdsMessageDifferent1 = new ContentMessage( + "msg1", + "differentChannel1", + senderId, + [], + 1n, + undefined, + utf8ToBytes("different 1") + ); + + const sdsMessageDifferent2 = new ContentMessage( + "msg2", + "differentChannel2", + senderId, + [], + 2n, + undefined, + utf8ToBytes("different 2") + ); + + const sdsMessageDifferent3 = new ContentMessage( + "msg3", + "differentChannel3", + senderId, + [], + 3n, + undefined, + utf8ToBytes("different 3") + ); + + const messages: IDecodedMessage[] = [ + { + hash: hexToBytes("1111"), + hashStr: "1111", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessageDifferent1.encode(), + 
rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: hexToBytes("2222"), + hashStr: "2222", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessageDifferent2.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + }, + { + hash: hexToBytes("3333"), + hashStr: "3333", + version: 1, + timestamp: new Date(), + contentTopic: TEST_CONTENT_TOPIC, + pubsubTopic: decoder.pubsubTopic, + payload: sdsMessageDifferent3.encode(), + rateLimitProof: undefined, + ephemeral: false, + meta: undefined + } + ]; + + let pagesYielded = 0; + queryGeneratorStub.callsFake(async function* () { + pagesYielded++; + yield [Promise.resolve(messages[0])]; + pagesYielded++; + yield [Promise.resolve(messages[1])]; + pagesYielded++; + yield [Promise.resolve(messages[2])]; + }); + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + channelId, + senderId, + encoder, + decoder + ); + + await delay(50); + + mockPeerManagerEvents.dispatchEvent( + new CustomEvent("store:connect", { detail: mockPeerId }) + ); + + await delay(200); + + expect(queryGeneratorStub.called).to.be.true; + expect(reliableChannel).to.not.be.undefined; + // Should have processed all pages since no matching channel + expect(pagesYielded).to.equal(3); + }); + }); + + describe("isChannelMessageWithCausalHistory predicate", () => { + let mockWakuNode: MockWakuNode; + let reliableChannel: ReliableChannel; + let encoder: IEncoder; + let decoder: IDecoder; + + beforeEach(async () => { + mockWakuNode = new MockWakuNode(); + encoder = createEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO + }); + decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO); + + reliableChannel = await ReliableChannel.create( + mockWakuNode, + "testChannel", + "testSender", + encoder, + decoder, + { queryOnConnect: false } + ); + }); + + it("should return false for malformed SDS 
messages", () => { + const msg = { + payload: new Uint8Array([1, 2, 3]) + } as IDecodedMessage; + + const result = reliableChannel["isChannelMessageWithCausalHistory"](msg); + expect(result).to.be.false; + }); + + it("should return false for different channelId", () => { + const sdsMsg = new ContentMessage( + "msg1", + "differentChannel", + "sender", + [], + 1n, + undefined, + utf8ToBytes("content") + ); + + const msg = { + payload: sdsMsg.encode() + } as IDecodedMessage; + + const result = reliableChannel["isChannelMessageWithCausalHistory"](msg); + expect(result).to.be.false; + }); + + it("should return false for sync message without causal history", () => { + const syncMsg = new SyncMessage( + "sync-msg-id", + "testChannel", + "sender", + [], + 1n, + undefined, + undefined + ); + + const msg = { + payload: syncMsg.encode() + } as IDecodedMessage; + + const result = reliableChannel["isChannelMessageWithCausalHistory"](msg); + expect(result).to.be.false; + }); + + it("should return false for content message without causal history", () => { + const contentMsg = new ContentMessage( + "msg1", + "testChannel", + "sender", + [], + 1n, + undefined, + utf8ToBytes("content") + ); + + const msg = { + payload: contentMsg.encode() + } as IDecodedMessage; + + const result = reliableChannel["isChannelMessageWithCausalHistory"](msg); + expect(result).to.be.false; + }); + + it("should return true for message with causal history", () => { + const contentMsg = new ContentMessage( + "msg1", + "testChannel", + "sender", + [{ messageId: "previous-msg-id" }], + 1n, + undefined, + utf8ToBytes("content") + ); + + const msg = { + payload: contentMsg.encode() + } as IDecodedMessage; + + const result = reliableChannel["isChannelMessageWithCausalHistory"](msg); + expect(result).to.be.true; + }); + + it("should return true for sync message with causal history", () => { + const syncMsg = new SyncMessage( + "sync-msg-id", + "testChannel", + "sender", + [{ messageId: "previous-msg-id" }], + 1n, 
+ undefined, + undefined + ); + + const msg = { + payload: syncMsg.encode() + } as IDecodedMessage; + + const result = reliableChannel["isChannelMessageWithCausalHistory"](msg); + expect(result).to.be.true; + }); + }); +}); diff --git a/packages/sdk/src/reliable_channel/reliable_channel.ts b/packages/sdk/src/reliable_channel/reliable_channel.ts new file mode 100644 index 0000000000..49b55aa495 --- /dev/null +++ b/packages/sdk/src/reliable_channel/reliable_channel.ts @@ -0,0 +1,691 @@ +import { TypedEventEmitter } from "@libp2p/interface"; +import { messageHash } from "@waku/core"; +import { + type Callback, + type IDecodedMessage, + type IDecoder, + type IEncoder, + type IMessage, + ISendOptions, + type IWaku, + LightPushError, + LightPushSDKResult, + QueryRequestParams +} from "@waku/interfaces"; +import { + type ChannelId, + isContentMessage, + MessageChannel, + MessageChannelEvent, + type MessageChannelOptions, + Message as SdsMessage, + type SenderId, + SyncMessage +} from "@waku/sds"; +import { Logger } from "@waku/utils"; + +import { + QueryOnConnect, + QueryOnConnectEvent +} from "../query_on_connect/index.js"; + +import { ReliableChannelEvent, ReliableChannelEvents } from "./events.js"; +import { MissingMessageRetriever } from "./missing_message_retriever.js"; +import { RetryManager } from "./retry_manager.js"; + +const log = new Logger("sdk:reliable-channel"); + +const DEFAULT_SYNC_MIN_INTERVAL_MS = 30 * 1000; // 30 seconds +const DEFAULT_RETRY_INTERVAL_MS = 30 * 1000; // 30 seconds +const DEFAULT_MAX_RETRY_ATTEMPTS = 10; +const DEFAULT_SWEEP_IN_BUF_INTERVAL_MS = 5 * 1000; +const DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS = 1000; + +const IRRECOVERABLE_SENDING_ERRORS: LightPushError[] = [ + LightPushError.ENCODE_FAILED, + LightPushError.EMPTY_PAYLOAD, + LightPushError.SIZE_TOO_BIG, + LightPushError.RLN_PROOF_GENERATION +]; + +export type ReliableChannelOptions = MessageChannelOptions & { + /** + * The minimum interval between 2 sync messages in the channel. 
+ * + * Meaning, how frequently we want messages in the channel, noting that the + * responsibility of sending sync messages is shared between participants + * of the channel. + * + * `0` means no sync messages will be sent. + * + * @default 30,000 (30 seconds) [[DEFAULT_SYNC_MIN_INTERVAL_MS]] + */ + syncMinIntervalMs?: number; + + /** + * How long to wait before re-sending a message that was not acknowledged. + * + * @default 30,000 (30 seconds) [[DEFAULT_RETRY_INTERVAL_MS]] + */ + retryIntervalMs?: number; + + /** + * How many times do we attempt resending messages that were not acknowledged. + * + * @default 10 [[DEFAULT_MAX_RETRY_ATTEMPTS]] + */ + maxRetryAttempts?: number; + + /** + * How often store queries are done to retrieve missing messages. + * + * @default 10,000 (10 seconds) + */ + retrieveFrequencyMs?: number; + + /** + * How often SDS message channel incoming buffer is swept. + * + * @default 5000 (every 5 seconds) + */ + sweepInBufIntervalMs?: number; + + /** + * Whether to automatically do a store query after connection to store nodes. + * + * @default true + */ + queryOnConnect?: boolean; + + /** + * Whether to auto start the message channel + * + * @default true + */ + autoStart?: boolean; + + /** The minimum elapse time between calling the underlying channel process + * task for incoming messages. This is to avoid overload when processing + * a lot of messages. + * + * @default 1000 (1 second) + */ + processTaskMinElapseMs?: number; +}; + +/** + * An easy-to-use reliable channel that ensures all participants to the channel have eventual message consistency. 
+ * + * Use events to track: + * - if your outgoing messages are sent, acknowledged or error out + * - for new incoming messages + * @emits [[ReliableChannelEvents]] + * + */ +export class ReliableChannel< + T extends IDecodedMessage +> extends TypedEventEmitter { + private readonly _send: ( + encoder: IEncoder, + message: IMessage, + sendOptions?: ISendOptions + ) => Promise; + + private readonly _subscribe: ( + decoders: IDecoder | IDecoder[], + callback: Callback + ) => Promise; + + private readonly _retrieve?: ( + decoders: IDecoder[], + options?: Partial + ) => AsyncGenerator[]>; + + private readonly syncMinIntervalMs: number; + private syncTimeout: ReturnType | undefined; + private sweepInBufInterval: ReturnType | undefined; + private readonly sweepInBufIntervalMs: number; + private processTaskTimeout: ReturnType | undefined; + private readonly retryManager: RetryManager | undefined; + private readonly missingMessageRetriever?: MissingMessageRetriever; + private readonly queryOnConnect?: QueryOnConnect; + private readonly processTaskMinElapseMs: number; + private _started: boolean; + + private constructor( + public node: IWaku, + public messageChannel: MessageChannel, + private encoder: IEncoder, + private decoder: IDecoder, + options?: ReliableChannelOptions + ) { + super(); + if (node.lightPush) { + this._send = node.lightPush.send.bind(node.lightPush); + } else if (node.relay) { + this._send = node.relay.send.bind(node.relay); + } else { + throw "No protocol available to send messages"; + } + + if (node.filter) { + this._subscribe = node.filter.subscribe.bind(node.filter); + } else if (node.relay) { + // TODO: Why do relay and filter have different interfaces? 
+ // this._subscribe = node.relay.subscribeWithUnsubscribe; + throw "Not implemented"; + } else { + throw "No protocol available to receive messages"; + } + + if (node.store) { + this._retrieve = node.store.queryGenerator.bind(node.store); + const peerManagerEvents = (node as any)?.peerManager?.events; + if ( + peerManagerEvents !== undefined && + (options?.queryOnConnect ?? true) + ) { + this.queryOnConnect = new QueryOnConnect( + [this.decoder], + this.isChannelMessageWithCausalHistory.bind(this), + peerManagerEvents, + node.events, + this._retrieve.bind(this) + ); + } + } + + this.syncMinIntervalMs = + options?.syncMinIntervalMs ?? DEFAULT_SYNC_MIN_INTERVAL_MS; + + this.sweepInBufIntervalMs = + options?.sweepInBufIntervalMs ?? DEFAULT_SWEEP_IN_BUF_INTERVAL_MS; + + const retryIntervalMs = + options?.retryIntervalMs ?? DEFAULT_RETRY_INTERVAL_MS; + const maxRetryAttempts = + options?.maxRetryAttempts ?? DEFAULT_MAX_RETRY_ATTEMPTS; + + if (retryIntervalMs && maxRetryAttempts) { + // TODO: there is a lot to improve. e.g. not point retry to send if node is offline. + this.retryManager = new RetryManager(retryIntervalMs, maxRetryAttempts); + } + + this.processTaskMinElapseMs = + options?.processTaskMinElapseMs ?? DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS; + + if (this._retrieve) { + this.missingMessageRetriever = new MissingMessageRetriever( + this.decoder, + options?.retrieveFrequencyMs, + this._retrieve, + async (msg: T) => { + await this.processIncomingMessage(msg); + } + ); + } + + this._started = false; + } + + public get isStarted(): boolean { + return this._started; + } + + /** + * Used to identify messages, pass the payload of a message you are + * about to send to track the events for this message. + * This is pre-sds wrapping + * @param messagePayload + */ + public static getMessageId(messagePayload: Uint8Array): string { + return MessageChannel.getMessageId(messagePayload); + } + + /** + * Create a new message channels. 
Message channels enables end-to-end + * reliability by ensuring that all messages in the channel are received + * by other users, and retrieved by this local node. + * + * emits events about outgoing messages, see [[`ReliableChannel`]] docs. + * + * Note that all participants in a message channels need to get the messages + * from the channel. Meaning: + * - all participants must be able to decrypt the messages + * - all participants must be subscribing to content topic(s) where the messages are sent + * + * @param node The waku node to use to send and receive messages + * @param channelId An id for the channel, all participants of the channel should use the same id + * @param senderId An id for the sender, to ensure acknowledgements are only valid if originating from someone else; best if persisted between sessions + * @param encoder A channel operates within a singular encryption layer, hence the same encoder is needed for all messages + * @param decoder A channel operates within a singular encryption layer, hence the same decoder is needed for all messages + * @param options + */ + public static async create( + node: IWaku, + channelId: ChannelId, + senderId: SenderId, + encoder: IEncoder, + decoder: IDecoder, + options?: ReliableChannelOptions + ): Promise> { + const sdsMessageChannel = new MessageChannel(channelId, senderId, options); + const messageChannel = new ReliableChannel( + node, + sdsMessageChannel, + encoder, + decoder, + options + ); + + const autoStart = options?.autoStart ?? true; + if (autoStart) { + await messageChannel.start(); + } + + return messageChannel; + } + + /** + * Sends a message in the channel, will attempt to re-send if not acknowledged + * by other participants. 
+ * + * @param messagePayload + * @returns the message id + */ + public send(messagePayload: Uint8Array): string { + const messageId = ReliableChannel.getMessageId(messagePayload); + if (!this._started) { + this.safeSendEvent("sending-message-irrecoverable-error", { + detail: { messageId: messageId, error: "channel is not started" } + }); + } + const wrapAndSendBind = this._wrapAndSend.bind(this, messagePayload); + this.retryManager?.startRetries(messageId, wrapAndSendBind); + wrapAndSendBind(); + return messageId; + } + + private _wrapAndSend(messagePayload: Uint8Array): void { + this.messageChannel.pushOutgoingMessage( + messagePayload, + async ( + sdsMessage: SdsMessage + ): Promise<{ success: boolean; retrievalHint?: Uint8Array }> => { + // Callback is called once message has added to the SDS outgoing queue + // We start by trying to send the message now. + + // `payload` wrapped in SDS + const sdsPayload = sdsMessage.encode(); + + const wakuMessage = { + payload: sdsPayload + }; + + const messageId = ReliableChannel.getMessageId(messagePayload); + + // TODO: should the encoder give me the message hash? 
+ // Encoding now to fail early, used later to get message hash + const protoMessage = await this.encoder.toProtoObj(wakuMessage); + if (!protoMessage) { + this.safeSendEvent("sending-message-irrecoverable-error", { + detail: { + messageId: messageId, + error: "could not encode message" + } + }); + return { success: false }; + } + const retrievalHint = messageHash( + this.encoder.pubsubTopic, + protoMessage + ); + + this.safeSendEvent("sending-message", { + detail: messageId + }); + + const sendRes = await this._send(this.encoder, wakuMessage); + + // If it's a recoverable failure, we will try again to send later + // If not, then we should error to the user now + for (const { error } of sendRes.failures) { + if (IRRECOVERABLE_SENDING_ERRORS.includes(error)) { + // Not recoverable, best to return it + log.error("Irrecoverable error, cannot send message: ", error); + this.safeSendEvent("sending-message-irrecoverable-error", { + detail: { + messageId, + error + } + }); + return { success: false, retrievalHint }; + } + } + + return { + success: true, + retrievalHint + }; + } + ); + + // Process outgoing messages straight away + this.messageChannel + .processTasks() + .then(() => { + this.messageChannel.sweepOutgoingBuffer(); + }) + .catch((err) => { + log.error("error encountered when processing sds tasks", err); + }); + } + + private async subscribe(): Promise { + this.assertStarted(); + return this._subscribe(this.decoder, async (message: T) => { + await this.processIncomingMessage(message); + }); + } + + /** + * Don't forget to call `this.messageChannel.sweepIncomingBuffer();` once done. 
+ * @param msg + * @private + */ + private async processIncomingMessage( + msg: T + ): Promise { + // New message arrives, we need to unwrap it first + const sdsMessage = SdsMessage.decode(msg.payload); + + if (!sdsMessage) { + log.error("could not SDS decode message", msg); + return; + } + + if (sdsMessage.channelId !== this.messageChannel.channelId) { + log.warn( + "ignoring message with different channel id", + sdsMessage.channelId + ); + return; + } + + const retrievalHint = msg.hash; + log.info(`processing message ${sdsMessage.messageId}:${msg.hashStr}`); + // SDS Message decoded, let's pass it to the channel so we can learn about + // missing messages or the status of previous outgoing messages + this.messageChannel.pushIncomingMessage(sdsMessage, retrievalHint); + + this.missingMessageRetriever?.removeMissingMessage(sdsMessage.messageId); + + if (sdsMessage.content && sdsMessage.content.length > 0) { + // Now, process the message with callback + + // Overrides msg.payload with unwrapped payload + // TODO: can we do better? 
+ const { payload: _p, ...allButPayload } = msg; + const unwrappedMessage = Object.assign(allButPayload, { + payload: sdsMessage.content, + hash: msg.hash, + hashStr: msg.hashStr, + version: msg.version, + contentTopic: msg.contentTopic, + pubsubTopic: msg.pubsubTopic, + timestamp: msg.timestamp, + rateLimitProof: msg.rateLimitProof, + ephemeral: msg.ephemeral, + meta: msg.meta + }); + + this.safeSendEvent("message-received", { + detail: unwrappedMessage as unknown as T + }); + } + + this.queueProcessTasks(); + } + + private async processIncomingMessages( + messages: T[] + ): Promise { + for (const message of messages) { + await this.processIncomingMessage(message); + } + } + + // TODO: For now we only queue process tasks for incoming messages + // As this is where there is most volume + private queueProcessTasks(): void { + // If one is already queued, then we can ignore it + if (this.processTaskTimeout === undefined) { + this.processTaskTimeout = setTimeout(() => { + void this.messageChannel.processTasks().catch((err) => { + log.error("error encountered when processing sds tasks", err); + }); + + // Clear timeout once triggered + clearTimeout(this.processTaskTimeout); + this.processTaskTimeout = undefined; + }, this.processTaskMinElapseMs); // we ensure that we don't call process tasks more than once per second + } + } + + public async start(): Promise { + if (this._started) return true; + this._started = true; + this.setupEventListeners(); + this.restartSync(); + this.startSweepIncomingBufferLoop(); + if (this._retrieve) { + this.missingMessageRetriever?.start(); + this.queryOnConnect?.start(); + } + return this.subscribe(); + } + + public stop(): void { + if (!this._started) return; + this._started = false; + this.stopSync(); + this.stopSweepIncomingBufferLoop(); + this.missingMessageRetriever?.stop(); + this.queryOnConnect?.stop(); + // TODO unsubscribe + // TODO unsetMessageListeners + } + + private assertStarted(): void { + if (!this._started) throw 
Error("Message Channel must be started"); + } + + private startSweepIncomingBufferLoop(): void { + this.stopSweepIncomingBufferLoop(); + this.sweepInBufInterval = setInterval(() => { + log.info("sweep incoming buffer"); + this.messageChannel.sweepIncomingBuffer(); + }, this.sweepInBufIntervalMs); + } + + private stopSweepIncomingBufferLoop(): void { + if (this.sweepInBufInterval) clearInterval(this.sweepInBufInterval); + } + + private restartSync(multiplier: number = 1): void { + if (this.syncTimeout) { + clearTimeout(this.syncTimeout); + } + if (this.syncMinIntervalMs) { + const timeoutMs = this.random() * this.syncMinIntervalMs * multiplier; + + this.syncTimeout = setTimeout(() => { + void this.sendSyncMessage(); + // Always restart a sync, no matter whether the message was sent. + // Set a multiplier so we wait a bit longer to not hog the conversation + void this.restartSync(2); + }, timeoutMs); + } + } + + private stopSync(): void { + if (this.syncTimeout) { + clearTimeout(this.syncTimeout); + } + } + + // Used to enable overriding when testing + private random(): number { + return Math.random(); + } + + private safeSendEvent( + event: T, + eventInit?: CustomEventInit + ): void { + try { + this.dispatchEvent(new CustomEvent(event, eventInit)); + } catch (error) { + log.error(`Failed to dispatch event ${event}:`, error); + } + } + + private async sendSyncMessage(): Promise { + this.assertStarted(); + await this.messageChannel.pushOutgoingSyncMessage( + async (syncMessage: SyncMessage): Promise => { + // Callback is called once message has added to the SDS outgoing queue + // We start by trying to send the message now. 
+ + // `payload` wrapped in SDS + const sdsPayload = syncMessage.encode(); + + const wakuMessage = { + payload: sdsPayload + }; + + const sendRes = await this._send(this.encoder, wakuMessage); + if (sendRes.failures.length > 0) { + log.error("Error sending sync message: ", sendRes); + return false; + } + + return true; + } + ); + + // Process outgoing messages straight away + // TODO: review and optimize + await this.messageChannel.processTasks(); + this.messageChannel.sweepOutgoingBuffer(); + } + + private isChannelMessageWithCausalHistory(msg: T): boolean { + // TODO: we do end-up decoding messages twice as this is used to stop store queries. + const sdsMessage = SdsMessage.decode(msg.payload); + + if (!sdsMessage) { + return false; + } + + if (sdsMessage.channelId !== this.messageChannel.channelId) { + return false; + } + + return sdsMessage.causalHistory && sdsMessage.causalHistory.length > 0; + } + + private setupEventListeners(): void { + this.messageChannel.addEventListener( + MessageChannelEvent.OutMessageSent, + (event) => { + if (event.detail.content) { + const messageId = ReliableChannel.getMessageId(event.detail.content); + this.safeSendEvent("message-sent", { + detail: messageId + }); + } + } + ); + + this.messageChannel.addEventListener( + MessageChannelEvent.OutMessageAcknowledged, + (event) => { + if (event.detail) { + this.safeSendEvent("message-acknowledged", { + detail: event.detail + }); + + // Stopping retries + this.retryManager?.stopRetries(event.detail); + } + } + ); + + this.messageChannel.addEventListener( + MessageChannelEvent.OutMessagePossiblyAcknowledged, + (event) => { + if (event.detail) { + this.safeSendEvent("message-possibly-acknowledged", { + detail: { + messageId: event.detail.messageId, + possibleAckCount: event.detail.count + } + }); + } + } + ); + + this.messageChannel.addEventListener( + MessageChannelEvent.InSyncReceived, + (_event) => { + // restart the timeout when a sync message has been received + this.restartSync(); + 
} + ); + + this.messageChannel.addEventListener( + MessageChannelEvent.InMessageReceived, + (event) => { + // restart the timeout when a content message has been received + if (isContentMessage(event.detail)) { + // send a sync message faster to ack someone's else + this.restartSync(0.5); + } + } + ); + + this.messageChannel.addEventListener( + MessageChannelEvent.OutMessageSent, + (event) => { + // restart the timeout when a content message has been sent + if (isContentMessage(event.detail)) { + this.restartSync(); + } + } + ); + + this.messageChannel.addEventListener( + MessageChannelEvent.InMessageMissing, + (event) => { + for (const { messageId, retrievalHint } of event.detail) { + if (retrievalHint && this.missingMessageRetriever) { + this.missingMessageRetriever.addMissingMessage( + messageId, + retrievalHint + ); + } + } + } + ); + + if (this.queryOnConnect) { + this.queryOnConnect.addEventListener( + QueryOnConnectEvent.MessagesRetrieved, + (event) => { + void this.processIncomingMessages(event.detail); + } + ); + } + } +} diff --git a/packages/sdk/src/reliable_channel/reliable_channel_acks.spec.ts b/packages/sdk/src/reliable_channel/reliable_channel_acks.spec.ts new file mode 100644 index 0000000000..9cce421c59 --- /dev/null +++ b/packages/sdk/src/reliable_channel/reliable_channel_acks.spec.ts @@ -0,0 +1,187 @@ +import { TypedEventEmitter } from "@libp2p/interface"; +import { createDecoder, createEncoder } from "@waku/core"; +import { + AutoSharding, + IDecodedMessage, + IDecoder, + IEncoder +} from "@waku/interfaces"; +import { + createRoutingInfo, + delay, + MockWakuEvents, + MockWakuNode +} from "@waku/utils"; +import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes"; +import { expect } from "chai"; +import { beforeEach, describe } from "mocha"; + +import { ReliableChannel } from "./index.js"; + +const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto"; +const TEST_NETWORK_CONFIG: AutoSharding = { + clusterId: 0, + numShardsInCluster: 1 +}; +const 
TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, { + contentTopic: TEST_CONTENT_TOPIC +}); + +describe("Reliable Channel: Acks", () => { + let encoder: IEncoder; + let decoder: IDecoder; + + beforeEach(async () => { + encoder = createEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO + }); + decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO); + }); + + it("Outgoing message is acknowledged", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder + ); + + const message = utf8ToBytes("first message in channel"); + + // Alice sets up message tracking + const messageId = ReliableChannel.getMessageId(message); + + let messageReceived = false; + reliableChannelBob.addEventListener("message-received", (event) => { + if (bytesToUtf8(event.detail.payload) === "first message in channel") { + messageReceived = true; + } + }); + + let messageAcknowledged = false; + reliableChannelAlice.addEventListener("message-acknowledged", (event) => { + if (event.detail === messageId) { + messageAcknowledged = true; + } + }); + + reliableChannelAlice.send(message); + + // Wait for Bob to receive the message to ensure it uses it in causal history + while (!messageReceived) { + await delay(50); + } + // Bobs sends a message now, it should include first one in causal history + reliableChannelBob.send(utf8ToBytes("second message in channel")); + while (!messageAcknowledged) { + await delay(50); + } + + expect(messageAcknowledged).to.be.true; + }); + + it("Re-sent message is acknowledged once other parties join.", async () => { + const 
commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + + // Setup, Alice first + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder, + { + retryIntervalMs: 0, // disable any automation to better control the test + syncMinIntervalMs: 0, + processTaskMinElapseMs: 10 + } + ); + + // Bob is offline, Alice sends a message, this is the message we want + // acknowledged in this test. + const message = utf8ToBytes("message to be acknowledged"); + const messageId = ReliableChannel.getMessageId(message); + let messageAcknowledged = false; + reliableChannelAlice.addEventListener("message-acknowledged", (event) => { + if (event.detail === messageId) { + messageAcknowledged = true; + } + }); + reliableChannelAlice.send(message); + + // Wait a bit to ensure Bob does not receive the message + await delay(100); + + // Now Bob goes online + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + { + retryIntervalMs: 0, // disable any automation to better control the test + syncMinIntervalMs: 0, + processTaskMinElapseMs: 10 + } + ); + + // Track when Bob receives the message + let bobReceivedMessage = false; + reliableChannelBob.addEventListener("message-received", (event) => { + if (bytesToUtf8(event.detail.payload!) 
=== "message to be acknowledged") { + bobReceivedMessage = true; + } + }); + + // Some sync messages are exchanged + await reliableChannelAlice["sendSyncMessage"](); + await reliableChannelBob["sendSyncMessage"](); + + // wait a bit to ensure messages are processed + await delay(100); + + // Some content messages are exchanged too + reliableChannelAlice.send(utf8ToBytes("some message")); + reliableChannelBob.send(utf8ToBytes("some other message")); + + // wait a bit to ensure messages are processed + await delay(100); + + // At this point, the message shouldn't be acknowledged yet as Bob + // does not have a complete log + expect(messageAcknowledged).to.be.false; + + // Now Alice resends the message + reliableChannelAlice.send(message); + + // Wait for Bob to receive the message + while (!bobReceivedMessage) { + await delay(50); + } + + // Bob receives it, and should include it in its sync + await reliableChannelBob["sendSyncMessage"](); + while (!messageAcknowledged) { + await delay(50); + } + + // The sync should acknowledge the message + expect(messageAcknowledged).to.be.true; + }); +}); diff --git a/packages/sdk/src/reliable_channel/reliable_channel_encryption.spec.ts b/packages/sdk/src/reliable_channel/reliable_channel_encryption.spec.ts new file mode 100644 index 0000000000..628c99bfaa --- /dev/null +++ b/packages/sdk/src/reliable_channel/reliable_channel_encryption.spec.ts @@ -0,0 +1,327 @@ +import { TypedEventEmitter } from "@libp2p/interface"; +import { + AutoSharding, + IDecodedMessage, + IDecoder, + IEncoder, + type IMessage, + ISendOptions, + IWaku, + LightPushError, + LightPushSDKResult +} from "@waku/interfaces"; +import { generatePrivateKey, getPublicKey } from "@waku/message-encryption"; +import { + createDecoder as createEciesDecoder, + createEncoder as createEciesEncoder +} from "@waku/message-encryption/ecies"; +import { + createRoutingInfo, + delay, + MockWakuEvents, + MockWakuNode +} from "@waku/utils"; +import { bytesToUtf8, utf8ToBytes } from 
"@waku/utils/bytes"; +import { expect } from "chai"; +import { beforeEach, describe } from "mocha"; + +import { ReliableChannel } from "./index.js"; + +const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto"; +const TEST_NETWORK_CONFIG: AutoSharding = { + clusterId: 0, + numShardsInCluster: 1 +}; +const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, { + contentTopic: TEST_CONTENT_TOPIC +}); + +describe("Reliable Channel: Encryption", () => { + let mockWakuNode: IWaku; + let encoder: IEncoder; + let decoder: IDecoder; + + beforeEach(async () => { + mockWakuNode = new MockWakuNode(); + const privateKey = generatePrivateKey(); + const publicKey = getPublicKey(privateKey); + encoder = createEciesEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO, + publicKey + }); + decoder = createEciesDecoder( + TEST_CONTENT_TOPIC, + TEST_ROUTING_INFO, + privateKey + ); + }); + + it("Outgoing message is emitted as sending", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("message in channel"); + + // Setting up message tracking + const messageId = ReliableChannel.getMessageId(message); + let messageSending = false; + reliableChannel.addEventListener("sending-message", (event) => { + if (event.detail === messageId) { + messageSending = true; + } + }); + + reliableChannel.send(message); + while (!messageSending) { + await delay(50); + } + + expect(messageSending).to.be.true; + }); + + it("Outgoing message is emitted as sent", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("message in channel"); + + // Setting up message tracking + const messageId = ReliableChannel.getMessageId(message); + let messageSent = false; + reliableChannel.addEventListener("message-sent", (event) => { + if (event.detail === 
messageId) { + messageSent = true; + } + }); + + reliableChannel.send(message); + while (!messageSent) { + await delay(50); + } + + expect(messageSent).to.be.true; + }); + + it("Encoder error raises irrecoverable error", async () => { + mockWakuNode.lightPush!.send = ( + _encoder: IEncoder, + _message: IMessage, + _sendOptions?: ISendOptions + ): Promise => { + return Promise.resolve({ + failures: [{ error: LightPushError.EMPTY_PAYLOAD }], + successes: [] + }); + }; + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("payload doesnt matter"); + + // Setting up message tracking + const messageId = ReliableChannel.getMessageId(message); + let irrecoverableError = false; + reliableChannel.addEventListener( + "sending-message-irrecoverable-error", + (event) => { + if (event.detail.messageId === messageId) { + irrecoverableError = true; + } + } + ); + + encoder.contentTopic = "..."; + reliableChannel.send(message); + while (!irrecoverableError) { + await delay(50); + } + + expect(irrecoverableError).to.be.true; + }); + + it("Outgoing message is not emitted as acknowledged from own outgoing messages", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + const message = utf8ToBytes("first message in channel"); + + // Setting up message tracking + const messageId = ReliableChannel.getMessageId(message); + let messageAcknowledged = false; + reliableChannel.addEventListener("message-acknowledged", (event) => { + if (event.detail === messageId) { + messageAcknowledged = true; + } + }); + + reliableChannel.send(message); + + // Sending a second message from the same node should not acknowledge the first one + reliableChannel.send(utf8ToBytes("second message in channel")); + + // Wait a bit to be sure no event is emitted + await delay(200); + + expect(messageAcknowledged).to.be.false; + 
}); + + // TODO: https://github.com/waku-org/js-waku/issues/2648 + it.skip("Outgoing message is possibly acknowledged", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + // Bob only includes one message in causal history + { causalHistorySize: 1 } + ); + + const messages = ["first", "second", "third"].map((m) => { + return utf8ToBytes(m); + }); + + // Alice sets up message tracking for first message + const firstMessageId = ReliableChannel.getMessageId(messages[0]); + let firstMessagePossiblyAcknowledged = false; + reliableChannelAlice.addEventListener( + "message-possibly-acknowledged", + (event) => { + if (event.detail.messageId === firstMessageId) { + firstMessagePossiblyAcknowledged = true; + } + } + ); + + let bobMessageReceived = 0; + reliableChannelAlice.addEventListener("message-received", () => { + bobMessageReceived++; + }); + + for (const m of messages) { + reliableChannelAlice.send(m); + } + + // Wait for Bob to receive all messages to ensure filter is updated + while (bobMessageReceived < 3) { + await delay(50); + } + + // Bobs sends a message now, it should include first one in bloom filter + reliableChannelBob.send(utf8ToBytes("message back")); + while (!firstMessagePossiblyAcknowledged) { + await delay(50); + } + + expect(firstMessagePossiblyAcknowledged).to.be.true; + }); + + it("Outgoing message is acknowledged", async () => { + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const reliableChannelAlice = await 
ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder + ); + + const message = utf8ToBytes("first message in channel"); + + // Alice sets up message tracking + const messageId = ReliableChannel.getMessageId(message); + let messageAcknowledged = false; + reliableChannelAlice.addEventListener("message-acknowledged", (event) => { + if (event.detail === messageId) { + messageAcknowledged = true; + } + }); + + let bobReceivedMessage = false; + reliableChannelBob.addEventListener("message-received", () => { + bobReceivedMessage = true; + }); + + reliableChannelAlice.send(message); + + // Wait for Bob to receive the message + while (!bobReceivedMessage) { + await delay(50); + } + + // Bobs sends a message now, it should include first one in causal history + reliableChannelBob.send(utf8ToBytes("second message in channel")); + while (!messageAcknowledged) { + await delay(50); + } + + expect(messageAcknowledged).to.be.true; + }); + + it("Incoming message is emitted as received", async () => { + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder + ); + + let receivedMessage: IDecodedMessage; + reliableChannel.addEventListener("message-received", (event) => { + receivedMessage = event.detail; + }); + + const message = utf8ToBytes("message in channel"); + + reliableChannel.send(message); + while (!receivedMessage!) 
{ + await delay(50); + } + + expect(bytesToUtf8(receivedMessage!.payload)).to.eq(bytesToUtf8(message)); + }); +}); diff --git a/packages/sdk/src/reliable_channel/reliable_channel_sync.spec.ts b/packages/sdk/src/reliable_channel/reliable_channel_sync.spec.ts new file mode 100644 index 0000000000..226d5b8c6a --- /dev/null +++ b/packages/sdk/src/reliable_channel/reliable_channel_sync.spec.ts @@ -0,0 +1,397 @@ +import { TypedEventEmitter } from "@libp2p/interface"; +import { createDecoder, createEncoder } from "@waku/core"; +import { + AutoSharding, + IDecodedMessage, + IDecoder, + IEncoder, + IWaku +} from "@waku/interfaces"; +import { MessageChannelEvent } from "@waku/sds"; +import { + createRoutingInfo, + delay, + MockWakuEvents, + MockWakuNode +} from "@waku/utils"; +import { utf8ToBytes } from "@waku/utils/bytes"; +import { expect } from "chai"; +import { beforeEach, describe } from "mocha"; + +import { ReliableChannel } from "./index.js"; + +const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto"; +const TEST_NETWORK_CONFIG: AutoSharding = { + clusterId: 0, + numShardsInCluster: 1 +}; +const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, { + contentTopic: TEST_CONTENT_TOPIC +}); + +describe("Reliable Channel: Sync", () => { + let mockWakuNode: IWaku; + let encoder: IEncoder; + let decoder: IDecoder; + + beforeEach(async () => { + mockWakuNode = new MockWakuNode(); + encoder = createEncoder({ + contentTopic: TEST_CONTENT_TOPIC, + routingInfo: TEST_ROUTING_INFO + }); + decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO); + }); + + it("Sync message is sent within sync frequency", async () => { + const syncMinIntervalMs = 100; + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder, + { + syncMinIntervalMs + } + ); + + // Send a message to have a history + const sentMsgId = reliableChannel.send(utf8ToBytes("some message")); + let messageSent = false; + 
reliableChannel.addEventListener("message-sent", (event) => { + if (event.detail === sentMsgId) { + messageSent = true; + } + }); + + while (!messageSent) { + await delay(50); + } + + let syncMessageSent = false; + reliableChannel.messageChannel.addEventListener( + MessageChannelEvent.OutSyncSent, + (_event) => { + syncMessageSent = true; + } + ); + + await delay(syncMinIntervalMs); + + expect(syncMessageSent).to.be.true; + }); + + it("Sync message are not sent excessively within sync frequency", async () => { + const syncMinIntervalMs = 100; + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder, + { + syncMinIntervalMs + } + ); + + let syncMessageSentCount = 0; + reliableChannel.messageChannel.addEventListener( + MessageChannelEvent.OutSyncSent, + (_event) => { + syncMessageSentCount++; + } + ); + + await delay(syncMinIntervalMs); + + // There is randomness to this, but it should not be excessive + expect(syncMessageSentCount).to.be.lessThan(3); + }); + + it("Sync message is not sent if another sync message was just received", async function () { + this.timeout(5000); + + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const syncMinIntervalMs = 1000; + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder, + { + syncMinIntervalMs: 0, // does not send sync messages automatically + processTaskMinElapseMs: 10 + } + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + { + syncMinIntervalMs, + processTaskMinElapseMs: 10 + } + ); + (reliableChannelBob as any).random = () => { + return 1; + }; // will wait a full second + + // Send a message to have a history + const sentMsgId = reliableChannelAlice.send(utf8ToBytes("some 
message")); + let messageSent = false; + reliableChannelAlice.addEventListener("message-sent", (event) => { + if (event.detail === sentMsgId) { + messageSent = true; + } + }); + + while (!messageSent) { + await delay(50); + } + + let syncMessageSent = false; + reliableChannelBob.messageChannel.addEventListener( + MessageChannelEvent.OutSyncSent, + (_event) => { + syncMessageSent = true; + } + ); + + while (!syncMessageSent) { + // Bob will send a sync message as soon as it started, we are waiting for this one + await delay(100); + } + // Let's reset the tracker + syncMessageSent = false; + // We should be faster than Bob as Bob will "randomly" wait a full second + await reliableChannelAlice["sendSyncMessage"](); + + // Bob should be waiting a full second before sending a message after Alice + await delay(900); + + // Now, let's wait Bob to send the sync message + await delay(200); + expect(syncMessageSent).to.be.true; + }); + + it("Sync message is not sent if another non-ephemeral message was just received", async function () { + this.timeout(5000); + + const commonEventEmitter = new TypedEventEmitter(); + const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); + const mockWakuNodeBob = new MockWakuNode(commonEventEmitter); + + const syncMinIntervalMs = 1000; + + const reliableChannelAlice = await ReliableChannel.create( + mockWakuNodeAlice, + "MyChannel", + "alice", + encoder, + decoder, + { + syncMinIntervalMs: 0, // does not send sync messages automatically + processTaskMinElapseMs: 10 + } + ); + const reliableChannelBob = await ReliableChannel.create( + mockWakuNodeBob, + "MyChannel", + "bob", + encoder, + decoder, + { + syncMinIntervalMs, + processTaskMinElapseMs: 10 + } + ); + (reliableChannelBob as any).random = () => { + return 1; + }; // will wait a full second + + // Send a message to have a history + const sentMsgId = reliableChannelAlice.send(utf8ToBytes("some message")); + let messageSent = false; + 
reliableChannelAlice.addEventListener("message-sent", (event) => { + if (event.detail === sentMsgId) { + messageSent = true; + } + }); + + while (!messageSent) { + await delay(50); + } + + let syncMessageSent = false; + reliableChannelBob.messageChannel.addEventListener( + MessageChannelEvent.OutSyncSent, + (_event) => { + syncMessageSent = true; + } + ); + + while (!syncMessageSent) { + // Bob will send a sync message as soon as it started, we are waiting for this one + await delay(100); + } + // Let's reset the tracker + syncMessageSent = false; + // We should be faster than Bob as Bob will "randomly" wait a full second + reliableChannelAlice.send(utf8ToBytes("some message")); + + // Bob should be waiting a full second before sending a message after Alice + await delay(900); + + // Now, let's wait Bob to send the sync message + await delay(200); + expect(syncMessageSent).to.be.true; + }); + + it("Sync message is not sent if another sync message was just sent", async function () { + this.timeout(5000); + const syncMinIntervalMs = 1000; + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder, + { syncMinIntervalMs } + ); + (reliableChannel as any).random = () => { + return 1; + }; // will wait a full second + + // Send a message to have a history + const sentMsgId = reliableChannel.send(utf8ToBytes("some message")); + let messageSent = false; + reliableChannel.addEventListener("message-sent", (event) => { + if (event.detail === sentMsgId) { + messageSent = true; + } + }); + + while (!messageSent) { + await delay(50); + } + + let syncMessageSent = false; + reliableChannel.messageChannel.addEventListener( + MessageChannelEvent.OutSyncSent, + (_event) => { + syncMessageSent = true; + } + ); + + while (!syncMessageSent) { + // Will send a sync message as soon as it started, we are waiting for this one + await delay(100); + } + // Let's reset the tracker + syncMessageSent = false; + // We should be faster 
than automated sync as it will "randomly" wait a full second + await reliableChannel["sendSyncMessage"](); + + // should be waiting a full second before sending a message after Alice + await delay(900); + + // Now, let's wait to send the automated sync message + await delay(200); + expect(syncMessageSent).to.be.true; + }); + + it("Sync message is not sent if another non-ephemeral message was just sent", async function () { + this.timeout(5000); + const syncMinIntervalMs = 1000; + + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder, + { syncMinIntervalMs } + ); + (reliableChannel as any).random = () => { + return 1; + }; // will wait a full second + + // Send a message to have a history + const sentMsgId = reliableChannel.send(utf8ToBytes("some message")); + let messageSent = false; + reliableChannel.addEventListener("message-sent", (event) => { + if (event.detail === sentMsgId) { + messageSent = true; + } + }); + + while (!messageSent) { + await delay(50); + } + + let syncMessageSent = false; + reliableChannel.messageChannel.addEventListener( + MessageChannelEvent.OutSyncSent, + (_event) => { + syncMessageSent = true; + } + ); + + while (!syncMessageSent) { + // Will send a sync message as soon as it started, we are waiting for this one + await delay(100); + } + // Let's reset the tracker + syncMessageSent = false; + // We should be faster than automated sync as it will "randomly" wait a full second + reliableChannel.send(utf8ToBytes("non-ephemeral message")); + + // should be waiting a full second before sending a message after Alice + await delay(900); + + // Now, let's wait to send the automated sync message + await delay(200); + expect(syncMessageSent).to.be.true; + }); + + it("Own sync message does not acknowledge own messages", async () => { + const syncMinIntervalMs = 100; + const reliableChannel = await ReliableChannel.create( + mockWakuNode, + "MyChannel", + "alice", + encoder, + decoder, + 
{ + syncMinIntervalMs + } + ); + + const msg = utf8ToBytes("some message"); + const msgId = ReliableChannel.getMessageId(msg); + + let messageAcknowledged = false; + reliableChannel.messageChannel.addEventListener( + MessageChannelEvent.OutMessageAcknowledged, + (event) => { + if (event.detail === msgId) messageAcknowledged = true; + } + ); + + reliableChannel.send(msg); + + await delay(syncMinIntervalMs * 2); + + // There is randomness to this, but it should not be excessive + expect(messageAcknowledged).to.be.false; + }); +}); diff --git a/packages/sdk/src/reliable_channel/retry_manager.spec.ts b/packages/sdk/src/reliable_channel/retry_manager.spec.ts new file mode 100644 index 0000000000..1a80a5cea1 --- /dev/null +++ b/packages/sdk/src/reliable_channel/retry_manager.spec.ts @@ -0,0 +1,48 @@ +import { delay } from "@waku/utils"; +import { expect } from "chai"; + +import { RetryManager } from "./retry_manager.js"; + +describe("Retry Manager", () => { + it("Retries within given interval", async function () { + const retryManager = new RetryManager(100, 1); + + let retryCount = 0; + retryManager.startRetries("1", () => { + retryCount++; + }); + + await delay(110); + + expect(retryCount).to.equal(1); + }); + + it("Retries within maximum given attempts", async function () { + const maxAttempts = 5; + const retryManager = new RetryManager(10, maxAttempts); + + let retryCount = 0; + retryManager.startRetries("1", () => { + retryCount++; + }); + + await delay(200); + + expect(retryCount).to.equal(maxAttempts); + }); + + it("Wait given interval before re-trying", async function () { + const retryManager = new RetryManager(100, 1); + + let retryCount = 0; + retryManager.startRetries("1", () => { + retryCount++; + }); + + await delay(90); + expect(retryCount).to.equal(0); + + await delay(110); + expect(retryCount).to.equal(1); + }); +}); diff --git a/packages/sdk/src/reliable_channel/retry_manager.ts b/packages/sdk/src/reliable_channel/retry_manager.ts new file mode 100644 
index 0000000000..00426fd854 --- /dev/null +++ b/packages/sdk/src/reliable_channel/retry_manager.ts @@ -0,0 +1,51 @@ +export class RetryManager { + private timeouts: Map>; + + public constructor( + // TODO: back-off strategy + private retryIntervalMs: number, + private maxRetryNumber: number + ) { + this.timeouts = new Map(); + + if ( + !retryIntervalMs || + retryIntervalMs <= 0 || + !maxRetryNumber || + maxRetryNumber <= 0 + ) { + throw Error( + `Invalid retryIntervalMs ${retryIntervalMs} or maxRetryNumber ${maxRetryNumber} values` + ); + } + } + + public stopRetries(id: string): void { + const timeout = this.timeouts.get(id); + if (timeout) { + clearTimeout(timeout); + } + } + + public startRetries(id: string, retry: () => void | Promise): void { + this.retry(id, retry, 0); + } + + private retry( + id: string, + retry: () => void | Promise, + attemptNumber: number + ): void { + clearTimeout(this.timeouts.get(id)); + if (attemptNumber < this.maxRetryNumber) { + const interval = setTimeout(() => { + void retry(); + + // Register for next retry until we are told to stop; + this.retry(id, retry, ++attemptNumber); + }, this.retryIntervalMs); + + this.timeouts.set(id, interval); + } + } +} diff --git a/packages/sdk/src/waku/wait_for_remote_peer.spec.ts b/packages/sdk/src/waku/wait_for_remote_peer.spec.ts index b3fb33d3e7..9b0073b379 100644 --- a/packages/sdk/src/waku/wait_for_remote_peer.spec.ts +++ b/packages/sdk/src/waku/wait_for_remote_peer.spec.ts @@ -1,5 +1,10 @@ import type { Connection, Peer, PeerStore } from "@libp2p/interface"; -import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core"; +import { + FilterCodecs, + LightPushCodec, + LightPushCodecV2, + StoreCodec +} from "@waku/core"; import { IRelay, Protocols } from "@waku/interfaces"; import { expect } from "chai"; import sinon from "sinon"; @@ -114,7 +119,10 @@ describe("waitForRemotePeer", () => { err = e as Error; } - expect(addEventListenerSpy.calledOnceWith("peer:identify")).to.be.true; + 
expect(addEventListenerSpy.calledTwice).to.be.true; + addEventListenerSpy + .getCalls() + .forEach((c) => expect(c.firstArg).to.equal("peer:identify")); expect(err).not.to.be.undefined; expect(err!.message).to.be.eq("Timed out waiting for a remote peer."); @@ -148,9 +156,12 @@ describe("waitForRemotePeer", () => { }); it("should wait for LightPush peer to be connected", async () => { + let call = 0; const addEventListenerSpy = sinon.spy( (_type: string, _cb: (e: any) => void) => { - _cb({ detail: { protocols: [LightPushCodec] } }); + const proto = call === 0 ? LightPushCodec : LightPushCodecV2; + call++; + _cb({ detail: { protocols: [proto] } }); } ); eventTarget.addEventListener = addEventListenerSpy; @@ -174,7 +185,10 @@ describe("waitForRemotePeer", () => { err = e as Error; } - expect(addEventListenerSpy.calledOnceWith("peer:identify")).to.be.true; + expect(addEventListenerSpy.calledTwice).to.be.true; + addEventListenerSpy + .getCalls() + .forEach((c) => expect(c.firstArg).to.equal("peer:identify")); expect(err).to.be.undefined; // check with metadata serivice @@ -196,8 +210,10 @@ describe("waitForRemotePeer", () => { err = e as Error; } - expect(addEventListenerSpy.calledTwice).to.be.true; - expect(addEventListenerSpy.lastCall.calledWith("peer:identify")).to.be.true; + expect(addEventListenerSpy.callCount).to.equal(4); + addEventListenerSpy + .getCalls() + .forEach((c) => expect(c.firstArg).to.equal("peer:identify")); expect(err).to.be.undefined; }); diff --git a/packages/sdk/src/waku/wait_for_remote_peer.ts b/packages/sdk/src/waku/wait_for_remote_peer.ts index ae10d6be8a..37178ae989 100644 --- a/packages/sdk/src/waku/wait_for_remote_peer.ts +++ b/packages/sdk/src/waku/wait_for_remote_peer.ts @@ -1,5 +1,10 @@ import type { IdentifyResult } from "@libp2p/interface"; -import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core"; +import { + FilterCodecs, + LightPushCodec, + LightPushCodecV2, + StoreCodec +} from "@waku/core"; import type { IWaku, Libp2p 
} from "@waku/interfaces"; import { Protocols } from "@waku/interfaces"; import { Logger } from "@waku/utils"; @@ -82,6 +87,13 @@ export async function waitForRemotePeer( type EventListener = (_: CustomEvent) => void; +function protocolToPeerPromise( + codecs: string[], + libp2p: Libp2p +): Promise[] { + return codecs.map((codec) => waitForConnectedPeer(codec, libp2p)); +} + /** * Waits for required peers to be connected. */ @@ -96,15 +108,21 @@ async function waitForProtocols( } if (waku.store && protocols.includes(Protocols.Store)) { - promises.push(waitForConnectedPeer(StoreCodec, waku.libp2p)); + promises.push(...protocolToPeerPromise([StoreCodec], waku.libp2p)); } if (waku.lightPush && protocols.includes(Protocols.LightPush)) { - promises.push(waitForConnectedPeer(LightPushCodec, waku.libp2p)); + const lpPromises = protocolToPeerPromise( + [LightPushCodec, LightPushCodecV2], + waku.libp2p + ); + promises.push(Promise.any(lpPromises)); } if (waku.filter && protocols.includes(Protocols.Filter)) { - promises.push(waitForConnectedPeer(FilterCodecs.SUBSCRIBE, waku.libp2p)); + promises.push( + ...protocolToPeerPromise([FilterCodecs.SUBSCRIBE], waku.libp2p) + ); } return Promise.all(promises); @@ -246,15 +264,17 @@ function getEnabledProtocols(waku: IWaku): Protocols[] { function mapProtocolsToCodecs(protocols: Protocols[]): Map { const codecs: Map = new Map(); - const protocolToCodec: Record = { - [Protocols.Filter]: FilterCodecs.SUBSCRIBE, - [Protocols.LightPush]: LightPushCodec, - [Protocols.Store]: StoreCodec + const protocolToCodec: Record = { + [Protocols.Filter]: [FilterCodecs.SUBSCRIBE], + [Protocols.LightPush]: [LightPushCodec, LightPushCodecV2], + [Protocols.Store]: [StoreCodec] }; for (const protocol of protocols) { if (protocolToCodec[protocol]) { - codecs.set(protocolToCodec[protocol], false); + protocolToCodec[protocol].forEach((codec) => { + codecs.set(codec, false); + }); } } diff --git a/packages/sdk/src/waku/waku.ts b/packages/sdk/src/waku/waku.ts 
index 4f89080215..7336b06df7 100644 --- a/packages/sdk/src/waku/waku.ts +++ b/packages/sdk/src/waku/waku.ts @@ -182,7 +182,7 @@ export class WakuNode implements IWaku { } if (_protocols.includes(Protocols.LightPush)) { if (this.lightPush) { - codecs.push(this.lightPush.multicodec); + codecs.push(...this.lightPush.multicodec); } else { log.error( "Light Push codec not included in dial codec: protocol not mounted locally" @@ -216,6 +216,7 @@ export class WakuNode implements IWaku { this._nodeStateLock = true; await this.libp2p.start(); + await this.filter?.start(); this.connectionManager.start(); this.peerManager.start(); this.healthIndicator.start(); @@ -231,6 +232,7 @@ export class WakuNode implements IWaku { this._nodeStateLock = true; this.lightPush?.stop(); + await this.filter?.stop(); this.healthIndicator.stop(); this.peerManager.stop(); this.connectionManager.stop(); diff --git a/packages/sds/CHANGELOG.md b/packages/sds/CHANGELOG.md index 5cc915668a..1573d584ec 100644 --- a/packages/sds/CHANGELOG.md +++ b/packages/sds/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.0.7](https://github.com/waku-org/js-waku/compare/sds-v0.0.6...sds-v0.0.7) (2025-09-20) + + +### Features + +* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f)) +* Introduce reliable channels ([#2526](https://github.com/waku-org/js-waku/issues/2526)) ([4d5c152](https://github.com/waku-org/js-waku/commit/4d5c152f5b1b1c241bbe7bb96d13d927a6f7550e)) + + +### Bug Fixes + +* (sds) ensure incoming messages have their retrieval hint stored ([#2604](https://github.com/waku-org/js-waku/issues/2604)) ([914beb6](https://github.com/waku-org/js-waku/commit/914beb6531a84f8c11ca951721225d47f9e6c285)) +* Make health events emission consistent ([#2570](https://github.com/waku-org/js-waku/issues/2570)) 
([c8dfdb1](https://github.com/waku-org/js-waku/commit/c8dfdb1ace8f0f8f668d8f2bb6e0eaed90041782)) +* **sds:** Initialize lamport timestamp with current time ([#2610](https://github.com/waku-org/js-waku/issues/2610)) ([cb3af8c](https://github.com/waku-org/js-waku/commit/cb3af8cd4d820e20de1e342d40dbf85bea75e16d)) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/proto bumped from ^0.0.13 to ^0.0.14 + * @waku/utils bumped from ^0.0.26 to ^0.0.27 + ## [0.0.6](https://github.com/waku-org/js-waku/compare/sds-v0.0.5...sds-v0.0.6) (2025-08-14) diff --git a/packages/sds/package.json b/packages/sds/package.json index 5e27d32b53..0e59d4fa41 100644 --- a/packages/sds/package.json +++ b/packages/sds/package.json @@ -1,6 +1,6 @@ { "name": "@waku/sds", - "version": "0.0.6", + "version": "0.0.7", "description": "Scalable Data Sync implementation for the browser. Based on https://github.com/vacp2p/rfc-index/blob/main/vac/raw/sds.md", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -62,8 +62,8 @@ "dependencies": { "@libp2p/interface": "2.10.4", "@noble/hashes": "^1.7.1", - "@waku/proto": "^0.0.13", - "@waku/utils": "^0.0.26", + "@waku/proto": "^0.0.14", + "@waku/utils": "^0.0.27", "chai": "^5.1.2", "lodash": "^4.17.21" }, diff --git a/packages/sds/src/message_channel/lamport_timestamp.spec.ts b/packages/sds/src/message_channel/lamport_timestamp.spec.ts new file mode 100644 index 0000000000..57aec2666e --- /dev/null +++ b/packages/sds/src/message_channel/lamport_timestamp.spec.ts @@ -0,0 +1,56 @@ +import { expect } from "chai"; + +import { lamportTimestampIncrement } from "./message_channel.js"; + +describe("lamportTimestampIncrement", () => { + it("should increment timestamp by 1 when current time is not greater", () => { + const futureTimestamp = BigInt(Date.now()) + 1000n; + const result = lamportTimestampIncrement(futureTimestamp); + expect(result).to.equal(futureTimestamp + 1n); + }); + + it("should use current 
time when it's greater than incremented timestamp", () => { + const pastTimestamp = BigInt(Date.now()) - 1000n; + const result = lamportTimestampIncrement(pastTimestamp); + const now = BigInt(Date.now()); + // Result should be at least as large as now (within small tolerance for test execution time) + expect(result >= now - 10n).to.be.true; + expect(result <= now + 10n).to.be.true; + }); + + it("should handle timestamp equal to current time", () => { + const currentTimestamp = BigInt(Date.now()); + const result = lamportTimestampIncrement(currentTimestamp); + // Should increment by 1 since now is likely not greater than current + 1 + expect(result >= currentTimestamp + 1n).to.be.true; + }); + + it("should ensure monotonic increase", () => { + let timestamp = BigInt(Date.now()) + 5000n; + const results: bigint[] = []; + + for (let i = 0; i < 5; i++) { + timestamp = lamportTimestampIncrement(timestamp); + results.push(timestamp); + } + + // Verify all timestamps are strictly increasing + for (let i = 1; i < results.length; i++) { + expect(results[i] > results[i - 1]).to.be.true; + } + }); + + it("should handle very large timestamps", () => { + const largeTimestamp = BigInt(Number.MAX_SAFE_INTEGER) * 1000n; + const result = lamportTimestampIncrement(largeTimestamp); + expect(result).to.equal(largeTimestamp + 1n); + }); + + it("should jump to current time when timestamp is far in the past", () => { + const veryOldTimestamp = 1000n; // Very old timestamp (1 second after epoch) + const result = lamportTimestampIncrement(veryOldTimestamp); + const now = BigInt(Date.now()); + expect(result >= now - 10n).to.be.true; + expect(result <= now + 10n).to.be.true; + }); +}); diff --git a/packages/sds/src/message_channel/message.spec.ts b/packages/sds/src/message_channel/message.spec.ts index 37dfd5db28..680bf5cdb5 100644 --- a/packages/sds/src/message_channel/message.spec.ts +++ b/packages/sds/src/message_channel/message.spec.ts @@ -18,7 +18,7 @@ describe("Message serialization", 
() => { "my-channel", "me", [], - 0, + 0n, bloomFilter.toBytes(), undefined ); @@ -42,7 +42,7 @@ describe("Message serialization", () => { "my-channel", "me", [{ messageId: depMessageId, retrievalHint: depRetrievalHint }], - 0, + 0n, undefined, undefined ); @@ -50,7 +50,7 @@ describe("Message serialization", () => { const bytes = message.encode(); const decMessage = Message.decode(bytes); - expect(decMessage.causalHistory).to.deep.equal([ + expect(decMessage!.causalHistory).to.deep.equal([ { messageId: depMessageId, retrievalHint: depRetrievalHint } ]); }); @@ -63,7 +63,7 @@ describe("ContentMessage comparison with < operator", () => { "channel", "sender", [], - 100, // Lower timestamp + 100n, // Lower timestamp undefined, new Uint8Array([1]) ); @@ -73,7 +73,7 @@ describe("ContentMessage comparison with < operator", () => { "channel", "sender", [], - 200, // Higher timestamp + 200n, // Higher timestamp undefined, new Uint8Array([2]) ); @@ -89,7 +89,7 @@ describe("ContentMessage comparison with < operator", () => { "channel", "sender", [], - 100, // Same timestamp + 100n, // Same timestamp undefined, new Uint8Array([1]) ); @@ -99,7 +99,7 @@ describe("ContentMessage comparison with < operator", () => { "channel", "sender", [], - 100, // Same timestamp + 100n, // Same timestamp undefined, new Uint8Array([2]) ); diff --git a/packages/sds/src/message_channel/message.ts b/packages/sds/src/message_channel/message.ts index eeb5c732d2..78b99f9006 100644 --- a/packages/sds/src/message_channel/message.ts +++ b/packages/sds/src/message_channel/message.ts @@ -1,17 +1,20 @@ import { proto_sds_message } from "@waku/proto"; +import { Logger } from "@waku/utils"; export type MessageId = string; export type HistoryEntry = proto_sds_message.HistoryEntry; export type ChannelId = string; export type SenderId = string; +const log = new Logger("sds:message"); + export class Message implements proto_sds_message.SdsMessage { public constructor( public messageId: string, public channelId: 
string, public senderId: string, public causalHistory: proto_sds_message.HistoryEntry[], - public lamportTimestamp?: number | undefined, + public lamportTimestamp?: bigint | undefined, public bloomFilter?: Uint8Array | undefined, public content?: Uint8Array | undefined, /** @@ -24,25 +27,64 @@ export class Message implements proto_sds_message.SdsMessage { return proto_sds_message.SdsMessage.encode(this); } - public static decode(data: Uint8Array): Message { - const { - messageId, - channelId, - senderId, - causalHistory, - lamportTimestamp, - bloomFilter, - content - } = proto_sds_message.SdsMessage.decode(data); - return new Message( - messageId, - channelId, - senderId, - causalHistory, - lamportTimestamp, - bloomFilter, - content - ); + public static decode( + data: Uint8Array + ): undefined | ContentMessage | SyncMessage | EphemeralMessage { + try { + const { + messageId, + channelId, + senderId, + causalHistory, + lamportTimestamp, + bloomFilter, + content + } = proto_sds_message.SdsMessage.decode(data); + + if (testContentMessage({ lamportTimestamp, content })) { + return new ContentMessage( + messageId, + channelId, + senderId, + causalHistory, + lamportTimestamp!, + bloomFilter, + content! + ); + } + + if (testEphemeralMessage({ lamportTimestamp, content })) { + return new EphemeralMessage( + messageId, + channelId, + senderId, + causalHistory, + undefined, + bloomFilter, + content! 
+ ); + } + + if (testSyncMessage({ lamportTimestamp, content })) { + return new SyncMessage( + messageId, + channelId, + senderId, + causalHistory, + lamportTimestamp!, + bloomFilter, + undefined + ); + } + log.error( + "message received was of unknown type", + lamportTimestamp, + content + ); + } catch (err) { + log.error("failed to decode sds message", err); + } + return undefined; } } @@ -52,7 +94,7 @@ export class SyncMessage extends Message { public channelId: string, public senderId: string, public causalHistory: proto_sds_message.HistoryEntry[], - public lamportTimestamp: number, + public lamportTimestamp: bigint, public bloomFilter: Uint8Array | undefined, public content: undefined, /** @@ -73,14 +115,21 @@ export class SyncMessage extends Message { } } +function testSyncMessage(message: { + lamportTimestamp?: bigint; + content?: Uint8Array; +}): boolean { + return Boolean( + "lamportTimestamp" in message && + typeof message.lamportTimestamp === "bigint" && + (message.content === undefined || message.content.length === 0) + ); +} + export function isSyncMessage( message: Message | ContentMessage | SyncMessage | EphemeralMessage ): message is SyncMessage { - return Boolean( - "lamportTimestamp" in message && - typeof message.lamportTimestamp === "number" && - (message.content === undefined || message.content.length === 0) - ); + return testSyncMessage(message); } export class EphemeralMessage extends Message { @@ -116,6 +165,13 @@ export class EphemeralMessage extends Message { export function isEphemeralMessage( message: Message | ContentMessage | SyncMessage | EphemeralMessage ): message is EphemeralMessage { + return testEphemeralMessage(message); +} + +function testEphemeralMessage(message: { + lamportTimestamp?: bigint; + content?: Uint8Array; +}): boolean { return Boolean( message.lamportTimestamp === undefined && "content" in message && @@ -130,7 +186,7 @@ export class ContentMessage extends Message { public channelId: string, public senderId: string, 
public causalHistory: proto_sds_message.HistoryEntry[], - public lamportTimestamp: number, + public lamportTimestamp: bigint, public bloomFilter: Uint8Array | undefined, public content: Uint8Array, /** @@ -166,9 +222,16 @@ export class ContentMessage extends Message { export function isContentMessage( message: Message | ContentMessage ): message is ContentMessage { + return testContentMessage(message); +} + +function testContentMessage(message: { + lamportTimestamp?: bigint; + content?: Uint8Array; +}): message is { lamportTimestamp: bigint; content: Uint8Array } { return Boolean( "lamportTimestamp" in message && - typeof message.lamportTimestamp === "number" && + typeof message.lamportTimestamp === "bigint" && message.content && message.content.length ); diff --git a/packages/sds/src/message_channel/message_channel.spec.ts b/packages/sds/src/message_channel/message_channel.spec.ts index 6132f469d9..91184f04d8 100644 --- a/packages/sds/src/message_channel/message_channel.spec.ts +++ b/packages/sds/src/message_channel/message_channel.spec.ts @@ -40,7 +40,7 @@ const sendMessage = async ( payload: Uint8Array, callback: (message: ContentMessage) => Promise<{ success: boolean }> ): Promise => { - await channel.pushOutgoingMessage(payload, callback); + channel.pushOutgoingMessage(payload, callback); await channel.processTasks(); }; @@ -75,7 +75,7 @@ describe("MessageChannel", function () { const timestampBefore = channelA["lamportTimestamp"]; await sendMessage(channelA, utf8ToBytes("message"), callback); const timestampAfter = channelA["lamportTimestamp"]; - expect(timestampAfter).to.equal(timestampBefore + 1); + expect(timestampAfter).to.equal(timestampBefore + 1n); }); it("should push the message to the outgoing buffer", async () => { @@ -95,7 +95,7 @@ describe("MessageChannel", function () { it("should insert message id into causal history", async () => { const payload = utf8ToBytes("message"); - const expectedTimestamp = channelA["lamportTimestamp"] + 1; + const 
expectedTimestamp = channelA["lamportTimestamp"] + 1n; const messageId = MessageChannel.getMessageId(payload); await sendMessage(channelA, payload, callback); const messageIdLog = channelA["localHistory"] as ILocalHistory; @@ -154,8 +154,8 @@ describe("MessageChannel", function () { }); // Causal history should only contain the last N messages as defined by causalHistorySize - const causalHistory = outgoingBuffer[outgoingBuffer.length - 1] - .causalHistory as HistoryEntry[]; + const causalHistory = + outgoingBuffer[outgoingBuffer.length - 1].causalHistory; expect(causalHistory.length).to.equal(causalHistorySize); const expectedCausalHistory = messages @@ -181,25 +181,35 @@ describe("MessageChannel", function () { return { success: true }; }); const timestampAfter = channelA["lamportTimestamp"]; - expect(timestampAfter).to.equal(timestampBefore + 1); + expect(timestampAfter).to.equal(timestampBefore + 1n); }); - it("should update lamport timestamp if greater than current timestamp and dependencies are met", async () => { + // TODO: test is failing in CI, investigate in https://github.com/waku-org/js-waku/issues/2648 + it.skip("should update lamport timestamp if greater than current timestamp and dependencies are met", async () => { + const testChannelA = new MessageChannel(channelId, "alice"); + const testChannelB = new MessageChannel(channelId, "bob"); + + const timestampBefore = testChannelA["lamportTimestamp"]; + for (const m of messagesA) { - await sendMessage(channelA, utf8ToBytes(m), callback); + await sendMessage(testChannelA, utf8ToBytes(m), callback); } for (const m of messagesB) { - await sendMessage(channelB, utf8ToBytes(m), async (message) => { - await receiveMessage(channelA, message); + await sendMessage(testChannelB, utf8ToBytes(m), async (message) => { + await receiveMessage(testChannelA, message); return { success: true }; }); } - const timestampAfter = channelA["lamportTimestamp"]; - expect(timestampAfter).to.equal(messagesB.length); + const 
timestampAfter = testChannelA["lamportTimestamp"]; + expect(timestampAfter - timestampBefore).to.equal( + BigInt(messagesB.length) + ); }); - it("should maintain proper timestamps if all messages received", async () => { - let timestamp = 0; + // TODO: test is failing in CI, investigate in https://github.com/waku-org/js-waku/issues/2648 + it.skip("should maintain proper timestamps if all messages received", async () => { + const aTimestampBefore = channelA["lamportTimestamp"]; + let timestamp = channelB["lamportTimestamp"]; for (const m of messagesA) { await sendMessage(channelA, utf8ToBytes(m), async (message) => { timestamp++; @@ -219,7 +229,9 @@ describe("MessageChannel", function () { } const expectedLength = messagesA.length + messagesB.length; - expect(channelA["lamportTimestamp"]).to.equal(expectedLength); + expect(channelA["lamportTimestamp"]).to.equal( + aTimestampBefore + BigInt(expectedLength) + ); expect(channelA["lamportTimestamp"]).to.equal( channelB["lamportTimestamp"] ); @@ -283,7 +295,7 @@ describe("MessageChannel", function () { channelA.channelId, "not-alice", [], - 1, + 1n, undefined, payload, testRetrievalHint @@ -292,14 +304,12 @@ describe("MessageChannel", function () { ); const localHistory = channelA["localHistory"] as ILocalHistory; - console.log("localHistory", localHistory); expect(localHistory.length).to.equal(1); // Find the message in local history const historyEntry = localHistory.find( (entry) => entry.messageId === messageId ); - console.log("history entry", historyEntry); expect(historyEntry).to.exist; expect(historyEntry!.retrievalHint).to.deep.equal(testRetrievalHint); }); @@ -314,6 +324,8 @@ describe("MessageChannel", function () { const message2Id = MessageChannel.getMessageId(message2Payload); const message3Id = MessageChannel.getMessageId(message3Payload); + const startTimestamp = channelA["lamportTimestamp"]; + // Send own message first (timestamp will be 1) await sendMessage(channelA, message1Payload, callback); @@ -325,7 
+337,7 @@ describe("MessageChannel", function () { channelA.channelId, "bob", [], - 3, // Higher timestamp + startTimestamp + 3n, // Higher timestamp undefined, message3Payload ) @@ -339,7 +351,7 @@ describe("MessageChannel", function () { channelA.channelId, "carol", [], - 2, // Middle timestamp + startTimestamp + 2n, // Middle timestamp undefined, message2Payload ) @@ -352,21 +364,27 @@ describe("MessageChannel", function () { const first = localHistory.findIndex( ({ messageId, lamportTimestamp }) => { - return messageId === message1Id && lamportTimestamp === 1; + return ( + messageId === message1Id && lamportTimestamp === startTimestamp + 1n + ); } ); expect(first).to.eq(0); const second = localHistory.findIndex( ({ messageId, lamportTimestamp }) => { - return messageId === message2Id && lamportTimestamp === 2; + return ( + messageId === message2Id && lamportTimestamp === startTimestamp + 2n + ); } ); expect(second).to.eq(1); const third = localHistory.findIndex( ({ messageId, lamportTimestamp }) => { - return messageId === message3Id && lamportTimestamp === 3; + return ( + messageId === message3Id && lamportTimestamp === startTimestamp + 3n + ); } ); expect(third).to.eq(2); @@ -388,7 +406,7 @@ describe("MessageChannel", function () { channelA.channelId, "bob", [], - 5, // Same timestamp + 5n, // Same timestamp undefined, message2Payload ) @@ -401,7 +419,7 @@ describe("MessageChannel", function () { channelA.channelId, "carol", [], - 5, // Same timestamp + 5n, // Same timestamp undefined, message1Payload ) @@ -416,14 +434,14 @@ describe("MessageChannel", function () { const first = localHistory.findIndex( ({ messageId, lamportTimestamp }) => { - return messageId === expectedOrder[0] && lamportTimestamp == 5; + return messageId === expectedOrder[0] && lamportTimestamp == 5n; } ); expect(first).to.eq(0); const second = localHistory.findIndex( ({ messageId, lamportTimestamp }) => { - return messageId === expectedOrder[1] && lamportTimestamp == 5; + return messageId 
=== expectedOrder[1] && lamportTimestamp == 5n; } ); expect(second).to.eq(1); @@ -596,7 +614,6 @@ describe("MessageChannel", function () { it("First message is missed, then re-sent, should be ack'd", async () => { const firstMessage = utf8ToBytes("first message"); const firstMessageId = MessageChannel.getMessageId(firstMessage); - console.log("firstMessage", firstMessageId); let messageAcked = false; channelA.addEventListener( MessageChannelEvent.OutMessageAcknowledged, @@ -630,11 +647,12 @@ describe("MessageChannel", function () { }); // And be sends a sync message - await channelB.pushOutgoingSyncMessage(async (message) => { + const res = await channelB.pushOutgoingSyncMessage(async (message) => { await receiveMessage(channelA, message); return true; }); + expect(res).to.be.true; expect(messageAcked).to.be.true; }); }); @@ -1072,17 +1090,41 @@ describe("MessageChannel", function () { causalHistorySize: 2 }); channelB = new MessageChannel(channelId, "bob", { causalHistorySize: 2 }); + const message = utf8ToBytes("first message in channel"); + channelA["localHistory"].push( + new ContentMessage( + MessageChannel.getMessageId(message), + "MyChannel", + "alice", + [], + 1n, + undefined, + message + ) + ); }); it("should be sent with empty content", async () => { - await channelA.pushOutgoingSyncMessage(async (message) => { + const res = await channelA.pushOutgoingSyncMessage(async (message) => { expect(message.content).to.be.undefined; return true; }); + expect(res).to.be.true; + }); + + it("should not be sent when there is no history", async () => { + const channelC = new MessageChannel(channelId, "carol", { + causalHistorySize: 2 + }); + const res = await channelC.pushOutgoingSyncMessage(async (_msg) => { + throw "callback was called when it's not expected"; + }); + expect(res).to.be.false; }); it("should not be added to outgoing buffer, bloom filter, or local log", async () => { - await channelA.pushOutgoingSyncMessage(); + const res = await 
channelA.pushOutgoingSyncMessage(); + expect(res).to.be.true; const outgoingBuffer = channelA["outgoingBuffer"] as Message[]; expect(outgoingBuffer.length).to.equal(0); @@ -1093,15 +1135,16 @@ describe("MessageChannel", function () { ).to.equal(false); const localLog = channelA["localHistory"]; - expect(localLog.length).to.equal(0); + expect(localLog.length).to.equal(1); // beforeEach adds one message }); it("should not be delivered", async () => { const timestampBefore = channelB["lamportTimestamp"]; - await channelA.pushOutgoingSyncMessage(async (message) => { + const res = await channelA.pushOutgoingSyncMessage(async (message) => { await receiveMessage(channelB, message); return true; }); + expect(res).to.be.true; const timestampAfter = channelB["lamportTimestamp"]; expect(timestampAfter).to.equal(timestampBefore); @@ -1115,20 +1158,23 @@ describe("MessageChannel", function () { }); it("should update ack status of messages in outgoing buffer", async () => { + const channelC = new MessageChannel(channelId, "carol", { + causalHistorySize: 2 + }); for (const m of messagesA) { - await sendMessage(channelA, utf8ToBytes(m), async (message) => { + await sendMessage(channelC, utf8ToBytes(m), async (message) => { await receiveMessage(channelB, message); return { success: true }; }); } await sendSyncMessage(channelB, async (message) => { - await receiveMessage(channelA, message); + await receiveMessage(channelC, message); return true; }); - const causalHistorySize = channelA["causalHistorySize"]; - const outgoingBuffer = channelA["outgoingBuffer"] as Message[]; + const causalHistorySize = channelC["causalHistorySize"]; + const outgoingBuffer = channelC["outgoingBuffer"] as Message[]; expect(outgoingBuffer.length).to.equal( messagesA.length - causalHistorySize ); diff --git a/packages/sds/src/message_channel/message_channel.ts b/packages/sds/src/message_channel/message_channel.ts index a9cd980a71..3df21f160a 100644 --- a/packages/sds/src/message_channel/message_channel.ts 
+++ b/packages/sds/src/message_channel/message_channel.ts @@ -56,7 +56,7 @@ export type ILocalHistory = Pick< export class MessageChannel extends TypedEventEmitter { public readonly channelId: ChannelId; public readonly senderId: SenderId; - private lamportTimestamp: number; + private lamportTimestamp: bigint; private filter: DefaultBloomFilter; private outgoingBuffer: ContentMessage[]; private possibleAcks: Map; @@ -95,7 +95,8 @@ export class MessageChannel extends TypedEventEmitter { super(); this.channelId = channelId; this.senderId = senderId; - this.lamportTimestamp = 0; + // Initialize channel lamport timestamp to current time in milliseconds. + this.lamportTimestamp = BigInt(Date.now()); this.filter = new DefaultBloomFilter(DEFAULT_BLOOM_FILTER_OPTIONS); this.outgoingBuffer = []; this.possibleAcks = new Map(); @@ -174,13 +175,13 @@ export class MessageChannel extends TypedEventEmitter { * * @throws Error if the payload is empty */ - public async pushOutgoingMessage( + public pushOutgoingMessage( payload: Uint8Array, callback?: (processedMessage: ContentMessage) => Promise<{ success: boolean; retrievalHint?: Uint8Array; }> - ): Promise { + ): void { if (!payload || !payload.length) { throw Error("Only messages with valid payloads are allowed"); } @@ -285,6 +286,7 @@ export class MessageChannel extends TypedEventEmitter { } log.info( this.senderId, + "message from incoming buffer", message.messageId, "is missing dependencies", missingDependencies.map(({ messageId, retrievalHint }) => { @@ -366,7 +368,7 @@ export class MessageChannel extends TypedEventEmitter { public async pushOutgoingSyncMessage( callback?: (message: SyncMessage) => Promise ): Promise { - this.lamportTimestamp++; + this.lamportTimestamp = lamportTimestampIncrement(this.lamportTimestamp); const message = new SyncMessage( // does not need to be secure randomness `sync-${Math.random().toString(36).substring(2)}`, @@ -382,6 +384,14 @@ export class MessageChannel extends TypedEventEmitter { 
undefined ); + if (!message.causalHistory || message.causalHistory.length === 0) { + log.info( + this.senderId, + "no causal history in sync message, aborting sending" + ); + return false; + } + if (callback) { try { await callback(message); @@ -398,7 +408,8 @@ export class MessageChannel extends TypedEventEmitter { throw error; } } - return false; + // No problem encountered so returning true + return true; } private _pushIncomingMessage(message: Message): void { @@ -470,10 +481,15 @@ export class MessageChannel extends TypedEventEmitter { this.timeReceived.set(message.messageId, Date.now()); log.info( this.senderId, + "new incoming message", message.messageId, "is missing dependencies", missingDependencies.map((ch) => ch.messageId) ); + + this.safeSendEvent(MessageChannelEvent.InMessageMissing, { + detail: Array.from(missingDependencies) + }); } else { if (isContentMessage(message) && this.deliverMessage(message)) { this.safeSendEvent(MessageChannelEvent.InMessageDelivered, { @@ -518,7 +534,7 @@ export class MessageChannel extends TypedEventEmitter { retrievalHint?: Uint8Array; }> ): Promise { - this.lamportTimestamp++; + this.lamportTimestamp = lamportTimestampIncrement(this.lamportTimestamp); const messageId = MessageChannel.getMessageId(payload); @@ -716,3 +732,12 @@ export class MessageChannel extends TypedEventEmitter { }); } } + +export function lamportTimestampIncrement(lamportTimestamp: bigint): bigint { + const now = BigInt(Date.now()); + lamportTimestamp++; + if (now > lamportTimestamp) { + return now; + } + return lamportTimestamp; +} diff --git a/packages/tests/src/lib/service_node.ts b/packages/tests/src/lib/service_node.ts index a71526f9dc..f16fad2a19 100644 --- a/packages/tests/src/lib/service_node.ts +++ b/packages/tests/src/lib/service_node.ts @@ -323,7 +323,7 @@ export class ServiceNode { this.checkProcess(); return this.restCall( - "/relay/v1/subscriptions", + "/relay/v1/auto/subscriptions", "POST", contentTopics, async (response) => 
response.status === 200 diff --git a/packages/tests/tests/light-push/index.node.spec.ts b/packages/tests/tests/light-push/index.node.spec.ts index 6733c2dbb9..755dc178ea 100644 --- a/packages/tests/tests/light-push/index.node.spec.ts +++ b/packages/tests/tests/light-push/index.node.spec.ts @@ -1,5 +1,5 @@ import { createEncoder } from "@waku/core"; -import { IRateLimitProof, LightNode, ProtocolError } from "@waku/interfaces"; +import { IRateLimitProof, LightNode, LightPushError } from "@waku/interfaces"; import { utf8ToBytes } from "@waku/sdk"; import { expect } from "chai"; @@ -21,9 +21,9 @@ import { TestRoutingInfo } from "./utils.js"; -const runTests = (strictNodeCheck: boolean): void => { +const runTests = (strictNodeCheck: boolean, useLegacy: boolean): void => { const numServiceNodes = 2; - describe(`Waku Light Push: Multiple Nodes: Strict Check: ${strictNodeCheck}`, function () { + describe(`Waku Light Push (legacy=${useLegacy ? "v2" : "v3"}): Multiple Nodes: Strict Check: ${strictNodeCheck}`, function () { // Set the timeout for all tests in this suite. 
Can be overwritten at test level this.timeout(15000); let waku: LightNode; @@ -36,7 +36,8 @@ const runTests = (strictNodeCheck: boolean): void => { { lightpush: true, filter: true }, strictNodeCheck, numServiceNodes, - true + true, + { lightPush: { useLegacy } } ); }); @@ -95,7 +96,7 @@ const runTests = (strictNodeCheck: boolean): void => { expect(pushResponse.successes.length).to.eq(0); expect(pushResponse.failures?.map((failure) => failure.error)).to.include( - ProtocolError.EMPTY_PAYLOAD + LightPushError.EMPTY_PAYLOAD ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( @@ -174,7 +175,7 @@ const runTests = (strictNodeCheck: boolean): void => { expect(pushResponse.successes.length).to.eq(0); expect(pushResponse.failures?.map((failure) => failure.error)).to.include( - ProtocolError.REMOTE_PEER_REJECTED + LightPushError.REMOTE_PEER_REJECTED ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( false @@ -248,7 +249,7 @@ const runTests = (strictNodeCheck: boolean): void => { }); expect(pushResponse.successes.length).to.eq(0); expect(pushResponse.failures?.map((failure) => failure.error)).to.include( - ProtocolError.SIZE_TOO_BIG + LightPushError.SIZE_TOO_BIG ); expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq( false @@ -257,4 +258,6 @@ const runTests = (strictNodeCheck: boolean): void => { }); }; -[true, false].map(runTests); +[true, false].forEach((strictNodeCheck) => { + [true, false].forEach((legacy) => runTests(strictNodeCheck, legacy)); +}); diff --git a/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts b/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts index a71218faf0..5bafa45c71 100644 --- a/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts +++ b/packages/tests/tests/light-push/multiple_pubsub.node.spec.ts @@ -1,5 +1,5 @@ import { createEncoder } from "@waku/core"; -import { LightNode, Protocols } from "@waku/interfaces"; +import { IWaku, Protocols } from 
"@waku/interfaces"; import { createRoutingInfo } from "@waku/utils"; import { utf8ToBytes } from "@waku/utils/bytes"; import { expect } from "chai"; @@ -28,7 +28,7 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () { this.timeout(30000); const numServiceNodes = 2; - let waku: LightNode; + let waku: IWaku; let serviceNodes: ServiceNodesFleet; const customContentTopic2 = "/test/2/waku-light-push/utf8"; @@ -48,6 +48,7 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () { { lightpush: true, filter: true, + relay: true, contentTopic: [TestEncoder.contentTopic, customEncoder2.contentTopic] }, false, @@ -60,45 +61,56 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () { await teardownNodesWithRedundancy(serviceNodes, waku); }); - it("Subscribe and receive messages on 2 different pubsubtopics", async function () { - if (customRoutingInfo2.pubsubTopic === TestEncoder.pubsubTopic) - throw "Invalid test, both encoder uses same shard"; + [true, false].forEach((useLegacy) => { + it(`Subscribe and receive messages on 2 different pubsubtopics with ${useLegacy ? 
"v2" : "v3"} protocol`, async function () { + if (customRoutingInfo2.pubsubTopic === TestEncoder.pubsubTopic) + throw "Invalid test, both encoder uses same shard"; - const pushResponse1 = await waku.lightPush.send(TestEncoder, { - payload: utf8ToBytes("M1") - }); - const pushResponse2 = await waku.lightPush.send(customEncoder2, { - payload: utf8ToBytes("M2") - }); + const pushResponse1 = await waku.lightPush!.send( + TestEncoder, + { + payload: utf8ToBytes("M1") + }, + { useLegacy } + ); - expect(pushResponse1.successes.length).to.eq(numServiceNodes); - expect(pushResponse2.successes.length).to.eq(numServiceNodes); + const pushResponse2 = await waku.lightPush!.send( + customEncoder2, + { + payload: utf8ToBytes("M2") + }, + { useLegacy } + ); - const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]); - const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]); + expect(pushResponse1?.successes.length).to.eq(numServiceNodes); + expect(pushResponse2?.successes.length).to.eq(numServiceNodes); - expect( - await messageCollector1.waitForMessagesAutosharding(1, { - contentTopic: TestEncoder.contentTopic - }) - ).to.eq(true); + const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]); + const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]); - expect( - await messageCollector2.waitForMessagesAutosharding(1, { - contentTopic: customEncoder2.contentTopic - }) - ).to.eq(true); + expect( + await messageCollector1.waitForMessagesAutosharding(1, { + contentTopic: TestEncoder.contentTopic + }) + ).to.eq(true); - messageCollector1.verifyReceivedMessage(0, { - expectedMessageText: "M1", - expectedContentTopic: TestEncoder.contentTopic, - expectedPubsubTopic: TestEncoder.pubsubTopic - }); + expect( + await messageCollector2.waitForMessagesAutosharding(1, { + contentTopic: customEncoder2.contentTopic + }) + ).to.eq(true); - messageCollector2.verifyReceivedMessage(0, { - expectedMessageText: "M2", - expectedContentTopic: 
customEncoder2.contentTopic, - expectedPubsubTopic: customEncoder2.pubsubTopic + messageCollector1.verifyReceivedMessage(0, { + expectedMessageText: "M1", + expectedContentTopic: TestEncoder.contentTopic, + expectedPubsubTopic: TestEncoder.pubsubTopic + }); + + messageCollector2.verifyReceivedMessage(0, { + expectedMessageText: "M2", + expectedContentTopic: customEncoder2.contentTopic, + expectedPubsubTopic: customEncoder2.pubsubTopic + }); }); }); @@ -122,10 +134,10 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () { const messageCollector2 = new MessageCollector(nwaku2); - await waku.lightPush.send(TestEncoder, { + await waku.lightPush!.send(TestEncoder, { payload: utf8ToBytes("M1") }); - await waku.lightPush.send(customEncoder2, { + await waku.lightPush!.send(customEncoder2, { payload: utf8ToBytes("M2") }); diff --git a/packages/tests/tests/light-push/v2_interop.spec.ts b/packages/tests/tests/light-push/v2_interop.spec.ts new file mode 100644 index 0000000000..f67d4c1a3a --- /dev/null +++ b/packages/tests/tests/light-push/v2_interop.spec.ts @@ -0,0 +1,83 @@ +import { LightNode } from "@waku/interfaces"; +import { createLightNode, utf8ToBytes } from "@waku/sdk"; +import { expect } from "chai"; + +import { + afterEachCustom, + beforeEachCustom, + NOISE_KEY_2, + runMultipleNodes, + ServiceNodesFleet, + teardownNodesWithRedundancy +} from "../../src/index.js"; +import { DEFAULT_DISCOVERIES_ENABLED } from "../../src/lib/runNodes.js"; + +import { TestContentTopic, TestEncoder, TestRoutingInfo } from "./utils.js"; + +describe(`Waku Light Push V2 and V3 interop`, function () { + this.timeout(15000); + let waku: LightNode; + let waku2: LightNode; + let serviceNodes: ServiceNodesFleet; + + beforeEachCustom(this, async () => { + [serviceNodes, waku] = await runMultipleNodes( + this.ctx, + TestRoutingInfo, + { lightpush: true, filter: true, relay: true }, + true, + 2, + true + ); + + waku2 = await createLightNode({ + staticNoiseKey: NOISE_KEY_2, + 
libp2p: { + addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } + }, + networkConfig: TestRoutingInfo.networkConfig, + lightPush: { numPeersToUse: 1 }, + discovery: DEFAULT_DISCOVERIES_ENABLED + }); + + await waku2.dial(await serviceNodes.nodes[1].getMultiaddrWithId()); + }); + + afterEachCustom(this, async () => { + await teardownNodesWithRedundancy(serviceNodes, [waku, waku2]); + }); + + it(`Push messages througth V2 and V3 from 2 js-waku and receives`, async function () { + let pushResponse = await waku.lightPush.send( + TestEncoder, + { + payload: utf8ToBytes("v2") + }, + { useLegacy: true } + ); + expect(pushResponse.successes.length).to.eq(2); + + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true); + serviceNodes.messageCollector.verifyReceivedMessage(0, { + expectedMessageText: "v2", + expectedContentTopic: TestContentTopic, + expectedPubsubTopic: TestRoutingInfo.pubsubTopic + }); + + pushResponse = await waku2.lightPush.send( + TestEncoder, + { + payload: utf8ToBytes("v3") + }, + { useLegacy: false } + ); + expect(pushResponse.successes.length).to.eq(1); + + expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true); + serviceNodes.messageCollector.verifyReceivedMessage(0, { + expectedMessageText: "v3", + expectedContentTopic: TestContentTopic, + expectedPubsubTopic: TestRoutingInfo.pubsubTopic + }); + }); +}); diff --git a/packages/tests/tests/peer-cache/peer_cache.spec.ts b/packages/tests/tests/peer-cache/peer_cache.spec.ts index 28019d3e97..3839e3bd57 100644 --- a/packages/tests/tests/peer-cache/peer_cache.spec.ts +++ b/packages/tests/tests/peer-cache/peer_cache.spec.ts @@ -85,7 +85,7 @@ describe("Peer Cache Discovery", function () { waku = await createLightNode({ networkConfig: DefaultTestNetworkConfig, discovery: { - peerExchange: true, + peerExchange: false, peerCache: true }, peerCache: mockCache @@ -116,7 +116,7 @@ describe("Peer Cache Discovery", function () { networkConfig: DefaultTestNetworkConfig, 
bootstrapPeers: [(await nwaku2.getMultiaddrWithId()).toString()], discovery: { - peerExchange: true, + peerExchange: false, peerCache: true }, peerCache: mockCache diff --git a/packages/utils/CHANGELOG.md b/packages/utils/CHANGELOG.md index 79e99113cf..fcab723a80 100644 --- a/packages/utils/CHANGELOG.md +++ b/packages/utils/CHANGELOG.md @@ -12,6 +12,25 @@ * devDependencies * @waku/interfaces bumped from 0.0.16 to 0.0.17 +## [0.0.27](https://github.com/waku-org/js-waku/compare/utils-v0.0.26...utils-v0.0.27) (2025-09-20) + + +### Features + +* Introduce reliable channels ([#2526](https://github.com/waku-org/js-waku/issues/2526)) ([4d5c152](https://github.com/waku-org/js-waku/commit/4d5c152f5b1b1c241bbe7bb96d13d927a6f7550e)) + + +### Bug Fixes + +* Remove sharding circular dependency ([#2590](https://github.com/waku-org/js-waku/issues/2590)) ([78c856d](https://github.com/waku-org/js-waku/commit/78c856d0796a73848815b615bea24d3f5395da78)) + + +### Dependencies + +* The following workspace dependencies were updated + * dependencies + * @waku/interfaces bumped from 0.0.33 to 0.0.34 + ## [0.0.26](https://github.com/waku-org/js-waku/compare/utils-v0.0.25...utils-v0.0.26) (2025-08-14) diff --git a/packages/utils/package.json b/packages/utils/package.json index d335f07c94..be791ef342 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -1,6 +1,6 @@ { "name": "@waku/utils", - "version": "0.0.26", + "version": "0.0.27", "description": "Different utilities for Waku", "types": "./dist/index.d.ts", "module": "./dist/index.js", @@ -64,7 +64,7 @@ }, "dependencies": { "@noble/hashes": "^1.3.2", - "@waku/interfaces": "0.0.33", + "@waku/interfaces": "0.0.34", "chai": "^4.3.10", "debug": "^4.3.4", "uint8arrays": "^5.0.1" diff --git a/packages/utils/src/common/index.ts b/packages/utils/src/common/index.ts index 26573fe6ff..3197d5199b 100644 --- a/packages/utils/src/common/index.ts +++ b/packages/utils/src/common/index.ts @@ -7,3 +7,4 @@ export * from 
"./sharding/index.js"; export * from "./push_or_init_map.js"; export * from "./relay_shard_codec.js"; export * from "./delay.js"; +export * from "./mock_node.js"; diff --git a/packages/utils/src/common/mock_node.ts b/packages/utils/src/common/mock_node.ts new file mode 100644 index 0000000000..40472fea17 --- /dev/null +++ b/packages/utils/src/common/mock_node.ts @@ -0,0 +1,166 @@ +import { Peer, PeerId, Stream, TypedEventEmitter } from "@libp2p/interface"; +import { MultiaddrInput } from "@multiformats/multiaddr"; +import { + Callback, + CreateDecoderParams, + CreateEncoderParams, + HealthStatus, + IDecodedMessage, + IDecoder, + IEncoder, + IFilter, + ILightPush, + type IMessage, + IRelay, + ISendOptions, + IStore, + IWaku, + IWakuEventEmitter, + Libp2p, + LightPushSDKResult, + Protocols +} from "@waku/interfaces"; + +export type MockWakuEvents = { + ["new-message"]: CustomEvent; +}; + +export class MockWakuNode implements IWaku { + public relay?: IRelay; + public store?: IStore; + public filter?: IFilter; + public lightPush?: ILightPush; + public protocols: string[]; + + private readonly subscriptions: { + decoders: IDecoder[]; + callback: Callback; + }[]; + + public constructor( + private mockMessageEmitter?: TypedEventEmitter + ) { + this.protocols = []; + this.events = new TypedEventEmitter(); + this.subscriptions = []; + + this.lightPush = { + multicodec: [], + send: this._send.bind(this), + start(): void {}, + stop(): void {} + }; + + this.filter = { + start: async () => {}, + stop: async () => {}, + multicodec: "filter", + subscribe: this._subscribe.bind(this), + unsubscribe( + _decoders: IDecoder | IDecoder[] + ): Promise { + throw "Not implemented"; + }, + unsubscribeAll(): void { + throw "Not implemented"; + } + }; + } + + public get libp2p(): Libp2p { + throw "No libp2p on MockWakuNode"; + } + + private async _send( + encoder: IEncoder, + message: IMessage, + _sendOptions?: ISendOptions + ): Promise { + for (const { decoders, callback } of 
this.subscriptions) { + const protoMessage = await encoder.toProtoObj(message); + if (!protoMessage) throw "Issue in mock encoding message"; + for (const decoder of decoders) { + const decodedMessage = await decoder.fromProtoObj( + decoder.pubsubTopic, + protoMessage + ); + if (!decodedMessage) throw "Issue in mock decoding message"; + await callback(decodedMessage); + if (this.mockMessageEmitter) { + this.mockMessageEmitter.dispatchEvent( + new CustomEvent("new-message", { + detail: decodedMessage + }) + ); + } + } + } + return { + failures: [], + successes: [] + }; + } + + private async _subscribe( + decoders: IDecoder | IDecoder[], + callback: Callback + ): Promise { + this.subscriptions.push({ + decoders: Array.isArray(decoders) ? decoders : [decoders], + callback + }); + if (this.mockMessageEmitter) { + this.mockMessageEmitter.addEventListener("new-message", (event) => { + void callback(event.detail as unknown as T); + }); + } + return Promise.resolve(true); + } + + public events: IWakuEventEmitter; + + public get peerId(): PeerId { + throw "no peerId on MockWakuNode"; + } + public get health(): HealthStatus { + throw "no health on MockWakuNode"; + } + public dial( + _peer: PeerId | MultiaddrInput, + _protocols?: Protocols[] + ): Promise { + throw new Error("Method not implemented."); + } + public hangUp(_peer: PeerId | MultiaddrInput): Promise { + throw new Error("Method not implemented."); + } + public start(): Promise { + return Promise.resolve(); + } + public stop(): Promise { + throw new Error("Method not implemented."); + } + public waitForPeers( + _protocols?: Protocols[], + _timeoutMs?: number + ): Promise { + throw new Error("Method not implemented."); + } + public createDecoder( + _params: CreateDecoderParams + ): IDecoder { + throw new Error("Method not implemented."); + } + public createEncoder(_params: CreateEncoderParams): IEncoder { + throw new Error("Method not implemented."); + } + public isStarted(): boolean { + throw new Error("Method not 
implemented."); + } + public isConnected(): boolean { + throw new Error("Method not implemented."); + } + public getConnectedPeers(): Promise { + throw new Error("Method not implemented."); + } +}