Compare commits


12 Commits

Author SHA1 Message Date
Igor Sirotin
0ca94f3c8b
chore: update license files to comply with Logos licensing requirements (#2780) 2026-02-05 13:21:38 +00:00
Sasha
c8667058a6
fix: test dependencies (#2779)
* fix: test dependencies

* up

* up lock
2026-02-04 09:49:09 +01:00
Sasha
d4c6c9970d
chore: release master (#2735) 2026-01-19 09:16:02 +01:00
isit666
7816642fae
feat: add dialTimeout, change dialingQueue to Map (#2773) 2026-01-16 21:22:17 +01:00
Sasha
74ad13ba24
chore: rename repo to comply with logos (#2757)
* chore: rename repo to comply with logos

* up script

* up allure

* fix logos-messaging-allure-js name

* fix logos-messaging-allure-js name

* fix logos-messaging-allure-js name
2026-01-05 23:53:38 +01:00
Fabiana Cecin
9a1e9cecc5
fix: peer cache test failure (#2770)
* fix peer cache test

* simplify test fix

---------

Co-authored-by: Arseniy Klempner <arseniyk@status.im>
2025-12-23 16:51:15 -08:00
Danish Arora
ab237410f9
chore: enable relay when lightpush is used (#2762) 2025-12-23 16:24:36 -08:00
Arseniy Klempner
f2ad23ad43
feat(rln)!: generate contract types, migrate from ethers to viem (#2705)
* feat: use wagmi to generate contract types

* feat: migrate rln from ethers to viem

* fix: remove .gitmodules

* fix: update readme

* fix: refactor to use a single viem client object

* fix: update comments, tsconfig

* feat: remove membership event tracking

* fix: script name in package.json and readme

* fix: only allow linea sepolia

* fix: consolidate viem types, typed window

* fix: use viem to infer type of decoded event

* fix: use js for generate abi script

* feat: generate abi and build rln package as release condition

* fix: check that eth_requestAccounts returns an array

* fix: handle error messages

* fix: use https instead of git for cloning in script

* fix: add warning annotations for contract typings check

* fix: install deps for rln package before building

* fix: use pnpm when installing rln contracts

* fix: use workspace flag to run abi script

* fix: add ref to checkout action

* fix: include pnpm in ci
2025-12-01 17:32:35 -08:00
Hanno Cornelius
788f7e62c5
feat: incorporate sds-r into reliable channels (#2701)
* wip

* feat: integrate sds-r with message channels

* fix: fix implementation guide, remove unrelated claude file

* feat: integrate sds-r within reliable channels SDK

* fix: fix import, export

* fix: fix build errors, simplify parallel operation

* fix: sigh. this file has 9 lives

* fix: simplify more

* fix: disable repair if not part of retrieval strategy

* fix: remove dead code, simplify

* fix: improve repair loop

Co-authored-by: fryorcraken <110212804+fryorcraken@users.noreply.github.com>

* chore: make retrievalStrategy mandatory argument

* chore: add repair multiplier, safer checks

---------

Co-authored-by: fryorcraken <commits@fryorcraken.xyz>
Co-authored-by: fryorcraken <110212804+fryorcraken@users.noreply.github.com>
2025-11-21 15:03:48 +00:00
fryorcraken
e5f51d7df1
feat: Reliable Channel: Status Sync, overflow protection, stop TODOs (#2729)
* feat(sds): messages with lost deps are delivered

This is to re-enable participation in the SDS protocol. Meaning the
received message with missing dependencies becomes part of the causal
history, re-enabling acknowledgements.

* fix(sds): avoid overflow in message history storage

* feat(reliable-channel): Emit a "Synced" Status with message counts

Return a "synced" or "syncing" status on `ReliableChannel.status` that
let the developer know whether messages are missing, and if so, how many.

* fix: clean up subscriptions, intervals and timeouts when stopping

# Conflicts:
#	packages/sdk/src/reliable_channel/reliable_channel.ts

* chore: extract random timeout

* fix rebase

* revert listener changes

* typo

* Ensuring no inconsistency on missing message

* test: streamline, stop channels

* clear sync status sets when stopping channel

* prevent sync status event spam

* test: improve naming

* try/catch for callback

* encapsulate/simplify reliable channel API

* sanity checks

* test: ensure sync status cleanup
2025-11-16 08:57:12 +11:00
fryorcraken
84a6ea69cf
fix: cleanup routines on reliable channel and core protocols (#2733)
* fix: add stop methods to protocols to prevent event listener leaks

* fix: add abort signal support for graceful store query cancellation

* fix: call protocol stop methods in WakuNode.stop()

* fix: improve QueryOnConnect cleanup and abort signal handling

* fix: improve MissingMessageRetriever cleanup with abort signal

* fix: add stopAllRetries method to RetryManager for proper cleanup

* fix: implement comprehensive ReliableChannel stop() with proper cleanup

* fix: add active query tracking to QueryOnConnect and await its stop()

* fix: add stop() to IRelayAPI and IStore interfaces, implement in SDK wrappers

* align with usual naming (isStarted)

* remove unnecessary `await`

* test: `stop()` is now async

* chore: use more concise syntax

---------

Co-authored-by: Levente Kiss <levente.kiss@solarpunk.buzz>
2025-11-13 12:32:15 +11:00
049e564e89
ci: add missing jenkins lib 2025-11-06 18:02:33 +01:00
112 changed files with 4228 additions and 2764 deletions

@@ -104,6 +104,7 @@
   "reactjs",
   "recid",
   "rlnrelay",
+  "rlnv",
   "roadmap",
   "sandboxed",
   "scanf",
@@ -132,7 +133,9 @@
   "upgrader",
   "vacp",
   "varint",
+  "viem",
   "vkey",
+  "wagmi",
   "waku",
   "wakuconnect",
   "wakunode",

.github/CODEOWNERS (2 changed lines)

@@ -1 +1 @@
-* @waku-org/js-waku
+* @logos-messaging/js-waku

@@ -11,5 +11,5 @@ jobs:
     steps:
       - uses: actions/add-to-project@v0.5.0
         with:
-          project-url: https://github.com/orgs/waku-org/projects/2
+          project-url: https://github.com/orgs/logos-messaging/projects/2
           github-token: ${{ secrets.ADD_TO_PROJECT_20240815 }}

@@ -23,7 +23,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - uses: actions/setup-node@v3
         with:
@@ -38,7 +38,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - uses: actions/setup-node@v3
         with:
           node-version: ${{ env.NODE_JS }}
@@ -63,7 +63,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - uses: actions/setup-node@v3
         with:
           node-version: ${{ env.NODE_JS }}
@@ -104,7 +104,7 @@ jobs:
       - uses: actions/checkout@v3
        with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
        if: ${{ steps.release.outputs.releases_created }}
      - uses: actions/setup-node@v3
@@ -113,12 +113,44 @@
          node-version: ${{ env.NODE_JS }}
          registry-url: "https://registry.npmjs.org"
+      - uses: pnpm/action-setup@v4
+        if: ${{ steps.release.outputs.releases_created }}
+        with:
+          version: 9
       - run: npm install
         if: ${{ steps.release.outputs.releases_created }}
       - run: npm run build
         if: ${{ steps.release.outputs.releases_created }}
+      - name: Setup Foundry
+        if: ${{ steps.release.outputs.releases_created }}
+        uses: foundry-rs/foundry-toolchain@v1
+        with:
+          version: nightly
+      - name: Generate RLN contract ABIs
+        id: rln-abi
+        if: ${{ steps.release.outputs.releases_created }}
+        run: |
+          npm run setup:contract-abi -w @waku/rln || {
+            echo "::warning::Failed to generate contract ABIs, marking @waku/rln as private to skip publishing"
+            cd packages/rln
+            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
+            echo "failed=true" >> $GITHUB_OUTPUT
+          }
+      - name: Rebuild with new ABIs
+        if: ${{ steps.release.outputs.releases_created && steps.rln-abi.outputs.failed != 'true' }}
+        run: |
+          npm install -w packages/rln
+          npm run build -w @waku/rln || {
+            echo "::warning::Failed to build @waku/rln, marking as private to skip publishing"
+            cd packages/rln
+            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
+          }
       - run: npm run publish
         if: ${{ steps.release.outputs.releases_created }}
         env:

@@ -12,7 +12,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - uses: actions/setup-node@v3
         with:

@@ -16,17 +16,47 @@ jobs:
     steps:
       - uses: actions/checkout@v4
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
+          ref: ${{ github.ref }}
       - uses: actions/setup-node@v4
         with:
           node-version: ${{ env.NODE_JS }}
           registry-url: "https://registry.npmjs.org"
+      - uses: pnpm/action-setup@v4
+        with:
+          version: 9
       - run: npm install
       - run: npm run build
+      - name: Setup Foundry
+        uses: foundry-rs/foundry-toolchain@v1
+        with:
+          version: nightly
+      - name: Generate RLN contract ABIs
+        id: rln-abi
+        run: |
+          npm run setup:contract-abi -w @waku/rln || {
+            echo "::warning::Failed to generate contract ABIs, marking @waku/rln as private to skip publishing"
+            cd packages/rln
+            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
+            echo "failed=true" >> $GITHUB_OUTPUT
+          }
+      - name: Rebuild with new ABIs
+        if: steps.rln-abi.outputs.failed != 'true'
+        run: |
+          npm install -w packages/rln
+          npm run build -w @waku/rln || {
+            echo "::warning::Failed to build @waku/rln, marking as private to skip publishing"
+            cd packages/rln
+            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
+          }
       - run: npm run publish -- --tag next
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_JS_WAKU_PUBLISH }}
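
Both release workflows fall back to marking @waku/rln as private when ABI generation or its build fails, using an inline `node -e` one-liner. A more readable, standalone sketch of that fallback is shown below; the file name and package path are assumptions taken from the workflow, not a file that exists in the repository.

```typescript
// mark-rln-private.ts — hypothetical standalone equivalent of the inline `node -e`
// fallback above: set `private: true` so `npm publish` skips @waku/rln instead of
// failing the whole release. The package path is assumed from the workflow.
import { readFileSync, writeFileSync } from "fs";

const pkgPath = "packages/rln/package.json";
const pkg = JSON.parse(readFileSync(pkgPath, "utf8"));
pkg.private = true;
writeFileSync(pkgPath, JSON.stringify(pkg, null, 2));
```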

@@ -43,7 +43,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - name: Remove unwanted software
         uses: ./.github/actions/prune-vm
@@ -69,7 +69,7 @@ jobs:
         uses: actions/checkout@v3
         continue-on-error: true
         with:
-          repository: waku-org/allure-jswaku
+          repository: logos-messaging/logos-messaging-allure-js
           ref: gh-pages
           path: gh-pages
           token: ${{ env.GITHUB_TOKEN }}
@@ -89,7 +89,7 @@ jobs:
         uses: peaceiris/actions-gh-pages@v3
         with:
           personal_token: ${{ env.GITHUB_TOKEN }}
-          external_repository: waku-org/allure-jswaku
+          external_repository: logos-messaging/logos-messaging-allure-js
           publish_branch: gh-pages
           publish_dir: allure-history
@@ -125,4 +125,4 @@
           echo "## Run Information" >> $GITHUB_STEP_SUMMARY
           echo "- **NWAKU**: ${{ env.WAKUNODE_IMAGE }}" >> $GITHUB_STEP_SUMMARY
           echo "## Test Results" >> $GITHUB_STEP_SUMMARY
-          echo "Allure report will be available at: https://waku-org.github.io/allure-jswaku/${{ github.run_number }}" >> $GITHUB_STEP_SUMMARY
+          echo "Allure report will be available at: https://logos-messaging.github.io/logos-messaging-allure-js/${{ github.run_number }}" >> $GITHUB_STEP_SUMMARY

@@ -35,7 +35,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - name: Remove unwanted software
         uses: ./.github/actions/prune-vm
@@ -75,7 +75,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: waku-org/js-waku
+          repository: logos-messaging/logos-messaging-js
       - name: Remove unwanted software
         uses: ./.github/actions/prune-vm

.gitignore (1 changed line)

@@ -20,3 +20,4 @@ packages/discovery/mock_local_storage
 CLAUDE.md
 .env
 postgres-data/
+packages/rln/waku-rlnv2-contract/

@@ -1,15 +1,15 @@
 {
-  "packages/utils": "0.0.27",
+  "packages/utils": "0.0.28",
   "packages/proto": "0.0.15",
-  "packages/interfaces": "0.0.34",
+  "packages/interfaces": "0.0.35",
-  "packages/enr": "0.0.33",
+  "packages/enr": "0.0.34",
-  "packages/core": "0.0.40",
+  "packages/core": "0.0.41",
-  "packages/message-encryption": "0.0.38",
+  "packages/message-encryption": "0.0.39",
-  "packages/relay": "0.0.23",
+  "packages/relay": "0.0.24",
-  "packages/sdk": "0.0.36",
+  "packages/sdk": "0.0.37",
-  "packages/discovery": "0.0.13",
+  "packages/discovery": "0.0.14",
-  "packages/sds": "0.0.8",
+  "packages/sds": "0.0.9",
-  "packages/rln": "0.1.10",
+  "packages/rln": "0.1.11",
-  "packages/react": "0.0.8",
+  "packages/react": "0.0.9",
-  "packages/run": "0.0.2"
+  "packages/run": "0.0.3"
 }

@@ -187,7 +187,7 @@
    same "printed page" as the copyright notice for easier
    identification within third-party archives.
-   Copyright 2018 Status Research & Development GmbH
+   Copyright [yyyy] [name of copyright owner]
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.

@@ -1,21 +1,21 @@
 The MIT License (MIT)
-Copyright (c) 2021 Status Research & Development GmbH
+Copyright © 2025-2026 Logos
 Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
+of this software and associated documentation files (the “Software”), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.

ci/Jenkinsfile (5 changed lines)

@@ -1,3 +1,6 @@
+#!/usr/bin/env groovy
+library 'status-jenkins-lib@v1.9.27'
 pipeline {
   agent {
     docker {
@@ -56,7 +59,7 @@ pipeline {
       steps {
         sshagent(credentials: ['status-im-auto-ssh']) {
           script {
-            nix.develop('npm run deploy', pure: true)
+            nix.develop('npm run deploy', pure: false)
          }
        }
      }

@@ -13,8 +13,8 @@ const Args = process.argv.slice(2);
 const USE_HTTPS = Args[0] && Args[0].toUpperCase() === "HTTPS";
 const branch = "gh-pages";
-const org = "waku-org";
-const repo = "js-waku";
+const org = "logos-messaging";
+const repo = "logos-messaging-js";
 /* use SSH auth by default */
 let repoUrl = USE_HTTPS
   ? `https://github.com/${org}/${repo}.git`

package-lock.json (1453 changed lines; diff suppressed because it is too large)

@@ -18,10 +18,10 @@
   },
   "dependencies": {
     "@playwright/test": "^1.51.1",
-    "@waku/discovery": "^0.0.11",
-    "@waku/interfaces": "^0.0.33",
-    "@waku/sdk": "^0.0.34",
-    "@waku/utils": "0.0.27",
+    "@waku/discovery": "*",
+    "@waku/interfaces": "*",
+    "@waku/sdk": "*",
+    "@waku/utils": "*",
     "cors": "^2.8.5",
     "dotenv-flow": "^0.4.0",
     "express": "^4.21.2",

@@ -1,7 +1,7 @@
 {
   "name": "@waku/build-utils",
   "version": "1.0.0",
-  "description": "Build utilities for js-waku",
+  "description": "Build utilities for logos-messaging-js",
   "main": "index.js",
   "module": "index.js",
   "type": "module",
@@ -14,12 +14,12 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "author": "Waku Team",
   "license": "MIT OR Apache-2.0",
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
-  "homepage": "https://github.com/waku-org/js-waku#readme"
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js#readme"
 }

@@ -5,6 +5,28 @@ All notable changes to this project will be documented in this file.
 The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification,
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.0.41](https://github.com/logos-messaging/logos-messaging-js/compare/core-v0.0.40...core-v0.0.41) (2026-01-16)
+### Features
+* Add dialTimeout, change dialingQueue to Map ([#2773](https://github.com/logos-messaging/logos-messaging-js/issues/2773)) ([7816642](https://github.com/logos-messaging/logos-messaging-js/commit/7816642fae3eba4f87c196b9571246456a1525e7))
+* Reliable Channel: Status Sync, overflow protection, stop TODOs ([#2729](https://github.com/logos-messaging/logos-messaging-js/issues/2729)) ([e5f51d7](https://github.com/logos-messaging/logos-messaging-js/commit/e5f51d7df101020a1a6d0787ce68fab4f28922f5))
+### Bug Fixes
+* Cleanup routines on reliable channel and core protocols ([#2733](https://github.com/logos-messaging/logos-messaging-js/issues/2733)) ([84a6ea6](https://github.com/logos-messaging/logos-messaging-js/commit/84a6ea69cf8630dacea0cafd58dd8c605ee8dc48))
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/enr bumped from ^0.0.33 to ^0.0.34
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/utils bumped from 0.0.27 to 0.0.28
 ## [0.0.40](https://github.com/waku-org/js-waku/compare/core-v0.0.39...core-v0.0.40) (2025-10-31)

@@ -1,6 +1,6 @@
 {
   "name": "@waku/core",
-  "version": "0.0.40",
+  "version": "0.0.41",
   "description": "TypeScript implementation of the Waku v2 protocol",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -25,17 +25,18 @@
     }
   },
   "type": "module",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/core#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/core#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralised",
     "communication",
     "web3",
@@ -64,11 +65,11 @@
     "node": ">=22"
   },
   "dependencies": {
-    "@waku/enr": "^0.0.33",
+    "@waku/enr": "^0.0.34",
-    "@waku/interfaces": "0.0.34",
+    "@waku/interfaces": "0.0.35",
     "@libp2p/ping": "2.0.35",
     "@waku/proto": "0.0.15",
-    "@waku/utils": "0.0.27",
+    "@waku/utils": "0.0.28",
     "debug": "^4.3.4",
     "@noble/hashes": "^1.3.2",
     "it-all": "^3.0.4",

@@ -65,7 +65,8 @@ describe("ConnectionLimiter", () => {
     enableAutoRecovery: true,
     maxDialingPeers: 3,
     failedDialCooldown: 60,
-    dialCooldown: 10
+    dialCooldown: 10,
+    dialTimeout: 30
   };
   function createLimiter(

@@ -79,7 +79,7 @@ export class ConnectionLimiter implements IConnectionLimiter {
   /**
    * NOTE: Event is not being emitted on closing nor losing a connection.
    * @see https://github.com/libp2p/js-libp2p/issues/939
-   * @see https://github.com/status-im/js-waku/issues/252
+   * @see https://github.com/logos-messaging/logos-messaging-js/issues/252
    *
    * >This event will be triggered anytime we are disconnected from another peer,
    * >regardless of the circumstances of that disconnection.

@@ -29,6 +29,7 @@ const DEFAULT_MAX_CONNECTIONS = 10;
 const DEFAULT_MAX_DIALING_PEERS = 3;
 const DEFAULT_FAILED_DIAL_COOLDOWN_SEC = 60;
 const DEFAULT_DIAL_COOLDOWN_SEC = 10;
+const DEFAULT_DIAL_TIMEOUT_SEC = 30;
 type ConnectionManagerConstructorOptions = {
   libp2p: Libp2p;
@@ -61,6 +62,7 @@ export class ConnectionManager implements IConnectionManager {
       maxDialingPeers: DEFAULT_MAX_DIALING_PEERS,
       failedDialCooldown: DEFAULT_FAILED_DIAL_COOLDOWN_SEC,
       dialCooldown: DEFAULT_DIAL_COOLDOWN_SEC,
+      dialTimeout: DEFAULT_DIAL_TIMEOUT_SEC,
       ...options.config
     };

@@ -39,6 +39,7 @@ describe("Dialer", () => {
     maxDialingPeers: 3,
     failedDialCooldown: 60,
     dialCooldown: 10,
+    dialTimeout: 30,
     maxConnections: 10,
     enableAutoRecovery: true
   };

@@ -23,7 +23,7 @@ export class Dialer implements IDialer {
   private readonly shardReader: ShardReader;
   private readonly options: ConnectionManagerOptions;
-  private dialingQueue: PeerId[] = [];
+  private dialingQueue: Map<string, PeerId> = new Map();
   private dialHistory: Map<string, number> = new Map();
   private failedDials: Map<string, number> = new Map();
   private dialingInterval: NodeJS.Timeout | null = null;
@@ -70,7 +70,7 @@ export class Dialer implements IDialer {
       return;
     }
-    const isEmptyQueue = this.dialingQueue.length === 0;
+    const isEmptyQueue = this.dialingQueue.size === 0;
     const isNotDialing = !this.isProcessing && !this.isImmediateDialing;
     // If queue is empty and we're not currently processing, dial immediately
@@ -81,29 +81,28 @@
       this.isImmediateDialing = false;
       log.info("Released immediate dial lock");
     } else {
-      this.dialingQueue.push(peerId);
+      this.dialingQueue.set(peerId.toString(), peerId);
       log.info(
-        `Added peer to dialing queue, queue size: ${this.dialingQueue.length}`
+        `Added peer to dialing queue, queue size: ${this.dialingQueue.size}`
       );
     }
   }
   private async processQueue(): Promise<void> {
-    if (this.dialingQueue.length === 0 || this.isProcessing) {
+    if (this.dialingQueue.size === 0 || this.isProcessing) {
       return;
     }
     this.isProcessing = true;
     try {
-      const peersToDial = this.dialingQueue.slice(
-        0,
-        this.options.maxDialingPeers
-      );
-      this.dialingQueue = this.dialingQueue.slice(peersToDial.length);
+      const allPeers = Array.from(this.dialingQueue.values());
+      const peersToDial = allPeers.slice(0, this.options.maxDialingPeers);
+
+      peersToDial.forEach((peer) => this.dialingQueue.delete(peer.toString()));
       log.info(
-        `Processing dial queue: dialing ${peersToDial.length} peers, ${this.dialingQueue.length} remaining in queue`
+        `Processing dial queue: dialing ${peersToDial.length} peers, ${this.dialingQueue.size} remaining in queue`
       );
       await Promise.all(peersToDial.map((peerId) => this.dialPeer(peerId)));
@@ -116,7 +115,19 @@
     try {
       log.info(`Dialing peer from queue: ${peerId}`);
-      await this.libp2p.dial(peerId);
+      await Promise.race([
+        this.libp2p.dial(peerId),
+        new Promise<never>((_, reject) =>
+          setTimeout(
+            () =>
+              reject(
+                new Error(`Dial timeout after ${this.options.dialTimeout}s`)
+              ),
+            this.options.dialTimeout * 1000
+          )
+        )
+      ]);
       this.dialHistory.set(peerId.toString(), Date.now());
       this.failedDials.delete(peerId.toString());
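
The dial timeout above is implemented by racing the dial against a rejecting timer. A minimal sketch of the same pattern in isolation (names are illustrative; the real code races `this.libp2p.dial(peerId)` against `this.options.dialTimeout`):

```typescript
// Minimal sketch of the Promise.race timeout pattern used in the Dialer above.
// As in the diff, the timer is not cleared when the operation wins the race; the
// later rejection belongs to a promise the already-settled race simply ignores.
async function withTimeout<T>(operation: Promise<T>, timeoutSec: number): Promise<T> {
  return Promise.race([
    operation,
    new Promise<never>((_, reject) =>
      setTimeout(
        () => reject(new Error(`Dial timeout after ${timeoutSec}s`)),
        timeoutSec * 1000
      )
    )
  ]);
}
```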

@@ -61,6 +61,7 @@ export class FilterCore {
   }
   public async stop(): Promise<void> {
+    this.streamManager.stop();
     try {
       await this.libp2p.unhandle(FilterCodecs.PUSH);
     } catch (e) {

@@ -33,6 +33,11 @@ export class LightPushCore {
     this.streamManager = new StreamManager(CODECS.v3, libp2p.components);
   }
+  public stop(): void {
+    this.streamManager.stop();
+    this.streamManagerV2.stop();
+  }
   public async send(
     encoder: IEncoder,
     message: IMessage,

@@ -35,6 +35,10 @@ export class StoreCore {
     this.streamManager = new StreamManager(StoreCodec, libp2p.components);
   }
+  public stop(): void {
+    this.streamManager.stop();
+  }
   public get maxTimeLimit(): number {
     return MAX_TIME_RANGE;
   }
@@ -68,6 +72,11 @@
     let currentCursor = queryOpts.paginationCursor;
     while (true) {
+      if (queryOpts.abortSignal?.aborted) {
+        log.info("Store query aborted by signal");
+        break;
+      }
       const storeQueryRequest = StoreQueryRequest.create({
         ...queryOpts,
         paginationCursor: currentCursor
@@ -89,13 +98,22 @@
         break;
       }
-      const res = await pipe(
-        [storeQueryRequest.encode()],
-        lp.encode,
-        stream,
-        lp.decode,
-        async (source) => await all(source)
-      );
+      let res;
+      try {
+        res = await pipe(
+          [storeQueryRequest.encode()],
+          lp.encode,
+          stream,
+          lp.decode,
+          async (source) => await all(source)
+        );
+      } catch (error) {
+        if (error instanceof Error && error.name === "AbortError") {
+          log.info(`Store query aborted for peer ${peerId.toString()}`);
+          break;
+        }
+        throw error;
+      }
       const bytes = new Uint8ArrayList();
       res.forEach((chunk) => {
@@ -122,6 +140,11 @@
         `${storeQueryResponse.messages.length} messages retrieved from store`
       );
+      if (queryOpts.abortSignal?.aborted) {
+        log.info("Store query aborted by signal before processing messages");
+        break;
+      }
       const decodedMessages = storeQueryResponse.messages.map((protoMsg) => {
         if (!protoMsg.message) {
           return Promise.resolve(undefined);

@@ -27,6 +27,10 @@ describe("StreamManager", () => {
     } as any as Libp2pComponents);
   });
+  afterEach(() => {
+    sinon.restore();
+  });
   it("should return usable stream attached to connection", async () => {
     for (const writeStatus of ["ready", "writing"]) {
       const con1 = createMockConnection();

@@ -23,6 +23,15 @@ export class StreamManager {
     );
   }
+  public stop(): void {
+    this.libp2p.events.removeEventListener(
+      "peer:update",
+      this.handlePeerUpdateStreamPool
+    );
+    this.streamPool.clear();
+    this.ongoingCreation.clear();
+  }
   public async getStream(peerId: PeerId): Promise<Stream | undefined> {
     try {
       const peerIdStr = peerId.toString();

@@ -1,5 +1,17 @@
 # Changelog
+## [0.0.14](https://github.com/logos-messaging/logos-messaging-js/compare/discovery-v0.0.13...discovery-v0.0.14) (2026-01-16)
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/core bumped from 0.0.40 to 0.0.41
+    * @waku/enr bumped from 0.0.33 to 0.0.34
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/utils bumped from 0.0.27 to 0.0.28
 ## [0.0.13](https://github.com/waku-org/js-waku/compare/discovery-v0.0.12...discovery-v0.0.13) (2025-10-31)

@@ -1,6 +1,6 @@
 {
   "name": "@waku/discovery",
-  "version": "0.0.13",
+  "version": "0.0.14",
   "description": "Contains various discovery mechanisms: DNS Discovery (EIP-1459, Peer Exchange, Local Peer Cache Discovery.",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -12,17 +12,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/discovery#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/discovery#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",
@@ -51,11 +52,11 @@
     "node": ">=22"
   },
   "dependencies": {
-    "@waku/core": "0.0.40",
+    "@waku/core": "0.0.41",
-    "@waku/enr": "0.0.33",
+    "@waku/enr": "0.0.34",
-    "@waku/interfaces": "0.0.34",
+    "@waku/interfaces": "0.0.35",
     "@waku/proto": "^0.0.15",
-    "@waku/utils": "0.0.27",
+    "@waku/utils": "0.0.28",
     "debug": "^4.3.4",
     "dns-over-http-resolver": "^3.0.8",
     "hi-base32": "^0.5.1",

@@ -99,6 +99,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   * devDependencies
     * @waku/interfaces bumped from 0.0.27 to 0.0.28
+## [0.0.34](https://github.com/logos-messaging/logos-messaging-js/compare/enr-v0.0.33...enr-v0.0.34) (2026-01-16)
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/utils bumped from 0.0.27 to 0.0.28
+  * devDependencies
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
 ## [0.0.33](https://github.com/waku-org/js-waku/compare/enr-v0.0.32...enr-v0.0.33) (2025-09-20)

@@ -1,6 +1,6 @@
 {
   "name": "@waku/enr",
-  "version": "0.0.33",
+  "version": "0.0.34",
   "description": "ENR (EIP-778) for Waku",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -12,17 +12,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/enr#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/enr#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",
@@ -56,7 +57,7 @@
     "@libp2p/peer-id": "5.1.7",
     "@multiformats/multiaddr": "^12.0.0",
     "@noble/secp256k1": "^1.7.1",
-    "@waku/utils": "0.0.27",
+    "@waku/utils": "0.0.28",
     "debug": "^4.3.4",
     "js-sha3": "^0.9.2"
   },
@@ -67,7 +68,7 @@
     "@types/chai": "^4.3.11",
     "@types/mocha": "^10.0.6",
     "@waku/build-utils": "*",
-    "@waku/interfaces": "0.0.34",
+    "@waku/interfaces": "0.0.35",
     "chai": "^4.3.10",
     "cspell": "^8.6.1",
     "fast-check": "^3.19.0",

@@ -5,6 +5,18 @@ All notable changes to this project will be documented in this file.
 The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification,
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.0.35](https://github.com/logos-messaging/logos-messaging-js/compare/interfaces-v0.0.34...interfaces-v0.0.35) (2026-01-16)
+### Features
+* Add dialTimeout, change dialingQueue to Map ([#2773](https://github.com/logos-messaging/logos-messaging-js/issues/2773)) ([7816642](https://github.com/logos-messaging/logos-messaging-js/commit/7816642fae3eba4f87c196b9571246456a1525e7))
+### Bug Fixes
+* Cleanup routines on reliable channel and core protocols ([#2733](https://github.com/logos-messaging/logos-messaging-js/issues/2733)) ([84a6ea6](https://github.com/logos-messaging/logos-messaging-js/commit/84a6ea69cf8630dacea0cafd58dd8c605ee8dc48))
 ## [0.0.34](https://github.com/waku-org/js-waku/compare/interfaces-v0.0.33...interfaces-v0.0.34) (2025-09-20)

@@ -1,6 +1,6 @@
 {
   "name": "@waku/interfaces",
-  "version": "0.0.34",
+  "version": "0.0.35",
   "description": "Definition of Waku interfaces",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -12,17 +12,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/interfaces#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/interfaces#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",

@@ -74,6 +74,13 @@ export type ConnectionManagerOptions = {
    * @default 10 seconds
    */
   dialCooldown: number;
+  /**
+   * Time to wait for a dial attempt to complete before timing out.
+   *
+   * @default 30 seconds
+   */
+  dialTimeout: number;
 };
 export interface IConnectionManager {
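
For illustration only, an options object with the new field, mirroring the `DEFAULT_*` constants and test fixtures visible elsewhere in this diff. The import path is an assumption, and the real type may contain additional fields beyond those shown in the fixtures.

```typescript
import type { ConnectionManagerOptions } from "@waku/interfaces";

// Values mirror DEFAULT_* in connection_manager.ts and the spec fixtures above.
export const exampleOptions: ConnectionManagerOptions = {
  enableAutoRecovery: true,
  maxConnections: 10,
  maxDialingPeers: 3,
  failedDialCooldown: 60, // seconds
  dialCooldown: 10, // seconds
  dialTimeout: 30 // seconds, new in this change set
};
```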

@@ -16,6 +16,7 @@ export interface IRelayAPI {
   readonly pubsubTopics: Set<PubsubTopic>;
   readonly gossipSub: GossipSub;
   start: () => Promise<void>;
+  stop: () => Promise<void>;
   waitForPeers: () => Promise<void>;
   getMeshPeers: (topic?: TopicStr) => PeerIdStr[];
 }

@@ -88,11 +88,18 @@ export type QueryRequestParams = {
    * Only use if you know what you are doing.
    */
   peerId?: PeerId;
+  /**
+   * An optional AbortSignal to cancel the query.
+   * When the signal is aborted, the query will stop processing and return early.
+   */
+  abortSignal?: AbortSignal;
 };
 export type IStore = {
   readonly multicodec: string;
+  stop(): void;
   createCursor(message: IDecodedMessage): StoreCursor;
   queryGenerator: <T extends IDecodedMessage>(
     decoders: IDecoder<T>[],
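
A hedged usage sketch for the new `abortSignal` option, assuming `queryGenerator` accepts the query params as its second argument (as the interface suggests) and that a store client and decoder are already in scope; names are illustrative:

```typescript
import type { IDecodedMessage, IDecoder, IStore } from "@waku/interfaces";

declare const store: IStore;
declare const decoder: IDecoder<IDecodedMessage>;

const controller = new AbortController();
// Give up on the store query if it has not finished within 10 seconds.
const timer = setTimeout(() => controller.abort(), 10_000);

try {
  for await (const page of store.queryGenerator([decoder], {
    abortSignal: controller.signal
  })) {
    await Promise.all(page); // each page is a batch of decoded-message promises
  }
} finally {
  clearTimeout(timer);
}
```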

@@ -101,6 +101,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
     * @waku/interfaces bumped from 0.0.27 to 0.0.28
     * @waku/utils bumped from 0.0.20 to 0.0.21
+## [0.0.39](https://github.com/logos-messaging/logos-messaging-js/compare/message-encryption-v0.0.38...message-encryption-v0.0.39) (2026-01-16)
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/core bumped from 0.0.40 to 0.0.41
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/utils bumped from 0.0.27 to 0.0.28
 ## [0.0.38](https://github.com/waku-org/js-waku/compare/message-encryption-v0.0.37...message-encryption-v0.0.38) (2025-10-31)

@@ -1,6 +1,6 @@
 {
   "name": "@waku/message-encryption",
-  "version": "0.0.38",
+  "version": "0.0.39",
   "description": "Waku Message Payload Encryption",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -33,17 +33,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/message-encryption#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/message-encryption#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",
@@ -76,10 +77,10 @@
   },
   "dependencies": {
     "@noble/secp256k1": "^1.7.1",
-    "@waku/core": "0.0.40",
+    "@waku/core": "0.0.41",
-    "@waku/interfaces": "0.0.34",
+    "@waku/interfaces": "0.0.35",
     "@waku/proto": "0.0.15",
-    "@waku/utils": "0.0.27",
+    "@waku/utils": "0.0.28",
     "debug": "^4.3.4",
     "js-sha3": "^0.9.2",
     "uint8arrays": "^5.0.1"

@@ -12,17 +12,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/proto#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/proto#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",

@@ -5,6 +5,17 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.0.9](https://github.com/logos-messaging/logos-messaging-js/compare/react-v0.0.8...react-v0.0.9) (2026-01-16)
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/sdk bumped from 0.0.36 to 0.0.37
+    * @waku/utils bumped from 0.0.27 to 0.0.28
 ## [0.0.8](https://github.com/waku-org/js-waku/compare/react-v0.0.7...react-v0.0.8) (2025-10-31)

@@ -1,7 +1,7 @@
 {
   "name": "@waku/react",
-  "version": "0.0.8",
+  "version": "0.0.9",
-  "description": "React hooks and components to use js-waku",
+  "description": "React hooks and components to use logos-messaging-js",
   "type": "module",
   "main": "dist/index.cjs.js",
   "module": "dist/index.esm.mjs",
@@ -18,17 +18,18 @@
     }
   },
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/react#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/react#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",
@@ -54,9 +55,9 @@
     "node": ">=22"
   },
   "dependencies": {
-    "@waku/interfaces": "0.0.34",
+    "@waku/interfaces": "0.0.35",
-    "@waku/sdk": "0.0.36",
+    "@waku/sdk": "0.0.37",
-    "@waku/utils": "0.0.27"
+    "@waku/utils": "0.0.28"
   },
   "devDependencies": {
     "@rollup/plugin-commonjs": "^25.0.7",

@@ -25,6 +25,23 @@
     * @waku/interfaces bumped from 0.0.16 to 0.0.17
     * @waku/utils bumped from 0.0.9 to 0.0.10
+## [0.0.24](https://github.com/logos-messaging/logos-messaging-js/compare/relay-v0.0.23...relay-v0.0.24) (2026-01-16)
+### Bug Fixes
+* Cleanup routines on reliable channel and core protocols ([#2733](https://github.com/logos-messaging/logos-messaging-js/issues/2733)) ([84a6ea6](https://github.com/logos-messaging/logos-messaging-js/commit/84a6ea69cf8630dacea0cafd58dd8c605ee8dc48))
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/core bumped from 0.0.40 to 0.0.41
+    * @waku/sdk bumped from 0.0.36 to 0.0.37
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/utils bumped from 0.0.27 to 0.0.28
 ## [0.0.23](https://github.com/waku-org/js-waku/compare/relay-v0.0.22...relay-v0.0.23) (2025-10-31)

@@ -1,6 +1,6 @@
 {
   "name": "@waku/relay",
-  "version": "0.0.23",
+  "version": "0.0.24",
   "description": "Relay Protocol for Waku",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -11,17 +11,18 @@
     }
   },
   "type": "module",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/relay#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/relay#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralised",
     "communication",
     "web3",
@@ -51,11 +52,11 @@
   "dependencies": {
     "@chainsafe/libp2p-gossipsub": "14.1.1",
     "@noble/hashes": "^1.3.2",
-    "@waku/core": "0.0.40",
+    "@waku/core": "0.0.41",
-    "@waku/sdk": "0.0.36",
+    "@waku/sdk": "0.0.37",
-    "@waku/interfaces": "0.0.34",
+    "@waku/interfaces": "0.0.35",
     "@waku/proto": "0.0.15",
-    "@waku/utils": "0.0.27",
+    "@waku/utils": "0.0.28",
     "chai": "^4.3.10",
     "debug": "^4.3.4",
     "fast-check": "^3.19.0",

@@ -67,6 +67,10 @@ export class Relay implements IRelay {
    * Observers under key `""` are always called.
    */
   private observers: Map<PubsubTopic, Map<ContentTopic, Set<unknown>>>;
+  private messageEventHandlers: Map<
+    PubsubTopic,
+    (event: CustomEvent<GossipsubMessage>) => void
+  > = new Map();
   public constructor(params: RelayConstructorParams) {
     if (!this.isRelayPubsub(params.libp2p.services.pubsub)) {
@@ -105,6 +109,19 @@
     this.subscribeToAllTopics();
   }
+  public async stop(): Promise<void> {
+    for (const pubsubTopic of this.pubsubTopics) {
+      const handler = this.messageEventHandlers.get(pubsubTopic);
+      if (handler) {
+        this.gossipSub.removeEventListener("gossipsub:message", handler);
+      }
+      this.gossipSub.topicValidators.delete(pubsubTopic);
+      this.gossipSub.unsubscribe(pubsubTopic);
+    }
+    this.messageEventHandlers.clear();
+    this.observers.clear();
+  }
   /**
    * Wait for at least one peer with the given protocol to be connected and in the gossipsub
    * mesh for all pubsubTopics.
@@ -299,17 +316,17 @@
    * @override
    */
  private gossipSubSubscribe(pubsubTopic: string): void {
-    this.gossipSub.addEventListener(
-      "gossipsub:message",
-      (event: CustomEvent<GossipsubMessage>) => {
-        if (event.detail.msg.topic !== pubsubTopic) return;
+    const handler = (event: CustomEvent<GossipsubMessage>): void => {
+      if (event.detail.msg.topic !== pubsubTopic) return;
       this.processIncomingMessage(
         event.detail.msg.topic,
         event.detail.msg.data
       ).catch((e) => log.error("Failed to process incoming message", e));
-      }
-    );
+    };
+
+    this.messageEventHandlers.set(pubsubTopic, handler);
+    this.gossipSub.addEventListener("gossipsub:message", handler);
     this.gossipSub.topicValidators.set(pubsubTopic, messageValidator);
     this.gossipSub.subscribe(pubsubTopic);
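
The fix above follows a general cleanup rule: keep a reference to every listener you register so it can be detached later; an inline arrow function passed straight to `addEventListener` cannot be removed afterwards. A generic sketch of the pattern (not Waku-specific, names are illustrative):

```typescript
// Generic sketch: track handlers per key so a later stop()/unsubscribe can detach them.
const handlers = new Map<string, EventListener>();

function subscribe(target: EventTarget, topic: string): void {
  const handler: EventListener = (_evt) => {
    // filter and process events belonging to `topic`
  };
  handlers.set(topic, handler);
  target.addEventListener("message", handler);
}

function unsubscribeAll(target: EventTarget): void {
  for (const handler of handlers.values()) {
    target.removeEventListener("message", handler);
  }
  handlers.clear();
}
```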

@@ -13,17 +13,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/reliability-tests#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/reliability-tests#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",

@@ -3,5 +3,10 @@ module.exports = {
     tsconfigRootDir: __dirname,
     project: "./tsconfig.dev.json"
   },
-  ignorePatterns: ["src/resources/**/*"]
+  ignorePatterns: ["src/resources/**/*"],
+  overrides: [
+    {
+      files: ["*.config.ts", "*.config.js"]
+    }
+  ]
 };

@@ -1,5 +1,27 @@
 # Changelog
+## [0.1.11](https://github.com/logos-messaging/logos-messaging-js/compare/rln-v0.1.10...rln-v0.1.11) (2026-01-16)
+### ⚠ BREAKING CHANGES
+* **rln:** generate contract types, migrate from ethers to viem ([#2705](https://github.com/logos-messaging/logos-messaging-js/issues/2705))
+### Features
+* **rln:** Generate contract types, migrate from ethers to viem ([#2705](https://github.com/logos-messaging/logos-messaging-js/issues/2705)) ([f2ad23a](https://github.com/logos-messaging/logos-messaging-js/commit/f2ad23ad4354fb3440ca369ed91ba4d882bbacf6))
+### Dependencies
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/core bumped from ^0.0.40 to ^0.0.41
+    * @waku/utils bumped from ^0.0.27 to ^0.0.28
+  * devDependencies
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/message-encryption bumped from ^0.0.37 to ^0.0.39
 ## [0.1.10](https://github.com/waku-org/js-waku/compare/rln-v0.1.9...rln-v0.1.10) (2025-10-31)

@@ -12,6 +12,18 @@ This package provides RLN functionality for the Waku protocol, enabling rate-lim
 npm install @waku/rln
 ```
+## Smart Contract Type Generation
+We use `wagmi` to generate TypeScript bindings for interacting with the RLN smart contracts.
+When changes are pushed to the `waku-rlnv2-contract` repository, run the following script to fetch and build the latest contracts and generate the TypeScript bindings:
+```
+npm run setup:contract-abi
+```
+Note that we commit/bundle the generated typings, so it's not necessary to run this script unless the contracts are updated.
 ## Usage
 ```typescript
@@ -20,11 +32,6 @@ import { RLN } from '@waku/rln';
 // Usage examples coming soon
 ```
-## Constants
-- Implementation contract: 0xde2260ca49300357d5af4153cda0d18f7b3ea9b3
-- Proxy contract: 0xb9cd878c90e49f797b4431fbf4fb333108cb90e6
 ## License
 MIT OR Apache-2.0
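
The `setup:contract-abi` flow described in the README hunk above relies on a wagmi CLI configuration, which is not shown in this diff. A minimal sketch of what such a config could look like, assuming the Foundry plugin and the locally cloned `waku-rlnv2-contract` checkout (output path and project location are assumptions):

```typescript
// wagmi.config.ts — illustrative only; the repository's actual config may differ.
import { defineConfig } from "@wagmi/cli";
import { foundry } from "@wagmi/cli/plugins";

export default defineConfig({
  out: "src/contract/abi.ts", // assumed output location for the generated bindings
  plugins: [
    foundry({
      project: "./waku-rlnv2-contract" // directory cloned by the setup script below
    })
  ]
});
```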

@@ -0,0 +1,66 @@
import { execSync } from "child_process";
import { existsSync, rmSync } from "fs";
import { dirname, join } from "path";
import process from "process";
import { fileURLToPath } from "url";
// Get script directory (equivalent to BASH_SOURCE in bash)
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const CONTRACT_DIR = join(__dirname, "waku-rlnv2-contract");
const REPO_URL = "https://github.com/logos-messaging/waku-rlnv2-contract.git";
/**
* Execute a shell command and print output in real-time
* @param {string} command - The command to execute
* @param {object} options - Options for execSync
*/
function exec(command, options = {}) {
execSync(command, {
stdio: "inherit",
cwd: options.cwd || __dirname,
...options
});
}
async function main() {
try {
console.log("📦 Setting up waku-rlnv2-contract...");
// Remove existing directory if it exists
if (existsSync(CONTRACT_DIR)) {
console.log("🗑️ Removing existing waku-rlnv2-contract directory...");
rmSync(CONTRACT_DIR, { recursive: true, force: true });
}
// Clone the repository
console.log("📥 Cloning waku-rlnv2-contract...");
exec(`git clone ${REPO_URL} ${CONTRACT_DIR}`);
// Install dependencies
console.log("📦 Installing dependencies...");
exec("pnpm i", { cwd: CONTRACT_DIR });
// Build contracts with Foundry
console.log("🔨 Building contracts with Foundry...");
exec("forge build", { cwd: CONTRACT_DIR });
// Generate ABIs with wagmi
console.log("⚙️ Generating ABIs with wagmi...");
exec("npx wagmi generate");
console.log("✅ Contract ABIs generated successfully!");
} catch (error) {
console.log(
"❌ Error generating contract ABIs:",
error instanceof Error ? error.message : error
);
process.exit(1);
}
}
main().catch((error) => {
console.log(error);
process.exit(1);
});

View File

@ -1,6 +1,6 @@
{ {
"name": "@waku/rln", "name": "@waku/rln",
"version": "0.1.10", "version": "0.1.11",
"description": "RLN (Rate Limiting Nullifier) implementation for Waku", "description": "RLN (Rate Limiting Nullifier) implementation for Waku",
"types": "./dist/index.d.ts", "types": "./dist/index.d.ts",
"module": "./dist/index.js", "module": "./dist/index.js",
@ -11,17 +11,18 @@
} }
}, },
"type": "module", "type": "module",
"homepage": "https://github.com/waku-org/js-waku/tree/master/packages/rln#readme", "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/rln#readme",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/waku-org/js-waku.git" "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
}, },
"bugs": { "bugs": {
"url": "https://github.com/waku-org/js-waku/issues" "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
}, },
"license": "MIT OR Apache-2.0", "license": "MIT OR Apache-2.0",
"keywords": [ "keywords": [
"waku", "waku",
"logos-messaging",
"rln", "rln",
"rate-limiting", "rate-limiting",
"privacy", "privacy",
@ -43,7 +44,8 @@
"watch:build": "tsc -p tsconfig.json -w", "watch:build": "tsc -p tsconfig.json -w",
"watch:test": "mocha --watch", "watch:test": "mocha --watch",
"prepublish": "npm run build", "prepublish": "npm run build",
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build" "reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build",
"setup:contract-abi": "node generate_contract_abi.js"
}, },
"engines": { "engines": {
"node": ">=22" "node": ">=22"
@ -54,12 +56,13 @@
"@rollup/plugin-node-resolve": "^15.2.3", "@rollup/plugin-node-resolve": "^15.2.3",
"@types/chai": "^5.0.1", "@types/chai": "^5.0.1",
"@types/chai-spies": "^1.0.6", "@types/chai-spies": "^1.0.6",
"@waku/interfaces": "0.0.34",
"@types/deep-equal-in-any-order": "^1.0.4", "@types/deep-equal-in-any-order": "^1.0.4",
"@types/lodash": "^4.17.15", "@types/lodash": "^4.17.15",
"@types/sinon": "^17.0.3", "@types/sinon": "^17.0.3",
"@wagmi/cli": "^2.7.0",
"@waku/build-utils": "^1.0.0", "@waku/build-utils": "^1.0.0",
"@waku/message-encryption": "^0.0.38", "@waku/interfaces": "0.0.35",
"@waku/message-encryption": "^0.0.39",
"deep-equal-in-any-order": "^2.0.6", "deep-equal-in-any-order": "^2.0.6",
"fast-check": "^3.23.2", "fast-check": "^3.23.2",
"rollup-plugin-copy": "^3.5.0" "rollup-plugin-copy": "^3.5.0"
@ -76,18 +79,19 @@
], ],
"dependencies": { "dependencies": {
"@chainsafe/bls-keystore": "3.0.0", "@chainsafe/bls-keystore": "3.0.0",
"@waku/core": "^0.0.40",
"@waku/utils": "^0.0.27",
"@noble/hashes": "^1.2.0", "@noble/hashes": "^1.2.0",
"@wagmi/core": "^2.22.1",
"@waku/core": "^0.0.41",
"@waku/utils": "^0.0.28",
"@waku/zerokit-rln-wasm": "^0.2.1", "@waku/zerokit-rln-wasm": "^0.2.1",
"ethereum-cryptography": "^3.1.0",
"ethers": "^5.7.2",
"lodash": "^4.17.21",
"uuid": "^11.0.5",
"chai": "^5.1.2", "chai": "^5.1.2",
"chai-as-promised": "^8.0.1", "chai-as-promised": "^8.0.1",
"chai-spies": "^1.1.0", "chai-spies": "^1.1.0",
"chai-subset": "^1.6.0", "chai-subset": "^1.6.0",
"sinon": "^19.0.2" "ethereum-cryptography": "^3.1.0",
"lodash": "^4.17.21",
"sinon": "^19.0.2",
"uuid": "^11.0.5",
"viem": "^2.38.4"
} }
} }

View File

@ -1,93 +0,0 @@
export const PRICE_CALCULATOR_ABI = [
{
inputs: [
{ internalType: "address", name: "_token", type: "address" },
{
internalType: "uint256",
name: "_pricePerMessagePerEpoch",
type: "uint256"
}
],
stateMutability: "nonpayable",
type: "constructor"
},
{ inputs: [], name: "OnlyTokensAllowed", type: "error" },
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "previousOwner",
type: "address"
},
{
indexed: true,
internalType: "address",
name: "newOwner",
type: "address"
}
],
name: "OwnershipTransferred",
type: "event"
},
{
inputs: [{ internalType: "uint32", name: "_rateLimit", type: "uint32" }],
name: "calculate",
outputs: [
{ internalType: "address", name: "", type: "address" },
{ internalType: "uint256", name: "", type: "uint256" }
],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "owner",
outputs: [{ internalType: "address", name: "", type: "address" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "pricePerMessagePerEpoch",
outputs: [{ internalType: "uint256", name: "", type: "uint256" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "renounceOwnership",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "_token", type: "address" },
{
internalType: "uint256",
name: "_pricePerMessagePerEpoch",
type: "uint256"
}
],
name: "setTokenAndPrice",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [],
name: "token",
outputs: [{ internalType: "address", name: "", type: "address" }],
stateMutability: "view",
type: "function"
},
{
inputs: [{ internalType: "address", name: "newOwner", type: "address" }],
name: "transferOwnership",
outputs: [],
stateMutability: "nonpayable",
type: "function"
}
];

View File

@ -1,646 +0,0 @@
export const RLN_ABI = [
{ inputs: [], stateMutability: "nonpayable", type: "constructor" },
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "CannotEraseActiveMembership",
type: "error"
},
{ inputs: [], name: "CannotExceedMaxTotalRateLimit", type: "error" },
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "CannotExtendNonGracePeriodMembership",
type: "error"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "InvalidIdCommitment",
type: "error"
},
{ inputs: [], name: "InvalidMembershipRateLimit", type: "error" },
{
inputs: [
{ internalType: "uint256", name: "startIndex", type: "uint256" },
{ internalType: "uint256", name: "endIndex", type: "uint256" }
],
name: "InvalidPaginationQuery",
type: "error"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "MembershipDoesNotExist",
type: "error"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "NonHolderCannotEraseGracePeriodMembership",
type: "error"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "NonHolderCannotExtend",
type: "error"
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "address",
name: "previousAdmin",
type: "address"
},
{
indexed: false,
internalType: "address",
name: "newAdmin",
type: "address"
}
],
name: "AdminChanged",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "beacon",
type: "address"
}
],
name: "BeaconUpgraded",
type: "event"
},
{
anonymous: false,
inputs: [
{ indexed: false, internalType: "uint8", name: "version", type: "uint8" }
],
name: "Initialized",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "idCommitment",
type: "uint256"
},
{
indexed: false,
internalType: "uint32",
name: "membershipRateLimit",
type: "uint32"
},
{ indexed: false, internalType: "uint32", name: "index", type: "uint32" }
],
name: "MembershipErased",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "idCommitment",
type: "uint256"
},
{
indexed: false,
internalType: "uint32",
name: "membershipRateLimit",
type: "uint32"
},
{ indexed: false, internalType: "uint32", name: "index", type: "uint32" }
],
name: "MembershipExpired",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "idCommitment",
type: "uint256"
},
{
indexed: false,
internalType: "uint32",
name: "membershipRateLimit",
type: "uint32"
},
{ indexed: false, internalType: "uint32", name: "index", type: "uint32" },
{
indexed: false,
internalType: "uint256",
name: "newGracePeriodStartTimestamp",
type: "uint256"
}
],
name: "MembershipExtended",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: false,
internalType: "uint256",
name: "idCommitment",
type: "uint256"
},
{
indexed: false,
internalType: "uint256",
name: "membershipRateLimit",
type: "uint256"
},
{ indexed: false, internalType: "uint32", name: "index", type: "uint32" }
],
name: "MembershipRegistered",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "previousOwner",
type: "address"
},
{
indexed: true,
internalType: "address",
name: "newOwner",
type: "address"
}
],
name: "OwnershipTransferred",
type: "event"
},
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "implementation",
type: "address"
}
],
name: "Upgraded",
type: "event"
},
{
inputs: [],
name: "MAX_MEMBERSHIP_SET_SIZE",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "MERKLE_TREE_DEPTH",
outputs: [{ internalType: "uint8", name: "", type: "uint8" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "Q",
outputs: [{ internalType: "uint256", name: "", type: "uint256" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "activeDurationForNewMemberships",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "currentTotalRateLimit",
outputs: [{ internalType: "uint256", name: "", type: "uint256" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "deployedBlockNumber",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "holder", type: "address" },
{ internalType: "address", name: "token", type: "address" }
],
name: "depositsToWithdraw",
outputs: [{ internalType: "uint256", name: "balance", type: "uint256" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256[]", name: "idCommitments", type: "uint256[]" }
],
name: "eraseMemberships",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "uint256[]", name: "idCommitments", type: "uint256[]" },
{ internalType: "bool", name: "eraseFromMembershipSet", type: "bool" }
],
name: "eraseMemberships",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "uint256[]", name: "idCommitments", type: "uint256[]" }
],
name: "extendMemberships",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "getMembershipInfo",
outputs: [
{ internalType: "uint32", name: "", type: "uint32" },
{ internalType: "uint32", name: "", type: "uint32" },
{ internalType: "uint256", name: "", type: "uint256" }
],
stateMutability: "view",
type: "function"
},
{
inputs: [{ internalType: "uint40", name: "index", type: "uint40" }],
name: "getMerkleProof",
outputs: [{ internalType: "uint256[20]", name: "", type: "uint256[20]" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint32", name: "startIndex", type: "uint32" },
{ internalType: "uint32", name: "endIndex", type: "uint32" }
],
name: "getRateCommitmentsInRangeBoundsInclusive",
outputs: [{ internalType: "uint256[]", name: "", type: "uint256[]" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "gracePeriodDurationForNewMemberships",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [{ internalType: "uint256", name: "", type: "uint256" }],
name: "indicesOfLazilyErasedMemberships",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "_priceCalculator", type: "address" },
{ internalType: "uint32", name: "_maxTotalRateLimit", type: "uint32" },
{
internalType: "uint32",
name: "_minMembershipRateLimit",
type: "uint32"
},
{
internalType: "uint32",
name: "_maxMembershipRateLimit",
type: "uint32"
},
{ internalType: "uint32", name: "_activeDuration", type: "uint32" },
{ internalType: "uint32", name: "_gracePeriod", type: "uint32" }
],
name: "initialize",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "_idCommitment", type: "uint256" }
],
name: "isExpired",
outputs: [{ internalType: "bool", name: "", type: "bool" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "_idCommitment", type: "uint256" }
],
name: "isInGracePeriod",
outputs: [{ internalType: "bool", name: "", type: "bool" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "isInMembershipSet",
outputs: [{ internalType: "bool", name: "", type: "bool" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "isValidIdCommitment",
outputs: [{ internalType: "bool", name: "", type: "bool" }],
stateMutability: "pure",
type: "function"
},
{
inputs: [{ internalType: "uint32", name: "rateLimit", type: "uint32" }],
name: "isValidMembershipRateLimit",
outputs: [{ internalType: "bool", name: "", type: "bool" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "maxMembershipRateLimit",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "maxTotalRateLimit",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "_idCommitment", type: "uint256" }
],
name: "membershipExpirationTimestamp",
outputs: [{ internalType: "uint256", name: "", type: "uint256" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" }
],
name: "memberships",
outputs: [
{ internalType: "uint256", name: "depositAmount", type: "uint256" },
{ internalType: "uint32", name: "activeDuration", type: "uint32" },
{
internalType: "uint256",
name: "gracePeriodStartTimestamp",
type: "uint256"
},
{ internalType: "uint32", name: "gracePeriodDuration", type: "uint32" },
{ internalType: "uint32", name: "rateLimit", type: "uint32" },
{ internalType: "uint32", name: "index", type: "uint32" },
{ internalType: "address", name: "holder", type: "address" },
{ internalType: "address", name: "token", type: "address" }
],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "merkleTree",
outputs: [
{ internalType: "uint40", name: "maxIndex", type: "uint40" },
{ internalType: "uint40", name: "numberOfLeaves", type: "uint40" }
],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "minMembershipRateLimit",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "nextFreeIndex",
outputs: [{ internalType: "uint32", name: "", type: "uint32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "owner",
outputs: [{ internalType: "address", name: "", type: "address" }],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "priceCalculator",
outputs: [
{ internalType: "contract IPriceCalculator", name: "", type: "address" }
],
stateMutability: "view",
type: "function"
},
{
inputs: [],
name: "proxiableUUID",
outputs: [{ internalType: "bytes32", name: "", type: "bytes32" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{ internalType: "uint256", name: "idCommitment", type: "uint256" },
{ internalType: "uint32", name: "rateLimit", type: "uint32" },
{
internalType: "uint256[]",
name: "idCommitmentsToErase",
type: "uint256[]"
}
],
name: "register",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "owner", type: "address" },
{ internalType: "uint256", name: "deadline", type: "uint256" },
{ internalType: "uint8", name: "v", type: "uint8" },
{ internalType: "bytes32", name: "r", type: "bytes32" },
{ internalType: "bytes32", name: "s", type: "bytes32" },
{ internalType: "uint256", name: "idCommitment", type: "uint256" },
{ internalType: "uint32", name: "rateLimit", type: "uint32" },
{
internalType: "uint256[]",
name: "idCommitmentsToErase",
type: "uint256[]"
}
],
name: "registerWithPermit",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [],
name: "renounceOwnership",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [],
name: "root",
outputs: [{ internalType: "uint256", name: "", type: "uint256" }],
stateMutability: "view",
type: "function"
},
{
inputs: [
{
internalType: "uint32",
name: "_activeDurationForNewMembership",
type: "uint32"
}
],
name: "setActiveDuration",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{
internalType: "uint32",
name: "_gracePeriodDurationForNewMembership",
type: "uint32"
}
],
name: "setGracePeriodDuration",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{
internalType: "uint32",
name: "_maxMembershipRateLimit",
type: "uint32"
}
],
name: "setMaxMembershipRateLimit",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "uint32", name: "_maxTotalRateLimit", type: "uint32" }
],
name: "setMaxTotalRateLimit",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{
internalType: "uint32",
name: "_minMembershipRateLimit",
type: "uint32"
}
],
name: "setMinMembershipRateLimit",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "_priceCalculator", type: "address" }
],
name: "setPriceCalculator",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [{ internalType: "address", name: "newOwner", type: "address" }],
name: "transferOwnership",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "newImplementation", type: "address" }
],
name: "upgradeTo",
outputs: [],
stateMutability: "nonpayable",
type: "function"
},
{
inputs: [
{ internalType: "address", name: "newImplementation", type: "address" },
{ internalType: "bytes", name: "data", type: "bytes" }
],
name: "upgradeToAndCall",
outputs: [],
stateMutability: "payable",
type: "function"
},
{
inputs: [{ internalType: "address", name: "token", type: "address" }],
name: "withdraw",
outputs: [],
stateMutability: "nonpayable",
type: "function"
}
];

View File

@ -1,16 +1,15 @@
import { PRICE_CALCULATOR_ABI } from "./abi/price_calculator.js"; import { linearPriceCalculatorAbi, wakuRlnV2Abi } from "./wagmi/generated.js";
import { RLN_ABI } from "./abi/rln.js";
export const RLN_CONTRACT = { export const RLN_CONTRACT = {
chainId: 59141, chainId: 59141,
address: "0xb9cd878c90e49f797b4431fbf4fb333108cb90e6", address: "0xb9cd878c90e49f797b4431fbf4fb333108cb90e6",
abi: RLN_ABI abi: wakuRlnV2Abi
}; };
export const PRICE_CALCULATOR_CONTRACT = { export const PRICE_CALCULATOR_CONTRACT = {
chainId: 59141, chainId: 59141,
address: "0xBcfC0660Df69f53ab409F32bb18A3fb625fcE644", address: "0xBcfC0660Df69f53ab409F32bb18A3fb625fcE644",
abi: PRICE_CALCULATOR_ABI abi: linearPriceCalculatorAbi
}; };
/** /**

View File

@ -1,28 +1,39 @@
import { expect, use } from "chai"; import { expect, use } from "chai";
import chaiAsPromised from "chai-as-promised"; import chaiAsPromised from "chai-as-promised";
import { ethers } from "ethers";
import sinon from "sinon"; import sinon from "sinon";
import { RLNBaseContract } from "./rln_base_contract.js"; import { RLNBaseContract } from "./rln_base_contract.js";
use(chaiAsPromised); use(chaiAsPromised);
function createMockRLNBaseContract(provider: any): RLNBaseContract { function createMockRLNBaseContract(
mockContract: any,
mockRpcClient: any
): RLNBaseContract {
const dummy = Object.create(RLNBaseContract.prototype); const dummy = Object.create(RLNBaseContract.prototype);
dummy.contract = { provider }; dummy.contract = mockContract;
dummy.rpcClient = mockRpcClient;
return dummy as RLNBaseContract; return dummy as RLNBaseContract;
} }
describe("RLNBaseContract.getPriceForRateLimit (unit)", function () { describe("RLNBaseContract.getPriceForRateLimit (unit)", function () {
let provider: any; let mockContract: any;
let calculateStub: sinon.SinonStub; let mockRpcClient: any;
let mockContractFactory: any; let priceCalculatorReadStub: sinon.SinonStub;
let readContractStub: sinon.SinonStub;
beforeEach(() => { beforeEach(() => {
provider = {}; priceCalculatorReadStub = sinon.stub();
calculateStub = sinon.stub(); readContractStub = sinon.stub();
mockContractFactory = function () {
return { calculate: calculateStub }; mockContract = {
read: {
priceCalculator: priceCalculatorReadStub
}
};
mockRpcClient = {
readContract: readContractStub
}; };
}); });
@ -32,35 +43,53 @@ describe("RLNBaseContract.getPriceForRateLimit (unit)", function () {
it("returns token and price for valid calculate", async () => { it("returns token and price for valid calculate", async () => {
const fakeToken = "0x1234567890abcdef1234567890abcdef12345678"; const fakeToken = "0x1234567890abcdef1234567890abcdef12345678";
const fakePrice = ethers.BigNumber.from(42); const fakePrice = 42n;
calculateStub.resolves([fakeToken, fakePrice]); const priceCalculatorAddress = "0xabcdef1234567890abcdef1234567890abcdef12";
priceCalculatorReadStub.resolves(priceCalculatorAddress);
readContractStub.resolves([fakeToken, fakePrice]);
const rlnBase = createMockRLNBaseContract(mockContract, mockRpcClient);
const result = await rlnBase.getPriceForRateLimit(20);
const rlnBase = createMockRLNBaseContract(provider);
const result = await rlnBase.getPriceForRateLimit(20, mockContractFactory);
expect(result.token).to.equal(fakeToken); expect(result.token).to.equal(fakeToken);
expect(result.price).to.not.be.null; expect(result.price).to.equal(fakePrice);
if (result.price) { expect(priceCalculatorReadStub.calledOnce).to.be.true;
expect(result.price.eq(fakePrice)).to.be.true; expect(readContractStub.calledOnce).to.be.true;
}
expect(calculateStub.calledOnceWith(20)).to.be.true; const readContractCall = readContractStub.getCall(0);
expect(readContractCall.args[0]).to.deep.include({
address: priceCalculatorAddress,
functionName: "calculate",
args: [20]
});
}); });
it("throws if calculate throws", async () => { it("throws if calculate throws", async () => {
calculateStub.rejects(new Error("fail")); const priceCalculatorAddress = "0xabcdef1234567890abcdef1234567890abcdef12";
const rlnBase = createMockRLNBaseContract(provider); priceCalculatorReadStub.resolves(priceCalculatorAddress);
await expect( readContractStub.rejects(new Error("fail"));
rlnBase.getPriceForRateLimit(20, mockContractFactory)
).to.be.rejectedWith("fail"); const rlnBase = createMockRLNBaseContract(mockContract, mockRpcClient);
expect(calculateStub.calledOnceWith(20)).to.be.true; await expect(rlnBase.getPriceForRateLimit(20)).to.be.rejectedWith("fail");
expect(priceCalculatorReadStub.calledOnce).to.be.true;
expect(readContractStub.calledOnce).to.be.true;
}); });
it("throws if calculate returns malformed data", async () => { it("returns null values if calculate returns malformed data", async () => {
calculateStub.resolves([null, null]); const priceCalculatorAddress = "0xabcdef1234567890abcdef1234567890abcdef12";
priceCalculatorReadStub.resolves(priceCalculatorAddress);
readContractStub.resolves([null, null]);
const rlnBase = createMockRLNBaseContract(mockContract, mockRpcClient);
const result = await rlnBase.getPriceForRateLimit(20);
const rlnBase = createMockRLNBaseContract(provider);
const result = await rlnBase.getPriceForRateLimit(20, mockContractFactory);
expect(result.token).to.be.null; expect(result.token).to.be.null;
expect(result.price).to.be.null; expect(result.price).to.be.null;
expect(priceCalculatorReadStub.calledOnce).to.be.true;
expect(readContractStub.calledOnce).to.be.true;
}); });
}); });

View File

@ -1,92 +1,74 @@
import { Logger } from "@waku/utils"; import { Logger } from "@waku/utils";
import { ethers } from "ethers"; import {
type Address,
decodeEventLog,
getContract,
type GetContractReturnType,
type Hash,
type PublicClient,
type WalletClient
} from "viem";
import { IdentityCredential } from "../identity.js"; import { IdentityCredential } from "../identity.js";
import { DecryptedCredentials } from "../keystore/types.js"; import type { DecryptedCredentials } from "../keystore/types.js";
import type { RpcClient } from "../utils/index.js";
import { RLN_ABI } from "./abi/rln.js";
import { import {
DEFAULT_RATE_LIMIT, DEFAULT_RATE_LIMIT,
PRICE_CALCULATOR_CONTRACT, RATE_LIMIT_PARAMS,
RATE_LIMIT_PARAMS RLN_CONTRACT
} from "./constants.js"; } from "./constants.js";
import { import {
CustomQueryOptions,
FetchMembersOptions,
Member,
MembershipInfo, MembershipInfo,
MembershipRegisteredEvent,
MembershipState, MembershipState,
RLNContractInitOptions RLNContractOptions
} from "./types.js"; } from "./types.js";
import { iPriceCalculatorAbi, wakuRlnV2Abi } from "./wagmi/generated.js";
const log = new Logger("rln:contract:base"); const log = new Logger("rln:contract:base");
export class RLNBaseContract { export class RLNBaseContract {
public contract: ethers.Contract; public contract: GetContractReturnType<
private deployBlock: undefined | number; typeof wakuRlnV2Abi,
PublicClient | WalletClient
>;
public rpcClient: RpcClient;
private rateLimit: number; private rateLimit: number;
private minRateLimit?: number; private minRateLimit?: number;
private maxRateLimit?: number; private maxRateLimit?: number;
protected _members: Map<number, Member> = new Map();
private _membersFilter: ethers.EventFilter;
private _membershipErasedFilter: ethers.EventFilter;
private _membersExpiredFilter: ethers.EventFilter;
/** /**
* Private constructor for RLNBaseContract. Use static create() instead. * Private constructor for RLNBaseContract. Use static create() instead.
*/ */
protected constructor(options: RLNContractInitOptions) { protected constructor(options: RLNContractOptions) {
const { const { address, rpcClient, rateLimit = DEFAULT_RATE_LIMIT } = options;
address,
signer,
rateLimit = DEFAULT_RATE_LIMIT,
contract
} = options;
log.info("Initializing RLNBaseContract", { address, rateLimit }); log.info("Initializing RLNBaseContract", { address, rateLimit });
this.contract = contract || new ethers.Contract(address, RLN_ABI, signer); this.rpcClient = rpcClient;
this.contract = getContract({
address,
abi: wakuRlnV2Abi,
client: this.rpcClient
});
this.rateLimit = rateLimit; this.rateLimit = rateLimit;
try {
log.info("Setting up event filters");
// Initialize event filters
this._membersFilter = this.contract.filters.MembershipRegistered();
this._membershipErasedFilter = this.contract.filters.MembershipErased();
this._membersExpiredFilter = this.contract.filters.MembershipExpired();
log.info("Event filters initialized successfully");
} catch (error) {
log.error("Failed to initialize event filters", { error });
throw new Error(
"Failed to initialize event filters: " + (error as Error).message
);
}
// Initialize members and subscriptions
this.fetchMembers()
.then(() => {
this.subscribeToMembers();
})
.catch((error) => {
log.error("Failed to initialize members", { error });
});
} }
/** /**
* Static async factory to create and initialize RLNBaseContract * Static async factory to create and initialize RLNBaseContract
*/ */
public static async create( public static async create(
options: RLNContractInitOptions options: RLNContractOptions
): Promise<RLNBaseContract> { ): Promise<RLNBaseContract> {
const instance = new RLNBaseContract(options); const instance = new RLNBaseContract(options);
const [min, max] = await Promise.all([ const [min, max] = await Promise.all([
instance.contract.minMembershipRateLimit(), instance.contract.read.minMembershipRateLimit(),
instance.contract.maxMembershipRateLimit() instance.contract.read.maxMembershipRateLimit()
]); ]);
instance.minRateLimit = ethers.BigNumber.from(min).toNumber();
instance.maxRateLimit = ethers.BigNumber.from(max).toNumber(); instance.minRateLimit = min;
instance.maxRateLimit = max;
instance.validateRateLimit(instance.rateLimit); instance.validateRateLimit(instance.rateLimit);
return instance; return instance;
@ -106,13 +88,6 @@ export class RLNBaseContract {
return this.contract.address; return this.contract.address;
} }
/**
* Gets the contract provider
*/
public get provider(): ethers.providers.Provider {
return this.contract.provider;
}
/** /**
* Gets the minimum allowed rate limit (cached) * Gets the minimum allowed rate limit (cached)
*/ */
@ -136,8 +111,7 @@ export class RLNBaseContract {
* @returns Promise<number> The maximum total rate limit in messages per epoch * @returns Promise<number> The maximum total rate limit in messages per epoch
*/ */
public async getMaxTotalRateLimit(): Promise<number> { public async getMaxTotalRateLimit(): Promise<number> {
const maxTotalRate = await this.contract.maxTotalRateLimit(); return await this.contract.read.maxTotalRateLimit();
return maxTotalRate.toNumber();
} }
/** /**
@ -145,8 +119,7 @@ export class RLNBaseContract {
* @returns Promise<number> The current total rate limit usage in messages per epoch * @returns Promise<number> The current total rate limit usage in messages per epoch
*/ */
public async getCurrentTotalRateLimit(): Promise<number> { public async getCurrentTotalRateLimit(): Promise<number> {
const currentTotal = await this.contract.currentTotalRateLimit(); return Number(await this.contract.read.currentTotalRateLimit());
return currentTotal.toNumber();
} }
/** /**
@ -154,11 +127,10 @@ export class RLNBaseContract {
* @returns Promise<number> The remaining rate limit that can be allocated * @returns Promise<number> The remaining rate limit that can be allocated
*/ */
public async getRemainingTotalRateLimit(): Promise<number> { public async getRemainingTotalRateLimit(): Promise<number> {
const [maxTotal, currentTotal] = await Promise.all([ return (
this.contract.maxTotalRateLimit(), (await this.contract.read.maxTotalRateLimit()) -
this.contract.currentTotalRateLimit() Number(await this.contract.read.currentTotalRateLimit())
]); );
return Number(maxTotal) - Number(currentTotal);
} }
/** /**
@ -170,233 +142,35 @@ export class RLNBaseContract {
this.rateLimit = newRateLimit; this.rateLimit = newRateLimit;
} }
public get members(): Member[] { /**
const sortedMembers = Array.from(this._members.values()).sort( * Gets the Merkle tree root for RLN proof verification
(left, right) => left.index.toNumber() - right.index.toNumber() * @returns Promise<bigint> The Merkle tree root
); *
return sortedMembers; */
public async getMerkleRoot(): Promise<bigint> {
return this.contract.read.root();
} }
public async fetchMembers(options: FetchMembersOptions = {}): Promise<void> { /**
const registeredMemberEvents = await RLNBaseContract.queryFilter( * Gets the Merkle proof for a member at a given index
this.contract, * @param index The index of the member in the membership set
{ * @returns Promise<bigint[]> Array of 20 Merkle proof elements
fromBlock: this.deployBlock, *
...options, */
membersFilter: this.membersFilter public async getMerkleProof(index: number): Promise<readonly bigint[]> {
} return await this.contract.read.getMerkleProof([index]);
);
const removedMemberEvents = await RLNBaseContract.queryFilter(
this.contract,
{
fromBlock: this.deployBlock,
...options,
membersFilter: this.membershipErasedFilter
}
);
const expiredMemberEvents = await RLNBaseContract.queryFilter(
this.contract,
{
fromBlock: this.deployBlock,
...options,
membersFilter: this.membersExpiredFilter
}
);
const events = [
...registeredMemberEvents,
...removedMemberEvents,
...expiredMemberEvents
];
this.processEvents(events);
}
public static async queryFilter(
contract: ethers.Contract,
options: CustomQueryOptions
): Promise<ethers.Event[]> {
const FETCH_CHUNK = 5;
const BLOCK_RANGE = 3000;
const {
fromBlock,
membersFilter,
fetchRange = BLOCK_RANGE,
fetchChunks = FETCH_CHUNK
} = options;
if (fromBlock === undefined) {
return contract.queryFilter(membersFilter);
}
if (!contract.provider) {
throw Error("No provider found on the contract.");
}
const toBlock = await contract.provider.getBlockNumber();
if (toBlock - fromBlock < fetchRange) {
return contract.queryFilter(membersFilter, fromBlock, toBlock);
}
const events: ethers.Event[][] = [];
const chunks = RLNBaseContract.splitToChunks(
fromBlock,
toBlock,
fetchRange
);
for (const portion of RLNBaseContract.takeN<[number, number]>(
chunks,
fetchChunks
)) {
const promises = portion.map(([left, right]) =>
RLNBaseContract.ignoreErrors(
contract.queryFilter(membersFilter, left, right),
[]
)
);
const fetchedEvents = await Promise.all(promises);
events.push(fetchedEvents.flatMap((v) => v));
}
return events.flatMap((v) => v);
}
public processEvents(events: ethers.Event[]): void {
const toRemoveTable = new Map<number, number[]>();
const toInsertTable = new Map<number, ethers.Event[]>();
events.forEach((evt) => {
if (!evt.args) {
return;
}
if (
evt.event === "MembershipErased" ||
evt.event === "MembershipExpired"
) {
let index = evt.args.index;
if (!index) {
return;
}
if (typeof index === "number" || typeof index === "string") {
index = ethers.BigNumber.from(index);
}
const toRemoveVal = toRemoveTable.get(evt.blockNumber);
if (toRemoveVal != undefined) {
toRemoveVal.push(index.toNumber());
toRemoveTable.set(evt.blockNumber, toRemoveVal);
} else {
toRemoveTable.set(evt.blockNumber, [index.toNumber()]);
}
} else if (evt.event === "MembershipRegistered") {
let eventsPerBlock = toInsertTable.get(evt.blockNumber);
if (eventsPerBlock == undefined) {
eventsPerBlock = [];
}
eventsPerBlock.push(evt);
toInsertTable.set(evt.blockNumber, eventsPerBlock);
}
});
}
public static splitToChunks(
from: number,
to: number,
step: number
): Array<[number, number]> {
const chunks: Array<[number, number]> = [];
let left = from;
while (left < to) {
const right = left + step < to ? left + step : to;
chunks.push([left, right] as [number, number]);
left = right;
}
return chunks;
}
public static *takeN<T>(array: T[], size: number): Iterable<T[]> {
let start = 0;
while (start < array.length) {
const portion = array.slice(start, start + size);
yield portion;
start += size;
}
}
public static async ignoreErrors<T>(
promise: Promise<T>,
defaultValue: T
): Promise<T> {
try {
return await promise;
} catch (err: unknown) {
if (err instanceof Error) {
log.info(`Ignoring an error during query: ${err.message}`);
} else {
log.info(`Ignoring an unknown error during query`);
}
return defaultValue;
}
}
public subscribeToMembers(): void {
this.contract.on(
this.membersFilter,
(
_idCommitment: bigint,
_membershipRateLimit: ethers.BigNumber,
_index: ethers.BigNumber,
event: ethers.Event
) => {
this.processEvents([event]);
}
);
this.contract.on(
this.membershipErasedFilter,
(
_idCommitment: bigint,
_membershipRateLimit: ethers.BigNumber,
_index: ethers.BigNumber,
event: ethers.Event
) => {
this.processEvents([event]);
}
);
this.contract.on(
this.membersExpiredFilter,
(
_idCommitment: bigint,
_membershipRateLimit: ethers.BigNumber,
_index: ethers.BigNumber,
event: ethers.Event
) => {
this.processEvents([event]);
}
);
} }
public async getMembershipInfo( public async getMembershipInfo(
idCommitmentBigInt: bigint idCommitmentBigInt: bigint
): Promise<MembershipInfo | undefined> { ): Promise<MembershipInfo | undefined> {
try { try {
const membershipData = const membershipData = await this.contract.read.memberships([
await this.contract.memberships(idCommitmentBigInt); idCommitmentBigInt
const currentBlock = await this.contract.provider.getBlockNumber(); ]);
const currentBlock = await this.rpcClient.getBlockNumber();
const [ const [
depositAmount, depositAmount,
activeDuration, activeDuration,
@ -408,12 +182,13 @@ export class RLNBaseContract {
token token
] = membershipData; ] = membershipData;
const gracePeriodEnd = gracePeriodStartTimestamp.add(gracePeriodDuration); const gracePeriodEnd =
Number(gracePeriodStartTimestamp) + Number(gracePeriodDuration);
let state: MembershipState; let state: MembershipState;
if (currentBlock < gracePeriodStartTimestamp.toNumber()) { if (currentBlock < Number(gracePeriodStartTimestamp)) {
state = MembershipState.Active; state = MembershipState.Active;
} else if (currentBlock < gracePeriodEnd.toNumber()) { } else if (currentBlock < gracePeriodEnd) {
state = MembershipState.GracePeriod; state = MembershipState.GracePeriod;
} else { } else {
state = MembershipState.Expired; state = MembershipState.Expired;
@ -422,9 +197,9 @@ export class RLNBaseContract {
return { return {
index, index,
idCommitment: idCommitmentBigInt.toString(), idCommitment: idCommitmentBigInt.toString(),
rateLimit: Number(rateLimit), rateLimit: rateLimit,
startBlock: gracePeriodStartTimestamp.toNumber(), startBlock: Number(gracePeriodStartTimestamp),
endBlock: gracePeriodEnd.toNumber(), endBlock: gracePeriodEnd,
state, state,
depositAmount, depositAmount,
activeDuration, activeDuration,
@ -438,43 +213,87 @@ export class RLNBaseContract {
} }
} }
public async extendMembership( public async extendMembership(idCommitmentBigInt: bigint): Promise<Hash> {
idCommitmentBigInt: bigint if (!this.rpcClient.account) {
): Promise<ethers.ContractTransaction> { throw new Error(
const tx = await this.contract.extendMemberships([idCommitmentBigInt]); "Failed to extendMembership: no account set in wallet client"
await tx.wait(); );
return tx; }
try {
await this.contract.simulate.extendMemberships([[idCommitmentBigInt]], {
chain: this.rpcClient.chain,
account: this.rpcClient.account.address
});
} catch (err) {
if (err instanceof Error) {
throw new Error(
"Error simulating extending membership: " + err.message
);
} else {
throw new Error("Error simulating extending membership", {
cause: err
});
}
}
const hash = await this.contract.write.extendMemberships(
[[idCommitmentBigInt]],
{
account: this.rpcClient.account,
chain: this.rpcClient.chain
}
);
await this.rpcClient.waitForTransactionReceipt({ hash });
return hash;
} }
public async eraseMembership( public async eraseMembership(
idCommitmentBigInt: bigint, idCommitmentBigInt: bigint,
eraseFromMembershipSet: boolean = true eraseFromMembershipSet: boolean = true
): Promise<ethers.ContractTransaction> { ): Promise<Hash> {
if ( if (
!(await this.isExpired(idCommitmentBigInt)) || !(await this.isExpired(idCommitmentBigInt)) ||
!(await this.isInGracePeriod(idCommitmentBigInt)) !(await this.isInGracePeriod(idCommitmentBigInt))
) { ) {
throw new Error("Membership is not expired or in grace period"); throw new Error("Membership is not expired or in grace period");
} }
if (!this.rpcClient.account) {
throw new Error(
"Failed to eraseMembership: no account set in wallet client"
);
}
const estimatedGas = await this.contract.estimateGas[ try {
"eraseMemberships(uint256[],bool)" await this.contract.simulate.eraseMemberships(
]([idCommitmentBigInt], eraseFromMembershipSet); [[idCommitmentBigInt], eraseFromMembershipSet],
const gasLimit = estimatedGas.add(10000); {
chain: this.rpcClient.chain,
account: this.rpcClient.account.address
}
);
} catch (err) {
if (err instanceof Error) {
throw new Error("Error simulating eraseMemberships: " + err.message);
} else {
throw new Error("Error simulating eraseMemberships", { cause: err });
}
}
const tx = await this.contract["eraseMemberships(uint256[],bool)"]( const hash = await this.contract.write.eraseMemberships(
[idCommitmentBigInt], [[idCommitmentBigInt], eraseFromMembershipSet],
eraseFromMembershipSet, {
{ gasLimit } chain: this.rpcClient.chain,
account: this.rpcClient.account
}
); );
await tx.wait(); await this.rpcClient.waitForTransactionReceipt({ hash });
return tx; return hash;
} }
public async registerMembership( public async registerMembership(
idCommitmentBigInt: bigint, idCommitmentBigInt: bigint,
rateLimit: number = DEFAULT_RATE_LIMIT rateLimit: number = DEFAULT_RATE_LIMIT
): Promise<ethers.ContractTransaction> { ): Promise<Hash> {
if ( if (
rateLimit < RATE_LIMIT_PARAMS.MIN_RATE || rateLimit < RATE_LIMIT_PARAMS.MIN_RATE ||
rateLimit > RATE_LIMIT_PARAMS.MAX_RATE rateLimit > RATE_LIMIT_PARAMS.MAX_RATE
@ -483,21 +302,80 @@ export class RLNBaseContract {
`Rate limit must be between ${RATE_LIMIT_PARAMS.MIN_RATE} and ${RATE_LIMIT_PARAMS.MAX_RATE}` `Rate limit must be between ${RATE_LIMIT_PARAMS.MIN_RATE} and ${RATE_LIMIT_PARAMS.MAX_RATE}`
); );
} }
return this.contract.register(idCommitmentBigInt, rateLimit, []); if (!this.rpcClient.account) {
throw new Error(
"Failed to registerMembership: no account set in wallet client"
);
}
try {
await this.contract.simulate.register(
[idCommitmentBigInt, rateLimit, []],
{
chain: this.rpcClient.chain,
account: this.rpcClient.account.address
}
);
} catch (err) {
if (err instanceof Error) {
throw new Error("Error simulating register membership: " + err.message);
} else {
throw new Error("Error simulating register membership", { cause: err });
}
}
const hash = await this.contract.write.register(
[idCommitmentBigInt, rateLimit, []],
{
chain: this.rpcClient.chain,
account: this.rpcClient.account
}
);
await this.rpcClient.waitForTransactionReceipt({ hash });
return hash;
} }
public async withdraw(token: string, walletAddress: string): Promise<void> { /**
try { * Withdraw deposited tokens after membership is erased.
const tx = await this.contract.withdraw(token, walletAddress); * The smart contract validates that the sender is the holder of the membership,
await tx.wait(); * and will only send tokens to that address.
} catch (error) { * @param token - Token address to withdraw
log.error(`Error in withdraw: ${(error as Error).message}`); */
public async withdraw(token: string): Promise<Hash> {
if (!this.rpcClient.account) {
throw new Error("Failed to withdraw: no account set in wallet client");
} }
try {
await this.contract.simulate.withdraw([token as Address], {
chain: this.rpcClient.chain,
account: this.rpcClient.account.address
});
} catch (err) {
if (err instanceof Error) {
throw new Error("Error simulating withdraw: " + err.message);
} else {
throw new Error("Error simulating withdraw", { cause: err });
}
}
const hash = await this.contract.write.withdraw([token as Address], {
chain: this.rpcClient.chain,
account: this.rpcClient.account
});
await this.rpcClient.waitForTransactionReceipt({ hash });
return hash;
} }
public async registerWithIdentity( public async registerWithIdentity(
identity: IdentityCredential identity: IdentityCredential
): Promise<DecryptedCredentials | undefined> { ): Promise<DecryptedCredentials | undefined> {
try { try {
if (!this.rpcClient.account) {
throw new Error(
"Failed to registerWithIdentity: no account set in wallet client"
);
}
log.info( log.info(
`Registering identity with rate limit: ${this.rateLimit} messages/epoch` `Registering identity with rate limit: ${this.rateLimit} messages/epoch`
); );
@ -520,62 +398,71 @@ export class RLNBaseContract {
); );
} }
const estimatedGas = await this.contract.estimateGas.register( await this.contract.simulate.register(
identity.IDCommitmentBigInt, [identity.IDCommitmentBigInt, this.rateLimit, []],
this.rateLimit, {
[] chain: this.rpcClient.chain,
account: this.rpcClient.account.address
}
); );
const gasLimit = estimatedGas.add(10000);
const txRegisterResponse: ethers.ContractTransaction = const hash: Hash = await this.contract.write.register(
await this.contract.register( [identity.IDCommitmentBigInt, this.rateLimit, []],
identity.IDCommitmentBigInt, {
this.rateLimit, chain: this.rpcClient.chain,
[], account: this.rpcClient.account
{ }
gasLimit );
}
);
const txRegisterReceipt = await txRegisterResponse.wait(); const txRegisterReceipt = await this.rpcClient.waitForTransactionReceipt({
hash
});
if (txRegisterReceipt.status === 0) { if (txRegisterReceipt.status === "reverted") {
throw new Error("Transaction failed on-chain"); throw new Error("Transaction failed on-chain");
} }
const memberRegistered = txRegisterReceipt.events?.find( // Parse MembershipRegistered event from logs
(event: ethers.Event) => event.event === "MembershipRegistered" const memberRegisteredLog = txRegisterReceipt.logs.find((log) => {
); try {
const decoded = decodeEventLog({
abi: wakuRlnV2Abi,
data: log.data,
topics: log.topics
});
return decoded.eventName === "MembershipRegistered";
} catch {
return false;
}
});
if (!memberRegistered || !memberRegistered.args) { if (!memberRegisteredLog) {
log.error( log.error(
"Failed to register membership: No MembershipRegistered event found" "Failed to register membership: No MembershipRegistered event found"
); );
return undefined; return undefined;
} }
const decodedData: MembershipRegisteredEvent = { // Decode the event
idCommitment: memberRegistered.args.idCommitment, const decoded = decodeEventLog({
membershipRateLimit: memberRegistered.args.membershipRateLimit, abi: wakuRlnV2Abi,
index: memberRegistered.args.index data: memberRegisteredLog.data,
}; topics: memberRegisteredLog.topics,
eventName: "MembershipRegistered"
});
log.info( log.info(
`Successfully registered membership with index ${decodedData.index} ` + `Successfully registered membership with index ${decoded.args.index} ` +
`and rate limit ${decodedData.membershipRateLimit}` `and rate limit ${decoded.args.membershipRateLimit}`
); );
const network = await this.contract.provider.getNetwork();
const address = this.contract.address;
const membershipId = Number(decodedData.index);
return { return {
identity, identity,
membership: { membership: {
address, address: this.contract.address,
treeIndex: membershipId, treeIndex: decoded.args.index,
chainId: network.chainId.toString(), chainId: String(RLN_CONTRACT.chainId),
rateLimit: decodedData.membershipRateLimit.toNumber() rateLimit: Number(decoded.args.membershipRateLimit)
} }
}; };
} catch (error) { } catch (error) {
@ -608,78 +495,6 @@ export class RLNBaseContract {
} }
} }
public async registerWithPermitAndErase(
identity: IdentityCredential,
permit: {
owner: string;
deadline: number;
v: number;
r: string;
s: string;
},
idCommitmentsToErase: string[]
): Promise<DecryptedCredentials | undefined> {
try {
log.info(
`Registering identity with permit and rate limit: ${this.rateLimit} messages/epoch`
);
const txRegisterResponse: ethers.ContractTransaction =
await this.contract.registerWithPermit(
permit.owner,
permit.deadline,
permit.v,
permit.r,
permit.s,
identity.IDCommitmentBigInt,
this.rateLimit,
idCommitmentsToErase.map((id) => ethers.BigNumber.from(id))
);
const txRegisterReceipt = await txRegisterResponse.wait();
const memberRegistered = txRegisterReceipt.events?.find(
(event: ethers.Event) => event.event === "MembershipRegistered"
);
if (!memberRegistered || !memberRegistered.args) {
log.error(
"Failed to register membership with permit: No MembershipRegistered event found"
);
return undefined;
}
const decodedData: MembershipRegisteredEvent = {
idCommitment: memberRegistered.args.idCommitment,
membershipRateLimit: memberRegistered.args.membershipRateLimit,
index: memberRegistered.args.index
};
log.info(
`Successfully registered membership with permit. Index: ${decodedData.index}, ` +
`Rate limit: ${decodedData.membershipRateLimit}, Erased ${idCommitmentsToErase.length} commitments`
);
const network = await this.contract.provider.getNetwork();
const address = this.contract.address;
const membershipId = Number(decodedData.index);
return {
identity,
membership: {
address,
treeIndex: membershipId,
chainId: network.chainId.toString(),
rateLimit: decodedData.membershipRateLimit.toNumber()
}
};
} catch (error) {
log.error(
`Error in registerWithPermitAndErase: ${(error as Error).message}`
);
return undefined;
}
}
/** /**
* Validates that the rate limit is within the allowed range (sync) * Validates that the rate limit is within the allowed range (sync)
* @throws Error if the rate limit is outside the allowed range * @throws Error if the rate limit is outside the allowed range
@ -695,50 +510,17 @@ export class RLNBaseContract {
} }
} }
private get membersFilter(): ethers.EventFilter { private async getMemberIndex(idCommitmentBigInt: bigint): Promise<number> {
if (!this._membersFilter) { // Current version of the contract has the index at position 5 in the membership struct
throw Error("Members filter was not initialized."); return (await this.contract.read.memberships([idCommitmentBigInt]))[5];
}
return this._membersFilter;
}
private get membershipErasedFilter(): ethers.EventFilter {
if (!this._membershipErasedFilter) {
throw Error("MembershipErased filter was not initialized.");
}
return this._membershipErasedFilter;
}
private get membersExpiredFilter(): ethers.EventFilter {
if (!this._membersExpiredFilter) {
throw Error("MembersExpired filter was not initialized.");
}
return this._membersExpiredFilter;
}
private async getMemberIndex(
idCommitmentBigInt: bigint
): Promise<ethers.BigNumber | undefined> {
try {
const events = await this.contract.queryFilter(
this.contract.filters.MembershipRegistered(idCommitmentBigInt)
);
if (events.length === 0) return undefined;
// Get the most recent registration event
const event = events[events.length - 1];
return event.args?.index;
} catch (error) {
return undefined;
}
} }
public async getMembershipStatus( public async getMembershipStatus(
idCommitment: bigint idCommitment: bigint
): Promise<"expired" | "grace" | "active"> { ): Promise<"expired" | "grace" | "active"> {
const [isExpired, isInGrace] = await Promise.all([ const [isExpired, isInGrace] = await Promise.all([
this.contract.isExpired(idCommitment), this.contract.read.isExpired([idCommitment]),
this.contract.isInGracePeriod(idCommitment) this.contract.read.isInGracePeriod([idCommitment])
]); ]);
if (isExpired) return "expired"; if (isExpired) return "expired";
@ -753,7 +535,7 @@ export class RLNBaseContract {
*/ */
public async isExpired(idCommitmentBigInt: bigint): Promise<boolean> { public async isExpired(idCommitmentBigInt: bigint): Promise<boolean> {
try { try {
return await this.contract.isExpired(idCommitmentBigInt); return await this.contract.read.isExpired([idCommitmentBigInt]);
} catch (error) { } catch (error) {
log.error("Error in isExpired:", error); log.error("Error in isExpired:", error);
return false; return false;
@ -767,7 +549,7 @@ export class RLNBaseContract {
*/ */
public async isInGracePeriod(idCommitmentBigInt: bigint): Promise<boolean> { public async isInGracePeriod(idCommitmentBigInt: bigint): Promise<boolean> {
try { try {
return await this.contract.isInGracePeriod(idCommitmentBigInt); return await this.contract.read.isInGracePeriod([idCommitmentBigInt]);
} catch (error) { } catch (error) {
log.error("Error in isInGracePeriod:", error); log.error("Error in isInGracePeriod:", error);
return false; return false;
@ -779,21 +561,18 @@ export class RLNBaseContract {
* @param rateLimit The rate limit to calculate the price for * @param rateLimit The rate limit to calculate the price for
* @param contractFactory Optional factory for creating the contract (for testing) * @param contractFactory Optional factory for creating the contract (for testing)
*/ */
public async getPriceForRateLimit( public async getPriceForRateLimit(rateLimit: number): Promise<{
rateLimit: number,
contractFactory?: typeof import("ethers").Contract
): Promise<{
token: string | null; token: string | null;
price: import("ethers").BigNumber | null; price: bigint | null;
}> { }> {
const provider = this.contract.provider; const address = await this.contract.read.priceCalculator();
const ContractCtor = contractFactory || ethers.Contract; const [token, price] = await this.rpcClient.readContract({
const priceCalculator = new ContractCtor( address,
PRICE_CALCULATOR_CONTRACT.address, abi: iPriceCalculatorAbi,
PRICE_CALCULATOR_CONTRACT.abi, functionName: "calculate",
provider args: [rateLimit]
); });
const [token, price] = await priceCalculator.calculate(rateLimit);
// Defensive: if token or price is null/undefined, return nulls // Defensive: if token or price is null/undefined, return nulls
if (!token || !price) { if (!token || !price) {
return { token: null, price: null }; return { token: null, price: null };
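For reference, a condensed, self-contained sketch of the write path this migration standardises on: simulate, then write, then wait for the receipt and decode the event. The injected-provider setup and placeholder values are assumptions; the ABI, address, and flow mirror `RLNBaseContract` above, but this is not the package API:
```typescript
// Illustrative only — mirrors the simulate → write → waitForTransactionReceipt →
// decodeEventLog flow used by RLNBaseContract above.
import {
  createWalletClient,
  custom,
  decodeEventLog,
  publicActions
} from "viem";
import { lineaSepolia } from "viem/chains";

import { wakuRlnV2Abi } from "./wagmi/generated.js";

const RLN_ADDRESS = "0xb9cd878c90e49f797b4431fbf4fb333108cb90e6"; // RLN_CONTRACT.address

// Assumes an injected provider (e.g. MetaMask) with an account already selected.
const [account] = await (window as any).ethereum.request({
  method: "eth_requestAccounts"
});

const rpcClient = createWalletClient({
  account,
  chain: lineaSepolia,
  transport: custom((window as any).ethereum)
}).extend(publicActions);

const idCommitment = 1n; // placeholder commitment
const rateLimit = 20;

// 1. Simulate first so a revert surfaces before any gas is spent.
await rpcClient.simulateContract({
  address: RLN_ADDRESS,
  abi: wakuRlnV2Abi,
  functionName: "register",
  args: [idCommitment, rateLimit, []],
  account: rpcClient.account
});

// 2. Send the transaction and wait for its receipt.
const hash = await rpcClient.writeContract({
  address: RLN_ADDRESS,
  abi: wakuRlnV2Abi,
  functionName: "register",
  args: [idCommitment, rateLimit, []],
  account: rpcClient.account,
  chain: rpcClient.chain
});
const receipt = await rpcClient.waitForTransactionReceipt({ hash });

// 3. Decode the MembershipRegistered event from the receipt logs.
for (const txLog of receipt.logs) {
  try {
    const decoded = decodeEventLog({
      abi: wakuRlnV2Abi,
      data: txLog.data,
      topics: txLog.topics
    });
    if (decoded.eventName === "MembershipRegistered") {
      console.log("Registered membership:", decoded.args);
    }
  } catch {
    // Log emitted by a different ABI/event; ignore.
  }
}
```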

View File

@ -1,28 +1,22 @@
import { ethers } from "ethers"; import { Address } from "viem";
export interface CustomQueryOptions extends FetchMembersOptions { import { RpcClient } from "../utils/index.js";
membersFilter: ethers.EventFilter;
}
export type Member = { export type Member = {
idCommitment: string; idCommitment: string;
index: ethers.BigNumber; index: bigint;
}; };
export interface RLNContractOptions { export interface RLNContractOptions {
signer: ethers.Signer; rpcClient: RpcClient;
address: string; address: Address;
rateLimit?: number; rateLimit?: number;
} }
export interface RLNContractInitOptions extends RLNContractOptions {
contract?: ethers.Contract;
}
export interface MembershipRegisteredEvent { export interface MembershipRegisteredEvent {
idCommitment: string; idCommitment: string;
membershipRateLimit: ethers.BigNumber; membershipRateLimit: bigint;
index: ethers.BigNumber; index: bigint;
} }
export type FetchMembersOptions = { export type FetchMembersOptions = {
@ -32,13 +26,13 @@ export type FetchMembersOptions = {
}; };
export interface MembershipInfo { export interface MembershipInfo {
index: ethers.BigNumber; index: number;
idCommitment: string; idCommitment: string;
rateLimit: number; rateLimit: number;
startBlock: number; startBlock: number;
endBlock: number; endBlock: number;
state: MembershipState; state: MembershipState;
depositAmount: ethers.BigNumber; depositAmount: bigint;
activeDuration: number; activeDuration: number;
gracePeriodDuration: number; gracePeriodDuration: number;
holder: string; holder: string;

File diff suppressed because it is too large

View File

@@ -1,5 +1,5 @@
 import { Logger } from "@waku/utils";
-import { ethers } from "ethers";
+import { publicActions } from "viem";

 import { RLN_CONTRACT } from "./contract/constants.js";
 import { RLNBaseContract } from "./contract/rln_base_contract.js";
@@ -10,7 +10,7 @@ import type {
 } from "./keystore/index.js";
 import { KeystoreEntity, Password } from "./keystore/types.js";
 import { RegisterMembershipOptions, StartRLNOptions } from "./types.js";
-import { extractMetaMaskSigner } from "./utils/index.js";
+import { createViemClientFromWindow, RpcClient } from "./utils/index.js";
 import { Zerokit } from "./zerokit.js";

 const log = new Logger("rln:credentials");
@@ -24,7 +24,7 @@ export class RLNCredentialsManager {
   protected starting = false;

   public contract: undefined | RLNBaseContract;
-  public signer: undefined | ethers.Signer;
+  public rpcClient: undefined | RpcClient;

   protected keystore = Keystore.create();
   public credentials: undefined | DecryptedCredentials;
@@ -36,10 +36,6 @@ export class RLNCredentialsManager {
     this.zerokit = zerokit;
   }

-  public get provider(): undefined | ethers.providers.Provider {
-    return this.contract?.provider;
-  }
-
   public async start(options: StartRLNOptions = {}): Promise<void> {
     if (this.started || this.starting) {
       log.info("RLNCredentialsManager already started or starting");
@@ -59,10 +55,8 @@
       log.info("Credentials successfully decrypted");
     }

-    const { signer, address, rateLimit } = await this.determineStartOptions(
-      options,
-      credentials
-    );
+    const { rpcClient, address, rateLimit } =
+      await this.determineStartOptions(options, credentials);

     log.info(`Using contract address: ${address}`);
@@ -72,10 +66,10 @@
     }

     this.credentials = credentials;
-    this.signer = signer!;
+    this.rpcClient = rpcClient!;
     this.contract = await RLNBaseContract.create({
-      address: address!,
-      signer: signer!,
+      address: address! as `0x${string}`,
+      rpcClient: this.rpcClient,
       rateLimit: rateLimit ?? this.zerokit.rateLimit
     });
@@ -134,7 +128,7 @@
   protected async determineStartOptions(
     options: StartRLNOptions,
     credentials: KeystoreEntity | undefined
-  ): Promise<StartRLNOptions> {
+  ): Promise<StartRLNOptions & { rpcClient: RpcClient }> {
     let chainId = credentials?.membership.chainId;
     const address =
       credentials?.membership.address ||
@@ -146,11 +140,14 @@
       log.info(`Using Linea contract with chainId: ${chainId}`);
     }

-    const signer = options.signer || (await extractMetaMaskSigner());
-    const currentChainId = await signer.getChainId();
+    const rpcClient: RpcClient = options.walletClient
+      ? options.walletClient.extend(publicActions)
+      : await createViemClientFromWindow();
+
+    const currentChainId = rpcClient.chain?.id;
     log.info(`Current chain ID: ${currentChainId}`);

-    if (chainId && chainId !== currentChainId.toString()) {
+    if (chainId && chainId !== currentChainId?.toString()) {
       log.error(
         `Chain ID mismatch: contract=${chainId}, current=${currentChainId}`
       );
@@ -160,7 +157,7 @@
     }

     return {
-      signer,
+      rpcClient,
       address
     };
   }
@@ -206,9 +203,9 @@
   protected async verifyCredentialsAgainstContract(
     credentials: KeystoreEntity
   ): Promise<void> {
-    if (!this.contract) {
+    if (!this.contract || !this.rpcClient) {
       throw Error(
-        "Failed to verify chain coordinates: no contract initialized."
+        "Failed to verify chain coordinates: no contract or viem client initialized."
       );
     }
@@ -221,8 +218,7 @@
     }

     const chainId = credentials.membership.chainId;
-    const network = await this.contract.provider.getNetwork();
-    const currentChainId = network.chainId;
+    const currentChainId = await this.rpcClient.getChainId();

     if (chainId !== currentChainId.toString()) {
       throw Error(
         `Failed to verify chain coordinates: credentials chainID=${chainId} is not equal to registryContract chainID=${currentChainId}`
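The pieces above fit together roughly as follows; a minimal sketch, assuming the package's public entry point (referred to here as `@waku/rln`) re-exports `createViemClientFromWindow`, `RLNBaseContract` and `RLN_CONTRACT`, and that `RLN_CONTRACT` exposes an `address` field:

```ts
// Hedged sketch, not the repository's own example code.
import {
  createViemClientFromWindow,
  RLNBaseContract,
  RLN_CONTRACT
} from "@waku/rln"; // package entry point assumed

const rpcClient = await createViemClientFromWindow(); // WalletClient & PublicActions
const contract = await RLNBaseContract.create({
  address: RLN_CONTRACT.address as `0x${string}`, // assumed shape of RLN_CONTRACT
  rpcClient,
  rateLimit: 20 // illustrative value
});
```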

View File

@@ -1,11 +1,10 @@
-import { RLN_ABI } from "./contract/abi/rln.js";
 import { RLN_CONTRACT } from "./contract/index.js";
 import { RLNBaseContract } from "./contract/rln_base_contract.js";
 import { createRLN } from "./create.js";
 import { IdentityCredential } from "./identity.js";
 import { Keystore } from "./keystore/index.js";
 import { RLNInstance } from "./rln.js";
-import { extractMetaMaskSigner } from "./utils/index.js";
+import { createViemClientFromWindow } from "./utils/index.js";

 export {
   RLNBaseContract,
@@ -14,10 +13,16 @@ export {
   RLNInstance,
   IdentityCredential,
   RLN_CONTRACT,
-  extractMetaMaskSigner,
-  RLN_ABI
+  createViemClientFromWindow
 };

+export {
+  wakuRlnV2Abi,
+  linearPriceCalculatorAbi,
+  iPriceCalculatorAbi,
+  membershipUpgradeableAbi
+} from "./contract/wagmi/generated.js";
+
 export type {
   DecryptedCredentials,
   EncryptedCredentials,

View File

@@ -1,4 +1,4 @@
-import { ethers } from "ethers";
+import { WalletClient } from "viem";

 import { IdentityCredential } from "./identity.js";
 import {
@@ -8,9 +8,9 @@ import {
 export type StartRLNOptions = {
   /**
-   * If not set - will extract MetaMask account and get signer from it.
+   * If not set - will attempt to create from provider injected in window.
    */
-  signer?: ethers.Signer;
+  walletClient?: WalletClient;
   /**
    * If not set - will use default SEPOLIA_CONTRACT address.
    */
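For callers that already manage their own wallet connection, the new `walletClient` option replaces the old `signer`. A hedged sketch, assuming `createRLN()` returns an instance whose `start()` accepts `StartRLNOptions` (the `@waku/rln` package name is also an assumption):

```ts
import "viem/window";
import { createWalletClient, custom } from "viem";
import { lineaSepolia } from "viem/chains";

import { createRLN } from "@waku/rln"; // package entry point assumed

const walletClient = createWalletClient({
  chain: lineaSepolia,
  transport: custom(window.ethereum!)
});

const rln = await createRLN();
// determineStartOptions() extends the wallet client with publicActions internally.
await rln.start({ walletClient });
```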

View File

@@ -1,4 +1,4 @@
-export { extractMetaMaskSigner } from "./metamask.js";
+export { createViemClientFromWindow, RpcClient } from "./rpcClient.js";
 export { BytesUtils } from "./bytes.js";
 export { sha256, poseidonHash } from "./hash.js";
 export { dateToEpoch, epochIntToBytes, epochBytesToInt } from "./epoch.js";

View File

@@ -1,17 +0,0 @@
import { ethers } from "ethers";
export const extractMetaMaskSigner = async (): Promise<ethers.Signer> => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const ethereum = (window as any).ethereum;
if (!ethereum) {
throw Error(
"Missing or invalid Ethereum provider. Please install a Web3 wallet such as MetaMask."
);
}
await ethereum.request({ method: "eth_requestAccounts" });
const provider = new ethers.providers.Web3Provider(ethereum, "any");
return provider.getSigner();
};

View File

@@ -0,0 +1,61 @@
import "viem/window";
import {
type Address,
createWalletClient,
custom,
PublicActions,
publicActions,
WalletClient
} from "viem";
import { lineaSepolia } from "viem/chains";
export type RpcClient = WalletClient & PublicActions;
/**
* Checks window for injected Ethereum provider, requests user to connect, and creates an RPC client object
* capable of performing both read and write operations on the blockchain.
*
* If the wallet is not connected to the Linea Sepolia network, it will attempt to switch to it.
* If the wallet does not have the Linea Sepolia network added, it will attempt to add it.
*/
export const createViemClientFromWindow = async (): Promise<RpcClient> => {
const ethereum = window.ethereum;
if (!ethereum) {
throw Error(
"Missing or invalid Ethereum provider. Please install a Web3 wallet such as MetaMask."
);
}
const accounts = await ethereum.request({ method: "eth_requestAccounts" });
if (!Array.isArray(accounts)) {
throw Error("Failed to get accounts");
}
const account = accounts[0] as Address;
const rpcClient: RpcClient = createWalletClient({
account: account as Address,
chain: lineaSepolia,
transport: custom(window.ethereum!)
}).extend(publicActions);
// Ensure wallet is connected to Linea Sepolia
try {
await rpcClient.switchChain({ id: lineaSepolia.id });
} catch (error: unknown) {
// This error code indicates that the chain has not been added to the wallet
if (
typeof error === "object" &&
error !== null &&
"code" in error &&
error.code === 4902
) {
await rpcClient.addChain({ chain: lineaSepolia });
await rpcClient.switchChain({ id: lineaSepolia.id });
} else {
throw error;
}
}
return rpcClient;
};
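Once exported, the helper gives application code a single client for both wallet and public RPC calls; a hedged usage sketch (the `@waku/rln` import path is an assumption, the viem actions used are standard):

```ts
import { createViemClientFromWindow, type RpcClient } from "@waku/rln"; // path assumed

const client: RpcClient = await createViemClientFromWindow();

const chainId = await client.getChainId(); // public action
const [account] = await client.getAddresses(); // wallet action
console.log(`connected as ${account} on chain ${chainId}`);
```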

View File

@@ -1,3 +1,4 @@
 {
-  "extends": "../../tsconfig.dev"
+  "extends": "../../tsconfig.dev",
+  "exclude": ["wagmi.config.ts"]
 }

View File

@@ -6,5 +6,5 @@
     "tsBuildInfoFile": "dist/.tsbuildinfo"
   },
   "include": ["src"],
-  "exclude": ["src/**/*.spec.ts", "src/test_utils"]
+  "exclude": ["wagmi.config.ts", "src/**/*.spec.ts", "src/test_utils"]
 }

View File

@@ -0,0 +1,18 @@
import { defineConfig } from "@wagmi/cli";
import { foundry } from "@wagmi/cli/plugins";
export default defineConfig({
out: "src/contract/wagmi/generated.ts",
plugins: [
foundry({
project: "./waku-rlnv2-contract",
artifacts: "out",
include: [
"WakuRlnV2.sol/**",
"Membership.sol/**",
"LinearPriceCalculator.sol/**",
"IPriceCalculator.sol/**"
]
})
]
});
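The generated ABIs exported above can be fed straight into viem; a hedged sketch in which the contract address and the read function name are illustrative placeholders rather than values from this repository:

```ts
import { createPublicClient, getContract, http } from "viem";
import { lineaSepolia } from "viem/chains";

import { wakuRlnV2Abi } from "@waku/rln"; // package entry point assumed

const client = createPublicClient({ chain: lineaSepolia, transport: http() });

const rln = getContract({
  address: "0x0000000000000000000000000000000000000000", // placeholder address
  abi: wakuRlnV2Abi,
  client
});

// Typed reads are then available as rln.read.<functionName>(), e.g. a hypothetical:
// const limit = await rln.read.maxTotalRateLimit();
```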

View File

@@ -1,5 +1,17 @@
 # Changelog

+## [0.0.3](https://github.com/logos-messaging/logos-messaging-js/compare/run-v0.0.2...run-v0.0.3) (2026-01-16)
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/core bumped from 0.0.40 to 0.0.41
+    * @waku/interfaces bumped from * to 0.0.35
+    * @waku/sdk bumped from 0.0.36 to 0.0.37
+    * @waku/utils bumped from * to 0.0.28
+
 ## [0.0.2](https://github.com/waku-org/js-waku/compare/run-v0.0.1...run-v0.0.2) (2025-10-31)

View File

@@ -1,20 +1,21 @@
 {
   "name": "@waku/run",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "description": "Run a local Waku network for development and testing",
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/run#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/run#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "communication",
     "web3",
@@ -50,10 +51,10 @@
     "node": ">=22"
   },
   "dependencies": {
-    "@waku/core": "0.0.40",
-    "@waku/interfaces": "*",
-    "@waku/sdk": "0.0.36",
-    "@waku/utils": "*"
+    "@waku/core": "0.0.41",
+    "@waku/interfaces": "0.0.35",
+    "@waku/sdk": "0.0.37",
+    "@waku/utils": "0.0.28"
   },
   "devDependencies": {
     "@types/chai": "^4.3.11",

View File

@@ -47,6 +47,32 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
     * @waku/interfaces bumped from 0.0.19 to 0.0.20
     * @waku/peer-exchange bumped from ^0.0.17 to ^0.0.18

+## [0.0.37](https://github.com/logos-messaging/logos-messaging-js/compare/sdk-v0.0.36...sdk-v0.0.37) (2026-01-16)
+
+### Features
+
+* Incorporate sds-r into reliable channels ([#2701](https://github.com/logos-messaging/logos-messaging-js/issues/2701)) ([788f7e6](https://github.com/logos-messaging/logos-messaging-js/commit/788f7e62c5141d10d013c91c28d549188d165762))
+* Reliable Channel: Status Sync, overflow protection, stop TODOs ([#2729](https://github.com/logos-messaging/logos-messaging-js/issues/2729)) ([e5f51d7](https://github.com/logos-messaging/logos-messaging-js/commit/e5f51d7df101020a1a6d0787ce68fab4f28922f5))
+
+### Bug Fixes
+
+* Cleanup routines on reliable channel and core protocols ([#2733](https://github.com/logos-messaging/logos-messaging-js/issues/2733)) ([84a6ea6](https://github.com/logos-messaging/logos-messaging-js/commit/84a6ea69cf8630dacea0cafd58dd8c48))
+
+### Dependencies
+
+* The following workspace dependencies were updated
+  * dependencies
+    * @waku/core bumped from 0.0.40 to 0.0.41
+    * @waku/discovery bumped from 0.0.13 to 0.0.14
+    * @waku/interfaces bumped from 0.0.34 to 0.0.35
+    * @waku/sds bumped from ^0.0.8 to ^0.0.9
+    * @waku/utils bumped from 0.0.27 to 0.0.28
+  * devDependencies
+    * @waku/message-encryption bumped from ^0.0.38 to ^0.0.39
+
 ## [0.0.36](https://github.com/waku-org/js-waku/compare/sdk-v0.0.35...sdk-v0.0.36) (2025-10-31)

View File

@@ -1,7 +1,7 @@
 {
   "name": "@waku/sdk",
-  "version": "0.0.36",
-  "description": "A unified SDK for easy creation and management of js-waku nodes.",
+  "version": "0.0.37",
+  "description": "A unified SDK for easy creation and management of logos-messaging-js nodes.",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
   "exports": {
@@ -21,17 +21,18 @@
   },
   "type": "module",
   "author": "Waku Team",
-  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/sdk#readme",
+  "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/sdk#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
   },
   "bugs": {
-    "url": "https://github.com/waku-org/js-waku/issues"
+    "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
   },
   "license": "MIT OR Apache-2.0",
   "keywords": [
     "waku",
+    "logos-messaging",
     "decentralized",
     "secure",
     "communication",
@@ -68,12 +69,12 @@
     "@libp2p/websockets": "9.2.16",
     "@noble/hashes": "^1.3.3",
     "@types/lodash.debounce": "^4.0.9",
-    "@waku/core": "0.0.40",
-    "@waku/discovery": "0.0.13",
-    "@waku/interfaces": "0.0.34",
+    "@waku/core": "0.0.41",
+    "@waku/discovery": "0.0.14",
+    "@waku/interfaces": "0.0.35",
     "@waku/proto": "^0.0.15",
-    "@waku/sds": "^0.0.8",
-    "@waku/utils": "0.0.27",
+    "@waku/sds": "^0.0.9",
+    "@waku/utils": "0.0.28",
     "libp2p": "2.8.11",
     "lodash.debounce": "^4.0.8"
   },
@@ -86,7 +87,7 @@
     "@types/chai": "^4.3.11",
     "@types/mocha": "^10.0.9",
     "@waku/build-utils": "*",
-    "@waku/message-encryption": "^0.0.38",
+    "@waku/message-encryption": "^0.0.39",
     "chai": "^5.1.1",
     "cspell": "^8.6.1",
     "interface-datastore": "8.3.2",

View File

@@ -9,6 +9,7 @@ import { Libp2p, LightPushError, LightPushStatusCode } from "@waku/interfaces";
 import { createRoutingInfo } from "@waku/utils";
 import { utf8ToBytes } from "@waku/utils/bytes";
 import { expect } from "chai";
+import { afterEach } from "mocha";
 import sinon, { SinonSpy } from "sinon";

 import { PeerManager } from "../peer_manager/index.js";
@@ -38,6 +39,10 @@ describe("LightPush SDK", () => {
     lightPush = mockLightPush({ libp2p });
   });

+  afterEach(() => {
+    sinon.restore();
+  });
+
   it("should fail to send if no connected peers found", async () => {
     const result = await lightPush.send(encoder, {
       payload: utf8ToBytes("test")

View File

@@ -65,6 +65,7 @@ export class LightPush implements ILightPush {
   public stop(): void {
     this.retryManager.stop();
+    this.protocol.stop();
   }

   public async send(

View File

@@ -47,7 +47,9 @@ describe("RetryManager", () => {
     sinon.restore();
   });

-  it("should start and stop interval correctly", () => {
+  // TODO: Skipped because the global state is not being restored and it breaks
+  // tests of functionalities that rely on intervals
+  it.skip("should start and stop interval correctly", () => {
     const setIntervalSpy = sinon.spy(global, "setInterval");
     const clearIntervalSpy = sinon.spy(global, "clearInterval");

View File

@@ -10,6 +10,7 @@ import {
 import { delay } from "@waku/utils";
 import { utf8ToBytes } from "@waku/utils/bytes";
 import { expect } from "chai";
+import { afterEach } from "mocha";
 import sinon from "sinon";

 import {
@@ -91,6 +92,10 @@ describe("QueryOnConnect", () => {
     };
   });

+  afterEach(() => {
+    sinon.restore();
+  });
+
   describe("constructor", () => {
     it("should create QueryOnConnect instance with all required parameters", () => {
       queryOnConnect = new QueryOnConnect(
@@ -158,14 +163,14 @@ describe("QueryOnConnect", () => {
       expect(wakuEventSpy.calledWith(WakuEvent.Health)).to.be.true;
     });

-    it("should remove event listeners when stopped", () => {
+    it("should remove event listeners when stopped", async () => {
       const peerRemoveSpy =
         mockPeerManagerEventEmitter.removeEventListener as sinon.SinonSpy;
       const wakuRemoveSpy =
         mockWakuEventEmitter.removeEventListener as sinon.SinonSpy;

       queryOnConnect.start();
-      queryOnConnect.stop();
+      await queryOnConnect.stop();

       expect(peerRemoveSpy.calledWith(PeerManagerEventNames.StoreConnect)).to.be
         .true;
@@ -337,6 +342,7 @@
     });

     afterEach(() => {
+      sinon.restore();
       mockClock.restore();
     });

View File

@@ -52,6 +52,13 @@ export class QueryOnConnect<
   private lastTimeOffline: number;
   private readonly forceQueryThresholdMs: number;

+  private isStarted: boolean = false;
+  private abortController?: AbortController;
+  private activeQueryPromise?: Promise<void>;
+  private boundStoreConnectHandler?: (event: CustomEvent<PeerId>) => void;
+  private boundHealthHandler?: (event: CustomEvent<HealthStatus>) => void;
+
   public constructor(
     public decoders: IDecoder<T>[],
     public stopIfTrue: (msg: T) => boolean,
@@ -71,11 +78,37 @@
   }

   public start(): void {
+    if (this.isStarted) {
+      log.warn("QueryOnConnect already running");
+      return;
+    }
     log.info("starting query-on-connect service");
+    this.isStarted = true;
+    this.abortController = new AbortController();
     this.setupEventListeners();
   }

-  public stop(): void {
+  public async stop(): Promise<void> {
+    if (!this.isStarted) {
+      return;
+    }
+
+    log.info("stopping query-on-connect service");
+    this.isStarted = false;
+
+    if (this.abortController) {
+      this.abortController.abort();
+      this.abortController = undefined;
+    }
+
+    if (this.activeQueryPromise) {
+      log.info("Waiting for active query to complete...");
+      try {
+        await this.activeQueryPromise;
+      } catch (error) {
+        log.warn("Active query failed during stop:", error);
+      }
+    }
+
     this.unsetEventListeners();
   }
@@ -107,7 +140,10 @@
       this.lastTimeOffline > this.lastSuccessfulQuery ||
       timeSinceLastQuery > this.forceQueryThresholdMs
     ) {
-      await this.query(peerId);
+      this.activeQueryPromise = this.query(peerId).finally(() => {
+        this.activeQueryPromise = undefined;
+      });
+      await this.activeQueryPromise;
     } else {
       log.info(`no querying`);
     }
@@ -120,7 +156,8 @@
     for await (const page of this._queryGenerator(this.decoders, {
       timeStart,
       timeEnd,
-      peerId
+      peerId,
+      abortSignal: this.abortController?.signal
     })) {
       // Await for decoding
       const messages = (await Promise.all(page)).filter(
@@ -166,33 +203,41 @@
   }

   private setupEventListeners(): void {
+    this.boundStoreConnectHandler = (event: CustomEvent<PeerId>) => {
+      void this.maybeQuery(event.detail).catch((err) =>
+        log.error("query-on-connect error", err)
+      );
+    };
+    this.boundHealthHandler = this.updateLastOfflineDate.bind(this);
+
     this.peerManagerEventEmitter.addEventListener(
       PeerManagerEventNames.StoreConnect,
-      (event) =>
-        void this.maybeQuery(event.detail).catch((err) =>
-          log.error("query-on-connect error", err)
-        )
+      this.boundStoreConnectHandler
     );
     this.wakuEventEmitter.addEventListener(
       WakuEvent.Health,
-      this.updateLastOfflineDate.bind(this)
+      this.boundHealthHandler
     );
   }

   private unsetEventListeners(): void {
-    this.peerManagerEventEmitter.removeEventListener(
-      PeerManagerEventNames.StoreConnect,
-      (event) =>
-        void this.maybeQuery(event.detail).catch((err) =>
-          log.error("query-on-connect error", err)
-        )
-    );
-    this.wakuEventEmitter.removeEventListener(
-      WakuEvent.Health,
-      this.updateLastOfflineDate.bind(this)
-    );
+    if (this.boundStoreConnectHandler) {
+      this.peerManagerEventEmitter.removeEventListener(
+        PeerManagerEventNames.StoreConnect,
+        this.boundStoreConnectHandler
+      );
+      this.boundStoreConnectHandler = undefined;
+    }
+    if (this.boundHealthHandler) {
+      this.wakuEventEmitter.removeEventListener(
+        WakuEvent.Health,
+        this.boundHealthHandler
+      );
+      this.boundHealthHandler = undefined;
+    }
   }

   private updateLastOfflineDate(event: CustomEvent<HealthStatus>): void {
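The stored `boundStoreConnectHandler`/`boundHealthHandler` references are the substance of this fix: `removeEventListener` only detaches a listener when it receives the exact function reference that was registered, so the previous code (which passed a fresh arrow function and a fresh `.bind(this)` on removal) never actually removed anything. A generic illustration of the pattern, not the project's code:

```ts
const target = new EventTarget();

// Never detaches: the second arrow function is a different reference.
target.addEventListener("health", () => console.log("health"));
target.removeEventListener("health", () => console.log("health"));

// Detaches correctly: register and remove the same reference.
const onHealth = (): void => console.log("health");
target.addEventListener("health", onHealth);
target.removeEventListener("health", onHealth);
```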

View File

@@ -1,2 +1,8 @@
 export { ReliableChannel, ReliableChannelOptions } from "./reliable_channel.js";
 export { ReliableChannelEvents, ReliableChannelEvent } from "./events.js";
+export {
+  StatusEvent,
+  StatusEvents,
+  StatusDetail,
+  ISyncStatusEvents
+} from "./sync_status.js";

View File

@@ -13,6 +13,8 @@ const DEFAULT_RETRIEVE_FREQUENCY_MS = 10 * 1000; // 10 seconds
 export class MissingMessageRetriever<T extends IDecodedMessage> {
   private retrieveInterval: ReturnType<typeof setInterval> | undefined;
   private missingMessages: Map<MessageId, Uint8Array<ArrayBufferLike>>; // Waku Message Ids
+  private activeQueryPromise: Promise<void> | undefined;
+  private abortController?: AbortController;

   public constructor(
     private readonly decoder: IDecoder<T>,
@@ -29,7 +31,11 @@
   public start(): void {
     if (this.retrieveInterval) {
       clearInterval(this.retrieveInterval);
+      this.retrieveInterval = undefined;
     }
+
+    this.abortController = new AbortController();
+
     if (this.retrieveFrequencyMs !== 0) {
       log.info(`start retrieve loop every ${this.retrieveFrequencyMs}ms`);
       this.retrieveInterval = setInterval(() => {
@@ -38,10 +44,30 @@
     }
   }

-  public stop(): void {
+  public async stop(): Promise<void> {
+    log.info("Stopping MissingMessageRetriever...");
     if (this.retrieveInterval) {
       clearInterval(this.retrieveInterval);
+      this.retrieveInterval = undefined;
     }
+
+    if (this.abortController) {
+      this.abortController.abort();
+      this.abortController = undefined;
+    }
+
+    if (this.activeQueryPromise) {
+      log.info("Waiting for active query to complete...");
+      try {
+        await this.activeQueryPromise;
+      } catch (error) {
+        log.warn("Active query failed during stop:", error);
+      }
+    }
+
+    this.missingMessages.clear();
+    log.info("MissingMessageRetriever stopped");
   }

   public addMissingMessage(
@@ -64,15 +90,30 @@
     if (this.missingMessages.size) {
       const messageHashes = Array.from(this.missingMessages.values());
       log.info("attempting to retrieve missing message", messageHashes.length);
-      for await (const page of this._retrieve([this.decoder], {
-        messageHashes
-      })) {
-        for await (const msg of page) {
-          if (msg && this.onMessageRetrieved) {
-            await this.onMessageRetrieved(msg);
+
+      this.activeQueryPromise = (async () => {
+        try {
+          for await (const page of this._retrieve([this.decoder], {
+            messageHashes,
+            abortSignal: this.abortController?.signal
+          })) {
+            for await (const msg of page) {
+              if (msg && this.onMessageRetrieved) {
+                await this.onMessageRetrieved(msg);
+              }
+            }
           }
+        } catch (error) {
+          if (error instanceof Error && error.name === "AbortError") {
+            log.info("Store query aborted");
+            return;
+          }
+          log.error("Store query failed:", error);
         }
-      }
+      })();
+
+      await this.activeQueryPromise;
+      this.activeQueryPromise = undefined;
     }
   }
 }
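Making `stop()` async follows the same idea as the `QueryOnConnect` change above: abort the in-flight Store query, then await it so no callback fires after shutdown. A condensed, generic sketch of that pattern (not the project's exact code):

```ts
class AbortableWorker {
  private active?: Promise<void>;
  private controller?: AbortController;

  public start(run: (signal: AbortSignal) => Promise<void>): void {
    this.controller = new AbortController();
    this.active = run(this.controller.signal).finally(() => {
      this.active = undefined;
    });
  }

  public async stop(): Promise<void> {
    this.controller?.abort();
    try {
      await this.active; // wait for the in-flight task to settle
    } catch {
      // a rejected task during shutdown is expected and ignored
    }
  }
}
```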

View File

@@ -0,0 +1,67 @@
import { Logger } from "@waku/utils";
const log = new Logger("sdk:random-timeout");
/**
* Enables waiting a random time before doing an action (using `setTimeout`),
* with possibility to apply a multiplier to manipulate said time.
*/
export class RandomTimeout {
private timeout: ReturnType<typeof setTimeout> | undefined;
public constructor(
/**
* The maximum interval one would wait before the call is made, in milliseconds.
*/
private maxIntervalMs: number,
/**
* When not zero: Anytime a call is made, then a new call will be rescheduled
* using this multiplier
*/
private multiplierOnCall: number,
/**
* The function to call when the timer is reached
*/
private callback: () => void | Promise<void>
) {
if (!Number.isFinite(maxIntervalMs) || maxIntervalMs < 0) {
throw new Error(
`maxIntervalMs must be a non-negative finite number, got: ${maxIntervalMs}`
);
}
if (!Number.isFinite(multiplierOnCall)) {
throw new Error(
`multiplierOnCall must be a finite number, got: ${multiplierOnCall}`
);
}
}
/**
* Use to start the timer. If a timer was already set, it deletes it and
* schedule a new one.
* @param multiplier applied to [[maxIntervalMs]]
*/
public restart(multiplier: number = 1): void {
this.stop();
if (this.maxIntervalMs) {
const timeoutMs = Math.random() * this.maxIntervalMs * multiplier;
this.timeout = setTimeout(() => {
try {
void this.callback();
} catch (error) {
log.error("Error in RandomTimeout callback:", error);
}
void this.restart(this.multiplierOnCall);
}, timeoutMs);
}
}
public stop(): void {
if (this.timeout) {
clearTimeout(this.timeout);
this.timeout = undefined;
}
}
}
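A hedged usage sketch for the class above; the values are illustrative, and the `0.3` mirrors the `SYNC_INTERVAL_REPAIR_MULTIPLIER` constant introduced later in this change set:

```ts
const sync = new RandomTimeout(
  30_000, // maxIntervalMs: next call happens within ~30 s
  2, // multiplierOnCall: each fired call reschedules with a window twice as large
  async () => {
    // send a sync message here
  }
);

sync.restart(); // arm the timer
sync.restart(0.3); // re-arm with a shorter window, e.g. while repairs are pending
sync.stop(); // cancel any pending call
```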

View File

@@ -13,7 +13,7 @@ import {
   LightPushSDKResult,
   QueryRequestParams
 } from "@waku/interfaces";
-import { ContentMessage, SyncMessage } from "@waku/sds";
+import { ContentMessage, MessageChannelEvent, SyncMessage } from "@waku/sds";
 import {
   createRoutingInfo,
   delay,
@@ -22,7 +22,7 @@ import {
 } from "@waku/utils";
 import { bytesToUtf8, hexToBytes, utf8ToBytes } from "@waku/utils/bytes";
 import { expect } from "chai";
-import { beforeEach, describe } from "mocha";
+import { afterEach, beforeEach, describe } from "mocha";
 import sinon from "sinon";

 import { ReliableChannel } from "./index.js";
@@ -40,6 +40,9 @@ describe("Reliable Channel", () => {
   let mockWakuNode: IWaku;
   let encoder: IEncoder;
   let decoder: IDecoder<IDecodedMessage>;
+  let reliableChannel: ReliableChannel<IDecodedMessage>;
+  let reliableChannelAlice: ReliableChannel<IDecodedMessage>;
+  let reliableChannelBob: ReliableChannel<IDecodedMessage>;

   beforeEach(async () => {
     mockWakuNode = new MockWakuNode();
@@ -50,8 +53,14 @@ describe("Reliable Channel", () => {
     decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
   });

+  afterEach(async () => {
+    await reliableChannel?.stop();
+    await reliableChannelAlice?.stop();
+    await reliableChannelBob?.stop();
+  });
+
   it("Outgoing message is emitted as sending", async () => {
-    const reliableChannel = await ReliableChannel.create(
+    reliableChannel = await ReliableChannel.create(
       mockWakuNode,
       "MyChannel",
       "alice",
@@ -78,7 +87,7 @@
   });

   it("Outgoing message is emitted as sent", async () => {
-    const reliableChannel = await ReliableChannel.create(
+    reliableChannel = await ReliableChannel.create(
       mockWakuNode,
       "MyChannel",
       "alice",
@@ -117,7 +126,7 @@
       });
     };

-    const reliableChannel = await ReliableChannel.create(
+    reliableChannel = await ReliableChannel.create(
       mockWakuNode,
       "MyChannel",
       "alice",
@@ -149,7 +158,7 @@
   });

   it("Outgoing message is not emitted as acknowledged from own outgoing messages", async () => {
-    const reliableChannel = await ReliableChannel.create(
+    reliableChannel = await ReliableChannel.create(
       mockWakuNode,
       "MyChannel",
       "alice",
@@ -176,20 +185,20 @@
     expect(messageAcknowledged).to.be.false;
   });

-  // TODO: https://github.com/waku-org/js-waku/issues/2648
+  // TODO: https://github.com/logos-messaging/logos-messaging-js/issues/2648
   it.skip("Outgoing message is possibly acknowledged", async () => {
     const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
     const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
     const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);

-    const reliableChannelAlice = await ReliableChannel.create(
+    reliableChannelAlice = await ReliableChannel.create(
       mockWakuNodeAlice,
       "MyChannel",
       "alice",
       encoder,
       decoder
     );
-    const reliableChannelBob = await ReliableChannel.create(
+    reliableChannelBob = await ReliableChannel.create(
       mockWakuNodeBob,
       "MyChannel",
       "bob",
@@ -245,14 +254,14 @@
     const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
     const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);

-    const reliableChannelAlice = await ReliableChannel.create(
+    reliableChannelAlice = await ReliableChannel.create(
       mockWakuNodeAlice,
       "MyChannel",
       "alice",
       encoder,
       decoder
     );
-    const reliableChannelBob = await ReliableChannel.create(
+    reliableChannelBob = await ReliableChannel.create(
       mockWakuNodeBob,
       "MyChannel",
       "bob",
@@ -292,7 +301,7 @@
   });

   it("Incoming message is emitted as received", async () => {
-    const reliableChannel = await ReliableChannel.create(
+    reliableChannel = await ReliableChannel.create(
       mockWakuNode,
       "MyChannel",
       "alice",
@@ -321,7 +330,7 @@
     const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
     const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);

-    const reliableChannelAlice = await ReliableChannel.create(
+    reliableChannelAlice = await ReliableChannel.create(
       mockWakuNodeAlice,
       "MyChannel",
       "alice",
@@ -332,7 +341,7 @@
         processTaskMinElapseMs: 10 // faster so it process message as soon as they arrive
       }
     );
-    const reliableChannelBob = await ReliableChannel.create(
+    reliableChannelBob = await ReliableChannel.create(
       mockWakuNodeBob,
       "MyChannel",
       "bob",
@@ -379,16 +388,13 @@
     });
   });

-  // the test is failing when run with all tests in sdk package
-  // no clear reason why, skipping for now
-  // TODO: fix this test https://github.com/waku-org/js-waku/issues/2648
-  describe.skip("Missing Message Retrieval", () => {
+  describe("Missing Message Retrieval", () => {
     it("Automatically retrieves missing message", async () => {
       const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
       const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);

       // Setup, Alice first
-      const reliableChannelAlice = await ReliableChannel.create(
+      reliableChannelAlice = await ReliableChannel.create(
         mockWakuNodeAlice,
         "MyChannel",
         "alice",
@@ -442,7 +448,7 @@
         queryGenerator: queryGeneratorStub
       };

-      const reliableChannelBob = await ReliableChannel.create(
+      reliableChannelBob = await ReliableChannel.create(
         mockWakuNodeBob,
         "MyChannel",
         "bob",
@@ -484,201 +490,6 @@
       });
     });
describe("Query On Connect Integration E2E Tests", () => {
let mockWakuNode: MockWakuNode;
let reliableChannel: ReliableChannel<IDecodedMessage>;
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
let mockPeerManagerEvents: TypedEventEmitter<any>;
let queryGeneratorStub: sinon.SinonStub;
let mockPeerId: PeerId;
beforeEach(async () => {
// Setup mock waku node with store capability
mockWakuNode = new MockWakuNode();
// Setup mock peer manager events for QueryOnConnect
mockPeerManagerEvents = new TypedEventEmitter();
(mockWakuNode as any).peerManager = {
events: mockPeerManagerEvents
};
// Setup encoder and decoder
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
// Setup store with queryGenerator for QueryOnConnect
queryGeneratorStub = sinon.stub();
mockWakuNode.store = {
queryGenerator: queryGeneratorStub
} as any;
mockPeerId = {
toString: () => "QmTestPeerId"
} as unknown as PeerId;
});
it("should trigger QueryOnConnect when going offline and store peer reconnects", async () => {
// Create a message that will be auto-retrieved
const messageText = "Auto-retrieved message";
const messagePayload = utf8ToBytes(messageText);
const sdsMessage = new ContentMessage(
ReliableChannel.getMessageId(messagePayload),
"testChannel",
"testSender",
[],
1n,
undefined,
messagePayload
);
const autoRetrievedMessage: IDecodedMessage = {
hash: hexToBytes("1234"),
hashStr: "1234",
version: 1,
timestamp: new Date(),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
// Setup queryGenerator to return the auto-retrieved message
queryGeneratorStub.callsFake(async function* () {
yield [Promise.resolve(autoRetrievedMessage)];
});
// Create ReliableChannel with queryOnConnect enabled
reliableChannel = await ReliableChannel.create(
mockWakuNode,
"testChannel",
"testSender",
encoder,
decoder
);
// Wait for initial setup
await delay(50);
// Setup complete - focus on testing QueryOnConnect trigger
// Simulate going offline (change health status)
mockWakuNode.events.dispatchEvent(
new CustomEvent("health", { detail: HealthStatus.Unhealthy })
);
await delay(10);
// Simulate store peer reconnection which should trigger QueryOnConnect
mockPeerManagerEvents.dispatchEvent(
new CustomEvent("store:connect", { detail: mockPeerId })
);
// Wait for store query to be triggered
await delay(200);
// Verify that QueryOnConnect was triggered by the conditions
expect(queryGeneratorStub.called).to.be.true;
});
it("should trigger QueryOnConnect when time threshold is exceeded", async () => {
// Create multiple messages that will be auto-retrieved
const message1Text = "First auto-retrieved message";
const message2Text = "Second auto-retrieved message";
const message1Payload = utf8ToBytes(message1Text);
const message2Payload = utf8ToBytes(message2Text);
const sdsMessage1 = new ContentMessage(
ReliableChannel.getMessageId(message1Payload),
"testChannel",
"testSender",
[],
1n,
undefined,
message1Payload
);
const sdsMessage2 = new ContentMessage(
ReliableChannel.getMessageId(message2Payload),
"testChannel",
"testSender",
[],
2n,
undefined,
message2Payload
);
const autoRetrievedMessage1: IDecodedMessage = {
hash: hexToBytes("5678"),
hashStr: "5678",
version: 1,
timestamp: new Date(Date.now() - 1000),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage1.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
const autoRetrievedMessage2: IDecodedMessage = {
hash: hexToBytes("9abc"),
hashStr: "9abc",
version: 1,
timestamp: new Date(),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage2.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
// Setup queryGenerator to return multiple messages
queryGeneratorStub.callsFake(async function* () {
yield [Promise.resolve(autoRetrievedMessage1)];
yield [Promise.resolve(autoRetrievedMessage2)];
});
// Create ReliableChannel with queryOnConnect enabled
reliableChannel = await ReliableChannel.create(
mockWakuNode,
"testChannel",
"testSender",
encoder,
decoder,
{ queryOnConnect: true }
);
await delay(50);
// Simulate old last successful query by accessing QueryOnConnect internals
// The default threshold is 5 minutes, so we'll set it to an old time
if ((reliableChannel as any).queryOnConnect) {
((reliableChannel as any).queryOnConnect as any).lastSuccessfulQuery =
Date.now() - 6 * 60 * 1000; // 6 minutes ago
}
// Simulate store peer connection which should trigger retrieval due to time threshold
mockPeerManagerEvents.dispatchEvent(
new CustomEvent("store:connect", { detail: mockPeerId })
);
// Wait for store query to be triggered
await delay(200);
// Verify that QueryOnConnect was triggered due to time threshold
expect(queryGeneratorStub.called).to.be.true;
});
});
describe("stopIfTrue Integration with QueryOnConnect", () => { describe("stopIfTrue Integration with QueryOnConnect", () => {
let mockWakuNode: MockWakuNode; let mockWakuNode: MockWakuNode;
let encoder: IEncoder; let encoder: IEncoder;
@ -792,7 +603,7 @@ describe("Reliable Channel", () => {
yield [Promise.resolve(messages[2])]; yield [Promise.resolve(messages[2])];
}); });
const reliableChannel = await ReliableChannel.create( reliableChannel = await ReliableChannel.create(
mockWakuNode, mockWakuNode,
channelId, channelId,
senderId, senderId,
@ -874,7 +685,7 @@ describe("Reliable Channel", () => {
yield [Promise.resolve(messages[1])]; yield [Promise.resolve(messages[1])];
}); });
const reliableChannel = await ReliableChannel.create( reliableChannel = await ReliableChannel.create(
mockWakuNode, mockWakuNode,
channelId, channelId,
senderId, senderId,
@ -979,7 +790,7 @@ describe("Reliable Channel", () => {
yield [Promise.resolve(messages[2])]; yield [Promise.resolve(messages[2])];
}); });
const reliableChannel = await ReliableChannel.create( reliableChannel = await ReliableChannel.create(
mockWakuNode, mockWakuNode,
channelId, channelId,
senderId, senderId,
@ -1004,7 +815,6 @@ describe("Reliable Channel", () => {
describe("isChannelMessageWithCausalHistory predicate", () => { describe("isChannelMessageWithCausalHistory predicate", () => {
let mockWakuNode: MockWakuNode; let mockWakuNode: MockWakuNode;
let reliableChannel: ReliableChannel<IDecodedMessage>;
let encoder: IEncoder; let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>; let decoder: IDecoder<IDecodedMessage>;
@ -1130,4 +940,317 @@ describe("Reliable Channel", () => {
expect(result).to.be.true; expect(result).to.be.true;
}); });
}); });
describe("Irretrievably lost messages", () => {
it("Sends ack once message is marked as irretrievably lost", async function (): Promise<void> {
this.timeout(5000);
sinon.restore();
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
// Setup, Alice first
reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
// disable any automation to better control the test
retryIntervalMs: 0,
syncMinIntervalMs: 0,
retrieveFrequencyMs: 0,
processTaskMinElapseMs: 10
}
);
// Bob is offline, Alice sends a message, this is the message we want
// Bob to consider irretrievable in this test.
const message = utf8ToBytes("missing message");
reliableChannelAlice.send(message);
// Wait to be sent
await new Promise((resolve) => {
reliableChannelAlice.addEventListener("message-sent", resolve, {
once: true
});
});
// Now Bob goes online
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
retryIntervalMs: 0, // disable any automation to better control the test
syncMinIntervalMs: 0,
sweepInBufIntervalMs: 20,
processTaskMinElapseMs: 10,
retrieveFrequencyMs: 0,
timeoutForLostMessagesMs: 30
}
);
let messageWithDepRcvd = false;
reliableChannelBob.addEventListener("message-received", (event) => {
if (bytesToUtf8(event.detail.payload) === "message with dep") {
messageWithDepRcvd = true;
}
});
// Alice sends a second message that refers to the first message.
// Bob should emit it, and learn about missing messages, and then finally
// mark it lost
const messageWithDep = utf8ToBytes("message with dep");
const messageWithDepId = reliableChannelAlice.send(messageWithDep);
let messageIsAcknowledged = false;
reliableChannelAlice.messageChannel.addEventListener(
MessageChannelEvent.OutMessageAcknowledged,
(event) => {
if (event.detail == messageWithDepId) {
messageIsAcknowledged = true;
}
}
);
// Wait to be sent
await new Promise((resolve) => {
reliableChannelAlice.addEventListener("message-sent", resolve, {
once: true
});
});
let messageMarkedLost = false;
reliableChannelBob.messageChannel.addEventListener(
MessageChannelEvent.InMessageLost,
(_event) => {
// TODO: check message matches
messageMarkedLost = true;
}
);
while (!messageWithDepRcvd) {
await delay(50);
}
expect(messageWithDepRcvd, "message with dep received and emitted").to.be
.true;
while (!messageMarkedLost) {
await delay(50);
}
expect(messageMarkedLost, "message marked as lost").to.be.true;
// Bob should now include Alice's message in a sync message and ack it
await reliableChannelBob["sendSyncMessage"]();
while (!messageIsAcknowledged) {
await delay(50);
}
expect(messageIsAcknowledged, "message has been acknowledged").to.be.true;
});
});
});
describe("Query On Connect Integration E2E Tests", () => {
let mockWakuNode: MockWakuNode;
let reliableChannel: ReliableChannel<IDecodedMessage>;
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
let mockPeerManagerEvents: TypedEventEmitter<any>;
let queryGeneratorStub: sinon.SinonStub;
let mockPeerId: PeerId;
beforeEach(async () => {
// Setup mock waku node with store capability
mockWakuNode = new MockWakuNode();
// Setup mock peer manager events for QueryOnConnect
mockPeerManagerEvents = new TypedEventEmitter();
(mockWakuNode as any).peerManager = {
events: mockPeerManagerEvents
};
// Setup encoder and decoder
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
// Setup store with queryGenerator for QueryOnConnect
queryGeneratorStub = sinon.stub();
mockWakuNode.store = {
queryGenerator: queryGeneratorStub
} as any;
mockPeerId = {
toString: () => "QmTestPeerId"
} as unknown as PeerId;
});
afterEach(async () => {
await reliableChannel?.stop();
});
it("should trigger QueryOnConnect when going offline and store peer reconnects", async () => {
// Create a message that will be auto-retrieved
const messageText = "Auto-retrieved message";
const messagePayload = utf8ToBytes(messageText);
const sdsMessage = new ContentMessage(
ReliableChannel.getMessageId(messagePayload),
"testChannel",
"testSender",
[],
1n,
undefined,
messagePayload
);
const autoRetrievedMessage: IDecodedMessage = {
hash: hexToBytes("1234"),
hashStr: "1234",
version: 1,
timestamp: new Date(),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
// Setup queryGenerator to return the auto-retrieved message
queryGeneratorStub.callsFake(async function* () {
yield [Promise.resolve(autoRetrievedMessage)];
});
// Create ReliableChannel with queryOnConnect enabled
reliableChannel = await ReliableChannel.create(
mockWakuNode,
"testChannel",
"testSender",
encoder,
decoder
);
// Wait for initial setup
await delay(50);
// Setup complete - focus on testing QueryOnConnect trigger
// Simulate going offline (change health status)
mockWakuNode.events.dispatchEvent(
new CustomEvent("health", { detail: HealthStatus.Unhealthy })
);
await delay(10);
// Simulate store peer reconnection which should trigger QueryOnConnect
mockPeerManagerEvents.dispatchEvent(
new CustomEvent("store:connect", { detail: mockPeerId })
);
// Wait for store query to be triggered
await delay(200);
// Verify that QueryOnConnect was triggered by the conditions
expect(queryGeneratorStub.called).to.be.true;
});
it("should trigger QueryOnConnect when time threshold is exceeded", async () => {
// Create multiple messages that will be auto-retrieved
const message1Text = "First auto-retrieved message";
const message2Text = "Second auto-retrieved message";
const message1Payload = utf8ToBytes(message1Text);
const message2Payload = utf8ToBytes(message2Text);
const sdsMessage1 = new ContentMessage(
ReliableChannel.getMessageId(message1Payload),
"testChannel",
"testSender",
[],
1n,
undefined,
message1Payload
);
const sdsMessage2 = new ContentMessage(
ReliableChannel.getMessageId(message2Payload),
"testChannel",
"testSender",
[],
2n,
undefined,
message2Payload
);
const autoRetrievedMessage1: IDecodedMessage = {
hash: hexToBytes("5678"),
hashStr: "5678",
version: 1,
timestamp: new Date(Date.now() - 1000),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage1.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
const autoRetrievedMessage2: IDecodedMessage = {
hash: hexToBytes("9abc"),
hashStr: "9abc",
version: 1,
timestamp: new Date(),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage2.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
// Setup queryGenerator to return multiple messages
queryGeneratorStub.callsFake(async function* () {
yield [Promise.resolve(autoRetrievedMessage1)];
yield [Promise.resolve(autoRetrievedMessage2)];
});
// Create ReliableChannel with queryOnConnect enabled
reliableChannel = await ReliableChannel.create(
mockWakuNode,
"testChannel",
"testSender",
encoder,
decoder,
{ queryOnConnect: true }
);
await delay(50);
// Simulate old last successful query by accessing QueryOnConnect internals
// The default threshold is 5 minutes, so we'll set it to an old time
if ((reliableChannel as any).queryOnConnect) {
((reliableChannel as any).queryOnConnect as any).lastSuccessfulQuery =
Date.now() - 6 * 60 * 1000; // 6 minutes ago
}
// Simulate store peer connection which should trigger retrieval due to time threshold
mockPeerManagerEvents.dispatchEvent(
new CustomEvent("store:connect", { detail: mockPeerId })
);
// Wait for store query to be triggered
await delay(200);
// Verify that QueryOnConnect was triggered due to time threshold
expect(queryGeneratorStub.called).to.be.true;
});
});

View File

@@ -17,9 +17,10 @@ import {
   isContentMessage,
   MessageChannel,
   MessageChannelEvent,
-  MessageChannelEvents,
   type MessageChannelOptions,
+  type ParticipantId,
   Message as SdsMessage,
-  type SenderId,
   SyncMessage
 } from "@waku/sds";
 import { Logger } from "@waku/utils";
@@ -31,14 +32,18 @@ import {
 import { ReliableChannelEvent, ReliableChannelEvents } from "./events.js";
 import { MissingMessageRetriever } from "./missing_message_retriever.js";
+import { RandomTimeout } from "./random_timeout.js";
 import { RetryManager } from "./retry_manager.js";
+import { ISyncStatusEvents, SyncStatus } from "./sync_status.js";

 const log = new Logger("sdk:reliable-channel");

 const DEFAULT_SYNC_MIN_INTERVAL_MS = 30 * 1000; // 30 seconds
+const SYNC_INTERVAL_REPAIR_MULTIPLIER = 0.3; // Reduce sync interval when repairs pending
 const DEFAULT_RETRY_INTERVAL_MS = 30 * 1000; // 30 seconds
 const DEFAULT_MAX_RETRY_ATTEMPTS = 10;
 const DEFAULT_SWEEP_IN_BUF_INTERVAL_MS = 5 * 1000;
+const DEFAULT_SWEEP_REPAIR_INTERVAL_MS = 10 * 1000; // 10 seconds
 const DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS = 1000;

 const IRRECOVERABLE_SENDING_ERRORS: LightPushError[] = [
@@ -48,6 +53,15 @@ const IRRECOVERABLE_SENDING_ERRORS: LightPushError[] = [
   LightPushError.RLN_PROOF_GENERATION
 ];

+/**
+ * Strategy for retrieving missing messages.
+ * - 'both': Use SDS-R peer repair and Store queries in parallel (default)
+ * - 'sds-r-only': Only use SDS-R peer repair
+ * - 'store-only': Only use Store queries (legacy behavior)
+ * - 'none': No automatic retrieval
+ */
+export type RetrievalStrategy = "both" | "sds-r-only" | "store-only" | "none";
+
 export type ReliableChannelOptions = MessageChannelOptions & {
   /**
    * The minimum interval between 2 sync messages in the channel.
@@ -78,6 +92,7 @@ export type ReliableChannelOptions = MessageChannelOptions & {
   /**
    * How often store queries are done to retrieve missing messages.
+   * Only applies when retrievalStrategy includes Store ('both' or 'store-only').
    *
    * @default 10,000 (10 seconds)
    */
@@ -111,6 +126,13 @@ export type ReliableChannelOptions = MessageChannelOptions & {
    * @default 1000 (1 second)
    */
   processTaskMinElapseMs?: number;
+
+  /**
+   * Strategy for retrieving missing messages.
+   *
+   * @default 'both'
+   */
+  retrievalStrategy?: RetrievalStrategy;
 };
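Choosing a strategy is done through the options bag passed to `ReliableChannel.create` (signature as in this diff); `node`, `encoder` and `decoder` below are assumed to exist in the calling application:

```ts
const channel = await ReliableChannel.create(
  node,
  "MyChannel",
  "alice",
  encoder,
  decoder,
  {
    retrievalStrategy: "store-only", // "both" (default) | "sds-r-only" | "none"
    retrieveFrequencyMs: 10_000 // only meaningful when Store retrieval is enabled
  }
);
```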
/** /**
@ -136,27 +158,34 @@ export class ReliableChannel<
callback: Callback<T> callback: Callback<T>
) => Promise<boolean>; ) => Promise<boolean>;
private readonly _unsubscribe?: (
decoders: IDecoder<T> | IDecoder<T>[]
) => Promise<boolean>;
private readonly _retrieve?: <T extends IDecodedMessage>( private readonly _retrieve?: <T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
options?: Partial<QueryRequestParams> options?: Partial<QueryRequestParams>
) => AsyncGenerator<Promise<T | undefined>[]>; ) => AsyncGenerator<Promise<T | undefined>[]>;
private readonly syncMinIntervalMs: number; private eventListenerCleanups: Array<() => void> = [];
private syncTimeout: ReturnType<typeof setTimeout> | undefined; private syncRandomTimeout: RandomTimeout;
private sweepInBufInterval: ReturnType<typeof setInterval> | undefined; private sweepInBufInterval: ReturnType<typeof setInterval> | undefined;
private readonly sweepInBufIntervalMs: number; private readonly sweepInBufIntervalMs: number;
private sweepRepairInterval: ReturnType<typeof setInterval> | undefined;
private processTaskTimeout: ReturnType<typeof setTimeout> | undefined; private processTaskTimeout: ReturnType<typeof setTimeout> | undefined;
private readonly retryManager: RetryManager | undefined; private readonly retryManager: RetryManager | undefined;
private readonly missingMessageRetriever?: MissingMessageRetriever<T>; private readonly missingMessageRetriever?: MissingMessageRetriever<T>;
private readonly queryOnConnect?: QueryOnConnect<T>; private readonly queryOnConnect?: QueryOnConnect<T>;
private readonly processTaskMinElapseMs: number; private readonly processTaskMinElapseMs: number;
private _started: boolean; private _started: boolean;
private activePendingProcessTask?: Promise<void>;
private constructor( private constructor(
public node: IWaku, public node: IWaku,
public messageChannel: MessageChannel, public messageChannel: MessageChannel,
private encoder: IEncoder, private encoder: IEncoder,
private decoder: IDecoder<T>, private decoder: IDecoder<T>,
private retrievalStrategy: RetrievalStrategy,
options?: ReliableChannelOptions options?: ReliableChannelOptions
) { ) {
super(); super();
@ -170,6 +199,7 @@ export class ReliableChannel<
if (node.filter) { if (node.filter) {
this._subscribe = node.filter.subscribe.bind(node.filter); this._subscribe = node.filter.subscribe.bind(node.filter);
this._unsubscribe = node.filter.unsubscribe.bind(node.filter);
} else if (node.relay) { } else if (node.relay) {
// TODO: Why do relay and filter have different interfaces? // TODO: Why do relay and filter have different interfaces?
// this._subscribe = node.relay.subscribeWithUnsubscribe; // this._subscribe = node.relay.subscribeWithUnsubscribe;
@ -195,8 +225,11 @@ export class ReliableChannel<
} }
} }
this.syncMinIntervalMs = this.syncRandomTimeout = new RandomTimeout(
options?.syncMinIntervalMs ?? DEFAULT_SYNC_MIN_INTERVAL_MS; options?.syncMinIntervalMs ?? DEFAULT_SYNC_MIN_INTERVAL_MS,
2,
this.sendSyncMessage.bind(this)
);
this.sweepInBufIntervalMs = this.sweepInBufIntervalMs =
options?.sweepInBufIntervalMs ?? DEFAULT_SWEEP_IN_BUF_INTERVAL_MS; options?.sweepInBufIntervalMs ?? DEFAULT_SWEEP_IN_BUF_INTERVAL_MS;
@ -214,7 +247,8 @@ export class ReliableChannel<
this.processTaskMinElapseMs = this.processTaskMinElapseMs =
options?.processTaskMinElapseMs ?? DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS; options?.processTaskMinElapseMs ?? DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS;
if (this._retrieve) { // Only enable Store retrieval based on strategy
if (this._retrieve && this.shouldUseStore()) {
this.missingMessageRetriever = new MissingMessageRetriever( this.missingMessageRetriever = new MissingMessageRetriever(
this.decoder, this.decoder,
options?.retrieveFrequencyMs, options?.retrieveFrequencyMs,
@ -226,8 +260,22 @@ export class ReliableChannel<
} }
this._started = false; this._started = false;
this._internalSyncStatus = new SyncStatus();
this.syncStatus = this._internalSyncStatus;
} }
/**
* Emit events when the channel is aware of missing messages.
* Note that "synced" may mean some messages are irretrievably lost.
* Check the emitted data for details.
*
* @emits [[StatusEvents]]
*
*/
public readonly syncStatus: ISyncStatusEvents;
private readonly _internalSyncStatus: SyncStatus;
public get isStarted(): boolean { public get isStarted(): boolean {
return this._started; return this._started;
} }
@ -264,17 +312,26 @@ export class ReliableChannel<
public static async create<T extends IDecodedMessage>( public static async create<T extends IDecodedMessage>(
node: IWaku, node: IWaku,
channelId: ChannelId, channelId: ChannelId,
senderId: SenderId, senderId: ParticipantId,
encoder: IEncoder, encoder: IEncoder,
decoder: IDecoder<T>, decoder: IDecoder<T>,
options?: ReliableChannelOptions options?: ReliableChannelOptions
): Promise<ReliableChannel<T>> { ): Promise<ReliableChannel<T>> {
const sdsMessageChannel = new MessageChannel(channelId, senderId, options); // Enable SDS-R repair only if retrieval strategy uses it
const retrievalStrategy = options?.retrievalStrategy ?? "both";
const enableRepair =
retrievalStrategy === "both" || retrievalStrategy === "sds-r-only";
const sdsMessageChannel = new MessageChannel(channelId, senderId, {
...options,
enableRepair
});
const messageChannel = new ReliableChannel( const messageChannel = new ReliableChannel(
node, node,
sdsMessageChannel, sdsMessageChannel,
encoder, encoder,
decoder, decoder,
retrievalStrategy,
options options
); );
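For quick reference, the repair-enabling rule applied in `create()` can be read as a small predicate over the strategy; the `RetrievalStrategy` union below is an assumption reconstructed from the string literals used in this file.

```typescript
type RetrievalStrategy = "both" | "store-only" | "sds-r-only";

// SDS-R repair is enabled for every strategy except pure Store retrieval.
function shouldEnableRepair(strategy: RetrievalStrategy = "both"): boolean {
  return strategy === "both" || strategy === "sds-r-only";
}

console.log(shouldEnableRepair()); // true (default "both")
console.log(shouldEnableRepair("store-only")); // false
```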
@ -384,10 +441,21 @@ export class ReliableChannel<
private async subscribe(): Promise<boolean> { private async subscribe(): Promise<boolean> {
this.assertStarted(); this.assertStarted();
return this._subscribe(this.decoder, async (message: T) => { return this._subscribe(this.decoder, async (message: T) => {
if (!this._started) {
log.info("ReliableChannel stopped, ignoring incoming message");
return;
}
await this.processIncomingMessage(message); await this.processIncomingMessage(message);
}); });
} }
private async unsubscribe(): Promise<boolean> {
if (!this._unsubscribe) {
throw Error("No unsubscribe method available");
}
return this._unsubscribe(this.decoder);
}
/** /**
* Don't forget to call `this.messageChannel.sweepIncomingBuffer();` once done. * Don't forget to call `this.messageChannel.sweepIncomingBuffer();` once done.
* @param msg * @param msg
@ -418,6 +486,7 @@ export class ReliableChannel<
// missing messages or the status of previous outgoing messages // missing messages or the status of previous outgoing messages
this.messageChannel.pushIncomingMessage(sdsMessage, retrievalHint); this.messageChannel.pushIncomingMessage(sdsMessage, retrievalHint);
// Remove from Store retriever if message was retrieved
this.missingMessageRetriever?.removeMissingMessage(sdsMessage.messageId); this.missingMessageRetriever?.removeMissingMessage(sdsMessage.messageId);
if (sdsMessage.content && sdsMessage.content.length > 0) { if (sdsMessage.content && sdsMessage.content.length > 0) {
@ -458,26 +527,42 @@ export class ReliableChannel<
// TODO: For now we only queue process tasks for incoming messages // TODO: For now we only queue process tasks for incoming messages
// As this is where there is most volume // As this is where there is most volume
private queueProcessTasks(): void { private queueProcessTasks(): void {
if (!this._started) return;
// If one is already queued, then we can ignore it // If one is already queued, then we can ignore it
if (this.processTaskTimeout === undefined) { if (this.processTaskTimeout === undefined) {
this.processTaskTimeout = setTimeout(() => { this.processTaskTimeout = setTimeout(() => {
void this.messageChannel.processTasks().catch((err) => { this.activePendingProcessTask = this.messageChannel
log.error("error encountered when processing sds tasks", err); .processTasks()
}); .catch((err) => {
log.error("error encountered when processing sds tasks", err);
})
.finally(() => {
this.activePendingProcessTask = undefined;
});
// Clear timeout once triggered // Clear timeout once triggered
clearTimeout(this.processTaskTimeout); this.clearProcessTasks();
this.processTaskTimeout = undefined;
}, this.processTaskMinElapseMs); // we ensure that we don't call process tasks more than once per second }, this.processTaskMinElapseMs); // we ensure that we don't call process tasks more than once per second
} }
} }
private clearProcessTasks(): void {
if (this.processTaskTimeout) {
clearTimeout(this.processTaskTimeout);
this.processTaskTimeout = undefined;
}
}
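`queueProcessTasks`/`clearProcessTasks` coalesce bursts of incoming messages into at most one `processTasks()` run per `processTaskMinElapseMs`, and keep a handle on the in-flight promise so `stop()` can await it. A standalone sketch of the same debounce-and-track pattern (the class and names below are illustrative, not part of the library):

```typescript
class DebouncedRunner {
  private timer?: ReturnType<typeof setTimeout>;
  private pending?: Promise<void>;

  public constructor(
    private readonly task: () => Promise<void>,
    private readonly minElapseMs: number
  ) {}

  // Schedule one run; further calls while a run is scheduled are no-ops.
  public schedule(): void {
    if (this.timer !== undefined) return;
    this.timer = setTimeout(() => {
      this.pending = this.task()
        .catch((err) => console.error("task failed", err))
        .finally(() => (this.pending = undefined));
      this.clear();
    }, this.minElapseMs);
  }

  public clear(): void {
    if (this.timer !== undefined) {
      clearTimeout(this.timer);
      this.timer = undefined;
    }
  }

  // Await the in-flight run, if any, e.g. during shutdown.
  public async flush(): Promise<void> {
    if (this.pending) await this.pending;
  }
}
```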
public async start(): Promise<boolean> { public async start(): Promise<boolean> {
if (this._started) return true; if (this._started) return true;
this._started = true; this._started = true;
this.setupEventListeners(); this.setupEventListeners();
this.restartSync(); this.restartSync();
this.startSweepIncomingBufferLoop(); this.startSweepIncomingBufferLoop();
this.startRepairSweepLoop();
if (this._retrieve) { if (this._retrieve) {
this.missingMessageRetriever?.start(); this.missingMessageRetriever?.start();
this.queryOnConnect?.start(); this.queryOnConnect?.start();
@ -485,15 +570,33 @@ export class ReliableChannel<
return this.subscribe(); return this.subscribe();
} }
public stop(): void { public async stop(): Promise<void> {
if (!this._started) return; if (!this._started) return;
log.info("Stopping ReliableChannel...");
this._started = false; this._started = false;
this.removeAllEventListeners();
this.stopSync(); this.stopSync();
this.stopSweepIncomingBufferLoop(); this.stopSweepIncomingBufferLoop();
this.missingMessageRetriever?.stop(); this.stopRepairSweepLoop();
this.queryOnConnect?.stop(); this.clearProcessTasks();
// TODO unsubscribe
// TODO unsetMessageListeners if (this.activePendingProcessTask) {
await this.activePendingProcessTask;
}
await this.missingMessageRetriever?.stop();
await this.queryOnConnect?.stop();
this.retryManager?.stopAllRetries();
await this.unsubscribe();
this._internalSyncStatus.cleanUp();
log.info("ReliableChannel stopped successfully");
} }
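`stop()` is now asynchronous: it waits for any in-flight `processTasks()` call, removes listeners, clears timers and unsubscribes before resolving, so callers should await it. A minimal usage sketch; the parameter shapes are structural placeholders rather than the library's actual types:

```typescript
// `channel` was created with ReliableChannel.create and `node` is the underlying Waku node.
async function shutdown(
  channel: { stop(): Promise<void> },
  node: { stop(): Promise<void> }
): Promise<void> {
  // stop() is idempotent; awaiting it guarantees listeners are removed and
  // loops are cleared before the node itself shuts down.
  await channel.stop();
  await node.stop();
}
```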
private assertStarted(): void { private assertStarted(): void {
@ -509,34 +612,65 @@ export class ReliableChannel<
} }
private stopSweepIncomingBufferLoop(): void { private stopSweepIncomingBufferLoop(): void {
if (this.sweepInBufInterval) clearInterval(this.sweepInBufInterval); if (this.sweepInBufInterval) {
clearInterval(this.sweepInBufInterval);
this.sweepInBufInterval = undefined;
}
}
private startRepairSweepLoop(): void {
if (!this.shouldUseSdsR()) {
return;
}
this.stopRepairSweepLoop();
this.sweepRepairInterval = setInterval(() => {
void this.messageChannel
.sweepRepairIncomingBuffer(async (message) => {
// Rebroadcast the repair message
const wakuMessage = { payload: message.encode() };
const result = await this._send(this.encoder, wakuMessage);
return result.failures.length === 0;
})
.catch((err) => {
log.error("error encountered when sweeping repair buffer", err);
});
}, DEFAULT_SWEEP_REPAIR_INTERVAL_MS);
}
private stopRepairSweepLoop(): void {
if (this.sweepRepairInterval) {
clearInterval(this.sweepRepairInterval);
this.sweepRepairInterval = undefined;
}
}
private shouldUseStore(): boolean {
return (
this.retrievalStrategy === "both" ||
this.retrievalStrategy === "store-only"
);
}
private shouldUseSdsR(): boolean {
return (
this.retrievalStrategy === "both" ||
this.retrievalStrategy === "sds-r-only"
);
} }
private restartSync(multiplier: number = 1): void { private restartSync(multiplier: number = 1): void {
if (this.syncTimeout) { // Adaptive sync: use shorter interval when repairs are pending
clearTimeout(this.syncTimeout); const hasPendingRepairs =
} this.shouldUseSdsR() && this.messageChannel.hasPendingRepairRequests();
if (this.syncMinIntervalMs) { const effectiveMultiplier = hasPendingRepairs
const timeoutMs = this.random() * this.syncMinIntervalMs * multiplier; ? multiplier * SYNC_INTERVAL_REPAIR_MULTIPLIER
: multiplier;
this.syncTimeout = setTimeout(() => { this.syncRandomTimeout.restart(effectiveMultiplier);
void this.sendSyncMessage();
// Always restart a sync, no matter whether the message was sent.
// Set a multiplier so we wait a bit longer to not hog the conversation
void this.restartSync(2);
}, timeoutMs);
}
} }
private stopSync(): void { private stopSync(): void {
if (this.syncTimeout) { this.syncRandomTimeout.stop();
clearTimeout(this.syncTimeout);
}
}
// Used to enable overriding when testing
private random(): number {
return Math.random();
} }
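`restartSync` now shortens the effective sync interval while SDS-R repair requests are pending. A sketch of that decision in isolation; the randomized-delay formula mirrors the inline implementation removed above (presumably now inside `RandomTimeout`), and the value of `SYNC_INTERVAL_REPAIR_MULTIPLIER` is an assumption, as it is not shown in this diff.

```typescript
// Assumed to be below 1 so that pending repairs shorten the wait (value not shown in this diff).
const SYNC_INTERVAL_REPAIR_MULTIPLIER = 0.5;

function effectiveSyncMultiplier(
  baseMultiplier: number,
  hasPendingRepairs: boolean
): number {
  return hasPendingRepairs
    ? baseMultiplier * SYNC_INTERVAL_REPAIR_MULTIPLIER
    : baseMultiplier;
}

// With a 30s minimum interval and pending repairs, the randomized timeout is
// drawn from a window half as long as usual (mirrors the removed inline formula).
const delayMs = Math.random() * 30_000 * effectiveSyncMultiplier(2, true);
console.log(delayMs);
```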
private safeSendEvent<T extends ReliableChannelEvent>( private safeSendEvent<T extends ReliableChannelEvent>(
@ -595,20 +729,36 @@ export class ReliableChannel<
return sdsMessage.causalHistory && sdsMessage.causalHistory.length > 0; return sdsMessage.causalHistory && sdsMessage.causalHistory.length > 0;
} }
private addTrackedEventListener<K extends keyof MessageChannelEvents>(
eventName: K,
listener: (event: MessageChannelEvents[K]) => void
): void {
this.messageChannel.addEventListener(eventName, listener as any);
this.eventListenerCleanups.push(() => {
this.messageChannel.removeEventListener(eventName, listener as any);
});
}
private setupEventListeners(): void { private setupEventListeners(): void {
this.messageChannel.addEventListener( this.addTrackedEventListener(
MessageChannelEvent.OutMessageSent, MessageChannelEvent.OutMessageSent,
(event) => { (event) => {
if (event.detail.content) { if (isContentMessage(event.detail)) {
const messageId = ReliableChannel.getMessageId(event.detail.content); const messageId = ReliableChannel.getMessageId(event.detail.content);
this.safeSendEvent("message-sent", { this.safeSendEvent("message-sent", {
detail: messageId detail: messageId
}); });
// restart the sync timeout when a content message has been sent,
// because it already fulfils the sync purpose (a content message
// carries the causal history)
this.restartSync();
} }
} }
); );
this.messageChannel.addEventListener( this.addTrackedEventListener(
MessageChannelEvent.OutMessageAcknowledged, MessageChannelEvent.OutMessageAcknowledged,
(event) => { (event) => {
if (event.detail) { if (event.detail) {
@ -616,13 +766,13 @@ export class ReliableChannel<
detail: event.detail detail: event.detail
}); });
// Stopping retries // Stopping retries as the message was acknowledged
this.retryManager?.stopRetries(event.detail); this.retryManager?.stopRetries(event.detail);
} }
} }
); );
this.messageChannel.addEventListener( this.addTrackedEventListener(
MessageChannelEvent.OutMessagePossiblyAcknowledged, MessageChannelEvent.OutMessagePossiblyAcknowledged,
(event) => { (event) => {
if (event.detail) { if (event.detail) {
@ -636,7 +786,7 @@ export class ReliableChannel<
} }
); );
this.messageChannel.addEventListener( this.addTrackedEventListener(
MessageChannelEvent.InSyncReceived, MessageChannelEvent.InSyncReceived,
(_event) => { (_event) => {
// restart the timeout when a sync message has been received // restart the timeout when a sync message has been received
@ -644,9 +794,10 @@ export class ReliableChannel<
} }
); );
this.messageChannel.addEventListener( this.addTrackedEventListener(
MessageChannelEvent.InMessageReceived, MessageChannelEvent.InMessageReceived,
(event) => { (event) => {
this._internalSyncStatus.onMessagesReceived(event.detail.messageId);
// restart the timeout when a content message has been received // restart the timeout when a content message has been received
if (isContentMessage(event.detail)) { if (isContentMessage(event.detail)) {
// send a sync message faster to ack someone's else // send a sync message faster to ack someone's else
@ -655,20 +806,16 @@ export class ReliableChannel<
} }
); );
this.messageChannel.addEventListener( this.addTrackedEventListener(
MessageChannelEvent.OutMessageSent,
(event) => {
// restart the timeout when a content message has been sent
if (isContentMessage(event.detail)) {
this.restartSync();
}
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.InMessageMissing, MessageChannelEvent.InMessageMissing,
(event) => { (event) => {
this._internalSyncStatus.onMessagesMissing(
...event.detail.map((m) => m.messageId)
);
for (const { messageId, retrievalHint } of event.detail) { for (const { messageId, retrievalHint } of event.detail) {
// Store retrieval (for 'both' and 'store-only' strategies)
// SDS-R repair happens automatically via RepairManager for 'both' and 'sds-r-only'
if (retrievalHint && this.missingMessageRetriever) { if (retrievalHint && this.missingMessageRetriever) {
this.missingMessageRetriever.addMissingMessage( this.missingMessageRetriever.addMissingMessage(
messageId, messageId,
@ -679,13 +826,39 @@ export class ReliableChannel<
} }
); );
this.addTrackedEventListener(MessageChannelEvent.InMessageLost, (event) => {
this._internalSyncStatus.onMessagesLost(
...event.detail.map((m) => m.messageId)
);
});
if (this.queryOnConnect) { if (this.queryOnConnect) {
const queryListener = (event: any): void => {
void this.processIncomingMessages(event.detail);
};
this.queryOnConnect.addEventListener( this.queryOnConnect.addEventListener(
QueryOnConnectEvent.MessagesRetrieved, QueryOnConnectEvent.MessagesRetrieved,
(event) => { queryListener
void this.processIncomingMessages(event.detail);
}
); );
this.eventListenerCleanups.push(() => {
this.queryOnConnect?.removeEventListener(
QueryOnConnectEvent.MessagesRetrieved,
queryListener
);
});
} }
} }
private removeAllEventListeners(): void {
for (const cleanup of this.eventListenerCleanups) {
try {
cleanup();
} catch (error) {
log.error("error removing event listener:", error);
}
}
this.eventListenerCleanups = [];
}
} }
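`addTrackedEventListener` pairs every subscription with a stored cleanup so `removeAllEventListeners()` can drop them all during `stop()`. The same pattern in a self-contained form (illustrative names, standard `EventTarget` API):

```typescript
type Cleanup = () => void;

class ListenerTracker {
  private cleanups: Cleanup[] = [];

  public track(target: EventTarget, type: string, listener: EventListener): void {
    target.addEventListener(type, listener);
    this.cleanups.push(() => target.removeEventListener(type, listener));
  }

  public removeAll(): void {
    for (const cleanup of this.cleanups) {
      try {
        cleanup();
      } catch (err) {
        console.error("error removing event listener:", err);
      }
    }
    this.cleanups = [];
  }
}
```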


@ -187,7 +187,7 @@ describe("Reliable Channel: Encryption", () => {
expect(messageAcknowledged).to.be.false; expect(messageAcknowledged).to.be.false;
}); });
// TODO: https://github.com/waku-org/js-waku/issues/2648 // TODO: https://github.com/logos-messaging/logos-messaging-js/issues/2648
it.skip("Outgoing message is possibly acknowledged", async () => { it.skip("Outgoing message is possibly acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>(); const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter); const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);


@ -66,7 +66,7 @@ describe("Reliable Channel: Sync", () => {
}); });
while (!messageSent) { while (!messageSent) {
await delay(50); await delay(10);
} }
let syncMessageSent = false; let syncMessageSent = false;


@ -0,0 +1,207 @@
import { createDecoder, createEncoder } from "@waku/core";
import {
AutoSharding,
IDecodedMessage,
IDecoder,
IEncoder
} from "@waku/interfaces";
import { createRoutingInfo, delay, MockWakuNode } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { beforeEach, describe } from "mocha";
import {
createMockNodes,
sendAndWaitForEvent,
TEST_CONSTANTS,
waitFor
} from "./test_utils.js";
import { ReliableChannel, StatusDetail } from "./index.js";
const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto";
const TEST_NETWORK_CONFIG: AutoSharding = {
clusterId: 0,
numShardsInCluster: 1
};
const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, {
contentTopic: TEST_CONTENT_TOPIC
});
describe("Sync Status", () => {
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
let mockWakuNodeAlice: MockWakuNode;
let mockWakuNodeBob: MockWakuNode;
let reliableChannelAlice: ReliableChannel<any> | undefined;
let reliableChannelBob: ReliableChannel<any> | undefined;
beforeEach(async () => {
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
const mockNodes = createMockNodes();
mockWakuNodeAlice = mockNodes.alice;
mockWakuNodeBob = mockNodes.bob;
});
afterEach(async () => {
if (reliableChannelAlice) {
await reliableChannelAlice.stop();
reliableChannelAlice = undefined;
}
if (reliableChannelBob) {
await reliableChannelBob.stop();
reliableChannelBob = undefined;
}
});
it("Synced status is emitted when a message is received", async () => {
reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder
);
reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder
);
let statusDetail: StatusDetail | undefined;
reliableChannelBob.syncStatus.addEventListener("synced", (event) => {
statusDetail = event.detail;
});
const message = utf8ToBytes("message in channel");
reliableChannelAlice.send(message);
await waitFor(() => statusDetail);
expect(statusDetail!.received).to.eq(1);
});
it("Synced status is emitted when a missing message is received", async () => {
reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
retryIntervalMs: TEST_CONSTANTS.RETRY_INTERVAL_MS
}
);
// Send a message before Bob goes online so it's marked as missing
await sendAndWaitForEvent(
reliableChannelAlice,
utf8ToBytes("missing message")
);
reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder
);
let syncingStatusDetail: StatusDetail | undefined;
reliableChannelBob.syncStatus.addEventListener("syncing", (event) => {
syncingStatusDetail = event.detail;
});
let syncedStatusDetail: StatusDetail | undefined;
reliableChannelBob.syncStatus.addEventListener("synced", (event) => {
syncedStatusDetail = event.detail;
});
await sendAndWaitForEvent(
reliableChannelAlice,
utf8ToBytes("second message with missing message as dep")
);
await waitFor(() => syncingStatusDetail);
expect(syncingStatusDetail!.missing).to.eq(1);
expect(syncingStatusDetail!.received).to.eq(1);
await waitFor(() => syncedStatusDetail);
expect(syncedStatusDetail!.missing).to.eq(0);
expect(syncedStatusDetail!.received).to.eq(2);
});
it("Synced status is emitted when a missing message is marked as lost", async () => {
reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
syncMinIntervalMs: 0,
retryIntervalMs: 0 // Do not retry so we can lose the message
}
);
// Send a message before Bob goes online so it's marked as missing
await sendAndWaitForEvent(
reliableChannelAlice,
utf8ToBytes("missing message")
);
reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
retrieveFrequencyMs: 0,
syncMinIntervalMs: 0,
sweepInBufIntervalMs: 0, // we want to control this
timeoutForLostMessagesMs: 200 // timeout within the test
}
);
let syncingStatusDetail: StatusDetail | undefined;
reliableChannelBob.syncStatus.addEventListener("syncing", (event) => {
syncingStatusDetail = event.detail;
});
await sendAndWaitForEvent(
reliableChannelAlice,
utf8ToBytes("second message with missing message as dep")
);
await waitFor(() => syncingStatusDetail);
expect(syncingStatusDetail!.missing).to.eq(1, "at first, one missing");
expect(syncingStatusDetail!.received).to.eq(1, "at first, one received");
expect(syncingStatusDetail!.lost).to.eq(0, "at first, no loss");
let syncedStatusDetail: StatusDetail | undefined;
reliableChannelBob.syncStatus.addEventListener("synced", (event) => {
syncedStatusDetail = event.detail;
});
// wait long enough for the message to be marked as lost
await delay(200);
reliableChannelBob.messageChannel["sweepIncomingBuffer"]();
await waitFor(() => syncedStatusDetail);
expect(syncedStatusDetail!.missing).to.eq(0, "no more missing message");
expect(syncedStatusDetail!.received).to.eq(1, "still one received message");
expect(syncedStatusDetail!.lost).to.eq(1, "missing message is marked lost");
});
});
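Beyond the tests above, the same `syncStatus` surface lets an application drive a sync indicator. A hedged sketch; `ReliableChannel` and `StatusDetail` are imported from the package index as the new tests do, and `channel` stands for an already-created, started channel.

```typescript
import type { ReliableChannel, StatusDetail } from "./index.js"; // assumed exports

declare const channel: ReliableChannel<any>; // an already-created, started channel

function watchSync(onUpdate: (label: string, detail: StatusDetail) => void): void {
  channel.syncStatus.addEventListener("syncing", (event) => {
    // Some dependencies are still missing but may yet be repaired or retrieved.
    onUpdate("syncing", event.detail);
  });
  channel.syncStatus.addEventListener("synced", (event) => {
    // "synced" can still report lost > 0: those messages are gone for good.
    onUpdate("synced", event.detail);
  });
}
```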


@ -24,19 +24,33 @@ export class RetryManager {
const timeout = this.timeouts.get(id); const timeout = this.timeouts.get(id);
if (timeout) { if (timeout) {
clearTimeout(timeout); clearTimeout(timeout);
this.timeouts.delete(id);
} }
} }
public stopAllRetries(): void {
for (const [_id, timeout] of this.timeouts.entries()) {
clearTimeout(timeout);
}
this.timeouts.clear();
}
public startRetries(id: string, retry: () => void | Promise<void>): void { public startRetries(id: string, retry: () => void | Promise<void>): void {
this.retry(id, retry, 0); this.retry(id, retry, 0);
} }
public stop(): void {
for (const timeout of this.timeouts.values()) {
clearTimeout(timeout);
}
}
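`stopRetries` now also deletes the map entry, and `stopAllRetries` clears every scheduled retry so the channel's `stop()` leaves no timers behind. A standalone sketch of the same per-id timeout bookkeeping (not the library's actual class):

```typescript
class TimeoutRegistry {
  private readonly timeouts = new Map<string, ReturnType<typeof setTimeout>>();

  public schedule(id: string, fn: () => void, delayMs: number): void {
    this.cancel(id); // never keep two timers for the same id
    this.timeouts.set(id, setTimeout(fn, delayMs));
  }

  public cancel(id: string): void {
    const timeout = this.timeouts.get(id);
    if (timeout) {
      clearTimeout(timeout);
      this.timeouts.delete(id);
    }
  }

  public cancelAll(): void {
    for (const timeout of this.timeouts.values()) {
      clearTimeout(timeout);
    }
    this.timeouts.clear();
  }
}
```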
private retry( private retry(
id: string, id: string,
retry: () => void | Promise<void>, retry: () => void | Promise<void>,
attemptNumber: number attemptNumber: number
): void { ): void {
clearTimeout(this.timeouts.get(id)); this.stopRetries(id);
if (attemptNumber < this.maxRetryNumber) { if (attemptNumber < this.maxRetryNumber) {
const interval = setTimeout(() => { const interval = setTimeout(() => {
void retry(); void retry();


@ -0,0 +1,189 @@
import { MessageId } from "@waku/sds";
import { delay } from "@waku/utils";
import { expect } from "chai";
import { StatusDetail, StatusEvents, SyncStatus } from "./sync_status.js";
async function testSyncStatus(
syncStatus: SyncStatus,
statusEvent: keyof StatusEvents,
onMessageFn: (...msgIds: MessageId[]) => void,
expectedStatusDetail: Partial<StatusDetail>,
...messageIds: MessageId[]
): Promise<void> {
let statusDetail: StatusDetail;
syncStatus.addEventListener(statusEvent, (event) => {
statusDetail = event.detail;
});
onMessageFn.bind(syncStatus)(...messageIds);
while (!statusDetail!) {
await delay(10);
}
expect(statusDetail.received).to.eq(expectedStatusDetail.received ?? 0);
expect(statusDetail.missing).to.eq(expectedStatusDetail.missing ?? 0);
expect(statusDetail.lost).to.eq(expectedStatusDetail.lost ?? 0);
}
describe("Sync Status", () => {
let syncStatus: SyncStatus;
beforeEach(() => {
syncStatus = new SyncStatus();
});
afterEach(() => {
syncStatus.cleanUp();
});
it("Emits 'synced' when new message received", async () => {
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesReceived,
{ received: 1 },
"123"
);
});
it("Emits 'syncing' when message flagged as missed", async () => {
await testSyncStatus(
syncStatus,
"syncing",
syncStatus.onMessagesMissing,
{ missing: 1 },
"123"
);
});
it("Emits 'synced' when message flagged as lost", async () => {
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesLost,
{ lost: 1 },
"123"
);
});
it("Emits 'syncing' then 'synced' when message flagged as missing and then received", async () => {
await testSyncStatus(
syncStatus,
"syncing",
syncStatus.onMessagesMissing,
{ missing: 1 },
"123"
);
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesReceived,
{ received: 1 },
"123"
);
});
it("Emits 'syncing' then 'synced' when message flagged as missing and then lost", async () => {
await testSyncStatus(
syncStatus,
"syncing",
syncStatus.onMessagesMissing,
{ missing: 1 },
"123"
);
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesLost,
{ lost: 1 },
"123"
);
});
it("Emits 'synced' then 'synced' when message flagged as lost and then received", async () => {
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesLost,
{ lost: 1 },
"123"
);
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesReceived,
{ received: 1 },
"123"
);
});
it("Emits 'syncing' until all messages are received or lost", async () => {
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesReceived,
{ received: 1 },
"1"
);
await testSyncStatus(
syncStatus,
"syncing",
syncStatus.onMessagesMissing,
{ received: 1, missing: 3 },
"2",
"3",
"4"
);
await testSyncStatus(
syncStatus,
"syncing",
syncStatus.onMessagesReceived,
{ received: 2, missing: 2 },
"2"
);
await testSyncStatus(
syncStatus,
"syncing",
syncStatus.onMessagesReceived,
{ received: 3, missing: 1 },
"3"
);
await testSyncStatus(
syncStatus,
"synced",
syncStatus.onMessagesLost,
{ received: 3, lost: 1 },
"4"
);
});
it("Debounces events when receiving batch of messages", async () => {
let eventCount = 0;
let statusDetail: StatusDetail | undefined;
syncStatus.addEventListener("synced", (event) => {
eventCount++;
statusDetail = event.detail;
});
// Process 100 messages in the same task
for (let i = 0; i < 100; i++) {
syncStatus.onMessagesReceived(`msg-${i}`);
}
// Wait for microtask to complete
await delay(10);
// Should only emit 1 event despite 100 calls
expect(eventCount).to.eq(1, "Should only emit one event for batch");
expect(statusDetail!.received).to.eq(100, "Should track all 100 messages");
});
});


@ -0,0 +1,163 @@
import { TypedEventEmitter } from "@libp2p/interface";
import { MessageId } from "@waku/sds";
import { Logger } from "@waku/utils";
const log = new Logger("sds:sync-status");
export const StatusEvent = {
/**
* We are not aware of any missing messages that we may be able to get.
* We MAY have messages lost forever; see `event.detail`.
*/
Synced: "synced", // TODO or synced or health or caught-up?
/**
* We are aware of missing messages that we may be able to get
*/
Syncing: "syncing" // TODO: it assumes "syncing" is happening via SDS repair or store queries
};
export type StatusEvent = (typeof StatusEvent)[keyof typeof StatusEvent];
export type StatusDetail = {
/**
* number of received messages
*/
received: number;
/**
* number of missing messages that are not yet considered as irretrievably lost
*/
missing: number;
/**
* number of messages considered as irretrievably lost
*/
lost: number;
};
export interface StatusEvents {
synced: CustomEvent<StatusDetail>;
syncing: CustomEvent<StatusDetail>;
}
/**
* Read-only interface for sync status events.
* Only exposes event listener methods, hiding internal state management.
*/
export interface ISyncStatusEvents {
addEventListener(
event: "synced",
callback: (e: CustomEvent<StatusDetail>) => void
): void;
addEventListener(
event: "syncing",
callback: (e: CustomEvent<StatusDetail>) => void
): void;
removeEventListener(
event: "synced",
callback: (e: CustomEvent<StatusDetail>) => void
): void;
removeEventListener(
event: "syncing",
callback: (e: CustomEvent<StatusDetail>) => void
): void;
}
export class SyncStatus extends TypedEventEmitter<StatusEvents> {
private readonly receivedMessages: Set<MessageId>;
private readonly missingMessages: Set<MessageId>;
private readonly lostMessages: Set<MessageId>;
private sendScheduled = false;
private cleaned = false;
public constructor() {
super();
this.receivedMessages = new Set();
this.missingMessages = new Set();
this.lostMessages = new Set();
}
/**
* Clean up all tracked message IDs. Should be called when stopping the channel.
*/
public cleanUp(): void {
// Mark as cleaned to prevent any pending microtasks from firing
this.cleaned = true;
this.receivedMessages.clear();
this.missingMessages.clear();
this.lostMessages.clear();
}
public onMessagesReceived(...messageIds: MessageId[]): void {
for (const messageId of messageIds) {
this.missingMessages.delete(messageId);
this.lostMessages.delete(messageId);
this.receivedMessages.add(messageId);
}
this.scheduleSend();
}
public onMessagesMissing(...messageIds: MessageId[]): void {
for (const messageId of messageIds) {
if (
!this.receivedMessages.has(messageId) &&
!this.lostMessages.has(messageId)
) {
this.missingMessages.add(messageId);
} else {
log.error(
"A message previously received or lost has been marked as missing",
messageId
);
}
}
this.scheduleSend();
}
public onMessagesLost(...messageIds: MessageId[]): void {
for (const messageId of messageIds) {
this.missingMessages.delete(messageId);
this.lostMessages.add(messageId);
}
this.scheduleSend();
}
/**
* Schedule an event to be sent on the next microtask.
* Multiple calls within the same task will result in only one event being sent.
* This prevents event spam when processing batches of messages.
*/
private scheduleSend(): void {
if (!this.sendScheduled) {
this.sendScheduled = true;
queueMicrotask(() => {
this.sendScheduled = false;
this.safeSend();
});
}
}
private safeSend(): void {
// Don't send events if cleanup was already called
if (this.cleaned) {
return;
}
const statusEvent =
this.missingMessages.size === 0
? StatusEvent.Synced
: StatusEvent.Syncing;
try {
this.dispatchEvent(
new CustomEvent(statusEvent, {
detail: {
received: this.receivedMessages.size,
missing: this.missingMessages.size,
lost: this.lostMessages.size
}
})
);
} catch (error) {
log.error(`Failed to dispatch sync status:`, error);
}
}
}
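A minimal direct use of `SyncStatus` showing the microtask debouncing: several updates within the same task produce a single event.

```typescript
import { SyncStatus } from "./sync_status.js";

const status = new SyncStatus();

status.addEventListener("synced", (event) => {
  console.log("received:", event.detail.received); // logs once, with received === 3
  status.cleanUp(); // done observing; clears tracked IDs and suppresses further emits
});

status.onMessagesReceived("a");
status.onMessagesReceived("b", "c");
// Both calls run in the same task, so a single "synced" event is dispatched
// on the next microtask rather than one event per call.
```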


@ -0,0 +1,68 @@
import { TypedEventEmitter } from "@libp2p/interface";
import { delay, MockWakuEvents, MockWakuNode } from "@waku/utils";
import { ReliableChannel } from "./reliable_channel.js";
export const TEST_CONSTANTS = {
POLL_INTERVAL_MS: 50,
RETRY_INTERVAL_MS: 300
} as const;
/**
* Wait for a condition to become truthy, with timeout
* @param condition Function that returns the value when ready, or undefined while waiting
* @param timeoutMs Maximum time to wait before throwing
* @returns The value returned by condition
* @throws Error if timeout is reached
*/
export async function waitFor<T>(
condition: () => T | undefined,
timeoutMs = 5000
): Promise<T> {
const start = Date.now();
while (!condition()) {
if (Date.now() - start > timeoutMs) {
throw new Error(
`Timeout after ${timeoutMs}ms waiting for condition to be met`
);
}
await delay(TEST_CONSTANTS.POLL_INTERVAL_MS);
}
return condition()!;
}
/**
* Send a message and wait for the "message-sent" event
* @param channel The ReliableChannel to send from
* @param message The message payload to send
*/
export async function sendAndWaitForEvent(
channel: ReliableChannel<any>,
message: Uint8Array
): Promise<void> {
return new Promise((resolve) => {
const handler = (): void => {
channel.removeEventListener("message-sent", handler);
resolve();
};
channel.addEventListener("message-sent", handler);
channel.send(message);
});
}
/**
* Create a common event emitter and two mock Waku nodes
* @returns Object containing the emitter and two mock nodes (alice and bob)
*/
export function createMockNodes(): {
emitter: TypedEventEmitter<MockWakuEvents>;
alice: MockWakuNode;
bob: MockWakuNode;
} {
const emitter = new TypedEventEmitter<MockWakuEvents>();
return {
emitter,
alice: new MockWakuNode(emitter),
bob: new MockWakuNode(emitter)
};
}
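For completeness, how `waitFor` behaves in a test, including the timeout path; this is a sketch rather than part of the suite.

```typescript
import { expect } from "chai";
import { it } from "mocha";

import { waitFor } from "./test_utils.js";

it("resolves once the condition returns a value", async () => {
  let value: number | undefined;
  setTimeout(() => (value = 42), 100);

  // Polls every POLL_INTERVAL_MS until the condition returns a truthy value.
  const resolved = await waitFor(() => value);
  expect(resolved).to.eq(42);

  // A condition that never becomes truthy throws after the given timeout.
  let timedOut = false;
  try {
    await waitFor(() => undefined, 200);
  } catch {
    timedOut = true;
  }
  expect(timedOut).to.be.true;
});
```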


@ -46,6 +46,10 @@ export class Store implements IStore {
return this.protocol.multicodec; return this.protocol.multicodec;
} }
public stop(): void {
this.protocol.stop();
}
/** /**
* Queries the Waku Store for historical messages using the provided decoders and options. * Queries the Waku Store for historical messages using the provided decoders and options.
* Returns an asynchronous generator that yields promises of decoded messages. * Returns an asynchronous generator that yields promises of decoded messages.


@ -19,6 +19,10 @@ describe("waitForRemotePeer", () => {
eventTarget = new EventTarget(); eventTarget = new EventTarget();
}); });
afterEach(() => {
sinon.restore();
});
it("should reject if WakuNode is not started", async () => { it("should reject if WakuNode is not started", async () => {
const wakuMock = mockWakuNode({ const wakuMock = mockWakuNode({
connections: [{}] connections: [{}]


@ -232,7 +232,9 @@ export class WakuNode implements IWaku {
this._nodeStateLock = true; this._nodeStateLock = true;
this.lightPush?.stop(); this.lightPush?.stop();
this.store?.stop();
await this.filter?.stop(); await this.filter?.stop();
await this.relay?.stop();
this.healthIndicator.stop(); this.healthIndicator.stop();
this.peerManager.stop(); this.peerManager.stop();
this.connectionManager.stop(); this.connectionManager.stop();


@ -1,5 +1,20 @@
# Changelog # Changelog
## [0.0.9](https://github.com/logos-messaging/logos-messaging-js/compare/sds-v0.0.8...sds-v0.0.9) (2026-01-16)
### Features
* Incorporate sds-r into reliable channels ([#2701](https://github.com/logos-messaging/logos-messaging-js/issues/2701)) ([788f7e6](https://github.com/logos-messaging/logos-messaging-js/commit/788f7e62c5141d10d013c91c28d549188d165762))
* Reliable Channel: Status Sync, overflow protection, stop TODOs ([#2729](https://github.com/logos-messaging/logos-messaging-js/issues/2729)) ([e5f51d7](https://github.com/logos-messaging/logos-messaging-js/commit/e5f51d7df101020a1a6d0787ce68fab4f28922f5))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/utils bumped from ^0.0.27 to ^0.0.28
## [0.0.8](https://github.com/waku-org/js-waku/compare/sds-v0.0.7...sds-v0.0.8) (2025-10-31) ## [0.0.8](https://github.com/waku-org/js-waku/compare/sds-v0.0.7...sds-v0.0.8) (2025-10-31)


@ -1,6 +1,6 @@
{ {
"name": "@waku/sds", "name": "@waku/sds",
"version": "0.0.8", "version": "0.0.9",
"description": "Scalable Data Sync implementation for the browser. Based on https://github.com/vacp2p/rfc-index/blob/main/vac/raw/sds.md", "description": "Scalable Data Sync implementation for the browser. Based on https://github.com/vacp2p/rfc-index/blob/main/vac/raw/sds.md",
"types": "./dist/index.d.ts", "types": "./dist/index.d.ts",
"module": "./dist/index.js", "module": "./dist/index.js",
@ -21,17 +21,18 @@
}, },
"type": "module", "type": "module",
"author": "Waku Team", "author": "Waku Team",
"homepage": "https://github.com/waku-org/js-waku/tree/master/packages/scalable-data-sync#readme", "homepage": "https://github.com/logos-messaging/logos-messaging-js/tree/master/packages/scalable-data-sync#readme",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/waku-org/js-waku.git" "url": "git+https://github.com/logos-messaging/logos-messaging-js.git"
}, },
"bugs": { "bugs": {
"url": "https://github.com/waku-org/js-waku/issues" "url": "https://github.com/logos-messaging/logos-messaging-js/issues"
}, },
"license": "MIT OR Apache-2.0", "license": "MIT OR Apache-2.0",
"keywords": [ "keywords": [
"waku", "waku",
"logos-messaging",
"decentralized", "decentralized",
"secure", "secure",
"communication", "communication",
@ -63,7 +64,7 @@
"@libp2p/interface": "2.10.4", "@libp2p/interface": "2.10.4",
"@noble/hashes": "^1.7.1", "@noble/hashes": "^1.7.1",
"@waku/proto": "^0.0.15", "@waku/proto": "^0.0.15",
"@waku/utils": "^0.0.27", "@waku/utils": "^0.0.28",
"chai": "^5.1.2", "chai": "^5.1.2",
"lodash": "^4.17.21" "lodash": "^4.17.21"
}, },


@ -12,10 +12,8 @@ export enum MessageChannelEvent {
InMessageLost = "sds:in:message-irretrievably-lost", InMessageLost = "sds:in:message-irretrievably-lost",
ErrorTask = "sds:error-task", ErrorTask = "sds:error-task",
// SDS-R Repair Events // SDS-R Repair Events
RepairRequestQueued = "sds:repair:request-queued",
RepairRequestSent = "sds:repair:request-sent", RepairRequestSent = "sds:repair:request-sent",
RepairRequestReceived = "sds:repair:request-received", RepairRequestReceived = "sds:repair:request-received",
RepairResponseQueued = "sds:repair:response-queued",
RepairResponseSent = "sds:repair:response-sent" RepairResponseSent = "sds:repair:response-sent"
} }
@ -33,10 +31,6 @@ export type MessageChannelEvents = {
[MessageChannelEvent.OutSyncSent]: CustomEvent<Message>; [MessageChannelEvent.OutSyncSent]: CustomEvent<Message>;
[MessageChannelEvent.InSyncReceived]: CustomEvent<Message>; [MessageChannelEvent.InSyncReceived]: CustomEvent<Message>;
[MessageChannelEvent.ErrorTask]: CustomEvent<unknown>; [MessageChannelEvent.ErrorTask]: CustomEvent<unknown>;
[MessageChannelEvent.RepairRequestQueued]: CustomEvent<{
messageId: MessageId;
tReq: number;
}>;
[MessageChannelEvent.RepairRequestSent]: CustomEvent<{ [MessageChannelEvent.RepairRequestSent]: CustomEvent<{
messageIds: MessageId[]; messageIds: MessageId[];
carrierMessageId: MessageId; carrierMessageId: MessageId;
@ -45,10 +39,6 @@ export type MessageChannelEvents = {
messageIds: MessageId[]; messageIds: MessageId[];
fromSenderId?: ParticipantId; fromSenderId?: ParticipantId;
}>; }>;
[MessageChannelEvent.RepairResponseQueued]: CustomEvent<{
messageId: MessageId;
tResp: number;
}>;
[MessageChannelEvent.RepairResponseSent]: CustomEvent<{ [MessageChannelEvent.RepairResponseSent]: CustomEvent<{
messageId: MessageId; messageId: MessageId;
}>; }>;


@ -0,0 +1,50 @@
import { expect } from "chai";
import { MemLocalHistory } from "./mem_local_history.js";
import { ContentMessage } from "./message.js";
describe("MemLocalHistory", () => {
it("Cap max size when messages are pushed one at a time", () => {
const maxSize = 2;
const hist = new MemLocalHistory(maxSize);
hist.push(
new ContentMessage("1", "c", "a", [], 1n, undefined, new Uint8Array([1]))
);
expect(hist.length).to.eq(1);
hist.push(
new ContentMessage("2", "c", "a", [], 2n, undefined, new Uint8Array([2]))
);
expect(hist.length).to.eq(2);
hist.push(
new ContentMessage("3", "c", "a", [], 3n, undefined, new Uint8Array([3]))
);
expect(hist.length).to.eq(2);
expect(hist.findIndex((m) => m.messageId === "1")).to.eq(-1);
expect(hist.findIndex((m) => m.messageId === "2")).to.not.eq(-1);
expect(hist.findIndex((m) => m.messageId === "3")).to.not.eq(-1);
});
it("Cap max size when a pushed array is exceeding the cap", () => {
const maxSize = 2;
const hist = new MemLocalHistory(maxSize);
hist.push(
new ContentMessage("1", "c", "a", [], 1n, undefined, new Uint8Array([1]))
);
expect(hist.length).to.eq(1);
hist.push(
new ContentMessage("2", "c", "a", [], 2n, undefined, new Uint8Array([2])),
new ContentMessage("3", "c", "a", [], 3n, undefined, new Uint8Array([3]))
);
expect(hist.length).to.eq(2);
expect(hist.findIndex((m) => m.messageId === "1")).to.eq(-1);
expect(hist.findIndex((m) => m.messageId === "2")).to.not.eq(-1);
expect(hist.findIndex((m) => m.messageId === "3")).to.not.eq(-1);
});
});


@ -2,18 +2,31 @@ import _ from "lodash";
import { ContentMessage, isContentMessage } from "./message.js"; import { ContentMessage, isContentMessage } from "./message.js";
export const DEFAULT_MAX_LENGTH = 10_000;
/** /**
* In-Memory implementation of a local store of messages. * In-Memory implementation of a local history of messages.
* *
* Messages are stored in SDS chronological order: * Messages are stored in SDS chronological order:
* - messages[0] is the oldest message * - messages[0] is the oldest message
* - messages[n] is the newest message * - messages[n] is the newest message
* *
* Only stores content messages: `message.lamportTimestamp` and `message.content` are present. * Only stores content messages: `message.lamportTimestamp` and `message.content` are present.
*
* Oldest messages are dropped when `maxLength` is reached.
* If an array of items longer than `maxLength` is pushed, dropping will happen
* at next push.
*/ */
export class MemLocalHistory { export class MemLocalHistory {
private items: ContentMessage[] = []; private items: ContentMessage[] = [];
/**
* Construct a new in-memory local history
*
* @param maxLength The maximum number of messages to store.
*/
public constructor(private maxLength: number = DEFAULT_MAX_LENGTH) {}
public get length(): number { public get length(): number {
return this.items.length; return this.items.length;
} }
@ -33,6 +46,12 @@ export class MemLocalHistory {
// Remove duplicates by messageId while maintaining order // Remove duplicates by messageId while maintaining order
this.items = _.uniqBy(combinedItems, "messageId"); this.items = _.uniqBy(combinedItems, "messageId");
// Let's drop older messages if max length is reached
if (this.length > this.maxLength) {
const numItemsToRemove = this.length - this.maxLength;
this.items.splice(0, numItemsToRemove);
}
return this.items.length; return this.items.length;
} }
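Usage of the new `maxLength` cap, mirroring the constructor call and the `ContentMessage` shape used in the accompanying spec above (the `ContentMessage` argument order is taken from that spec, not independently verified).

```typescript
import { MemLocalHistory } from "./mem_local_history.js";
import { ContentMessage } from "./message.js";

const history = new MemLocalHistory(2); // keep at most the 2 newest content messages

history.push(new ContentMessage("1", "c", "a", [], 1n, undefined, new Uint8Array([1])));
history.push(new ContentMessage("2", "c", "a", [], 2n, undefined, new Uint8Array([2])));
history.push(new ContentMessage("3", "c", "a", [], 3n, undefined, new Uint8Array([3])));

console.log(history.length); // 2
console.log(history.findIndex((m) => m.messageId === "1")); // -1: the oldest was dropped
```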


@ -185,7 +185,7 @@ describe("MessageChannel", function () {
expect(timestampAfter).to.equal(timestampBefore + 1n); expect(timestampAfter).to.equal(timestampBefore + 1n);
}); });
// TODO: test is failing in CI, investigate in https://github.com/waku-org/js-waku/issues/2648 // TODO: test is failing in CI, investigate in https://github.com/logos-messaging/logos-messaging-js/issues/2648
it.skip("should update lamport timestamp if greater than current timestamp and dependencies are met", async () => { it.skip("should update lamport timestamp if greater than current timestamp and dependencies are met", async () => {
const testChannelA = new MessageChannel(channelId, "alice"); const testChannelA = new MessageChannel(channelId, "alice");
const testChannelB = new MessageChannel(channelId, "bob"); const testChannelB = new MessageChannel(channelId, "bob");
@ -207,7 +207,7 @@ describe("MessageChannel", function () {
); );
}); });
// TODO: test is failing in CI, investigate in https://github.com/waku-org/js-waku/issues/2648 // TODO: test is failing in CI, investigate in https://github.com/logos-messaging/logos-messaging-js/issues/2648
it.skip("should maintain proper timestamps if all messages received", async () => { it.skip("should maintain proper timestamps if all messages received", async () => {
const aTimestampBefore = channelA["lamportTimestamp"]; const aTimestampBefore = channelA["lamportTimestamp"];
let timestamp = channelB["lamportTimestamp"]; let timestamp = channelB["lamportTimestamp"];


@ -128,13 +128,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
// Only construct RepairManager if repair is enabled (default: true) // Only construct RepairManager if repair is enabled (default: true)
if (options.enableRepair ?? true) { if (options.enableRepair ?? true) {
this.repairManager = new RepairManager( this.repairManager = new RepairManager(senderId, options.repairConfig);
senderId,
options.repairConfig,
(event: string, detail: unknown) => {
this.safeSendEvent(event as MessageChannelEvent, { detail });
}
);
} }
} }
@ -142,6 +136,14 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
return bytesToHex(sha256(payload)); return bytesToHex(sha256(payload));
} }
/**
* Check if there are pending repair requests that need to be sent.
* Useful for adaptive sync intervals: sync frequency is increased while repairs are pending.
*/
public hasPendingRepairRequests(currentTime = Date.now()): boolean {
return this.repairManager?.hasRequestsReady(currentTime) ?? false;
}
/** /**
* Processes all queued tasks sequentially to ensure proper message ordering. * Processes all queued tasks sequentially to ensure proper message ordering.
* *
@ -283,7 +285,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
/** /**
* Processes messages in the incoming buffer, delivering those with satisfied dependencies. * Processes messages in the incoming buffer, delivering those with satisfied dependencies.
* *
* @returns Array of history entries for messages still missing dependencies * @returns The missing dependencies
*/ */
public sweepIncomingBuffer(): HistoryEntry[] { public sweepIncomingBuffer(): HistoryEntry[] {
const { buffer, missing } = this.incomingBuffer.reduce<{ const { buffer, missing } = this.incomingBuffer.reduce<{
@ -319,8 +321,8 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
}) })
); );
// Optionally, if a message has not been received after a predetermined amount of time, // Optionally, if a message did not get its dependencies fulfilled after a predetermined amount of time,
// its dependencies are marked as irretrievably lost (implicitly by removing it from the buffer without delivery) // they are marked as irretrievably lost (implicitly by removing it from the buffer without delivery)
if (this.timeoutForLostMessagesMs) { if (this.timeoutForLostMessagesMs) {
const timeReceived = this.timeReceived.get(message.messageId); const timeReceived = this.timeReceived.get(message.messageId);
if ( if (
@ -330,9 +332,19 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
this.safeSendEvent(MessageChannelEvent.InMessageLost, { this.safeSendEvent(MessageChannelEvent.InMessageLost, {
detail: Array.from(missingDependencies) detail: Array.from(missingDependencies)
}); });
// We deliver the message to resume participation in the log
if (isContentMessage(message) && this.deliverMessage(message)) {
this.safeSendEvent(MessageChannelEvent.InMessageDelivered, {
detail: message.messageId
});
}
// The message and its missing dependencies are dropped
// from the incoming buffer
return { buffer, missing }; return { buffer, missing };
} }
} }
missingDependencies.forEach((dependency) => { missingDependencies.forEach((dependency) => {
missing.add(dependency); missing.add(dependency);
}); });
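With this change, a content message whose dependencies time out is delivered anyway (and `InMessageLost` fires for those dependencies), so the channel keeps participating in the causal log. A hedged listener sketch; the `@waku/sds` import path and the `timeoutForLostMessagesMs` option name are assumptions based on the package name and the new reliable-channel tests.

```typescript
import { MessageChannel, MessageChannelEvent } from "@waku/sds"; // assumed exports

const channel = new MessageChannel("my-channel", "alice", {
  // Assumed option name, mirroring the new reliable-channel tests: how long a
  // missing dependency may stay unresolved before it is declared lost.
  timeoutForLostMessagesMs: 60_000
});

channel.addEventListener(MessageChannelEvent.InMessageLost, (event) => {
  console.log(
    "irretrievably lost:",
    event.detail.map((m) => m.messageId)
  );
});

channel.addEventListener(MessageChannelEvent.InMessageDelivered, (event) => {
  // Also fired for messages delivered after their dependencies timed out.
  console.log("delivered:", event.detail);
});
```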


@ -20,11 +20,6 @@ const log = new Logger("sds:repair:manager");
*/ */
const PARTICIPANTS_PER_RESPONSE_GROUP = 128; const PARTICIPANTS_PER_RESPONSE_GROUP = 128;
/**
* Event emitter callback for repair events
*/
export type RepairEventEmitter = (event: string, detail: unknown) => void;
/** /**
* Configuration for SDS-R repair protocol * Configuration for SDS-R repair protocol
*/ */
@ -58,16 +53,10 @@ export class RepairManager {
private readonly config: Required<RepairConfig>; private readonly config: Required<RepairConfig>;
private readonly outgoingBuffer: OutgoingRepairBuffer; private readonly outgoingBuffer: OutgoingRepairBuffer;
private readonly incomingBuffer: IncomingRepairBuffer; private readonly incomingBuffer: IncomingRepairBuffer;
private readonly eventEmitter?: RepairEventEmitter;
public constructor( public constructor(participantId: ParticipantId, config: RepairConfig = {}) {
participantId: ParticipantId,
config: RepairConfig = {},
eventEmitter?: RepairEventEmitter
) {
this.participantId = participantId; this.participantId = participantId;
this.config = { ...DEFAULT_REPAIR_CONFIG, ...config }; this.config = { ...DEFAULT_REPAIR_CONFIG, ...config };
this.eventEmitter = eventEmitter;
this.outgoingBuffer = new OutgoingRepairBuffer(this.config.bufferSize); this.outgoingBuffer = new OutgoingRepairBuffer(this.config.bufferSize);
this.incomingBuffer = new IncomingRepairBuffer(this.config.bufferSize); this.incomingBuffer = new IncomingRepairBuffer(this.config.bufferSize);
@ -142,19 +131,13 @@ export class RepairManager {
// Calculate when to request this repair // Calculate when to request this repair
const tReq = this.calculateTReq(entry.messageId, currentTime); const tReq = this.calculateTReq(entry.messageId, currentTime);
// Add to outgoing buffer - only log and emit event if actually added // Add to outgoing buffer - only log if actually added
const wasAdded = this.outgoingBuffer.add(entry, tReq); const wasAdded = this.outgoingBuffer.add(entry, tReq);
if (wasAdded) { if (wasAdded) {
log.info( log.info(
`Added missing dependency ${entry.messageId} to repair buffer with T_req=${tReq}` `Added missing dependency ${entry.messageId} to repair buffer with T_req=${tReq}`
); );
// Emit event
this.eventEmitter?.("RepairRequestQueued", {
messageId: entry.messageId,
tReq
});
} }
} }
} }
@ -238,19 +221,13 @@ export class RepairManager {
currentTime currentTime
); );
// Add to incoming buffer - only log and emit event if actually added // Add to incoming buffer - only log if actually added
const wasAdded = this.incomingBuffer.add(request, tResp); const wasAdded = this.incomingBuffer.add(request, tResp);
if (wasAdded) { if (wasAdded) {
log.info( log.info(
`Will respond to repair request for ${request.messageId} at T_resp=${tResp}` `Will respond to repair request for ${request.messageId} at T_resp=${tResp}`
); );
// Emit event
this.eventEmitter?.("RepairResponseQueued", {
messageId: request.messageId,
tResp
});
} }
} }
} }
@ -328,4 +305,12 @@ export class RepairManager {
`Updated response groups to ${this.config.numResponseGroups} for ${numParticipants} participants` `Updated response groups to ${this.config.numResponseGroups} for ${numParticipants} participants`
); );
} }
/**
* Check if there are repair requests ready to be sent
*/
public hasRequestsReady(currentTime = Date.now()): boolean {
const items = this.outgoingBuffer.getItems();
return items.length > 0 && items[0].tReq <= currentTime;
}
} }
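`hasRequestsReady` only peeks at the head of the outgoing repair buffer, which implies the buffer is ordered by `tReq`. A tiny sketch of the same check with illustrative types:

```typescript
// A queued repair request becomes "ready" once its scheduled request time has passed.
type OutgoingRepairItem = { messageId: string; tReq: number };

function hasReadyRequests(items: OutgoingRepairItem[], now = Date.now()): boolean {
  // Mirrors the head-of-buffer check above; assumes items are kept sorted by tReq.
  return items.length > 0 && items[0].tReq <= now;
}

console.log(hasReadyRequests([{ messageId: "m1", tReq: Date.now() - 1 }])); // true
console.log(hasReadyRequests([{ messageId: "m1", tReq: Date.now() + 5_000 }])); // false
```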

Some files were not shown because too many files have changed in this diff.