Merge branch 'master' of github.com:waku-org/js-waku into weboko/send-api

Sasha 2025-09-29 22:01:32 +02:00
commit 998c2a1836
76 changed files with 4315 additions and 517 deletions

View File

@ -1,13 +1,13 @@
{
"packages/utils": "0.0.26",
"packages/proto": "0.0.13",
"packages/interfaces": "0.0.33",
"packages/enr": "0.0.32",
"packages/core": "0.0.38",
"packages/message-encryption": "0.0.36",
"packages/relay": "0.0.21",
"packages/sdk": "0.0.34",
"packages/discovery": "0.0.11",
"packages/sds": "0.0.6",
"packages/rln": "0.1.8"
"packages/utils": "0.0.27",
"packages/proto": "0.0.14",
"packages/interfaces": "0.0.34",
"packages/enr": "0.0.33",
"packages/core": "0.0.39",
"packages/message-encryption": "0.0.37",
"packages/relay": "0.0.22",
"packages/sdk": "0.0.35",
"packages/discovery": "0.0.12",
"packages/sds": "0.0.7",
"packages/rln": "0.1.9"
}

package-lock.json generated (188 changed lines)
View File

@ -13,10 +13,10 @@
"packages/core",
"packages/discovery",
"packages/message-encryption",
"packages/sdk",
"packages/relay",
"packages/sds",
"packages/rln",
"packages/sdk",
"packages/relay",
"packages/tests",
"packages/reliability-tests",
"packages/headless-tests",
@ -36658,15 +36658,15 @@
},
"packages/core": {
"name": "@waku/core",
"version": "0.0.38",
"version": "0.0.39",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@libp2p/ping": "2.0.35",
"@noble/hashes": "^1.3.2",
"@waku/enr": "^0.0.32",
"@waku/interfaces": "0.0.33",
"@waku/proto": "0.0.13",
"@waku/utils": "0.0.26",
"@waku/enr": "^0.0.33",
"@waku/interfaces": "0.0.34",
"@waku/proto": "0.0.14",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"it-all": "^3.0.4",
"it-length-prefixed": "^9.0.4",
@ -36726,14 +36726,14 @@
},
"packages/discovery": {
"name": "@waku/discovery",
"version": "0.0.11",
"version": "0.0.12",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@waku/core": "0.0.38",
"@waku/enr": "0.0.32",
"@waku/interfaces": "0.0.33",
"@waku/proto": "^0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/enr": "0.0.33",
"@waku/interfaces": "0.0.34",
"@waku/proto": "^0.0.14",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"dns-over-http-resolver": "^3.0.8",
"hi-base32": "^0.5.1",
@ -36762,7 +36762,7 @@
},
"packages/enr": {
"name": "@waku/enr",
"version": "0.0.32",
"version": "0.0.33",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@ethersproject/rlp": "^5.7.0",
@ -36770,7 +36770,7 @@
"@libp2p/peer-id": "5.1.7",
"@multiformats/multiaddr": "^12.0.0",
"@noble/secp256k1": "^1.7.1",
"@waku/utils": "0.0.26",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"js-sha3": "^0.9.2"
},
@ -36781,7 +36781,7 @@
"@types/chai": "^4.3.11",
"@types/mocha": "^10.0.6",
"@waku/build-utils": "*",
"@waku/interfaces": "0.0.33",
"@waku/interfaces": "0.0.34",
"chai": "^4.3.10",
"cspell": "^8.6.1",
"fast-check": "^3.19.0",
@ -37301,7 +37301,7 @@
},
"packages/interfaces": {
"name": "@waku/interfaces",
"version": "0.0.33",
"version": "0.0.34",
"license": "MIT OR Apache-2.0",
"devDependencies": {
"@chainsafe/libp2p-gossipsub": "14.1.1",
@ -37316,14 +37316,14 @@
},
"packages/message-encryption": {
"name": "@waku/message-encryption",
"version": "0.0.36",
"version": "0.0.37",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@noble/secp256k1": "^1.7.1",
"@waku/core": "0.0.38",
"@waku/interfaces": "0.0.33",
"@waku/proto": "0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/interfaces": "0.0.34",
"@waku/proto": "0.0.14",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"js-sha3": "^0.9.2",
"uint8arrays": "^5.0.1"
@ -37353,7 +37353,7 @@
},
"packages/proto": {
"name": "@waku/proto",
"version": "0.0.13",
"version": "0.0.14",
"license": "MIT OR Apache-2.0",
"dependencies": {
"protons-runtime": "^5.4.0"
@ -37375,16 +37375,16 @@
},
"packages/relay": {
"name": "@waku/relay",
"version": "0.0.21",
"version": "0.0.22",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@chainsafe/libp2p-gossipsub": "14.1.1",
"@noble/hashes": "^1.3.2",
"@waku/core": "0.0.38",
"@waku/interfaces": "0.0.33",
"@waku/proto": "0.0.13",
"@waku/sdk": "0.0.34",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/interfaces": "0.0.34",
"@waku/proto": "0.0.14",
"@waku/sdk": "0.0.35",
"@waku/utils": "0.0.27",
"chai": "^4.3.10",
"debug": "^4.3.4",
"fast-check": "^3.19.0",
@ -37461,13 +37461,13 @@
},
"packages/rln": {
"name": "@waku/rln",
"version": "0.1.8",
"version": "0.1.9",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@chainsafe/bls-keystore": "3.0.0",
"@noble/hashes": "^1.2.0",
"@waku/core": "^0.0.38",
"@waku/utils": "^0.0.26",
"@waku/core": "^0.0.39",
"@waku/utils": "^0.0.27",
"@waku/zerokit-rln-wasm": "^0.0.13",
"chai": "^5.1.2",
"chai-as-promised": "^8.0.1",
@ -37489,8 +37489,8 @@
"@types/lodash": "^4.17.15",
"@types/sinon": "^17.0.3",
"@waku/build-utils": "^1.0.0",
"@waku/interfaces": "0.0.33",
"@waku/message-encryption": "^0.0.36",
"@waku/interfaces": "0.0.34",
"@waku/message-encryption": "^0.0.37",
"deep-equal-in-any-order": "^2.0.6",
"fast-check": "^3.23.2",
"rollup-plugin-copy": "^3.5.0"
@ -37606,7 +37606,7 @@
},
"packages/sdk": {
"name": "@waku/sdk",
"version": "0.0.34",
"version": "0.0.35",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@chainsafe/libp2p-noise": "16.1.3",
@ -37617,11 +37617,12 @@
"@libp2p/websockets": "9.2.16",
"@noble/hashes": "^1.3.3",
"@types/lodash.debounce": "^4.0.9",
"@waku/core": "0.0.38",
"@waku/discovery": "0.0.11",
"@waku/interfaces": "0.0.33",
"@waku/proto": "^0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/discovery": "0.0.12",
"@waku/interfaces": "0.0.34",
"@waku/proto": "^0.0.14",
"@waku/sds": "^0.0.7",
"@waku/utils": "0.0.27",
"libp2p": "2.8.11",
"lodash.debounce": "^4.0.8"
},
@ -37634,6 +37635,7 @@
"@types/chai": "^4.3.11",
"@types/mocha": "^10.0.9",
"@waku/build-utils": "*",
"@waku/message-encryption": "^0.0.37",
"chai": "^5.1.1",
"cspell": "^8.6.1",
"interface-datastore": "8.3.2",
@ -37654,6 +37656,102 @@
"@sinonjs/commons": "^3.0.1"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/@waku/interfaces": {
"version": "0.0.32",
"resolved": "https://registry.npmjs.org/@waku/interfaces/-/interfaces-0.0.32.tgz",
"integrity": "sha512-4MNfc7ZzQCyQZR1GQQKPgHaWTuPTIvE2wo/b7iokjdeOT+ZSKyJFSetcV07cqnBwyzUv1gc53bJdzyHwVIa5Vw==",
"extraneous": true,
"license": "MIT OR Apache-2.0",
"engines": {
"node": ">=22"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/@waku/proto": {
"version": "0.0.12",
"resolved": "https://registry.npmjs.org/@waku/proto/-/proto-0.0.12.tgz",
"integrity": "sha512-JR7wiy3Di628Ywo9qKIi7rhfdC2K7ABoaWa9WX4ZQKieYDs+YwOK+syE53VNwXrtponNeLDI0JIOFzRDalUm1A==",
"extraneous": true,
"license": "MIT OR Apache-2.0",
"dependencies": {
"protons-runtime": "^5.4.0"
},
"engines": {
"node": ">=22"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/@waku/utils": {
"version": "0.0.25",
"resolved": "https://registry.npmjs.org/@waku/utils/-/utils-0.0.25.tgz",
"integrity": "sha512-yCbfQ3uqByGNUvCNTj6oHi8fJ6BdVvg+Rj0y2YKrZDSNn73uTMF856lCJdsE86eqDZNCDaRaawTs3ZNEXyWaXw==",
"extraneous": true,
"license": "MIT OR Apache-2.0",
"dependencies": {
"@noble/hashes": "^1.3.2",
"@waku/interfaces": "0.0.32",
"chai": "^4.3.10",
"debug": "^4.3.4",
"uint8arrays": "^5.0.1"
},
"engines": {
"node": ">=22"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/assertion-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
"extraneous": true,
"license": "MIT",
"engines": {
"node": "*"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/check-error": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz",
"integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==",
"extraneous": true,
"license": "MIT",
"dependencies": {
"get-func-name": "^2.0.2"
},
"engines": {
"node": "*"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/deep-eql": {
"version": "4.1.4",
"resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz",
"integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==",
"extraneous": true,
"license": "MIT",
"dependencies": {
"type-detect": "^4.0.0"
},
"engines": {
"node": ">=6"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/loupe": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz",
"integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==",
"extraneous": true,
"license": "MIT",
"dependencies": {
"get-func-name": "^2.0.1"
}
},
"packages/sdk/node_modules/@waku/sds/node_modules/pathval": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"extraneous": true,
"license": "MIT",
"engines": {
"node": "*"
}
},
"packages/sdk/node_modules/assertion-error": {
"version": "2.0.1",
"dev": true,
@ -37733,13 +37831,13 @@
},
"packages/sds": {
"name": "@waku/sds",
"version": "0.0.6",
"version": "0.0.7",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@libp2p/interface": "2.10.4",
"@noble/hashes": "^1.7.1",
"@waku/proto": "^0.0.13",
"@waku/utils": "^0.0.26",
"@waku/proto": "^0.0.14",
"@waku/utils": "^0.0.27",
"chai": "^5.1.2",
"lodash": "^4.17.21"
},
@ -37859,11 +37957,11 @@
},
"packages/utils": {
"name": "@waku/utils",
"version": "0.0.26",
"version": "0.0.27",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@noble/hashes": "^1.3.2",
"@waku/interfaces": "0.0.33",
"@waku/interfaces": "0.0.34",
"chai": "^4.3.10",
"debug": "^4.3.4",
"uint8arrays": "^5.0.1"

View File

@ -10,10 +10,10 @@
"packages/core",
"packages/discovery",
"packages/message-encryption",
"packages/sdk",
"packages/relay",
"packages/sds",
"packages/rln",
"packages/sdk",
"packages/relay",
"packages/tests",
"packages/reliability-tests",
"packages/headless-tests",

View File

@ -5,6 +5,27 @@ All notable changes to this project will be documented in this file.
The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification,
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.0.39](https://github.com/waku-org/js-waku/compare/core-v0.0.38...core-v0.0.39) (2025-09-20)
### Features
* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448))
* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b))
* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8))
* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f))
* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/enr bumped from ^0.0.32 to ^0.0.33
* @waku/interfaces bumped from 0.0.33 to 0.0.34
* @waku/proto bumped from 0.0.13 to 0.0.14
* @waku/utils bumped from 0.0.26 to 0.0.27
## [0.0.38](https://github.com/waku-org/js-waku/compare/core-v0.0.37...core-v0.0.38) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/core",
"version": "0.0.38",
"version": "0.0.39",
"description": "TypeScript implementation of the Waku v2 protocol",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -64,11 +64,11 @@
"node": ">=22"
},
"dependencies": {
"@waku/enr": "^0.0.32",
"@waku/interfaces": "0.0.33",
"@waku/enr": "^0.0.33",
"@waku/interfaces": "0.0.34",
"@libp2p/ping": "2.0.35",
"@waku/proto": "0.0.13",
"@waku/utils": "0.0.26",
"@waku/proto": "0.0.14",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"@noble/hashes": "^1.3.2",
"it-all": "^3.0.4",

View File

@ -10,7 +10,11 @@ export * as waku_filter from "./lib/filter/index.js";
export { FilterCore, FilterCodecs } from "./lib/filter/index.js";
export * as waku_light_push from "./lib/light_push/index.js";
export { LightPushCodec, LightPushCore } from "./lib/light_push/index.js";
export {
LightPushCore,
LightPushCodec,
LightPushCodecV2
} from "./lib/light_push/index.js";
export * as waku_store from "./lib/store/index.js";
export { StoreCore, StoreCodec } from "./lib/store/index.js";

View File

@ -87,6 +87,12 @@ describe("ConnectionLimiter", () => {
mockPeer2 = createMockPeer("12D3KooWTest2", [Tags.BOOTSTRAP]); // Ensure mockPeer2 is prioritized and dialed
mockConnection = createMockConnection(mockPeerId, [Tags.BOOTSTRAP]);
dialer = {
start: sinon.stub(),
stop: sinon.stub(),
dial: sinon.stub().resolves()
} as unknown as sinon.SinonStubbedInstance<Dialer>;
libp2p = {
addEventListener: sinon.stub(),
removeEventListener: sinon.stub(),
@ -95,7 +101,11 @@ describe("ConnectionLimiter", () => {
getConnections: sinon.stub().returns([]),
peerStore: {
all: sinon.stub().resolves([]),
get: sinon.stub().resolves(mockPeer)
get: sinon.stub().resolves(mockPeer),
merge: sinon.stub().resolves()
},
components: {
components: {}
}
};
@ -112,6 +122,20 @@ describe("ConnectionLimiter", () => {
isConnected: sinon.stub().returns(true),
isP2PConnected: sinon.stub().returns(true)
} as unknown as sinon.SinonStubbedInstance<NetworkMonitor>;
// Mock the libp2p components needed by isAddressesSupported
libp2p.components = {
components: {},
transportManager: {
getTransports: sinon.stub().returns([
{
dialFilter: sinon
.stub()
.returns([multiaddr("/dns4/test/tcp/443/wss")])
}
])
}
};
});
afterEach(() => {
@ -274,11 +298,6 @@ describe("ConnectionLimiter", () => {
describe("dialPeersFromStore", () => {
beforeEach(() => {
dialer = {
start: sinon.stub(),
stop: sinon.stub(),
dial: sinon.stub().resolves()
} as unknown as sinon.SinonStubbedInstance<Dialer>;
libp2p.hangUp = sinon.stub().resolves();
connectionLimiter = createLimiter();
mockPeer.addresses = [
@ -404,11 +423,6 @@ describe("ConnectionLimiter", () => {
describe("maintainConnectionsCount", () => {
beforeEach(() => {
dialer = {
start: sinon.stub(),
stop: sinon.stub(),
dial: sinon.stub().resolves()
} as unknown as sinon.SinonStubbedInstance<Dialer>;
libp2p.hangUp = sinon.stub().resolves();
connectionLimiter = createLimiter({ maxConnections: 2 });
mockPeer.addresses = [
@ -515,6 +529,7 @@ describe("ConnectionLimiter", () => {
];
libp2p.peerStore.all.resolves([bootstrapPeer, pxPeer, localPeer]);
libp2p.getConnections.returns([]);
connectionLimiter = createLimiter();
const peers = await (connectionLimiter as any).getPrioritizedPeers();
expect(peers[0].id.toString()).to.equal("b");
expect(peers[1].id.toString()).to.equal("px");

View File

@ -9,9 +9,11 @@ import {
WakuEvent
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
import { numberToBytes } from "@waku/utils/bytes";
import { Dialer } from "./dialer.js";
import { NetworkMonitor } from "./network_monitor.js";
import { isAddressesSupported } from "./utils.js";
const log = new Logger("connection-limiter");
@ -123,6 +125,7 @@ export class ConnectionLimiter implements IConnectionLimiter {
private async maintainConnections(): Promise<void> {
await this.maintainConnectionsCount();
await this.maintainBootstrapConnections();
await this.maintainTTLConnectedPeers();
}
private async onDisconnectedEvent(): Promise<void> {
@ -145,13 +148,15 @@ export class ConnectionLimiter implements IConnectionLimiter {
const peers = await this.getPrioritizedPeers();
if (peers.length === 0) {
log.info(`No peers to dial, node is utilizing all known peers`);
log.info(`No peers to dial, skipping`);
await this.triggerBootstrap();
return;
}
const promises = peers
.slice(0, this.options.maxConnections - connections.length)
.map((p) => this.dialer.dial(p.id));
await Promise.all(promises);
return;
@ -210,6 +215,28 @@ export class ConnectionLimiter implements IConnectionLimiter {
}
}
private async maintainTTLConnectedPeers(): Promise<void> {
log.info(`Maintaining TTL connected peers`);
const promises = this.libp2p.getConnections().map(async (c) => {
try {
await this.libp2p.peerStore.merge(c.remotePeer, {
metadata: {
ttl: numberToBytes(Date.now())
}
});
log.info(`TTL updated for connected peer ${c.remotePeer.toString()}`);
} catch (error) {
log.error(
`Unexpected error while maintaining TTL connected peer`,
error
);
}
});
await Promise.all(promises);
}
private async dialPeersFromStore(): Promise<void> {
log.info(`Dialing peers from store`);
@ -218,6 +245,7 @@ export class ConnectionLimiter implements IConnectionLimiter {
if (peers.length === 0) {
log.info(`No peers to dial, skipping`);
await this.triggerBootstrap();
return;
}
@ -248,10 +276,9 @@ export class ConnectionLimiter implements IConnectionLimiter {
const notConnectedPeers = allPeers.filter(
(p) =>
!allConnections.some((c) => c.remotePeer.equals(p.id)) &&
p.addresses.some(
(a) =>
a.multiaddr.toString().includes("wss") ||
a.multiaddr.toString().includes("ws")
isAddressesSupported(
this.libp2p,
p.addresses.map((a) => a.multiaddr)
)
);
@ -267,7 +294,19 @@ export class ConnectionLimiter implements IConnectionLimiter {
p.tags.has(Tags.PEER_CACHE)
);
return [...bootstrapPeers, ...peerExchangePeers, ...localStorePeers];
const restPeers = notConnectedPeers.filter(
(p) =>
!p.tags.has(Tags.BOOTSTRAP) &&
!p.tags.has(Tags.PEER_EXCHANGE) &&
!p.tags.has(Tags.PEER_CACHE)
);
return [
...bootstrapPeers,
...peerExchangePeers,
...localStorePeers,
...restPeers
];
}
private async getBootstrapPeers(): Promise<Peer[]> {
@ -291,4 +330,41 @@ export class ConnectionLimiter implements IConnectionLimiter {
return null;
}
}
/**
* Triggers the bootstrap or peer cache discovery if they are mounted.
* @returns void
*/
private async triggerBootstrap(): Promise<void> {
log.info("Triggering bootstrap discovery");
const bootstrapComponents = Object.values(this.libp2p.components.components)
.filter((c) => !!c)
.filter((c: unknown) =>
[`@waku/${Tags.BOOTSTRAP}`, `@waku/${Tags.PEER_CACHE}`].includes(
(c as { [Symbol.toStringTag]: string })?.[Symbol.toStringTag]
)
);
if (bootstrapComponents.length === 0) {
log.warn("No bootstrap components found to trigger");
return;
}
log.info(
`Found ${bootstrapComponents.length} bootstrap components, starting them`
);
const promises = bootstrapComponents.map(async (component) => {
try {
await (component as { stop: () => Promise<void> })?.stop?.();
await (component as { start: () => Promise<void> })?.start?.();
log.info("Successfully started bootstrap component");
} catch (error) {
log.error("Failed to start bootstrap component", error);
}
});
await Promise.all(promises);
}
}
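
For reference, a minimal sketch (assuming a hypothetical discovery implementation; real ones live in @waku/discovery) of the shape a component needs so that triggerBootstrap() can find and restart it: it must be registered under libp2p.components.components, expose a Symbol.toStringTag matching `@waku/${Tags.BOOTSTRAP}` or `@waku/${Tags.PEER_CACHE}`, and implement start()/stop().

import { Tags } from "@waku/interfaces";

// Hypothetical discovery component; only the shape matters for triggerBootstrap().
class ExampleBootstrapDiscovery {
  // Matched against `@waku/${Tags.BOOTSTRAP}` / `@waku/${Tags.PEER_CACHE}`.
  public readonly [Symbol.toStringTag] = `@waku/${Tags.BOOTSTRAP}`;

  public async start(): Promise<void> {
    // begin dialing the configured bootstrap peers (omitted)
  }

  public async stop(): Promise<void> {
    // cancel in-flight discovery work (omitted)
  }
}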

View File

@ -52,6 +52,12 @@ describe("ConnectionManager", () => {
dialProtocol: sinon.stub().resolves({} as Stream),
hangUp: sinon.stub().resolves(),
getPeers: sinon.stub().returns([]),
getConnections: sinon.stub().returns([]),
addEventListener: sinon.stub(),
removeEventListener: sinon.stub(),
components: {
components: {}
},
peerStore: {
get: sinon.stub().resolves(null),
merge: sinon.stub().resolves()

View File

@ -1,6 +1,7 @@
import { isPeerId, type Peer, type PeerId } from "@libp2p/interface";
import { peerIdFromString } from "@libp2p/peer-id";
import { Multiaddr, multiaddr, MultiaddrInput } from "@multiformats/multiaddr";
import { Libp2p } from "@waku/interfaces";
import { bytesToUtf8 } from "@waku/utils/bytes";
/**
@ -49,3 +50,25 @@ export const mapToPeerId = (input: PeerId | MultiaddrInput): PeerId => {
? input
: peerIdFromString(multiaddr(input).getPeerId()!);
};
/**
 * Checks if any of the given addresses are supported by the libp2p instance's transports.
* @param libp2p - The libp2p instance.
* @param addresses - The addresses to check.
* @returns True if the addresses are supported, false otherwise.
*/
export const isAddressesSupported = (
libp2p: Libp2p,
addresses: Multiaddr[]
): boolean => {
const transports =
libp2p?.components?.transportManager?.getTransports() || [];
if (transports.length === 0) {
return false;
}
return transports
.map((transport) => transport.dialFilter(addresses))
.some((supportedAddresses) => supportedAddresses.length > 0);
};
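
A short usage sketch, assuming libp2p is an already-started node with its transports mounted; this mirrors how the connection limiter filters store peers before dialing:

import { multiaddr } from "@multiformats/multiaddr";
import type { Libp2p } from "@waku/interfaces";
import { isAddressesSupported } from "./utils.js";

declare const libp2p: Libp2p; // assumption: a started node

const addrs = [multiaddr("/dns4/node.example.com/tcp/443/wss")];
if (isAddressesSupported(libp2p, addrs)) {
  // at least one mounted transport's dialFilter accepted these addresses,
  // so it is worth handing them to the dialer
}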

View File

@ -2,9 +2,9 @@ import type { PeerId } from "@libp2p/interface";
import type { IncomingStreamData } from "@libp2p/interface-internal";
import {
type ContentTopic,
type CoreProtocolResult,
type FilterCoreResult,
FilterError,
type Libp2p,
ProtocolError,
type PubsubTopic
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
@ -72,14 +72,14 @@ export class FilterCore {
pubsubTopic: PubsubTopic,
peerId: PeerId,
contentTopics: ContentTopic[]
): Promise<CoreProtocolResult> {
): Promise<FilterCoreResult> {
const stream = await this.streamManager.getStream(peerId);
if (!stream) {
return {
success: null,
failure: {
error: ProtocolError.NO_STREAM_AVAILABLE,
error: FilterError.NO_STREAM_AVAILABLE,
peerId: peerId
}
};
@ -108,7 +108,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.GENERIC_FAIL,
error: FilterError.GENERIC_FAIL,
peerId: peerId
}
};
@ -123,7 +123,7 @@ export class FilterCore {
);
return {
failure: {
error: ProtocolError.REMOTE_PEER_REJECTED,
error: FilterError.REMOTE_PEER_REJECTED,
peerId: peerId
},
success: null
@ -140,7 +140,7 @@ export class FilterCore {
pubsubTopic: PubsubTopic,
peerId: PeerId,
contentTopics: ContentTopic[]
): Promise<CoreProtocolResult> {
): Promise<FilterCoreResult> {
const stream = await this.streamManager.getStream(peerId);
if (!stream) {
@ -148,7 +148,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.NO_STREAM_AVAILABLE,
error: FilterError.NO_STREAM_AVAILABLE,
peerId: peerId
}
};
@ -166,7 +166,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.GENERIC_FAIL,
error: FilterError.GENERIC_FAIL,
peerId: peerId
}
};
@ -181,7 +181,7 @@ export class FilterCore {
public async unsubscribeAll(
pubsubTopic: PubsubTopic,
peerId: PeerId
): Promise<CoreProtocolResult> {
): Promise<FilterCoreResult> {
const stream = await this.streamManager.getStream(peerId);
if (!stream) {
@ -189,7 +189,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.NO_STREAM_AVAILABLE,
error: FilterError.NO_STREAM_AVAILABLE,
peerId: peerId
}
};
@ -208,7 +208,7 @@ export class FilterCore {
if (!res || !res.length) {
return {
failure: {
error: ProtocolError.NO_RESPONSE,
error: FilterError.NO_RESPONSE,
peerId: peerId
},
success: null
@ -224,7 +224,7 @@ export class FilterCore {
);
return {
failure: {
error: ProtocolError.REMOTE_PEER_REJECTED,
error: FilterError.REMOTE_PEER_REJECTED,
peerId: peerId
},
success: null
@ -237,7 +237,7 @@ export class FilterCore {
};
}
public async ping(peerId: PeerId): Promise<CoreProtocolResult> {
public async ping(peerId: PeerId): Promise<FilterCoreResult> {
const stream = await this.streamManager.getStream(peerId);
if (!stream) {
@ -245,7 +245,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.NO_STREAM_AVAILABLE,
error: FilterError.NO_STREAM_AVAILABLE,
peerId: peerId
}
};
@ -267,7 +267,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.GENERIC_FAIL,
error: FilterError.GENERIC_FAIL,
peerId: peerId
}
};
@ -277,7 +277,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.NO_RESPONSE,
error: FilterError.NO_RESPONSE,
peerId: peerId
}
};
@ -293,7 +293,7 @@ export class FilterCore {
return {
success: null,
failure: {
error: ProtocolError.REMOTE_PEER_REJECTED,
error: FilterError.REMOTE_PEER_REJECTED,
peerId: peerId
}
};
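
A hedged sketch of consuming the new FilterCoreResult shape (the type is defined in the interfaces diff further down; the helper name is illustrative):

import type { FilterCoreResult } from "@waku/interfaces";

function logFilterResult(result: FilterCoreResult): void {
  if (result.success) {
    console.log(`Filter request succeeded via peer ${result.success.toString()}`);
    return;
  }
  // failure.error is a FilterError, e.g. NO_STREAM_AVAILABLE when the
  // connection to the peer has been dropped
  console.error(result.failure?.error, result.failure?.peerId?.toString());
}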

View File

@ -0,0 +1,7 @@
export const CODECS = {
v2: "/vac/waku/lightpush/2.0.0-beta1",
v3: "/vac/waku/lightpush/3.0.0"
} as const;
export const LightPushCodecV2 = CODECS.v2;
export const LightPushCodec = CODECS.v3;
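
A simplified sketch of the preference order applied by the core (v3 first, legacy v2 as fallback); it mirrors LightPushCore.getProtocol further down in this diff, and the helper name is illustrative:

import { LightPushCodec, LightPushCodecV2 } from "@waku/core";

function pickLightPushCodec(peerProtocols: string[]): string | undefined {
  if (peerProtocols.includes(LightPushCodec)) {
    return LightPushCodec; // "/vac/waku/lightpush/3.0.0"
  }
  if (peerProtocols.includes(LightPushCodecV2)) {
    return LightPushCodecV2; // "/vac/waku/lightpush/2.0.0-beta1"
  }
  return undefined; // peer advertises neither version
}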

View File

@ -1 +1,2 @@
export { LightPushCore, LightPushCodec, PushResponse } from "./light_push.js";
export { LightPushCore } from "./light_push.js";
export { LightPushCodec, LightPushCodecV2 } from "./constants.js";

View File

@ -1,14 +1,11 @@
import type { PeerId } from "@libp2p/interface";
import type { PeerId, Stream } from "@libp2p/interface";
import {
type CoreProtocolResult,
type IEncoder,
type IMessage,
type Libp2p,
ProtocolError,
type ThisOrThat
type LightPushCoreResult,
LightPushError
} from "@waku/interfaces";
import { PushResponse } from "@waku/proto";
import { isMessageSizeUnderCap } from "@waku/utils";
import { Logger } from "@waku/utils";
import all from "it-all";
import * as lp from "it-length-prefixed";
@ -17,92 +14,71 @@ import { Uint8ArrayList } from "uint8arraylist";
import { StreamManager } from "../stream_manager/index.js";
import { PushRpc } from "./push_rpc.js";
import { isRLNResponseError } from "./utils.js";
import { CODECS } from "./constants.js";
import { ProtocolHandler } from "./protocol_handler.js";
const log = new Logger("light-push");
export const LightPushCodec = "/vac/waku/lightpush/2.0.0-beta1";
export { PushResponse };
type PreparePushMessageResult = ThisOrThat<"query", PushRpc>;
/**
* Implements the [Waku v2 Light Push protocol](https://rfc.vac.dev/spec/19/).
*/
export class LightPushCore {
private readonly streamManager: StreamManager;
private readonly streamManagerV2: StreamManager;
public readonly multicodec = LightPushCodec;
public readonly multicodec = [CODECS.v3, CODECS.v2];
public constructor(libp2p: Libp2p) {
this.streamManager = new StreamManager(LightPushCodec, libp2p.components);
}
private async preparePushMessage(
encoder: IEncoder,
message: IMessage
): Promise<PreparePushMessageResult> {
try {
if (!message.payload || message.payload.length === 0) {
log.error("Failed to send waku light push: payload is empty");
return { query: null, error: ProtocolError.EMPTY_PAYLOAD };
}
if (!(await isMessageSizeUnderCap(encoder, message))) {
log.error("Failed to send waku light push: message is bigger than 1MB");
return { query: null, error: ProtocolError.SIZE_TOO_BIG };
}
const protoMessage = await encoder.toProtoObj(message);
if (!protoMessage) {
log.error("Failed to encode to protoMessage, aborting push");
return {
query: null,
error: ProtocolError.ENCODE_FAILED
};
}
const query = PushRpc.createRequest(protoMessage, encoder.pubsubTopic);
return { query, error: null };
} catch (error) {
log.error("Failed to prepare push message", error);
return {
query: null,
error: ProtocolError.GENERIC_FAIL
};
}
public constructor(private libp2p: Libp2p) {
this.streamManagerV2 = new StreamManager(CODECS.v2, libp2p.components);
this.streamManager = new StreamManager(CODECS.v3, libp2p.components);
}
public async send(
encoder: IEncoder,
message: IMessage,
peerId: PeerId
): Promise<CoreProtocolResult> {
const { query, error: preparationError } = await this.preparePushMessage(
encoder,
message
peerId: PeerId,
useLegacy: boolean = false
): Promise<LightPushCoreResult> {
const protocol = await this.getProtocol(peerId, useLegacy);
log.info(
`Sending light push request to peer:${peerId.toString()}, protocol:${protocol}`
);
if (preparationError || !query) {
if (!protocol) {
return {
success: null,
failure: {
error: preparationError,
error: LightPushError.GENERIC_FAIL,
peerId
}
};
}
const stream = await this.streamManager.getStream(peerId);
const { rpc, error: prepError } = await ProtocolHandler.preparePushMessage(
encoder,
message,
protocol
);
if (prepError) {
return {
success: null,
failure: {
error: prepError,
peerId
}
};
}
const stream = await this.getStream(peerId, protocol);
if (!stream) {
log.error(`Failed to get a stream for remote peer:${peerId.toString()}`);
return {
success: null,
failure: {
error: ProtocolError.NO_STREAM_AVAILABLE,
error: LightPushError.NO_STREAM_AVAILABLE,
peerId: peerId
}
};
@ -111,76 +87,74 @@ export class LightPushCore {
let res: Uint8ArrayList[] | undefined;
try {
res = await pipe(
[query.encode()],
[rpc.encode()],
lp.encode,
stream,
lp.decode,
async (source) => await all(source)
);
} catch (err) {
// can fail only because of `stream` abortion
log.error("Failed to send waku light push request", err);
return {
success: null,
failure: {
error: ProtocolError.STREAM_ABORTED,
error: LightPushError.STREAM_ABORTED,
peerId: peerId
}
};
}
const bytes = new Uint8ArrayList();
res.forEach((chunk) => {
bytes.append(chunk);
});
res.forEach((chunk) => bytes.append(chunk));
let response: PushResponse | undefined;
if (bytes.length === 0) {
return {
success: null,
failure: {
error: LightPushError.NO_RESPONSE,
peerId: peerId
}
};
}
return ProtocolHandler.handleResponse(bytes, protocol, peerId);
}
private async getProtocol(
peerId: PeerId,
useLegacy: boolean
): Promise<string | undefined> {
try {
response = PushRpc.decode(bytes).response;
} catch (err) {
log.error("Failed to decode push reply", err);
return {
success: null,
failure: {
error: ProtocolError.DECODE_FAILED,
peerId: peerId
}
};
}
const peer = await this.libp2p.peerStore.get(peerId);
if (!response) {
log.error("Remote peer fault: No response in PushRPC");
return {
success: null,
failure: {
error: ProtocolError.NO_RESPONSE,
peerId: peerId
}
};
if (
useLegacy ||
(!peer.protocols.includes(CODECS.v3) &&
peer.protocols.includes(CODECS.v2))
) {
return CODECS.v2;
} else if (peer.protocols.includes(CODECS.v3)) {
return CODECS.v3;
} else {
throw new Error("No supported protocol found");
}
} catch (error) {
log.error("Failed to get protocol", error);
return undefined;
}
}
if (isRLNResponseError(response.info)) {
log.error("Remote peer fault: RLN generation");
return {
success: null,
failure: {
error: ProtocolError.RLN_PROOF_GENERATION,
peerId: peerId
}
};
private async getStream(
peerId: PeerId,
protocol: string
): Promise<Stream | undefined> {
switch (protocol) {
case CODECS.v2:
return this.streamManagerV2.getStream(peerId);
case CODECS.v3:
return this.streamManager.getStream(peerId);
default:
return undefined;
}
if (!response.isSuccess) {
log.error("Remote peer rejected the message: ", response.info);
return {
success: null,
failure: {
error: ProtocolError.REMOTE_PEER_REJECTED,
peerId: peerId
}
};
}
return { success: peerId, failure: null };
}
}
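
A hedged sketch of calling the core-level send directly; the SDK normally wraps this with peer selection and retries, and all declared values are assumed to exist in the caller's scope:

import type { PeerId } from "@libp2p/interface";
import { LightPushError, type IEncoder, type IMessage } from "@waku/interfaces";
import { LightPushCore } from "@waku/core";

declare const lightPushCore: LightPushCore; // assumption: constructed with a started libp2p
declare const encoder: IEncoder;
declare const message: IMessage;
declare const peerId: PeerId;

const res = await lightPushCore.send(encoder, message, peerId, /* useLegacy */ false);
if (res.failure?.error === LightPushError.NO_STREAM_AVAILABLE) {
  // the connection to this peer likely dropped; pick another peer and retry
}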

View File

@ -0,0 +1,191 @@
import type { PeerId } from "@libp2p/interface";
import type { IEncoder, IMessage, LightPushCoreResult } from "@waku/interfaces";
import { LightPushError, LightPushStatusCode } from "@waku/interfaces";
import { PushResponse, WakuMessage } from "@waku/proto";
import { isMessageSizeUnderCap, Logger } from "@waku/utils";
import { Uint8ArrayList } from "uint8arraylist";
import { CODECS } from "./constants.js";
import { PushRpcV2 } from "./push_rpc.js";
import { PushRpc } from "./push_rpc_v3.js";
import { isRLNResponseError } from "./utils.js";
type VersionedPushRpc =
| ({ version: "v2" } & PushRpcV2)
| ({ version: "v3" } & PushRpc);
type PreparePushMessageResult =
| { rpc: VersionedPushRpc; error: null }
| { rpc: null; error: LightPushError };
const log = new Logger("light-push:protocol-handler");
export class ProtocolHandler {
public static async preparePushMessage(
encoder: IEncoder,
message: IMessage,
protocol: string
): Promise<PreparePushMessageResult> {
try {
if (!message.payload || message.payload.length === 0) {
log.error("Failed to send waku light push: payload is empty");
return { rpc: null, error: LightPushError.EMPTY_PAYLOAD };
}
if (!(await isMessageSizeUnderCap(encoder, message))) {
log.error("Failed to send waku light push: message is bigger than 1MB");
return { rpc: null, error: LightPushError.SIZE_TOO_BIG };
}
const protoMessage = await encoder.toProtoObj(message);
if (!protoMessage) {
log.error("Failed to encode to protoMessage, aborting push");
return { rpc: null, error: LightPushError.ENCODE_FAILED };
}
if (protocol === CODECS.v3) {
log.info("Creating v3 RPC message");
return {
rpc: ProtocolHandler.createV3Rpc(protoMessage, encoder.pubsubTopic),
error: null
};
}
log.info("Creating v2 RPC message");
return {
rpc: ProtocolHandler.createV2Rpc(protoMessage, encoder.pubsubTopic),
error: null
};
} catch (err) {
log.error("Failed to prepare push message", err);
return { rpc: null, error: LightPushError.GENERIC_FAIL };
}
}
/**
* Decode and evaluate a LightPush response according to the protocol version
*/
public static handleResponse(
bytes: Uint8ArrayList,
protocol: string,
peerId: PeerId
): LightPushCoreResult {
if (protocol === CODECS.v3) {
return ProtocolHandler.handleV3Response(bytes, peerId);
}
return ProtocolHandler.handleV2Response(bytes, peerId);
}
private static handleV3Response(
bytes: Uint8ArrayList,
peerId: PeerId
): LightPushCoreResult {
try {
const decodedRpcV3 = PushRpc.decodeResponse(bytes);
const statusCode = decodedRpcV3.statusCode;
const statusDesc = decodedRpcV3.statusDesc;
if (statusCode !== LightPushStatusCode.SUCCESS) {
const error = LightPushError.REMOTE_PEER_REJECTED;
log.error(
`Remote peer rejected with v3 status code ${statusCode}: ${statusDesc}`
);
return {
success: null,
failure: {
error,
peerId: peerId
}
};
}
if (decodedRpcV3.relayPeerCount !== undefined) {
log.info(`Message relayed to ${decodedRpcV3.relayPeerCount} peers`);
}
return { success: peerId, failure: null };
} catch (err) {
return {
success: null,
failure: {
error: LightPushError.DECODE_FAILED,
peerId: peerId
}
};
}
}
private static handleV2Response(
bytes: Uint8ArrayList,
peerId: PeerId
): LightPushCoreResult {
let response: PushResponse | undefined;
try {
const decodedRpc = PushRpcV2.decode(bytes);
response = decodedRpc.response;
} catch (err) {
return {
success: null,
failure: {
error: LightPushError.DECODE_FAILED,
peerId: peerId
}
};
}
if (!response) {
return {
success: null,
failure: {
error: LightPushError.NO_RESPONSE,
peerId: peerId
}
};
}
if (isRLNResponseError(response.info)) {
log.error("Remote peer fault: RLN generation");
return {
success: null,
failure: {
error: LightPushError.RLN_PROOF_GENERATION,
peerId: peerId
}
};
}
if (!response.isSuccess) {
log.error("Remote peer rejected the message: ", response.info);
return {
success: null,
failure: {
error: LightPushError.REMOTE_PEER_REJECTED,
peerId: peerId
}
};
}
return { success: peerId, failure: null };
}
private static createV2Rpc(
message: WakuMessage,
pubsubTopic: string
): VersionedPushRpc {
const v2Rpc = PushRpcV2.createRequest(message, pubsubTopic);
return Object.assign(v2Rpc, { version: "v2" as const });
}
private static createV3Rpc(
message: WakuMessage,
pubsubTopic: string
): VersionedPushRpc {
if (!message.timestamp) {
message.timestamp = BigInt(Date.now()) * BigInt(1_000_000);
}
const v3Rpc = PushRpc.createRequest(message, pubsubTopic);
return Object.assign(v3Rpc, { version: "v3" as const });
}
}
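
A minimal sketch of how LightPushCore drives this handler, simplified from the send() path above; encoder, message, peerId and the collected reply bytes are assumed to be in scope:

import type { PeerId } from "@libp2p/interface";
import type { IEncoder, IMessage } from "@waku/interfaces";
import type { Uint8ArrayList } from "uint8arraylist";
import { CODECS } from "./constants.js";
import { ProtocolHandler } from "./protocol_handler.js";

declare const encoder: IEncoder;
declare const message: IMessage;
declare const peerId: PeerId;
declare const replyBytes: Uint8ArrayList; // assumption: collected from the stream

const { rpc, error } = await ProtocolHandler.preparePushMessage(
  encoder,
  message,
  CODECS.v3
);
if (!error && rpc) {
  // rpc.encode() is what gets piped over the stream (length-prefixed);
  // the peer's reply is then interpreted per protocol version:
  const result = ProtocolHandler.handleResponse(replyBytes, CODECS.v3, peerId);
  if (result.failure) {
    console.error("LightPush v3 request failed:", result.failure.error);
  }
}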

View File

@ -2,14 +2,14 @@ import { proto_lightpush as proto } from "@waku/proto";
import type { Uint8ArrayList } from "uint8arraylist";
import { v4 as uuid } from "uuid";
export class PushRpc {
export class PushRpcV2 {
public constructor(public proto: proto.PushRpc) {}
public static createRequest(
message: proto.WakuMessage,
pubsubTopic: string
): PushRpc {
return new PushRpc({
): PushRpcV2 {
return new PushRpcV2({
requestId: uuid(),
request: {
message: message,
@ -19,9 +19,9 @@ export class PushRpc {
});
}
public static decode(bytes: Uint8ArrayList): PushRpc {
public static decode(bytes: Uint8ArrayList): PushRpcV2 {
const res = proto.PushRpc.decode(bytes);
return new PushRpc(res);
return new PushRpcV2(res);
}
public encode(): Uint8Array {

View File

@ -0,0 +1,162 @@
import { proto_lightpush as proto } from "@waku/proto";
import type { Uint8ArrayList } from "uint8arraylist";
import { v4 as uuid } from "uuid";
/**
* LightPush v3 protocol RPC handler.
* Implements the v3 message format with correct field numbers:
* - requestId: 1
* - pubsubTopic: 20
* - message: 21
*/
export class PushRpc {
public constructor(
public proto: proto.LightPushRequestV3 | proto.LightPushResponseV3
) {}
/**
* Create a v3 request message with proper field numbering
*/
public static createRequest(
message: proto.WakuMessage,
pubsubTopic: string
): PushRpc {
return new PushRpc({
requestId: uuid(),
pubsubTopic: pubsubTopic,
message: message
});
}
/**
* Create a v3 response message with status code handling
*/
public static createResponse(
requestId: string,
statusCode: number,
statusDesc?: string,
relayPeerCount?: number
): PushRpc {
return new PushRpc({
requestId,
statusCode,
statusDesc,
relayPeerCount
});
}
/**
* Decode v3 request message
*/
public static decodeRequest(bytes: Uint8ArrayList): PushRpc {
const res = proto.LightPushRequestV3.decode(bytes);
return new PushRpc(res);
}
/**
* Decode v3 response message
*/
public static decodeResponse(bytes: Uint8ArrayList): PushRpc {
const res = proto.LightPushResponseV3.decode(bytes);
return new PushRpc(res);
}
/**
* Encode message to bytes
*/
public encode(): Uint8Array {
if (this.isRequest()) {
return proto.LightPushRequestV3.encode(
this.proto as proto.LightPushRequestV3
);
} else {
return proto.LightPushResponseV3.encode(
this.proto as proto.LightPushResponseV3
);
}
}
/**
* Get request data (if this is a request message)
*/
public get request(): proto.LightPushRequestV3 | undefined {
return this.isRequest()
? (this.proto as proto.LightPushRequestV3)
: undefined;
}
/**
* Get response data (if this is a response message)
*/
public get response(): proto.LightPushResponseV3 | undefined {
return this.isResponse()
? (this.proto as proto.LightPushResponseV3)
: undefined;
}
/**
* Get the request ID
*/
public get requestId(): string {
return this.proto.requestId;
}
/**
* Get the pubsub topic (only available in requests)
*/
public get pubsubTopic(): string | undefined {
return this.isRequest()
? (this.proto as proto.LightPushRequestV3).pubsubTopic
: undefined;
}
/**
* Get the message (only available in requests)
*/
public get message(): proto.WakuMessage | undefined {
return this.isRequest()
? (this.proto as proto.LightPushRequestV3).message
: undefined;
}
/**
* Get the status code (only available in responses)
*/
public get statusCode(): number | undefined {
return this.isResponse()
? (this.proto as proto.LightPushResponseV3).statusCode
: undefined;
}
/**
* Get the status description (only available in responses)
*/
public get statusDesc(): string | undefined {
return this.isResponse()
? (this.proto as proto.LightPushResponseV3).statusDesc
: undefined;
}
/**
* Get the relay peer count (only available in responses)
*/
public get relayPeerCount(): number | undefined {
return this.isResponse()
? (this.proto as proto.LightPushResponseV3).relayPeerCount
: undefined;
}
/**
* Check if this is a request message
*/
private isRequest(): boolean {
return "pubsubTopic" in this.proto && "message" in this.proto;
}
/**
* Check if this is a response message
*/
private isResponse(): boolean {
return "statusCode" in this.proto;
}
}
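
A short sketch of building and encoding a v3 request the way the protocol handler does; the WakuMessage is assumed to come from an encoder's toProtoObj(), and the pubsub topic is only an example value:

import type { WakuMessage } from "@waku/proto";
import { PushRpc } from "./push_rpc_v3.js";

declare const protoMessage: WakuMessage; // assumption: produced by encoder.toProtoObj()

const rpc = PushRpc.createRequest(protoMessage, "/waku/2/rs/1/0");
const wire: Uint8Array = rpc.encode(); // written to the stream, length-prefixed via it-length-prefixed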

View File

@ -13,7 +13,7 @@ export class StreamManager {
private streamPool: Map<string, Promise<void>> = new Map();
public constructor(
private multicodec: string,
private readonly multicodec: string,
private readonly libp2p: Libp2pComponents
) {
this.log = new Logger(`stream-manager:${multicodec}`);

View File

@ -1,5 +1,18 @@
# Changelog
## [0.0.12](https://github.com/waku-org/js-waku/compare/discovery-v0.0.11...discovery-v0.0.12) (2025-09-20)
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/core bumped from 0.0.38 to 0.0.39
* @waku/enr bumped from 0.0.32 to 0.0.33
* @waku/interfaces bumped from 0.0.33 to 0.0.34
* @waku/proto bumped from ^0.0.13 to ^0.0.14
* @waku/utils bumped from 0.0.26 to 0.0.27
## [0.0.11](https://github.com/waku-org/js-waku/compare/discovery-v0.0.10...discovery-v0.0.11) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/discovery",
"version": "0.0.11",
"version": "0.0.12",
"description": "Contains various discovery mechanisms: DNS Discovery (EIP-1459, Peer Exchange, Local Peer Cache Discovery.",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -51,11 +51,11 @@
"node": ">=22"
},
"dependencies": {
"@waku/core": "0.0.38",
"@waku/enr": "0.0.32",
"@waku/interfaces": "0.0.33",
"@waku/proto": "^0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/enr": "0.0.33",
"@waku/interfaces": "0.0.34",
"@waku/proto": "^0.0.14",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"dns-over-http-resolver": "^3.0.8",
"hi-base32": "^0.5.1",

View File

@ -99,6 +99,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* devDependencies
* @waku/interfaces bumped from 0.0.27 to 0.0.28
## [0.0.33](https://github.com/waku-org/js-waku/compare/enr-v0.0.32...enr-v0.0.33) (2025-09-20)
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/utils bumped from 0.0.26 to 0.0.27
* devDependencies
* @waku/interfaces bumped from 0.0.33 to 0.0.34
## [0.0.32](https://github.com/waku-org/js-waku/compare/enr-v0.0.31...enr-v0.0.32) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/enr",
"version": "0.0.32",
"version": "0.0.33",
"description": "ENR (EIP-778) for Waku",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -56,7 +56,7 @@
"@libp2p/peer-id": "5.1.7",
"@multiformats/multiaddr": "^12.0.0",
"@noble/secp256k1": "^1.7.1",
"@waku/utils": "0.0.26",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"js-sha3": "^0.9.2"
},
@ -67,7 +67,7 @@
"@types/chai": "^4.3.11",
"@types/mocha": "^10.0.6",
"@waku/build-utils": "*",
"@waku/interfaces": "0.0.33",
"@waku/interfaces": "0.0.34",
"chai": "^4.3.10",
"cspell": "^8.6.1",
"fast-check": "^3.19.0",

View File

@ -5,6 +5,17 @@ All notable changes to this project will be documented in this file.
The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification,
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.0.34](https://github.com/waku-org/js-waku/compare/interfaces-v0.0.33...interfaces-v0.0.34) (2025-09-20)
### Features
* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448))
* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b))
* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8))
* Query on connect ([#2602](https://github.com/waku-org/js-waku/issues/2602)) ([8542d04](https://github.com/waku-org/js-waku/commit/8542d04bf5c9472f955ef8c9e5bc9e89c70f4738))
* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397))
## [0.0.33](https://github.com/waku-org/js-waku/compare/interfaces-v0.0.32...interfaces-v0.0.33) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/interfaces",
"version": "0.0.33",
"version": "0.0.34",
"description": "Definition of Waku interfaces",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",

View File

@ -1,4 +1,6 @@
import type { ISender, ISendOptions } from "./sender.js";
import { IEncoder, IMessage } from "./message.js";
import { LightPushSDKResult } from "./protocols.js";
import type { ISendOptions } from "./sender.js";
export type LightPushProtocolOptions = ISendOptions & {
/**
@ -15,8 +17,40 @@ export type LightPushProtocolOptions = ISendOptions & {
numPeersToUse?: number;
};
export type ILightPush = ISender & {
readonly multicodec: string;
export type ILightPush = {
readonly multicodec: string[];
start: () => void;
stop: () => void;
send: (
encoder: IEncoder,
message: IMessage,
options?: ISendOptions
) => Promise<LightPushSDKResult>;
};
export enum LightPushStatusCode {
SUCCESS = 200,
BAD_REQUEST = 400,
PAYLOAD_TOO_LARGE = 413,
INVALID_MESSAGE = 420,
UNSUPPORTED_TOPIC = 421,
TOO_MANY_REQUESTS = 429,
INTERNAL_ERROR = 500,
UNAVAILABLE = 503,
NO_RLN_PROOF = 504,
NO_PEERS = 505
}
export const StatusDescriptions: Record<LightPushStatusCode, string> = {
[LightPushStatusCode.SUCCESS]: "Message sent successfully",
[LightPushStatusCode.BAD_REQUEST]: "Bad request format",
[LightPushStatusCode.PAYLOAD_TOO_LARGE]:
"Message payload exceeds maximum size",
[LightPushStatusCode.INVALID_MESSAGE]: "Message validation failed",
[LightPushStatusCode.UNSUPPORTED_TOPIC]: "Unsupported pubsub topic",
[LightPushStatusCode.TOO_MANY_REQUESTS]: "Rate limit exceeded",
[LightPushStatusCode.INTERNAL_ERROR]: "Internal server error",
[LightPushStatusCode.UNAVAILABLE]: "Service temporarily unavailable",
[LightPushStatusCode.NO_RLN_PROOF]: "RLN proof generation failed",
[LightPushStatusCode.NO_PEERS]: "No relay peers available"
};
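
A small hedged helper showing how this table can turn a raw v3 status code from a response into a readable string; the helper name is illustrative, and both identifiers are assumed to be re-exported from @waku/interfaces like the rest of this file:

import { LightPushStatusCode, StatusDescriptions } from "@waku/interfaces";

function describeStatus(code: number): string {
  const known = StatusDescriptions[code as LightPushStatusCode];
  return known ?? `Unknown LightPush status code: ${code}`;
}

// describeStatus(200) -> "Message sent successfully"
// describeStatus(429) -> "Rate limit exceeded"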

View File

@ -130,117 +130,123 @@ export type Callback<T extends IDecodedMessage> = (
msg: T
) => void | Promise<void>;
export enum ProtocolError {
//
// GENERAL ERRORS SECTION
//
/**
* Could not determine the origin of the fault. Best to check connectivity and try again
* */
export enum LightPushError {
GENERIC_FAIL = "Generic error",
/**
* The remote peer rejected the message. Information provided by the remote peer
* is logged. Review message validity, or mitigation for `NO_PEER_AVAILABLE`
* or `DECODE_FAILED` can be used.
*/
REMOTE_PEER_REJECTED = "Remote peer rejected",
/**
* Failure to protobuf decode the message. May be due to a remote peer issue,
* ensuring that messages are sent via several peer enable mitigation of this error.
*/
DECODE_FAILED = "Failed to decode",
/**
* Failure to find a peer with suitable protocols. This may due to a connection issue.
* Mitigation can be: retrying after a given time period, display connectivity issue
* to user or listening for `peer:connected:bootstrap` or `peer:connected:peer-exchange`
* on the connection manager before retrying.
*/
NO_PEER_AVAILABLE = "No peer available",
/**
* Failure to find a stream to the peer. This may be because the connection with the peer is not still alive.
* Mitigation can be: retrying after a given time period, or mitigation for `NO_PEER_AVAILABLE` can be used.
*/
NO_STREAM_AVAILABLE = "No stream available",
/**
* The remote peer did not behave as expected. Mitigation for `NO_PEER_AVAILABLE`
* or `DECODE_FAILED` can be used.
*/
NO_RESPONSE = "No response received",
//
// SEND ERRORS SECTION
//
/**
* Failure to protobuf encode the message. This is not recoverable and needs
* further investigation.
*/
ENCODE_FAILED = "Failed to encode",
/**
* The message payload is empty, making the message invalid. Ensure that a non-empty
* payload is set on the outgoing message.
*/
EMPTY_PAYLOAD = "Payload is empty",
/**
* The message size is above the maximum message size allowed on the Waku Network.
* Compressing the message or using an alternative strategy for large messages is recommended.
*/
SIZE_TOO_BIG = "Size is too big",
/**
* The PubsubTopic passed to the send function is not configured on the Waku node.
* Please ensure that the PubsubTopic is used when initializing the Waku node.
*/
TOPIC_NOT_CONFIGURED = "Topic not configured",
/**
* Fails when
*/
STREAM_ABORTED = "Stream aborted",
/**
* General proof generation error message.
* nwaku: https://github.com/waku-org/nwaku/blob/c3cb06ac6c03f0f382d3941ea53b330f6a8dd127/waku/waku_rln_relay/group_manager/group_manager_base.nim#L201C19-L201C42
*/
ENCODE_FAILED = "Failed to encode",
EMPTY_PAYLOAD = "Payload is empty",
SIZE_TOO_BIG = "Size is too big",
TOPIC_NOT_CONFIGURED = "Topic not configured",
RLN_PROOF_GENERATION = "Proof generation failed",
REMOTE_PEER_REJECTED = "Remote peer rejected",
//
// RECEIVE ERRORS SECTION
//
/**
* The pubsub topic configured on the decoder does not match the pubsub topic setup on the protocol.
* Ensure that the pubsub topic used for decoder creation is the same as the one used for protocol.
*/
TOPIC_DECODER_MISMATCH = "Topic decoder mismatch",
/**
* The topics passed in the decoders do not match each other, or don't exist at all.
* Ensure that all the pubsub topics used in the decoders are valid and match each other.
*/
INVALID_DECODER_TOPICS = "Invalid decoder topics"
BAD_REQUEST = "Bad request format",
PAYLOAD_TOO_LARGE = "Message payload exceeds maximum size",
INVALID_MESSAGE = "Message validation failed",
UNSUPPORTED_TOPIC = "Unsupported pubsub topic",
TOO_MANY_REQUESTS = "Rate limit exceeded",
INTERNAL_ERROR = "Internal server error",
UNAVAILABLE = "Service temporarily unavailable",
NO_RLN_PROOF = "RLN proof generation failed",
NO_PEERS = "No relay peers available"
}
export interface Failure {
error: ProtocolError;
export enum FilterError {
// General errors
GENERIC_FAIL = "Generic error",
DECODE_FAILED = "Failed to decode",
NO_PEER_AVAILABLE = "No peer available",
NO_STREAM_AVAILABLE = "No stream available",
NO_RESPONSE = "No response received",
STREAM_ABORTED = "Stream aborted",
// Filter specific errors
REMOTE_PEER_REJECTED = "Remote peer rejected",
TOPIC_NOT_CONFIGURED = "Topic not configured",
SUBSCRIPTION_FAILED = "Subscription failed",
UNSUBSCRIBE_FAILED = "Unsubscribe failed",
PING_FAILED = "Ping failed",
TOPIC_DECODER_MISMATCH = "Topic decoder mismatch",
INVALID_DECODER_TOPICS = "Invalid decoder topics",
SUBSCRIPTION_LIMIT_EXCEEDED = "Subscription limit exceeded",
INVALID_CONTENT_TOPIC = "Invalid content topic",
PUSH_MESSAGE_FAILED = "Push message failed",
EMPTY_MESSAGE = "Empty message received",
MISSING_PUBSUB_TOPIC = "Pubsub topic missing from push message"
}
export interface LightPushFailure {
error: LightPushError;
peerId?: PeerId;
}
export type CoreProtocolResult = ThisOrThat<
export interface FilterFailure {
error: FilterError;
peerId?: PeerId;
}
export type LightPushCoreResult = ThisOrThat<
"success",
PeerId,
"failure",
Failure
LightPushFailure
>;
export type FilterCoreResult = ThisOrThat<
"success",
PeerId,
"failure",
FilterFailure
>;
export type LightPushSDKResult = ThisAndThat<
"successes",
PeerId[],
"failures",
LightPushFailure[]
>;
export type FilterSDKResult = ThisAndThat<
"successes",
PeerId[],
"failures",
FilterFailure[]
>;
/**
* @deprecated replace usage by specific result types
*/
export type SDKProtocolResult = ThisAndThat<
"successes",
PeerId[],
"failures",
Failure[]
Array<{
error: ProtocolError;
peerId?: PeerId;
}>
>;
/**
* @deprecated replace usage by specific result types
*/
export enum ProtocolError {
GENERIC_FAIL = "Generic error",
REMOTE_PEER_REJECTED = "Remote peer rejected",
DECODE_FAILED = "Failed to decode",
NO_PEER_AVAILABLE = "No peer available",
NO_STREAM_AVAILABLE = "No stream available",
NO_RESPONSE = "No response received",
ENCODE_FAILED = "Failed to encode",
EMPTY_PAYLOAD = "Payload is empty",
SIZE_TOO_BIG = "Size is too big",
TOPIC_NOT_CONFIGURED = "Topic not configured",
STREAM_ABORTED = "Stream aborted",
RLN_PROOF_GENERATION = "Proof generation failed",
TOPIC_DECODER_MISMATCH = "Topic decoder mismatch",
INVALID_DECODER_TOPICS = "Invalid decoder topics"
}
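
One practical consequence of the split result types, sketched below: the core results are either/or (ThisOrThat), while the SDK aggregates per-peer outcomes (ThisAndThat), so an SDK result can be a partial success with both arrays non-empty. The helper name is illustrative.

import type { LightPushSDKResult } from "@waku/interfaces";

function isPartialSuccess(result: LightPushSDKResult): boolean {
  // some peers accepted the message while others failed
  return result.successes.length > 0 && result.failures.length > 0;
}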

View File

@ -1,5 +1,5 @@
import type { IEncoder, IMessage } from "./message.js";
import { SDKProtocolResult } from "./protocols.js";
import { LightPushSDKResult } from "./protocols.js";
export type ISendOptions = {
/**
@ -13,6 +13,13 @@ export type ISendOptions = {
* @default 3
*/
maxAttempts?: number;
/**
* Use v2 of the light push protocol.
* This parameter will be removed in the future.
* @default false
*/
useLegacy?: boolean;
};
export interface ISender {
@ -20,5 +27,5 @@ export interface ISender {
encoder: IEncoder,
message: IMessage,
sendOptions?: ISendOptions
) => Promise<SDKProtocolResult>;
) => Promise<LightPushSDKResult>;
}
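
A hedged usage sketch of the new option at the SDK level; node is assumed to be a started light node exposing lightPush, and encoder/message to be created elsewhere:

import type { IEncoder, ILightPush, IMessage } from "@waku/interfaces";

declare const node: { lightPush: ILightPush }; // assumption: a started light node
declare const encoder: IEncoder;
declare const message: IMessage;

const result = await node.lightPush.send(encoder, message, {
  useLegacy: true // force the legacy codec /vac/waku/lightpush/2.0.0-beta1
});

if (result.failures.length > 0) {
  console.warn("Some peers failed or rejected the push", result.failures);
}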

View File

@ -101,6 +101,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* @waku/interfaces bumped from 0.0.27 to 0.0.28
* @waku/utils bumped from 0.0.20 to 0.0.21
## [0.0.37](https://github.com/waku-org/js-waku/compare/message-encryption-v0.0.36...message-encryption-v0.0.37) (2025-09-20)
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/core bumped from 0.0.38 to 0.0.39
* @waku/interfaces bumped from 0.0.33 to 0.0.34
* @waku/proto bumped from 0.0.13 to 0.0.14
* @waku/utils bumped from 0.0.26 to 0.0.27
## [0.0.36](https://github.com/waku-org/js-waku/compare/message-encryption-v0.0.35...message-encryption-v0.0.36) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/message-encryption",
"version": "0.0.36",
"version": "0.0.37",
"description": "Waku Message Payload Encryption",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -76,10 +76,10 @@
},
"dependencies": {
"@noble/secp256k1": "^1.7.1",
"@waku/core": "0.0.38",
"@waku/interfaces": "0.0.33",
"@waku/proto": "0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/interfaces": "0.0.34",
"@waku/proto": "0.0.14",
"@waku/utils": "0.0.27",
"debug": "^4.3.4",
"js-sha3": "^0.9.2",
"uint8arrays": "^5.0.1"

View File

@ -1,5 +1,12 @@
# Changelog
## [0.0.14](https://github.com/waku-org/js-waku/compare/proto-v0.0.13...proto-v0.0.14) (2025-09-20)
### Features
* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8))
## [0.0.13](https://github.com/waku-org/js-waku/compare/proto-v0.0.12...proto-v0.0.13) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/proto",
"version": "0.0.13",
"version": "0.0.14",
"description": "Protobuf definitions for Waku",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",

View File

@ -39,4 +39,4 @@ message LightPushResponseV3 {
uint32 status_code = 10;
optional string status_desc = 11;
optional uint32 relay_peer_count = 12;
}
}

View File

@ -25,6 +25,25 @@
* @waku/interfaces bumped from 0.0.16 to 0.0.17
* @waku/utils bumped from 0.0.9 to 0.0.10
## [0.0.22](https://github.com/waku-org/js-waku/compare/relay-v0.0.21...relay-v0.0.22) (2025-09-20)
### Features
* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b))
* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/core bumped from 0.0.38 to 0.0.39
* @waku/sdk bumped from 0.0.34 to 0.0.35
* @waku/interfaces bumped from 0.0.33 to 0.0.34
* @waku/proto bumped from 0.0.13 to 0.0.14
* @waku/utils bumped from 0.0.26 to 0.0.27
## [0.0.21](https://github.com/waku-org/js-waku/compare/relay-v0.0.20...relay-v0.0.21) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/relay",
"version": "0.0.21",
"version": "0.0.22",
"description": "Relay Protocol for Waku",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -51,11 +51,11 @@
"dependencies": {
"@chainsafe/libp2p-gossipsub": "14.1.1",
"@noble/hashes": "^1.3.2",
"@waku/core": "0.0.38",
"@waku/sdk": "0.0.34",
"@waku/interfaces": "0.0.33",
"@waku/proto": "0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/sdk": "0.0.35",
"@waku/interfaces": "0.0.34",
"@waku/proto": "0.0.14",
"@waku/utils": "0.0.27",
"chai": "^4.3.10",
"debug": "^4.3.4",
"fast-check": "^3.19.0",

View File

@ -19,9 +19,9 @@ import {
IRelay,
type IRoutingInfo,
Libp2p,
ProtocolError,
PubsubTopic,
SDKProtocolResult
LightPushError,
LightPushSDKResult,
PubsubTopic
} from "@waku/interfaces";
import { isWireSizeUnderCap, toAsyncIterator } from "@waku/utils";
import { pushOrInitMapSet } from "@waku/utils";
@ -127,7 +127,7 @@ export class Relay implements IRelay {
public async send(
encoder: IEncoder,
message: IMessage
): Promise<SDKProtocolResult> {
): Promise<LightPushSDKResult> {
const { pubsubTopic } = encoder;
if (!this.pubsubTopics.has(pubsubTopic)) {
log.error("Failed to send waku relay: topic not configured");
@ -135,7 +135,7 @@ export class Relay implements IRelay {
successes: [],
failures: [
{
error: ProtocolError.TOPIC_NOT_CONFIGURED
error: LightPushError.TOPIC_NOT_CONFIGURED
}
]
};
@ -148,7 +148,7 @@ export class Relay implements IRelay {
successes: [],
failures: [
{
error: ProtocolError.ENCODE_FAILED
error: LightPushError.ENCODE_FAILED
}
]
};
@ -160,7 +160,7 @@ export class Relay implements IRelay {
successes: [],
failures: [
{
error: ProtocolError.SIZE_TOO_BIG
error: LightPushError.SIZE_TOO_BIG
}
]
};

View File

@ -1,5 +1,23 @@
# Changelog
## [0.1.9](https://github.com/waku-org/js-waku/compare/rln-v0.1.8...rln-v0.1.9) (2025-09-20)
### Features
* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/core bumped from ^0.0.38 to ^0.0.39
* @waku/utils bumped from ^0.0.26 to ^0.0.27
* devDependencies
* @waku/interfaces bumped from 0.0.33 to 0.0.34
* @waku/message-encryption bumped from ^0.0.36 to ^0.0.37
## [0.1.8](https://github.com/waku-org/js-waku/compare/rln-v0.1.7...rln-v0.1.8) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/rln",
"version": "0.1.8",
"version": "0.1.9",
"description": "RLN (Rate Limiting Nullifier) implementation for Waku",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -54,12 +54,12 @@
"@rollup/plugin-node-resolve": "^15.2.3",
"@types/chai": "^5.0.1",
"@types/chai-spies": "^1.0.6",
"@waku/interfaces": "0.0.33",
"@waku/interfaces": "0.0.34",
"@types/deep-equal-in-any-order": "^1.0.4",
"@types/lodash": "^4.17.15",
"@types/sinon": "^17.0.3",
"@waku/build-utils": "^1.0.0",
"@waku/message-encryption": "^0.0.36",
"@waku/message-encryption": "^0.0.37",
"deep-equal-in-any-order": "^2.0.6",
"fast-check": "^3.23.2",
"rollup-plugin-copy": "^3.5.0"
@ -76,8 +76,8 @@
],
"dependencies": {
"@chainsafe/bls-keystore": "3.0.0",
"@waku/core": "^0.0.38",
"@waku/utils": "^0.0.26",
"@waku/core": "^0.0.39",
"@waku/utils": "^0.0.27",
"@noble/hashes": "^1.2.0",
"@waku/zerokit-rln-wasm": "^0.0.13",
"ethereum-cryptography": "^3.1.0",

View File

@ -47,6 +47,40 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* @waku/interfaces bumped from 0.0.19 to 0.0.20
* @waku/peer-exchange bumped from ^0.0.17 to ^0.0.18
## [0.0.35](https://github.com/waku-org/js-waku/compare/sdk-v0.0.34...sdk-v0.0.35) (2025-09-20)
### Features
* Add debounce to health indicator ([#2594](https://github.com/waku-org/js-waku/issues/2594)) ([a7f30b1](https://github.com/waku-org/js-waku/commit/a7f30b121143454340aa7b3aeb4f55470905c54d))
* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448))
* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b))
* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8))
* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f))
* Introduce reliable channels ([#2526](https://github.com/waku-org/js-waku/issues/2526)) ([4d5c152](https://github.com/waku-org/js-waku/commit/4d5c152f5b1b1c241bbe7bb96d13d927a6f7550e))
* Query on connect ([#2602](https://github.com/waku-org/js-waku/issues/2602)) ([8542d04](https://github.com/waku-org/js-waku/commit/8542d04bf5c9472f955ef8c9e5bc9e89c70f4738))
* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397))
### Bug Fixes
* (sds) ensure incoming messages have their retrieval hint stored ([#2604](https://github.com/waku-org/js-waku/issues/2604)) ([914beb6](https://github.com/waku-org/js-waku/commit/914beb6531a84f8c11ca951721225d47f9e6c285))
* Make health events emission consistent ([#2570](https://github.com/waku-org/js-waku/issues/2570)) ([c8dfdb1](https://github.com/waku-org/js-waku/commit/c8dfdb1ace8f0f8f668d8f2bb6e0eaed90041782))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/core bumped from 0.0.38 to 0.0.39
* @waku/discovery bumped from 0.0.11 to 0.0.12
* @waku/interfaces bumped from 0.0.33 to 0.0.34
* @waku/proto bumped from ^0.0.13 to ^0.0.14
* @waku/sds bumped from ^0.0.6 to ^0.0.7
* @waku/utils bumped from 0.0.26 to 0.0.27
* devDependencies
* @waku/message-encryption bumped from ^0.0.36 to ^0.0.37
## [0.0.34](https://github.com/waku-org/js-waku/compare/sdk-v0.0.33...sdk-v0.0.34) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/sdk",
"version": "0.0.34",
"version": "0.0.35",
"description": "A unified SDK for easy creation and management of js-waku nodes.",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -68,11 +68,12 @@
"@libp2p/websockets": "9.2.16",
"@noble/hashes": "^1.3.3",
"@types/lodash.debounce": "^4.0.9",
"@waku/core": "0.0.38",
"@waku/discovery": "0.0.11",
"@waku/interfaces": "0.0.33",
"@waku/proto": "^0.0.13",
"@waku/utils": "0.0.26",
"@waku/core": "0.0.39",
"@waku/discovery": "0.0.12",
"@waku/interfaces": "0.0.34",
"@waku/proto": "^0.0.14",
"@waku/sds": "^0.0.7",
"@waku/utils": "0.0.27",
"libp2p": "2.8.11",
"lodash.debounce": "^4.0.8"
},
@ -85,6 +86,7 @@
"@types/chai": "^4.3.11",
"@types/mocha": "^10.0.9",
"@waku/build-utils": "*",
"@waku/message-encryption": "^0.0.37",
"chai": "^5.1.1",
"cspell": "^8.6.1",
"interface-datastore": "8.3.2",

View File

@ -17,6 +17,7 @@ export {
export { LightPush } from "./light_push/index.js";
export { Filter } from "./filter/index.js";
export { Store } from "./store/index.js";
export * from "./reliable_channel/index.js";
export * as waku from "@waku/core";
export * as utils from "@waku/utils";

View File

@ -1,6 +1,11 @@
import { Peer, PeerId } from "@libp2p/interface";
import { createEncoder, Encoder, LightPushCodec } from "@waku/core";
import { Libp2p, ProtocolError } from "@waku/interfaces";
import {
createEncoder,
Encoder,
LightPushCodec,
LightPushCodecV2
} from "@waku/core";
import { Libp2p, LightPushError, LightPushStatusCode } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -40,8 +45,8 @@ describe("LightPush SDK", () => {
const failures = result.failures ?? [];
expect(failures.length).to.be.eq(1);
expect(failures.some((v) => v.error === ProtocolError.NO_PEER_AVAILABLE)).to
.be.true;
expect(failures.some((v) => v.error === LightPushError.NO_PEER_AVAILABLE))
.to.be.true;
});
it("should send to specified number of peers of used peers", async () => {
@ -127,6 +132,45 @@ describe("LightPush SDK", () => {
expect(result.successes?.length).to.be.eq(1);
expect(result.failures?.length).to.be.eq(1);
});
describe("v3 protocol support", () => {
it("should work with v3 peers", async () => {
libp2p = mockLibp2p({
peers: [mockV3Peer("1"), mockV3Peer("2")]
});
});
it("should work with mixed v2 and v3 peers", async () => {
libp2p = mockLibp2p({
peers: [mockV2AndV3Peer("1"), mockPeer("2"), mockV3Peer("3")]
});
// Mock responses for different protocol versions
const v3Response = mockV3SuccessResponse(5);
const v2Response = mockV2SuccessResponse();
const v3ErrorResponse = mockV3ErrorResponse(
LightPushStatusCode.PAYLOAD_TOO_LARGE
);
const v2ErrorResponse = mockV2ErrorResponse("Message too large");
expect(v3Response.statusCode).to.eq(LightPushStatusCode.SUCCESS);
expect(v3Response.relayPeerCount).to.eq(5);
expect(v2Response.isSuccess).to.be.true;
expect(v3ErrorResponse.statusCode).to.eq(
LightPushStatusCode.PAYLOAD_TOO_LARGE
);
expect(v2ErrorResponse.isSuccess).to.be.false;
});
it("should handle v3 RLN errors", async () => {
const v3RLNError = mockV3RLNErrorResponse();
const v2RLNError = mockV2RLNErrorResponse();
expect(v3RLNError.statusCode).to.eq(LightPushStatusCode.NO_RLN_PROOF);
expect(v3RLNError.statusDesc).to.include("RLN proof generation failed");
expect(v2RLNError.info).to.include("RLN proof generation failed");
});
});
});
type MockLibp2pOptions = {
@ -136,7 +180,16 @@ type MockLibp2pOptions = {
function mockLibp2p(options?: MockLibp2pOptions): Libp2p {
const peers = options?.peers || [];
const peerStore = {
get: (id: any) => Promise.resolve(peers.find((p) => p.id === id))
get: (id: any) => {
const peer = peers.find((p) => p.id === id);
if (peer) {
return Promise.resolve({
...peer,
protocols: peer.protocols || [LightPushCodec]
});
}
return Promise.resolve(undefined);
}
};
return {
@ -179,9 +232,92 @@ function mockLightPush(options: MockLightPushOptions): LightPush {
return lightPush;
}
function mockPeer(id: string): Peer {
function mockPeer(id: string, protocols: string[] = [LightPushCodec]): Peer {
return {
id,
protocols: [LightPushCodec]
} as unknown as Peer;
id: { toString: () => id } as PeerId,
protocols: protocols,
metadata: new Map(),
addresses: [],
tags: new Map()
};
}
// V3-specific mock functions
function mockV3Peer(id: string): Peer {
return mockPeer(id, [LightPushCodec]);
}
function mockV2AndV3Peer(id: string): Peer {
return mockPeer(id, [LightPushCodec, LightPushCodecV2]);
}
function mockV3SuccessResponse(relayPeerCount?: number): {
statusCode: LightPushStatusCode;
statusDesc: string;
relayPeerCount?: number;
isSuccess: boolean;
} {
return {
statusCode: LightPushStatusCode.SUCCESS,
statusDesc: "Message sent successfully",
relayPeerCount,
isSuccess: true
};
}
function mockV3ErrorResponse(
statusCode: LightPushStatusCode,
statusDesc?: string
): {
statusCode: LightPushStatusCode;
statusDesc: string;
isSuccess: boolean;
} {
return {
statusCode,
statusDesc: statusDesc || "Error occurred",
isSuccess: false
};
}
function mockV2SuccessResponse(): {
isSuccess: boolean;
info: string;
} {
return {
isSuccess: true,
info: "Message sent successfully"
};
}
function mockV2ErrorResponse(info?: string): {
isSuccess: boolean;
info: string;
} {
return {
isSuccess: false,
info: info || "Error occurred"
};
}
function mockV3RLNErrorResponse(): {
statusCode: LightPushStatusCode;
statusDesc: string;
isSuccess: boolean;
} {
return {
statusCode: LightPushStatusCode.NO_RLN_PROOF,
statusDesc: "RLN proof generation failed",
isSuccess: false
};
}
function mockV2RLNErrorResponse(): {
isSuccess: boolean;
info: string;
} {
return {
isSuccess: false,
info: "RLN proof generation failed"
};
}

View File

@ -1,17 +1,17 @@
import type { PeerId } from "@libp2p/interface";
import { LightPushCore } from "@waku/core";
import {
type CoreProtocolResult,
Failure,
type IEncoder,
ILightPush,
type IMessage,
type ISendOptions,
type Libp2p,
LightPushCoreResult,
LightPushError,
LightPushFailure,
type LightPushProtocolOptions,
ProtocolError,
Protocols,
SDKProtocolResult
LightPushSDKResult,
Protocols
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -55,7 +55,7 @@ export class LightPush implements ILightPush {
});
}
public get multicodec(): string {
public get multicodec(): string[] {
return this.protocol.multicodec;
}
@ -71,8 +71,9 @@ export class LightPush implements ILightPush {
encoder: IEncoder,
message: IMessage,
options: ISendOptions = {}
): Promise<SDKProtocolResult> {
): Promise<LightPushSDKResult> {
options = {
useLegacy: false,
...this.config,
...options
};
@ -82,45 +83,48 @@ export class LightPush implements ILightPush {
log.info("send: attempting to send a message to pubsubTopic:", pubsubTopic);
const peerIds = await this.peerManager.getPeers({
protocol: Protocols.LightPush,
protocol: options.useLegacy ? "light-push-v2" : Protocols.LightPush,
pubsubTopic: encoder.pubsubTopic
});
const coreResults: CoreProtocolResult[] =
const coreResults =
peerIds?.length > 0
? await Promise.all(
peerIds.map((peerId) =>
this.protocol.send(encoder, message, peerId).catch((_e) => ({
success: null,
failure: {
error: ProtocolError.GENERIC_FAIL
}
}))
this.protocol
.send(encoder, message, peerId, options.useLegacy)
.catch((_e) => ({
success: null,
failure: {
error: LightPushError.GENERIC_FAIL
}
}))
)
)
: [];
const results: SDKProtocolResult = coreResults.length
const results: LightPushSDKResult = coreResults.length
? {
successes: coreResults
.filter((v) => v.success)
.map((v) => v.success) as PeerId[],
failures: coreResults
.filter((v) => v.failure)
.map((v) => v.failure) as Failure[]
.map((v) => v.failure) as LightPushFailure[]
}
: {
successes: [],
failures: [
{
error: ProtocolError.NO_PEER_AVAILABLE
error: LightPushError.NO_PEER_AVAILABLE
}
]
};
if (options.autoRetry && results.successes.length === 0) {
const sendCallback = (peerId: PeerId): Promise<CoreProtocolResult> =>
this.protocol.send(encoder, message, peerId);
const sendCallback = (peerId: PeerId): Promise<LightPushCoreResult> =>
this.protocol.send(encoder, message, peerId, options.useLegacy);
this.retryManager.push(
sendCallback.bind(this),
options.maxAttempts || DEFAULT_MAX_ATTEMPTS,

View File

@ -1,6 +1,7 @@
import type { PeerId } from "@libp2p/interface";
import {
type CoreProtocolResult,
type LightPushCoreResult,
LightPushError,
ProtocolError,
Protocols
} from "@waku/interfaces";
@ -59,7 +60,7 @@ describe("RetryManager", () => {
it("should process tasks in queue", async () => {
const successCallback = sinon.spy(
async (peerId: PeerId): Promise<CoreProtocolResult> => ({
async (peerId: PeerId): Promise<LightPushCoreResult> => ({
success: peerId,
failure: null
})
@ -112,9 +113,9 @@ describe("RetryManager", () => {
it("should retry failed tasks", async () => {
const failingCallback = sinon.spy(
async (): Promise<CoreProtocolResult> => ({
async (): Promise<LightPushCoreResult> => ({
success: null,
failure: { error: "test error" as any }
failure: { error: LightPushError.GENERIC_FAIL }
})
);
@ -135,7 +136,7 @@ describe("RetryManager", () => {
});
it("should request peer renewal on specific errors", async () => {
const errorCallback = sinon.spy(async (): Promise<CoreProtocolResult> => {
const errorCallback = sinon.spy(async (): Promise<LightPushCoreResult> => {
throw new Error(ProtocolError.NO_PEER_AVAILABLE);
});
@ -155,7 +156,7 @@ describe("RetryManager", () => {
});
it("should handle task timeouts", async () => {
const slowCallback = sinon.spy(async (): Promise<CoreProtocolResult> => {
const slowCallback = sinon.spy(async (): Promise<LightPushCoreResult> => {
await new Promise((resolve) => setTimeout(resolve, 15000));
return { success: mockPeerId, failure: null };
});
@ -174,9 +175,11 @@ describe("RetryManager", () => {
});
it("should not execute task if max attempts is 0", async () => {
const failingCallback = sinon.spy(async (): Promise<CoreProtocolResult> => {
throw new Error("test error" as any);
});
const failingCallback = sinon.spy(
async (): Promise<LightPushCoreResult> => {
throw new Error("test error" as any);
}
);
const task = {
callback: failingCallback,
@ -209,7 +212,7 @@ describe("RetryManager", () => {
called++;
return Promise.resolve({
success: null,
failure: { error: ProtocolError.GENERIC_FAIL }
failure: { error: LightPushError.GENERIC_FAIL }
});
});
retryManager.push(failCallback, 2, TestRoutingInfo);

View File

@ -1,7 +1,7 @@
import type { PeerId } from "@libp2p/interface";
import {
type CoreProtocolResult,
type IRoutingInfo,
type LightPushCoreResult,
Protocols
} from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -15,7 +15,7 @@ type RetryManagerConfig = {
peerManager: PeerManager;
};
type AttemptCallback = (peerId: PeerId) => Promise<CoreProtocolResult>;
type AttemptCallback = (peerId: PeerId) => Promise<LightPushCoreResult>;
export type ScheduledTask = {
maxAttempts: number;
@ -123,7 +123,13 @@ export class RetryManager {
task.callback(peerId)
]);
if (response?.failure) {
// If timeout resolves first, response will be void (undefined)
// In this case, we should treat it as a timeout error
if (response === undefined) {
throw new Error("Task timeout");
}
if (response.failure) {
throw Error(response.failure.error);
}

View File

@ -1,13 +1,13 @@
import { ProtocolError } from "@waku/interfaces";
import { LightPushError } from "@waku/interfaces";
export const shouldPeerBeChanged = (
failure: string | ProtocolError
failure: string | LightPushError
): boolean => {
const toBeChanged =
failure === ProtocolError.REMOTE_PEER_REJECTED ||
failure === ProtocolError.NO_RESPONSE ||
failure === ProtocolError.RLN_PROOF_GENERATION ||
failure === ProtocolError.NO_PEER_AVAILABLE;
failure === LightPushError.REMOTE_PEER_REJECTED ||
failure === LightPushError.NO_RESPONSE ||
failure === LightPushError.RLN_PROOF_GENERATION ||
failure === LightPushError.NO_PEER_AVAILABLE;
if (toBeChanged) {
return true;

View File

@ -85,7 +85,8 @@ describe("PeerManager", () => {
_clusterId: ClusterId,
_shardId: ShardId
) => true,
isPeerOnTopic: async (_id: PeerId, _topic: string) => true
isPeerOnTopic: async (_id: PeerId, _topic: string) => true,
hasShardInfo: async (_id: PeerId) => true
} as unknown as IConnectionManager;
peerManager = new PeerManager({
libp2p,

View File

@ -4,7 +4,12 @@ import {
PeerId,
TypedEventEmitter
} from "@libp2p/interface";
import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core";
import {
FilterCodecs,
LightPushCodec,
LightPushCodecV2,
StoreCodec
} from "@waku/core";
import {
CONNECTION_LOCKED_TAG,
type IConnectionManager,
@ -28,8 +33,10 @@ type PeerManagerParams = {
connectionManager: IConnectionManager;
};
type SupportedProtocols = Protocols | "light-push-v2";
type GetPeersParams = {
protocol: Protocols;
protocol: SupportedProtocols;
pubsubTopic: string;
};
@ -119,7 +126,7 @@ export class PeerManager {
for (const peer of connectedPeers) {
const hasProtocol = this.hasPeerProtocol(peer, params.protocol);
const hasSamePubsub = await this.connectionManager.isPeerOnTopic(
const hasSamePubsub = await this.isPeerOnPubsub(
peer.id,
params.pubsubTopic
);
@ -204,12 +211,19 @@ export class PeerManager {
private async onConnected(event: CustomEvent<IdentifyResult>): Promise<void> {
const result = event.detail;
if (
result.protocols.includes(this.matchProtocolToCodec(Protocols.Filter))
) {
const isFilterPeer = result.protocols.includes(
this.getProtocolCodecs(Protocols.Filter)
);
const isStorePeer = result.protocols.includes(
this.getProtocolCodecs(Protocols.Store)
);
if (isFilterPeer) {
this.dispatchFilterPeerConnect(result.peerId);
}
if (result.protocols.includes(this.matchProtocolToCodec(Protocols.Store))) {
if (isStorePeer) {
this.dispatchStorePeerConnect(result.peerId);
}
}
@ -230,8 +244,8 @@ export class PeerManager {
}
}
private hasPeerProtocol(peer: Peer, protocol: Protocols): boolean {
return peer.protocols.includes(this.matchProtocolToCodec(protocol));
private hasPeerProtocol(peer: Peer, protocol: SupportedProtocols): boolean {
return peer.protocols.includes(this.getProtocolCodecs(protocol));
}
private lockPeer(id: PeerId): void {
@ -289,14 +303,18 @@ export class PeerManager {
);
}
private matchProtocolToCodec(protocol: Protocols): string {
const protocolToCodec = {
private getProtocolCodecs(protocol: SupportedProtocols): string {
if (protocol === Protocols.Relay) {
throw new Error("Relay protocol is not supported");
}
const protocolToCodecs = {
[Protocols.Filter]: FilterCodecs.SUBSCRIBE,
[Protocols.LightPush]: LightPushCodec,
[Protocols.Store]: StoreCodec,
[Protocols.Relay]: ""
"light-push-v2": LightPushCodecV2
};
return protocolToCodec[protocol];
return protocolToCodecs[protocol];
}
}

View File

@ -443,6 +443,7 @@ describe("QueryOnConnect", () => {
let resolveMessageEvent: (messages: IDecodedMessage[]) => void;
let rejectMessageEvent: (reason: string) => void;
let connectStoreEvent: CustomEvent<PeerId>;
let timeoutId: NodeJS.Timeout;
beforeEach(() => {
// Create a promise that resolves when a message event is emitted
@ -482,6 +483,7 @@ describe("QueryOnConnect", () => {
queryOnConnect.addEventListener(
QueryOnConnectEvent.MessagesRetrieved,
(event: CustomEvent<IDecodedMessage[]>) => {
clearTimeout(timeoutId);
resolveMessageEvent(event.detail);
}
);
@ -491,12 +493,16 @@ describe("QueryOnConnect", () => {
});
// Set a timeout to reject if no message is received
setTimeout(
timeoutId = setTimeout(
() => rejectMessageEvent("No message received within timeout"),
500
);
});
afterEach(() => {
clearTimeout(timeoutId);
});
it("should emit message when we just started and store connect event occurs", async () => {
const mockMessage: IDecodedMessage = {
hash: utf8ToBytes("1234"),

View File

@ -0,0 +1,66 @@
import { IDecodedMessage, ProtocolError } from "@waku/interfaces";
import type { HistoryEntry, MessageId } from "@waku/sds";
export const ReliableChannelEvent = {
/**
* The message is being sent over the wire.
*
* This event may be emitted several times if the retry mechanism kicks in.
*/
SendingMessage: "sending-message",
/**
* The message has been sent over the wire but has not been acknowledged by
* any other party yet.
*
* We are now waiting for acknowledgements.
*
* This event may be emitted several times if the retry mechanism kicks in.
*/
MessageSent: "message-sent",
/**
* A received bloom filter seems to indicate that the message was received
* by another party.
*
* However, this is probabilistic. The retry mechanism will wait a bit longer
* before trying to send the message again.
*/
MessagePossiblyAcknowledged: "message-possibly-acknowledged",
/**
* The message was fully acknowledged by other members of the channel
*/
MessageAcknowledged: "message-acknowledged",
/**
* It was not possible to send the message due to a non-recoverable error,
* most likely an internal error for a developer to resolve.
*/
SendingMessageIrrecoverableError: "sending-message-irrecoverable-error",
/**
* A new message has been received.
*/
MessageReceived: "message-received",
/**
* We are aware of a missing message but failed to retrieve it.
*/
IrretrievableMessage: "irretrievable-message"
};
export type ReliableChannelEvent =
(typeof ReliableChannelEvent)[keyof typeof ReliableChannelEvent];
export interface ReliableChannelEvents {
"sending-message": CustomEvent<MessageId>;
"message-sent": CustomEvent<MessageId>;
"message-possibly-acknowledged": CustomEvent<{
messageId: MessageId;
possibleAckCount: number;
}>;
"message-acknowledged": CustomEvent<MessageId>;
// TODO probably T extends IDecodedMessage?
"message-received": CustomEvent<IDecodedMessage>;
"irretrievable-message": CustomEvent<HistoryEntry>;
"sending-message-irrecoverable-error": CustomEvent<{
messageId: MessageId;
error: ProtocolError;
}>;
}
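// Hedged usage sketch (not part of this change set): how a consumer might listen
// for these events on a channel created elsewhere with `ReliableChannel.create(...)`.
// Event names and payload types come from the definitions above; the
// `reliableChannel` variable is assumed.
//
// reliableChannel.addEventListener(ReliableChannelEvent.MessageSent, (event) => {
//   console.log("sent over the wire, awaiting acks:", event.detail); // MessageId
// });
// reliableChannel.addEventListener(
//   ReliableChannelEvent.MessageAcknowledged,
//   (event) => console.log("acknowledged by the channel:", event.detail)
// );
// reliableChannel.addEventListener(ReliableChannelEvent.MessageReceived, (event) => {
//   console.log("incoming payload:", event.detail.payload); // IDecodedMessage
// });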

View File

@ -0,0 +1,2 @@
export { ReliableChannel, ReliableChannelOptions } from "./reliable_channel.js";
export { ReliableChannelEvents, ReliableChannelEvent } from "./events.js";

View File

@ -0,0 +1,78 @@
import type {
IDecodedMessage,
IDecoder,
QueryRequestParams
} from "@waku/interfaces";
import type { MessageId } from "@waku/sds";
import { Logger } from "@waku/utils";
const log = new Logger("sdk:missing-message-retriever");
const DEFAULT_RETRIEVE_FREQUENCY_MS = 10 * 1000; // 10 seconds
export class MissingMessageRetriever<T extends IDecodedMessage> {
private retrieveInterval: ReturnType<typeof setInterval> | undefined;
private missingMessages: Map<MessageId, Uint8Array<ArrayBufferLike>>; // SDS message id -> Waku message hash (retrieval hint)
public constructor(
private readonly decoder: IDecoder<T>,
private readonly retrieveFrequencyMs: number = DEFAULT_RETRIEVE_FREQUENCY_MS,
private readonly _retrieve: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: Partial<QueryRequestParams>
) => AsyncGenerator<Promise<T | undefined>[]>,
private readonly onMessageRetrieved?: (message: T) => Promise<void>
) {
this.missingMessages = new Map();
}
public start(): void {
if (this.retrieveInterval) {
clearInterval(this.retrieveInterval);
}
if (this.retrieveFrequencyMs !== 0) {
log.info(`start retrieve loop every ${this.retrieveFrequencyMs}ms`);
this.retrieveInterval = setInterval(() => {
void this.retrieveMissingMessage();
}, this.retrieveFrequencyMs);
}
}
public stop(): void {
if (this.retrieveInterval) {
clearInterval(this.retrieveInterval);
}
}
public addMissingMessage(
messageId: MessageId,
retrievalHint: Uint8Array
): void {
if (!this.missingMessages.has(messageId)) {
log.info("missing message notice", messageId, retrievalHint);
this.missingMessages.set(messageId, retrievalHint);
}
}
public removeMissingMessage(messageId: MessageId): void {
if (this.missingMessages.has(messageId)) {
this.missingMessages.delete(messageId);
}
}
private async retrieveMissingMessage(): Promise<void> {
if (this.missingMessages.size) {
const messageHashes = Array.from(this.missingMessages.values());
log.info("attempting to retrieve missing message", messageHashes.length);
for await (const page of this._retrieve([this.decoder], {
messageHashes
})) {
for await (const msg of page) {
if (msg && this.onMessageRetrieved) {
await this.onMessageRetrieved(msg);
}
}
}
}
}
}

View File

@ -0,0 +1,680 @@
import { PeerId, TypedEventEmitter } from "@libp2p/interface";
import { createDecoder, createEncoder } from "@waku/core";
import {
AutoSharding,
HealthStatus,
IDecodedMessage,
IDecoder,
IEncoder,
type IMessage,
ISendOptions,
IWaku,
LightPushError,
LightPushSDKResult,
QueryRequestParams
} from "@waku/interfaces";
import { ContentMessage } from "@waku/sds";
import {
createRoutingInfo,
delay,
MockWakuEvents,
MockWakuNode
} from "@waku/utils";
import { bytesToUtf8, hexToBytes, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { beforeEach, describe } from "mocha";
import sinon from "sinon";
import { ReliableChannel } from "./index.js";
const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto";
const TEST_NETWORK_CONFIG: AutoSharding = {
clusterId: 0,
numShardsInCluster: 1
};
const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, {
contentTopic: TEST_CONTENT_TOPIC
});
describe("Reliable Channel", () => {
let mockWakuNode: IWaku;
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
beforeEach(async () => {
mockWakuNode = new MockWakuNode();
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
});
it("Outgoing message is emitted as sending", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("message in channel");
// Setting up message tracking
const messageId = reliableChannel.send(message);
let messageSending = false;
reliableChannel.addEventListener("sending-message", (event) => {
if (event.detail === messageId) {
messageSending = true;
}
});
while (!messageSending) {
await delay(50);
}
expect(messageSending).to.be.true;
});
it("Outgoing message is emitted as sent", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("message in channel");
const messageId = reliableChannel.send(message);
// Setting up message tracking
let messageSent = false;
reliableChannel.addEventListener("message-sent", (event) => {
if (event.detail === messageId) {
messageSent = true;
}
});
while (!messageSent) {
await delay(50);
}
expect(messageSent).to.be.true;
});
it("Encoder error raises irrecoverable error", async () => {
mockWakuNode.lightPush!.send = (
_encoder: IEncoder,
_message: IMessage,
_sendOptions?: ISendOptions
): Promise<LightPushSDKResult> => {
return Promise.resolve({
failures: [{ error: LightPushError.EMPTY_PAYLOAD }],
successes: []
});
};
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("payload doesnt matter");
encoder.contentTopic = "...";
const messageId = reliableChannel.send(message);
// Setting up message tracking
let irrecoverableError = false;
reliableChannel.addEventListener(
"sending-message-irrecoverable-error",
(event) => {
if (event.detail.messageId === messageId) {
irrecoverableError = true;
}
}
);
while (!irrecoverableError) {
await delay(50);
}
expect(irrecoverableError).to.be.true;
});
it("Outgoing message is not emitted as acknowledged from own outgoing messages", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("first message in channel");
// Setting up message tracking
const messageId = ReliableChannel.getMessageId(message);
let messageAcknowledged = false;
reliableChannel.addEventListener("message-acknowledged", (event) => {
if (event.detail === messageId) {
messageAcknowledged = true;
}
});
reliableChannel.send(message);
// Sending a second message from the same node should not acknowledge the first one
reliableChannel.send(utf8ToBytes("second message in channel"));
expect(messageAcknowledged).to.be.false;
});
it("Outgoing message is possibly acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
// Bob only includes one message in causal history
{ causalHistorySize: 1 }
);
const messages = ["first", "second", "third"].map((m) => {
return utf8ToBytes(m);
});
// Alice sets up message tracking for first message
const firstMessageId = ReliableChannel.getMessageId(messages[0]);
let firstMessagePossiblyAcknowledged = false;
reliableChannelAlice.addEventListener(
"message-possibly-acknowledged",
(event) => {
if (event.detail.messageId === firstMessageId) {
firstMessagePossiblyAcknowledged = true;
}
}
);
let messageReceived = false;
reliableChannelBob.addEventListener("message-received", (event) => {
if (bytesToUtf8(event.detail.payload) === "third") {
messageReceived = true;
}
});
for (const m of messages) {
reliableChannelAlice.send(m);
}
// Wait for Bob to receive the last message to ensure all of them are included in the filter
while (!messageReceived) {
await delay(50);
}
// Bob sends a message now; it should include the first one in its bloom filter
reliableChannelBob.send(utf8ToBytes("message back"));
while (!firstMessagePossiblyAcknowledged) {
await delay(50);
}
expect(firstMessagePossiblyAcknowledged).to.be.true;
});
it("Outgoing message is acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder
);
const message = utf8ToBytes("first message in channel");
const messageId = reliableChannelAlice.send(message);
// Alice sets up message tracking
let messageAcknowledged = false;
reliableChannelAlice.addEventListener("message-acknowledged", (event) => {
if (event.detail === messageId) {
messageAcknowledged = true;
}
});
let bobReceivedMessage = false;
reliableChannelBob.addEventListener("message-received", () => {
bobReceivedMessage = true;
});
// Wait for Bob to receive the message to ensure it's included in causal history
while (!bobReceivedMessage) {
await delay(50);
}
// Bob sends a message now; it should include the first one in its causal history
reliableChannelBob.send(utf8ToBytes("second message in channel"));
while (!messageAcknowledged) {
await delay(50);
}
expect(messageAcknowledged).to.be.true;
});
it("Incoming message is emitted as received", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
let receivedMessage: IDecodedMessage;
reliableChannel.addEventListener("message-received", (event) => {
receivedMessage = event.detail;
});
const message = utf8ToBytes("message in channel");
reliableChannel.send(message);
while (!receivedMessage!) {
await delay(50);
}
expect(bytesToUtf8(receivedMessage!.payload)).to.eq(bytesToUtf8(message));
});
describe("Retries", () => {
it("Outgoing message is retried until acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
retryIntervalMs: 200, // faster for a quick test
processTaskMinElapseMs: 10 // faster so it processes messages as soon as they arrive
}
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
syncMinIntervalMs: 0, // do not send sync messages automatically
maxRetryAttempts: 0 // This one does not perform retries
}
);
const msgTxt = "first message in channel";
const message = utf8ToBytes(msgTxt);
// Let's count how many times Bob receives Alice's message
let messageCount = 0;
reliableChannelBob.addEventListener("message-received", (event) => {
if (bytesToUtf8(event.detail.payload) === msgTxt) {
messageCount++;
}
});
reliableChannelAlice.send(message);
while (messageCount < 1) {
await delay(10);
}
expect(messageCount).to.equal(1, "Bob received Alice's message once");
// No response from Bob should trigger a retry from Alice
while (messageCount < 2) {
await delay(10);
}
expect(messageCount).to.equal(2, "retried once");
// Bob sends a message now; it should include the first one in its causal history
reliableChannelBob.send(utf8ToBytes("second message in channel"));
// Wait long enough to confirm no retry is executed
await delay(300);
// Alice should have stopped sending
expect(messageCount).to.equal(2, "hasn't retried since it's acked");
});
});
// This test fails when run with all the tests in the sdk package;
// no clear reason why, so it is skipped for now.
// TODO: fix this test https://github.com/waku-org/js-waku/issues/2648
describe.skip("Missing Message Retrieval", () => {
it("Automatically retrieves missing message", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
// Setup, Alice first
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
// disable any automation to better control the test
retryIntervalMs: 0,
syncMinIntervalMs: 0,
retrieveFrequencyMs: 0,
processTaskMinElapseMs: 10
}
);
// Bob is offline, Alice sends a message, this is the message we want
// Bob to receive in this test.
const message = utf8ToBytes("missing message");
reliableChannelAlice.send(message);
// Wait to be sent
await new Promise((resolve) => {
reliableChannelAlice.addEventListener("message-sent", resolve, {
once: true
});
});
const sdsMessage = new ContentMessage(
ReliableChannel.getMessageId(message),
"MyChannel",
"alice",
[],
1,
undefined,
message
);
// Now Bob goes online
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
// Stub store.queryGenerator to return a message
const mockMessage = {
payload: sdsMessage.encode()
};
const queryGeneratorStub = sinon.stub().callsFake(async function* (
_decoders: IDecoder<IDecodedMessage>[],
_options?: Partial<QueryRequestParams>
) {
yield [Promise.resolve(mockMessage as IDecodedMessage)];
});
(mockWakuNodeBob.store as any) = {
queryGenerator: queryGeneratorStub
};
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
retryIntervalMs: 0, // disable any automation to better control the test
syncMinIntervalMs: 0,
processTaskMinElapseMs: 10,
retrieveFrequencyMs: 100 // quick loop so the test goes fast
}
);
const waitForMessageRetrieved = new Promise((resolve) => {
reliableChannelBob.addEventListener("message-received", (event) => {
if (bytesToUtf8(event.detail.payload) === "missing message") {
resolve(true);
}
});
setTimeout(() => {
resolve(false);
}, 1000);
});
// Alice sends a sync message; Bob should learn about the missing message
// and retrieve it
await reliableChannelAlice["sendSyncMessage"]();
const messageRetrieved = await waitForMessageRetrieved;
expect(messageRetrieved, "message retrieved").to.be.true;
// Verify the stub was called once with the right messageHash info
expect(queryGeneratorStub.calledOnce, "query generator called once").to.be
.true;
const callArgs = queryGeneratorStub.getCall(0).args;
expect(callArgs[1]).to.have.property("messageHashes");
expect(callArgs[1].messageHashes).to.be.an("array");
});
});
describe("Query On Connect Integration E2E Tests", () => {
let mockWakuNode: MockWakuNode;
let reliableChannel: ReliableChannel<IDecodedMessage>;
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
let mockPeerManagerEvents: TypedEventEmitter<any>;
let queryGeneratorStub: sinon.SinonStub;
let mockPeerId: PeerId;
beforeEach(async () => {
// Setup mock waku node with store capability
mockWakuNode = new MockWakuNode();
// Setup mock peer manager events for QueryOnConnect
mockPeerManagerEvents = new TypedEventEmitter();
(mockWakuNode as any).peerManager = {
events: mockPeerManagerEvents
};
// Setup encoder and decoder
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
// Setup store with queryGenerator for QueryOnConnect
queryGeneratorStub = sinon.stub();
mockWakuNode.store = {
queryGenerator: queryGeneratorStub
} as any;
mockPeerId = {
toString: () => "QmTestPeerId"
} as unknown as PeerId;
});
it("should trigger QueryOnConnect when going offline and store peer reconnects", async () => {
// Create a message that will be auto-retrieved
const messageText = "Auto-retrieved message";
const messagePayload = utf8ToBytes(messageText);
const sdsMessage = new ContentMessage(
ReliableChannel.getMessageId(messagePayload),
"testChannel",
"testSender",
[],
1,
undefined,
messagePayload
);
const autoRetrievedMessage: IDecodedMessage = {
hash: hexToBytes("1234"),
hashStr: "1234",
version: 1,
timestamp: new Date(),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
// Setup queryGenerator to return the auto-retrieved message
queryGeneratorStub.callsFake(async function* () {
yield [Promise.resolve(autoRetrievedMessage)];
});
// Create ReliableChannel with queryOnConnect enabled
reliableChannel = await ReliableChannel.create(
mockWakuNode,
"testChannel",
"testSender",
encoder,
decoder
);
// Wait for initial setup
await delay(50);
// Setup complete - focus on testing QueryOnConnect trigger
// Simulate going offline (change health status)
mockWakuNode.events.dispatchEvent(
new CustomEvent("health", { detail: HealthStatus.Unhealthy })
);
await delay(10);
// Simulate store peer reconnection which should trigger QueryOnConnect
mockPeerManagerEvents.dispatchEvent(
new CustomEvent("store:connect", { detail: mockPeerId })
);
// Wait for store query to be triggered
await delay(200);
// Verify that QueryOnConnect was triggered by the conditions
expect(queryGeneratorStub.called).to.be.true;
});
it("should trigger QueryOnConnect when time threshold is exceeded", async () => {
// Create multiple messages that will be auto-retrieved
const message1Text = "First auto-retrieved message";
const message2Text = "Second auto-retrieved message";
const message1Payload = utf8ToBytes(message1Text);
const message2Payload = utf8ToBytes(message2Text);
const sdsMessage1 = new ContentMessage(
ReliableChannel.getMessageId(message1Payload),
"testChannel",
"testSender",
[],
1,
undefined,
message1Payload
);
const sdsMessage2 = new ContentMessage(
ReliableChannel.getMessageId(message2Payload),
"testChannel",
"testSender",
[],
2,
undefined,
message2Payload
);
const autoRetrievedMessage1: IDecodedMessage = {
hash: hexToBytes("5678"),
hashStr: "5678",
version: 1,
timestamp: new Date(Date.now() - 1000),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage1.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
const autoRetrievedMessage2: IDecodedMessage = {
hash: hexToBytes("9abc"),
hashStr: "9abc",
version: 1,
timestamp: new Date(),
contentTopic: TEST_CONTENT_TOPIC,
pubsubTopic: decoder.pubsubTopic,
payload: sdsMessage2.encode(),
rateLimitProof: undefined,
ephemeral: false,
meta: undefined
};
// Setup queryGenerator to return multiple messages
queryGeneratorStub.callsFake(async function* () {
yield [Promise.resolve(autoRetrievedMessage1)];
yield [Promise.resolve(autoRetrievedMessage2)];
});
// Create ReliableChannel with queryOnConnect enabled
reliableChannel = await ReliableChannel.create(
mockWakuNode,
"testChannel",
"testSender",
encoder,
decoder,
{ queryOnConnect: true }
);
await delay(50);
// Simulate old last successful query by accessing QueryOnConnect internals
// The default threshold is 5 minutes, so we'll set it to an old time
if ((reliableChannel as any).queryOnConnect) {
((reliableChannel as any).queryOnConnect as any).lastSuccessfulQuery =
Date.now() - 6 * 60 * 1000; // 6 minutes ago
}
// Simulate store peer connection which should trigger retrieval due to time threshold
mockPeerManagerEvents.dispatchEvent(
new CustomEvent("store:connect", { detail: mockPeerId })
);
// Wait for store query to be triggered
await delay(200);
// Verify that QueryOnConnect was triggered due to time threshold
expect(queryGeneratorStub.called).to.be.true;
});
});
});

View File

@ -0,0 +1,676 @@
import { TypedEventEmitter } from "@libp2p/interface";
import { messageHash } from "@waku/core";
import {
type Callback,
type IDecodedMessage,
type IDecoder,
type IEncoder,
type IMessage,
ISendOptions,
type IWaku,
LightPushError,
LightPushSDKResult,
QueryRequestParams
} from "@waku/interfaces";
import {
type ChannelId,
isContentMessage,
MessageChannel,
MessageChannelEvent,
type MessageChannelOptions,
Message as SdsMessage,
type SenderId,
SyncMessage
} from "@waku/sds";
import { Logger } from "@waku/utils";
import {
QueryOnConnect,
QueryOnConnectEvent
} from "../query_on_connect/index.js";
import { ReliableChannelEvent, ReliableChannelEvents } from "./events.js";
import { MissingMessageRetriever } from "./missing_message_retriever.js";
import { RetryManager } from "./retry_manager.js";
const log = new Logger("sdk:reliable-channel");
const DEFAULT_SYNC_MIN_INTERVAL_MS = 30 * 1000; // 30 seconds
const DEFAULT_RETRY_INTERVAL_MS = 30 * 1000; // 30 seconds
const DEFAULT_MAX_RETRY_ATTEMPTS = 10;
const DEFAULT_SWEEP_IN_BUF_INTERVAL_MS = 5 * 1000;
const DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS = 1000;
const IRRECOVERABLE_SENDING_ERRORS: LightPushError[] = [
LightPushError.ENCODE_FAILED,
LightPushError.EMPTY_PAYLOAD,
LightPushError.SIZE_TOO_BIG,
LightPushError.RLN_PROOF_GENERATION
];
export type ReliableChannelOptions = MessageChannelOptions & {
/**
* The minimum interval between 2 sync messages in the channel.
*
* Meaning, how frequently we want sync messages in the channel, noting that the
* responsibility of sending a sync message is shared between participants
* of the channel.
*
* `0` means no sync messages will be sent.
*
* @default 30,000 (30 seconds) [[DEFAULT_SYNC_MIN_INTERVAL_MS]]
*/
syncMinIntervalMs?: number;
/**
* How long to wait before re-sending a message that was not acknowledged.
*
* @default 30,000 (30 seconds) [[DEFAULT_RETRY_INTERVAL_MS]]
*/
retryIntervalMs?: number;
/**
* How many times we attempt to resend messages that were not acknowledged.
*
* @default 10 [[DEFAULT_MAX_RETRY_ATTEMPTS]]
*/
maxRetryAttempts?: number;
/**
* How often store queries are done to retrieve missing messages.
*
* @default 10,000 (10 seconds)
*/
retrieveFrequencyMs?: number;
/**
* How often the SDS message channel's incoming buffer is swept.
*
* @default 5,000 (5 seconds) [[DEFAULT_SWEEP_IN_BUF_INTERVAL_MS]]
*/
sweepInBufIntervalMs?: number;
/**
* Whether to automatically do a store query after connecting to store nodes.
*
* @default true
*/
queryOnConnect?: boolean;
/**
* Whether to auto-start the message channel.
*
* @default true
*/
autoStart?: boolean;
/** The minimum elapsed time between calls to the underlying channel's process
* task for incoming messages. This avoids overload when processing
* a lot of messages.
*
* @default 1000 (1 second)
*/
processTaskMinElapseMs?: number;
};
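// Illustrative only (an assumption, not part of this diff): every field above is
// optional and is merged with the defaults defined at the top of this file.
//
// const options: ReliableChannelOptions = {
//   syncMinIntervalMs: 60 * 1000, // sync at most once a minute on average
//   retryIntervalMs: 15 * 1000,   // retry unacknowledged messages sooner
//   maxRetryAttempts: 5,
//   queryOnConnect: true          // query store nodes on (re)connection
// };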
/**
* An easy-to-use reliable channel that ensures all participants of the channel have eventual message consistency.
*
* Use events to track:
* - whether your outgoing messages are sent, acknowledged, or error out
* - new incoming messages
* @emits [[ReliableChannelEvents]]
*
*/
export class ReliableChannel<
T extends IDecodedMessage
> extends TypedEventEmitter<ReliableChannelEvents> {
private readonly _send: (
encoder: IEncoder,
message: IMessage,
sendOptions?: ISendOptions
) => Promise<LightPushSDKResult>;
private readonly _subscribe: (
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>
) => Promise<boolean>;
private readonly _retrieve?: <T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: Partial<QueryRequestParams>
) => AsyncGenerator<Promise<T | undefined>[]>;
private readonly syncMinIntervalMs: number;
private syncTimeout: ReturnType<typeof setTimeout> | undefined;
private sweepInBufInterval: ReturnType<typeof setInterval> | undefined;
private readonly sweepInBufIntervalMs: number;
private processTaskTimeout: ReturnType<typeof setTimeout> | undefined;
private readonly retryManager: RetryManager | undefined;
private readonly missingMessageRetriever?: MissingMessageRetriever<T>;
private readonly queryOnConnect?: QueryOnConnect<T>;
private readonly processTaskMinElapseMs: number;
private _started: boolean;
private constructor(
public node: IWaku,
public messageChannel: MessageChannel,
private encoder: IEncoder,
private decoder: IDecoder<T>,
options?: ReliableChannelOptions
) {
super();
if (node.lightPush) {
this._send = node.lightPush.send.bind(node.lightPush);
} else if (node.relay) {
this._send = node.relay.send.bind(node.relay);
} else {
throw "No protocol available to send messages";
}
if (node.filter) {
this._subscribe = node.filter.subscribe.bind(node.filter);
} else if (node.relay) {
// TODO: Why do relay and filter have different interfaces?
// this._subscribe = node.relay.subscribeWithUnsubscribe;
throw "Not implemented";
} else {
throw "No protocol available to receive messages";
}
if (node.store) {
this._retrieve = node.store.queryGenerator.bind(node.store);
const peerManagerEvents = (node as any)?.peerManager?.events;
if (
peerManagerEvents !== undefined &&
(options?.queryOnConnect ?? true)
) {
log.info("auto-query enabled");
this.queryOnConnect = new QueryOnConnect(
[this.decoder],
peerManagerEvents,
node.events,
this._retrieve.bind(this)
);
}
}
this.syncMinIntervalMs =
options?.syncMinIntervalMs ?? DEFAULT_SYNC_MIN_INTERVAL_MS;
this.sweepInBufIntervalMs =
options?.sweepInBufIntervalMs ?? DEFAULT_SWEEP_IN_BUF_INTERVAL_MS;
const retryIntervalMs =
options?.retryIntervalMs ?? DEFAULT_RETRY_INTERVAL_MS;
const maxRetryAttempts =
options?.maxRetryAttempts ?? DEFAULT_MAX_RETRY_ATTEMPTS;
if (retryIntervalMs && maxRetryAttempts) {
// TODO: there is a lot to improve, e.g. no point retrying to send if the node is offline.
this.retryManager = new RetryManager(retryIntervalMs, maxRetryAttempts);
}
this.processTaskMinElapseMs =
options?.processTaskMinElapseMs ?? DEFAULT_PROCESS_TASK_MIN_ELAPSE_MS;
if (this._retrieve) {
this.missingMessageRetriever = new MissingMessageRetriever(
this.decoder,
options?.retrieveFrequencyMs,
this._retrieve,
async (msg: T) => {
await this.processIncomingMessage(msg);
}
);
}
this._started = false;
}
public get isStarted(): boolean {
return this._started;
}
/**
* Used to identify messages: pass the payload of a message you are
* about to send to track the events emitted for this message.
* The payload is taken before SDS wrapping.
* @param messagePayload
*/
public static getMessageId(messagePayload: Uint8Array): string {
return MessageChannel.getMessageId(messagePayload);
}
/**
* Create a new message channel. Message channels enable end-to-end
* reliability by ensuring that all messages in the channel are received
* by other participants and retrieved by this local node.
*
* emits events about outgoing messages, see [[`ReliableChannel`]] docs.
*
* Note that all participants in a message channel need to get the messages
* from the channel. Meaning:
* - all participants must be able to decrypt the messages
* - all participants must subscribe to the content topic(s) where the messages are sent
*
* @param node The waku node to use to send and receive messages
* @param channelId An id for the channel, all participants of the channel should use the same id
* @param senderId An id for the sender, to ensure acknowledgements are only valid if originating from someone else; best if persisted between sessions
* @param encoder A channel operates within a singular encryption layer, hence the same encoder is needed for all messages
* @param decoder A channel operates within a singular encryption layer, hence the same decoder is needed for all messages
* @param options
*/
public static async create<T extends IDecodedMessage>(
node: IWaku,
channelId: ChannelId,
senderId: SenderId,
encoder: IEncoder,
decoder: IDecoder<T>,
options?: ReliableChannelOptions
): Promise<ReliableChannel<T>> {
const sdsMessageChannel = new MessageChannel(channelId, senderId, options);
const messageChannel = new ReliableChannel(
node,
sdsMessageChannel,
encoder,
decoder,
options
);
const autoStart = options?.autoStart ?? true;
if (autoStart) {
await messageChannel.start();
}
return messageChannel;
}
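// A minimal creation sketch (assumptions: `node` is an IWaku node built by the
// caller, e.g. with the SDK's createLightNode, and `encoder`/`decoder` were
// built with createEncoder/createDecoder as in the tests of this package):
//
// const channel = await ReliableChannel.create(
//   node,         // needs light push (or relay) to send and filter to receive
//   "my-channel", // every participant must use the same ChannelId
//   "alice",      // a stable SenderId for this participant
//   encoder,
//   decoder
// );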
/**
* Sends a message in the channel; it will attempt to re-send if not acknowledged
* by other participants.
*
* @param messagePayload
* @returns the message id
*/
public send(messagePayload: Uint8Array): string {
const messageId = ReliableChannel.getMessageId(messagePayload);
if (!this._started) {
this.safeSendEvent("sending-message-irrecoverable-error", {
detail: { messageId: messageId, error: "channel is not started" }
});
}
const wrapAndSendBind = this._wrapAndSend.bind(this, messagePayload);
this.retryManager?.startRetries(messageId, wrapAndSendBind);
wrapAndSendBind();
return messageId;
}
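// Minimal sending sketch (an assumption, not part of this change): the returned
// id, identical to ReliableChannel.getMessageId(payload), lets the caller match
// the events emitted for this specific message.
//
// const payload = utf8ToBytes("hello channel");
// const id = channel.send(payload);
// channel.addEventListener(ReliableChannelEvent.MessageAcknowledged, (event) => {
//   if (event.detail === id) {
//     console.log("message acknowledged by other participants");
//   }
// });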
private _wrapAndSend(messagePayload: Uint8Array): void {
this.messageChannel.pushOutgoingMessage(
messagePayload,
async (
sdsMessage: SdsMessage
): Promise<{ success: boolean; retrievalHint?: Uint8Array }> => {
// Callback is called once the message has been added to the SDS outgoing queue
// We start by trying to send the message now.
// `payload` wrapped in SDS
const sdsPayload = sdsMessage.encode();
const wakuMessage = {
payload: sdsPayload
};
const messageId = ReliableChannel.getMessageId(messagePayload);
// TODO: should the encoder give me the message hash?
// Encoding now to fail early, used later to get message hash
const protoMessage = await this.encoder.toProtoObj(wakuMessage);
if (!protoMessage) {
this.safeSendEvent("sending-message-irrecoverable-error", {
detail: {
messageId: messageId,
error: "could not encode message"
}
});
return { success: false };
}
const retrievalHint = messageHash(
this.encoder.pubsubTopic,
protoMessage
);
this.safeSendEvent("sending-message", {
detail: messageId
});
const sendRes = await this._send(this.encoder, wakuMessage);
// If it's a recoverable failure, we will try again to send later
// If not, then we should error to the user now
for (const { error } of sendRes.failures) {
if (IRRECOVERABLE_SENDING_ERRORS.includes(error)) {
// Not recoverable, best to return it
log.error("Irrecoverable error, cannot send message: ", error);
this.safeSendEvent("sending-message-irrecoverable-error", {
detail: {
messageId,
error
}
});
return { success: false, retrievalHint };
}
}
return {
success: true,
retrievalHint
};
}
);
// Process outgoing messages straight away
this.messageChannel
.processTasks()
.then(() => {
this.messageChannel.sweepOutgoingBuffer();
})
.catch((err) => {
log.error("error encountered when processing sds tasks", err);
});
}
private async subscribe(): Promise<boolean> {
this.assertStarted();
return this._subscribe(this.decoder, async (message: T) => {
await this.processIncomingMessage(message);
});
}
/**
* Don't forget to call `this.messageChannel.sweepIncomingBuffer();` once done.
* @param msg
* @private
*/
private async processIncomingMessage<T extends IDecodedMessage>(
msg: T
): Promise<void> {
// New message arrives, we need to unwrap it first
const sdsMessage = SdsMessage.decode(msg.payload);
if (!sdsMessage) {
log.error("could not SDS decode message", msg);
return;
}
if (sdsMessage.channelId !== this.messageChannel.channelId) {
log.warn(
"ignoring message with different channel id",
sdsMessage.channelId
);
return;
}
const retrievalHint = msg.hash;
log.info(`processing message ${sdsMessage.messageId}:${msg.hashStr}`);
// SDS Message decoded, let's pass it to the channel so we can learn about
// missing messages or the status of previous outgoing messages
this.messageChannel.pushIncomingMessage(sdsMessage, retrievalHint);
this.missingMessageRetriever?.removeMissingMessage(sdsMessage.messageId);
if (sdsMessage.content && sdsMessage.content.length > 0) {
// Now, process the message with callback
// Overrides msg.payload with unwrapped payload
// TODO: can we do better?
const { payload: _p, ...allButPayload } = msg;
const unwrappedMessage = Object.assign(allButPayload, {
payload: sdsMessage.content,
hash: msg.hash,
hashStr: msg.hashStr,
version: msg.version,
contentTopic: msg.contentTopic,
pubsubTopic: msg.pubsubTopic,
timestamp: msg.timestamp,
rateLimitProof: msg.rateLimitProof,
ephemeral: msg.ephemeral,
meta: msg.meta
});
this.safeSendEvent("message-received", {
detail: unwrappedMessage as unknown as T
});
}
this.queueProcessTasks();
}
private async processIncomingMessages<T extends IDecodedMessage>(
messages: T[]
): Promise<void> {
for (const message of messages) {
await this.processIncomingMessage(message);
}
}
// TODO: For now we only queue process tasks for incoming messages
// As this is where there is most volume
private queueProcessTasks(): void {
// If one is already queued, then we can ignore it
if (this.processTaskTimeout === undefined) {
this.processTaskTimeout = setTimeout(() => {
void this.messageChannel.processTasks().catch((err) => {
log.error("error encountered when processing sds tasks", err);
});
// Clear timeout once triggered
clearTimeout(this.processTaskTimeout);
this.processTaskTimeout = undefined;
}, this.processTaskMinElapseMs); // ensure we don't call processTasks more often than processTaskMinElapseMs allows
}
}
public async start(): Promise<boolean> {
if (this._started) return true;
this._started = true;
this.setupEventListeners();
this.restartSync();
this.startSweepIncomingBufferLoop();
if (this._retrieve) {
this.missingMessageRetriever?.start();
this.queryOnConnect?.start();
}
return this.subscribe();
}
public stop(): void {
if (!this._started) return;
this._started = false;
this.stopSync();
this.stopSweepIncomingBufferLoop();
this.missingMessageRetriever?.stop();
this.queryOnConnect?.stop();
// TODO unsubscribe
// TODO unsetMessageListeners
}
private assertStarted(): void {
if (!this._started) throw Error("Message Channel must be started");
}
private startSweepIncomingBufferLoop(): void {
this.stopSweepIncomingBufferLoop();
this.sweepInBufInterval = setInterval(() => {
log.info("sweep incoming buffer");
this.messageChannel.sweepIncomingBuffer();
}, this.sweepInBufIntervalMs);
}
private stopSweepIncomingBufferLoop(): void {
if (this.sweepInBufInterval) clearInterval(this.sweepInBufInterval);
}
private restartSync(multiplier: number = 1): void {
if (this.syncTimeout) {
clearTimeout(this.syncTimeout);
}
if (this.syncMinIntervalMs) {
const timeoutMs = this.random() * this.syncMinIntervalMs * multiplier;
this.syncTimeout = setTimeout(() => {
void this.sendSyncMessage();
// Always restart a sync, no matter whether the message was sent.
// Set a multiplier so we wait a bit longer to not hog the conversation
void this.restartSync(2);
}, timeoutMs);
}
}
private stopSync(): void {
if (this.syncTimeout) {
clearTimeout(this.syncTimeout);
}
}
// Used to enable overriding when testing
private random(): number {
return Math.random();
}
private safeSendEvent<T extends ReliableChannelEvent>(
event: T,
eventInit?: CustomEventInit
): void {
try {
this.dispatchEvent(new CustomEvent(event, eventInit));
} catch (error) {
log.error(`Failed to dispatch event ${event}:`, error);
}
}
private async sendSyncMessage(): Promise<void> {
this.assertStarted();
await this.messageChannel.pushOutgoingSyncMessage(
async (syncMessage: SyncMessage): Promise<boolean> => {
// Callback is called once the message has been added to the SDS outgoing queue
// We start by trying to send the message now.
// `payload` wrapped in SDS
const sdsPayload = syncMessage.encode();
const wakuMessage = {
payload: sdsPayload
};
const sendRes = await this._send(this.encoder, wakuMessage);
if (sendRes.failures.length > 0) {
log.error("Error sending sync message: ", sendRes);
return false;
}
return true;
}
);
// Process outgoing messages straight away
// TODO: review and optimize
await this.messageChannel.processTasks();
this.messageChannel.sweepOutgoingBuffer();
}
private setupEventListeners(): void {
this.messageChannel.addEventListener(
MessageChannelEvent.OutMessageSent,
(event) => {
if (event.detail.content) {
const messageId = ReliableChannel.getMessageId(event.detail.content);
this.safeSendEvent("message-sent", {
detail: messageId
});
}
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.OutMessageAcknowledged,
(event) => {
if (event.detail) {
this.safeSendEvent("message-acknowledged", {
detail: event.detail
});
// Stopping retries
this.retryManager?.stopRetries(event.detail);
}
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.OutMessagePossiblyAcknowledged,
(event) => {
if (event.detail) {
this.safeSendEvent("message-possibly-acknowledged", {
detail: {
messageId: event.detail.messageId,
possibleAckCount: event.detail.count
}
});
}
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.InSyncReceived,
(_event) => {
// restart the timeout when a sync message has been received
this.restartSync();
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.InMessageReceived,
(event) => {
// restart the timeout when a content message has been received
if (isContentMessage(event.detail)) {
// send a sync message faster to ack someone else's message
this.restartSync(0.5);
}
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.OutMessageSent,
(event) => {
// restart the timeout when a content message has been sent
if (isContentMessage(event.detail)) {
this.restartSync();
}
}
);
this.messageChannel.addEventListener(
MessageChannelEvent.InMessageMissing,
(event) => {
for (const { messageId, retrievalHint } of event.detail) {
if (retrievalHint && this.missingMessageRetriever) {
this.missingMessageRetriever.addMissingMessage(
messageId,
retrievalHint
);
}
}
}
);
if (this.queryOnConnect) {
this.queryOnConnect.addEventListener(
QueryOnConnectEvent.MessagesRetrieved,
(event) => {
void this.processIncomingMessages(event.detail);
}
);
}
}
}
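// Minimal consumer sketch (hedged; uses only the public surface exercised by the tests below):
// const channel = await ReliableChannel.create(node, "MyChannel", "alice", encoder, decoder);
// const id = ReliableChannel.getMessageId(payload);
// channel.addEventListener("message-acknowledged", (e) => { /* e.detail === id once acked */ });
// channel.addEventListener("message-received", (e) => { /* e.detail is the decoded message */ });
// channel.send(payload);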

View File

@ -0,0 +1,187 @@
import { TypedEventEmitter } from "@libp2p/interface";
import { createDecoder, createEncoder } from "@waku/core";
import {
AutoSharding,
IDecodedMessage,
IDecoder,
IEncoder
} from "@waku/interfaces";
import {
createRoutingInfo,
delay,
MockWakuEvents,
MockWakuNode
} from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { beforeEach, describe } from "mocha";
import { ReliableChannel } from "./index.js";
const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto";
const TEST_NETWORK_CONFIG: AutoSharding = {
clusterId: 0,
numShardsInCluster: 1
};
const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, {
contentTopic: TEST_CONTENT_TOPIC
});
describe("Reliable Channel: Acks", () => {
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
beforeEach(async () => {
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
});
it("Outgoing message is acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder
);
const message = utf8ToBytes("first message in channel");
// Alice sets up message tracking
const messageId = ReliableChannel.getMessageId(message);
let messageReceived = false;
reliableChannelBob.addEventListener("message-received", (event) => {
if (bytesToUtf8(event.detail.payload) === "first message in channel") {
messageReceived = true;
}
});
let messageAcknowledged = false;
reliableChannelAlice.addEventListener("message-acknowledged", (event) => {
if (event.detail === messageId) {
messageAcknowledged = true;
}
});
reliableChannelAlice.send(message);
// Wait for Bob to receive the message to ensure he includes it in his causal history
while (!messageReceived) {
await delay(50);
}
// Bob sends a message now; it should include the first one in its causal history
reliableChannelBob.send(utf8ToBytes("second message in channel"));
while (!messageAcknowledged) {
await delay(50);
}
expect(messageAcknowledged).to.be.true;
});
it("Re-sent message is acknowledged once other parties join.", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
// Setup, Alice first
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
retryIntervalMs: 0, // disable any automation to better control the test
syncMinIntervalMs: 0,
processTaskMinElapseMs: 10
}
);
// Bob is offline. Alice sends a message; this is the message we want
// acknowledged in this test.
const message = utf8ToBytes("message to be acknowledged");
const messageId = ReliableChannel.getMessageId(message);
let messageAcknowledged = false;
reliableChannelAlice.addEventListener("message-acknowledged", (event) => {
if (event.detail === messageId) {
messageAcknowledged = true;
}
});
reliableChannelAlice.send(message);
// Wait a bit to ensure Bob does not receive the message
await delay(100);
// Now Bob goes online
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
retryIntervalMs: 0, // disable any automation to better control the test
syncMinIntervalMs: 0,
processTaskMinElapseMs: 10
}
);
// Track when Bob receives the message
let bobReceivedMessage = false;
reliableChannelBob.addEventListener("message-received", (event) => {
if (bytesToUtf8(event.detail.payload!) === "message to be acknowledged") {
bobReceivedMessage = true;
}
});
// Some sync messages are exchanged
await reliableChannelAlice["sendSyncMessage"]();
await reliableChannelBob["sendSyncMessage"]();
// wait a bit to ensure messages are processed
await delay(100);
// Some content messages are exchanged too
reliableChannelAlice.send(utf8ToBytes("some message"));
reliableChannelBob.send(utf8ToBytes("some other message"));
// wait a bit to ensure messages are processed
await delay(100);
// At this point, the message shouldn't be acknowledged yet as Bob
// does not have a complete log
expect(messageAcknowledged).to.be.false;
// Now Alice resends the message
reliableChannelAlice.send(message);
// Wait for Bob to receive the message
while (!bobReceivedMessage) {
await delay(50);
}
// Bob receives it, and should include it in its sync
await reliableChannelBob["sendSyncMessage"]();
while (!messageAcknowledged) {
await delay(50);
}
// The sync should acknowledge the message
expect(messageAcknowledged).to.be.true;
});
});

View File

@ -0,0 +1,326 @@
import { TypedEventEmitter } from "@libp2p/interface";
import {
AutoSharding,
IDecodedMessage,
IDecoder,
IEncoder,
type IMessage,
ISendOptions,
IWaku,
LightPushError,
LightPushSDKResult
} from "@waku/interfaces";
import { generatePrivateKey, getPublicKey } from "@waku/message-encryption";
import {
createDecoder as createEciesDecoder,
createEncoder as createEciesEncoder
} from "@waku/message-encryption/ecies";
import {
createRoutingInfo,
delay,
MockWakuEvents,
MockWakuNode
} from "@waku/utils";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { beforeEach, describe } from "mocha";
import { ReliableChannel } from "./index.js";
const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto";
const TEST_NETWORK_CONFIG: AutoSharding = {
clusterId: 0,
numShardsInCluster: 1
};
const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, {
contentTopic: TEST_CONTENT_TOPIC
});
describe("Reliable Channel: Encryption", () => {
let mockWakuNode: IWaku;
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
beforeEach(async () => {
mockWakuNode = new MockWakuNode();
const privateKey = generatePrivateKey();
const publicKey = getPublicKey(privateKey);
encoder = createEciesEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO,
publicKey
});
decoder = createEciesDecoder(
TEST_CONTENT_TOPIC,
TEST_ROUTING_INFO,
privateKey
);
});
it("Outgoing message is emitted as sending", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("message in channel");
// Setting up message tracking
const messageId = ReliableChannel.getMessageId(message);
let messageSending = false;
reliableChannel.addEventListener("sending-message", (event) => {
if (event.detail === messageId) {
messageSending = true;
}
});
reliableChannel.send(message);
while (!messageSending) {
await delay(50);
}
expect(messageSending).to.be.true;
});
it("Outgoing message is emitted as sent", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("message in channel");
// Setting up message tracking
const messageId = ReliableChannel.getMessageId(message);
let messageSent = false;
reliableChannel.addEventListener("message-sent", (event) => {
if (event.detail === messageId) {
messageSent = true;
}
});
reliableChannel.send(message);
while (!messageSent) {
await delay(50);
}
expect(messageSent).to.be.true;
});
it("Encoder error raises irrecoverable error", async () => {
mockWakuNode.lightPush!.send = (
_encoder: IEncoder,
_message: IMessage,
_sendOptions?: ISendOptions
): Promise<LightPushSDKResult> => {
return Promise.resolve({
failures: [{ error: LightPushError.EMPTY_PAYLOAD }],
successes: []
});
};
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("payload doesnt matter");
// Setting up message tracking
const messageId = ReliableChannel.getMessageId(message);
let irrecoverableError = false;
reliableChannel.addEventListener(
"sending-message-irrecoverable-error",
(event) => {
if (event.detail.messageId === messageId) {
irrecoverableError = true;
}
}
);
encoder.contentTopic = "...";
reliableChannel.send(message);
while (!irrecoverableError) {
await delay(50);
}
expect(irrecoverableError).to.be.true;
});
it("Outgoing message is not emitted as acknowledged from own outgoing messages", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
const message = utf8ToBytes("first message in channel");
// Setting up message tracking
const messageId = ReliableChannel.getMessageId(message);
let messageAcknowledged = false;
reliableChannel.addEventListener("message-acknowledged", (event) => {
if (event.detail === messageId) {
messageAcknowledged = true;
}
});
reliableChannel.send(message);
// Sending a second message from the same node should not acknowledge the first one
reliableChannel.send(utf8ToBytes("second message in channel"));
// Wait a bit to be sure no event is emitted
await delay(200);
expect(messageAcknowledged).to.be.false;
});
it("Outgoing message is possibly acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
// Bob only includes one message in causal history
{ causalHistorySize: 1 }
);
const messages = ["first", "second", "third"].map((m) => {
return utf8ToBytes(m);
});
// Alice sets up message tracking for first message
const firstMessageId = ReliableChannel.getMessageId(messages[0]);
let firstMessagePossiblyAcknowledged = false;
reliableChannelAlice.addEventListener(
"message-possibly-acknowledged",
(event) => {
if (event.detail.messageId === firstMessageId) {
firstMessagePossiblyAcknowledged = true;
}
}
);
let bobMessageReceived = 0;
reliableChannelAlice.addEventListener("message-received", () => {
bobMessageReceived++;
});
for (const m of messages) {
reliableChannelAlice.send(m);
}
// Wait for Bob to receive all messages to ensure filter is updated
while (bobMessageReceived < 3) {
await delay(50);
}
// Bob sends a message now; it should include the first one in its bloom filter
reliableChannelBob.send(utf8ToBytes("message back"));
while (!firstMessagePossiblyAcknowledged) {
await delay(50);
}
expect(firstMessagePossiblyAcknowledged).to.be.true;
});
it("Outgoing message is acknowledged", async () => {
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder
);
const message = utf8ToBytes("first message in channel");
// Alice sets up message tracking
const messageId = ReliableChannel.getMessageId(message);
let messageAcknowledged = false;
reliableChannelAlice.addEventListener("message-acknowledged", (event) => {
if (event.detail === messageId) {
messageAcknowledged = true;
}
});
let bobReceivedMessage = false;
reliableChannelBob.addEventListener("message-received", () => {
bobReceivedMessage = true;
});
reliableChannelAlice.send(message);
// Wait for Bob to receive the message
while (!bobReceivedMessage) {
await delay(50);
}
// Bob sends a message now; it should include the first one in its causal history
reliableChannelBob.send(utf8ToBytes("second message in channel"));
while (!messageAcknowledged) {
await delay(50);
}
expect(messageAcknowledged).to.be.true;
});
it("Incoming message is emitted as received", async () => {
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder
);
let receivedMessage: IDecodedMessage;
reliableChannel.addEventListener("message-received", (event) => {
receivedMessage = event.detail;
});
const message = utf8ToBytes("message in channel");
reliableChannel.send(message);
while (!receivedMessage!) {
await delay(50);
}
expect(bytesToUtf8(receivedMessage!.payload)).to.eq(bytesToUtf8(message));
});
});

View File

@ -0,0 +1,332 @@
import { TypedEventEmitter } from "@libp2p/interface";
import { createDecoder, createEncoder } from "@waku/core";
import {
AutoSharding,
IDecodedMessage,
IDecoder,
IEncoder,
IWaku
} from "@waku/interfaces";
import { MessageChannelEvent } from "@waku/sds";
import {
createRoutingInfo,
delay,
MockWakuEvents,
MockWakuNode
} from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { beforeEach, describe } from "mocha";
import { ReliableChannel } from "./index.js";
const TEST_CONTENT_TOPIC = "/my-tests/0/topic-name/proto";
const TEST_NETWORK_CONFIG: AutoSharding = {
clusterId: 0,
numShardsInCluster: 1
};
const TEST_ROUTING_INFO = createRoutingInfo(TEST_NETWORK_CONFIG, {
contentTopic: TEST_CONTENT_TOPIC
});
describe("Reliable Channel: Sync", () => {
let mockWakuNode: IWaku;
let encoder: IEncoder;
let decoder: IDecoder<IDecodedMessage>;
beforeEach(async () => {
mockWakuNode = new MockWakuNode();
encoder = createEncoder({
contentTopic: TEST_CONTENT_TOPIC,
routingInfo: TEST_ROUTING_INFO
});
decoder = createDecoder(TEST_CONTENT_TOPIC, TEST_ROUTING_INFO);
});
it("Sync message is sent within sync frequency", async () => {
const syncMinIntervalMs = 100;
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder,
{
syncMinIntervalMs
}
);
let syncMessageSent = false;
reliableChannel.messageChannel.addEventListener(
MessageChannelEvent.OutSyncSent,
(_event) => {
syncMessageSent = true;
}
);
await delay(syncMinIntervalMs);
expect(syncMessageSent).to.be.true;
});
it("Sync message are not sent excessively within sync frequency", async () => {
const syncMinIntervalMs = 100;
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder,
{
syncMinIntervalMs
}
);
let syncMessageSentCount = 0;
reliableChannel.messageChannel.addEventListener(
MessageChannelEvent.OutSyncSent,
(_event) => {
syncMessageSentCount++;
}
);
await delay(syncMinIntervalMs);
// There is randomness to this, but it should not be excessive
expect(syncMessageSentCount).to.be.lessThan(3);
});
it("Sync message is not sent if another sync message was just received", async function () {
this.timeout(5000);
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const syncMinIntervalMs = 1000;
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
syncMinIntervalMs: 0, // does not send sync messages automatically
processTaskMinElapseMs: 10
}
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
syncMinIntervalMs,
processTaskMinElapseMs: 10
}
);
(reliableChannelBob as any).random = () => {
return 1;
}; // will wait a full second
let syncMessageSent = false;
reliableChannelBob.messageChannel.addEventListener(
MessageChannelEvent.OutSyncSent,
(_event) => {
syncMessageSent = true;
}
);
while (!syncMessageSent) {
// Bob will send a sync message as soon as it starts; we are waiting for this one
await delay(100);
}
// Let's reset the tracker
syncMessageSent = false;
// We should be faster than Bob as Bob will "randomly" wait a full second
await reliableChannelAlice["sendSyncMessage"]();
// Bob should be waiting a full second before sending a message after Alice
await delay(900);
// Now, let's wait for Bob to send the sync message
await delay(200);
expect(syncMessageSent).to.be.true;
});
it("Sync message is not sent if another non-ephemeral message was just received", async function () {
this.timeout(5000);
const commonEventEmitter = new TypedEventEmitter<MockWakuEvents>();
const mockWakuNodeAlice = new MockWakuNode(commonEventEmitter);
const mockWakuNodeBob = new MockWakuNode(commonEventEmitter);
const syncMinIntervalMs = 1000;
const reliableChannelAlice = await ReliableChannel.create(
mockWakuNodeAlice,
"MyChannel",
"alice",
encoder,
decoder,
{
syncMinIntervalMs: 0, // does not send sync messages automatically
processTaskMinElapseMs: 10
}
);
const reliableChannelBob = await ReliableChannel.create(
mockWakuNodeBob,
"MyChannel",
"bob",
encoder,
decoder,
{
syncMinIntervalMs,
processTaskMinElapseMs: 10
}
);
(reliableChannelBob as any).random = () => {
return 1;
}; // will wait a full second
let syncMessageSent = false;
reliableChannelBob.messageChannel.addEventListener(
MessageChannelEvent.OutSyncSent,
(_event) => {
syncMessageSent = true;
}
);
while (!syncMessageSent) {
// Bob will send a sync message as soon as it starts; we are waiting for this one
await delay(100);
}
// Let's reset the tracker
syncMessageSent = false;
// We should be faster than Bob as Bob will "randomly" wait a full second
reliableChannelAlice.send(utf8ToBytes("some message"));
// Bob should be waiting a full second before sending a message after Alice
await delay(900);
// Now, let's wait for Bob to send the sync message
await delay(200);
expect(syncMessageSent).to.be.true;
});
it("Sync message is not sent if another sync message was just sent", async function () {
this.timeout(5000);
const syncMinIntervalMs = 1000;
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder,
{ syncMinIntervalMs }
);
(reliableChannel as any).random = () => {
return 1;
}; // will wait a full second
let syncMessageSent = false;
reliableChannel.messageChannel.addEventListener(
MessageChannelEvent.OutSyncSent,
(_event) => {
syncMessageSent = true;
}
);
while (!syncMessageSent) {
// It will send a sync message as soon as it starts; we are waiting for this one
await delay(100);
}
// Let's reset the tracker
syncMessageSent = false;
// We should be faster than automated sync as it will "randomly" wait a full second
await reliableChannel["sendSyncMessage"]();
// The automated sync should wait a full second after the manual one
await delay(900);
// Now, let's wait for the automated sync message to be sent
await delay(200);
expect(syncMessageSent).to.be.true;
});
it("Sync message is not sent if another non-ephemeral message was just sent", async function () {
this.timeout(5000);
const syncMinIntervalMs = 1000;
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder,
{ syncMinIntervalMs }
);
(reliableChannel as any).random = () => {
return 1;
}; // will wait a full second
let syncMessageSent = false;
reliableChannel.messageChannel.addEventListener(
MessageChannelEvent.OutSyncSent,
(_event) => {
syncMessageSent = true;
}
);
while (!syncMessageSent) {
// It will send a sync message as soon as it starts; we are waiting for this one
await delay(100);
}
// Let's reset the tracker
syncMessageSent = false;
// We should be faster than automated sync as it will "randomly" wait a full second
reliableChannel.send(utf8ToBytes("non-ephemeral message"));
// The automated sync should wait a full second after the content message
await delay(900);
// Now, let's wait for the automated sync message to be sent
await delay(200);
expect(syncMessageSent).to.be.true;
});
it("Own sync message does not acknowledge own messages", async () => {
const syncMinIntervalMs = 100;
const reliableChannel = await ReliableChannel.create(
mockWakuNode,
"MyChannel",
"alice",
encoder,
decoder,
{
syncMinIntervalMs
}
);
const msg = utf8ToBytes("some message");
const msgId = ReliableChannel.getMessageId(msg);
let messageAcknowledged = false;
reliableChannel.messageChannel.addEventListener(
MessageChannelEvent.OutMessageAcknowledged,
(event) => {
if (event.detail === msgId) messageAcknowledged = true;
}
);
reliableChannel.send(msg);
await delay(syncMinIntervalMs * 2);
// Own sync messages must not acknowledge own outgoing messages
expect(messageAcknowledged).to.be.false;
});
});

View File

@ -0,0 +1,48 @@
import { delay } from "@waku/utils";
import { expect } from "chai";
import { RetryManager } from "./retry_manager.js";
describe("Retry Manager", () => {
it("Retries within given interval", async function () {
const retryManager = new RetryManager(100, 1);
let retryCount = 0;
retryManager.startRetries("1", () => {
retryCount++;
});
await delay(110);
expect(retryCount).to.equal(1);
});
it("Retries within maximum given attempts", async function () {
const maxAttempts = 5;
const retryManager = new RetryManager(10, maxAttempts);
let retryCount = 0;
retryManager.startRetries("1", () => {
retryCount++;
});
await delay(200);
expect(retryCount).to.equal(maxAttempts);
});
it("Wait given interval before re-trying", async function () {
const retryManager = new RetryManager(100, 1);
let retryCount = 0;
retryManager.startRetries("1", () => {
retryCount++;
});
await delay(90);
expect(retryCount).to.equal(0);
await delay(110);
expect(retryCount).to.equal(1);
});
});

View File

@ -0,0 +1,51 @@
export class RetryManager {
private timeouts: Map<string, ReturnType<typeof setTimeout>>;
public constructor(
// TODO: back-off strategy
private retryIntervalMs: number,
private maxRetryNumber: number
) {
this.timeouts = new Map();
if (
!retryIntervalMs ||
retryIntervalMs <= 0 ||
!maxRetryNumber ||
maxRetryNumber <= 0
) {
throw Error(
`Invalid retryIntervalMs ${retryIntervalMs} or maxRetryNumber ${maxRetryNumber} values`
);
}
}
public stopRetries(id: string): void {
const timeout = this.timeouts.get(id);
if (timeout) {
clearTimeout(timeout);
}
}
public startRetries(id: string, retry: () => void | Promise<void>): void {
this.retry(id, retry, 0);
}
private retry(
id: string,
retry: () => void | Promise<void>,
attemptNumber: number
): void {
clearTimeout(this.timeouts.get(id));
if (attemptNumber < this.maxRetryNumber) {
const interval = setTimeout(() => {
void retry();
// Schedule the next retry until we are told to stop.
this.retry(id, retry, ++attemptNumber);
}, this.retryIntervalMs);
this.timeouts.set(id, interval);
}
}
}
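// Minimal usage sketch (hypothetical resend callback; mirrors how ReliableChannel drives it):
// const retryManager = new RetryManager(1000, 5); // retry every second, at most 5 attempts
// retryManager.startRetries(messageId, () => resend(messageId));
// retryManager.stopRetries(messageId); // e.g. once the message gets acknowledged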

View File

@ -1,5 +1,10 @@
import type { Connection, Peer, PeerStore } from "@libp2p/interface";
import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core";
import {
FilterCodecs,
LightPushCodec,
LightPushCodecV2,
StoreCodec
} from "@waku/core";
import { IRelay, Protocols } from "@waku/interfaces";
import { expect } from "chai";
import sinon from "sinon";
@ -114,7 +119,10 @@ describe("waitForRemotePeer", () => {
err = e as Error;
}
expect(addEventListenerSpy.calledOnceWith("peer:identify")).to.be.true;
expect(addEventListenerSpy.calledTwice).to.be.true;
addEventListenerSpy
.getCalls()
.forEach((c) => expect(c.firstArg).to.equal("peer:identify"));
expect(err).not.to.be.undefined;
expect(err!.message).to.be.eq("Timed out waiting for a remote peer.");
@ -148,9 +156,12 @@ describe("waitForRemotePeer", () => {
});
it("should wait for LightPush peer to be connected", async () => {
let call = 0;
const addEventListenerSpy = sinon.spy(
(_type: string, _cb: (e: any) => void) => {
_cb({ detail: { protocols: [LightPushCodec] } });
const proto = call === 0 ? LightPushCodec : LightPushCodecV2;
call++;
_cb({ detail: { protocols: [proto] } });
}
);
eventTarget.addEventListener = addEventListenerSpy;
@ -174,7 +185,10 @@ describe("waitForRemotePeer", () => {
err = e as Error;
}
expect(addEventListenerSpy.calledOnceWith("peer:identify")).to.be.true;
expect(addEventListenerSpy.calledTwice).to.be.true;
addEventListenerSpy
.getCalls()
.forEach((c) => expect(c.firstArg).to.equal("peer:identify"));
expect(err).to.be.undefined;
// check with metadata service
@ -196,8 +210,10 @@ describe("waitForRemotePeer", () => {
err = e as Error;
}
expect(addEventListenerSpy.calledTwice).to.be.true;
expect(addEventListenerSpy.lastCall.calledWith("peer:identify")).to.be.true;
expect(addEventListenerSpy.callCount).to.equal(4);
addEventListenerSpy
.getCalls()
.forEach((c) => expect(c.firstArg).to.equal("peer:identify"));
expect(err).to.be.undefined;
});

View File

@ -1,5 +1,10 @@
import type { IdentifyResult } from "@libp2p/interface";
import { FilterCodecs, LightPushCodec, StoreCodec } from "@waku/core";
import {
FilterCodecs,
LightPushCodec,
LightPushCodecV2,
StoreCodec
} from "@waku/core";
import type { IWaku, Libp2p } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import { Logger } from "@waku/utils";
@ -82,6 +87,13 @@ export async function waitForRemotePeer(
type EventListener = (_: CustomEvent<IdentifyResult>) => void;
function protocolToPeerPromise(
codecs: string[],
libp2p: Libp2p
): Promise<void>[] {
return codecs.map((codec) => waitForConnectedPeer(codec, libp2p));
}
/**
* Waits for required peers to be connected.
*/
@ -96,15 +108,21 @@ async function waitForProtocols(
}
if (waku.store && protocols.includes(Protocols.Store)) {
promises.push(waitForConnectedPeer(StoreCodec, waku.libp2p));
promises.push(...protocolToPeerPromise([StoreCodec], waku.libp2p));
}
if (waku.lightPush && protocols.includes(Protocols.LightPush)) {
promises.push(waitForConnectedPeer(LightPushCodec, waku.libp2p));
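// A peer supporting either of the two LightPush codecs satisfies the requirement,
// hence Promise.any rather than waiting for both codecs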
const lpPromises = protocolToPeerPromise(
[LightPushCodec, LightPushCodecV2],
waku.libp2p
);
promises.push(Promise.any(lpPromises));
}
if (waku.filter && protocols.includes(Protocols.Filter)) {
promises.push(waitForConnectedPeer(FilterCodecs.SUBSCRIBE, waku.libp2p));
promises.push(
...protocolToPeerPromise([FilterCodecs.SUBSCRIBE], waku.libp2p)
);
}
return Promise.all(promises);
@ -246,15 +264,17 @@ function getEnabledProtocols(waku: IWaku): Protocols[] {
function mapProtocolsToCodecs(protocols: Protocols[]): Map<string, boolean> {
const codecs: Map<string, boolean> = new Map();
const protocolToCodec: Record<string, string> = {
[Protocols.Filter]: FilterCodecs.SUBSCRIBE,
[Protocols.LightPush]: LightPushCodec,
[Protocols.Store]: StoreCodec
const protocolToCodec: Record<string, string[]> = {
[Protocols.Filter]: [FilterCodecs.SUBSCRIBE],
[Protocols.LightPush]: [LightPushCodec, LightPushCodecV2],
[Protocols.Store]: [StoreCodec]
};
for (const protocol of protocols) {
if (protocolToCodec[protocol]) {
codecs.set(protocolToCodec[protocol], false);
protocolToCodec[protocol].forEach((codec) => {
codecs.set(codec, false);
});
}
}

View File

@ -193,7 +193,7 @@ export class WakuNode implements IWaku {
}
if (_protocols.includes(Protocols.LightPush)) {
if (this.lightPush) {
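// lightPush.multicodec is now a list of codecs, hence the spread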
codecs.push(this.lightPush.multicodec);
codecs.push(...this.lightPush.multicodec);
} else {
log.error(
"Light Push codec not included in dial codec: protocol not mounted locally"

View File

@ -1,5 +1,28 @@
# Changelog
## [0.0.7](https://github.com/waku-org/js-waku/compare/sds-v0.0.6...sds-v0.0.7) (2025-09-20)
### Features
* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f))
* Introduce reliable channels ([#2526](https://github.com/waku-org/js-waku/issues/2526)) ([4d5c152](https://github.com/waku-org/js-waku/commit/4d5c152f5b1b1c241bbe7bb96d13d927a6f7550e))
### Bug Fixes
* (sds) ensure incoming messages have their retrieval hint stored ([#2604](https://github.com/waku-org/js-waku/issues/2604)) ([914beb6](https://github.com/waku-org/js-waku/commit/914beb6531a84f8c11ca951721225d47f9e6c285))
* Make health events emission consistent ([#2570](https://github.com/waku-org/js-waku/issues/2570)) ([c8dfdb1](https://github.com/waku-org/js-waku/commit/c8dfdb1ace8f0f8f668d8f2bb6e0eaed90041782))
* **sds:** Initialize lamport timestamp with current time ([#2610](https://github.com/waku-org/js-waku/issues/2610)) ([cb3af8c](https://github.com/waku-org/js-waku/commit/cb3af8cd4d820e20de1e342d40dbf85bea75e16d))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/proto bumped from ^0.0.13 to ^0.0.14
* @waku/utils bumped from ^0.0.26 to ^0.0.27
## [0.0.6](https://github.com/waku-org/js-waku/compare/sds-v0.0.5...sds-v0.0.6) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/sds",
"version": "0.0.6",
"version": "0.0.7",
"description": "Scalable Data Sync implementation for the browser. Based on https://github.com/vacp2p/rfc-index/blob/main/vac/raw/sds.md",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -62,8 +62,8 @@
"dependencies": {
"@libp2p/interface": "2.10.4",
"@noble/hashes": "^1.7.1",
"@waku/proto": "^0.0.13",
"@waku/utils": "^0.0.26",
"@waku/proto": "^0.0.14",
"@waku/utils": "^0.0.27",
"chai": "^5.1.2",
"lodash": "^4.17.21"
},

View File

@ -50,7 +50,7 @@ describe("Message serialization", () => {
const bytes = message.encode();
const decMessage = Message.decode(bytes);
expect(decMessage.causalHistory).to.deep.equal([
expect(decMessage!.causalHistory).to.deep.equal([
{ messageId: depMessageId, retrievalHint: depRetrievalHint }
]);
});

View File

@ -1,10 +1,13 @@
import { proto_sds_message } from "@waku/proto";
import { Logger } from "@waku/utils";
export type MessageId = string;
export type HistoryEntry = proto_sds_message.HistoryEntry;
export type ChannelId = string;
export type SenderId = string;
const log = new Logger("sds:message");
export class Message implements proto_sds_message.SdsMessage {
public constructor(
public messageId: string,
@ -24,7 +27,9 @@ export class Message implements proto_sds_message.SdsMessage {
return proto_sds_message.SdsMessage.encode(this);
}
public static decode(data: Uint8Array): Message {
public static decode(
data: Uint8Array
): undefined | ContentMessage | SyncMessage | EphemeralMessage {
const {
messageId,
channelId,
@ -34,15 +39,48 @@ export class Message implements proto_sds_message.SdsMessage {
bloomFilter,
content
} = proto_sds_message.SdsMessage.decode(data);
return new Message(
messageId,
channelId,
senderId,
causalHistory,
if (testContentMessage({ lamportTimestamp, content })) {
return new ContentMessage(
messageId,
channelId,
senderId,
causalHistory,
lamportTimestamp!,
bloomFilter,
content!
);
}
if (testEphemeralMessage({ lamportTimestamp, content })) {
return new EphemeralMessage(
messageId,
channelId,
senderId,
causalHistory,
undefined,
bloomFilter,
content!
);
}
if (testSyncMessage({ lamportTimestamp, content })) {
return new SyncMessage(
messageId,
channelId,
senderId,
causalHistory,
lamportTimestamp!,
bloomFilter,
undefined
);
}
log.error(
"message received was of unknown type",
lamportTimestamp,
bloomFilter,
content
);
return undefined;
}
}
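// Minimal sketch of how the discriminated decode result might be consumed (assumed caller code):
// const decoded = Message.decode(data);
// if (!decoded) { /* unknown wire shape: logged and dropped */ }
// else if (isContentMessage(decoded)) { /* has lamportTimestamp and content */ }
// else if (isEphemeralMessage(decoded)) { /* has content, no lamportTimestamp */ }
// else { /* sync message: lamportTimestamp only, no content */ }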
@ -73,9 +111,10 @@ export class SyncMessage extends Message {
}
}
export function isSyncMessage(
message: Message | ContentMessage | SyncMessage | EphemeralMessage
): message is SyncMessage {
function testSyncMessage(message: {
lamportTimestamp?: number;
content?: Uint8Array;
}): boolean {
return Boolean(
"lamportTimestamp" in message &&
typeof message.lamportTimestamp === "number" &&
@ -83,6 +122,12 @@ export function isSyncMessage(
);
}
export function isSyncMessage(
message: Message | ContentMessage | SyncMessage | EphemeralMessage
): message is SyncMessage {
return testSyncMessage(message);
}
export class EphemeralMessage extends Message {
public constructor(
public messageId: string,
@ -116,6 +161,13 @@ export class EphemeralMessage extends Message {
export function isEphemeralMessage(
message: Message | ContentMessage | SyncMessage | EphemeralMessage
): message is EphemeralMessage {
return testEphemeralMessage(message);
}
function testEphemeralMessage(message: {
lamportTimestamp?: number;
content?: Uint8Array;
}): boolean {
return Boolean(
message.lamportTimestamp === undefined &&
"content" in message &&
@ -166,6 +218,13 @@ export class ContentMessage extends Message {
export function isContentMessage(
message: Message | ContentMessage
): message is ContentMessage {
return testContentMessage(message);
}
function testContentMessage(message: {
lamportTimestamp?: number;
content?: Uint8Array;
}): message is { lamportTimestamp: number; content: Uint8Array } {
return Boolean(
"lamportTimestamp" in message &&
typeof message.lamportTimestamp === "number" &&

View File

@ -40,7 +40,7 @@ const sendMessage = async (
payload: Uint8Array,
callback: (message: ContentMessage) => Promise<{ success: boolean }>
): Promise<void> => {
await channel.pushOutgoingMessage(payload, callback);
channel.pushOutgoingMessage(payload, callback);
await channel.processTasks();
};
@ -154,8 +154,8 @@ describe("MessageChannel", function () {
});
// Causal history should only contain the last N messages as defined by causalHistorySize
const causalHistory = outgoingBuffer[outgoingBuffer.length - 1]
.causalHistory as HistoryEntry[];
const causalHistory =
outgoingBuffer[outgoingBuffer.length - 1].causalHistory;
expect(causalHistory.length).to.equal(causalHistorySize);
const expectedCausalHistory = messages
@ -184,22 +184,30 @@ describe("MessageChannel", function () {
expect(timestampAfter).to.equal(timestampBefore + 1);
});
it("should update lamport timestamp if greater than current timestamp and dependencies are met", async () => {
// TODO: test is failing in CI, investigate in https://github.com/waku-org/js-waku/issues/2648
it.skip("should update lamport timestamp if greater than current timestamp and dependencies are met", async () => {
const testChannelA = new MessageChannel(channelId, "alice");
const testChannelB = new MessageChannel(channelId, "bob");
const timestampBefore = testChannelA["lamportTimestamp"];
for (const m of messagesA) {
await sendMessage(channelA, utf8ToBytes(m), callback);
await sendMessage(testChannelA, utf8ToBytes(m), callback);
}
for (const m of messagesB) {
await sendMessage(channelB, utf8ToBytes(m), async (message) => {
await receiveMessage(channelA, message);
await sendMessage(testChannelB, utf8ToBytes(m), async (message) => {
await receiveMessage(testChannelA, message);
return { success: true };
});
}
const timestampAfter = channelA["lamportTimestamp"];
expect(timestampAfter).to.equal(messagesB.length);
const timestampAfter = testChannelA["lamportTimestamp"];
expect(timestampAfter - timestampBefore).to.equal(messagesB.length);
});
it("should maintain proper timestamps if all messages received", async () => {
let timestamp = 0;
// TODO: test is failing in CI, investigate in https://github.com/waku-org/js-waku/issues/2648
it.skip("should maintain proper timestamps if all messages received", async () => {
const aTimestampBefore = channelA["lamportTimestamp"];
let timestamp = channelB["lamportTimestamp"];
for (const m of messagesA) {
await sendMessage(channelA, utf8ToBytes(m), async (message) => {
timestamp++;
@ -219,7 +227,9 @@ describe("MessageChannel", function () {
}
const expectedLength = messagesA.length + messagesB.length;
expect(channelA["lamportTimestamp"]).to.equal(expectedLength);
expect(channelA["lamportTimestamp"]).to.equal(
aTimestampBefore + expectedLength
);
expect(channelA["lamportTimestamp"]).to.equal(
channelB["lamportTimestamp"]
);
@ -292,14 +302,12 @@ describe("MessageChannel", function () {
);
const localHistory = channelA["localHistory"] as ILocalHistory;
console.log("localHistory", localHistory);
expect(localHistory.length).to.equal(1);
// Find the message in local history
const historyEntry = localHistory.find(
(entry) => entry.messageId === messageId
);
console.log("history entry", historyEntry);
expect(historyEntry).to.exist;
expect(historyEntry!.retrievalHint).to.deep.equal(testRetrievalHint);
});
@ -314,6 +322,8 @@ describe("MessageChannel", function () {
const message2Id = MessageChannel.getMessageId(message2Payload);
const message3Id = MessageChannel.getMessageId(message3Payload);
const startTimestamp = channelA["lamportTimestamp"];
// Send own message first (timestamp will be startTimestamp + 1)
await sendMessage(channelA, message1Payload, callback);
@ -325,7 +335,7 @@ describe("MessageChannel", function () {
channelA.channelId,
"bob",
[],
3, // Higher timestamp
startTimestamp + 3, // Higher timestamp
undefined,
message3Payload
)
@ -339,7 +349,7 @@ describe("MessageChannel", function () {
channelA.channelId,
"carol",
[],
2, // Middle timestamp
startTimestamp + 2, // Middle timestamp
undefined,
message2Payload
)
@ -352,21 +362,27 @@ describe("MessageChannel", function () {
const first = localHistory.findIndex(
({ messageId, lamportTimestamp }) => {
return messageId === message1Id && lamportTimestamp === 1;
return (
messageId === message1Id && lamportTimestamp === startTimestamp + 1
);
}
);
expect(first).to.eq(0);
const second = localHistory.findIndex(
({ messageId, lamportTimestamp }) => {
return messageId === message2Id && lamportTimestamp === 2;
return (
messageId === message2Id && lamportTimestamp === startTimestamp + 2
);
}
);
expect(second).to.eq(1);
const third = localHistory.findIndex(
({ messageId, lamportTimestamp }) => {
return messageId === message3Id && lamportTimestamp === 3;
return (
messageId === message3Id && lamportTimestamp === startTimestamp + 3
);
}
);
expect(third).to.eq(2);
@ -596,7 +612,6 @@ describe("MessageChannel", function () {
it("First message is missed, then re-sent, should be ack'd", async () => {
const firstMessage = utf8ToBytes("first message");
const firstMessageId = MessageChannel.getMessageId(firstMessage);
console.log("firstMessage", firstMessageId);
let messageAcked = false;
channelA.addEventListener(
MessageChannelEvent.OutMessageAcknowledged,

View File

@ -95,7 +95,9 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
super();
this.channelId = channelId;
this.senderId = senderId;
this.lamportTimestamp = 0;
// SDS RFC says to use nanoseconds, but the current time in nanoseconds is > Number.MAX_SAFE_INTEGER
// So instead we use milliseconds and propose a spec change (TODO)
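// Rough check: Date.now() is ~1.7e12 ms at the time of writing; in nanoseconds that would be
// ~1.7e18, well above Number.MAX_SAFE_INTEGER (2^53 - 1 ≈ 9.0e15), so integer precision would be lost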
this.lamportTimestamp = Date.now();
this.filter = new DefaultBloomFilter(DEFAULT_BLOOM_FILTER_OPTIONS);
this.outgoingBuffer = [];
this.possibleAcks = new Map();
@ -174,13 +176,13 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
*
* @throws Error if the payload is empty
*/
public async pushOutgoingMessage(
public pushOutgoingMessage(
payload: Uint8Array,
callback?: (processedMessage: ContentMessage) => Promise<{
success: boolean;
retrievalHint?: Uint8Array;
}>
): Promise<void> {
): void {
if (!payload || !payload.length) {
throw Error("Only messages with valid payloads are allowed");
}
@ -285,6 +287,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
}
log.info(
this.senderId,
"message from incoming buffer",
message.messageId,
"is missing dependencies",
missingDependencies.map(({ messageId, retrievalHint }) => {
@ -470,10 +473,15 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
this.timeReceived.set(message.messageId, Date.now());
log.info(
this.senderId,
"new incoming message",
message.messageId,
"is missing dependencies",
missingDependencies.map((ch) => ch.messageId)
);
this.safeSendEvent(MessageChannelEvent.InMessageMissing, {
detail: Array.from(missingDependencies)
});
} else {
if (isContentMessage(message) && this.deliverMessage(message)) {
this.safeSendEvent(MessageChannelEvent.InMessageDelivered, {

View File

@ -323,7 +323,7 @@ export class ServiceNode {
this.checkProcess();
return this.restCall<boolean>(
"/relay/v1/subscriptions",
"/relay/v1/auto/subscriptions",
"POST",
contentTopics,
async (response) => response.status === 200

View File

@ -1,5 +1,5 @@
import { createEncoder } from "@waku/core";
import { IRateLimitProof, LightNode, ProtocolError } from "@waku/interfaces";
import { IRateLimitProof, LightNode, LightPushError } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/sdk";
import { expect } from "chai";
@ -21,9 +21,9 @@ import {
TestRoutingInfo
} from "./utils.js";
const runTests = (strictNodeCheck: boolean): void => {
const runTests = (strictNodeCheck: boolean, useLegacy: boolean): void => {
const numServiceNodes = 2;
describe(`Waku Light Push: Multiple Nodes: Strict Check: ${strictNodeCheck}`, function () {
describe(`Waku Light Push (${useLegacy ? "v2 (legacy)" : "v3"}): Multiple Nodes: Strict Check: ${strictNodeCheck}`, function () {
// Set the timeout for all tests in this suite. Can be overwritten at test level
this.timeout(15000);
let waku: LightNode;
@ -36,7 +36,8 @@ const runTests = (strictNodeCheck: boolean): void => {
{ lightpush: true, filter: true },
strictNodeCheck,
numServiceNodes,
true
true,
{ lightPush: { useLegacy } }
);
});
@ -95,7 +96,7 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.successes.length).to.eq(0);
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.EMPTY_PAYLOAD
LightPushError.EMPTY_PAYLOAD
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
@ -174,7 +175,7 @@ const runTests = (strictNodeCheck: boolean): void => {
expect(pushResponse.successes.length).to.eq(0);
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.REMOTE_PEER_REJECTED
LightPushError.REMOTE_PEER_REJECTED
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
@ -248,7 +249,7 @@ const runTests = (strictNodeCheck: boolean): void => {
});
expect(pushResponse.successes.length).to.eq(0);
expect(pushResponse.failures?.map((failure) => failure.error)).to.include(
ProtocolError.SIZE_TOO_BIG
LightPushError.SIZE_TOO_BIG
);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(
false
@ -257,4 +258,6 @@ const runTests = (strictNodeCheck: boolean): void => {
});
};
[true, false].map(runTests);
[true, false].forEach((strictNodeCheck) => {
[true, false].forEach((legacy) => runTests(strictNodeCheck, legacy));
});

View File

@ -1,5 +1,5 @@
import { createEncoder } from "@waku/core";
import { LightNode, Protocols } from "@waku/interfaces";
import { IWaku, Protocols } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -28,7 +28,7 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () {
this.timeout(30000);
const numServiceNodes = 2;
let waku: LightNode;
let waku: IWaku;
let serviceNodes: ServiceNodesFleet;
const customContentTopic2 = "/test/2/waku-light-push/utf8";
@ -48,6 +48,7 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () {
{
lightpush: true,
filter: true,
relay: true,
contentTopic: [TestEncoder.contentTopic, customEncoder2.contentTopic]
},
false,
@ -60,45 +61,56 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () {
await teardownNodesWithRedundancy(serviceNodes, waku);
});
it("Subscribe and receive messages on 2 different pubsubtopics", async function () {
if (customRoutingInfo2.pubsubTopic === TestEncoder.pubsubTopic)
throw "Invalid test, both encoder uses same shard";
[true, false].forEach((useLegacy) => {
it(`Subscribe and receive messages on 2 different pubsubtopics with ${useLegacy ? "v2" : "v3"} protocol`, async function () {
if (customRoutingInfo2.pubsubTopic === TestEncoder.pubsubTopic)
throw "Invalid test, both encoder uses same shard";
const pushResponse1 = await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M1")
});
const pushResponse2 = await waku.lightPush.send(customEncoder2, {
payload: utf8ToBytes("M2")
});
const pushResponse1 = await waku.lightPush!.send(
TestEncoder,
{
payload: utf8ToBytes("M1")
},
{ useLegacy }
);
expect(pushResponse1.successes.length).to.eq(numServiceNodes);
expect(pushResponse2.successes.length).to.eq(numServiceNodes);
const pushResponse2 = await waku.lightPush!.send(
customEncoder2,
{
payload: utf8ToBytes("M2")
},
{ useLegacy }
);
const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]);
const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]);
expect(pushResponse1?.successes.length).to.eq(numServiceNodes);
expect(pushResponse2?.successes.length).to.eq(numServiceNodes);
expect(
await messageCollector1.waitForMessagesAutosharding(1, {
contentTopic: TestEncoder.contentTopic
})
).to.eq(true);
const messageCollector1 = new MessageCollector(serviceNodes.nodes[0]);
const messageCollector2 = new MessageCollector(serviceNodes.nodes[1]);
expect(
await messageCollector2.waitForMessagesAutosharding(1, {
contentTopic: customEncoder2.contentTopic
})
).to.eq(true);
expect(
await messageCollector1.waitForMessagesAutosharding(1, {
contentTopic: TestEncoder.contentTopic
})
).to.eq(true);
messageCollector1.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestEncoder.contentTopic,
expectedPubsubTopic: TestEncoder.pubsubTopic
});
expect(
await messageCollector2.waitForMessagesAutosharding(1, {
contentTopic: customEncoder2.contentTopic
})
).to.eq(true);
messageCollector2.verifyReceivedMessage(0, {
expectedMessageText: "M2",
expectedContentTopic: customEncoder2.contentTopic,
expectedPubsubTopic: customEncoder2.pubsubTopic
messageCollector1.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: TestEncoder.contentTopic,
expectedPubsubTopic: TestEncoder.pubsubTopic
});
messageCollector2.verifyReceivedMessage(0, {
expectedMessageText: "M2",
expectedContentTopic: customEncoder2.contentTopic,
expectedPubsubTopic: customEncoder2.pubsubTopic
});
});
});
@ -122,10 +134,10 @@ describe("Waku Light Push (Autosharding): Multiple Shards", function () {
const messageCollector2 = new MessageCollector(nwaku2);
await waku.lightPush.send(TestEncoder, {
await waku.lightPush!.send(TestEncoder, {
payload: utf8ToBytes("M1")
});
await waku.lightPush.send(customEncoder2, {
await waku.lightPush!.send(customEncoder2, {
payload: utf8ToBytes("M2")
});

View File

@ -0,0 +1,83 @@
import { LightNode } from "@waku/interfaces";
import { createLightNode, utf8ToBytes } from "@waku/sdk";
import { expect } from "chai";
import {
afterEachCustom,
beforeEachCustom,
NOISE_KEY_2,
runMultipleNodes,
ServiceNodesFleet,
teardownNodesWithRedundancy
} from "../../src/index.js";
import { DEFAULT_DISCOVERIES_ENABLED } from "../../src/lib/runNodes.js";
import { TestContentTopic, TestEncoder, TestRoutingInfo } from "./utils.js";
describe(`Waku Light Push V2 and V3 interop`, function () {
this.timeout(15000);
let waku: LightNode;
let waku2: LightNode;
let serviceNodes: ServiceNodesFleet;
beforeEachCustom(this, async () => {
[serviceNodes, waku] = await runMultipleNodes(
this.ctx,
TestRoutingInfo,
{ lightpush: true, filter: true, relay: true },
true,
2,
true
);
waku2 = await createLightNode({
staticNoiseKey: NOISE_KEY_2,
libp2p: {
addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] }
},
networkConfig: TestRoutingInfo.networkConfig,
lightPush: { numPeersToUse: 1 },
discovery: DEFAULT_DISCOVERIES_ENABLED
});
await waku2.dial(await serviceNodes.nodes[1].getMultiaddrWithId());
});
afterEachCustom(this, async () => {
await teardownNodesWithRedundancy(serviceNodes, [waku, waku2]);
});
it(`Push messages through V2 and V3 from 2 js-waku nodes and receive`, async function () {
let pushResponse = await waku.lightPush.send(
TestEncoder,
{
payload: utf8ToBytes("v2")
},
{ useLegacy: true }
);
expect(pushResponse.successes.length).to.eq(2);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "v2",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
pushResponse = await waku2.lightPush.send(
TestEncoder,
{
payload: utf8ToBytes("v3")
},
{ useLegacy: false }
);
expect(pushResponse.successes.length).to.eq(1);
expect(await serviceNodes.messageCollector.waitForMessages(1)).to.eq(true);
serviceNodes.messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "v3",
expectedContentTopic: TestContentTopic,
expectedPubsubTopic: TestRoutingInfo.pubsubTopic
});
});
});

View File

@ -85,7 +85,7 @@ describe("Peer Cache Discovery", function () {
waku = await createLightNode({
networkConfig: DefaultTestNetworkConfig,
discovery: {
peerExchange: true,
peerExchange: false,
peerCache: true
},
peerCache: mockCache
@ -116,7 +116,7 @@ describe("Peer Cache Discovery", function () {
networkConfig: DefaultTestNetworkConfig,
bootstrapPeers: [(await nwaku2.getMultiaddrWithId()).toString()],
discovery: {
peerExchange: true,
peerExchange: false,
peerCache: true
},
peerCache: mockCache

View File

@ -12,6 +12,25 @@
* devDependencies
* @waku/interfaces bumped from 0.0.16 to 0.0.17
## [0.0.27](https://github.com/waku-org/js-waku/compare/utils-v0.0.26...utils-v0.0.27) (2025-09-20)
### Features
* Introduce reliable channels ([#2526](https://github.com/waku-org/js-waku/issues/2526)) ([4d5c152](https://github.com/waku-org/js-waku/commit/4d5c152f5b1b1c241bbe7bb96d13d927a6f7550e))
### Bug Fixes
* Remove sharding circular dependency ([#2590](https://github.com/waku-org/js-waku/issues/2590)) ([78c856d](https://github.com/waku-org/js-waku/commit/78c856d0796a73848815b615bea24d3f5395da78))
### Dependencies
* The following workspace dependencies were updated
* dependencies
* @waku/interfaces bumped from 0.0.33 to 0.0.34
## [0.0.26](https://github.com/waku-org/js-waku/compare/utils-v0.0.25...utils-v0.0.26) (2025-08-14)

View File

@ -1,6 +1,6 @@
{
"name": "@waku/utils",
"version": "0.0.26",
"version": "0.0.27",
"description": "Different utilities for Waku",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
@ -64,7 +64,7 @@
},
"dependencies": {
"@noble/hashes": "^1.3.2",
"@waku/interfaces": "0.0.33",
"@waku/interfaces": "0.0.34",
"chai": "^4.3.10",
"debug": "^4.3.4",
"uint8arrays": "^5.0.1"

View File

@ -7,3 +7,4 @@ export * from "./sharding/index.js";
export * from "./push_or_init_map.js";
export * from "./relay_shard_codec.js";
export * from "./delay.js";
export * from "./mock_node.js";

View File

@ -0,0 +1,166 @@
import { Peer, PeerId, Stream, TypedEventEmitter } from "@libp2p/interface";
import { MultiaddrInput } from "@multiformats/multiaddr";
import {
Callback,
CreateDecoderParams,
CreateEncoderParams,
HealthStatus,
IDecodedMessage,
IDecoder,
IEncoder,
IFilter,
ILightPush,
type IMessage,
IRelay,
ISendOptions,
IStore,
IWaku,
IWakuEventEmitter,
Libp2p,
LightPushSDKResult,
Protocols
} from "@waku/interfaces";
export type MockWakuEvents = {
["new-message"]: CustomEvent<IDecodedMessage>;
};
export class MockWakuNode implements IWaku {
public relay?: IRelay;
public store?: IStore;
public filter?: IFilter;
public lightPush?: ILightPush;
public protocols: string[];
private readonly subscriptions: {
decoders: IDecoder<any>[];
callback: Callback<any>;
}[];
public constructor(
private mockMessageEmitter?: TypedEventEmitter<MockWakuEvents>
) {
this.protocols = [];
this.events = new TypedEventEmitter();
this.subscriptions = [];
this.lightPush = {
multicodec: [],
send: this._send.bind(this),
start(): void {},
stop(): void {}
};
this.filter = {
start: async () => {},
stop: async () => {},
multicodec: "filter",
subscribe: this._subscribe.bind(this),
unsubscribe<T extends IDecodedMessage>(
_decoders: IDecoder<T> | IDecoder<T>[]
): Promise<boolean> {
throw "Not implemented";
},
unsubscribeAll(): void {
throw "Not implemented";
}
};
}
public get libp2p(): Libp2p {
throw "No libp2p on MockWakuNode";
}
private async _send(
encoder: IEncoder,
message: IMessage,
_sendOptions?: ISendOptions
): Promise<LightPushSDKResult> {
for (const { decoders, callback } of this.subscriptions) {
const protoMessage = await encoder.toProtoObj(message);
if (!protoMessage) throw "Issue in mock encoding message";
for (const decoder of decoders) {
const decodedMessage = await decoder.fromProtoObj(
decoder.pubsubTopic,
protoMessage
);
if (!decodedMessage) throw "Issue in mock decoding message";
await callback(decodedMessage);
if (this.mockMessageEmitter) {
this.mockMessageEmitter.dispatchEvent(
new CustomEvent<IDecodedMessage>("new-message", {
detail: decodedMessage
})
);
}
}
}
return {
failures: [],
successes: []
};
}
private async _subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>
): Promise<boolean> {
this.subscriptions.push({
decoders: Array.isArray(decoders) ? decoders : [decoders],
callback
});
if (this.mockMessageEmitter) {
this.mockMessageEmitter.addEventListener("new-message", (event) => {
void callback(event.detail as unknown as T);
});
}
return Promise.resolve(true);
}
public events: IWakuEventEmitter;
public get peerId(): PeerId {
throw "no peerId on MockWakuNode";
}
public get health(): HealthStatus {
throw "no health on MockWakuNode";
}
public dial(
_peer: PeerId | MultiaddrInput,
_protocols?: Protocols[]
): Promise<Stream> {
throw new Error("Method not implemented.");
}
public hangUp(_peer: PeerId | MultiaddrInput): Promise<boolean> {
throw new Error("Method not implemented.");
}
public start(): Promise<void> {
return Promise.resolve();
}
public stop(): Promise<void> {
throw new Error("Method not implemented.");
}
public waitForPeers(
_protocols?: Protocols[],
_timeoutMs?: number
): Promise<void> {
throw new Error("Method not implemented.");
}
public createDecoder(
_params: CreateDecoderParams
): IDecoder<IDecodedMessage> {
throw new Error("Method not implemented.");
}
public createEncoder(_params: CreateEncoderParams): IEncoder {
throw new Error("Method not implemented.");
}
public isStarted(): boolean {
throw new Error("Method not implemented.");
}
public isConnected(): boolean {
throw new Error("Method not implemented.");
}
public getConnectedPeers(): Promise<Peer[]> {
throw new Error("Method not implemented.");
}
}
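// Minimal usage sketch (mirrors the reliable-channel tests above):
// const events = new TypedEventEmitter<MockWakuEvents>();
// const alice = new MockWakuNode(events);
// const bob = new MockWakuNode(events);
// // a payload pushed via alice.lightPush.send(encoder, message) reaches bob's filter subscriptions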