chore: upgrade libp2p and related deps (#1482)

* chore: update noise

* update: package-lock.json

* update: @chainsafe/libp2p-gossipsub

* remove unneeded @libp2p/interface-* dev deps and bump libp2p

* refactor code for new deps

* update: new package-lock.json

* set up Prettier, refactor ESLint config and remove trailing commas

* update package-lock.json

* fix build

* use `import type` for interface imports

* fix imports for merge

* update typedoc exports

* add: CustomEvent import

* use the new consolidated @libp2p/interface package

* add aegir as dev dep for tests
Author: Danish Arora, 2023-08-16 20:18:13 +05:30 (committed by GitHub)
parent 7b6ead14ac
commit 87717981eb
153 changed files with 15132 additions and 15182 deletions
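Most of the dependency churn below reduces to three things: the standalone `@libp2p/interface-*` packages are dropped from `devDependencies`, the `libp2p` peer dependency is bumped from `^0.45.9` to `^0.46.3`, and interface types are now imported from the consolidated `@libp2p/interface` entry points (registrar types move to `@libp2p/interface-internal`). A before/after of the import style, mirroring the per-file hunks that follow:

```ts
// Before (standalone interface packages, libp2p 0.45.x)
import type { Stream } from "@libp2p/interface-connection";
import type { Libp2p } from "@libp2p/interface-libp2p";
import type { PeerId } from "@libp2p/interface-peer-id";
import { Peer, PeerStore } from "@libp2p/interface-peer-store";
import type { IncomingStreamData } from "@libp2p/interface-registrar";

// After (consolidated entry points, libp2p 0.46.x)
import type { Libp2p } from "@libp2p/interface";
import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface/peer-id";
import { Peer, PeerStore } from "@libp2p/interface/peer-store";
import type { IncomingStreamData } from "@libp2p/interface-internal/registrar";
```

The `CustomEvent` and `EventEmitter` helpers are still imported from `@libp2p/interfaces/events`; that path is unchanged by this migration.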


@ -1,6 +1,9 @@
{ {
"root": true, "root": true,
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"parserOptions": {
"project": ["./tsconfig.json"]
},
"env": { "es6": true }, "env": { "es6": true },
"ignorePatterns": ["node_modules", "build", "coverage", "proto"], "ignorePatterns": ["node_modules", "build", "coverage", "proto"],
"plugins": ["import", "eslint-comments", "functional"], "plugins": ["import", "eslint-comments", "functional"],
@ -13,6 +16,7 @@
], ],
"globals": { "BigInt": true, "console": true, "WebAssembly": true }, "globals": { "BigInt": true, "console": true, "WebAssembly": true },
"rules": { "rules": {
"comma-dangle": ["error", "never"],
"@typescript-eslint/explicit-function-return-type": "off", "@typescript-eslint/explicit-function-return-type": "off",
"@typescript-eslint/explicit-module-boundary-types": "off", "@typescript-eslint/explicit-module-boundary-types": "off",
"eslint-comments/disable-enable-pair": [ "eslint-comments/disable-enable-pair": [
@ -58,10 +62,19 @@
"overrides": [ "overrides": [
{ {
"files": ["*.spec.ts", "**/test_utils/*.ts", "*.js", "*.cjs"], "files": ["*.spec.ts", "**/test_utils/*.ts", "*.js", "*.cjs"],
"env": {
"node": true
},
"rules": { "rules": {
"@typescript-eslint/no-non-null-assertion": "off", "@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-explicit-any": "off", "@typescript-eslint/no-explicit-any": "off",
"no-console": "off" "no-console": "off",
"import/no-extraneous-dependencies": [
"error",
{
"devDependencies": true
}
]
} }
}, },
{ {

.prettierrc.json (new file, 3 lines)

@ -0,0 +1,3 @@
{
"trailingComma": "none"
}
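This new Prettier config pairs with the `"comma-dangle": ["error", "never"]` rule added to the ESLint config above; together they strip trailing commas across the repo, which accounts for most of the one-character changes in the remaining diffs. A minimal, made-up illustration of the formatting change (the option names echo ones used elsewhere in this diff; the values are invented):

```ts
// Before: trailing commas were tolerated.
const keepAliveBefore = {
  pingKeepAlive: 300,
  relayKeepAlive: 300,
};

// After: Prettier's "trailingComma": "none" removes the dangling comma and
// ESLint's "comma-dangle": "never" flags it if it reappears.
const keepAliveAfter = {
  pingKeepAlive: 300,
  relayKeepAlive: 300
};
```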


@ -38,8 +38,8 @@ const main = async (url, branch) => {
repo: url, repo: url,
branch: branch, branch: branch,
dotfiles: true, dotfiles: true,
silent: false, silent: false
}); });
}; };
main(repoUrl, branch); void main(repoUrl, branch);


@ -70,7 +70,7 @@ async function readWorkspace(packagePath) {
name: json.name, name: json.name,
private: !!json.private, private: !!json.private,
version: json.version, version: json.version,
workspace: packagePath, workspace: packagePath
}; };
} }

package-lock.json (generated, 27786 changed lines)

File diff suppressed because it is too large.


@ -2,12 +2,12 @@ module.exports = {
root: true, root: true,
parserOptions: { parserOptions: {
ecmaVersion: 2021, ecmaVersion: 2021,
sourceType: "module", sourceType: "module"
}, },
env: { env: {
browser: true, browser: true,
es2021: true, es2021: true,
node: true, node: true
}, },
extends: ["eslint:recommended"], extends: ["eslint:recommended"],
rules: { rules: {
@ -17,8 +17,8 @@ module.exports = {
semi: ["error", "always"], semi: ["error", "always"],
"no-unused-vars": [ "no-unused-vars": [
"error", "error",
{ vars: "all", args: "after-used", ignoreRestSiblings: false }, { vars: "all", args: "after-used", ignoreRestSiblings: false }
], ],
"no-console": "warn", "no-console": "warn"
}, }
}; };


@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };


@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"], frameworks: ["webpack", "mocha"],
files: ["src/lib/**/!(node).spec.ts"], files: ["src/lib/**/!(node).spec.ts"],
preprocessors: { preprocessors: {
"src/lib/**/!(node).spec.ts": ["webpack"], "src/lib/**/!(node).spec.ts": ["webpack"]
}, },
envPreprocessor: ["CI"], envPreprocessor: ["CI"],
reporters: ["progress"], reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true, singleRun: true,
client: { client: {
mocha: { mocha: {
timeout: 6000, // Default is 2s timeout: 6000 // Default is 2s
}, }
}, },
webpack: { webpack: {
mode: "development", mode: "development",
module: { module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }], rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
}, },
plugins: [ plugins: [
new webpack.DefinePlugin({ new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false, "process.env.CI": process.env.CI || false
}), }),
new webpack.ProvidePlugin({ new webpack.ProvidePlugin({
process: "process/browser.js", process: "process/browser.js"
}), })
], ],
resolve: { resolve: {
extensions: [".ts", ".tsx", ".js"], extensions: [".ts", ".tsx", ".js"],
extensionAlias: { extensionAlias: {
".js": [".js", ".ts"], ".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"], ".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"], ".mjs": [".mjs", ".mts"]
}, }
}, },
stats: { warnings: false }, stats: { warnings: false },
devtool: "inline-source-map", devtool: "inline-source-map"
}, }
}); });
}; };


@ -85,11 +85,6 @@
"uuid": "^9.0.0" "uuid": "^9.0.0"
}, },
"devDependencies": { "devDependencies": {
"@libp2p/interface-connection": "^5.1.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@multiformats/multiaddr": "^12.0.0", "@multiformats/multiaddr": "^12.0.0",
"@rollup/plugin-commonjs": "^24.0.1", "@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0", "@rollup/plugin-json": "^6.0.0",
@ -119,7 +114,7 @@
}, },
"peerDependencies": { "peerDependencies": {
"@multiformats/multiaddr": "^12.0.0", "@multiformats/multiaddr": "^12.0.0",
"libp2p": "^0.45.9" "libp2p": "^0.46.3"
}, },
"peerDependenciesMeta": { "peerDependenciesMeta": {
"@multiformats/multiaddr": { "@multiformats/multiaddr": {


@ -11,14 +11,14 @@ export default {
input, input,
output: { output: {
dir: "bundle", dir: "bundle",
format: "esm", format: "esm"
}, },
plugins: [ plugins: [
commonjs(), commonjs(),
json(), json(),
nodeResolve({ nodeResolve({
browser: true, browser: true,
preferBuiltins: false, preferBuiltins: false
}), })
], ]
}; };


@ -4,7 +4,7 @@ export { createEncoder, createDecoder } from "./lib/message/version_0.js";
export type { export type {
Encoder, Encoder,
Decoder, Decoder,
DecodedMessage, DecodedMessage
} from "./lib/message/version_0.js"; } from "./lib/message/version_0.js";
export * as message from "./lib/message/index.js"; export * as message from "./lib/message/index.js";
@ -22,7 +22,7 @@ export {
PageDirection, PageDirection,
wakuStore, wakuStore,
StoreCodec, StoreCodec,
createCursor, createCursor
} from "./lib/store/index.js"; } from "./lib/store/index.js";
export { waitForRemotePeer } from "./lib/wait_for_remote_peer.js"; export { waitForRemotePeer } from "./lib/wait_for_remote_peer.js";


@ -1,12 +1,12 @@
import type { Stream } from "@libp2p/interface-connection"; import type { Libp2p } from "@libp2p/interface";
import type { Libp2p } from "@libp2p/interface-libp2p"; import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import { Peer, PeerStore } from "@libp2p/interface-peer-store"; import { Peer, PeerStore } from "@libp2p/interface/peer-store";
import type { IBaseProtocol, Libp2pComponents } from "@waku/interfaces"; import type { IBaseProtocol, Libp2pComponents } from "@waku/interfaces";
import { import {
getPeersForProtocol, getPeersForProtocol,
selectConnection, selectConnection,
selectPeerForProtocol, selectPeerForProtocol
} from "@waku/utils/libp2p"; } from "@waku/utils/libp2p";
/** /**
@ -19,13 +19,13 @@ export class BaseProtocol implements IBaseProtocol {
constructor( constructor(
public multicodec: string, public multicodec: string,
private components: Libp2pComponents, private components: Libp2pComponents
) { ) {
this.addLibp2pEventListener = components.events.addEventListener.bind( this.addLibp2pEventListener = components.events.addEventListener.bind(
components.events, components.events
); );
this.removeLibp2pEventListener = components.events.removeEventListener.bind( this.removeLibp2pEventListener = components.events.removeEventListener.bind(
components.events, components.events
); );
} }
@ -46,13 +46,13 @@ export class BaseProtocol implements IBaseProtocol {
const { peer } = await selectPeerForProtocol( const { peer } = await selectPeerForProtocol(
this.peerStore, this.peerStore,
[this.multicodec], [this.multicodec],
peerId, peerId
); );
return peer; return peer;
} }
protected async newStream(peer: Peer): Promise<Stream> { protected async newStream(peer: Peer): Promise<Stream> {
const connections = this.components.connectionManager.getConnections( const connections = this.components.connectionManager.getConnections(
peer.id, peer.id
); );
const connection = selectConnection(connections); const connection = selectConnection(connections);
if (!connection) { if (!connection) {


@ -1,6 +1,6 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface-peer-info"; import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { Peer } from "@libp2p/interface-peer-store"; import type { Peer } from "@libp2p/interface/peer-store";
import { CustomEvent, EventEmitter } from "@libp2p/interfaces/events"; import { CustomEvent, EventEmitter } from "@libp2p/interfaces/events";
import { import {
ConnectionManagerOptions, ConnectionManagerOptions,
@ -9,7 +9,7 @@ import {
IPeersByDiscoveryEvents, IPeersByDiscoveryEvents,
IRelay, IRelay,
KeepAliveOptions, KeepAliveOptions,
PeersByDiscoveryResult, PeersByDiscoveryResult
} from "@waku/interfaces"; } from "@waku/interfaces";
import { Libp2p, Tags } from "@waku/interfaces"; import { Libp2p, Tags } from "@waku/interfaces";
import debug from "debug"; import debug from "debug";
@ -41,7 +41,7 @@ export class ConnectionManager
libp2p: Libp2p, libp2p: Libp2p,
keepAliveOptions: KeepAliveOptions, keepAliveOptions: KeepAliveOptions,
relay?: IRelay, relay?: IRelay,
options?: ConnectionManagerOptions, options?: ConnectionManagerOptions
): ConnectionManager { ): ConnectionManager {
let instance = ConnectionManager.instances.get(peerId); let instance = ConnectionManager.instances.get(peerId);
if (!instance) { if (!instance) {
@ -49,7 +49,7 @@ export class ConnectionManager
libp2p, libp2p,
keepAliveOptions, keepAliveOptions,
relay, relay,
options, options
); );
ConnectionManager.instances.set(peerId, instance); ConnectionManager.instances.set(peerId, instance);
} }
@ -92,12 +92,12 @@ export class ConnectionManager
return { return {
DISCOVERED: { DISCOVERED: {
[Tags.BOOTSTRAP]: peersDiscoveredByBootstrap, [Tags.BOOTSTRAP]: peersDiscoveredByBootstrap,
[Tags.PEER_EXCHANGE]: peersDiscoveredByPeerExchange, [Tags.PEER_EXCHANGE]: peersDiscoveredByPeerExchange
}, },
CONNECTED: { CONNECTED: {
[Tags.BOOTSTRAP]: peersConnectedByBootstrap, [Tags.BOOTSTRAP]: peersConnectedByBootstrap,
[Tags.PEER_EXCHANGE]: peersConnectedByPeerExchange, [Tags.PEER_EXCHANGE]: peersConnectedByPeerExchange
}, }
}; };
} }
@ -105,7 +105,7 @@ export class ConnectionManager
libp2p: Libp2p, libp2p: Libp2p,
keepAliveOptions: KeepAliveOptions, keepAliveOptions: KeepAliveOptions,
relay?: IRelay, relay?: IRelay,
options?: Partial<ConnectionManagerOptions>, options?: Partial<ConnectionManagerOptions>
) { ) {
super(); super();
this.libp2p = libp2p; this.libp2p = libp2p;
@ -113,7 +113,7 @@ export class ConnectionManager
maxDialAttemptsForPeer: DEFAULT_MAX_DIAL_ATTEMPTS_FOR_PEER, maxDialAttemptsForPeer: DEFAULT_MAX_DIAL_ATTEMPTS_FOR_PEER,
maxBootstrapPeersAllowed: DEFAULT_MAX_BOOTSTRAP_PEERS_ALLOWED, maxBootstrapPeersAllowed: DEFAULT_MAX_BOOTSTRAP_PEERS_ALLOWED,
maxParallelDials: DEFAULT_MAX_PARALLEL_DIALS, maxParallelDials: DEFAULT_MAX_PARALLEL_DIALS,
...options, ...options
}; };
this.keepAliveManager = new KeepAliveManager(keepAliveOptions, relay); this.keepAliveManager = new KeepAliveManager(keepAliveOptions, relay);
@ -126,7 +126,7 @@ export class ConnectionManager
// which means that before the ConnectionManager is initialized, some peers may have been discovered // which means that before the ConnectionManager is initialized, some peers may have been discovered
// we will dial the peers in peerStore ONCE before we start to listen to the `peer:discovery` events within the ConnectionManager // we will dial the peers in peerStore ONCE before we start to listen to the `peer:discovery` events within the ConnectionManager
this.dialPeerStorePeers().catch((error) => this.dialPeerStorePeers().catch((error) =>
log(`Unexpected error while dialing peer store peers`, error), log(`Unexpected error while dialing peer store peers`, error)
); );
} }
@ -159,15 +159,15 @@ export class ConnectionManager
this.keepAliveManager.stopAll(); this.keepAliveManager.stopAll();
this.libp2p.removeEventListener( this.libp2p.removeEventListener(
"peer:connect", "peer:connect",
this.onEventHandlers["peer:connect"], this.onEventHandlers["peer:connect"]
); );
this.libp2p.removeEventListener( this.libp2p.removeEventListener(
"peer:disconnect", "peer:disconnect",
this.onEventHandlers["peer:disconnect"], this.onEventHandlers["peer:disconnect"]
); );
this.libp2p.removeEventListener( this.libp2p.removeEventListener(
"peer:discovery", "peer:discovery",
this.onEventHandlers["peer:discovery"], this.onEventHandlers["peer:discovery"]
); );
} }
@ -198,7 +198,7 @@ export class ConnectionManager
log( log(
`Error dialing peer ${peerId.toString()} - ${ `Error dialing peer ${peerId.toString()} - ${
(error as any).message (error as any).message
}`, }`
); );
} }
this.dialErrorsForPeer.set(peerId.toString(), error); this.dialErrorsForPeer.set(peerId.toString(), error);
@ -225,14 +225,14 @@ export class ConnectionManager
} }
log( log(
`Deleting undialable peer ${peerId.toString()} from peer store. Error: ${errorMessage}`, `Deleting undialable peer ${peerId.toString()} from peer store. Error: ${errorMessage}`
); );
this.dialErrorsForPeer.delete(peerId.toString()); this.dialErrorsForPeer.delete(peerId.toString());
await this.libp2p.peerStore.delete(peerId); await this.libp2p.peerStore.delete(peerId);
} catch (error) { } catch (error) {
throw new Error( throw new Error(
`Error deleting undialable peer ${peerId.toString()} from peer store - ${error}`, `Error deleting undialable peer ${peerId.toString()} from peer store - ${error}`
); );
} }
} }
@ -245,7 +245,7 @@ export class ConnectionManager
log(`Dropped connection with peer ${peerId.toString()}`); log(`Dropped connection with peer ${peerId.toString()}`);
} catch (error) { } catch (error) {
log( log(
`Error dropping connection with peer ${peerId.toString()} - ${error}`, `Error dropping connection with peer ${peerId.toString()} - ${error}`
); );
} }
} }
@ -266,14 +266,14 @@ export class ConnectionManager
private startPeerDiscoveryListener(): void { private startPeerDiscoveryListener(): void {
this.libp2p.addEventListener( this.libp2p.addEventListener(
"peer:discovery", "peer:discovery",
this.onEventHandlers["peer:discovery"], this.onEventHandlers["peer:discovery"]
); );
} }
private startPeerConnectionListener(): void { private startPeerConnectionListener(): void {
this.libp2p.addEventListener( this.libp2p.addEventListener(
"peer:connect", "peer:connect",
this.onEventHandlers["peer:connect"], this.onEventHandlers["peer:connect"]
); );
} }
@ -292,7 +292,7 @@ export class ConnectionManager
*/ */
this.libp2p.addEventListener( this.libp2p.addEventListener(
"peer:disconnect", "peer:disconnect",
this.onEventHandlers["peer:disconnect"], this.onEventHandlers["peer:disconnect"]
); );
} }
@ -315,7 +315,7 @@ export class ConnectionManager
const { id: peerId } = evt.detail; const { id: peerId } = evt.detail;
const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes( const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(
Tags.BOOTSTRAP, Tags.BOOTSTRAP
); );
this.dispatchEvent( this.dispatchEvent(
@ -324,9 +324,9 @@ export class ConnectionManager
? EPeersByDiscoveryEvents.PEER_DISCOVERY_BOOTSTRAP ? EPeersByDiscoveryEvents.PEER_DISCOVERY_BOOTSTRAP
: EPeersByDiscoveryEvents.PEER_DISCOVERY_PEER_EXCHANGE, : EPeersByDiscoveryEvents.PEER_DISCOVERY_PEER_EXCHANGE,
{ {
detail: peerId, detail: peerId
}, }
), )
); );
try { try {
@ -343,7 +343,7 @@ export class ConnectionManager
this.keepAliveManager.start(peerId, this.libp2p.services.ping); this.keepAliveManager.start(peerId, this.libp2p.services.ping);
const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes( const isBootstrap = (await this.getTagNamesForPeer(peerId)).includes(
Tags.BOOTSTRAP, Tags.BOOTSTRAP
); );
if (isBootstrap) { if (isBootstrap) {
@ -361,9 +361,9 @@ export class ConnectionManager
new CustomEvent<PeerId>( new CustomEvent<PeerId>(
EPeersByDiscoveryEvents.PEER_CONNECT_BOOTSTRAP, EPeersByDiscoveryEvents.PEER_CONNECT_BOOTSTRAP,
{ {
detail: peerId, detail: peerId
}, }
), )
); );
} }
} else { } else {
@ -371,9 +371,9 @@ export class ConnectionManager
new CustomEvent<PeerId>( new CustomEvent<PeerId>(
EPeersByDiscoveryEvents.PEER_CONNECT_PEER_EXCHANGE, EPeersByDiscoveryEvents.PEER_CONNECT_PEER_EXCHANGE,
{ {
detail: peerId, detail: peerId
}, }
), )
); );
} }
})(); })();
@ -382,7 +382,7 @@ export class ConnectionManager
return (evt: CustomEvent<PeerId>): void => { return (evt: CustomEvent<PeerId>): void => {
this.keepAliveManager.stop(evt.detail); this.keepAliveManager.stop(evt.detail);
}; };
}, }
}; };
/** /**
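On the `add: CustomEvent import` bullet: `ConnectionManager` extends the `EventEmitter` from `@libp2p/interfaces/events` and dispatches typed `CustomEvent`s whose `detail` carries a `PeerId`, as the hunks above show. A stripped-down sketch of that pattern, using a hypothetical event name and class rather than the project's own:

```ts
import { CustomEvent, EventEmitter } from "@libp2p/interfaces/events";
import type { PeerId } from "@libp2p/interface/peer-id";

// Hypothetical event map: event name -> event type delivered to listeners.
interface DiscoveryEvents {
  "peer:discovered": CustomEvent<PeerId>;
}

class DiscoveryNotifier extends EventEmitter<DiscoveryEvents> {
  notify(peerId: PeerId): void {
    // Same dispatch shape as the hunks above: the payload travels in `detail`.
    this.dispatchEvent(
      new CustomEvent<PeerId>("peer:discovered", { detail: peerId })
    );
  }
}

// Listeners receive the typed detail.
const notifier = new DiscoveryNotifier();
notifier.addEventListener("peer:discovered", (evt) => {
  console.log("discovered", evt.detail.toString());
});
```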


@ -35,27 +35,27 @@ export class FilterSubscribeRpc {
static createSubscribeRequest( static createSubscribeRequest(
pubsubTopic: string, pubsubTopic: string,
contentTopics: string[], contentTopics: string[]
): FilterSubscribeRpc { ): FilterSubscribeRpc {
return new FilterSubscribeRpc({ return new FilterSubscribeRpc({
requestId: uuid(), requestId: uuid(),
filterSubscribeType: filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.SUBSCRIBE, proto.FilterSubscribeRequest.FilterSubscribeType.SUBSCRIBE,
pubsubTopic, pubsubTopic,
contentTopics, contentTopics
}); });
} }
static createUnsubscribeRequest( static createUnsubscribeRequest(
pubsubTopic: string, pubsubTopic: string,
contentTopics: string[], contentTopics: string[]
): FilterSubscribeRpc { ): FilterSubscribeRpc {
return new FilterSubscribeRpc({ return new FilterSubscribeRpc({
requestId: uuid(), requestId: uuid(),
filterSubscribeType: filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.UNSUBSCRIBE, proto.FilterSubscribeRequest.FilterSubscribeType.UNSUBSCRIBE,
pubsubTopic, pubsubTopic,
contentTopics, contentTopics
}); });
} }
@ -65,7 +65,7 @@ export class FilterSubscribeRpc {
filterSubscribeType: filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.UNSUBSCRIBE_ALL, proto.FilterSubscribeRequest.FilterSubscribeType.UNSUBSCRIBE_ALL,
pubsubTopic, pubsubTopic,
contentTopics: [], contentTopics: []
}); });
} }
@ -75,7 +75,7 @@ export class FilterSubscribeRpc {
filterSubscribeType: filterSubscribeType:
proto.FilterSubscribeRequest.FilterSubscribeType.SUBSCRIBER_PING, proto.FilterSubscribeRequest.FilterSubscribeType.SUBSCRIBER_PING,
pubsubTopic: "", pubsubTopic: "",
contentTopics: [], contentTopics: []
}); });
} }


@ -1,7 +1,7 @@
import { Stream } from "@libp2p/interface-connection"; import { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer } from "@libp2p/interface-peer-store"; import type { Peer } from "@libp2p/interface/peer-store";
import type { IncomingStreamData } from "@libp2p/interface-registrar"; import type { IncomingStreamData } from "@libp2p/interface-internal/registrar";
import type { import type {
Callback, Callback,
ContentTopic, ContentTopic,
@ -16,7 +16,7 @@ import type {
ProtocolCreateOptions, ProtocolCreateOptions,
ProtocolOptions, ProtocolOptions,
PubSubTopic, PubSubTopic,
Unsubscribe, Unsubscribe
} from "@waku/interfaces"; } from "@waku/interfaces";
import { WakuMessage } from "@waku/proto"; import { WakuMessage } from "@waku/proto";
import { groupByContentTopic, toAsyncIterator } from "@waku/utils"; import { groupByContentTopic, toAsyncIterator } from "@waku/utils";
@ -31,7 +31,7 @@ import { DefaultPubSubTopic } from "../constants.js";
import { import {
FilterPushRpc, FilterPushRpc,
FilterSubscribeResponse, FilterSubscribeResponse,
FilterSubscribeRpc, FilterSubscribeRpc
} from "./filter_rpc.js"; } from "./filter_rpc.js";
const log = debug("waku:filter:v2"); const log = debug("waku:filter:v2");
@ -43,7 +43,7 @@ type SubscriptionCallback<T extends IDecodedMessage> = {
const FilterCodecs = { const FilterCodecs = {
SUBSCRIBE: "/vac/waku/filter-subscribe/2.0.0-beta1", SUBSCRIBE: "/vac/waku/filter-subscribe/2.0.0-beta1",
PUSH: "/vac/waku/filter-push/2.0.0-beta1", PUSH: "/vac/waku/filter-push/2.0.0-beta1"
}; };
class Subscription { class Subscription {
@ -59,7 +59,7 @@ class Subscription {
constructor( constructor(
pubSubTopic: PubSubTopic, pubSubTopic: PubSubTopic,
remotePeer: Peer, remotePeer: Peer,
newStream: (peer: Peer) => Promise<Stream>, newStream: (peer: Peer) => Promise<Stream>
) { ) {
this.peer = remotePeer; this.peer = remotePeer;
this.pubSubTopic = pubSubTopic; this.pubSubTopic = pubSubTopic;
@ -69,7 +69,7 @@ class Subscription {
async subscribe<T extends IDecodedMessage>( async subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[], decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>, callback: Callback<T>
): Promise<void> { ): Promise<void> {
const decodersArray = Array.isArray(decoders) ? decoders : [decoders]; const decodersArray = Array.isArray(decoders) ? decoders : [decoders];
const decodersGroupedByCT = groupByContentTopic(decodersArray); const decodersGroupedByCT = groupByContentTopic(decodersArray);
@ -79,7 +79,7 @@ class Subscription {
const request = FilterSubscribeRpc.createSubscribeRequest( const request = FilterSubscribeRpc.createSubscribeRequest(
this.pubSubTopic, this.pubSubTopic,
contentTopics, contentTopics
); );
try { try {
@ -88,7 +88,7 @@ class Subscription {
lp.encode, lp.encode,
stream, stream,
lp.decode, lp.decode,
async (source) => await all(source), async (source) => await all(source)
); );
const { statusCode, requestId, statusDesc } = const { statusCode, requestId, statusDesc } =
@ -96,7 +96,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) { if (statusCode < 200 || statusCode >= 300) {
throw new Error( throw new Error(
`Filter subscribe request ${requestId} failed with status code ${statusCode}: ${statusDesc}`, `Filter subscribe request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
); );
} }
@ -104,7 +104,7 @@ class Subscription {
"Subscribed to peer ", "Subscribed to peer ",
this.peer.id.toString(), this.peer.id.toString(),
"for content topics", "for content topics",
contentTopics, contentTopics
); );
} catch (e) { } catch (e) {
throw new Error( throw new Error(
@ -113,7 +113,7 @@ class Subscription {
" for content topics: " + " for content topics: " +
contentTopics + contentTopics +
": " + ": " +
e, e
); );
} }
@ -125,7 +125,7 @@ class Subscription {
// Decoder that decode to different implementations of `IDecodedMessage` // Decoder that decode to different implementations of `IDecodedMessage`
const subscriptionCallback = { const subscriptionCallback = {
decoders, decoders,
callback, callback
} as unknown as SubscriptionCallback<IDecodedMessage>; } as unknown as SubscriptionCallback<IDecodedMessage>;
// The callback and decoder may override previous values, this is on // The callback and decoder may override previous values, this is on
@ -138,7 +138,7 @@ class Subscription {
const stream = await this.newStream(this.peer); const stream = await this.newStream(this.peer);
const unsubscribeRequest = FilterSubscribeRpc.createUnsubscribeRequest( const unsubscribeRequest = FilterSubscribeRpc.createUnsubscribeRequest(
this.pubSubTopic, this.pubSubTopic,
contentTopics, contentTopics
); );
try { try {
@ -163,7 +163,7 @@ class Subscription {
lp.encode, lp.encode,
stream, stream,
lp.decode, lp.decode,
async (source) => await all(source), async (source) => await all(source)
); );
const { statusCode, requestId, statusDesc } = const { statusCode, requestId, statusDesc } =
@ -171,7 +171,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) { if (statusCode < 200 || statusCode >= 300) {
throw new Error( throw new Error(
`Filter ping request ${requestId} failed with status code ${statusCode}: ${statusDesc}`, `Filter ping request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
); );
} }
@ -186,7 +186,7 @@ class Subscription {
const stream = await this.newStream(this.peer); const stream = await this.newStream(this.peer);
const request = FilterSubscribeRpc.createUnsubscribeAllRequest( const request = FilterSubscribeRpc.createUnsubscribeAllRequest(
this.pubSubTopic, this.pubSubTopic
); );
try { try {
@ -195,7 +195,7 @@ class Subscription {
lp.encode, lp.encode,
stream, stream,
lp.decode, lp.decode,
async (source) => await all(source), async (source) => await all(source)
); );
const { statusCode, requestId, statusDesc } = const { statusCode, requestId, statusDesc } =
@ -203,7 +203,7 @@ class Subscription {
if (statusCode < 200 || statusCode >= 300) { if (statusCode < 200 || statusCode >= 300) {
throw new Error( throw new Error(
`Filter unsubscribe all request ${requestId} failed with status code ${statusCode}: ${statusDesc}`, `Filter unsubscribe all request ${requestId} failed with status code ${statusCode}: ${statusDesc}`
); );
} }
@ -231,7 +231,7 @@ class Filter extends BaseProtocol implements IReceiver {
private getActiveSubscription( private getActiveSubscription(
pubSubTopic: PubSubTopic, pubSubTopic: PubSubTopic,
peerIdStr: PeerIdStr, peerIdStr: PeerIdStr
): Subscription | undefined { ): Subscription | undefined {
return this.activeSubscriptions.get(`${pubSubTopic}_${peerIdStr}`); return this.activeSubscriptions.get(`${pubSubTopic}_${peerIdStr}`);
} }
@ -239,7 +239,7 @@ class Filter extends BaseProtocol implements IReceiver {
private setActiveSubscription( private setActiveSubscription(
pubSubTopic: PubSubTopic, pubSubTopic: PubSubTopic,
peerIdStr: PeerIdStr, peerIdStr: PeerIdStr,
subscription: Subscription, subscription: Subscription
): Subscription { ): Subscription {
this.activeSubscriptions.set(`${pubSubTopic}_${peerIdStr}`, subscription); this.activeSubscriptions.set(`${pubSubTopic}_${peerIdStr}`, subscription);
return subscription; return subscription;
@ -259,7 +259,7 @@ class Filter extends BaseProtocol implements IReceiver {
async createSubscription( async createSubscription(
pubSubTopic?: string, pubSubTopic?: string,
peerId?: PeerId, peerId?: PeerId
): Promise<Subscription> { ): Promise<Subscription> {
const _pubSubTopic = const _pubSubTopic =
pubSubTopic ?? this.options.pubSubTopic ?? DefaultPubSubTopic; pubSubTopic ?? this.options.pubSubTopic ?? DefaultPubSubTopic;
@ -271,7 +271,7 @@ class Filter extends BaseProtocol implements IReceiver {
this.setActiveSubscription( this.setActiveSubscription(
_pubSubTopic, _pubSubTopic,
peer.id.toString(), peer.id.toString(),
new Subscription(_pubSubTopic, peer, this.newStream.bind(this, peer)), new Subscription(_pubSubTopic, peer, this.newStream.bind(this, peer))
); );
return subscription; return subscription;
@ -279,7 +279,7 @@ class Filter extends BaseProtocol implements IReceiver {
public toSubscriptionIterator<T extends IDecodedMessage>( public toSubscriptionIterator<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[], decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions | undefined, opts?: ProtocolOptions | undefined
): Promise<IAsyncIterator<T>> { ): Promise<IAsyncIterator<T>> {
return toAsyncIterator(this, decoders, opts); return toAsyncIterator(this, decoders, opts);
} }
@ -302,7 +302,7 @@ class Filter extends BaseProtocol implements IReceiver {
async subscribe<T extends IDecodedMessage>( async subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[], decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>, callback: Callback<T>,
opts?: ProtocolOptions, opts?: ProtocolOptions
): Promise<Unsubscribe> { ): Promise<Unsubscribe> {
const subscription = await this.createSubscription(undefined, opts?.peerId); const subscription = await this.createSubscription(undefined, opts?.peerId);
@ -310,8 +310,8 @@ class Filter extends BaseProtocol implements IReceiver {
const contentTopics = Array.from( const contentTopics = Array.from(
groupByContentTopic( groupByContentTopic(
Array.isArray(decoders) ? decoders : [decoders], Array.isArray(decoders) ? decoders : [decoders]
).keys(), ).keys()
); );
return async () => { return async () => {
@ -341,7 +341,7 @@ class Filter extends BaseProtocol implements IReceiver {
const peerIdStr = streamData.connection.remotePeer.toString(); const peerIdStr = streamData.connection.remotePeer.toString();
const subscription = this.getActiveSubscription( const subscription = this.getActiveSubscription(
pubsubTopic, pubsubTopic,
peerIdStr, peerIdStr
); );
if (!subscription) { if (!subscription) {
@ -357,7 +357,7 @@ class Filter extends BaseProtocol implements IReceiver {
}, },
(e) => { (e) => {
log("Error with receiving pipe", e); log("Error with receiving pipe", e);
}, }
); );
} catch (e) { } catch (e) {
log("Error decoding message", e); log("Error decoding message", e);
@ -366,7 +366,7 @@ class Filter extends BaseProtocol implements IReceiver {
} }
export function wakuFilter( export function wakuFilter(
init: Partial<ProtocolCreateOptions> = {}, init: Partial<ProtocolCreateOptions> = {}
): (libp2p: Libp2p) => IFilter { ): (libp2p: Libp2p) => IFilter {
return (libp2p: Libp2p) => new Filter(libp2p, init); return (libp2p: Libp2p) => new Filter(libp2p, init);
} }
@ -374,7 +374,7 @@ export function wakuFilter(
async function pushMessage<T extends IDecodedMessage>( async function pushMessage<T extends IDecodedMessage>(
subscriptionCallback: SubscriptionCallback<T>, subscriptionCallback: SubscriptionCallback<T>,
pubSubTopic: PubSubTopic, pubSubTopic: PubSubTopic,
message: WakuMessage, message: WakuMessage
): Promise<void> { ): Promise<void> {
const { decoders, callback } = subscriptionCallback; const { decoders, callback } = subscriptionCallback;
@ -388,7 +388,7 @@ async function pushMessage<T extends IDecodedMessage>(
const decodePromises = decoders.map((dec) => const decodePromises = decoders.map((dec) =>
dec dec
.fromProtoObj(pubSubTopic, message as IProtoMessage) .fromProtoObj(pubSubTopic, message as IProtoMessage)
.then((decoded) => decoded || Promise.reject("Decoding failed")), .then((decoded) => decoded || Promise.reject("Decoding failed"))
); );
const decodedMessage = await Promise.any(decodePromises); const decodedMessage = await Promise.any(decodePromises);
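The request/response handling in this file (and in the light push and store protocols further down) follows one wire pattern: write the protobuf-encoded RPC onto a length-prefixed libp2p stream, then collect and decode the length-prefixed reply. A condensed sketch of that round trip, using the same `it-pipe`, `it-length-prefixed` and `it-all` helpers the code already imports; `request` and `stream` stand in for the RPC object and the negotiated stream, and the exact decode call is illustrative:

```ts
import all from "it-all";
import * as lp from "it-length-prefixed";
import { pipe } from "it-pipe";
import { Uint8ArrayList } from "uint8arraylist";
import { FilterSubscribeResponse } from "./filter_rpc.js";

// Send the encoded request and gather the length-prefixed response chunks.
const res = await pipe(
  [request.encode()],
  lp.encode,
  stream,
  lp.decode,
  async (source) => await all(source)
);

// Reassemble the chunks before handing them to the protobuf decoder.
const bytes = new Uint8ArrayList();
res.forEach((chunk) => bytes.append(chunk));
const { statusCode, statusDesc } = FilterSubscribeResponse.decode(bytes);
```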


@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { IRelay } from "@waku/interfaces"; import type { IRelay } from "@waku/interfaces";
import type { KeepAliveOptions } from "@waku/interfaces"; import type { KeepAliveOptions } from "@waku/interfaces";
import debug from "debug"; import debug from "debug";
@ -44,7 +44,7 @@ export class KeepAliveManager {
if (relay && relayPeriodSecs !== 0) { if (relay && relayPeriodSecs !== 0) {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic: RelayPingContentTopic, contentTopic: RelayPingContentTopic,
ephemeral: true, ephemeral: true
}); });
const interval = setInterval(() => { const interval = setInterval(() => {
log("Sending Waku Relay ping message"); log("Sending Waku Relay ping message");
@ -73,7 +73,7 @@ export class KeepAliveManager {
public stopAll(): void { public stopAll(): void {
for (const timer of [ for (const timer of [
...Object.values(this.pingKeepAliveTimers), ...Object.values(this.pingKeepAliveTimers),
...Object.values(this.relayKeepAliveTimers), ...Object.values(this.relayKeepAliveTimers)
]) { ]) {
clearInterval(timer); clearInterval(timer);
} }


@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import { import {
IEncoder, IEncoder,
ILightPush, ILightPush,
@ -7,7 +7,7 @@ import {
ProtocolCreateOptions, ProtocolCreateOptions,
ProtocolOptions, ProtocolOptions,
SendError, SendError,
SendResult, SendResult
} from "@waku/interfaces"; } from "@waku/interfaces";
import { PushResponse } from "@waku/proto"; import { PushResponse } from "@waku/proto";
import { isSizeValid } from "@waku/utils"; import { isSizeValid } from "@waku/utils";
@ -41,7 +41,7 @@ class LightPush extends BaseProtocol implements ILightPush {
async send( async send(
encoder: IEncoder, encoder: IEncoder,
message: IMessage, message: IMessage,
opts?: ProtocolOptions, opts?: ProtocolOptions
): Promise<SendResult> { ): Promise<SendResult> {
const { pubSubTopic = DefaultPubSubTopic } = this.options; const { pubSubTopic = DefaultPubSubTopic } = this.options;
@ -56,7 +56,7 @@ class LightPush extends BaseProtocol implements ILightPush {
log("Failed to send waku light push: message is bigger that 1MB"); log("Failed to send waku light push: message is bigger that 1MB");
return { return {
recipients, recipients,
error: SendError.SIZE_TOO_BIG, error: SendError.SIZE_TOO_BIG
}; };
} }
@ -65,7 +65,7 @@ class LightPush extends BaseProtocol implements ILightPush {
log("Failed to encode to protoMessage, aborting push"); log("Failed to encode to protoMessage, aborting push");
return { return {
recipients, recipients,
error: SendError.ENCODE_FAILED, error: SendError.ENCODE_FAILED
}; };
} }
const query = PushRpc.createRequest(protoMessage, pubSubTopic); const query = PushRpc.createRequest(protoMessage, pubSubTopic);
@ -74,7 +74,7 @@ class LightPush extends BaseProtocol implements ILightPush {
lp.encode, lp.encode,
stream, stream,
lp.decode, lp.decode,
async (source) => await all(source), async (source) => await all(source)
); );
try { try {
const bytes = new Uint8ArrayList(); const bytes = new Uint8ArrayList();
@ -100,13 +100,13 @@ class LightPush extends BaseProtocol implements ILightPush {
} }
return { return {
error, error,
recipients, recipients
}; };
} }
} }
export function wakuLightPush( export function wakuLightPush(
init: Partial<ProtocolCreateOptions> = {}, init: Partial<ProtocolCreateOptions> = {}
): (libp2p: Libp2p) => ILightPush { ): (libp2p: Libp2p) => ILightPush {
return (libp2p: Libp2p) => new LightPush(libp2p, init); return (libp2p: Libp2p) => new LightPush(libp2p, init);
} }


@ -7,15 +7,15 @@ export class PushRpc {
static createRequest( static createRequest(
message: proto.WakuMessage, message: proto.WakuMessage,
pubSubTopic: string, pubSubTopic: string
): PushRpc { ): PushRpc {
return new PushRpc({ return new PushRpc({
requestId: uuid(), requestId: uuid(),
request: { request: {
message: message, message: message,
pubsubTopic: pubSubTopic, pubsubTopic: pubSubTopic
}, },
response: undefined, response: undefined
}); });
} }


@ -13,14 +13,14 @@ describe("Waku Message version 0", function () {
fc.uint8Array({ minLength: 1 }), fc.uint8Array({ minLength: 1 }),
async (contentTopic, pubSubTopic, payload) => { async (contentTopic, pubSubTopic, payload) => {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic); const decoder = createDecoder(contentTopic);
const protoResult = await decoder.fromWireToProtoObj(bytes); const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj( const result = (await decoder.fromProtoObj(
pubSubTopic, pubSubTopic,
protoResult!, protoResult!
)) as DecodedMessage; )) as DecodedMessage;
expect(result.contentTopic).to.eq(contentTopic); expect(result.contentTopic).to.eq(contentTopic);
@ -29,8 +29,8 @@ describe("Waku Message version 0", function () {
expect(result.ephemeral).to.be.false; expect(result.ephemeral).to.be.false;
expect(result.payload).to.deep.eq(payload); expect(result.payload).to.deep.eq(payload);
expect(result.timestamp).to.not.be.undefined; expect(result.timestamp).to.not.be.undefined;
}, }
), )
); );
}); });
@ -43,19 +43,19 @@ describe("Waku Message version 0", function () {
async (contentTopic, pubSubTopic, payload) => { async (contentTopic, pubSubTopic, payload) => {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
ephemeral: true, ephemeral: true
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic); const decoder = createDecoder(contentTopic);
const protoResult = await decoder.fromWireToProtoObj(bytes); const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj( const result = (await decoder.fromProtoObj(
pubSubTopic, pubSubTopic,
protoResult!, protoResult!
)) as DecodedMessage; )) as DecodedMessage;
expect(result.ephemeral).to.be.true; expect(result.ephemeral).to.be.true;
}, }
), )
); );
}); });
@ -69,7 +69,7 @@ describe("Waku Message version 0", function () {
// Encode the length of the payload // Encode the length of the payload
// Not a relevant real life example // Not a relevant real life example
const metaSetter = ( const metaSetter = (
msg: IProtoMessage & { meta: undefined }, msg: IProtoMessage & { meta: undefined }
): Uint8Array => { ): Uint8Array => {
const buffer = new ArrayBuffer(4); const buffer = new ArrayBuffer(4);
const view = new DataView(buffer); const view = new DataView(buffer);
@ -80,14 +80,14 @@ describe("Waku Message version 0", function () {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
ephemeral: true, ephemeral: true,
metaSetter, metaSetter
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
const decoder = createDecoder(contentTopic); const decoder = createDecoder(contentTopic);
const protoResult = await decoder.fromWireToProtoObj(bytes); const protoResult = await decoder.fromWireToProtoObj(bytes);
const result = (await decoder.fromProtoObj( const result = (await decoder.fromProtoObj(
pubSubTopic, pubSubTopic,
protoResult!, protoResult!
)) as DecodedMessage; )) as DecodedMessage;
const expectedMeta = metaSetter({ const expectedMeta = metaSetter({
@ -97,12 +97,12 @@ describe("Waku Message version 0", function () {
ephemeral: undefined, ephemeral: undefined,
meta: undefined, meta: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
version: undefined, version: undefined
}); });
expect(result.meta).to.deep.eq(expectedMeta); expect(result.meta).to.deep.eq(expectedMeta);
}, }
), )
); );
}); });
}); });


@ -6,7 +6,7 @@ import type {
IMessage, IMessage,
IMetaSetter, IMetaSetter,
IProtoMessage, IProtoMessage,
IRateLimitProof, IRateLimitProof
} from "@waku/interfaces"; } from "@waku/interfaces";
import { proto_message as proto } from "@waku/proto"; import { proto_message as proto } from "@waku/proto";
import debug from "debug"; import debug from "debug";
@ -20,7 +20,7 @@ export { proto };
export class DecodedMessage implements IDecodedMessage { export class DecodedMessage implements IDecodedMessage {
constructor( constructor(
public pubSubTopic: string, public pubSubTopic: string,
protected proto: proto.WakuMessage, protected proto: proto.WakuMessage
) {} ) {}
get ephemeral(): boolean { get ephemeral(): boolean {
@ -73,7 +73,7 @@ export class Encoder implements IEncoder {
constructor( constructor(
public contentTopic: string, public contentTopic: string,
public ephemeral: boolean = false, public ephemeral: boolean = false,
public metaSetter?: IMetaSetter, public metaSetter?: IMetaSetter
) { ) {
if (!contentTopic || contentTopic === "") { if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified"); throw new Error("Content topic must be specified");
@ -94,7 +94,7 @@ export class Encoder implements IEncoder {
timestamp: BigInt(timestamp.valueOf()) * OneMillion, timestamp: BigInt(timestamp.valueOf()) * OneMillion,
meta: undefined, meta: undefined,
rateLimitProof: message.rateLimitProof, rateLimitProof: message.rateLimitProof,
ephemeral: this.ephemeral, ephemeral: this.ephemeral
}; };
if (this.metaSetter) { if (this.metaSetter) {
@ -118,7 +118,7 @@ export class Encoder implements IEncoder {
export function createEncoder({ export function createEncoder({
contentTopic, contentTopic,
ephemeral, ephemeral,
metaSetter, metaSetter
}: EncoderOptions): Encoder { }: EncoderOptions): Encoder {
return new Encoder(contentTopic, ephemeral, metaSetter); return new Encoder(contentTopic, ephemeral, metaSetter);
} }
@ -140,13 +140,13 @@ export class Decoder implements IDecoder<DecodedMessage> {
timestamp: protoMessage.timestamp ?? undefined, timestamp: protoMessage.timestamp ?? undefined,
meta: protoMessage.meta ?? undefined, meta: protoMessage.meta ?? undefined,
rateLimitProof: protoMessage.rateLimitProof ?? undefined, rateLimitProof: protoMessage.rateLimitProof ?? undefined,
ephemeral: protoMessage.ephemeral ?? false, ephemeral: protoMessage.ephemeral ?? false
}); });
} }
async fromProtoObj( async fromProtoObj(
pubSubTopic: string, pubSubTopic: string,
proto: IProtoMessage, proto: IProtoMessage
): Promise<DecodedMessage | undefined> { ): Promise<DecodedMessage | undefined> {
// https://rfc.vac.dev/spec/14/ // https://rfc.vac.dev/spec/14/
// > If omitted, the value SHOULD be interpreted as version 0. // > If omitted, the value SHOULD be interpreted as version 0.
@ -155,7 +155,7 @@ export class Decoder implements IDecoder<DecodedMessage> {
"Failed to decode due to incorrect version, expected:", "Failed to decode due to incorrect version, expected:",
Version, Version,
", actual:", ", actual:",
proto.version, proto.version
); );
return Promise.resolve(undefined); return Promise.resolve(undefined);
} }


@ -4,7 +4,7 @@ export const DefaultWantedNumber = 1;
export enum Fleet { export enum Fleet {
Prod = "prod", Prod = "prod",
Test = "test", Test = "test"
} }
/** /**
@ -19,7 +19,7 @@ export enum Fleet {
*/ */
export function getPredefinedBootstrapNodes( export function getPredefinedBootstrapNodes(
fleet: Fleet = Fleet.Prod, fleet: Fleet = Fleet.Prod,
wantedNumber: number = DefaultWantedNumber, wantedNumber: number = DefaultWantedNumber
): string[] { ): string[] {
if (wantedNumber <= 0) { if (wantedNumber <= 0) {
return []; return [];
@ -51,8 +51,8 @@ export const fleets = {
"node-01.do-ams3.wakuv2.prod": "node-01.do-ams3.wakuv2.prod":
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmL5okWopX7NqZWBUKVqW8iUxCEmd5GMHLVPwCgzYzQv3e", "/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmL5okWopX7NqZWBUKVqW8iUxCEmd5GMHLVPwCgzYzQv3e",
"node-01.gc-us-central1-a.wakuv2.prod": "node-01.gc-us-central1-a.wakuv2.prod":
"/dns4/node-01.gc-us-central1-a.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmVkKntsECaYfefR1V2yCR79CegLATuTPE6B9TxgxBiiiA", "/dns4/node-01.gc-us-central1-a.wakuv2.prod.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmVkKntsECaYfefR1V2yCR79CegLATuTPE6B9TxgxBiiiA"
}, }
}, },
"wakuv2.test": { "wakuv2.test": {
"waku-websocket": { "waku-websocket": {
@ -61,8 +61,8 @@ export const fleets = {
"node-01.do-ams3.wakuv2.test": "node-01.do-ams3.wakuv2.test":
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ", "/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ",
"node-01.gc-us-central1-a.wakuv2.test": "node-01.gc-us-central1-a.wakuv2.test":
"/dns4/node-01.gc-us-central1-a.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47Rxx5hw3q4YjS", "/dns4/node-01.gc-us-central1-a.wakuv2.test.statusim.net/tcp/8000/wss/p2p/16Uiu2HAmJb2e28qLXxT5kZxVUUoJt72EMzNGXB47Rxx5hw3q4YjS"
}, }
}, }
}, }
}; };


@ -1,7 +1,7 @@
export function pushOrInitMapSet<K, V>( export function pushOrInitMapSet<K, V>(
map: Map<K, Set<V>>, map: Map<K, Set<V>>,
key: K, key: K,
newValue: V, newValue: V
): void { ): void {
let arr = map.get(key); let arr = map.get(key);
if (typeof arr === "undefined") { if (typeof arr === "undefined") {


@ -6,7 +6,7 @@ const OneMillion = BigInt(1_000_000);
export enum PageDirection { export enum PageDirection {
BACKWARD = "backward", BACKWARD = "backward",
FORWARD = "forward", FORWARD = "forward"
} }
export interface Params { export interface Params {
@ -43,7 +43,7 @@ export class HistoryRpc {
const pagingInfo = { const pagingInfo = {
pageSize: BigInt(params.pageSize), pageSize: BigInt(params.pageSize),
cursor: params.cursor, cursor: params.cursor,
direction, direction
} as proto.PagingInfo; } as proto.PagingInfo;
let startTime, endTime; let startTime, endTime;
@ -63,9 +63,9 @@ export class HistoryRpc {
contentFilters, contentFilters,
pagingInfo, pagingInfo,
startTime, startTime,
endTime, endTime
}, },
response: undefined, response: undefined
}); });
} }
@ -80,7 +80,7 @@ export class HistoryRpc {
} }
function directionToProto( function directionToProto(
pageDirection: PageDirection, pageDirection: PageDirection
): proto.PagingInfo.Direction { ): proto.PagingInfo.Direction {
switch (pageDirection) { switch (pageDirection) {
case PageDirection.BACKWARD: case PageDirection.BACKWARD:


@ -1,5 +1,5 @@
import type { Stream } from "@libp2p/interface-connection"; import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import { sha256 } from "@noble/hashes/sha256"; import { sha256 } from "@noble/hashes/sha256";
import { import {
Cursor, Cursor,
@ -7,7 +7,7 @@ import {
IDecoder, IDecoder,
IStore, IStore,
Libp2p, Libp2p,
ProtocolCreateOptions, ProtocolCreateOptions
} from "@waku/interfaces"; } from "@waku/interfaces";
import { proto_store as proto } from "@waku/proto"; import { proto_store as proto } from "@waku/proto";
import { isDefined } from "@waku/utils"; import { isDefined } from "@waku/utils";
@ -106,7 +106,7 @@ class Store extends BaseProtocol implements IStore {
async queryOrderedCallback<T extends IDecodedMessage>( async queryOrderedCallback<T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
callback: (message: T) => Promise<void | boolean> | boolean | void, callback: (message: T) => Promise<void | boolean> | boolean | void,
options?: QueryOptions, options?: QueryOptions
): Promise<void> { ): Promise<void> {
let abort = false; let abort = false;
for await (const promises of this.queryGenerator(decoders, options)) { for await (const promises of this.queryGenerator(decoders, options)) {
@ -129,7 +129,7 @@ class Store extends BaseProtocol implements IStore {
if (msg && !abort) { if (msg && !abort) {
abort = Boolean(await callback(msg)); abort = Boolean(await callback(msg));
} }
}), })
); );
} }
} }
@ -155,9 +155,9 @@ class Store extends BaseProtocol implements IStore {
async queryCallbackOnPromise<T extends IDecodedMessage>( async queryCallbackOnPromise<T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
callback: ( callback: (
message: Promise<T | undefined>, message: Promise<T | undefined>
) => Promise<void | boolean> | boolean | void, ) => Promise<void | boolean> | boolean | void,
options?: QueryOptions, options?: QueryOptions
): Promise<void> { ): Promise<void> {
let abort = false; let abort = false;
let promises: Promise<void>[] = []; let promises: Promise<void>[] = [];
@ -192,7 +192,7 @@ class Store extends BaseProtocol implements IStore {
*/ */
async *queryGenerator<T extends IDecodedMessage>( async *queryGenerator<T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
options?: QueryOptions, options?: QueryOptions
): AsyncGenerator<Promise<T | undefined>[]> { ): AsyncGenerator<Promise<T | undefined>[]> {
const { pubSubTopic = DefaultPubSubTopic } = this.options; const { pubSubTopic = DefaultPubSubTopic } = this.options;
@ -207,7 +207,7 @@ class Store extends BaseProtocol implements IStore {
decoders.forEach((dec) => { decoders.forEach((dec) => {
if (decodersAsMap.has(dec.contentTopic)) { if (decodersAsMap.has(dec.contentTopic)) {
throw new Error( throw new Error(
"API does not support different decoder per content topic", "API does not support different decoder per content topic"
); );
} }
decodersAsMap.set(dec.contentTopic, dec); decodersAsMap.set(dec.contentTopic, dec);
@ -219,15 +219,15 @@ class Store extends BaseProtocol implements IStore {
{ {
pubSubTopic: pubSubTopic, pubSubTopic: pubSubTopic,
pageDirection: PageDirection.BACKWARD, pageDirection: PageDirection.BACKWARD,
pageSize: DefaultPageSize, pageSize: DefaultPageSize
}, },
options, options,
{ contentTopics, startTime, endTime }, { contentTopics, startTime, endTime }
); );
log("Querying history with the following options", { log("Querying history with the following options", {
...options, ...options,
peerId: options?.peerId?.toString(), peerId: options?.peerId?.toString()
}); });
const peer = await this.getPeer(options?.peerId); const peer = await this.getPeer(options?.peerId);
@ -236,7 +236,7 @@ class Store extends BaseProtocol implements IStore {
this.newStream.bind(this, peer), this.newStream.bind(this, peer),
queryOpts, queryOpts,
decodersAsMap, decodersAsMap,
options?.cursor, options?.cursor
)) { )) {
yield messages; yield messages;
} }
@ -247,14 +247,14 @@ async function* paginate<T extends IDecodedMessage>(
streamFactory: () => Promise<Stream>, streamFactory: () => Promise<Stream>,
queryOpts: Params, queryOpts: Params,
decoders: Map<string, IDecoder<T>>, decoders: Map<string, IDecoder<T>>,
cursor?: Cursor, cursor?: Cursor
): AsyncGenerator<Promise<T | undefined>[]> { ): AsyncGenerator<Promise<T | undefined>[]> {
if ( if (
queryOpts.contentTopics.toString() !== queryOpts.contentTopics.toString() !==
Array.from(decoders.keys()).toString() Array.from(decoders.keys()).toString()
) { ) {
throw new Error( throw new Error(
"Internal error, the decoders should match the query's content topics", "Internal error, the decoders should match the query's content topics"
); );
} }
@ -267,7 +267,7 @@ async function* paginate<T extends IDecodedMessage>(
log( log(
"Querying store peer", "Querying store peer",
`for (${queryOpts.pubSubTopic})`, `for (${queryOpts.pubSubTopic})`,
queryOpts.contentTopics, queryOpts.contentTopics
); );
const stream = await streamFactory(); const stream = await streamFactory();
@ -277,7 +277,7 @@ async function* paginate<T extends IDecodedMessage>(
lp.encode, lp.encode,
stream, stream,
lp.decode, lp.decode,
async (source) => await all(source), async (source) => await all(source)
); );
const bytes = new Uint8ArrayList(); const bytes = new Uint8ArrayList();
@ -300,7 +300,7 @@ async function* paginate<T extends IDecodedMessage>(
if (!response.messages || !response.messages.length) { if (!response.messages || !response.messages.length) {
log( log(
"Stopping pagination due to store `response.messages` field missing or empty", "Stopping pagination due to store `response.messages` field missing or empty"
); );
break; break;
} }
@ -314,7 +314,7 @@ async function* paginate<T extends IDecodedMessage>(
if (decoder) { if (decoder) {
return decoder.fromProtoObj( return decoder.fromProtoObj(
queryOpts.pubSubTopic, queryOpts.pubSubTopic,
toProtoMessage(protoMsg), toProtoMessage(protoMsg)
); );
} }
} }
@ -326,7 +326,7 @@ async function* paginate<T extends IDecodedMessage>(
// If the server does not return cursor then there is an issue, // If the server does not return cursor then there is an issue,
// Need to abort, or we end up in an infinite loop // Need to abort, or we end up in an infinite loop
log( log(
"Stopping pagination due to `response.pagingInfo.cursor` missing from store response", "Stopping pagination due to `response.pagingInfo.cursor` missing from store response"
); );
break; break;
} }
@ -348,7 +348,7 @@ async function* paginate<T extends IDecodedMessage>(
export async function createCursor( export async function createCursor(
message: IDecodedMessage, message: IDecodedMessage,
pubsubTopic: string = DefaultPubSubTopic, pubsubTopic: string = DefaultPubSubTopic
): Promise<Cursor> { ): Promise<Cursor> {
if ( if (
!message || !message ||
@ -369,12 +369,12 @@ export async function createCursor(
digest, digest,
pubsubTopic, pubsubTopic,
senderTime: messageTime, senderTime: messageTime,
receiverTime: messageTime, receiverTime: messageTime
}; };
} }
export function wakuStore( export function wakuStore(
init: Partial<ProtocolCreateOptions> = {}, init: Partial<ProtocolCreateOptions> = {}
): (libp2p: Libp2p) => IStore { ): (libp2p: Libp2p) => IStore {
return (libp2p: Libp2p) => new Store(libp2p, init); return (libp2p: Libp2p) => new Store(libp2p, init);
} }


@ -7,7 +7,7 @@ describe("to proto message", () => {
it("Fields are not dropped", () => { it("Fields are not dropped", () => {
const wire: WakuMessageProto = { const wire: WakuMessageProto = {
payload: new Uint8Array(), payload: new Uint8Array(),
contentTopic: "foo", contentTopic: "foo"
}; };
const protoMessage = toProtoMessage(wire); const protoMessage = toProtoMessage(wire);


@ -8,7 +8,7 @@ const EmptyMessage: IProtoMessage = {
timestamp: undefined, timestamp: undefined,
meta: undefined, meta: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
ephemeral: undefined, ephemeral: undefined
}; };
export function toProtoMessage(wire: WakuMessageProto): IProtoMessage { export function toProtoMessage(wire: WakuMessageProto): IProtoMessage {


@ -1,4 +1,4 @@
import type { IdentifyResult } from "@libp2p/interface-libp2p"; import type { IdentifyResult } from "@libp2p/interface";
import type { IBaseProtocol, IRelay, Waku } from "@waku/interfaces"; import type { IBaseProtocol, IRelay, Waku } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces";
import debug from "debug"; import debug from "debug";
@ -28,7 +28,7 @@ const log = debug("waku:wait-for-remote-peer");
export async function waitForRemotePeer( export async function waitForRemotePeer(
waku: Waku, waku: Waku,
protocols?: Protocols[], protocols?: Protocols[],
timeoutMs?: number, timeoutMs?: number
): Promise<void> { ): Promise<void> {
protocols = protocols ?? getEnabledProtocols(waku); protocols = protocols ?? getEnabledProtocols(waku);
@ -64,7 +64,7 @@ export async function waitForRemotePeer(
await rejectOnTimeout( await rejectOnTimeout(
Promise.all(promises), Promise.all(promises),
timeoutMs, timeoutMs,
"Timed out waiting for a remote peer.", "Timed out waiting for a remote peer."
); );
} else { } else {
await Promise.all(promises); await Promise.all(promises);
@ -114,7 +114,7 @@ const awaitTimeout = (ms: number, rejectReason: string): Promise<void> =>
async function rejectOnTimeout<T>( async function rejectOnTimeout<T>(
promise: Promise<T>, promise: Promise<T>,
timeoutMs: number, timeoutMs: number,
rejectReason: string, rejectReason: string
): Promise<void> { ): Promise<void> {
await Promise.race([promise, awaitTimeout(timeoutMs, rejectReason)]); await Promise.race([promise, awaitTimeout(timeoutMs, rejectReason)]);
} }
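A short usage sketch for the signature above; `waku` stands for an already-started node, and `Protocols` is the enum imported from `@waku/interfaces` at the top of this file:

```ts
import { Protocols } from "@waku/interfaces";

// Wait up to 10 s for remote peers that support Store and LightPush;
// rejects with "Timed out waiting for a remote peer." if the timeout elapses.
await waitForRemotePeer(waku, [Protocols.Store, Protocols.LightPush], 10_000);
```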


@ -1,5 +1,5 @@
import type { Stream } from "@libp2p/interface-connection"; import type { Stream } from "@libp2p/interface/connection";
import { isPeerId, PeerId } from "@libp2p/interface-peer-id"; import { isPeerId, PeerId } from "@libp2p/interface/peer-id";
import { multiaddr, Multiaddr, MultiaddrInput } from "@multiformats/multiaddr"; import { multiaddr, Multiaddr, MultiaddrInput } from "@multiformats/multiaddr";
import type { import type {
IFilter, IFilter,
@ -7,7 +7,7 @@ import type {
IRelay, IRelay,
IStore, IStore,
Libp2p, Libp2p,
Waku, Waku
} from "@waku/interfaces"; } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces"; import { Protocols } from "@waku/interfaces";
import debug from "debug"; import debug from "debug";
@ -56,7 +56,7 @@ export class WakuNode implements Waku {
store?: (libp2p: Libp2p) => IStore, store?: (libp2p: Libp2p) => IStore,
lightPush?: (libp2p: Libp2p) => ILightPush, lightPush?: (libp2p: Libp2p) => ILightPush,
filter?: (libp2p: Libp2p) => IFilter, filter?: (libp2p: Libp2p) => IFilter,
relay?: (libp2p: Libp2p) => IRelay, relay?: (libp2p: Libp2p) => IRelay
) { ) {
this.libp2p = libp2p; this.libp2p = libp2p;
@ -86,14 +86,14 @@ export class WakuNode implements Waku {
peerId, peerId,
libp2p, libp2p,
{ pingKeepAlive, relayKeepAlive }, { pingKeepAlive, relayKeepAlive },
this.relay, this.relay
); );
log( log(
"Waku node created", "Waku node created",
peerId, peerId,
`relay: ${!!this.relay}, store: ${!!this.store}, light push: ${!!this `relay: ${!!this.relay}, store: ${!!this.store}, light push: ${!!this
.lightPush}, filter: ${!!this.filter}`, .lightPush}, filter: ${!!this.filter}`
); );
} }
@ -105,7 +105,7 @@ export class WakuNode implements Waku {
*/ */
async dial( async dial(
peer: PeerId | MultiaddrInput, peer: PeerId | MultiaddrInput,
protocols?: Protocols[], protocols?: Protocols[]
): Promise<Stream> { ): Promise<Stream> {
const _protocols = protocols ?? []; const _protocols = protocols ?? [];
const peerId = mapToPeerIdOrMultiaddr(peer); const peerId = mapToPeerIdOrMultiaddr(peer);
@ -121,11 +121,11 @@ export class WakuNode implements Waku {
if (_protocols.includes(Protocols.Relay)) { if (_protocols.includes(Protocols.Relay)) {
if (this.relay) { if (this.relay) {
this.relay.gossipSub.multicodecs.forEach((codec: string) => this.relay.gossipSub.multicodecs.forEach((codec: string) =>
codecs.push(codec), codecs.push(codec)
); );
} else { } else {
log( log(
"Relay codec not included in dial codec: protocol not mounted locally", "Relay codec not included in dial codec: protocol not mounted locally"
); );
} }
} }
@ -134,7 +134,7 @@ export class WakuNode implements Waku {
codecs.push(this.store.multicodec); codecs.push(this.store.multicodec);
} else { } else {
log( log(
"Store codec not included in dial codec: protocol not mounted locally", "Store codec not included in dial codec: protocol not mounted locally"
); );
} }
} }
@ -143,7 +143,7 @@ export class WakuNode implements Waku {
codecs.push(this.lightPush.multicodec); codecs.push(this.lightPush.multicodec);
} else { } else {
log( log(
"Light Push codec not included in dial codec: protocol not mounted locally", "Light Push codec not included in dial codec: protocol not mounted locally"
); );
} }
} }
@ -152,7 +152,7 @@ export class WakuNode implements Waku {
codecs.push(this.filter.multicodec); codecs.push(this.filter.multicodec);
} else { } else {
log( log(
"Filter codec not included in dial codec: protocol not mounted locally", "Filter codec not included in dial codec: protocol not mounted locally"
); );
} }
} }
@ -191,7 +191,7 @@ export class WakuNode implements Waku {
} }
} }
function mapToPeerIdOrMultiaddr( function mapToPeerIdOrMultiaddr(
peerId: PeerId | MultiaddrInput, peerId: PeerId | MultiaddrInput
): PeerId | Multiaddr { ): PeerId | Multiaddr {
return isPeerId(peerId) ? peerId : multiaddr(peerId); return isPeerId(peerId) ? peerId : multiaddr(peerId);
} }
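
The WakuNode hunks above repeat the two mechanical changes that run through this whole diff: libp2p type imports move from the older per-interface packages (for example "@libp2p/interface-connection" and "@libp2p/interface-peer-id") to subpath exports of the single "@libp2p/interface" package, and trailing commas after final parameters and arguments are dropped. A minimal sketch of the new style, assuming only the subpath exports already used in the hunks above; describePeer is an illustrative name, not part of the codebase:

// New-style imports, mirroring the hunks above (subpaths of "@libp2p/interface").
import type { Stream } from "@libp2p/interface/connection";
import { isPeerId, PeerId } from "@libp2p/interface/peer-id";

// Hypothetical helper, formatted without trailing commas to match the
// style change applied throughout this diff.
export function describePeer(
  peer: PeerId | string,
  stream?: Stream
): string {
  const id = isPeerId(peer) ? peer.toString() : peer;
  return `${id} (${stream ? "stream open" : "no stream"})`;
}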


@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };


@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"], frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"], files: ["src/**/!(node).spec.ts"],
preprocessors: { preprocessors: {
"src/**/!(node).spec.ts": ["webpack"], "src/**/!(node).spec.ts": ["webpack"]
}, },
envPreprocessor: ["CI"], envPreprocessor: ["CI"],
reporters: ["progress"], reporters: ["progress"],
@ -14,8 +14,8 @@ module.exports = function (config) {
singleRun: true, singleRun: true,
client: { client: {
mocha: { mocha: {
timeout: 6000, // Default is 2s timeout: 6000 // Default is 2s
}, }
}, },
webpack: { webpack: {
mode: "development", mode: "development",
@ -27,31 +27,31 @@ module.exports = function (config) {
{ {
loader: "ts-loader", loader: "ts-loader",
options: { options: {
configFile: "tsconfig.karma.json", configFile: "tsconfig.karma.json"
}, }
}, }
], ]
}, }
], ]
}, },
plugins: [ plugins: [
new webpack.DefinePlugin({ new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false, "process.env.CI": process.env.CI || false
}), }),
new webpack.ProvidePlugin({ new webpack.ProvidePlugin({
process: "process/browser.js", process: "process/browser.js"
}), })
], ],
resolve: { resolve: {
extensions: [".ts", ".tsx", ".js"], extensions: [".ts", ".tsx", ".js"],
extensionAlias: { extensionAlias: {
".js": [".js", ".ts"], ".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"], ".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"], ".mjs": [".mjs", ".mts"]
}, }
}, },
stats: { warnings: false }, stats: { warnings: false },
devtool: "inline-source-map", devtool: "inline-source-map"
}, }
}); });
}; };


@ -51,8 +51,6 @@
"node": ">=16" "node": ">=16"
}, },
"dependencies": { "dependencies": {
"@libp2p/interface-peer-discovery": "^2.0.0",
"@libp2p/interfaces": "^3.3.2",
"@waku/enr": "0.0.16", "@waku/enr": "0.0.16",
"@waku/utils": "0.0.10", "@waku/utils": "0.0.10",
"debug": "^4.3.4", "debug": "^4.3.4",
@ -61,8 +59,6 @@
"uint8arrays": "^4.0.4" "uint8arrays": "^4.0.4"
}, },
"devDependencies": { "devDependencies": {
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/peer-id": "^2.0.4", "@libp2p/peer-id": "^2.0.4",
"@libp2p/peer-id-factory": "^2.0.4", "@libp2p/peer-id-factory": "^2.0.4",
"@multiformats/multiaddr": "^12.0.0", "@multiformats/multiaddr": "^12.0.0",


@ -11,14 +11,14 @@ export default {
input, input,
output: { output: {
dir: "bundle", dir: "bundle",
format: "esm", format: "esm"
}, },
plugins: [ plugins: [
commonjs(), commonjs(),
json(), json(),
nodeResolve({ nodeResolve({
browser: true, browser: true,
preferBuiltins: false, preferBuiltins: false
}), })
], ]
}; };


@ -19,7 +19,7 @@ const singleBranch = `enrtree-branch:${branchDomainA}`;
const doubleBranch = `enrtree-branch:${branchDomainA},${branchDomainB}`; const doubleBranch = `enrtree-branch:${branchDomainA},${branchDomainB}`;
const multiComponentBranch = [ const multiComponentBranch = [
`enrtree-branch:${branchDomainA},${partialBranchA}`, `enrtree-branch:${branchDomainA},${partialBranchA}`,
`${partialBranchB},${branchDomainB}`, `${partialBranchB},${branchDomainB}`
]; ];
// Note: once td.when is asked to throw for an input it will always throw. // Note: once td.when is asked to throw for an input it will always throw.
@ -72,7 +72,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(1); expect(peers.length).to.eq(1);
@ -88,7 +88,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(0); expect(peers.length).to.eq(0);
@ -102,7 +102,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
let peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { let peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(0); expect(peers.length).to.eq(0);
@ -120,7 +120,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(0); expect(peers.length).to.eq(0);
}); });
@ -129,7 +129,7 @@ describe("DNS Node Discovery", () => {
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrBranchBadPrefix]); mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrBranchBadPrefix]);
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(0); expect(peers.length).to.eq(0);
}); });
@ -140,7 +140,7 @@ describe("DNS Node Discovery", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peersA = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peersA = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peersA.length).to.eq(1); expect(peersA.length).to.eq(1);
@ -149,7 +149,7 @@ describe("DNS Node Discovery", () => {
mockDns.addThrow(`${branchDomainD}.${host}`); mockDns.addThrow(`${branchDomainD}.${host}`);
const peersB = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peersB = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peersB.length).to.eq(1); expect(peersB.length).to.eq(1);
expect(peersA[0].ip).to.eq(peersB[0].ip); expect(peersA[0].ip).to.eq(peersB[0].ip);
@ -169,12 +169,12 @@ describe("DNS Node Discovery w/ capabilities", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(1); expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq( expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx", "16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
); );
}); });
@ -184,12 +184,12 @@ describe("DNS Node Discovery w/ capabilities", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 1, store: 1,
relay: 1, relay: 1
}); });
expect(peers.length).to.eq(1); expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq( expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F", "16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
); );
}); });
@ -198,19 +198,19 @@ describe("DNS Node Discovery w/ capabilities", () => {
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 1, store: 1
}); });
expect(peers.length).to.eq(1); expect(peers.length).to.eq(1);
expect(peers[0].peerId?.toString()).to.eq( expect(peers[0].peerId?.toString()).to.eq(
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd", "16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
); );
}); });
it("retrieves all peers (2) when cannot fulfill all requirements", async () => { it("retrieves all peers (2) when cannot fulfill all requirements", async () => {
mockDns.addRes(`${rootDomain}.${host}`, [doubleBranch]); mockDns.addRes(`${rootDomain}.${host}`, [doubleBranch]);
mockDns.addRes(`${branchDomainA}.${host}`, [ mockDns.addRes(`${branchDomainA}.${host}`, [
mockData.enrWithWaku2RelayStore, mockData.enrWithWaku2RelayStore
]); ]);
mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]); mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);
@ -218,45 +218,45 @@ describe("DNS Node Discovery w/ capabilities", () => {
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 1, store: 1,
relay: 2, relay: 2,
filter: 1, filter: 1
}); });
expect(peers.length).to.eq(2); expect(peers.length).to.eq(2);
const peerIds = peers.map((p) => p.peerId?.toString()); const peerIds = peers.map((p) => p.peerId?.toString());
expect(peerIds).to.contain( expect(peerIds).to.contain(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F", "16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
); );
expect(peerIds).to.contain( expect(peerIds).to.contain(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx", "16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
); );
}); });
it("retrieves all peers (3) when branch entries are composed of multiple strings", async function () { it("retrieves all peers (3) when branch entries are composed of multiple strings", async function () {
mockDns.addRes(`${rootDomain}.${host}`, multiComponentBranch); mockDns.addRes(`${rootDomain}.${host}`, multiComponentBranch);
mockDns.addRes(`${branchDomainA}.${host}`, [ mockDns.addRes(`${branchDomainA}.${host}`, [
mockData.enrWithWaku2RelayStore, mockData.enrWithWaku2RelayStore
]); ]);
mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]); mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);
mockDns.addRes(`${partialBranchA}${partialBranchB}.${host}`, [ mockDns.addRes(`${partialBranchA}${partialBranchB}.${host}`, [
mockData.enrWithWaku2Store, mockData.enrWithWaku2Store
]); ]);
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns); const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
store: 2, store: 2,
relay: 2, relay: 2
}); });
expect(peers.length).to.eq(3); expect(peers.length).to.eq(3);
const peerIds = peers.map((p) => p.peerId?.toString()); const peerIds = peers.map((p) => p.peerId?.toString());
expect(peerIds).to.contain( expect(peerIds).to.contain(
"16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F", "16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
); );
expect(peerIds).to.contain( expect(peerIds).to.contain(
"16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx", "16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
); );
expect(peerIds).to.contain( expect(peerIds).to.contain(
"16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd", "16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
); );
}); });
}); });
@ -278,7 +278,7 @@ describe("DNS Node Discovery [live data]", function () {
relay: maxQuantity, relay: maxQuantity,
store: maxQuantity, store: maxQuantity,
filter: maxQuantity, filter: maxQuantity,
lightPush: maxQuantity, lightPush: maxQuantity
}); });
expect(peers.length).to.eq(maxQuantity); expect(peers.length).to.eq(maxQuantity);
@ -301,7 +301,7 @@ describe("DNS Node Discovery [live data]", function () {
relay: maxQuantity, relay: maxQuantity,
store: maxQuantity, store: maxQuantity,
filter: maxQuantity, filter: maxQuantity,
lightPush: maxQuantity, lightPush: maxQuantity
}); });
expect(peers.length).to.eq(maxQuantity); expect(peers.length).to.eq(maxQuantity);


@ -6,7 +6,7 @@ import { DnsOverHttps } from "./dns_over_https.js";
import { ENRTree } from "./enrtree.js"; import { ENRTree } from "./enrtree.js";
import { import {
fetchNodesUntilCapabilitiesFulfilled, fetchNodesUntilCapabilitiesFulfilled,
yieldNodesUntilCapabilitiesFulfilled, yieldNodesUntilCapabilitiesFulfilled
} from "./fetch_nodes.js"; } from "./fetch_nodes.js";
const log = debug("waku:discovery:dns"); const log = debug("waku:discovery:dns");
@ -34,7 +34,7 @@ export class DnsNodeDiscovery {
private readonly _errorTolerance: number = 10; private readonly _errorTolerance: number = 10;
public static async dnsOverHttp( public static async dnsOverHttp(
dnsClient?: DnsClient, dnsClient?: DnsClient
): Promise<DnsNodeDiscovery> { ): Promise<DnsNodeDiscovery> {
if (!dnsClient) { if (!dnsClient) {
dnsClient = await DnsOverHttps.create(); dnsClient = await DnsOverHttps.create();
@ -51,29 +51,29 @@ export class DnsNodeDiscovery {
*/ */
async getPeers( async getPeers(
enrTreeUrls: string[], enrTreeUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>, wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
): Promise<IEnr[]> { ): Promise<IEnr[]> {
const networkIndex = Math.floor(Math.random() * enrTreeUrls.length); const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]); const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
const context: SearchContext = { const context: SearchContext = {
domain, domain,
publicKey, publicKey,
visits: {}, visits: {}
}; };
const peers = await fetchNodesUntilCapabilitiesFulfilled( const peers = await fetchNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount, wantedNodeCapabilityCount,
this._errorTolerance, this._errorTolerance,
() => this._search(domain, context), () => this._search(domain, context)
); );
log( log(
"retrieved peers: ", "retrieved peers: ",
peers.map((peer) => { peers.map((peer) => {
return { return {
id: peer.peerId?.toString(), id: peer.peerId?.toString(),
multiaddrs: peer.multiaddrs?.map((ma) => ma.toString()), multiaddrs: peer.multiaddrs?.map((ma) => ma.toString())
}; };
}), })
); );
return peers; return peers;
} }
@ -88,20 +88,20 @@ export class DnsNodeDiscovery {
*/ */
async *getNextPeer( async *getNextPeer(
enrTreeUrls: string[], enrTreeUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>, wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
): AsyncGenerator<IEnr> { ): AsyncGenerator<IEnr> {
const networkIndex = Math.floor(Math.random() * enrTreeUrls.length); const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]); const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
const context: SearchContext = { const context: SearchContext = {
domain, domain,
publicKey, publicKey,
visits: {}, visits: {}
}; };
for await (const peer of yieldNodesUntilCapabilitiesFulfilled( for await (const peer of yieldNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount, wantedNodeCapabilityCount,
this._errorTolerance, this._errorTolerance,
() => this._search(domain, context), () => this._search(domain, context)
)) { )) {
yield peer; yield peer;
} }
@ -113,7 +113,7 @@ export class DnsNodeDiscovery {
*/ */
private async _search( private async _search(
subdomain: string, subdomain: string,
context: SearchContext, context: SearchContext
): Promise<ENR | null> { ): Promise<ENR | null> {
try { try {
const entry = await this._getTXTRecord(subdomain, context); const entry = await this._getTXTRecord(subdomain, context);
@ -139,7 +139,7 @@ export class DnsNodeDiscovery {
} }
} catch (error) { } catch (error) {
log( log(
`Failed to search DNS tree ${entryType} at subdomain ${subdomain}: ${error}`, `Failed to search DNS tree ${entryType} at subdomain ${subdomain}: ${error}`
); );
return null; return null;
} }
@ -157,7 +157,7 @@ export class DnsNodeDiscovery {
*/ */
private async _getTXTRecord( private async _getTXTRecord(
subdomain: string, subdomain: string,
context: SearchContext, context: SearchContext
): Promise<string> { ): Promise<string> {
if (this._DNSTreeCache[subdomain]) { if (this._DNSTreeCache[subdomain]) {
return this._DNSTreeCache[subdomain]; return this._DNSTreeCache[subdomain];


@ -18,7 +18,7 @@ export class DnsOverHttps implements DnsClient {
*/ */
public static async create( public static async create(
endpoints?: Endpoint[], endpoints?: Endpoint[],
retries?: number, retries?: number
): Promise<DnsOverHttps> { ): Promise<DnsOverHttps> {
const _endpoints = endpoints ?? (await wellknown.endpoints("doh")); const _endpoints = endpoints ?? (await wellknown.endpoints("doh"));
@ -27,7 +27,7 @@ export class DnsOverHttps implements DnsClient {
private constructor( private constructor(
private endpoints: Endpoint[], private endpoints: Endpoint[],
private retries: number = 3, private retries: number = 3
) {} ) {}
/** /**
@ -42,12 +42,12 @@ export class DnsOverHttps implements DnsClient {
try { try {
const res = await query( const res = await query(
{ {
question: { type: "TXT", name: domain }, question: { type: "TXT", name: domain }
}, },
{ {
endpoints: this.endpoints, endpoints: this.endpoints,
retries: this.retries, retries: this.retries
}, }
); );
answers = res.answers; answers = res.answers;
} catch (error) { } catch (error) {


@ -19,7 +19,7 @@ describe("ENRTree", () => {
} catch (err: unknown) { } catch (err: unknown) {
const e = err as Error; const e = err as Error;
expect(e.toString()).includes( expect(e.toString()).includes(
"ENRTree root entry must start with 'enrtree-root:'", "ENRTree root entry must start with 'enrtree-root:'"
); );
} }
}); });
@ -56,7 +56,7 @@ describe("ENRTree", () => {
} catch (err: unknown) { } catch (err: unknown) {
const e = err as Error; const e = err as Error;
expect(e.toString()).includes( expect(e.toString()).includes(
"ENRTree tree entry must start with 'enrtree:'", "ENRTree tree entry must start with 'enrtree:'"
); );
} }
}); });
@ -75,7 +75,7 @@ describe("ENRTree", () => {
const expected = [ const expected = [
"D2SNLTAGWNQ34NTQTPHNZDECFU", "D2SNLTAGWNQ34NTQTPHNZDECFU",
"67BLTJEU5R2D5S3B4QKJSBRFCY", "67BLTJEU5R2D5S3B4QKJSBRFCY",
"A2HDMZBB4JIU53VTEGC4TG6P4A", "A2HDMZBB4JIU53VTEGC4TG6P4A"
]; ];
const branches = ENRTree.parseBranch(dns.enrBranch); const branches = ENRTree.parseBranch(dns.enrBranch);
@ -88,7 +88,7 @@ describe("ENRTree", () => {
} catch (err: unknown) { } catch (err: unknown) {
const e = err as Error; const e = err as Error;
expect(e.toString()).includes( expect(e.toString()).includes(
"ENRTree branch entry must start with 'enrtree-branch:'", "ENRTree branch entry must start with 'enrtree-branch:'"
); );
} }
}); });


@ -29,7 +29,7 @@ export class ENRTree {
static parseAndVerifyRoot(root: string, publicKey: string): string { static parseAndVerifyRoot(root: string, publicKey: string): string {
if (!root.startsWith(this.ROOT_PREFIX)) if (!root.startsWith(this.ROOT_PREFIX))
throw new Error( throw new Error(
`ENRTree root entry must start with '${this.ROOT_PREFIX}'`, `ENRTree root entry must start with '${this.ROOT_PREFIX}'`
); );
const rootValues = ENRTree.parseRootValues(root); const rootValues = ENRTree.parseRootValues(root);
@ -43,13 +43,13 @@ export class ENRTree {
const signedComponentBuffer = utf8ToBytes(signedComponent); const signedComponentBuffer = utf8ToBytes(signedComponent);
const signatureBuffer = fromString(rootValues.signature, "base64url").slice( const signatureBuffer = fromString(rootValues.signature, "base64url").slice(
0, 0,
64, 64
); );
const isVerified = verifySignature( const isVerified = verifySignature(
signatureBuffer, signatureBuffer,
keccak256(signedComponentBuffer), keccak256(signedComponentBuffer),
new Uint8Array(decodedPublicKey), new Uint8Array(decodedPublicKey)
); );
if (!isVerified) throw new Error("Unable to verify ENRTree root signature"); if (!isVerified) throw new Error("Unable to verify ENRTree root signature");
@ -59,7 +59,7 @@ export class ENRTree {
static parseRootValues(txt: string): ENRRootValues { static parseRootValues(txt: string): ENRRootValues {
const matches = txt.match( const matches = txt.match(
/^enrtree-root:v1 e=([^ ]+) l=([^ ]+) seq=(\d+) sig=([^ ]+)$/, /^enrtree-root:v1 e=([^ ]+) l=([^ ]+) seq=(\d+) sig=([^ ]+)$/
); );
if (!Array.isArray(matches)) if (!Array.isArray(matches))
@ -89,7 +89,7 @@ export class ENRTree {
static parseTree(tree: string): ENRTreeValues { static parseTree(tree: string): ENRTreeValues {
if (!tree.startsWith(this.TREE_PREFIX)) if (!tree.startsWith(this.TREE_PREFIX))
throw new Error( throw new Error(
`ENRTree tree entry must start with '${this.TREE_PREFIX}'`, `ENRTree tree entry must start with '${this.TREE_PREFIX}'`
); );
const matches = tree.match(/^enrtree:\/\/([^@]+)@(.+)$/); const matches = tree.match(/^enrtree:\/\/([^@]+)@(.+)$/);
@ -115,7 +115,7 @@ export class ENRTree {
static parseBranch(branch: string): string[] { static parseBranch(branch: string): string[] {
if (!branch.startsWith(this.BRANCH_PREFIX)) if (!branch.startsWith(this.BRANCH_PREFIX))
throw new Error( throw new Error(
`ENRTree branch entry must start with '${this.BRANCH_PREFIX}'`, `ENRTree branch entry must start with '${this.BRANCH_PREFIX}'`
); );
return branch.split(this.BRANCH_PREFIX)[1].split(","); return branch.split(this.BRANCH_PREFIX)[1].split(",");


@ -15,8 +15,8 @@ async function createEnr(waku2: Waku2): Promise<ENR> {
multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"), multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"), multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr( multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss", "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
), )
]; ];
enr.waku2 = waku2; enr.waku2 = waku2;
@ -27,7 +27,7 @@ const Waku2None = {
relay: false, relay: false,
store: false, store: false,
filter: false, filter: false,
lightPush: false, lightPush: false
}; };
describe("Fetch nodes until capabilities are fulfilled", function () { describe("Fetch nodes until capabilities are fulfilled", function () {
@ -39,7 +39,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled( const res = await fetchNodesUntilCapabilitiesFulfilled(
{ relay: 1 }, { relay: 1 },
0, 0,
getNode, getNode
); );
expect(res.length).to.eq(1); expect(res.length).to.eq(1);
@ -62,7 +62,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled( const res = await fetchNodesUntilCapabilitiesFulfilled(
{ store: 1 }, { store: 1 },
1, 1,
getNode, getNode
); );
expect(res.length).to.eq(1); expect(res.length).to.eq(1);
@ -76,7 +76,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const relayStoreNode = await createEnr({ const relayStoreNode = await createEnr({
...Waku2None, ...Waku2None,
relay: true, relay: true,
store: true, store: true
}); });
const retrievedNodes = [relayNode1, relayNode2, relayNode3, relayStoreNode]; const retrievedNodes = [relayNode1, relayNode2, relayNode3, relayStoreNode];
@ -91,7 +91,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled( const res = await fetchNodesUntilCapabilitiesFulfilled(
{ store: 1, relay: 2 }, { store: 1, relay: 2 },
1, 1,
getNode, getNode
); );
expect(res.length).to.eq(3); expect(res.length).to.eq(3);
@ -108,7 +108,7 @@ describe("Fetch nodes until capabilities are fulfilled", function () {
const res = await fetchNodesUntilCapabilitiesFulfilled( const res = await fetchNodesUntilCapabilitiesFulfilled(
{ filter: 1, relay: 1 }, { filter: 1, relay: 1 },
5, 5,
getNode, getNode
); );
expect(res.length).to.eq(1); expect(res.length).to.eq(1);


@ -13,13 +13,13 @@ const log = debug("waku:discovery:fetch_nodes");
export async function fetchNodesUntilCapabilitiesFulfilled( export async function fetchNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>, wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
errorTolerance: number, errorTolerance: number,
getNode: () => Promise<IEnr | null>, getNode: () => Promise<IEnr | null>
): Promise<IEnr[]> { ): Promise<IEnr[]> {
const wanted = { const wanted = {
relay: wantedNodeCapabilityCount.relay ?? 0, relay: wantedNodeCapabilityCount.relay ?? 0,
store: wantedNodeCapabilityCount.store ?? 0, store: wantedNodeCapabilityCount.store ?? 0,
filter: wantedNodeCapabilityCount.filter ?? 0, filter: wantedNodeCapabilityCount.filter ?? 0,
lightPush: wantedNodeCapabilityCount.lightPush ?? 0, lightPush: wantedNodeCapabilityCount.lightPush ?? 0
}; };
const maxSearches = const maxSearches =
@ -29,7 +29,7 @@ export async function fetchNodesUntilCapabilitiesFulfilled(
relay: 0, relay: 0,
store: 0, store: 0,
filter: 0, filter: 0,
lightPush: 0, lightPush: 0
}; };
let totalSearches = 0; let totalSearches = 0;
@ -64,13 +64,13 @@ export async function fetchNodesUntilCapabilitiesFulfilled(
export async function* yieldNodesUntilCapabilitiesFulfilled( export async function* yieldNodesUntilCapabilitiesFulfilled(
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>, wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
errorTolerance: number, errorTolerance: number,
getNode: () => Promise<IEnr | null>, getNode: () => Promise<IEnr | null>
): AsyncGenerator<IEnr> { ): AsyncGenerator<IEnr> {
const wanted = { const wanted = {
relay: wantedNodeCapabilityCount.relay ?? 0, relay: wantedNodeCapabilityCount.relay ?? 0,
store: wantedNodeCapabilityCount.store ?? 0, store: wantedNodeCapabilityCount.store ?? 0,
filter: wantedNodeCapabilityCount.filter ?? 0, filter: wantedNodeCapabilityCount.filter ?? 0,
lightPush: wantedNodeCapabilityCount.lightPush ?? 0, lightPush: wantedNodeCapabilityCount.lightPush ?? 0
}; };
const maxSearches = const maxSearches =
@ -80,7 +80,7 @@ export async function* yieldNodesUntilCapabilitiesFulfilled(
relay: 0, relay: 0,
store: 0, store: 0,
filter: 0, filter: 0,
lightPush: 0, lightPush: 0
}; };
let totalSearches = 0; let totalSearches = 0;
@ -108,7 +108,7 @@ export async function* yieldNodesUntilCapabilitiesFulfilled(
function isSatisfied( function isSatisfied(
wanted: NodeCapabilityCount, wanted: NodeCapabilityCount,
actual: NodeCapabilityCount, actual: NodeCapabilityCount
): boolean { ): boolean {
return ( return (
actual.relay >= wanted.relay && actual.relay >= wanted.relay &&
@ -146,7 +146,7 @@ function addCapabilities(node: Waku2, total: NodeCapabilityCount): void {
function helpsSatisfyCapabilities( function helpsSatisfyCapabilities(
node: Waku2, node: Waku2,
wanted: NodeCapabilityCount, wanted: NodeCapabilityCount,
actual: NodeCapabilityCount, actual: NodeCapabilityCount
): boolean { ): boolean {
if (isSatisfied(wanted, actual)) { if (isSatisfied(wanted, actual)) {
throw "Internal Error: Waku2 wanted capabilities are already fulfilled"; throw "Internal Error: Waku2 wanted capabilities are already fulfilled";
@ -168,12 +168,12 @@ function helpsSatisfyCapabilities(
*/ */
function missingCapabilities( function missingCapabilities(
wanted: NodeCapabilityCount, wanted: NodeCapabilityCount,
actual: NodeCapabilityCount, actual: NodeCapabilityCount
): Waku2 { ): Waku2 {
return { return {
relay: actual.relay < wanted.relay, relay: actual.relay < wanted.relay,
store: actual.store < wanted.store, store: actual.store < wanted.store,
filter: actual.filter < wanted.filter, filter: actual.filter < wanted.filter,
lightPush: actual.lightPush < wanted.lightPush, lightPush: actual.lightPush < wanted.lightPush
}; };
} }
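
The hunks above around fetchNodesUntilCapabilitiesFulfilled and its helpers adjust the capability bookkeeping shared by both fetch helpers: a wanted count per Waku protocol is compared against a running tally, and searching stops once every wanted count is met. A self-contained sketch of that check, with the NodeCapabilityCount shape assumed from these hunks; only the relay comparison is visible in the diff, and the other three comparisons are inferred from the missingCapabilities hunk above:

// Shape assumed from the hunks above: one count per Waku protocol.
interface NodeCapabilityCount {
  relay: number;
  store: number;
  filter: number;
  lightPush: number;
}

// Mirrors the isSatisfied check touched in the diff: every wanted count must be met.
function isSatisfied(
  wanted: NodeCapabilityCount,
  actual: NodeCapabilityCount
): boolean {
  return (
    actual.relay >= wanted.relay &&
    actual.store >= wanted.store &&
    actual.filter >= wanted.filter &&
    actual.lightPush >= wanted.lightPush
  );
}

// Usage: stop once two relay-capable and one store-capable node are tallied.
const wanted: NodeCapabilityCount = { relay: 2, store: 1, filter: 0, lightPush: 0 };
const tally: NodeCapabilityCount = { relay: 2, store: 1, filter: 0, lightPush: 0 };
console.log(isSatisfied(wanted, tally)); // true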


@ -1,11 +1,11 @@
import { CustomEvent, EventEmitter } from "@libp2p/interface/events";
import type { import type {
PeerDiscovery, PeerDiscovery,
PeerDiscoveryEvents, PeerDiscoveryEvents
} from "@libp2p/interface-peer-discovery"; } from "@libp2p/interface/peer-discovery";
import { peerDiscovery as symbol } from "@libp2p/interface-peer-discovery"; import { peerDiscovery as symbol } from "@libp2p/interface/peer-discovery";
import type { PeerInfo } from "@libp2p/interface-peer-info"; import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { PeerStore } from "@libp2p/interface-peer-store"; import type { PeerStore } from "@libp2p/interface/peer-store";
import { CustomEvent, EventEmitter } from "@libp2p/interfaces/events";
import type { IEnr } from "@waku/interfaces"; import type { IEnr } from "@waku/interfaces";
import debug from "debug"; import debug from "debug";
@ -17,7 +17,7 @@ const log = debug("waku:peer-discovery-dns");
const enrTree = { const enrTree = {
TEST: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@test.waku.nodes.status.im", TEST: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@test.waku.nodes.status.im",
PROD: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@prod.waku.nodes.status.im", PROD: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@prod.waku.nodes.status.im"
}; };
const DEFAULT_BOOTSTRAP_TAG_NAME = "bootstrap"; const DEFAULT_BOOTSTRAP_TAG_NAME = "bootstrap";
@ -93,7 +93,7 @@ export class PeerDiscoveryDns
this.nextPeer = dns.getNextPeer.bind( this.nextPeer = dns.getNextPeer.bind(
dns, dns,
enrUrls, enrUrls,
wantedNodeCapabilityCount, wantedNodeCapabilityCount
); );
} }
@ -112,9 +112,9 @@ export class PeerDiscoveryDns
tags: { tags: {
[DEFAULT_BOOTSTRAP_TAG_NAME]: { [DEFAULT_BOOTSTRAP_TAG_NAME]: {
value: this._options.tagValue ?? DEFAULT_BOOTSTRAP_TAG_VALUE, value: this._options.tagValue ?? DEFAULT_BOOTSTRAP_TAG_VALUE,
ttl: this._options.tagTTL ?? DEFAULT_BOOTSTRAP_TAG_TTL, ttl: this._options.tagTTL ?? DEFAULT_BOOTSTRAP_TAG_TTL
}, }
}, }
}; };
let isPeerChanged = false; let isPeerChanged = false;
@ -135,7 +135,7 @@ export class PeerDiscoveryDns
if (isPeerChanged) { if (isPeerChanged) {
this.dispatchEvent( this.dispatchEvent(
new CustomEvent<PeerInfo>("peer", { detail: peerInfo }), new CustomEvent<PeerInfo>("peer", { detail: peerInfo })
); );
} }
} }
@ -159,7 +159,7 @@ export class PeerDiscoveryDns
export function wakuDnsDiscovery( export function wakuDnsDiscovery(
enrUrls: string[], enrUrls: string[],
wantedNodeCapabilityCount: Partial<NodeCapabilityCount>, wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
): (components: DnsDiscoveryComponents) => PeerDiscoveryDns { ): (components: DnsDiscoveryComponents) => PeerDiscoveryDns {
return (components: DnsDiscoveryComponents) => return (components: DnsDiscoveryComponents) =>
new PeerDiscoveryDns(components, { enrUrls, wantedNodeCapabilityCount }); new PeerDiscoveryDns(components, { enrUrls, wantedNodeCapabilityCount });
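
The PeerDiscoveryDns hunks above also capture the event-API move: CustomEvent and EventEmitter now come from "@libp2p/interface/events" instead of "@libp2p/interfaces/events", while the typed "peer" event is dispatched exactly as before. A minimal sketch assuming only the exports already imported in the hunks above; DemoDiscovery is an illustrative class name, not part of the codebase:

import { CustomEvent, EventEmitter } from "@libp2p/interface/events";
import type { PeerDiscoveryEvents } from "@libp2p/interface/peer-discovery";
import type { PeerInfo } from "@libp2p/interface/peer-info";

// Hypothetical emitter dispatching the same typed "peer" event as
// PeerDiscoveryDns does in the hunks above.
export class DemoDiscovery extends EventEmitter<PeerDiscoveryEvents> {
  announce(peerInfo: PeerInfo): void {
    this.dispatchEvent(new CustomEvent<PeerInfo>("peer", { detail: peerInfo }));
  }
}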


@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };


@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"], frameworks: ["webpack", "mocha"],
files: ["src/**/*.ts"], files: ["src/**/*.ts"],
preprocessors: { preprocessors: {
"src/**/*.ts": ["webpack"], "src/**/*.ts": ["webpack"]
}, },
envPreprocessor: ["CI"], envPreprocessor: ["CI"],
reporters: ["progress"], reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true, singleRun: true,
client: { client: {
mocha: { mocha: {
timeout: 6000, // Default is 2s timeout: 6000 // Default is 2s
}, }
}, },
webpack: { webpack: {
mode: "development", mode: "development",
module: { module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }], rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
}, },
plugins: [ plugins: [
new webpack.DefinePlugin({ new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false, "process.env.CI": process.env.CI || false
}), }),
new webpack.ProvidePlugin({ new webpack.ProvidePlugin({
process: "process/browser.js", process: "process/browser.js"
}), })
], ],
resolve: { resolve: {
extensions: [".ts", ".tsx", ".js"], extensions: [".ts", ".tsx", ".js"],
extensionAlias: { extensionAlias: {
".js": [".js", ".ts"], ".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"], ".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"], ".mjs": [".mjs", ".mts"]
}, }
}, },
stats: { warnings: false }, stats: { warnings: false },
devtool: "inline-source-map", devtool: "inline-source-map"
}, }
}); });
}; };


@ -61,8 +61,6 @@
"js-sha3": "^0.8.0" "js-sha3": "^0.8.0"
}, },
"devDependencies": { "devDependencies": {
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/peer-id-factory": "^2.0.4", "@libp2p/peer-id-factory": "^2.0.4",
"@rollup/plugin-commonjs": "^24.0.1", "@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0", "@rollup/plugin-json": "^6.0.0",


@ -11,14 +11,14 @@ export default {
input, input,
output: { output: {
dir: "bundle", dir: "bundle",
format: "esm", format: "esm"
}, },
plugins: [ plugins: [
commonjs(), commonjs(),
json(), json(),
nodeResolve({ nodeResolve({
browser: true, browser: true,
preferBuiltins: false, preferBuiltins: false
}), })
], ]
}; };


@ -1,4 +1,4 @@
import { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { ENRKey, ENRValue } from "@waku/interfaces"; import type { ENRKey, ENRValue } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes"; import { utf8ToBytes } from "@waku/utils/bytes";
@ -9,7 +9,7 @@ import { getPublicKeyFromPeerId } from "./peer_id.js";
export class EnrCreator { export class EnrCreator {
static fromPublicKey( static fromPublicKey(
publicKey: Uint8Array, publicKey: Uint8Array,
kvs: Record<ENRKey, ENRValue> = {}, kvs: Record<ENRKey, ENRValue> = {}
): Promise<ENR> { ): Promise<ENR> {
// EIP-778 specifies that the key must be in compressed format, 33 bytes // EIP-778 specifies that the key must be in compressed format, 33 bytes
if (publicKey.length !== 33) { if (publicKey.length !== 33) {
@ -18,13 +18,13 @@ export class EnrCreator {
return ENR.create({ return ENR.create({
...kvs, ...kvs,
id: utf8ToBytes("v4"), id: utf8ToBytes("v4"),
secp256k1: publicKey, secp256k1: publicKey
}); });
} }
static async fromPeerId( static async fromPeerId(
peerId: PeerId, peerId: PeerId,
kvs: Record<ENRKey, ENRValue> = {}, kvs: Record<ENRKey, ENRValue> = {}
): Promise<ENR> { ): Promise<ENR> {
switch (peerId.type) { switch (peerId.type) {
case "secp256k1": case "secp256k1":


@ -12,15 +12,15 @@ import sha3 from "js-sha3";
*/ */
export async function sign( export async function sign(
message: Uint8Array, message: Uint8Array,
privateKey: Uint8Array, privateKey: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
const [signature, recoveryId] = await secp.sign(message, privateKey, { const [signature, recoveryId] = await secp.sign(message, privateKey, {
recovered: true, recovered: true,
der: false, der: false
}); });
return concat( return concat(
[signature, new Uint8Array([recoveryId])], [signature, new Uint8Array([recoveryId])],
signature.length + 1, signature.length + 1
); );
} }
@ -42,7 +42,7 @@ export function compressPublicKey(publicKey: Uint8Array): Uint8Array {
export function verifySignature( export function verifySignature(
signature: Uint8Array, signature: Uint8Array,
message: Uint8Array | string, message: Uint8Array | string,
publicKey: Uint8Array, publicKey: Uint8Array
): boolean { ): boolean {
try { try {
const _signature = secp.Signature.fromCompact(signature.slice(0, 64)); const _signature = secp.Signature.fromCompact(signature.slice(0, 64));


@ -10,7 +10,7 @@ export class EnrDecoder {
static fromString(encoded: string): Promise<ENR> { static fromString(encoded: string): Promise<ENR> {
if (!encoded.startsWith(ENR.RECORD_PREFIX)) { if (!encoded.startsWith(ENR.RECORD_PREFIX)) {
throw new Error( throw new Error(
`"string encoded ENR must start with '${ENR.RECORD_PREFIX}'`, `"string encoded ENR must start with '${ENR.RECORD_PREFIX}'`
); );
} }
return EnrDecoder.fromRLP(fromString(encoded.slice(4), "base64url")); return EnrDecoder.fromRLP(fromString(encoded.slice(4), "base64url"));
@ -64,7 +64,7 @@ function checkValues(values: Uint8Array[]): {
} }
if (!seq || Array.isArray(seq)) { if (!seq || Array.isArray(seq)) {
throw new Error( throw new Error(
"Decoded ENR invalid sequence number: must be a byte array", "Decoded ENR invalid sequence number: must be a byte array"
); );
} }
@ -75,7 +75,7 @@ function checkSignature(
seq: Uint8Array, seq: Uint8Array,
kvs: Uint8Array[], kvs: Uint8Array[],
enr: ENR, enr: ENR,
signature: Uint8Array, signature: Uint8Array
): void { ): void {
const rlpEncodedBytes = hexToBytes(RLP.encode([seq, ...kvs])); const rlpEncodedBytes = hexToBytes(RLP.encode([seq, ...kvs]));
if (!enr.verify(rlpEncodedBytes, signature)) { if (!enr.verify(rlpEncodedBytes, signature)) {


@ -9,7 +9,7 @@ import { ENR } from "./enr.js";
export class EnrEncoder { export class EnrEncoder {
static async toValues( static async toValues(
enr: ENR, enr: ENR,
privateKey?: Uint8Array, privateKey?: Uint8Array
): Promise<(ENRKey | ENRValue | number[])[]> { ): Promise<(ENRKey | ENRValue | number[])[]> {
// sort keys and flatten into [k, v, k, v, ...] // sort keys and flatten into [k, v, k, v, ...]
const content: Array<ENRKey | ENRValue | number[]> = Array.from(enr.keys()) const content: Array<ENRKey | ENRValue | number[]> = Array.from(enr.keys())
@ -20,7 +20,7 @@ export class EnrEncoder {
content.unshift(new Uint8Array([Number(enr.seq)])); content.unshift(new Uint8Array([Number(enr.seq)]));
if (privateKey) { if (privateKey) {
content.unshift( content.unshift(
await enr.sign(hexToBytes(RLP.encode(content)), privateKey), await enr.sign(hexToBytes(RLP.encode(content)), privateKey)
); );
} else { } else {
if (!enr.signature) { if (!enr.signature) {
@ -33,7 +33,7 @@ export class EnrEncoder {
static async toBytes(enr: ENR, privateKey?: Uint8Array): Promise<Uint8Array> { static async toBytes(enr: ENR, privateKey?: Uint8Array): Promise<Uint8Array> {
const encoded = hexToBytes( const encoded = hexToBytes(
RLP.encode(await EnrEncoder.toValues(enr, privateKey)), RLP.encode(await EnrEncoder.toValues(enr, privateKey))
); );
if (encoded.length >= MAX_RECORD_SIZE) { if (encoded.length >= MAX_RECORD_SIZE) {
throw new Error("ENR must be less than 300 bytes"); throw new Error("ENR must be less than 300 bytes");


@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import { createSecp256k1PeerId } from "@libp2p/peer-id-factory"; import { createSecp256k1PeerId } from "@libp2p/peer-id-factory";
import { multiaddr } from "@multiformats/multiaddr"; import { multiaddr } from "@multiformats/multiaddr";
import * as secp from "@noble/secp256k1"; import * as secp from "@noble/secp256k1";
@ -14,7 +14,7 @@ import { EnrEncoder } from "./encoder.js";
import { import {
ENR, ENR,
TransportProtocol, TransportProtocol,
TransportProtocolPerIpVersion, TransportProtocolPerIpVersion
} from "./enr.js"; } from "./enr.js";
import { getPrivateKeyFromPeerId } from "./peer_id.js"; import { getPrivateKeyFromPeerId } from "./peer_id.js";
@ -29,15 +29,15 @@ describe("ENR", function () {
multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"), multiaddr("/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"), multiaddr("/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr( multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss", "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
), )
]; ];
enr.waku2 = { enr.waku2 = {
relay: true, relay: true,
store: false, store: false,
filter: true, filter: true,
lightPush: false, lightPush: false
}; };
const txt = await EnrEncoder.toString(enr, privateKey); const txt = await EnrEncoder.toString(enr, privateKey);
@ -53,19 +53,19 @@ describe("ENR", function () {
expect(enr2.multiaddrs!.length).to.be.equal(3); expect(enr2.multiaddrs!.length).to.be.equal(3);
const multiaddrsAsStr = enr2.multiaddrs!.map((ma) => ma.toString()); const multiaddrsAsStr = enr2.multiaddrs!.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss", "/dns4/node1.do-ams.wakuv2.test.statusim.net/tcp/443/wss"
); );
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss", "/dns6/node2.ac-chi.wakuv2.test.statusim.net/tcp/443/wss"
); );
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss", "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
); );
expect(enr2.waku2).to.deep.equal({ expect(enr2.waku2).to.deep.equal({
relay: true, relay: true,
store: false, store: false,
filter: true, filter: true,
lightPush: false, lightPush: false
}); });
}); });
@ -87,13 +87,13 @@ describe("ENR", function () {
expect(enr.multiaddrs!.length).to.be.equal(3); expect(enr.multiaddrs!.length).to.be.equal(3);
const multiaddrsAsStr = enr.multiaddrs!.map((ma) => ma.toString()); const multiaddrsAsStr = enr.multiaddrs!.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss", "/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
); );
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss", "/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
); );
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss", "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
); );
}); });
@ -107,7 +107,7 @@ describe("ENR", function () {
expect(enr.ip).to.be.equal("134.209.139.210"); expect(enr.ip).to.be.equal("134.209.139.210");
expect(enr.publicKey).to.not.be.undefined; expect(enr.publicKey).to.not.be.undefined;
expect(enr.peerId?.toString()).to.be.equal( expect(enr.peerId?.toString()).to.be.equal(
"16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ", "16Uiu2HAmPLe7Mzm8TsYUubgCAW1aJoeFScxrLj8ppHFivPo97bUZ"
); );
}); });
@ -159,7 +159,7 @@ describe("ENR", function () {
const enr = await ENR.create( const enr = await ENR.create(
{ id: utf8ToBytes("v3") }, { id: utf8ToBytes("v3") },
BigInt(0), BigInt(0),
new Uint8Array(), new Uint8Array()
); );
enr.verify(new Uint8Array(), new Uint8Array()); enr.verify(new Uint8Array(), new Uint8Array());
assert.fail("Expect error here"); assert.fail("Expect error here");
@ -174,7 +174,7 @@ describe("ENR", function () {
const enr = await ENR.create( const enr = await ENR.create(
{ id: utf8ToBytes("v4") }, { id: utf8ToBytes("v4") },
BigInt(0), BigInt(0),
new Uint8Array(), new Uint8Array()
); );
enr.verify(new Uint8Array(), new Uint8Array()); enr.verify(new Uint8Array(), new Uint8Array());
assert.fail("Expect error here"); assert.fail("Expect error here");
@ -200,7 +200,7 @@ describe("ENR", function () {
beforeEach(async function () { beforeEach(async function () {
const seq = BigInt(1); const seq = BigInt(1);
privateKey = hexToBytes( privateKey = hexToBytes(
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291", "b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"
); );
record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey)); record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey));
record.setLocationMultiaddr(multiaddr("/ip4/127.0.0.1/udp/30303")); record.setLocationMultiaddr(multiaddr("/ip4/127.0.0.1/udp/30303"));
@ -210,7 +210,7 @@ describe("ENR", function () {
it("should properly compute the node id", () => { it("should properly compute the node id", () => {
expect(record.nodeId).to.equal( expect(record.nodeId).to.equal(
"a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7", "a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7"
); );
}); });
@ -245,7 +245,7 @@ describe("ENR", function () {
beforeEach(async () => { beforeEach(async () => {
privateKey = hexToBytes( privateKey = hexToBytes(
"b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291", "b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"
); );
record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey)); record = await EnrCreator.fromPublicKey(secp.getPublicKey(privateKey));
}); });
@ -262,14 +262,14 @@ describe("ENR", function () {
record.set("udp", tuples0[1][1]); record.set("udp", tuples0[1][1]);
// and get the multiaddr // and get the multiaddr
expect( expect(
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString(), record.getLocationMultiaddr(TransportProtocol.UDP)!.toString()
).to.equal(multi0.toString()); ).to.equal(multi0.toString());
// set the multiaddr // set the multiaddr
const multi1 = multiaddr("/ip4/0.0.0.0/udp/30300"); const multi1 = multiaddr("/ip4/0.0.0.0/udp/30300");
record.setLocationMultiaddr(multi1); record.setLocationMultiaddr(multi1);
// and get the multiaddr // and get the multiaddr
expect( expect(
record.getLocationMultiaddr(TransportProtocol.UDP)!.toString(), record.getLocationMultiaddr(TransportProtocol.UDP)!.toString()
).to.equal(multi1.toString()); ).to.equal(multi1.toString());
// and get the underlying records // and get the underlying records
const tuples1 = multi1.tuples(); const tuples1 = multi1.tuples();
@ -290,14 +290,14 @@ describe("ENR", function () {
record.set("tcp", tuples0[1][1]); record.set("tcp", tuples0[1][1]);
// and get the multiaddr // and get the multiaddr
expect( expect(
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString(), record.getLocationMultiaddr(TransportProtocol.TCP)!.toString()
).to.equal(multi0.toString()); ).to.equal(multi0.toString());
// set the multiaddr // set the multiaddr
const multi1 = multiaddr("/ip4/0.0.0.0/tcp/30300"); const multi1 = multiaddr("/ip4/0.0.0.0/tcp/30300");
record.setLocationMultiaddr(multi1); record.setLocationMultiaddr(multi1);
// and get the multiaddr // and get the multiaddr
expect( expect(
record.getLocationMultiaddr(TransportProtocol.TCP)!.toString(), record.getLocationMultiaddr(TransportProtocol.TCP)!.toString()
).to.equal(multi1.toString()); ).to.equal(multi1.toString());
// and get the underlying records // and get the underlying records
const tuples1 = multi1.tuples(); const tuples1 = multi1.tuples();
@ -312,7 +312,7 @@ describe("ENR", function () {
const tcp = 8080; const tcp = 8080;
const udp = 8080; const udp = 8080;
const wsMultiaddr = multiaddr( const wsMultiaddr = multiaddr(
"/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss", "/dns4/node-01.do-ams3.wakuv2.prod.statusim.net/tcp/8000/wss"
); );
let peerId: PeerId; let peerId: PeerId;
let enr: ENR; let enr: ENR;
@ -331,43 +331,43 @@ describe("ENR", function () {
it("should properly create location multiaddrs - udp4", () => { it("should properly create location multiaddrs - udp4", () => {
expect( expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP4), enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP4)
).to.deep.equal(multiaddr(`/ip4/${ip4}/udp/${udp}`)); ).to.deep.equal(multiaddr(`/ip4/${ip4}/udp/${udp}`));
}); });
it("should properly create location multiaddrs - tcp4", () => { it("should properly create location multiaddrs - tcp4", () => {
expect( expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP4), enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP4)
).to.deep.equal(multiaddr(`/ip4/${ip4}/tcp/${tcp}`)); ).to.deep.equal(multiaddr(`/ip4/${ip4}/tcp/${tcp}`));
}); });
it("should properly create location multiaddrs - udp6", () => { it("should properly create location multiaddrs - udp6", () => {
expect( expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP6), enr.getLocationMultiaddr(TransportProtocolPerIpVersion.UDP6)
).to.deep.equal(multiaddr(`/ip6/${ip6}/udp/${udp}`)); ).to.deep.equal(multiaddr(`/ip6/${ip6}/udp/${udp}`));
}); });
it("should properly create location multiaddrs - tcp6", () => { it("should properly create location multiaddrs - tcp6", () => {
expect( expect(
enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP6), enr.getLocationMultiaddr(TransportProtocolPerIpVersion.TCP6)
).to.deep.equal(multiaddr(`/ip6/${ip6}/tcp/${tcp}`)); ).to.deep.equal(multiaddr(`/ip6/${ip6}/tcp/${tcp}`));
}); });
it("should properly create location multiaddrs - udp", () => { it("should properly create location multiaddrs - udp", () => {
// default to ip4 // default to ip4
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal( expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/udp/${udp}`), multiaddr(`/ip4/${ip4}/udp/${udp}`)
); );
// if ip6 is set, use it // if ip6 is set, use it
enr.ip = undefined; enr.ip = undefined;
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal( expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip6/${ip6}/udp/${udp}`), multiaddr(`/ip6/${ip6}/udp/${udp}`)
); );
// if ip6 does not exist, use ip4 // if ip6 does not exist, use ip4
enr.ip6 = undefined; enr.ip6 = undefined;
enr.ip = ip4; enr.ip = ip4;
expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal( expect(enr.getLocationMultiaddr(TransportProtocol.UDP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/udp/${udp}`), multiaddr(`/ip4/${ip4}/udp/${udp}`)
); );
enr.ip6 = ip6; enr.ip6 = ip6;
}); });
@ -375,18 +375,18 @@ describe("ENR", function () {
it("should properly create location multiaddrs - tcp", () => { it("should properly create location multiaddrs - tcp", () => {
// default to ip4 // default to ip4
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal( expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`), multiaddr(`/ip4/${ip4}/tcp/${tcp}`)
); );
// if ip6 is set, use it // if ip6 is set, use it
enr.ip = undefined; enr.ip = undefined;
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal( expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip6/${ip6}/tcp/${tcp}`), multiaddr(`/ip6/${ip6}/tcp/${tcp}`)
); );
// if ip6 does not exist, use ip4 // if ip6 does not exist, use ip4
enr.ip6 = undefined; enr.ip6 = undefined;
enr.ip = ip4; enr.ip = ip4;
expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal( expect(enr.getLocationMultiaddr(TransportProtocol.TCP)).to.deep.equal(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`), multiaddr(`/ip4/${ip4}/tcp/${tcp}`)
); );
enr.ip6 = ip6; enr.ip6 = ip6;
}); });
@ -397,19 +397,19 @@ describe("ENR", function () {
expect(peerInfo.id.toString()).to.equal(peerId.toString()); expect(peerInfo.id.toString()).to.equal(peerId.toString());
expect(peerInfo.multiaddrs.length).to.equal(5); expect(peerInfo.multiaddrs.length).to.equal(5);
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain( expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip4/${ip4}/tcp/${tcp}`).toString(), multiaddr(`/ip4/${ip4}/tcp/${tcp}`).toString()
); );
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain( expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip6/${ip6}/tcp/${tcp}`).toString(), multiaddr(`/ip6/${ip6}/tcp/${tcp}`).toString()
); );
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain( expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip4/${ip4}/udp/${udp}`).toString(), multiaddr(`/ip4/${ip4}/udp/${udp}`).toString()
); );
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain( expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
multiaddr(`/ip6/${ip6}/udp/${udp}`).toString(), multiaddr(`/ip6/${ip6}/udp/${udp}`).toString()
); );
expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain( expect(peerInfo.multiaddrs.map((ma) => ma.toString())).to.contain(
wsMultiaddr.toString(), wsMultiaddr.toString()
); );
}); });
}); });
@ -428,7 +428,7 @@ describe("ENR", function () {
relay: false, relay: false,
store: false, store: false,
filter: false, filter: false,
lightPush: false, lightPush: false
}; };
}); });


@ -1,12 +1,12 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface-peer-info"; import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { Multiaddr } from "@multiformats/multiaddr"; import type { Multiaddr } from "@multiformats/multiaddr";
import type { import type {
ENRKey, ENRKey,
ENRValue, ENRValue,
IEnr, IEnr,
NodeId, NodeId,
SequenceNumber, SequenceNumber
} from "@waku/interfaces"; } from "@waku/interfaces";
import debug from "debug"; import debug from "debug";
@ -21,13 +21,13 @@ const log = debug("waku:enr");
export enum TransportProtocol { export enum TransportProtocol {
TCP = "tcp", TCP = "tcp",
UDP = "udp", UDP = "udp"
} }
export enum TransportProtocolPerIpVersion { export enum TransportProtocolPerIpVersion {
TCP4 = "tcp4", TCP4 = "tcp4",
UDP4 = "udp4", UDP4 = "udp4",
TCP6 = "tcp6", TCP6 = "tcp6",
UDP6 = "udp6", UDP6 = "udp6"
} }
export class ENR extends RawEnr implements IEnr { export class ENR extends RawEnr implements IEnr {
@ -37,7 +37,7 @@ export class ENR extends RawEnr implements IEnr {
static async create( static async create(
kvs: Record<ENRKey, ENRValue> = {}, kvs: Record<ENRKey, ENRValue> = {},
seq: SequenceNumber = BigInt(1), seq: SequenceNumber = BigInt(1),
signature?: Uint8Array, signature?: Uint8Array
): Promise<ENR> { ): Promise<ENR> {
const enr = new ENR(kvs, seq, signature); const enr = new ENR(kvs, seq, signature);
try { try {
@ -61,7 +61,7 @@ export class ENR extends RawEnr implements IEnr {
} }
} }
getLocationMultiaddr: ( getLocationMultiaddr: (
protocol: TransportProtocol | TransportProtocolPerIpVersion, protocol: TransportProtocol | TransportProtocolPerIpVersion
) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this); ) => Multiaddr | undefined = locationMultiaddrFromEnrFields.bind({}, this);
setLocationMultiaddr(multiaddr: Multiaddr): void { setLocationMultiaddr(multiaddr: Multiaddr): void {
@ -93,7 +93,7 @@ export class ENR extends RawEnr implements IEnr {
for (const protocol of Object.values(TransportProtocolPerIpVersion)) { for (const protocol of Object.values(TransportProtocolPerIpVersion)) {
const ma = this.getLocationMultiaddr( const ma = this.getLocationMultiaddr(
protocol as TransportProtocolPerIpVersion, protocol as TransportProtocolPerIpVersion
); );
if (ma) multiaddrs.push(ma); if (ma) multiaddrs.push(ma);
} }
@ -109,7 +109,7 @@ export class ENR extends RawEnr implements IEnr {
return { return {
id, id,
multiaddrs: this.getAllLocationMultiaddrs(), multiaddrs: this.getAllLocationMultiaddrs(),
protocols: [], protocols: []
}; };
} }
@ -122,7 +122,7 @@ export class ENR extends RawEnr implements IEnr {
* @param protocol * @param protocol
*/ */
getFullMultiaddr( getFullMultiaddr(
protocol: TransportProtocol | TransportProtocolPerIpVersion, protocol: TransportProtocol | TransportProtocolPerIpVersion
): Multiaddr | undefined { ): Multiaddr | undefined {
if (this.peerId) { if (this.peerId) {
const locationMultiaddr = this.getLocationMultiaddr(protocol); const locationMultiaddr = this.getLocationMultiaddr(protocol);

View File

@ -5,7 +5,7 @@ import { multiaddrFromFields } from "./multiaddr_from_fields.js";
export function locationMultiaddrFromEnrFields( export function locationMultiaddrFromEnrFields(
enr: IEnr, enr: IEnr,
protocol: "udp" | "udp4" | "udp6" | "tcp" | "tcp4" | "tcp6", protocol: "udp" | "udp4" | "udp6" | "tcp" | "tcp4" | "tcp6"
): Multiaddr | undefined { ): Multiaddr | undefined {
switch (protocol) { switch (protocol) {
case "udp": case "udp":
@ -42,6 +42,6 @@ export function locationMultiaddrFromEnrFields(
isIpv6 ? "ip6" : "ip4", isIpv6 ? "ip6" : "ip4",
protoName, protoName,
ipVal, ipVal,
protoVal, protoVal
); );
} }

View File

@ -6,12 +6,12 @@ export function multiaddrFromFields(
ipFamily: string, ipFamily: string,
protocol: string, protocol: string,
ipBytes: Uint8Array, ipBytes: Uint8Array,
protocolBytes: Uint8Array, protocolBytes: Uint8Array
): Multiaddr { ): Multiaddr {
let ma = multiaddr("/" + ipFamily + "/" + convertToString(ipFamily, ipBytes)); let ma = multiaddr("/" + ipFamily + "/" + convertToString(ipFamily, ipBytes));
ma = ma.encapsulate( ma = ma.encapsulate(
multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes)), multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes))
); );
return ma; return ma;
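For context, the encapsulation performed by multiaddrFromFields boils down to the following @multiformats/multiaddr calls, shown here with string components instead of the raw ENR byte fields:

import { multiaddr } from "@multiformats/multiaddr";

// Build the IP component first, then wrap the transport component around it.
const ipPart = multiaddr("/ip4/192.168.1.1");
const full = ipPart.encapsulate(multiaddr("/tcp/30303"));
console.log(full.toString()); // /ip4/192.168.1.1/tcp/30303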

View File

@ -8,11 +8,11 @@ describe("ENR multiaddrs codec", function () {
const multiaddrs = [ const multiaddrs = [
multiaddr("/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"), multiaddr("/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"),
multiaddr( multiaddr(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss", "/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
), ),
multiaddr( multiaddr(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss", "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
), )
]; ];
const bytes = encodeMultiaddrs(multiaddrs); const bytes = encodeMultiaddrs(multiaddrs);
@ -20,13 +20,13 @@ describe("ENR multiaddrs codec", function () {
const multiaddrsAsStr = result.map((ma) => ma.toString()); const multiaddrsAsStr = result.map((ma) => ma.toString());
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss", "/dns4/node-01.do-ams3.wakuv2.test.statusim.net/tcp/443/wss"
); );
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss", "/dns6/node-01.ac-cn-hongkong-c.wakuv2.test.statusim.net/tcp/443/wss"
); );
expect(multiaddrsAsStr).to.include( expect(multiaddrsAsStr).to.include(
"/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss", "/onion3/vww6ybal4bd7szmgncyruucpgfkqahzddi37ktceo3ah7ngmcopnpyyd:1234/wss"
); );
}); });
}); });

View File

@ -12,7 +12,7 @@ export function decodeMultiaddrs(bytes: Uint8Array): Multiaddr[] {
const sizeDataView = new DataView( const sizeDataView = new DataView(
bytes.buffer, bytes.buffer,
index, index,
MULTIADDR_LENGTH_SIZE, MULTIADDR_LENGTH_SIZE
); );
const size = sizeDataView.getUint16(0); const size = sizeDataView.getUint16(0);
index += MULTIADDR_LENGTH_SIZE; index += MULTIADDR_LENGTH_SIZE;
@ -28,7 +28,7 @@ export function decodeMultiaddrs(bytes: Uint8Array): Multiaddr[] {
export function encodeMultiaddrs(multiaddrs: Multiaddr[]): Uint8Array { export function encodeMultiaddrs(multiaddrs: Multiaddr[]): Uint8Array {
const totalLength = multiaddrs.reduce( const totalLength = multiaddrs.reduce(
(acc, ma) => acc + MULTIADDR_LENGTH_SIZE + ma.bytes.length, (acc, ma) => acc + MULTIADDR_LENGTH_SIZE + ma.bytes.length,
0, 0
); );
const bytes = new Uint8Array(totalLength); const bytes = new Uint8Array(totalLength);
const dataView = new DataView(bytes.buffer); const dataView = new DataView(bytes.buffer);
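The codec above frames each multiaddr as a 2-byte length prefix followed by its raw bytes. A self-contained sketch of the same framing applied to arbitrary byte chunks (big-endian lengths, matching the getUint16 default used in the decoder):

const LENGTH_SIZE = 2;

function encodeChunks(chunks: Uint8Array[]): Uint8Array {
  const total = chunks.reduce((acc, c) => acc + LENGTH_SIZE + c.length, 0);
  const out = new Uint8Array(total);
  const view = new DataView(out.buffer);
  let index = 0;
  for (const chunk of chunks) {
    view.setUint16(index, chunk.length); // 2-byte, big-endian length prefix
    index += LENGTH_SIZE;
    out.set(chunk, index);
    index += chunk.length;
  }
  return out;
}

function decodeChunks(bytes: Uint8Array): Uint8Array[] {
  const chunks: Uint8Array[] = [];
  let index = 0;
  while (index < bytes.length) {
    const size = new DataView(bytes.buffer, bytes.byteOffset + index, LENGTH_SIZE).getUint16(0);
    index += LENGTH_SIZE;
    chunks.push(bytes.slice(index, index + size));
    index += size;
  }
  return chunks;
}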

View File

@ -1,10 +1,10 @@
import { unmarshalPrivateKey, unmarshalPublicKey } from "@libp2p/crypto/keys"; import { unmarshalPrivateKey, unmarshalPublicKey } from "@libp2p/crypto/keys";
import { supportedKeys } from "@libp2p/crypto/keys"; import { supportedKeys } from "@libp2p/crypto/keys";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import { peerIdFromKeys } from "@libp2p/peer-id"; import { peerIdFromKeys } from "@libp2p/peer-id";
export function createPeerIdFromPublicKey( export function createPeerIdFromPublicKey(
publicKey: Uint8Array, publicKey: Uint8Array
): Promise<PeerId> { ): Promise<PeerId> {
const _publicKey = new supportedKeys.secp256k1.Secp256k1PublicKey(publicKey); const _publicKey = new supportedKeys.secp256k1.Secp256k1PublicKey(publicKey);
return peerIdFromKeys(_publicKey.bytes, undefined); return peerIdFromKeys(_publicKey.bytes, undefined);
@ -20,7 +20,7 @@ export function getPublicKeyFromPeerId(peerId: PeerId): Uint8Array {
// Only used in tests // Only used in tests
export async function getPrivateKeyFromPeerId( export async function getPrivateKeyFromPeerId(
peerId: PeerId, peerId: PeerId
): Promise<Uint8Array> { ): Promise<Uint8Array> {
if (peerId.type !== "secp256k1") { if (peerId.type !== "secp256k1") {
throw new Error("Unsupported peer id type"); throw new Error("Unsupported peer id type");

View File

@ -1,7 +1,7 @@
import type { Multiaddr } from "@multiformats/multiaddr"; import type { Multiaddr } from "@multiformats/multiaddr";
import { import {
convertToBytes, convertToBytes,
convertToString, convertToString
} from "@multiformats/multiaddr/convert"; } from "@multiformats/multiaddr/convert";
import type { ENRKey, ENRValue, SequenceNumber, Waku2 } from "@waku/interfaces"; import type { ENRKey, ENRValue, SequenceNumber, Waku2 } from "@waku/interfaces";
import { bytesToUtf8 } from "@waku/utils/bytes"; import { bytesToUtf8 } from "@waku/utils/bytes";
@ -17,7 +17,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
protected constructor( protected constructor(
kvs: Record<ENRKey, ENRValue> = {}, kvs: Record<ENRKey, ENRValue> = {},
seq: SequenceNumber = BigInt(1), seq: SequenceNumber = BigInt(1),
signature?: Uint8Array, signature?: Uint8Array
) { ) {
super(Object.entries(kvs)); super(Object.entries(kvs));
this.seq = seq; this.seq = seq;
@ -147,7 +147,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
this, this,
"waku2", "waku2",
waku2, waku2,
(w) => new Uint8Array([encodeWaku2(w)]), (w) => new Uint8Array([encodeWaku2(w)])
); );
} }
} }
@ -155,7 +155,7 @@ export class RawEnr extends Map<ENRKey, ENRValue> {
function getStringValue( function getStringValue(
map: Map<ENRKey, ENRValue>, map: Map<ENRKey, ENRValue>,
key: ENRKey, key: ENRKey,
proto: string, proto: string
): string | undefined { ): string | undefined {
const raw = map.get(key); const raw = map.get(key);
if (!raw) return; if (!raw) return;
@ -165,7 +165,7 @@ function getStringValue(
function getNumberAsStringValue( function getNumberAsStringValue(
map: Map<ENRKey, ENRValue>, map: Map<ENRKey, ENRValue>,
key: ENRKey, key: ENRKey,
proto: string, proto: string
): number | undefined { ): number | undefined {
const raw = map.get(key); const raw = map.get(key);
if (!raw) return; if (!raw) return;
@ -176,7 +176,7 @@ function setStringValue(
map: Map<ENRKey, ENRValue>, map: Map<ENRKey, ENRValue>,
key: ENRKey, key: ENRKey,
proto: string, proto: string,
value: string | undefined, value: string | undefined
): void { ): void {
deleteUndefined(map, key, value, convertToBytes.bind({}, proto)); deleteUndefined(map, key, value, convertToBytes.bind({}, proto));
} }
@ -185,7 +185,7 @@ function setNumberAsStringValue(
map: Map<ENRKey, ENRValue>, map: Map<ENRKey, ENRValue>,
key: ENRKey, key: ENRKey,
proto: string, proto: string,
value: number | undefined, value: number | undefined
): void { ): void {
setStringValue(map, key, proto, value?.toString(10)); setStringValue(map, key, proto, value?.toString(10));
} }
@ -194,7 +194,7 @@ function deleteUndefined<K, V, W>(
map: Map<K, W>, map: Map<K, W>,
key: K, key: K,
value: V | undefined, value: V | undefined,
transform: (v: V) => W, transform: (v: V) => W
): void { ): void {
if (value !== undefined) { if (value !== undefined) {
map.set(key, transform(value)); map.set(key, transform(value));

View File

@ -5,10 +5,10 @@ import { bytesToHex } from "@waku/utils/bytes";
import { keccak256 } from "./crypto.js"; import { keccak256 } from "./crypto.js";
export async function sign( export async function sign(
privKey: Uint8Array, privKey: Uint8Array,
msg: Uint8Array, msg: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return secp.sign(keccak256(msg), privKey, { return secp.sign(keccak256(msg), privKey, {
der: false, der: false
}); });
} }

View File

@ -11,7 +11,7 @@ const waku2FieldEncodings = {
allTrue: 15, allTrue: 15,
allFalse: 0, allFalse: 0,
relayAndFilterTrue: 5, relayAndFilterTrue: 5,
storeAndLightPushTrue: 10, storeAndLightPushTrue: 10
}; };
describe("ENR waku2 codec", function () { describe("ENR waku2 codec", function () {
@ -22,7 +22,7 @@ describe("ENR waku2 codec", function () {
relay: false, relay: false,
store: false, store: false,
filter: false, filter: false,
lightPush: false, lightPush: false
}; };
}); });

View File

@ -19,7 +19,7 @@ export function decodeWaku2(byte: number): Waku2 {
relay: false, relay: false,
store: false, store: false,
filter: false, filter: false,
lightPush: false, lightPush: false
}; };
if (byte % 2) waku2.relay = true; if (byte % 2) waku2.relay = true;
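The fixtures in the spec above (15 for all true, 5 for relay plus filter, 10 for store plus lightPush) pin down the bit layout: relay is bit 0, store bit 1, filter bit 2, lightPush bit 3. A self-contained sketch of that codec:

interface Waku2Flags {
  relay: boolean;
  store: boolean;
  filter: boolean;
  lightPush: boolean;
}

function encodeWaku2Flags(w: Waku2Flags): number {
  return (w.relay ? 1 : 0) | (w.store ? 2 : 0) | (w.filter ? 4 : 0) | (w.lightPush ? 8 : 0);
}

function decodeWaku2Flags(byte: number): Waku2Flags {
  return {
    relay: (byte & 1) !== 0,
    store: (byte & 2) !== 0,
    filter: (byte & 4) !== 0,
    lightPush: (byte & 8) !== 0
  };
}

// encodeWaku2Flags({ relay: true, store: false, filter: true, lightPush: false }) === 5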

View File

@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };

View File

@ -47,19 +47,12 @@
"node": ">=16" "node": ">=16"
}, },
"devDependencies": { "devDependencies": {
"@chainsafe/libp2p-gossipsub": "^9.1.0", "@chainsafe/libp2p-gossipsub": "^10.0.0",
"@libp2p/interface-connection": "^5.1.1",
"@libp2p/interface-connection-manager": "^3.0.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@multiformats/multiaddr": "^12.0.0", "@multiformats/multiaddr": "^12.0.0",
"cspell": "^7.0.0", "cspell": "^7.0.0",
"npm-run-all": "^4.1.5", "npm-run-all": "^4.1.5",
"typescript": "^5.0.4", "typescript": "^5.0.4",
"libp2p": "^0.45.9" "libp2p": "^0.46.3"
}, },
"typedoc": { "typedoc": {
"entryPoint": "./src/index.ts" "entryPoint": "./src/index.ts"

View File

@ -1,10 +1,10 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer } from "@libp2p/interface-peer-store"; import type { Peer } from "@libp2p/interface/peer-store";
import type { EventEmitter } from "@libp2p/interfaces/events"; import type { EventEmitter } from "@libp2p/interfaces/events";
export enum Tags { export enum Tags {
BOOTSTRAP = "bootstrap", BOOTSTRAP = "bootstrap",
PEER_EXCHANGE = "peer-exchange", PEER_EXCHANGE = "peer-exchange"
} }
export interface ConnectionManagerOptions { export interface ConnectionManagerOptions {
@ -28,7 +28,7 @@ export enum EPeersByDiscoveryEvents {
PEER_DISCOVERY_BOOTSTRAP = "peer:discovery:bootstrap", PEER_DISCOVERY_BOOTSTRAP = "peer:discovery:bootstrap",
PEER_DISCOVERY_PEER_EXCHANGE = "peer:discovery:peer-exchange", PEER_DISCOVERY_PEER_EXCHANGE = "peer:discovery:peer-exchange",
PEER_CONNECT_BOOTSTRAP = "peer:connected:bootstrap", PEER_CONNECT_BOOTSTRAP = "peer:connected:bootstrap",
PEER_CONNECT_PEER_EXCHANGE = "peer:connected:peer-exchange", PEER_CONNECT_PEER_EXCHANGE = "peer:connected:peer-exchange"
} }
export interface IPeersByDiscoveryEvents { export interface IPeersByDiscoveryEvents {

View File

@ -1,5 +1,5 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface-peer-info"; import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { Multiaddr } from "@multiformats/multiaddr"; import type { Multiaddr } from "@multiformats/multiaddr";
export type ENRKey = string; export type ENRKey = string;

View File

@ -1,4 +1,4 @@
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { IDecodedMessage, IDecoder } from "./message.js"; import type { IDecodedMessage, IDecoder } from "./message.js";
import type { ContentTopic } from "./misc.js"; import type { ContentTopic } from "./misc.js";
@ -12,7 +12,7 @@ export type ContentFilter = {
export interface IFilterSubscription { export interface IFilterSubscription {
subscribe<T extends IDecodedMessage>( subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[], decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>, callback: Callback<T>
): Promise<void>; ): Promise<void>;
unsubscribe(contentTopics: ContentTopic[]): Promise<void>; unsubscribe(contentTopics: ContentTopic[]): Promise<void>;
@ -26,6 +26,6 @@ export type IFilter = IReceiver &
IBaseProtocol & { IBaseProtocol & {
createSubscription( createSubscription(
pubSubTopic?: string, pubSubTopic?: string,
peerId?: PeerId, peerId?: PeerId
): Promise<IFilterSubscription>; ): Promise<IFilterSubscription>;
}; };
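A hedged sketch of driving the IFilterSubscription API typed above; the content topic is a placeholder and createDecoder is assumed to come from @waku/core, which this diff does not show.

import { createDecoder } from "@waku/core"; // assumed source of a plain (unencrypted) decoder
import type { IFilter } from "@waku/interfaces";

async function listen(filter: IFilter): Promise<void> {
  const contentTopic = "/example/1/demo/proto";
  const decoder = createDecoder(contentTopic);

  // createSubscription(pubSubTopic?, peerId?) as declared above.
  const subscription = await filter.createSubscription();
  await subscription.subscribe(decoder, (msg) => {
    console.log("received", msg.payload.length, "bytes");
  });

  // Later: stop receiving messages for this content topic.
  await subscription.unsubscribe([contentTopic]);
}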

View File

@ -1,5 +1,5 @@
import type { GossipSub } from "@chainsafe/libp2p-gossipsub"; import type { GossipSub } from "@chainsafe/libp2p-gossipsub";
import type { Libp2p as BaseLibp2p } from "@libp2p/interface-libp2p"; import type { Libp2p as BaseLibp2p } from "@libp2p/interface";
import type { Libp2pInit } from "libp2p"; import type { Libp2pInit } from "libp2p";
import type { identifyService } from "libp2p/identify"; import type { identifyService } from "libp2p/identify";
import type { PingService } from "libp2p/ping"; import type { PingService } from "libp2p/ping";

View File

@ -73,6 +73,6 @@ export interface IDecoder<T extends IDecodedMessage> {
fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>; fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>;
fromProtoObj: ( fromProtoObj: (
pubSubTopic: string, pubSubTopic: string,
proto: IProtoMessage, proto: IProtoMessage
) => Promise<T | undefined>; ) => Promise<T | undefined>;
} }

View File

@ -1,6 +1,6 @@
import type { ConnectionManager } from "@libp2p/interface-connection-manager"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerStore } from "@libp2p/interface/peer-store";
import type { PeerStore } from "@libp2p/interface-peer-store"; import type { ConnectionManager } from "@libp2p/interface-internal/connection-manager";
import { IEnr } from "./enr.js"; import { IEnr } from "./enr.js";
import { IBaseProtocol } from "./protocols.js"; import { IBaseProtocol } from "./protocols.js";

View File

@ -1,6 +1,6 @@
import type { Libp2p } from "@libp2p/interface-libp2p"; import type { Libp2p } from "@libp2p/interface";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { Peer, PeerStore } from "@libp2p/interface-peer-store"; import type { Peer, PeerStore } from "@libp2p/interface/peer-store";
import type { Libp2pOptions } from "libp2p"; import type { Libp2pOptions } from "libp2p";
import type { IDecodedMessage } from "./message.js"; import type { IDecodedMessage } from "./message.js";
@ -9,7 +9,7 @@ export enum Protocols {
Relay = "relay", Relay = "relay",
Store = "store", Store = "store",
LightPush = "lightpush", LightPush = "lightpush",
Filter = "filter", Filter = "filter"
} }
export interface IBaseProtocol { export interface IBaseProtocol {
@ -62,7 +62,7 @@ export type ProtocolOptions = {
}; };
export type Callback<T extends IDecodedMessage> = ( export type Callback<T extends IDecodedMessage> = (
msg: T, msg: T
) => void | Promise<void>; ) => void | Promise<void>;
export enum SendError { export enum SendError {
@ -70,7 +70,7 @@ export enum SendError {
ENCODE_FAILED = "Failed to encode", ENCODE_FAILED = "Failed to encode",
DECODE_FAILED = "Failed to decode", DECODE_FAILED = "Failed to decode",
SIZE_TOO_BIG = "Size is too big", SIZE_TOO_BIG = "Size is too big",
NO_RPC_RESPONSE = "No RPC response", NO_RPC_RESPONSE = "No RPC response"
} }
export interface SendResult { export interface SendResult {
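A small illustration of the Callback<T> shape declared above; the IDecodedMessage payload field is assumed from the interfaces package rather than shown in this hunk.

import type { Callback, IDecodedMessage } from "@waku/interfaces";

// Both synchronous and asynchronous callbacks satisfy `void | Promise<void>`.
export const logPayload: Callback<IDecodedMessage> = async (msg) => {
  console.log(new TextDecoder().decode(msg.payload));
};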

View File

@ -9,11 +9,11 @@ export type ActiveSubscriptions = Map<PubSubTopic, ContentTopic[]>;
export interface IReceiver { export interface IReceiver {
toSubscriptionIterator: <T extends IDecodedMessage>( toSubscriptionIterator: <T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[], decoders: IDecoder<T> | IDecoder<T>[],
opts?: ProtocolOptions, opts?: ProtocolOptions
) => Promise<IAsyncIterator<T>>; ) => Promise<IAsyncIterator<T>>;
subscribe: <T extends IDecodedMessage>( subscribe: <T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[], decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>, callback: Callback<T>,
opts?: ProtocolOptions, opts?: ProtocolOptions
) => Unsubscribe | Promise<Unsubscribe>; ) => Unsubscribe | Promise<Unsubscribe>;
} }

View File

@ -11,7 +11,7 @@ import type { ISender } from "./sender.js";
* @property start - Function to start the relay, returning a Promise that resolves when initialization is complete. * @property start - Function to start the relay, returning a Promise that resolves when initialization is complete.
* @property getMeshPeers - Function to retrieve the mesh peers for a given topic or all topics if none is specified. Returns an array of peer IDs as strings. * @property getMeshPeers - Function to retrieve the mesh peers for a given topic or all topics if none is specified. Returns an array of peer IDs as strings.
*/ */
interface IRelayAPI { export interface IRelayAPI {
readonly gossipSub: GossipSub; readonly gossipSub: GossipSub;
start: () => Promise<void>; start: () => Promise<void>;
getMeshPeers: (topic?: TopicStr) => PeerIdStr[]; getMeshPeers: (topic?: TopicStr) => PeerIdStr[];

View File

@ -5,6 +5,6 @@ export interface ISender {
send: ( send: (
encoder: IEncoder, encoder: IEncoder,
message: IMessage, message: IMessage,
opts?: ProtocolOptions, opts?: ProtocolOptions
) => Promise<SendResult>; ) => Promise<SendResult>;
} }

View File

@ -3,7 +3,7 @@ import type { IBaseProtocol, ProtocolOptions } from "./protocols.js";
export enum PageDirection { export enum PageDirection {
BACKWARD = "backward", BACKWARD = "backward",
FORWARD = "forward", FORWARD = "forward"
} }
export interface TimeFilter { export interface TimeFilter {
@ -49,17 +49,17 @@ export interface IStore extends IBaseProtocol {
queryOrderedCallback: <T extends IDecodedMessage>( queryOrderedCallback: <T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
callback: (message: T) => Promise<void | boolean> | boolean | void, callback: (message: T) => Promise<void | boolean> | boolean | void,
options?: StoreQueryOptions, options?: StoreQueryOptions
) => Promise<void>; ) => Promise<void>;
queryCallbackOnPromise: <T extends IDecodedMessage>( queryCallbackOnPromise: <T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
callback: ( callback: (
message: Promise<T | undefined>, message: Promise<T | undefined>
) => Promise<void | boolean> | boolean | void, ) => Promise<void | boolean> | boolean | void,
options?: StoreQueryOptions, options?: StoreQueryOptions
) => Promise<void>; ) => Promise<void>;
queryGenerator: <T extends IDecodedMessage>( queryGenerator: <T extends IDecodedMessage>(
decoders: IDecoder<T>[], decoders: IDecoder<T>[],
options?: StoreQueryOptions, options?: StoreQueryOptions
) => AsyncGenerator<Promise<T | undefined>[]>; ) => AsyncGenerator<Promise<T | undefined>[]>;
} }
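A hedged consumption sketch for the queryGenerator signature above (an async generator of pages, each page an array of promises); the store instance and decoder are stand-ins, and the payload field on the decoded message is assumed.

import type { IDecodedMessage, IDecoder, IStore } from "@waku/interfaces";

async function drainStore<T extends IDecodedMessage>(
  store: IStore,
  decoder: IDecoder<T>
): Promise<void> {
  for await (const page of store.queryGenerator([decoder])) {
    // Each entry resolves independently; undecodable messages come back as undefined.
    const messages = await Promise.all(page);
    for (const msg of messages) {
      if (msg) console.log("stored message of", msg.payload.length, "bytes");
    }
  }
}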

View File

@ -1,5 +1,5 @@
import type { Stream } from "@libp2p/interface-connection"; import type { Stream } from "@libp2p/interface/connection";
import type { PeerId } from "@libp2p/interface-peer-id"; import type { PeerId } from "@libp2p/interface/peer-id";
import type { Multiaddr } from "@multiformats/multiaddr"; import type { Multiaddr } from "@multiformats/multiaddr";
import { IConnectionManager } from "./connection_manager.js"; import { IConnectionManager } from "./connection_manager.js";

View File

@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };

View File

@ -10,7 +10,7 @@ Symmetric encryption uses a unique key to encrypt and decrypt messages.
import { import {
createDecoder, createDecoder,
createEncoder, createEncoder,
generateSymmetricKey, generateSymmetricKey
} from "@waku/message-encryption/symmetric"; } from "@waku/message-encryption/symmetric";
// Generate a random key // Generate a random key
@ -40,7 +40,7 @@ import {
createDecoder, createDecoder,
createEncoder, createEncoder,
generatePrivateKey, generatePrivateKey,
getPublicKey, getPublicKey
} from "@waku/message-encryption/ecies"; } from "@waku/message-encryption/ecies";
// Generate a random private key // Generate a random private key

View File

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"], frameworks: ["webpack", "mocha"],
files: ["src/**/*.ts"], files: ["src/**/*.ts"],
preprocessors: { preprocessors: {
"src/**/*.ts": ["webpack"], "src/**/*.ts": ["webpack"]
}, },
envPreprocessor: ["CI"], envPreprocessor: ["CI"],
reporters: ["progress"], reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true, singleRun: true,
client: { client: {
mocha: { mocha: {
timeout: 6000, // Default is 2s timeout: 6000 // Default is 2s
}, }
}, },
webpack: { webpack: {
mode: "development", mode: "development",
module: { module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }], rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
}, },
plugins: [ plugins: [
new webpack.DefinePlugin({ new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false, "process.env.CI": process.env.CI || false
}), }),
new webpack.ProvidePlugin({ new webpack.ProvidePlugin({
process: "process/browser.js", process: "process/browser.js"
}), })
], ],
resolve: { resolve: {
extensions: [".ts", ".tsx", ".js"], extensions: [".ts", ".tsx", ".js"],
extensionAlias: { extensionAlias: {
".js": [".js", ".ts"], ".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"], ".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"], ".mjs": [".mjs", ".mts"]
}, }
}, },
stats: { warnings: false }, stats: { warnings: false },
devtool: "inline-source-map", devtool: "inline-source-map"
}, }
}); });
}; };

View File

@ -80,12 +80,6 @@
"js-sha3": "^0.8.0" "js-sha3": "^0.8.0"
}, },
"devDependencies": { "devDependencies": {
"@libp2p/interface-connection": "^5.1.1",
"@libp2p/interface-connection-manager": "^3.0.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@rollup/plugin-commonjs": "^24.0.1", "@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0", "@rollup/plugin-json": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.0.2", "@rollup/plugin-node-resolve": "^15.0.2",

View File

@ -11,14 +11,14 @@ export default {
input, input,
output: { output: {
dir: "bundle", dir: "bundle",
format: "esm", format: "esm"
}, },
plugins: [ plugins: [
commonjs(), commonjs(),
json(), json(),
nodeResolve({ nodeResolve({
browser: true, browser: true,
preferBuiltins: false, preferBuiltins: false
}), })
], ]
}; };

View File

@ -2,9 +2,9 @@ export const Symmetric = {
keySize: 32, keySize: 32,
ivSize: 12, ivSize: 12,
tagSize: 16, tagSize: 16,
algorithm: { name: "AES-GCM", length: 128 }, algorithm: { name: "AES-GCM", length: 128 }
}; };
export const Asymmetric = { export const Asymmetric = {
keySize: 32, keySize: 32
}; };

View File

@ -13,7 +13,7 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
const counters = new Uint8Array([ctr >> 24, ctr >> 16, ctr >> 8, ctr]); const counters = new Uint8Array([ctr >> 24, ctr >> 16, ctr >> 8, ctr]);
const countersSecret = concat( const countersSecret = concat(
[counters, secret], [counters, secret],
counters.length + secret.length, counters.length + secret.length
); );
const willBeHashResult = sha256(countersSecret); const willBeHashResult = sha256(countersSecret);
willBeResult = willBeResult.then((result) => willBeResult = willBeResult.then((result) =>
@ -21,9 +21,9 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
const _hashResult = new Uint8Array(hashResult); const _hashResult = new Uint8Array(hashResult);
return concat( return concat(
[result, _hashResult], [result, _hashResult],
result.length + _hashResult.length, result.length + _hashResult.length
); );
}), })
); );
written += 32; written += 32;
ctr += 1; ctr += 1;
@ -34,7 +34,7 @@ function kdf(secret: Uint8Array, outputLength: number): Promise<Uint8Array> {
function aesCtrEncrypt( function aesCtrEncrypt(
counter: Uint8Array, counter: Uint8Array,
key: ArrayBufferLike, key: ArrayBufferLike,
data: ArrayBufferLike, data: ArrayBufferLike
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return getSubtle() return getSubtle()
.importKey("raw", key, "AES-CTR", false, ["encrypt"]) .importKey("raw", key, "AES-CTR", false, ["encrypt"])
@ -42,8 +42,8 @@ function aesCtrEncrypt(
getSubtle().encrypt( getSubtle().encrypt(
{ name: "AES-CTR", counter: counter, length: 128 }, { name: "AES-CTR", counter: counter, length: 128 },
cryptoKey, cryptoKey,
data, data
), )
) )
.then((bytes) => new Uint8Array(bytes)); .then((bytes) => new Uint8Array(bytes));
} }
@ -51,7 +51,7 @@ function aesCtrEncrypt(
function aesCtrDecrypt( function aesCtrDecrypt(
counter: Uint8Array, counter: Uint8Array,
key: ArrayBufferLike, key: ArrayBufferLike,
data: ArrayBufferLike, data: ArrayBufferLike
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return getSubtle() return getSubtle()
.importKey("raw", key, "AES-CTR", false, ["decrypt"]) .importKey("raw", key, "AES-CTR", false, ["decrypt"])
@ -59,15 +59,15 @@ function aesCtrDecrypt(
getSubtle().decrypt( getSubtle().decrypt(
{ name: "AES-CTR", counter: counter, length: 128 }, { name: "AES-CTR", counter: counter, length: 128 },
cryptoKey, cryptoKey,
data, data
), )
) )
.then((bytes) => new Uint8Array(bytes)); .then((bytes) => new Uint8Array(bytes));
} }
function hmacSha256Sign( function hmacSha256Sign(
key: ArrayBufferLike, key: ArrayBufferLike,
msg: ArrayBufferLike, msg: ArrayBufferLike
): PromiseLike<Uint8Array> { ): PromiseLike<Uint8Array> {
const algorithm = { name: "HMAC", hash: { name: "SHA-256" } }; const algorithm = { name: "HMAC", hash: { name: "SHA-256" } };
return getSubtle() return getSubtle()
@ -79,12 +79,12 @@ function hmacSha256Sign(
function hmacSha256Verify( function hmacSha256Verify(
key: ArrayBufferLike, key: ArrayBufferLike,
msg: ArrayBufferLike, msg: ArrayBufferLike,
sig: ArrayBufferLike, sig: ArrayBufferLike
): Promise<boolean> { ): Promise<boolean> {
const algorithm = { name: "HMAC", hash: { name: "SHA-256" } }; const algorithm = { name: "HMAC", hash: { name: "SHA-256" } };
const _key = getSubtle().importKey("raw", key, algorithm, false, ["verify"]); const _key = getSubtle().importKey("raw", key, algorithm, false, ["verify"]);
return _key.then((cryptoKey) => return _key.then((cryptoKey) =>
getSubtle().verify(algorithm, cryptoKey, sig, msg), getSubtle().verify(algorithm, cryptoKey, sig, msg)
); );
} }
@ -99,11 +99,11 @@ function hmacSha256Verify(
function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array { function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array {
if (privateKeyA.length !== 32) { if (privateKeyA.length !== 32) {
throw new Error( throw new Error(
`Bad private key, it should be 32 bytes but it's actually ${privateKeyA.length} bytes long`, `Bad private key, it should be 32 bytes but it's actually ${privateKeyA.length} bytes long`
); );
} else if (publicKeyB.length !== 65) { } else if (publicKeyB.length !== 65) {
throw new Error( throw new Error(
`Bad public key, it should be 65 bytes but it's actually ${publicKeyB.length} bytes long`, `Bad public key, it should be 65 bytes but it's actually ${publicKeyB.length} bytes long`
); );
} else if (publicKeyB[0] !== 4) { } else if (publicKeyB[0] !== 4) {
throw new Error("Bad public key, a valid public key would begin with 4"); throw new Error("Bad public key, a valid public key would begin with 4");
@ -123,7 +123,7 @@ function derive(privateKeyA: Uint8Array, publicKeyB: Uint8Array): Uint8Array {
*/ */
export async function encrypt( export async function encrypt(
publicKeyTo: Uint8Array, publicKeyTo: Uint8Array,
msg: Uint8Array, msg: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
const ephemPrivateKey = randomBytes(32); const ephemPrivateKey = randomBytes(32);
@ -143,7 +143,7 @@ export async function encrypt(
return concat( return concat(
[ephemPublicKey, ivCipherText, hmac], [ephemPublicKey, ivCipherText, hmac],
ephemPublicKey.length + ivCipherText.length + hmac.length, ephemPublicKey.length + ivCipherText.length + hmac.length
); );
} }
@ -159,15 +159,15 @@ const metaLength = 1 + 64 + 16 + 32;
*/ */
export async function decrypt( export async function decrypt(
privateKey: Uint8Array, privateKey: Uint8Array,
encrypted: Uint8Array, encrypted: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
if (encrypted.length <= metaLength) { if (encrypted.length <= metaLength) {
throw new Error( throw new Error(
`Invalid Ciphertext. Data is too small. It should be at least ${metaLength} bytes`, `Invalid Ciphertext. Data is too small. It should be at least ${metaLength} bytes`
); );
} else if (encrypted[0] !== 4) { } else if (encrypted[0] !== 4) {
throw new Error( throw new Error(
`Not a valid ciphertext. It should begin with 4 but actually begins with ${encrypted[0]}`, `Not a valid ciphertext. It should begin with 4 but actually begins with ${encrypted[0]}`
); );
} else { } else {
// deserialize // deserialize
@ -182,7 +182,7 @@ export async function decrypt(
const px = derive(privateKey, ephemPublicKey); const px = derive(privateKey, ephemPublicKey);
const hash = await kdf(px, 32); const hash = await kdf(px, 32);
const [encryptionKey, macKey] = await sha256(hash.slice(16)).then( const [encryptionKey, macKey] = await sha256(hash.slice(16)).then(
(macKey) => [hash.slice(0, 16), macKey], (macKey) => [hash.slice(0, 16), macKey]
); );
if (!(await hmacSha256Verify(macKey, cipherAndIv, msgMac))) { if (!(await hmacSha256Verify(macKey, cipherAndIv, msgMac))) {
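For reference, the ciphertext assembled by encrypt() above is laid out as a 65-byte uncompressed ephemeral public key, then the IV plus AES-CTR ciphertext, then a 32-byte HMAC (which is where metaLength = 1 + 64 + 16 + 32 comes from). A small sketch that splits a payload back into those parts:

function splitEciesPayload(encrypted: Uint8Array): {
  ephemPublicKey: Uint8Array;
  ivCipherText: Uint8Array;
  hmac: Uint8Array;
} {
  // Mirrors the concat() order used by encrypt() above.
  const ephemPublicKey = encrypted.slice(0, 65);
  const hmac = encrypted.slice(encrypted.length - 32);
  const ivCipherText = encrypted.slice(65, encrypted.length - 32);
  return { ephemPublicKey, ivCipherText, hmac };
}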

View File

@ -9,7 +9,7 @@ import { Asymmetric, Symmetric } from "../constants.js";
declare const self: Record<string, any> | undefined; declare const self: Record<string, any> | undefined;
const crypto: { node?: any; web?: any } = { const crypto: { node?: any; web?: any } = {
node: nodeCrypto, node: nodeCrypto,
web: typeof self === "object" && "crypto" in self ? self.crypto : undefined, web: typeof self === "object" && "crypto" in self ? self.crypto : undefined
}; };
export function getSubtle(): SubtleCrypto { export function getSubtle(): SubtleCrypto {
@ -19,7 +19,7 @@ export function getSubtle(): SubtleCrypto {
return crypto.node.webcrypto.subtle; return crypto.node.webcrypto.subtle;
} else { } else {
throw new Error( throw new Error(
"The environment doesn't have Crypto Subtle API (if in the browser, be sure to use to be in a secure context, ie, https)", "The environment doesn't have Crypto Subtle API (if in the browser, be sure to use to be in a secure context, ie, https)"
); );
} }
} }
@ -59,15 +59,15 @@ export const getPublicKey = secp.getPublicKey;
*/ */
export async function sign( export async function sign(
message: Uint8Array, message: Uint8Array,
privateKey: Uint8Array, privateKey: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
const [signature, recoveryId] = await secp.sign(message, privateKey, { const [signature, recoveryId] = await secp.sign(message, privateKey, {
recovered: true, recovered: true,
der: false, der: false
}); });
return concat( return concat(
[signature, new Uint8Array([recoveryId])], [signature, new Uint8Array([recoveryId])],
signature.length + 1, signature.length + 1
); );
} }

View File

@ -5,12 +5,12 @@ import { getSubtle, randomBytes } from "./index.js";
export async function encrypt( export async function encrypt(
iv: Uint8Array, iv: Uint8Array,
key: Uint8Array, key: Uint8Array,
clearText: Uint8Array, clearText: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return getSubtle() return getSubtle()
.importKey("raw", key, Symmetric.algorithm, false, ["encrypt"]) .importKey("raw", key, Symmetric.algorithm, false, ["encrypt"])
.then((cryptoKey) => .then((cryptoKey) =>
getSubtle().encrypt({ iv, ...Symmetric.algorithm }, cryptoKey, clearText), getSubtle().encrypt({ iv, ...Symmetric.algorithm }, cryptoKey, clearText)
) )
.then((cipher) => new Uint8Array(cipher)); .then((cipher) => new Uint8Array(cipher));
} }
@ -18,16 +18,12 @@ export async function encrypt(
export async function decrypt( export async function decrypt(
iv: Uint8Array, iv: Uint8Array,
key: Uint8Array, key: Uint8Array,
cipherText: Uint8Array, cipherText: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return getSubtle() return getSubtle()
.importKey("raw", key, Symmetric.algorithm, false, ["decrypt"]) .importKey("raw", key, Symmetric.algorithm, false, ["decrypt"])
.then((cryptoKey) => .then((cryptoKey) =>
getSubtle().decrypt( getSubtle().decrypt({ iv, ...Symmetric.algorithm }, cryptoKey, cipherText)
{ iv, ...Symmetric.algorithm },
cryptoKey,
cipherText,
),
) )
.then((clear) => new Uint8Array(clear)); .then((clear) => new Uint8Array(clear));
} }
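A self-contained WebCrypto round trip using the same shape as the helpers above (AES-GCM with a 32-byte key and 12-byte IV); it assumes a runtime where globalThis.crypto is available, such as a browser or modern Node.

async function aesGcmRoundTrip(): Promise<void> {
  const { subtle } = globalThis.crypto;
  const keyBytes = globalThis.crypto.getRandomValues(new Uint8Array(32));
  const iv = globalThis.crypto.getRandomValues(new Uint8Array(12));

  const key = await subtle.importKey("raw", keyBytes, { name: "AES-GCM" }, false, [
    "encrypt",
    "decrypt"
  ]);
  const clearText = new TextEncoder().encode("hello");
  const cipher = new Uint8Array(
    await subtle.encrypt({ name: "AES-GCM", iv }, key, clearText)
  );
  const decrypted = new Uint8Array(
    await subtle.decrypt({ name: "AES-GCM", iv }, key, cipher)
  );
  console.log(new TextDecoder().decode(decrypted)); // hello
}

void aesGcmRoundTrip();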

View File

@ -1,6 +1,6 @@
import { import {
DecodedMessage as DecodedMessageV0, DecodedMessage as DecodedMessageV0,
proto, proto
} from "@waku/core/lib/message/version_0"; } from "@waku/core/lib/message/version_0";
import type { IDecodedMessage } from "@waku/interfaces"; import type { IDecodedMessage } from "@waku/interfaces";
@ -15,7 +15,7 @@ export class DecodedMessage
proto: proto.WakuMessage, proto: proto.WakuMessage,
decodedPayload: Uint8Array, decodedPayload: Uint8Array,
public signature?: Uint8Array, public signature?: Uint8Array,
public signaturePublicKey?: Uint8Array, public signaturePublicKey?: Uint8Array
) { ) {
super(pubSubTopic, proto); super(pubSubTopic, proto);
this._decodedPayload = decodedPayload; this._decodedPayload = decodedPayload;

View File

@ -18,7 +18,7 @@ describe("Ecies Encryption", function () {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
publicKey, publicKey
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
@ -34,8 +34,8 @@ describe("Ecies Encryption", function () {
expect(result?.payload).to.deep.equal(payload); expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined; expect(result.signature).to.be.undefined;
expect(result.signaturePublicKey).to.be.undefined; expect(result.signaturePublicKey).to.be.undefined;
}, }
), )
); );
}); });
@ -54,7 +54,7 @@ describe("Ecies Encryption", function () {
contentTopic, contentTopic,
payload, payload,
alicePrivateKey, alicePrivateKey,
bobPrivateKey, bobPrivateKey
) => { ) => {
const alicePublicKey = getPublicKey(alicePrivateKey); const alicePublicKey = getPublicKey(alicePrivateKey);
const bobPublicKey = getPublicKey(bobPrivateKey); const bobPublicKey = getPublicKey(bobPrivateKey);
@ -62,7 +62,7 @@ describe("Ecies Encryption", function () {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
publicKey: bobPublicKey, publicKey: bobPublicKey,
sigPrivKey: alicePrivateKey, sigPrivKey: alicePrivateKey
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
@ -78,8 +78,8 @@ describe("Ecies Encryption", function () {
expect(result?.payload).to.deep.equal(payload); expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined; expect(result.signature).to.not.be.undefined;
expect(result.signaturePublicKey).to.deep.eq(alicePublicKey); expect(result.signaturePublicKey).to.deep.eq(alicePublicKey);
}, }
), )
); );
}); });
@ -93,7 +93,7 @@ describe("Ecies Encryption", function () {
async (pubSubTopic, contentTopic, payload, privateKey) => { async (pubSubTopic, contentTopic, payload, privateKey) => {
const publicKey = getPublicKey(privateKey); const publicKey = getPublicKey(privateKey);
const metaSetter = ( const metaSetter = (
msg: IProtoMessage & { meta: undefined }, msg: IProtoMessage & { meta: undefined }
): Uint8Array => { ): Uint8Array => {
const buffer = new ArrayBuffer(4); const buffer = new ArrayBuffer(4);
const view = new DataView(buffer); const view = new DataView(buffer);
@ -104,7 +104,7 @@ describe("Ecies Encryption", function () {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
publicKey, publicKey,
metaSetter, metaSetter
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
@ -121,12 +121,12 @@ describe("Ecies Encryption", function () {
ephemeral: undefined, ephemeral: undefined,
meta: undefined, meta: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
version: undefined, version: undefined
}); });
expect(result.meta).to.deep.equal(expectedMeta); expect(result.meta).to.deep.equal(expectedMeta);
}, }
), )
); );
}); });
}); });
@ -136,7 +136,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void { const wrapper = function (): void {
createEncoder({ createEncoder({
contentTopic: undefined as unknown as string, contentTopic: undefined as unknown as string,
publicKey: new Uint8Array(), publicKey: new Uint8Array()
}); });
}; };

View File

@ -5,7 +5,7 @@ import type {
IDecoder, IDecoder,
IEncoder, IEncoder,
IMessage, IMessage,
IProtoMessage, IProtoMessage
} from "@waku/interfaces"; } from "@waku/interfaces";
import { WakuMessage } from "@waku/proto"; import { WakuMessage } from "@waku/proto";
import debug from "debug"; import debug from "debug";
@ -15,14 +15,14 @@ import {
decryptAsymmetric, decryptAsymmetric,
encryptAsymmetric, encryptAsymmetric,
postCipher, postCipher,
preCipher, preCipher
} from "./waku_payload.js"; } from "./waku_payload.js";
import { import {
generatePrivateKey, generatePrivateKey,
getPublicKey, getPublicKey,
OneMillion, OneMillion,
Version, Version
} from "./index.js"; } from "./index.js";
export { generatePrivateKey, getPublicKey }; export { generatePrivateKey, getPublicKey };
@ -36,7 +36,7 @@ class Encoder implements IEncoder {
private publicKey: Uint8Array, private publicKey: Uint8Array,
private sigPrivKey?: Uint8Array, private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false, public ephemeral: boolean = false,
public metaSetter?: IMetaSetter, public metaSetter?: IMetaSetter
) { ) {
if (!contentTopic || contentTopic === "") { if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified"); throw new Error("Content topic must be specified");
@ -63,7 +63,7 @@ class Encoder implements IEncoder {
timestamp: BigInt(timestamp.valueOf()) * OneMillion, timestamp: BigInt(timestamp.valueOf()) * OneMillion,
meta: undefined, meta: undefined,
rateLimitProof: message.rateLimitProof, rateLimitProof: message.rateLimitProof,
ephemeral: this.ephemeral, ephemeral: this.ephemeral
}; };
if (this.metaSetter) { if (this.metaSetter) {
@ -99,28 +99,28 @@ export function createEncoder({
publicKey, publicKey,
sigPrivKey, sigPrivKey,
ephemeral = false, ephemeral = false,
metaSetter, metaSetter
}: EncoderOptions): Encoder { }: EncoderOptions): Encoder {
return new Encoder( return new Encoder(
contentTopic, contentTopic,
publicKey, publicKey,
sigPrivKey, sigPrivKey,
ephemeral, ephemeral,
metaSetter, metaSetter
); );
} }
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> { class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor( constructor(
contentTopic: string, contentTopic: string,
private privateKey: Uint8Array, private privateKey: Uint8Array
) { ) {
super(contentTopic); super(contentTopic);
} }
async fromProtoObj( async fromProtoObj(
pubSubTopic: string, pubSubTopic: string,
protoMessage: IProtoMessage, protoMessage: IProtoMessage
): Promise<DecodedMessage | undefined> { ): Promise<DecodedMessage | undefined> {
const cipherPayload = protoMessage.payload; const cipherPayload = protoMessage.payload;
@ -129,7 +129,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
"Failed to decrypt due to incorrect version, expected:", "Failed to decrypt due to incorrect version, expected:",
Version, Version,
", actual:", ", actual:",
protoMessage.version, protoMessage.version
); );
return; return;
} }
@ -141,7 +141,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
} catch (e) { } catch (e) {
log( log(
`Failed to decrypt message using asymmetric decryption for contentTopic: ${this.contentTopic}`, `Failed to decrypt message using asymmetric decryption for contentTopic: ${this.contentTopic}`,
e, e
); );
return; return;
} }
@ -164,7 +164,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
protoMessage, protoMessage,
res.payload, res.payload,
res.sig?.signature, res.sig?.signature,
res.sig?.publicKey, res.sig?.publicKey
); );
} }
} }
@ -184,7 +184,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/ */
export function createDecoder( export function createDecoder(
contentTopic: string, contentTopic: string,
privateKey: Uint8Array, privateKey: Uint8Array
): Decoder { ): Decoder {
return new Decoder(contentTopic, privateKey); return new Decoder(contentTopic, privateKey);
} }
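A hedged example of the ECIES encoder and decoder defined above with message signing, following the shape of the spec; key material is generated inline and the pubsub topic string is an arbitrary placeholder.

import {
  createDecoder,
  createEncoder,
  generatePrivateKey,
  getPublicKey
} from "@waku/message-encryption/ecies";

async function signedRoundTrip(): Promise<void> {
  const bobPrivateKey = generatePrivateKey();
  const alicePrivateKey = generatePrivateKey();
  const contentTopic = "/example/1/signed/proto";

  // Encrypt to Bob, sign as Alice.
  const encoder = createEncoder({
    contentTopic,
    publicKey: getPublicKey(bobPrivateKey),
    sigPrivKey: alicePrivateKey
  });
  const bytes = await encoder.toWire({ payload: new TextEncoder().encode("hi Bob") });

  const decoder = createDecoder(contentTopic, bobPrivateKey);
  const proto = await decoder.fromWireToProtoObj(bytes);
  const message = proto && (await decoder.fromProtoObj("/waku/2/default-waku/proto", proto));
  // signaturePublicKey should recover Alice's public key, as asserted in the spec above.
  console.log(message?.signaturePublicKey);
}

void signedRoundTrip();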

View File

@ -1,7 +1,7 @@
import { import {
generatePrivateKey, generatePrivateKey,
generateSymmetricKey, generateSymmetricKey,
getPublicKey, getPublicKey
} from "./crypto/index.js"; } from "./crypto/index.js";
import { DecodedMessage } from "./decoded_message.js"; import { DecodedMessage } from "./decoded_message.js";

View File

@ -16,7 +16,7 @@ describe("Symmetric Encryption", function () {
async (pubSubTopic, contentTopic, payload, symKey) => { async (pubSubTopic, contentTopic, payload, symKey) => {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
symKey, symKey
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
@ -32,8 +32,8 @@ describe("Symmetric Encryption", function () {
expect(result?.payload).to.deep.equal(payload); expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.be.undefined; expect(result.signature).to.be.undefined;
expect(result.signaturePublicKey).to.be.undefined; expect(result.signaturePublicKey).to.be.undefined;
}, }
), )
); );
}); });
@ -51,7 +51,7 @@ describe("Symmetric Encryption", function () {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
symKey, symKey,
sigPrivKey, sigPrivKey
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
@ -67,8 +67,8 @@ describe("Symmetric Encryption", function () {
expect(result?.payload).to.deep.equal(payload); expect(result?.payload).to.deep.equal(payload);
expect(result.signature).to.not.be.undefined; expect(result.signature).to.not.be.undefined;
expect(result.signaturePublicKey).to.deep.eq(sigPubKey); expect(result.signaturePublicKey).to.deep.eq(sigPubKey);
}, }
), )
); );
}); });
@ -81,7 +81,7 @@ describe("Symmetric Encryption", function () {
fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }), fc.uint8Array({ min: 1, minLength: 32, maxLength: 32 }),
async (pubSubTopic, contentTopic, payload, symKey) => { async (pubSubTopic, contentTopic, payload, symKey) => {
const metaSetter = ( const metaSetter = (
msg: IProtoMessage & { meta: undefined }, msg: IProtoMessage & { meta: undefined }
): Uint8Array => { ): Uint8Array => {
const buffer = new ArrayBuffer(4); const buffer = new ArrayBuffer(4);
const view = new DataView(buffer); const view = new DataView(buffer);
@ -92,7 +92,7 @@ describe("Symmetric Encryption", function () {
const encoder = createEncoder({ const encoder = createEncoder({
contentTopic, contentTopic,
symKey, symKey,
metaSetter, metaSetter
}); });
const bytes = await encoder.toWire({ payload }); const bytes = await encoder.toWire({ payload });
@ -109,12 +109,12 @@ describe("Symmetric Encryption", function () {
ephemeral: undefined, ephemeral: undefined,
meta: undefined, meta: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
version: undefined, version: undefined
}); });
expect(result.meta).to.deep.equal(expectedMeta); expect(result.meta).to.deep.equal(expectedMeta);
}, }
), )
); );
}); });
}); });
@ -124,7 +124,7 @@ describe("Ensures content topic is defined", () => {
const wrapper = function (): void { const wrapper = function (): void {
createEncoder({ createEncoder({
contentTopic: undefined as unknown as string, contentTopic: undefined as unknown as string,
symKey: new Uint8Array(), symKey: new Uint8Array()
}); });
}; };

View File

@ -5,7 +5,7 @@ import type {
IEncoder, IEncoder,
IMessage, IMessage,
IMetaSetter, IMetaSetter,
IProtoMessage, IProtoMessage
} from "@waku/interfaces"; } from "@waku/interfaces";
import { WakuMessage } from "@waku/proto"; import { WakuMessage } from "@waku/proto";
import debug from "debug"; import debug from "debug";
@ -15,7 +15,7 @@ import {
decryptSymmetric, decryptSymmetric,
encryptSymmetric, encryptSymmetric,
postCipher, postCipher,
preCipher, preCipher
} from "./waku_payload.js"; } from "./waku_payload.js";
import { generateSymmetricKey, OneMillion, Version } from "./index.js"; import { generateSymmetricKey, OneMillion, Version } from "./index.js";
@ -31,7 +31,7 @@ class Encoder implements IEncoder {
private symKey: Uint8Array, private symKey: Uint8Array,
private sigPrivKey?: Uint8Array, private sigPrivKey?: Uint8Array,
public ephemeral: boolean = false, public ephemeral: boolean = false,
public metaSetter?: IMetaSetter, public metaSetter?: IMetaSetter
) { ) {
if (!contentTopic || contentTopic === "") { if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified"); throw new Error("Content topic must be specified");
@ -58,7 +58,7 @@ class Encoder implements IEncoder {
timestamp: BigInt(timestamp.valueOf()) * OneMillion, timestamp: BigInt(timestamp.valueOf()) * OneMillion,
meta: undefined, meta: undefined,
rateLimitProof: message.rateLimitProof, rateLimitProof: message.rateLimitProof,
ephemeral: this.ephemeral, ephemeral: this.ephemeral
}; };
if (this.metaSetter) { if (this.metaSetter) {
@ -95,7 +95,7 @@ export function createEncoder({
symKey, symKey,
sigPrivKey, sigPrivKey,
ephemeral = false, ephemeral = false,
metaSetter, metaSetter
}: EncoderOptions): Encoder { }: EncoderOptions): Encoder {
return new Encoder(contentTopic, symKey, sigPrivKey, ephemeral, metaSetter); return new Encoder(contentTopic, symKey, sigPrivKey, ephemeral, metaSetter);
} }
@ -103,14 +103,14 @@ export function createEncoder({
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> { class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor( constructor(
contentTopic: string, contentTopic: string,
private symKey: Uint8Array, private symKey: Uint8Array
) { ) {
super(contentTopic); super(contentTopic);
} }
async fromProtoObj( async fromProtoObj(
pubSubTopic: string, pubSubTopic: string,
protoMessage: IProtoMessage, protoMessage: IProtoMessage
): Promise<DecodedMessage | undefined> { ): Promise<DecodedMessage | undefined> {
const cipherPayload = protoMessage.payload; const cipherPayload = protoMessage.payload;
@ -119,7 +119,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
"Failed to decrypt due to incorrect version, expected:", "Failed to decrypt due to incorrect version, expected:",
Version, Version,
", actual:", ", actual:",
protoMessage.version, protoMessage.version
); );
return; return;
} }
@ -131,7 +131,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
} catch (e) { } catch (e) {
log( log(
`Failed to decrypt message using symmetric decryption for contentTopic: ${this.contentTopic}`, `Failed to decrypt message using symmetric decryption for contentTopic: ${this.contentTopic}`,
e, e
); );
return; return;
} }
@ -154,7 +154,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
protoMessage, protoMessage,
res.payload, res.payload,
res.sig?.signature, res.sig?.signature,
res.sig?.publicKey, res.sig?.publicKey
); );
} }
} }
@ -174,7 +174,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/ */
export function createDecoder( export function createDecoder(
contentTopic: string, contentTopic: string,
symKey: Uint8Array, symKey: Uint8Array
): Decoder { ): Decoder {
return new Decoder(contentTopic, symKey); return new Decoder(contentTopic, symKey);
} }
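Putting the pieces above together, a hedged end-to-end sketch of the symmetric encoder and decoder going through the wire format; the pubsub topic string is an arbitrary placeholder.

import {
  createDecoder,
  createEncoder,
  generateSymmetricKey
} from "@waku/message-encryption/symmetric";

async function symmetricRoundTrip(): Promise<void> {
  const symKey = generateSymmetricKey();
  const contentTopic = "/example/1/encrypted/proto";

  const encoder = createEncoder({ contentTopic, symKey });
  const bytes = await encoder.toWire({ payload: new TextEncoder().encode("hello") });

  const decoder = createDecoder(contentTopic, symKey);
  const proto = await decoder.fromWireToProtoObj(bytes);
  if (!proto) return;
  const message = await decoder.fromProtoObj("/waku/2/default-waku/proto", proto);
  console.log(message && new TextDecoder().decode(message.payload));
}

void symmetricRoundTrip();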

View File

@ -8,7 +8,7 @@ import {
encryptAsymmetric, encryptAsymmetric,
encryptSymmetric, encryptSymmetric,
postCipher, postCipher,
preCipher, preCipher
} from "./waku_payload.js"; } from "./waku_payload.js";
describe("Waku Payload", () => { describe("Waku Payload", () => {
@ -24,8 +24,8 @@ describe("Waku Payload", () => {
const res = await decryptAsymmetric(enc, privKey); const res = await decryptAsymmetric(enc, privKey);
expect(res).deep.equal(message); expect(res).deep.equal(message);
}, }
), )
); );
}); });
@ -39,8 +39,8 @@ describe("Waku Payload", () => {
const res = await decryptSymmetric(enc, key); const res = await decryptSymmetric(enc, key);
expect(res).deep.equal(message); expect(res).deep.equal(message);
}, }
), )
); );
}); });
@ -52,9 +52,9 @@ describe("Waku Payload", () => {
expect(res?.payload).deep.equal( expect(res?.payload).deep.equal(
message, message,
"Payload was not encrypted then decrypted correctly", "Payload was not encrypted then decrypted correctly"
); );
}), })
); );
}); });
@ -71,14 +71,14 @@ describe("Waku Payload", () => {
expect(res?.payload).deep.equal( expect(res?.payload).deep.equal(
message, message,
"Payload was not encrypted then decrypted correctly", "Payload was not encrypted then decrypted correctly"
); );
expect(res?.sig?.publicKey).deep.equal( expect(res?.sig?.publicKey).deep.equal(
sigPubKey, sigPubKey,
"signature Public key was not recovered from encrypted then decrypted signature", "signature Public key was not recovered from encrypted then decrypted signature"
); );
}, }
), )
); );
}); });
}); });

View File

@ -21,7 +21,7 @@ function getSizeOfPayloadSizeField(message: Uint8Array): number {
function getPayloadSize( function getPayloadSize(
message: Uint8Array, message: Uint8Array,
sizeOfPayloadSizeField: number, sizeOfPayloadSizeField: number
): number { ): number {
let payloadSizeBytes = message.slice(1, 1 + sizeOfPayloadSizeField); let payloadSizeBytes = message.slice(1, 1 + sizeOfPayloadSizeField);
// int 32 == 4 bytes // int 32 == 4 bytes
@ -29,7 +29,7 @@ function getPayloadSize(
// If less than 4 bytes pad right (Little Endian). // If less than 4 bytes pad right (Little Endian).
payloadSizeBytes = concat( payloadSizeBytes = concat(
[payloadSizeBytes, new Uint8Array(4 - sizeOfPayloadSizeField)], [payloadSizeBytes, new Uint8Array(4 - sizeOfPayloadSizeField)],
4, 4
); );
} }
const payloadSizeDataView = new DataView(payloadSizeBytes.buffer); const payloadSizeDataView = new DataView(payloadSizeBytes.buffer);
@ -50,7 +50,7 @@ function isMessageSigned(message: Uint8Array): boolean {
*/ */
export async function encryptAsymmetric( export async function encryptAsymmetric(
data: Uint8Array, data: Uint8Array,
publicKey: Uint8Array | string, publicKey: Uint8Array | string
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return ecies.encrypt(hexToBytes(publicKey), data); return ecies.encrypt(hexToBytes(publicKey), data);
} }
@ -63,7 +63,7 @@ export async function encryptAsymmetric(
*/ */
export async function decryptAsymmetric( export async function decryptAsymmetric(
payload: Uint8Array, payload: Uint8Array,
privKey: Uint8Array, privKey: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
return ecies.decrypt(privKey, payload); return ecies.decrypt(privKey, payload);
} }
@ -79,7 +79,7 @@ export async function decryptAsymmetric(
*/ */
export async function encryptSymmetric( export async function encryptSymmetric(
data: Uint8Array, data: Uint8Array,
key: Uint8Array | string, key: Uint8Array | string
): Promise<Uint8Array> { ): Promise<Uint8Array> {
const iv = symmetric.generateIv(); const iv = symmetric.generateIv();
@ -99,7 +99,7 @@ export async function encryptSymmetric(
*/ */
export async function decryptSymmetric( export async function decryptSymmetric(
payload: Uint8Array, payload: Uint8Array,
key: Uint8Array | string, key: Uint8Array | string
): Promise<Uint8Array> { ): Promise<Uint8Array> {
const ivStart = payload.length - Symmetric.ivSize; const ivStart = payload.length - Symmetric.ivSize;
const cipher = payload.slice(0, ivStart); const cipher = payload.slice(0, ivStart);
@ -135,7 +135,7 @@ function computeSizeOfPayloadSizeField(payload: Uint8Array): number {
function validateDataIntegrity( function validateDataIntegrity(
value: Uint8Array, value: Uint8Array,
expectedSize: number, expectedSize: number
): boolean { ): boolean {
if (value.length !== expectedSize) { if (value.length !== expectedSize) {
return false; return false;
@ -157,7 +157,7 @@ function getHash(message: Uint8Array, isSigned: boolean): Uint8Array {
function ecRecoverPubKey( function ecRecoverPubKey(
messageHash: Uint8Array, messageHash: Uint8Array,
signature: Uint8Array, signature: Uint8Array
): Uint8Array | undefined { ): Uint8Array | undefined {
const recoveryDataView = new DataView(signature.slice(64).buffer); const recoveryDataView = new DataView(signature.slice(64).buffer);
const recovery = recoveryDataView.getUint8(0); const recovery = recoveryDataView.getUint8(0);
@ -175,7 +175,7 @@ function ecRecoverPubKey(
*/ */
export async function preCipher( export async function preCipher(
messagePayload: Uint8Array, messagePayload: Uint8Array,
sigPrivKey?: Uint8Array, sigPrivKey?: Uint8Array
): Promise<Uint8Array> { ): Promise<Uint8Array> {
let envelope = new Uint8Array([0]); // No flags let envelope = new Uint8Array([0]); // No flags
envelope = addPayloadSizeField(envelope, messagePayload); envelope = addPayloadSizeField(envelope, messagePayload);
@ -216,7 +216,7 @@ export async function preCipher(
* @internal * @internal
*/ */
export function postCipher( export function postCipher(
message: Uint8Array, message: Uint8Array
): { payload: Uint8Array; sig?: Signature } | undefined { ): { payload: Uint8Array; sig?: Signature } | undefined {
const sizeOfPayloadSizeField = getSizeOfPayloadSizeField(message); const sizeOfPayloadSizeField = getSizeOfPayloadSizeField(message);
if (sizeOfPayloadSizeField === 0) return; if (sizeOfPayloadSizeField === 0) return;

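Editor's note on the size-field hunks above: the payload-size field written by addPayloadSizeField and read by getPayloadSize is a little-endian integer of 1 to 4 bytes that is right-padded to 4 bytes before being decoded with a DataView. A self-contained sketch of that decoding step (the helper name is hypothetical and not part of the codebase):

// Hypothetical helper: decode a 1-4 byte little-endian size field by
// right-padding it to 4 bytes, mirroring the logic in getPayloadSize.
function readLittleEndianSize(
  field: Uint8Array,
  sizeOfPayloadSizeField: number
): number {
  const padded = new Uint8Array(4);
  padded.set(field.slice(0, sizeOfPayloadSizeField));
  return new DataView(padded.buffer).getUint32(0, true); // true = little endian
}

// A 2-byte field encoding 300 (0x012c) is stored as [0x2c, 0x01].
console.log(readLittleEndianSize(new Uint8Array([0x2c, 0x01]), 2)); // 300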

@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };

@ -6,7 +6,7 @@ module.exports = function (config) {
frameworks: ["webpack", "mocha"], frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"], files: ["src/**/!(node).spec.ts"],
preprocessors: { preprocessors: {
"src/**/!(node).spec.ts": ["webpack"], "src/**/!(node).spec.ts": ["webpack"]
}, },
envPreprocessor: ["CI"], envPreprocessor: ["CI"],
reporters: ["progress"], reporters: ["progress"],
@ -14,32 +14,32 @@ module.exports = function (config) {
singleRun: true, singleRun: true,
client: { client: {
mocha: { mocha: {
timeout: 6000, // Default is 2s timeout: 6000 // Default is 2s
}, }
}, },
webpack: { webpack: {
mode: "development", mode: "development",
module: { module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }], rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
}, },
plugins: [ plugins: [
new webpack.DefinePlugin({ new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false, "process.env.CI": process.env.CI || false
}), }),
new webpack.ProvidePlugin({ new webpack.ProvidePlugin({
process: "process/browser.js", process: "process/browser.js"
}), })
], ],
resolve: { resolve: {
extensions: [".ts", ".tsx", ".js"], extensions: [".ts", ".tsx", ".js"],
extensionAlias: { extensionAlias: {
".js": [".js", ".ts"], ".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"], ".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"], ".mjs": [".mjs", ".mts"]
}, }
}, },
stats: { warnings: false }, stats: { warnings: false },
devtool: "inline-source-map", devtool: "inline-source-map"
}, }
}); });
}; };

@ -11,14 +11,14 @@ export default {
input, input,
output: { output: {
dir: "bundle", dir: "bundle",
format: "esm", format: "esm"
}, },
plugins: [ plugins: [
commonjs(), commonjs(),
json(), json(),
nodeResolve({ nodeResolve({
browser: true, browser: true,
preferBuiltins: false, preferBuiltins: false
}), })
], ]
}; };

@ -18,7 +18,7 @@ describe("RFC Test Vectors", () => {
ephemeral: undefined, ephemeral: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
timestamp: undefined, timestamp: undefined,
version: undefined, version: undefined
}; };
const hash = messageHash(pubSubTopic, message); const hash = messageHash(pubSubTopic, message);
@ -38,7 +38,7 @@ describe("RFC Test Vectors", () => {
ephemeral: undefined, ephemeral: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
timestamp: undefined, timestamp: undefined,
version: undefined, version: undefined
}; };
const hash = messageHash(pubSubTopic, message); const hash = messageHash(pubSubTopic, message);
@ -58,7 +58,7 @@ describe("RFC Test Vectors", () => {
ephemeral: undefined, ephemeral: undefined,
rateLimitProof: undefined, rateLimitProof: undefined,
timestamp: undefined, timestamp: undefined,
version: undefined, version: undefined
}; };
const hash = messageHash(pubSubTopic, message); const hash = messageHash(pubSubTopic, message);

@ -8,7 +8,7 @@ import { concat, utf8ToBytes } from "@waku/utils/bytes";
*/ */
export function messageHash( export function messageHash(
pubsubTopic: string, pubsubTopic: string,
message: IProtoMessage, message: IProtoMessage
): Uint8Array { ): Uint8Array {
const pubsubTopicBytes = utf8ToBytes(pubsubTopic); const pubsubTopicBytes = utf8ToBytes(pubsubTopic);
const contentTopicBytes = utf8ToBytes(message.contentTopic); const contentTopicBytes = utf8ToBytes(message.contentTopic);
@ -20,7 +20,7 @@ export function messageHash(
pubsubTopicBytes, pubsubTopicBytes,
message.payload, message.payload,
contentTopicBytes, contentTopicBytes,
message.meta, message.meta
]); ]);
} else { } else {
bytes = concat([pubsubTopicBytes, message.payload, contentTopicBytes]); bytes = concat([pubsubTopicBytes, message.payload, contentTopicBytes]);

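Editor's note, reading this file together with the test vectors above: messageHash hashes the concatenation of the pubsub topic, payload, content topic and, when present, the meta field. A hedged usage sketch, assuming the package is published as @waku/message-hash; the field values below are made up for illustration:

import { messageHash } from "@waku/message-hash";
import { utf8ToBytes } from "@waku/utils/bytes";

// Illustrative values only; unset optional fields are passed as undefined,
// matching the shape used in the spec file above.
const hash = messageHash("/waku/2/default-waku/proto", {
  payload: utf8ToBytes("hello"),
  contentTopic: "/example/1/chat/proto",
  meta: new Uint8Array([0x12, 0x34]),
  ephemeral: undefined,
  rateLimitProof: undefined,
  timestamp: undefined,
  version: undefined
});
// hash is a Uint8Array containing the deterministic message hash.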

@ -1,6 +1,6 @@
module.exports = { module.exports = {
parserOptions: { parserOptions: {
tsconfigRootDir: __dirname, tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json", project: "./tsconfig.dev.json"
}, }
}; };

@ -48,25 +48,18 @@
"node": ">=16" "node": ">=16"
}, },
"dependencies": { "dependencies": {
"@libp2p/interface-peer-discovery": "^2.0.0",
"@libp2p/interfaces": "^3.3.2", "@libp2p/interfaces": "^3.3.2",
"@waku/core": "0.0.22", "@waku/core": "0.0.22",
"@waku/enr": "0.0.16", "@waku/enr": "0.0.16",
"@waku/interfaces": "0.0.17",
"@waku/proto": "0.0.5", "@waku/proto": "0.0.5",
"@waku/utils": "0.0.10", "@waku/utils": "0.0.10",
"@waku/interfaces": "0.0.17",
"debug": "^4.3.4", "debug": "^4.3.4",
"it-all": "^3.0.2", "it-all": "^3.0.2",
"it-length-prefixed": "^9.0.1", "it-length-prefixed": "^9.0.1",
"it-pipe": "^3.0.1" "it-pipe": "^3.0.1"
}, },
"devDependencies": { "devDependencies": {
"@libp2p/interface-connection-manager": "^3.0.1",
"@libp2p/interface-libp2p": "^3.2.0",
"@libp2p/interface-peer-id": "^2.0.2",
"@libp2p/interface-peer-info": "^1.0.10",
"@libp2p/interface-peer-store": "^2.0.4",
"@libp2p/interface-registrar": "^2.0.12",
"@rollup/plugin-commonjs": "^24.0.1", "@rollup/plugin-commonjs": "^24.0.1",
"@rollup/plugin-json": "^6.0.0", "@rollup/plugin-json": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.0.2", "@rollup/plugin-node-resolve": "^15.0.2",

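Editor's note on the dependency hunk above: it drops the granular @libp2p/interface-* packages in favour of the consolidated interfaces shipped by newer libp2p releases. A hedged sketch of what the corresponding type imports tend to look like after such a migration; the exact subpaths depend on the installed libp2p version and are an assumption here, not something shown in this diff:

// Assumed post-migration import locations; verify against the installed
// @libp2p/interface version before relying on these subpaths.
import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerInfo } from "@libp2p/interface/peer-info";
import type { PeerDiscovery } from "@libp2p/interface/peer-discovery";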

@ -11,14 +11,14 @@ export default {
input, input,
output: { output: {
dir: "bundle", dir: "bundle",
format: "esm", format: "esm"
}, },
plugins: [ plugins: [
commonjs(), commonjs(),
json(), json(),
nodeResolve({ nodeResolve({
browser: true, browser: true,
preferBuiltins: false, preferBuiltins: false
}), })
], ]
}; };

@ -1,11 +1,11 @@
export { export {
wakuPeerExchange, wakuPeerExchange,
PeerExchangeCodec, PeerExchangeCodec,
WakuPeerExchange, WakuPeerExchange
} from "./waku_peer_exchange.js"; } from "./waku_peer_exchange.js";
export { export {
wakuPeerExchangeDiscovery, wakuPeerExchangeDiscovery,
PeerExchangeDiscovery, PeerExchangeDiscovery,
Options, Options,
DEFAULT_PEER_EXCHANGE_TAG_NAME, DEFAULT_PEER_EXCHANGE_TAG_NAME
} from "./waku_peer_exchange_discovery.js"; } from "./waku_peer_exchange_discovery.js";

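Editor's note on the exports above: a hedged sketch of how the discovery module is typically wired into a libp2p node. The zero-argument factory call and the option shape are assumptions based on common libp2p usage at the time, not something taken from this diff:

import { wakuPeerExchangeDiscovery } from "@waku/peer-exchange";
import { createLibp2p } from "libp2p";

// Sketch only: just the peerDiscovery wiring is shown; a usable node also
// needs transports, a connection encrypter and a stream muxer configured.
const node = await createLibp2p({
  peerDiscovery: [wakuPeerExchangeDiscovery()]
});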

@ -11,9 +11,9 @@ export class PeerExchangeRPC {
const { numPeers } = params; const { numPeers } = params;
return new PeerExchangeRPC({ return new PeerExchangeRPC({
query: { query: {
numPeers: numPeers, numPeers: numPeers
}, },
response: undefined, response: undefined
}); });
} }

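Editor's note on the RPC hunk above: createRequest wraps the requested peer count in a protobuf query and leaves the response empty. A hedged usage sketch; the internal import path, the bigint argument and the encode call are assumptions based on how the protobuf-backed RPC wrappers are used elsewhere in the codebase, not something shown in this diff:

// Import path is internal to the package and assumed for illustration.
import { PeerExchangeRPC } from "./rpc.js";

// Build a request for up to 5 peers and serialise it for the wire.
const rpc = PeerExchangeRPC.createRequest({ numPeers: BigInt(5) });
const bytes: Uint8Array = rpc.encode();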
Some files were not shown because too many files have changed in this diff.