merge master into branch

This commit is contained in:
fbarbu15 2023-10-10 09:59:24 +03:00
commit 7e96644ba8
No known key found for this signature in database
GPG Key ID: D75221C8DEA22501
103 changed files with 9623 additions and 10257 deletions

View File

@ -92,6 +92,7 @@
"secio",
"seckey",
"secp",
"sharded",
"sscanf",
"staticnode",
"statusim",
@ -113,6 +114,7 @@
"waku",
"wakuconnect",
"wakunode",
"wakuorg",
"wakuv",
"webfonts",
"websockets",

View File

@ -49,6 +49,7 @@
"**/*.spec.ts",
"**/tests/**",
"**/rollup.config.js",
"**/playwright.config.ts",
"**/.eslintrc.cjs",
"**/karma.conf.cjs"
]

View File

@ -10,7 +10,7 @@ on:
workflow_dispatch:
inputs:
nim_wakunode_image:
description: "Docker hub image name taken from https://hub.docker.com/r/statusteam/nim-waku/tags. Format: statusteam/nim-waku:v0.19.0"
description: "Docker hub image name taken from https://hub.docker.com/r/wakuorg/nwaku/tags. Format: wakuorg/nwaku:v0.20.0"
required: false
type: string
@ -64,32 +64,33 @@ jobs:
with:
node-version: ${{ env.NODE_JS }}
- uses: ./.github/actions/npm
- run: npx playwright install --with-deps
- run: npm run build:esm
- run: npm run test:browser
node:
uses: ./.github/workflows/test-node.yml
with:
nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'statusteam/nim-waku:v0.19.0' }}
nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.20.0' }}
test_type: node
node_optional:
uses: ./.github/workflows/test-node.yml
with:
nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'statusteam/nim-waku:v0.19.0' }}
nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.20.0' }}
test_type: node-optional
node_with_go_waku_master:
uses: ./.github/workflows/test-node.yml
with:
nim_wakunode_image: statusteam/go-waku:latest
nim_wakunode_image: wakuorg/go-waku:latest
test_type: go-waku-master
debug: waku*
node_with_nwaku_master:
uses: ./.github/workflows/test-node.yml
with:
nim_wakunode_image: statusteam/nim-waku:deploy-wakuv2-test
nim_wakunode_image: wakuorg/nwaku:deploy-wakuv2-test
test_type: nwaku-master
debug: waku*

40
.github/workflows/playwright.yml vendored Normal file
View File

@ -0,0 +1,40 @@
name: Playwright tests

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

env:
  NODE_JS: "18"
  # Which @waku/create-app template to bootstrap and serve for the tests.
  EXAMPLE_TEMPLATE: "web-chat"
  EXAMPLE_NAME: "example"
  EXAMPLE_PORT: "8080"
  # Firefox in container fails due to $HOME not being owned by user running commands
  # more details https://github.com/microsoft/playwright/issues/6500
  HOME: "/root"

jobs:
  test:
    timeout-minutes: 60
    runs-on: ubuntu-latest
    container:
      # Playwright-maintained image with all browsers preinstalled.
      image: mcr.microsoft.com/playwright:v1.38.0-jammy
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: ${{ env.NODE_JS }}
      - uses: ./.github/actions/npm
      - name: Run Playwright tests
        run: npm run test --workspace=@waku/browser-tests
      # Upload the HTML report even when tests fail, for debugging.
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: playwright-report
          path: playwright-report/
          retention-days: 30

View File

@ -1,7 +1,12 @@
# WARNING: This workflow is used by upstream workflows (jswaku, nwaku, gowaku) via workflow_call.
# DO NOT modify the name, inputs, or other parts of this workflow that might break upstream CI.
name: Run Test
on:
workflow_call:
# IMPORTANT: Do not change the name or properties of these inputs.
# If you add new required inputs make sure that they have default value or you make the change upstream as well
inputs:
nim_wakunode_image:
required: true
@ -16,6 +21,7 @@ on:
env:
NODE_JS: "18"
# Ensure test type conditions remain consistent.
WAKU_SERVICE_NODE_PARAMS: ${{ (inputs.test_type == 'go-waku-master') && '--min-relay-peers-to-publish=0' || '' }}
DEBUG: ${{ inputs.debug }}

3
.gitignore vendored
View File

@ -9,3 +9,6 @@ coverage
*.log
*.tsbuildinfo
docs
test-results
playwright-report
example

View File

@ -8,7 +8,7 @@ A TypeScript implementation of the [Waku v2 protocol](https://rfc.vac.dev/spec/1
## Documentation
- [Quick start](https://docs.waku.org/guides/js-waku/quick-start)
- [Quick start](https://docs.waku.org/guides/js-waku/#getting-started)
- [Full documentation](https://docs.waku.org/guides/js-waku)
- [API documentation (`master` branch)](https://js.waku.org/)
- [Waku](https://waku.org/)

49
karma.conf.cjs Normal file
View File

@ -0,0 +1,49 @@
const webpack = require("webpack");
const playwright = require('playwright');
process.env.CHROME_BIN = playwright.chromium.executablePath();
process.env.FIREFOX_BIN = playwright.firefox.executablePath();
module.exports = function (config) {
config.set({
frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"],
preprocessors: {
"src/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
browsers: ["ChromeHeadless", "FirefoxHeadless"],
singleRun: true,
client: {
mocha: {
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false,
"process.env.DISPLAY": "Browser",
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map"
}
});
};

15384
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -15,6 +15,7 @@
"packages/message-encryption",
"packages/sdk",
"packages/tests",
"packages/browser-tests",
"packages/build-utils"
],
"scripts": {
@ -39,8 +40,8 @@
},
"devDependencies": {
"@size-limit/preset-big-lib": "^8.2.4",
"@typescript-eslint/eslint-plugin": "^6.2.1",
"@typescript-eslint/parser": "^6.0.0",
"@typescript-eslint/eslint-plugin": "^6.6.0",
"@typescript-eslint/parser": "^6.6.0",
"eslint": "^8.47.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-eslint-comments": "^3.2.0",
@ -49,10 +50,19 @@
"eslint-plugin-prettier": "^5.0.0",
"gh-pages": "^5.0.0",
"husky": "^8.0.3",
"lint-staged": "^13.2.2",
"size-limit": "^8.1.2",
"typedoc": "^0.23.26",
"typedoc-plugin-resolve-crossmodule-references": "^0.3.3"
"lint-staged": "^14.0.1",
"size-limit": "^9.0.0",
"ts-loader": "^9.4.2",
"ts-node": "^10.9.1",
"typedoc": "^0.25.1",
"typescript": "^5.2.2",
"karma": "^6.4.2",
"karma-chrome-launcher": "^3.2.0",
"karma-firefox-launcher": "^2.1.2",
"karma-mocha": "^2.0.1",
"karma-webkit-launcher": "^2.1.0",
"karma-webpack": "^5.0.0",
"playwright": "^1.38.1"
},
"lint-staged": {
"*.{ts,js}": [

View File

@ -0,0 +1,3 @@
EXAMPLE_TEMPLATE="web-chat"
EXAMPLE_NAME="example"
EXAMPLE_PORT="8080"

View File

@ -0,0 +1,14 @@
module.exports = {
parserOptions: {
tsconfigRootDir: __dirname,
project: "./tsconfig.dev.json"
},
env: {
node: true,
},
rules: {},
globals: {
process: true
}
};

View File

@ -0,0 +1,19 @@
{
"name": "@waku/browser-tests",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"start": "run-s start:*",
"start:setup": "node ./src/setup-example.js",
"start:build": "node ./src/build-example.js",
"start:serve": "npx serve -p 8080 --no-port-switching ./example",
"test": "npx playwright test"
},
"devDependencies": {
"@playwright/test": "^1.37.1",
"@waku/create-app": "^0.1.1-7c24ffa",
"dotenv-flow": "^3.3.0",
"serve": "^14.2.1"
}
}

View File

@ -0,0 +1,80 @@
import "dotenv-flow/config";
import { defineConfig, devices } from "@playwright/test";

const examplePort = process.env.EXAMPLE_PORT;
// web-chat specific: the built example is served under a template-named sub-path.
const exampleTemplate = process.env.EXAMPLE_TEMPLATE;
const BASE_URL = `http://127.0.0.1:${examplePort}/${exampleTemplate}`;

/**
 * Playwright configuration for the example smoke tests.
 * See https://playwright.dev/docs/test-configuration.
 */
export default defineConfig({
  testDir: "./tests",
  // Run test files in parallel.
  fullyParallel: true,
  // Fail the CI build if a stray `test.only` is committed.
  forbidOnly: !!process.env.CI,
  // Retry only on CI.
  retries: process.env.CI ? 2 : 0,
  // Cap parallelism on CI.
  workers: process.env.CI ? 2 : undefined,
  // See https://playwright.dev/docs/test-reporters
  reporter: "html",
  // Settings shared by every project below.
  // See https://playwright.dev/docs/api/class-testoptions.
  use: {
    // Base URL for relative navigations such as `await page.goto('/')`.
    baseURL: BASE_URL,
    // Collect a trace when retrying a failed test.
    // See https://playwright.dev/docs/trace-viewer
    trace: "on-first-retry"
  },
  // One project per major desktop browser engine.
  projects: [
    { name: "chromium", use: { ...devices["Desktop Chrome"] } },
    { name: "firefox", use: { ...devices["Desktop Firefox"] } },
    { name: "webkit", use: { ...devices["Desktop Safari"] } }
    // Mobile viewports (e.g. devices['Pixel 5'], devices['iPhone 12']) and
    // branded browsers (channel: 'msedge' / 'chrome') can be added here later.
  ],
  // Boot the example's server before the tests start.
  webServer: {
    url: BASE_URL,
    stdout: "pipe",
    stderr: "pipe",
    command: "npm start",
    reuseExistingServer: !process.env.CI,
    timeout: 5 * 60 * 1000 // five minutes for bootstrapping an example
  }
});

View File

@ -0,0 +1,55 @@
#!/usr/bin/env node
// Builds the bootstrapped example app so it can be served by the browser tests.
import "dotenv-flow/config";
import { execSync } from "child_process";
import { renameSync, rmSync } from "fs";
import path from "path";

import { __dirname } from "./utils.js";

const EXAMPLE_NAME = process.env.EXAMPLE_NAME;
if (!EXAMPLE_NAME) {
  // path.resolve would otherwise throw an opaque TypeError on undefined.
  throw new Error("EXAMPLE_NAME env variable must be set (see .env)");
}
const EXAMPLE_PATH = path.resolve(__dirname, "..", EXAMPLE_NAME);

const BUILD_FOLDER = "build";
const BUILD_PATH = path.resolve(EXAMPLE_PATH, BUILD_FOLDER);

// required by web-chat example: it expects to be served from a /web-chat path
const WEB_CHAT_BUILD_PATH = path.resolve(EXAMPLE_PATH, "web-chat");

run();

function run() {
  cleanPrevBuildIfExists();
  buildExample();
  renameBuildFolderForWebChat();
}

// Remove a stale build output, if any. fs.rmSync replaces shelling out to
// `rm -rf` so the script is portable (Windows) and safe for paths containing
// spaces or shell metacharacters; `force: true` makes a missing dir a no-op.
function cleanPrevBuildIfExists() {
  try {
    console.log("Cleaning previous build if exists.");
    rmSync(BUILD_PATH, { recursive: true, force: true });
  } catch (error) {
    console.error(`Failed to clean previous build: ${error.message}`);
    throw error;
  }
}

// Run the example's own `npm run build` inside its folder; `cwd` replaces the
// previous `cd X && …` shell chaining.
function buildExample() {
  try {
    console.log("Building example at", EXAMPLE_PATH);
    execSync(`npm run build`, { cwd: EXAMPLE_PATH, stdio: "pipe" });
  } catch (error) {
    console.error(`Failed to build example: ${error.message}`);
    throw error;
  }
}

// Move `build/` to `web-chat/` so the app is reachable under the
// template-named sub-path the tests navigate to.
function renameBuildFolderForWebChat() {
  try {
    console.log("Renaming example's build folder.");
    renameSync(BUILD_PATH, WEB_CHAT_BUILD_PATH);
  } catch (error) {
    console.error(
      `Failed to rename build folder for web-chat: ${error.message}`
    );
    throw error;
  }
}

View File

@ -0,0 +1,93 @@
#!/usr/bin/env node
// Bootstraps a fresh example app from @waku/create-app and npm-links the
// local js-waku workspace packages into it.
import "dotenv-flow/config";
import { execSync } from "child_process";
import path from "path";

import { __dirname, readJSON } from "./utils.js";

const ROOT_PATH = path.resolve(__dirname, "../../../");
const JS_WAKU_PACKAGES = readWorkspaces();

const EXAMPLE_NAME = process.env.EXAMPLE_NAME;
const EXAMPLE_TEMPLATE = process.env.EXAMPLE_TEMPLATE;
const EXAMPLE_PATH = path.resolve(__dirname, "..", EXAMPLE_NAME);

run();

function run() {
  cleanExampleIfExists();
  bootstrapExample();
  linkPackages();
}

// Drop any example left over from a previous run.
function cleanExampleIfExists() {
  try {
    console.log("Cleaning previous example if exists.");
    execSync(`rm -rf ${EXAMPLE_PATH}`, { stdio: "ignore" });
  } catch (error) {
    console.error(`Failed to clean previous example: ${error.message}`);
    throw error;
  }
}

// Scaffold the example from the configured @waku/create-app template.
function bootstrapExample() {
  try {
    console.log("Bootstrapping example.");
    execSync(
      `npx @waku/create-app --template ${EXAMPLE_TEMPLATE} ${EXAMPLE_NAME}`,
      { stdio: "ignore" }
    );
  } catch (error) {
    console.error(`Failed to bootstrap example: ${error.message}`);
    throw error;
  }
}

// Replace the example's published @waku/* dependencies with npm links to the
// local workspace checkouts.
function linkPackages() {
  const examplePackage = readJSON(`${EXAMPLE_PATH}/package.json`);

  // Spreading both maps removes duplicates if a package appears in each.
  const wakuDependencies = filterWakuDependencies({
    ...examplePackage.dependencies,
    ...examplePackage.devDependencies
  });

  for (const name of Object.keys(wakuDependencies)) {
    linkDependency(name);
  }
}

// Keep only the dependencies that correspond to local workspace packages.
function filterWakuDependencies(dependencies) {
  const filtered = {};
  for (const [name, version] of Object.entries(dependencies)) {
    if (JS_WAKU_PACKAGES.includes(name)) {
      filtered[name] = version;
    }
  }
  return filtered;
}

// npm-link one dependency to its folder in the monorepo root.
function linkDependency(dependency) {
  try {
    console.log(`Linking dependency to example: ${dependency}`);
    const pathToDependency = path.resolve(ROOT_PATH, toFolderName(dependency));
    execSync(`npm link ${pathToDependency}`, { stdio: "ignore" });
  } catch (error) {
    console.error(
      `Failed to npm link dependency ${dependency} in example: ${error.message}`
    );
    throw error;
  }
}

// Read the monorepo workspace folders and map them to published names.
function readWorkspaces() {
  const rootPackageJson = path.resolve(ROOT_PATH, "package.json");
  const workspaces = readJSON(rootPackageJson).workspaces;
  return workspaces.map(toPackageName);
}

function toPackageName(str) {
  // assumption is that package name published is always the same in `@waku/package` name
  return str.replace("packages", "@waku");
}

function toFolderName(str) {
  return str.replace("@waku", "packages");
}

View File

@ -0,0 +1,8 @@
import { readFileSync } from "fs";
import { dirname } from "path";
import { fileURLToPath } from "url";

// ESM has no __dirname global; derive it from this module's URL so sibling
// scripts can resolve paths relative to this folder.
const modulePath = fileURLToPath(import.meta.url);
export const __dirname = dirname(modulePath);

// Synchronously read and parse a UTF-8 JSON file.
export function readJSON(path) {
  return JSON.parse(readFileSync(path, "utf-8"));
}

View File

@ -0,0 +1,6 @@
import { expect, test } from "@playwright/test";

// Expected <title> rendered by the web-chat example.
const EXPECTED_TITLE = "Waku v2 chat app";

// Smoke test: the served example loads and shows the expected page title.
test("has title Web Chat title", async ({ page }) => {
  // baseURL comes from playwright.config.ts; navigate to its root.
  await page.goto("");
  await expect(page).toHaveTitle(EXPECTED_TITLE);
});

View File

@ -0,0 +1,3 @@
{
"extends": "../../tsconfig.dev"
}

View File

@ -1,45 +1,3 @@
process.env.CHROME_BIN = require("puppeteer").executablePath();
const webpack = require("webpack");
const config = require("../../karma.conf.cjs");
module.exports = function (config) {
config.set({
frameworks: ["webpack", "mocha"],
files: ["src/lib/**/!(node).spec.ts"],
preprocessors: {
"src/lib/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
browsers: ["ChromeHeadless"],
singleRun: true,
client: {
mocha: {
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map"
}
});
};
module.exports = config;

View File

@ -69,7 +69,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@noble/hashes": "^1.3.2",
@ -80,7 +80,7 @@
"it-all": "^3.0.3",
"it-length-prefixed": "^9.0.1",
"it-pipe": "^3.0.1",
"p-event": "^5.0.1",
"p-event": "^6.0.0",
"uint8arraylist": "^2.4.3",
"uuid": "^9.0.0"
},
@ -96,21 +96,13 @@
"@waku/build-utils": "*",
"chai": "^4.3.7",
"cspell": "^7.3.2",
"fast-check": "^3.12.0",
"fast-check": "^3.13.1",
"ignore-loader": "^0.1.2",
"isomorphic-fetch": "^3.0.0",
"karma": "^6.4.2",
"karma-chrome-launcher": "^3.2.0",
"karma-mocha": "^2.0.1",
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"process": "^0.11.10",
"puppeteer": "^21.1.1",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"ts-node": "^10.9.1",
"typescript": "^5.0.4"
"rollup": "^3.29.2"
},
"peerDependencies": {
"@multiformats/multiaddr": "^12.0.0",
@ -121,9 +113,6 @@
"optional": true
}
},
"typedoc": {
"entryPoint": "./src/index.ts"
},
"files": [
"dist",
"bundle",

View File

@ -17,7 +17,11 @@ import type {
Unsubscribe
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import { groupByContentTopic, toAsyncIterator } from "@waku/utils";
import {
ensurePubsubTopicIsConfigured,
groupByContentTopic,
toAsyncIterator
} from "@waku/utils";
import debug from "debug";
import all from "it-all";
import * as lp from "it-length-prefixed";
@ -230,7 +234,7 @@ class Subscription {
}
class Filter extends BaseProtocol implements IReceiver {
private readonly options: ProtocolCreateOptions;
private readonly pubSubTopics: PubSubTopic[] = [];
private activeSubscriptions = new Map<string, Subscription>();
private readonly NUM_PEERS_PROTOCOL = 1;
@ -253,19 +257,22 @@ class Filter extends BaseProtocol implements IReceiver {
constructor(libp2p: Libp2p, options?: ProtocolCreateOptions) {
super(FilterCodecs.SUBSCRIBE, libp2p.components);
this.pubSubTopics = options?.pubSubTopics || [DefaultPubSubTopic];
libp2p.handle(FilterCodecs.PUSH, this.onRequest.bind(this)).catch((e) => {
log("Failed to register ", FilterCodecs.PUSH, e);
});
this.activeSubscriptions = new Map();
this.options = options ?? {};
}
async createSubscription(pubSubTopic?: string): Promise<Subscription> {
const _pubSubTopic =
pubSubTopic ?? this.options.pubSubTopic ?? DefaultPubSubTopic;
async createSubscription(
pubSubTopic: string = DefaultPubSubTopic
): Promise<Subscription> {
ensurePubsubTopicIsConfigured(pubSubTopic, this.pubSubTopics);
//TODO: get a relevant peer for the topic/shard
// https://github.com/waku-org/js-waku/pull/1586#discussion_r1336428230
const peer = (
await this.getPeers({
maxBootstrapPeers: 1,
@ -274,11 +281,11 @@ class Filter extends BaseProtocol implements IReceiver {
)[0];
const subscription =
this.getActiveSubscription(_pubSubTopic, peer.id.toString()) ??
this.getActiveSubscription(pubSubTopic, peer.id.toString()) ??
this.setActiveSubscription(
_pubSubTopic,
pubSubTopic,
peer.id.toString(),
new Subscription(_pubSubTopic, peer, this.getStream.bind(this, peer))
new Subscription(pubSubTopic, peer, this.getStream.bind(this, peer))
);
return subscription;

View File

@ -1,6 +1,6 @@
import type { PeerId } from "@libp2p/interface/peer-id";
import type { PeerStore } from "@libp2p/interface/peer-store";
import type { IRelay } from "@waku/interfaces";
import type { IRelay, PeerIdStr } from "@waku/interfaces";
import type { KeepAliveOptions } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
import debug from "debug";
@ -13,7 +13,7 @@ const log = debug("waku:keep-alive");
export class KeepAliveManager {
private pingKeepAliveTimers: Map<string, ReturnType<typeof setInterval>>;
private relayKeepAliveTimers: Map<PeerId, ReturnType<typeof setInterval>>;
private relayKeepAliveTimers: Map<PeerId, ReturnType<typeof setInterval>[]>;
private options: KeepAliveOptions;
private relay?: IRelay;
@ -66,17 +66,12 @@ export class KeepAliveManager {
const relay = this.relay;
if (relay && relayPeriodSecs !== 0) {
const encoder = createEncoder({
contentTopic: RelayPingContentTopic,
ephemeral: true
});
const interval = setInterval(() => {
log("Sending Waku Relay ping message");
relay
.send(encoder, { payload: new Uint8Array([1]) })
.catch((e) => log("Failed to send relay ping", e));
}, relayPeriodSecs * 1000);
this.relayKeepAliveTimers.set(peerId, interval);
const intervals = this.scheduleRelayPings(
relay,
relayPeriodSecs,
peerId.toString()
);
this.relayKeepAliveTimers.set(peerId, intervals);
}
}
@ -89,7 +84,7 @@ export class KeepAliveManager {
}
if (this.relayKeepAliveTimers.has(peerId)) {
clearInterval(this.relayKeepAliveTimers.get(peerId));
this.relayKeepAliveTimers.get(peerId)?.map(clearInterval);
this.relayKeepAliveTimers.delete(peerId);
}
}
@ -105,4 +100,32 @@ export class KeepAliveManager {
this.pingKeepAliveTimers.clear();
this.relayKeepAliveTimers.clear();
}
private scheduleRelayPings(
relay: IRelay,
relayPeriodSecs: number,
peerIdStr: PeerIdStr
): NodeJS.Timeout[] {
// send a ping message to each PubSubTopic the peer is part of
const intervals: NodeJS.Timeout[] = [];
for (const topic of relay.pubSubTopics) {
const meshPeers = relay.getMeshPeers(topic);
if (!meshPeers.includes(peerIdStr)) continue;
const encoder = createEncoder({
pubSubTopic: topic,
contentTopic: RelayPingContentTopic,
ephemeral: true
});
const interval = setInterval(() => {
log("Sending Waku Relay ping message");
relay
.send(encoder, { payload: new Uint8Array([1]) })
.catch((e) => log("Failed to send relay ping", e));
}, relayPeriodSecs * 1000);
intervals.push(interval);
}
return intervals;
}
}

View File

@ -6,11 +6,12 @@ import {
IMessage,
Libp2p,
ProtocolCreateOptions,
PubSubTopic,
SendError,
SendResult
} from "@waku/interfaces";
import { PushResponse } from "@waku/proto";
import { isSizeValid } from "@waku/utils";
import { ensurePubsubTopicIsConfigured, isSizeValid } from "@waku/utils";
import debug from "debug";
import all from "it-all";
import * as lp from "it-length-prefixed";
@ -41,12 +42,12 @@ type PreparePushMessageResult =
* Implements the [Waku v2 Light Push protocol](https://rfc.vac.dev/spec/19/).
*/
class LightPush extends BaseProtocol implements ILightPush {
options: ProtocolCreateOptions;
private readonly pubSubTopics: PubSubTopic[];
private readonly NUM_PEERS_PROTOCOL = 1;
constructor(libp2p: Libp2p, options?: ProtocolCreateOptions) {
super(LightPushCodec, libp2p.components);
this.options = options || {};
this.pubSubTopics = options?.pubSubTopics ?? [DefaultPubSubTopic];
}
private async preparePushMessage(
@ -82,7 +83,9 @@ class LightPush extends BaseProtocol implements ILightPush {
}
async send(encoder: IEncoder, message: IMessage): Promise<SendResult> {
const { pubSubTopic = DefaultPubSubTopic } = this.options;
const { pubSubTopic } = encoder;
ensurePubsubTopicIsConfigured(pubSubTopic, this.pubSubTopics);
const recipients: PeerId[] = [];
const { query, error: preparationError } = await this.preparePushMessage(
@ -98,6 +101,7 @@ class LightPush extends BaseProtocol implements ILightPush {
};
}
//TODO: get a relevant peer for the topic/shard
const peers = await this.getPeers({
maxBootstrapPeers: 1,
numPeers: this.NUM_PEERS_PROTOCOL

View File

@ -6,11 +6,14 @@ import type {
IMessage,
IMetaSetter,
IProtoMessage,
IRateLimitProof
IRateLimitProof,
PubSubTopic
} from "@waku/interfaces";
import { proto_message as proto } from "@waku/proto";
import debug from "debug";
import { DefaultPubSubTopic } from "../constants.js";
const log = debug("waku:message:version-0");
const OneMillion = BigInt(1_000_000);
@ -73,6 +76,7 @@ export class Encoder implements IEncoder {
constructor(
public contentTopic: string,
public ephemeral: boolean = false,
public pubSubTopic: PubSubTopic,
public metaSetter?: IMetaSetter
) {
if (!contentTopic || contentTopic === "") {
@ -109,22 +113,25 @@ export class Encoder implements IEncoder {
/**
* Creates an encoder that encode messages without Waku level encryption or signature.
*
* An encoder is used to encode messages in the [`14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* An encoder is used to encode messages in the [14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* format to be sent over the Waku network. The resulting encoder can then be
* pass to { @link @waku/interfaces.LightPush.push } or
* { @link @waku/interfaces.Relay.send } to automatically encode outgoing
* pass to { @link @waku/interfaces!ISender.send } to automatically encode outgoing
* messages.
*/
export function createEncoder({
pubSubTopic = DefaultPubSubTopic,
contentTopic,
ephemeral,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(contentTopic, ephemeral, metaSetter);
return new Encoder(contentTopic, ephemeral, pubSubTopic, metaSetter);
}
export class Decoder implements IDecoder<DecodedMessage> {
constructor(public contentTopic: string) {
constructor(
public pubSubTopic: PubSubTopic,
public contentTopic: string
) {
if (!contentTopic || contentTopic === "") {
throw new Error("Content topic must be specified");
}
@ -169,12 +176,14 @@ export class Decoder implements IDecoder<DecodedMessage> {
*
* A decoder is used to decode messages from the [14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* format when received from the Waku network. The resulting decoder can then be
* pass to { @link @waku/interfaces.Filter.subscribe } or
* { @link @waku/interfaces.Relay.subscribe } to automatically decode incoming
* pass to { @link @waku/interfaces!IReceiver.subscribe } to automatically decode incoming
* messages.
*
* @param contentTopic The resulting decoder will only decode messages with this content topic.
*/
export function createDecoder(contentTopic: string): Decoder {
return new Decoder(contentTopic);
export function createDecoder(
contentTopic: string,
pubsubTopic: PubSubTopic = DefaultPubSubTopic
): Decoder {
return new Decoder(pubsubTopic, contentTopic);
}

View File

@ -6,10 +6,11 @@ import {
IDecoder,
IStore,
Libp2p,
ProtocolCreateOptions
ProtocolCreateOptions,
PubSubTopic
} from "@waku/interfaces";
import { proto_store as proto } from "@waku/proto";
import { isDefined } from "@waku/utils";
import { ensurePubsubTopicIsConfigured, isDefined } from "@waku/utils";
import { concat, utf8ToBytes } from "@waku/utils/bytes";
import debug from "debug";
import all from "it-all";
@ -74,12 +75,12 @@ export interface QueryOptions {
* The Waku Store protocol can be used to retrieved historical messages.
*/
class Store extends BaseProtocol implements IStore {
options: ProtocolCreateOptions;
private readonly pubSubTopics: PubSubTopic[];
private readonly NUM_PEERS_PROTOCOL = 1;
constructor(libp2p: Libp2p, options?: ProtocolCreateOptions) {
super(StoreCodec, libp2p.components);
this.options = options ?? {};
this.pubSubTopics = options?.pubSubTopics ?? [DefaultPubSubTopic];
}
/**
@ -206,12 +207,20 @@ class Store extends BaseProtocol implements IStore {
* @throws If not able to reach a Waku Store peer to query,
* or if an error is encountered when processing the reply,
* or if two decoders with the same content topic are passed.
*
* This API only supports querying a single pubsub topic at a time.
* If multiple decoders are provided, they must all have the same pubsub topic.
* @throws If multiple decoders with different pubsub topics are provided.
* @throws If no decoders are provided.
* @throws If no decoders are found for the provided pubsub topic.
*/
async *queryGenerator<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
options?: QueryOptions
): AsyncGenerator<Promise<T | undefined>[]> {
const { pubSubTopic = DefaultPubSubTopic } = this.options;
if (decoders.length === 0) {
throw new Error("No decoders provided");
}
let startTime, endTime;
@ -220,6 +229,33 @@ class Store extends BaseProtocol implements IStore {
endTime = options.timeFilter.endTime;
}
// convert array to set to remove duplicates
const uniquePubSubTopicsInQuery = Array.from(
new Set(decoders.map((decoder) => decoder.pubSubTopic))
);
// If multiple pubsub topics are provided, throw an error
if (uniquePubSubTopicsInQuery.length > 1) {
throw new Error(
"API does not support querying multiple pubsub topics at once"
);
}
// we can be certain that there is only one pubsub topic in the query
const pubSubTopicForQuery = uniquePubSubTopicsInQuery[0];
ensurePubsubTopicIsConfigured(pubSubTopicForQuery, this.pubSubTopics);
// check that the pubSubTopic from the Cursor and Decoder match
if (
options?.cursor?.pubsubTopic &&
options.cursor.pubsubTopic !== pubSubTopicForQuery
) {
throw new Error(
`Cursor pubsub topic (${options?.cursor?.pubsubTopic}) does not match decoder pubsub topic (${pubSubTopicForQuery})`
);
}
const decodersAsMap = new Map();
decoders.forEach((dec) => {
if (decodersAsMap.has(dec.contentTopic)) {
@ -230,11 +266,17 @@ class Store extends BaseProtocol implements IStore {
decodersAsMap.set(dec.contentTopic, dec);
});
const contentTopics = decoders.map((dec) => dec.contentTopic);
const contentTopics = decoders
.filter((decoder) => decoder.pubSubTopic === pubSubTopicForQuery)
.map((dec) => dec.contentTopic);
if (contentTopics.length === 0) {
throw new Error("No decoders found for topic " + pubSubTopicForQuery);
}
const queryOpts = Object.assign(
{
pubSubTopic: pubSubTopic,
pubSubTopic: pubSubTopicForQuery,
pageDirection: PageDirection.BACKWARD,
pageSize: DefaultPageSize
},
@ -365,10 +407,7 @@ async function* paginate<T extends IDecodedMessage>(
}
}
export async function createCursor(
message: IDecodedMessage,
pubsubTopic: string = DefaultPubSubTopic
): Promise<Cursor> {
export async function createCursor(message: IDecodedMessage): Promise<Cursor> {
if (
!message ||
!message.timestamp ||
@ -386,7 +425,7 @@ export async function createCursor(
return {
digest,
pubsubTopic,
pubsubTopic: message.pubSubTopic,
senderTime: messageTime,
receiverTime: messageTime
};

View File

@ -6,8 +6,8 @@ import { selectConnection } from "@waku/utils/libp2p";
import debug from "debug";
export class StreamManager {
private streamPool: Map<string, Promise<Stream>>;
private log: debug.Debugger;
private streamPool: Map<string, Promise<Stream | void>>;
private readonly log: debug.Debugger;
constructor(
public multicodec: string,
@ -38,7 +38,7 @@ export class StreamManager {
const stream = await streamPromise;
if (stream.status === "closed") {
if (!stream || stream.status === "closed") {
return this.newStream(peer); // fallback by creating a new stream on the spot
}
@ -55,7 +55,10 @@ export class StreamManager {
}
private prepareNewStream(peer: Peer): void {
const streamPromise = this.newStream(peer);
const streamPromise = this.newStream(peer).catch(() => {
// No error thrown as this call is not triggered by the user
this.log(`Failed to prepare a new stream for ${peer.id.toString()}`);
});
this.streamPool.set(peer.id.toString(), streamPromise);
}

View File

@ -9,8 +9,8 @@ const log = debug("waku:wait-for-remote-peer");
/**
* Wait for a remote peer to be ready given the passed protocols.
* Must be used after attempting to connect to nodes, using
* {@link @waku/core.WakuNode.dial} or a bootstrap method with
* {@link @waku/sdk.createLightNode}.
* {@link @waku/core!WakuNode.dial} or a bootstrap method with
* {@link @waku/sdk!createLightNode}.
*
* If the passed protocols is a GossipSub protocol, then it resolves only once
* a peer is in a mesh, to help ensure that other peers will send and receive
@ -96,15 +96,18 @@ async function waitForConnectedPeer(protocol: IBaseProtocol): Promise<void> {
}
/**
* Wait for a peer with the given protocol to be connected and in the gossipsub
* mesh.
* Wait for at least one peer with the given protocol to be connected and in the gossipsub
* mesh for all pubSubTopics.
*/
async function waitForGossipSubPeerInMesh(waku: IRelay): Promise<void> {
let peers = waku.getMeshPeers();
const pubSubTopics = waku.pubSubTopics;
while (peers.length == 0) {
await pEvent(waku.gossipSub, "gossipsub:heartbeat");
peers = waku.getMeshPeers();
for (const topic of pubSubTopics) {
while (peers.length == 0) {
await pEvent(waku.gossipSub, "gossipsub:heartbeat");
peers = waku.getMeshPeers(topic);
}
}
}

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -1,57 +1,3 @@
process.env.CHROME_BIN = require("puppeteer").executablePath();
const webpack = require("webpack");
const config = require("../../karma.conf.cjs");
module.exports = function (config) {
config.set({
frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"],
preprocessors: {
"src/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
browsers: ["ChromeHeadless"],
singleRun: true,
client: {
mocha: {
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [
{
test: /\.([cm]?ts|tsx)$/,
use: [
{
loader: "ts-loader",
options: {
configFile: "tsconfig.karma.json"
}
}
]
}
]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map"
}
});
};
module.exports = config;

View File

@ -48,7 +48,7 @@
"test:browser": "karma start karma.conf.cjs"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@waku/enr": "0.0.17",
@ -70,18 +70,9 @@
"@waku/interfaces": "0.0.18",
"chai": "^4.3.7",
"cspell": "^7.3.2",
"karma": "^6.4.2",
"karma-chrome-launcher": "^3.2.0",
"karma-mocha": "^2.0.1",
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"typescript": "^5.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"rollup": "^3.29.2"
},
"files": [
"dist",

View File

@ -1,8 +1,8 @@
import type { NodeCapabilityCount } from "@waku/interfaces";
export const enrTree = {
TEST: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@test.waku.nodes.status.im",
PROD: "enrtree://AOGECG2SPND25EEFMAJ5WF3KSGJNSGV356DSTL2YVLLZWIV6SAYBM@prod.waku.nodes.status.im"
TEST: "enrtree://AO47IDOLBKH72HIZZOXQP6NMRESAN7CHYWIBNXDXWRJRZWLODKII6@test.wakuv2.nodes.status.im",
PROD: "enrtree://ANEDLO25QVUGJOUTQFRYKWX6P4Z4GKVESBMHML7DZ6YK4LGS5FC5O@prod.wakuv2.nodes.status.im"
};
export const DEFAULT_BOOTSTRAP_TAG_NAME = "bootstrap";

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -1,45 +1,3 @@
process.env.CHROME_BIN = require("puppeteer").executablePath();
const webpack = require("webpack");
const config = require("../../karma.conf.cjs");
module.exports = function (config) {
config.set({
frameworks: ["webpack", "mocha"],
files: ["src/**/*.ts"],
preprocessors: {
"src/**/*.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
browsers: ["ChromeHeadless"],
singleRun: true,
client: {
mocha: {
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map"
}
});
};
module.exports = config;

View File

@ -48,7 +48,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@ethersproject/rlp": "^5.7.0",
@ -58,7 +58,7 @@
"@noble/secp256k1": "^1.7.1",
"@waku/utils": "0.0.11",
"debug": "^4.3.4",
"js-sha3": "^0.8.0"
"js-sha3": "^0.9.2"
},
"devDependencies": {
"@libp2p/peer-id-factory": "^3.0.3",
@ -71,22 +71,12 @@
"@waku/interfaces": "0.0.18",
"chai": "^4.3.7",
"cspell": "^7.3.2",
"karma": "^6.4.2",
"karma-chrome-launcher": "^3.2.0",
"karma-mocha": "^2.0.1",
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"process": "^0.11.10",
"puppeteer": "^21.1.1",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"typescript": "^5.0.4",
"uint8arrays": "^4.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
},
"files": [
"dist",
"bundle",

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -44,18 +44,14 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"devDependencies": {
"@chainsafe/libp2p-gossipsub": "^10.1.0",
"@multiformats/multiaddr": "^12.0.0",
"cspell": "^7.3.2",
"npm-run-all": "^4.1.5",
"typescript": "^5.0.4",
"libp2p": "^0.46.9"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"libp2p": "^0.46.12"
},
"files": [
"dist",

View File

@ -1,3 +1,5 @@
import type { PubSubTopic } from "./misc.js";
export interface IRateLimitProof {
proof: Uint8Array;
merkleRoot: Uint8Array;
@ -36,6 +38,7 @@ export interface IMetaSetter {
}
export interface EncoderOptions {
pubSubTopic?: PubSubTopic;
/** The content topic to set on outgoing messages. */
contentTopic: string;
/**
@ -52,6 +55,7 @@ export interface EncoderOptions {
}
export interface IEncoder {
pubSubTopic: PubSubTopic;
contentTopic: string;
ephemeral: boolean;
toWire: (message: IMessage) => Promise<Uint8Array | undefined>;
@ -61,7 +65,7 @@ export interface IEncoder {
export interface IDecodedMessage {
payload: Uint8Array;
contentTopic: string;
pubSubTopic: string;
pubSubTopic: PubSubTopic;
timestamp: Date | undefined;
rateLimitProof: IRateLimitProof | undefined;
ephemeral: boolean | undefined;
@ -69,6 +73,7 @@ export interface IDecodedMessage {
}
export interface IDecoder<T extends IDecodedMessage> {
pubSubTopic: PubSubTopic;
contentTopic: string;
fromWireToProtoObj: (bytes: Uint8Array) => Promise<IProtoMessage | undefined>;
fromProtoObj: (

View File

@ -4,6 +4,7 @@ import type { Peer, PeerStore } from "@libp2p/interface/peer-store";
import type { Libp2pOptions } from "libp2p";
import type { IDecodedMessage } from "./message.js";
import type { PubSubTopic } from "./misc.js";
export enum Protocols {
Relay = "relay",
@ -22,24 +23,29 @@ export interface IBaseProtocol {
export type ProtocolCreateOptions = {
/**
* The PubSub Topic to use. Defaults to {@link @waku/core.DefaultPubSubTopic }.
* Waku supports usage of multiple pubsub topics, but this is still in early stages.
* Waku implements sharding to achieve scalability
* The format of the sharded topic is `/waku/2/rs/<shard_cluster_index>/<shard_number>`
* To learn more about the sharding specifications implemented, see [Relay Sharding](https://rfc.vac.dev/spec/51/).
* The PubSub Topic to use. Defaults to {@link @waku/core!DefaultPubSubTopic }.
*
* One and only one pubsub topic is used by Waku. This is used by:
* If no pubsub topic is specified, the default pubsub topic is used.
* The set of pubsub topics that are used to initialize the Waku node, will need to be used by the protocols as well
* You cannot currently add or remove pubsub topics after initialization.
* This is used by:
* - WakuRelay to receive, route and send messages,
* - WakuLightPush to send messages,
* - WakuStore to retrieve messages.
*
* The usage of the default pubsub topic is recommended.
* See [Waku v2 Topic Usage Recommendations](https://rfc.vac.dev/spec/23/) for details.
*
*/
pubSubTopic?: string;
pubSubTopics?: PubSubTopic[];
/**
* You can pass options to the `Libp2p` instance used by {@link @waku/core.WakuNode} using the `libp2p` property.
* You can pass options to the `Libp2p` instance used by {@link @waku/core!WakuNode} using the `libp2p` property.
* This property is the same type as the one passed to [`Libp2p.create`](https://github.com/libp2p/js-libp2p/blob/master/doc/API.md#create)
* apart that we made the `modules` property optional and partial,
* allowing its omission and letting Waku set good defaults.
* Notes that some values are overridden by {@link @waku/core.WakuNode} to ensure it implements the Waku protocol.
* Notes that some values are overridden by {@link @waku/core!WakuNode} to ensure it implements the Waku protocol.
*/
libp2p?: Partial<Libp2pOptions>;
/**
@ -71,6 +77,11 @@ export enum SendError {
* Compressing the message or using an alternative strategy for large messages is recommended.
*/
SIZE_TOO_BIG = "Size is too big",
/**
* The PubSubTopic passed to the send function is not configured on the Waku node.
* Please ensure that the PubSubTopic is used when initializing the Waku node.
*/
TOPIC_NOT_CONFIGURED = "Topic not configured",
/**
* Failure to find a peer with suitable protocols. This may due to a connection issue.
* Mitigation can be: retrying after a given time period, display connectivity issue

View File

@ -1,9 +1,12 @@
import type { IDecodedMessage, IDecoder } from "./message.js";
import type { IAsyncIterator, PubSubTopic, Unsubscribe } from "./misc.js";
import type {
ContentTopic,
IAsyncIterator,
PubSubTopic,
Unsubscribe
} from "./misc.js";
import type { Callback } from "./protocols.js";
type ContentTopic = string;
export type ActiveSubscriptions = Map<PubSubTopic, ContentTopic[]>;
export interface IReceiver {

View File

@ -1,6 +1,7 @@
import type { GossipSub } from "@chainsafe/libp2p-gossipsub";
import type { PeerIdStr, TopicStr } from "@chainsafe/libp2p-gossipsub/types";
import { PubSubTopic } from "./misc.js";
import { IReceiver } from "./receiver.js";
import type { ISender } from "./sender.js";
@ -12,6 +13,7 @@ import type { ISender } from "./sender.js";
* @property getMeshPeers - Function to retrieve the mesh peers for a given topic or all topics if none is specified. Returns an array of peer IDs as strings.
*/
export interface IRelayAPI {
readonly pubSubTopics: Set<PubSubTopic>;
readonly gossipSub: GossipSub;
start: () => Promise<void>;
getMeshPeers: (topic?: TopicStr) => PeerIdStr[];

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -1,45 +1,3 @@
process.env.CHROME_BIN = require("puppeteer").executablePath();
const webpack = require("webpack");
const config = require("../../karma.conf.cjs");
module.exports = function (config) {
config.set({
frameworks: ["webpack", "mocha"],
files: ["src/**/*.ts"],
preprocessors: {
"src/**/*.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
browsers: ["ChromeHeadless"],
singleRun: true,
client: {
mocha: {
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map"
}
});
};
module.exports = config;

View File

@ -65,7 +65,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"browser": {
"crypto": false
@ -77,7 +77,7 @@
"@waku/proto": "0.0.5",
"@waku/utils": "0.0.11",
"debug": "^4.3.4",
"js-sha3": "^0.8.0"
"js-sha3": "^0.9.2"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^25.0.4",
@ -88,21 +88,11 @@
"@waku/build-utils": "*",
"chai": "^4.3.7",
"cspell": "^7.3.2",
"fast-check": "^3.12.0",
"karma": "^6.4.2",
"karma-chrome-launcher": "^3.2.0",
"karma-mocha": "^2.0.1",
"karma-webpack": "^5.0.0",
"fast-check": "^3.13.1",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"process": "^0.11.10",
"puppeteer": "^21.1.1",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"typescript": "^5.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"rollup": "^3.29.2"
},
"files": [
"dist",

View File

@ -1,5 +1,6 @@
import { DefaultPubSubTopic } from "@waku/core";
import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0";
import { IMetaSetter } from "@waku/interfaces";
import { IMetaSetter, PubSubTopic } from "@waku/interfaces";
import type {
EncoderOptions as BaseEncoderOptions,
IDecoder,
@ -32,6 +33,7 @@ const log = debug("waku:message-encryption:ecies");
class Encoder implements IEncoder {
constructor(
public pubSubTopic: PubSubTopic,
public contentTopic: string,
private publicKey: Uint8Array,
private sigPrivKey?: Uint8Array,
@ -88,13 +90,14 @@ export interface EncoderOptions extends BaseEncoderOptions {
*
* An encoder is used to encode messages in the [`14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* format to be sent over the Waku network. The resulting encoder can then be
* pass to { @link @waku/interfaces.LightPush.push } or
* { @link @waku/interfaces.Relay.send } to automatically encrypt
* pass to { @link @waku/interfaces!ISender.send } or
* { @link @waku/interfaces!ISender.send } to automatically encrypt
* and encode outgoing messages.
* The payload can optionally be signed with the given private key as defined
* in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/).
*/
export function createEncoder({
pubSubTopic = DefaultPubSubTopic,
contentTopic,
publicKey,
sigPrivKey,
@ -102,6 +105,7 @@ export function createEncoder({
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(
pubSubTopic,
contentTopic,
publicKey,
sigPrivKey,
@ -112,10 +116,11 @@ export function createEncoder({
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor(
pubSubTopic: PubSubTopic,
contentTopic: string,
private privateKey: Uint8Array
) {
super(contentTopic);
super(pubSubTopic, contentTopic);
}
async fromProtoObj(
@ -175,8 +180,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*
* A decoder is used to decode messages from the [14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* format when received from the Waku network. The resulting decoder can then be
* pass to { @link @waku/interfaces.Filter.subscribe } or
* { @link @waku/interfaces.Relay.subscribe } to automatically decrypt and
* pass to { @link @waku/interfaces!IReceiver.subscribe } to automatically decrypt and
* decode incoming messages.
*
* @param contentTopic The resulting decoder will only decode messages with this content topic.
@ -184,7 +188,8 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/
export function createDecoder(
contentTopic: string,
privateKey: Uint8Array
privateKey: Uint8Array,
pubSubTopic: PubSubTopic = DefaultPubSubTopic
): Decoder {
return new Decoder(contentTopic, privateKey);
return new Decoder(pubSubTopic, contentTopic, privateKey);
}

View File

@ -1,3 +1,4 @@
import { DefaultPubSubTopic } from "@waku/core";
import { Decoder as DecoderV0 } from "@waku/core/lib/message/version_0";
import type {
EncoderOptions as BaseEncoderOptions,
@ -5,7 +6,8 @@ import type {
IEncoder,
IMessage,
IMetaSetter,
IProtoMessage
IProtoMessage,
PubSubTopic
} from "@waku/interfaces";
import { WakuMessage } from "@waku/proto";
import debug from "debug";
@ -27,6 +29,7 @@ const log = debug("waku:message-encryption:symmetric");
class Encoder implements IEncoder {
constructor(
public pubSubTopic: PubSubTopic,
public contentTopic: string,
private symKey: Uint8Array,
private sigPrivKey?: Uint8Array,
@ -83,29 +86,37 @@ export interface EncoderOptions extends BaseEncoderOptions {
*
* An encoder is used to encode messages in the [`14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* format to be sent over the Waku network. The resulting encoder can then be
* pass to { @link @waku/interfaces.LightPush.push } or
* { @link @waku/interfaces.Relay.send } to automatically encrypt
* pass to { @link @waku/interfaces!ISender.send } to automatically encrypt
* and encode outgoing messages.
*
* The payload can optionally be signed with the given private key as defined
* in [26/WAKU2-PAYLOAD](https://rfc.vac.dev/spec/26/).
*/
export function createEncoder({
pubSubTopic = DefaultPubSubTopic,
contentTopic,
symKey,
sigPrivKey,
ephemeral = false,
metaSetter
}: EncoderOptions): Encoder {
return new Encoder(contentTopic, symKey, sigPrivKey, ephemeral, metaSetter);
return new Encoder(
pubSubTopic,
contentTopic,
symKey,
sigPrivKey,
ephemeral,
metaSetter
);
}
class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
constructor(
pubSubTopic: PubSubTopic,
contentTopic: string,
private symKey: Uint8Array
) {
super(contentTopic);
super(pubSubTopic, contentTopic);
}
async fromProtoObj(
@ -165,8 +176,7 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*
* A decoder is used to decode messages from the [14/WAKU2-MESSAGE](https://rfc.vac.dev/spec/14/)
* format when received from the Waku network. The resulting decoder can then be
* pass to { @link @waku/interfaces.Filter.subscribe } or
* { @link @waku/interfaces.Relay.subscribe } to automatically decrypt and
* pass to { @link @waku/interfaces!IReceiver.subscribe } to automatically decrypt and
* decode incoming messages.
*
* @param contentTopic The resulting decoder will only decode messages with this content topic.
@ -174,7 +184,8 @@ class Decoder extends DecoderV0 implements IDecoder<DecodedMessage> {
*/
export function createDecoder(
contentTopic: string,
symKey: Uint8Array
symKey: Uint8Array,
pubSubTopic: PubSubTopic = DefaultPubSubTopic
): Decoder {
return new Decoder(contentTopic, symKey);
return new Decoder(pubSubTopic, contentTopic, symKey);
}

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -1,45 +1,3 @@
process.env.CHROME_BIN = require("puppeteer").executablePath();
const webpack = require("webpack");
const config = require("../../karma.conf.cjs");
module.exports = function (config) {
config.set({
frameworks: ["webpack", "mocha"],
files: ["src/**/!(node).spec.ts"],
preprocessors: {
"src/**/!(node).spec.ts": ["webpack"]
},
envPreprocessor: ["CI"],
reporters: ["progress"],
browsers: ["ChromeHeadless"],
singleRun: true,
client: {
mocha: {
timeout: 6000 // Default is 2s
}
},
webpack: {
mode: "development",
module: {
rules: [{ test: /\.([cm]?ts|tsx)$/, loader: "ts-loader" }]
},
plugins: [
new webpack.DefinePlugin({
"process.env.CI": process.env.CI || false
}),
new webpack.ProvidePlugin({
process: "process/browser.js"
})
],
resolve: {
extensions: [".ts", ".tsx", ".js"],
extensionAlias: {
".js": [".js", ".ts"],
".cjs": [".cjs", ".cts"],
".mjs": [".mjs", ".mts"]
}
},
stats: { warnings: false },
devtool: "inline-source-map"
}
});
};
module.exports = config;

View File

@ -47,7 +47,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@noble/hashes": "^1.3.2",
@ -64,24 +64,13 @@
"@waku/interfaces": "0.0.18",
"chai": "^4.3.7",
"cspell": "^7.3.2",
"fast-check": "^3.12.0",
"fast-check": "^3.13.1",
"ignore-loader": "^0.1.2",
"isomorphic-fetch": "^3.0.0",
"karma": "^6.4.2",
"karma-chrome-launcher": "^3.2.0",
"karma-mocha": "^2.0.1",
"karma-webpack": "^5.0.0",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"process": "^0.11.10",
"puppeteer": "^21.1.1",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"ts-node": "^10.9.1",
"typescript": "^5.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"rollup": "^3.29.2"
},
"files": [
"dist",

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -45,7 +45,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@libp2p/interfaces": "^3.3.2",
@ -68,13 +68,8 @@
"cspell": "^7.3.2",
"npm-run-all": "^4.1.5",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"typescript": "^5.0.4",
"uint8arraylist": "^2.4.3"
},
"typedoc": {
"entryPoint": "./src/index.ts"
},
"files": [
"dist",
"bundle",

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -41,7 +41,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"protons-runtime": "^5.0.2"
@ -55,12 +55,8 @@
"npm-run-all": "^4.1.5",
"protons": "^7.0.2",
"rollup": "^3.29.2",
"typescript": "^5.0.4",
"uint8arraylist": "^2.4.3"
},
"typedoc": {
"entryPoint": "./src/index.ts"
},
"files": [
"dist",
"bundle",

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -46,7 +46,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@chainsafe/libp2p-gossipsub": "^10.1.0",
@ -57,20 +57,14 @@
"@waku/utils": "0.0.11",
"chai": "^4.3.7",
"debug": "^4.3.4",
"fast-check": "^3.12.0"
"fast-check": "^3.13.1"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^25.0.4",
"@waku/build-utils": "*",
"@rollup/plugin-json": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.1.0",
"rollup": "^3.29.2",
"ts-loader": "^9.4.2",
"ts-node": "^10.9.1",
"typescript": "^5.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"rollup": "^3.29.2"
},
"files": [
"dist",

View File

@ -21,10 +21,12 @@ import {
IRelay,
Libp2p,
ProtocolCreateOptions,
PubSubTopic,
SendError,
SendResult
} from "@waku/interfaces";
import { groupByContentTopic, isSizeValid, toAsyncIterator } from "@waku/utils";
import { isSizeValid, toAsyncIterator } from "@waku/utils";
import { pushOrInitMapSet } from "@waku/utils";
import debug from "debug";
import { RelayCodecs } from "./constants.js";
@ -46,7 +48,7 @@ export type ContentTopic = string;
* Throws if libp2p.pubsub does not support Waku Relay
*/
class Relay implements IRelay {
private readonly pubSubTopic: string;
public readonly pubSubTopics: Set<PubSubTopic>;
private defaultDecoder: IDecoder<IDecodedMessage>;
public static multicodec: string = RelayCodecs[0];
@ -56,7 +58,7 @@ class Relay implements IRelay {
* observers called when receiving new message.
* Observers under key `""` are always called.
*/
private observers: Map<ContentTopic, Set<unknown>>;
private observers: Map<PubSubTopic, Map<ContentTopic, Set<unknown>>>;
constructor(libp2p: Libp2p, options?: Partial<RelayCreateOptions>) {
if (!this.isRelayPubSub(libp2p.services.pubsub)) {
@ -66,21 +68,22 @@ class Relay implements IRelay {
}
this.gossipSub = libp2p.services.pubsub as GossipSub;
this.pubSubTopic = options?.pubSubTopic ?? DefaultPubSubTopic;
this.pubSubTopics = new Set(options?.pubSubTopics ?? [DefaultPubSubTopic]);
if (this.gossipSub.isStarted()) {
this.gossipSubSubscribe(this.pubSubTopic);
this.subscribeToAllTopics();
}
this.observers = new Map();
// Default PubSubTopic decoder
// TODO: User might want to decide what decoder should be used (e.g. for RLN)
this.defaultDecoder = new TopicOnlyDecoder();
}
/**
* Mounts the gossipsub protocol onto the libp2p node
* and subscribes to the default topic.
* and subscribes to all the topics.
*
* @override
* @returns {void}
@ -91,7 +94,7 @@ class Relay implements IRelay {
}
await this.gossipSub.start();
this.gossipSubSubscribe(this.pubSubTopic);
this.subscribeToAllTopics();
}
/**
@ -99,6 +102,16 @@ class Relay implements IRelay {
*/
public async send(encoder: IEncoder, message: IMessage): Promise<SendResult> {
const recipients: PeerId[] = [];
const { pubSubTopic } = encoder;
if (!this.pubSubTopics.has(pubSubTopic)) {
log("Failed to send waku relay: topic not configured");
return {
recipients,
errors: [SendError.TOPIC_NOT_CONFIGURED]
};
}
if (!isSizeValid(message.payload)) {
log("Failed to send waku relay: message is bigger that 1MB");
return {
@ -116,50 +129,49 @@ class Relay implements IRelay {
};
}
return this.gossipSub.publish(this.pubSubTopic, msg);
return this.gossipSub.publish(pubSubTopic, msg);
}
/**
* Add an observer and associated Decoder to process incoming messages on a given content topic.
*
* @returns Function to delete the observer
*/
public subscribe<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[],
callback: Callback<T>
): () => void {
const contentTopicToObservers = Array.isArray(decoders)
? toObservers(decoders, callback)
: toObservers([decoders], callback);
const observers: Array<[PubSubTopic, Observer<T>]> = [];
for (const contentTopic of contentTopicToObservers.keys()) {
const currObservers = this.observers.get(contentTopic) || new Set();
const newObservers =
contentTopicToObservers.get(contentTopic) || new Set();
for (const decoder of Array.isArray(decoders) ? decoders : [decoders]) {
const { pubSubTopic } = decoder;
const ctObs: Map<ContentTopic, Set<Observer<T>>> = this.observers.get(
pubSubTopic
) ?? new Map();
const observer = { pubSubTopic, decoder, callback };
pushOrInitMapSet(ctObs, decoder.contentTopic, observer);
this.observers.set(contentTopic, union(currObservers, newObservers));
this.observers.set(pubSubTopic, ctObs);
observers.push([pubSubTopic, observer]);
}
return () => {
for (const contentTopic of contentTopicToObservers.keys()) {
const currentObservers = this.observers.get(contentTopic) || new Set();
const observersToRemove =
contentTopicToObservers.get(contentTopic) || new Set();
const nextObservers = leftMinusJoin(
currentObservers,
observersToRemove
);
if (nextObservers.size) {
this.observers.set(contentTopic, nextObservers);
} else {
this.observers.delete(contentTopic);
}
}
this.removeObservers(observers);
};
}
private removeObservers<T extends IDecodedMessage>(
observers: Array<[PubSubTopic, Observer<T>]>
): void {
for (const [pubSubTopic, observer] of observers) {
const ctObs = this.observers.get(pubSubTopic);
if (!ctObs) continue;
const contentTopic = observer.decoder.contentTopic;
const _obs = ctObs.get(contentTopic);
if (!_obs) continue;
_obs.delete(observer);
ctObs.set(contentTopic, _obs);
this.observers.set(pubSubTopic, ctObs);
}
}
public toSubscriptionIterator<T extends IDecodedMessage>(
decoders: IDecoder<T> | IDecoder<T>[]
): Promise<IAsyncIterator<T>> {
@ -168,12 +180,20 @@ class Relay implements IRelay {
public getActiveSubscriptions(): ActiveSubscriptions {
const map = new Map();
map.set(this.pubSubTopic, this.observers.keys());
for (const pubSubTopic of this.pubSubTopics) {
map.set(pubSubTopic, Array.from(this.observers.keys()));
}
return map;
}
public getMeshPeers(topic?: TopicStr): PeerIdStr[] {
return this.gossipSub.getMeshPeers(topic ?? this.pubSubTopic);
public getMeshPeers(topic: TopicStr = DefaultPubSubTopic): PeerIdStr[] {
return this.gossipSub.getMeshPeers(topic);
}
private subscribeToAllTopics(): void {
for (const pubSubTopic of this.pubSubTopics) {
this.gossipSubSubscribe(pubSubTopic);
}
}
private async processIncomingMessage<T extends IDecodedMessage>(
@ -186,12 +206,20 @@ class Relay implements IRelay {
return;
}
const observers = this.observers.get(topicOnlyMsg.contentTopic) as Set<
// Retrieve the map of content topics for the given pubSubTopic
const contentTopicMap = this.observers.get(pubSubTopic);
if (!contentTopicMap) {
return;
}
// Retrieve the set of observers for the given contentTopic
const observers = contentTopicMap.get(topicOnlyMsg.contentTopic) as Set<
Observer<T>
>;
if (!observers) {
return;
}
await Promise.all(
Array.from(observers).map(({ decoder, callback }) => {
return (async () => {
@ -241,7 +269,7 @@ class Relay implements IRelay {
}
private isRelayPubSub(pubsub: PubSub | undefined): boolean {
return pubsub?.multicodecs?.includes(Relay.multicodec) || false;
return pubsub?.multicodecs?.includes(Relay.multicodec) ?? false;
}
}
@ -267,46 +295,3 @@ export function wakuGossipSub(
return pubsub;
};
}
function toObservers<T extends IDecodedMessage>(
decoders: IDecoder<T>[],
callback: Callback<T>
): Map<ContentTopic, Set<Observer<T>>> {
const contentTopicToDecoders = Array.from(
groupByContentTopic(decoders).entries()
);
const contentTopicToObserversEntries = contentTopicToDecoders.map(
([contentTopic, decoders]) =>
[
contentTopic,
new Set(
decoders.map(
(decoder) =>
({
decoder,
callback
}) as Observer<T>
)
)
] as [ContentTopic, Set<Observer<T>>]
);
return new Map(contentTopicToObserversEntries);
}
function union(left: Set<unknown>, right: Set<unknown>): Set<unknown> {
for (const val of right.values()) {
left.add(val);
}
return left;
}
function leftMinusJoin(left: Set<unknown>, right: Set<unknown>): Set<unknown> {
for (const val of right.values()) {
if (left.has(val)) {
left.delete(val);
}
}
return left;
}

View File

@ -1,3 +1,4 @@
import { DefaultPubSubTopic } from "@waku/core";
import type {
IDecodedMessage,
IDecoder,
@ -26,6 +27,7 @@ export class TopicOnlyMessage implements IDecodedMessage {
}
export class TopicOnlyDecoder implements IDecoder<TopicOnlyMessage> {
pubSubTopic = DefaultPubSubTopic;
public contentTopic = "";
fromWireToProtoObj(bytes: Uint8Array): Promise<IProtoMessage | undefined> {

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -45,7 +45,7 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@chainsafe/libp2p-noise": "^13.0.0",
@ -57,7 +57,7 @@
"@waku/dns-discovery": "0.0.17",
"@waku/interfaces": "0.0.18",
"@waku/peer-exchange": "^0.0.16",
"libp2p": "^0.46.9"
"libp2p": "^0.46.12"
},
"devDependencies": {
"@chainsafe/libp2p-gossipsub": "^10.1.0",
@ -68,11 +68,7 @@
"cspell": "^7.3.2",
"interface-datastore": "^8.2.5",
"npm-run-all": "^4.1.5",
"rollup": "^3.29.2",
"typescript": "^5.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"rollup": "^3.29.2"
},
"files": [
"dist",

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -47,10 +47,10 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"@libp2p/interface-compliance-tests": "^4.0.5",
"@libp2p/interface-compliance-tests": "^4.1.0",
"@libp2p/peer-id": "^3.0.2",
"@waku/core": "*",
"@waku/enr": "*",
@ -60,21 +60,21 @@
"chai-as-promised": "^7.1.1",
"debug": "^4.3.4",
"dockerode": "^3.3.5",
"p-retry": "^6.0.0",
"lodash": "^4.17.21",
"p-retry": "^6.1.0",
"p-timeout": "^6.1.0",
"portfinder": "^1.0.32",
"sinon": "^15.2.0",
"sinon": "^16.0.0",
"tail": "^2.2.6"
},
"devDependencies": {
"@libp2p/bootstrap": "^9.0.2",
"@types/chai": "^4.3.5",
"@types/dockerode": "^3.3.19",
"@types/lodash": "^4.14.199",
"@types/mocha": "^10.0.1",
"@types/sinon": "^10.0.16",
"@types/tail": "^2.2.1",
"@typescript-eslint/eslint-plugin": "^5.57.0",
"@typescript-eslint/parser": "^5.62.0",
"@waku/dns-discovery": "*",
"@waku/message-encryption": "*",
"@waku/peer-exchange": "*",
@ -84,9 +84,8 @@
"datastore-core": "^9.2.3",
"debug": "^4.3.4",
"interface-datastore": "^8.2.5",
"libp2p": "^0.46.9",
"libp2p": "^0.46.12",
"mocha": "^10.2.0",
"npm-run-all": "^4.1.5",
"typescript": "^5.0.4"
"npm-run-all": "^4.1.5"
}
}

View File

@ -1,7 +1,8 @@
import { DecodedMessage, DefaultPubSubTopic } from "@waku/core";
import { bytesToUtf8 } from "@waku/utils/bytes";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { AssertionError, expect } from "chai";
import debug from "debug";
import isEqual from "lodash/isEqual";
import { MessageRpcResponse } from "./node/interfaces.js";
@ -18,11 +19,7 @@ export class MessageCollector {
list: Array<MessageRpcResponse | DecodedMessage> = [];
callback: (msg: DecodedMessage) => void = () => {};
constructor(
private contentTopic: string,
private nwaku?: NimGoNode,
private pubSubTopic = DefaultPubSubTopic
) {
constructor(private nwaku?: NimGoNode) {
if (!this.nwaku) {
this.callback = (msg: DecodedMessage): void => {
log("Got a message");
@ -39,6 +36,20 @@ export class MessageCollector {
return this.list[index];
}
hasMessage(topic: string, text: string): boolean {
return this.list.some((message) => {
if (message.contentTopic !== topic) {
return false;
}
if (typeof message.payload === "string") {
return message.payload === text;
} else if (message.payload instanceof Uint8Array) {
return isEqual(message.payload, utf8ToBytes(text));
}
return false;
});
}
// Type guard to determine if a message is of type MessageRpcResponse
isMessageRpcResponse(
message: MessageRpcResponse | DecodedMessage
@ -51,14 +62,21 @@ export class MessageCollector {
async waitForMessages(
numMessages: number,
timeoutDuration: number = 400
options?: {
pubSubTopic?: string;
timeoutDuration?: number;
exact?: boolean;
}
): Promise<boolean> {
const startTime = Date.now();
const pubSubTopic = options?.pubSubTopic || DefaultPubSubTopic;
const timeoutDuration = options?.timeoutDuration || 400;
const exact = options?.exact || false;
while (this.count < numMessages) {
if (this.nwaku) {
try {
this.list = await this.nwaku.messages(this.pubSubTopic);
this.list = await this.nwaku.messages(pubSubTopic);
} catch (error) {
log(`Can't retrieve messages because of ${error}`);
await delay(10);
@ -72,7 +90,16 @@ export class MessageCollector {
await delay(10);
}
return true;
if (exact) {
if (this.count == numMessages) {
return true;
} else {
log(`Was expecting exactly ${numMessages} messages`);
return false;
}
} else {
return true;
}
}
// Verifies a received message against expected values.
@ -96,10 +123,8 @@ export class MessageCollector {
const message = this.getMessage(index);
expect(message.contentTopic).to.eq(
options.expectedContentTopic || this.contentTopic,
`Message content topic mismatch. Expected: ${
options.expectedContentTopic || this.contentTopic
}. Got: ${message.contentTopic}`
options.expectedContentTopic,
`Message content topic mismatch. Expected: ${options.expectedContentTopic}. Got: ${message.contentTopic}`
);
expect(message.version).to.eq(

View File

@ -14,7 +14,7 @@ export interface Args {
peerExchange?: boolean;
discv5Discovery?: boolean;
storeMessageDbUrl?: string;
topic?: string;
topic?: Array<string>;
rpcPrivate?: boolean;
websocketSupport?: boolean;
tcpPort?: number;

View File

@ -27,8 +27,7 @@ const WAKU_SERVICE_NODE_PARAMS =
process.env.WAKU_SERVICE_NODE_PARAMS ?? undefined;
const NODE_READY_LOG_LINE = "Node setup complete";
const DOCKER_IMAGE_NAME =
process.env.WAKUNODE_IMAGE || "statusteam/nim-waku:v0.19.0";
const DOCKER_IMAGE_NAME = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.20.0";
const isGoWaku = DOCKER_IMAGE_NAME.includes("go-waku");
@ -168,8 +167,8 @@ export class NimGoNode {
async startWithRetries(
args: Args,
options: {
retries: number;
}
retries?: number;
} = { retries: 3 }
): Promise<void> {
await pRetry(
async () => {
@ -210,14 +209,13 @@ export class NimGoNode {
}
async ensureSubscriptions(
pubsubTopics: [string] = [DefaultPubSubTopic]
pubsubTopics: string[] = [DefaultPubSubTopic]
): Promise<boolean> {
this.checkProcess();
return this.rpcCall<boolean>(
"post_waku_v2_relay_v1_subscriptions",
return this.rpcCall<boolean>("post_waku_v2_relay_v1_subscriptions", [
pubsubTopics
);
]);
}
async sendMessage(

View File

@ -3,8 +3,7 @@ import { promisify } from "util";
const execAsync = promisify(exec);
const WAKUNODE_IMAGE =
process.env.WAKUNODE_IMAGE || "statusteam/nim-waku:v0.19.0";
const WAKUNODE_IMAGE = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.20.0";
async function main() {
try {

View File

@ -1,23 +1,49 @@
import { LightNode } from "@waku/interfaces";
import debug from "debug";
import pRetry from "p-retry";
import { NimGoNode } from "./index.js";
const log = debug("waku:test");
export function tearDownNodes(
nwakuNodes: NimGoNode[],
wakuNodes: LightNode[]
): void {
nwakuNodes.forEach((nwaku) => {
export async function tearDownNodes(
nwakuNodes: NimGoNode | NimGoNode[],
wakuNodes: LightNode | LightNode[]
): Promise<void> {
const nNodes = Array.isArray(nwakuNodes) ? nwakuNodes : [nwakuNodes];
const wNodes = Array.isArray(wakuNodes) ? wakuNodes : [wakuNodes];
const stopNwakuNodes = nNodes.map(async (nwaku) => {
if (nwaku) {
nwaku.stop().catch((e) => log("Nwaku failed to stop", e));
await pRetry(
async () => {
try {
await nwaku.stop();
} catch (error) {
log("Nwaku failed to stop:", error);
throw error;
}
},
{ retries: 3 }
);
}
});
wakuNodes.forEach((waku) => {
const stopWakuNodes = wNodes.map(async (waku) => {
if (waku) {
waku.stop().catch((e) => log("Waku failed to stop", e));
await pRetry(
async () => {
try {
await waku.stop();
} catch (error) {
log("Waku failed to stop:", error);
throw error;
}
},
{ retries: 3 }
);
}
});
await Promise.all([...stopNwakuNodes, ...stopWakuNodes]);
}

View File

@ -149,10 +149,12 @@ describe("ConnectionManager", function () {
let waku: LightNode;
this.beforeEach(async function () {
this.timeout(15000);
waku = await createLightNode();
});
afterEach(async () => {
this.timeout(15000);
await waku.stop();
sinon.restore();
});

View File

@ -0,0 +1,149 @@
import {
createDecoder,
createEncoder,
DefaultPubSubTopic,
waitForRemotePeer
} from "@waku/core";
import type { IFilterSubscription, LightNode } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import {
makeLogFileName,
MessageCollector,
NimGoNode,
tearDownNodes
} from "../../src/index.js";
import {
runNodes,
TestContentTopic,
TestDecoder,
TestEncoder
} from "./utils.js";
describe("Waku Filter V2: Multiple PubSubtopics", function () {
// Set the timeout for all tests in this suite. Can be overwritten at test level
this.timeout(30000);
let waku: LightNode;
let nwaku: NimGoNode;
let nwaku2: NimGoNode;
let subscription: IFilterSubscription;
let messageCollector: MessageCollector;
const customPubSubTopic = "/waku/2/custom-dapp/proto";
const customContentTopic = "/test/2/waku-filter";
const newEncoder = createEncoder({
pubSubTopic: customPubSubTopic,
contentTopic: customContentTopic
});
const newDecoder = createDecoder(customContentTopic, customPubSubTopic);
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this, [
customPubSubTopic,
DefaultPubSubTopic
]);
subscription = await waku.filter.createSubscription(customPubSubTopic);
messageCollector = new MessageCollector();
});
this.afterEach(async function () {
this.timeout(15000);
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Subscribe and receive messages on custom pubsubtopic", async function () {
await subscription.subscribe([newDecoder], messageCollector.callback);
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M1") });
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedContentTopic: customContentTopic,
expectedPubSubTopic: customPubSubTopic,
expectedMessageText: "M1"
});
});
it("Subscribe and receive messages on 2 different pubsubtopics", async function () {
await subscription.subscribe([newDecoder], messageCollector.callback);
// Subscribe from the same lightnode to the 2nd pubSubtopic
const subscription2 =
await waku.filter.createSubscription(DefaultPubSubTopic);
const messageCollector2 = new MessageCollector();
await subscription2.subscribe([TestDecoder], messageCollector2.callback);
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M1") });
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M2") });
expect(await messageCollector.waitForMessages(1)).to.eq(true);
expect(await messageCollector2.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedContentTopic: customContentTopic,
expectedPubSubTopic: customPubSubTopic,
expectedMessageText: "M1"
});
messageCollector2.verifyReceivedMessage(0, {
expectedContentTopic: TestContentTopic,
expectedPubSubTopic: DefaultPubSubTopic,
expectedMessageText: "M2"
});
});
it("Subscribe and receive messages from 2 nwaku nodes each with different pubsubtopics", async function () {
await subscription.subscribe([newDecoder], messageCollector.callback);
// Set up and start a new nwaku node with Default PubSubtopic
nwaku2 = new NimGoNode(makeLogFileName(this) + "2");
await nwaku2.start({
filter: true,
lightpush: true,
relay: true,
topic: [DefaultPubSubTopic]
});
await waku.dial(await nwaku2.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
// Subscribe from the same lightnode to the new nwaku on the new pubSubtopic
const subscription2 = await waku.filter.createSubscription(
DefaultPubSubTopic,
await nwaku2.getPeerId()
);
await nwaku2.ensureSubscriptions([DefaultPubSubTopic]);
const messageCollector2 = new MessageCollector();
await subscription2.subscribe([TestDecoder], messageCollector2.callback);
// Making sure that messages are send and reveiced for both subscriptions
// While loop is done because of https://github.com/waku-org/js-waku/issues/1606
while (
!(await messageCollector.waitForMessages(1, {
pubSubTopic: customPubSubTopic
})) ||
!(await messageCollector2.waitForMessages(1, {
pubSubTopic: DefaultPubSubTopic
}))
) {
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M1") });
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M2") });
}
messageCollector.verifyReceivedMessage(0, {
expectedContentTopic: customContentTopic,
expectedPubSubTopic: customPubSubTopic,
expectedMessageText: "M1"
});
messageCollector2.verifyReceivedMessage(0, {
expectedContentTopic: TestContentTopic,
expectedPubSubTopic: DefaultPubSubTopic,
expectedMessageText: "M2"
});
});
});

View File

@ -1,3 +1,4 @@
import { DefaultPubSubTopic } from "@waku/core";
import type { IFilterSubscription, LightNode } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -22,13 +23,14 @@ describe("Waku Filter V2: Ping", function () {
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this);
[nwaku, waku] = await runNodes(this, [DefaultPubSubTopic]);
subscription = await waku.filter.createSubscription();
messageCollector = new MessageCollector(TestContentTopic);
messageCollector = new MessageCollector();
});
this.afterEach(async function () {
tearDownNodes([nwaku], [waku]);
this.timeout(15000);
await tearDownNodes(nwaku, waku);
});
it("Ping on subscribed peer", async function () {

View File

@ -31,13 +31,14 @@ describe("Waku Filter V2: FilterPush", function () {
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this);
[nwaku, waku] = await runNodes(this, [DefaultPubSubTopic]);
subscription = await waku.filter.createSubscription();
messageCollector = new MessageCollector(TestContentTopic);
messageCollector = new MessageCollector();
});
this.afterEach(async function () {
tearDownNodes([nwaku], [waku]);
this.timeout(15000);
await tearDownNodes(nwaku, waku);
});
TEST_STRING.forEach((testItem) => {
@ -49,7 +50,8 @@ describe("Waku Filter V2: FilterPush", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: testItem.value
expectedMessageText: testItem.value,
expectedContentTopic: TestContentTopic
});
});
});
@ -71,7 +73,8 @@ describe("Waku Filter V2: FilterPush", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
checkTimestamp: false
checkTimestamp: false,
expectedContentTopic: TestContentTopic
});
// Check if the timestamp matches
@ -217,7 +220,8 @@ describe("Waku Filter V2: FilterPush", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
});
@ -245,10 +249,12 @@ describe("Waku Filter V2: FilterPush", function () {
// Confirm both messages were received.
expect(await messageCollector.waitForMessages(2)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1"
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic
});
messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2"
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic
});
});
@ -268,10 +274,12 @@ describe("Waku Filter V2: FilterPush", function () {
// Confirm both messages were received.
expect(await messageCollector.waitForMessages(2)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1"
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic
});
messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2"
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic
});
});
});

View File

@ -39,16 +39,17 @@ describe("Waku Filter V2: Subscribe", function () {
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this);
[nwaku, waku] = await runNodes(this, [DefaultPubSubTopic]);
subscription = await waku.filter.createSubscription();
messageCollector = new MessageCollector(TestContentTopic);
messageCollector = new MessageCollector();
// Nwaku subscribes to the default pubsub topic
await nwaku.ensureSubscriptions();
});
this.afterEach(async function () {
tearDownNodes([nwaku, nwaku2], [waku]);
this.timeout(15000);
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Subscribe and receive messages via lightPush", async function () {
@ -58,7 +59,8 @@ describe("Waku Filter V2: Subscribe", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
expect((await nwaku.messages()).length).to.eq(1);
});
@ -78,7 +80,8 @@ describe("Waku Filter V2: Subscribe", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
expect((await nwaku.messages()).length).to.eq(1);
});
@ -90,7 +93,8 @@ describe("Waku Filter V2: Subscribe", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
// Send another message on the same topic.
@ -102,7 +106,8 @@ describe("Waku Filter V2: Subscribe", function () {
// Verify that the second message was successfully received.
expect(await messageCollector.waitForMessages(2)).to.eq(true);
messageCollector.verifyReceivedMessage(1, {
expectedMessageText: newMessageText
expectedMessageText: newMessageText,
expectedContentTopic: TestContentTopic
});
expect((await nwaku.messages()).length).to.eq(2);
});
@ -113,7 +118,8 @@ describe("Waku Filter V2: Subscribe", function () {
await waku.lightPush.send(TestEncoder, messagePayload);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
// Modify subscription to include a new content topic and send a message.
@ -136,7 +142,8 @@ describe("Waku Filter V2: Subscribe", function () {
await waku.lightPush.send(TestEncoder, newMessagePayload);
expect(await messageCollector.waitForMessages(3)).to.eq(true);
messageCollector.verifyReceivedMessage(2, {
expectedMessageText: newMessageText
expectedMessageText: newMessageText,
expectedContentTopic: TestContentTopic
});
expect((await nwaku.messages()).length).to.eq(3);
});
@ -258,10 +265,12 @@ describe("Waku Filter V2: Subscribe", function () {
// Confirm both messages were received.
expect(await messageCollector.waitForMessages(2)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1"
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic
});
messageCollector.verifyReceivedMessage(1, {
expectedMessageText: "M2"
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic
});
});
@ -298,7 +307,8 @@ describe("Waku Filter V2: Subscribe", function () {
// Check if both messages were received
expect(await messageCollector.waitForMessages(2)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1"
expectedMessageText: "M1",
expectedContentTopic: TestContentTopic
});
messageCollector.verifyReceivedMessage(1, {
expectedContentTopic: newContentTopic,
@ -306,38 +316,33 @@ describe("Waku Filter V2: Subscribe", function () {
});
});
// this test fails 50% of the time with messageCount being 1. Seems like a message is lost somehow
it.skip("Subscribe and receive messages from multiple nwaku nodes", async function () {
it("Subscribe and receive messages from multiple nwaku nodes", async function () {
await subscription.subscribe([TestDecoder], messageCollector.callback);
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
expect(await messageCollector.waitForMessages(1)).to.eq(true);
// Set up and start a new nwaku node
nwaku2 = new NimGoNode(makeLogFileName(this) + "2");
await nwaku2.start({ filter: true, lightpush: true, relay: true });
await waku.dial(await nwaku2.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
const subscription2 = await waku.filter.createSubscription(
DefaultPubSubTopic,
await nwaku2.getPeerId()
);
await nwaku2.ensureSubscriptions([DefaultPubSubTopic]);
// Send a message using the new subscription
const newContentTopic = "/test/2/waku-filter";
const newEncoder = createEncoder({ contentTopic: newContentTopic });
const newDecoder = createDecoder(newContentTopic);
await subscription2.subscribe([newDecoder], messageCollector.callback);
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M2") });
// Making sure that messages are sent and received for both subscriptions
while (!(await messageCollector.waitForMessages(2))) {
await waku.lightPush.send(TestEncoder, { payload: utf8ToBytes("M1") });
await waku.lightPush.send(newEncoder, { payload: utf8ToBytes("M2") });
}
// Check if both messages were received
expect(await messageCollector.waitForMessages(2)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1"
});
messageCollector.verifyReceivedMessage(1, {
expectedContentTopic: newContentTopic,
expectedMessageText: "M2"
});
expect(messageCollector.hasMessage(TestContentTopic, "M1")).to.eq(true);
expect(messageCollector.hasMessage(newContentTopic, "M2")).to.eq(true);
});
});

View File

@ -1,4 +1,4 @@
import { createDecoder, createEncoder } from "@waku/core";
import { createDecoder, createEncoder, DefaultPubSubTopic } from "@waku/core";
import type { IFilterSubscription, LightNode } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -25,16 +25,17 @@ describe("Waku Filter V2: Unsubscribe", function () {
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this);
[nwaku, waku] = await runNodes(this, [DefaultPubSubTopic]);
subscription = await waku.filter.createSubscription();
messageCollector = new MessageCollector(TestContentTopic);
messageCollector = new MessageCollector();
// Nwaku subscribes to the default pubsub topic
await nwaku.ensureSubscriptions();
});
this.afterEach(async function () {
tearDownNodes([nwaku], [waku]);
this.timeout(15000);
await tearDownNodes(nwaku, waku);
});
it("Unsubscribe 1 topic - node subscribed to 1 topic", async function () {
@ -49,7 +50,8 @@ describe("Waku Filter V2: Unsubscribe", function () {
// Check that from 2 messages send only the 1st was received
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
expect(messageCollector.count).to.eq(1);
expect((await nwaku.messages()).length).to.eq(2);

View File

@ -64,14 +64,17 @@ export async function validatePingError(
}
export async function runNodes(
currentTest: Context
context: Context,
pubSubTopics: string[]
): Promise<[NimGoNode, LightNode]> {
const nwaku = new NimGoNode(makeLogFileName(currentTest));
const nwaku = new NimGoNode(makeLogFileName(context));
await nwaku.startWithRetries(
{
filter: true,
lightpush: true,
relay: true
relay: true,
topic: pubSubTopics
},
{ retries: 3 }
);
@ -79,6 +82,7 @@ export async function runNodes(
let waku: LightNode | undefined;
try {
waku = await createLightNode({
pubSubTopics: pubSubTopics,
staticNoiseKey: NOISE_KEY_1,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
});
@ -90,6 +94,7 @@ export async function runNodes(
if (waku) {
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Filter, Protocols.LightPush]);
await nwaku.ensureSubscriptions(pubSubTopics);
return [nwaku, waku];
} else {
throw new Error("Failed to initialize waku");

View File

@ -1,49 +0,0 @@
import { LightNode } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import { MessageCollector, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
messageText,
runNodes,
TestContentTopic,
TestEncoder
} from "./utils.js";
describe("Waku Light Push [node only] - custom pubsub topic", function () {
this.timeout(15000);
let waku: LightNode;
let nwaku: NimGoNode;
let messageCollector: MessageCollector;
const customPubSubTopic = "/waku/2/custom-dapp/proto";
beforeEach(async function () {
[nwaku, waku] = await runNodes(this, customPubSubTopic);
messageCollector = new MessageCollector(
TestContentTopic,
nwaku,
customPubSubTopic
);
});
this.afterEach(async function () {
tearDownNodes([nwaku], [waku]);
});
it("Push message", async function () {
const nimPeerId = await nwaku.getPeerId();
const pushResponse = await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes(messageText)
});
expect(pushResponse.recipients[0].toString()).to.eq(nimPeerId.toString());
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
});
});

View File

@ -19,7 +19,7 @@ import {
TestEncoder
} from "./utils.js";
describe("Waku Light Push [node only]", function () {
describe("Waku Light Push", function () {
// Set the timeout for all tests in this suite. Can be overwritten at test level
this.timeout(15000);
let waku: LightNode;
@ -28,16 +28,15 @@ describe("Waku Light Push [node only]", function () {
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this);
messageCollector = new MessageCollector(
TestContentTopic,
nwaku,
DefaultPubSubTopic
);
[nwaku, waku] = await runNodes(this, [DefaultPubSubTopic]);
messageCollector = new MessageCollector(nwaku);
await nwaku.ensureSubscriptions();
});
this.afterEach(async function () {
tearDownNodes([nwaku], [waku]);
this.timeout(15000);
await tearDownNodes(nwaku, waku);
});
TEST_STRING.forEach((testItem) => {
@ -49,7 +48,8 @@ describe("Waku Light Push [node only]", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: testItem.value
expectedMessageText: testItem.value,
expectedContentTopic: TestContentTopic
});
});
});
@ -68,7 +68,8 @@ describe("Waku Light Push [node only]", function () {
for (let i = 0; i < 30; i++) {
messageCollector.verifyReceivedMessage(i, {
expectedMessageText: generateMessageText(i)
expectedMessageText: generateMessageText(i),
expectedContentTopic: TestContentTopic
});
}
});
@ -82,7 +83,8 @@ describe("Waku Light Push [node only]", function () {
expect(pushResponse.recipients.length).to.eq(1);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: undefined
expectedMessageText: undefined,
expectedContentTopic: TestContentTopic
});
} else {
expect(pushResponse.recipients.length).to.eq(0);
@ -136,7 +138,8 @@ describe("Waku Light Push [node only]", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
});
@ -155,7 +158,8 @@ describe("Waku Light Push [node only]", function () {
expect(pushResponse.recipients.length).to.eq(1);
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
} else {
expect(pushResponse.recipients.length).to.eq(0);
@ -184,7 +188,8 @@ describe("Waku Light Push [node only]", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText
expectedMessageText: messageText,
expectedContentTopic: TestContentTopic
});
});
@ -204,7 +209,8 @@ describe("Waku Light Push [node only]", function () {
expect(await messageCollector.waitForMessages(1)).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedTimestamp: customTimeNanos
expectedTimestamp: customTimeNanos,
expectedContentTopic: TestContentTopic
});
});
});

View File

@ -0,0 +1,156 @@
import type { PeerId } from "@libp2p/interface/peer-id";
import {
createEncoder,
DefaultPubSubTopic,
waitForRemotePeer
} from "@waku/core";
import { LightNode, Protocols, SendResult } from "@waku/interfaces";
import { utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import {
makeLogFileName,
MessageCollector,
NimGoNode,
tearDownNodes
} from "../../src/index.js";
import {
messageText,
runNodes,
TestContentTopic,
TestEncoder
} from "./utils.js";
describe("Waku Light Push : Multiple PubSubtopics", function () {
this.timeout(30000);
let waku: LightNode;
let nwaku: NimGoNode;
let nwaku2: NimGoNode;
let messageCollector: MessageCollector;
const customPubSubTopic = "/waku/2/custom-dapp/proto";
const customContentTopic = "/test/2/waku-light-push/utf8";
const customEncoder = createEncoder({
contentTopic: customContentTopic,
pubSubTopic: customPubSubTopic
});
let nimPeerId: PeerId;
this.beforeEach(async function () {
this.timeout(15000);
[nwaku, waku] = await runNodes(this, [
customPubSubTopic,
DefaultPubSubTopic
]);
messageCollector = new MessageCollector(nwaku);
nimPeerId = await nwaku.getPeerId();
});
this.afterEach(async function () {
this.timeout(15000);
await tearDownNodes([nwaku, nwaku2], waku);
});
it("Push message on custom pubSubTopic", async function () {
const pushResponse = await waku.lightPush.send(customEncoder, {
payload: utf8ToBytes(messageText)
});
expect(pushResponse.recipients[0].toString()).to.eq(nimPeerId.toString());
expect(
await messageCollector.waitForMessages(1, {
pubSubTopic: customPubSubTopic
})
).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: messageText,
expectedContentTopic: customContentTopic
});
});
it("Subscribe and receive messages on 2 different pubsubtopics", async function () {
const pushResponse1 = await waku.lightPush.send(customEncoder, {
payload: utf8ToBytes("M1")
});
const pushResponse2 = await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M2")
});
expect(pushResponse1.recipients[0].toString()).to.eq(nimPeerId.toString());
expect(pushResponse2.recipients[0].toString()).to.eq(nimPeerId.toString());
const messageCollector2 = new MessageCollector(nwaku);
expect(
await messageCollector.waitForMessages(1, {
pubSubTopic: customPubSubTopic
})
).to.eq(true);
expect(
await messageCollector2.waitForMessages(1, {
pubSubTopic: DefaultPubSubTopic
})
).to.eq(true);
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: customContentTopic,
expectedPubSubTopic: customPubSubTopic
});
messageCollector2.verifyReceivedMessage(0, {
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic,
expectedPubSubTopic: DefaultPubSubTopic
});
});
it("Light push messages to 2 nwaku nodes each with different pubsubtopics", async function () {
// Set up and start a new nwaku node with Default PubSubtopic
nwaku2 = new NimGoNode(makeLogFileName(this) + "2");
await nwaku2.start({
filter: true,
lightpush: true,
relay: true,
topic: [DefaultPubSubTopic]
});
await nwaku2.ensureSubscriptions([DefaultPubSubTopic]);
await waku.dial(await nwaku2.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.LightPush]);
const messageCollector2 = new MessageCollector(nwaku2);
let pushResponse1: SendResult;
let pushResponse2: SendResult;
// Making sure that we send messages to both nwaku nodes
// While loop is done because of https://github.com/waku-org/js-waku/issues/1606
while (
!(await messageCollector.waitForMessages(1, {
pubSubTopic: customPubSubTopic
})) ||
!(await messageCollector2.waitForMessages(1, {
pubSubTopic: DefaultPubSubTopic
})) ||
pushResponse1!.recipients[0].toString() ===
pushResponse2!.recipients[0].toString()
) {
pushResponse1 = await waku.lightPush.send(customEncoder, {
payload: utf8ToBytes("M1")
});
pushResponse2 = await waku.lightPush.send(TestEncoder, {
payload: utf8ToBytes("M2")
});
}
messageCollector.verifyReceivedMessage(0, {
expectedMessageText: "M1",
expectedContentTopic: customContentTopic,
expectedPubSubTopic: customPubSubTopic
});
messageCollector2.verifyReceivedMessage(0, {
expectedMessageText: "M2",
expectedContentTopic: TestContentTopic,
expectedPubSubTopic: DefaultPubSubTopic
});
});
});

View File

@ -14,23 +14,18 @@ export const messagePayload = { payload: utf8ToBytes(messageText) };
export async function runNodes(
context: Mocha.Context,
pubSubTopic?: string
pubSubTopics: string[]
): Promise<[NimGoNode, LightNode]> {
const nwakuOptional = pubSubTopic ? { topic: pubSubTopic } : {};
const nwaku = new NimGoNode(makeLogFileName(context));
await nwaku.startWithRetries(
{
lightpush: true,
relay: true,
...nwakuOptional
},
{ lightpush: true, relay: true, topic: pubSubTopics },
{ retries: 3 }
);
let waku: LightNode | undefined;
try {
waku = await createLightNode({
pubSubTopic,
pubSubTopics: pubSubTopics,
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
@ -41,6 +36,7 @@ export async function runNodes(
if (waku) {
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.LightPush]);
await nwaku.ensureSubscriptions(pubSubTopics);
return [nwaku, waku];
} else {
throw new Error("Failed to initialize waku");

View File

@ -0,0 +1,40 @@
import { multiaddr } from "@multiformats/multiaddr";
import type { Waku } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { expect } from "chai";
import { NimGoNode } from "../src/index.js";
describe("dials multiaddr", function () {
let waku: Waku;
let nwaku: NimGoNode;
afterEach(async function () {
!!nwaku &&
nwaku.stop().catch((e) => console.log("Nwaku failed to stop", e));
!!waku && waku.stop().catch((e) => console.log("Waku failed to stop", e));
});
it("TLS", async function () {
this.timeout(20_000);
let tlsWorks = true;
waku = await createLightNode();
await waku.start();
try {
// dummy multiaddr, doesn't have to be valid
await waku.dial(multiaddr(`/ip4/127.0.0.1/tcp/30303/tls/ws`));
} catch (error) {
if (error instanceof Error) {
// if the error is of tls unsupported, the test should fail
// for any other dial errors, the test should pass
if (error.message === "Unsupported protocol tls") {
tlsWorks = false;
}
}
}
expect(tlsWorks).to.eq(true);
});
});

View File

@ -27,6 +27,7 @@ import debug from "debug";
import {
delay,
makeLogFileName,
MessageCollector,
NOISE_KEY_1,
NOISE_KEY_2,
NOISE_KEY_3
@ -259,6 +260,16 @@ describe("Waku Relay [node only]", () => {
let waku1: RelayNode;
let waku2: RelayNode;
let waku3: RelayNode;
const CustomContentTopic = "/test/2/waku-relay/utf8";
const CustomPubSubTopic = "/some/pubsub/topic";
const CustomEncoder = createEncoder({
contentTopic: CustomContentTopic,
pubSubTopic: CustomPubSubTopic
});
const CustomDecoder = createDecoder(CustomContentTopic, CustomPubSubTopic);
afterEach(async function () {
!!waku1 &&
waku1.stop().catch((e) => console.log("Waku failed to stop", e));
@ -268,20 +279,196 @@ describe("Waku Relay [node only]", () => {
waku3.stop().catch((e) => console.log("Waku failed to stop", e));
});
it("Publish", async function () {
// Data-driven: run the same 3-node relay scenario once on the custom pubsub
// topic and once on the default one.
[
  {
    pubsub: CustomPubSubTopic,
    encoder: CustomEncoder,
    decoder: CustomDecoder
  },
  {
    pubsub: DefaultPubSubTopic,
    encoder: TestEncoder,
    decoder: TestDecoder
  }
].forEach((testItem) => {
  it(`3 nodes on ${testItem.pubsub} topic`, async function () {
    this.timeout(10000);
    // One collector per node records the messages that node receives.
    const [msgCollector1, msgCollector2, msgCollector3] = Array(3)
      .fill(null)
      .map(() => new MessageCollector());

    // Start three relay nodes on the same pubsub topic. Only waku2 exposes a
    // websocket listen address, so waku1 and waku3 dial it, giving a star
    // topology: waku1 <-> waku2 <-> waku3.
    [waku1, waku2, waku3] = await Promise.all([
      createRelayNode({
        pubSubTopics: [testItem.pubsub],
        staticNoiseKey: NOISE_KEY_1
      }).then((waku) => waku.start().then(() => waku)),
      createRelayNode({
        pubSubTopics: [testItem.pubsub],
        staticNoiseKey: NOISE_KEY_2,
        libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
      }).then((waku) => waku.start().then(() => waku)),
      createRelayNode({
        pubSubTopics: [testItem.pubsub],
        staticNoiseKey: NOISE_KEY_3
      }).then((waku) => waku.start().then(() => waku))
    ]);

    // Teach waku1/waku3 about waku2's addresses, dial it, then wait until
    // every node sees at least one relay-capable peer.
    await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
      multiaddrs: waku2.libp2p.getMultiaddrs()
    });
    await waku3.libp2p.peerStore.merge(waku2.libp2p.peerId, {
      multiaddrs: waku2.libp2p.getMultiaddrs()
    });
    await Promise.all([
      waku1.dial(waku2.libp2p.peerId),
      waku3.dial(waku2.libp2p.peerId)
    ]);
    await Promise.all([
      waitForRemotePeer(waku1, [Protocols.Relay]),
      waitForRemotePeer(waku2, [Protocols.Relay]),
      waitForRemotePeer(waku3, [Protocols.Relay])
    ]);

    await waku1.relay.subscribe([testItem.decoder], msgCollector1.callback);
    await waku2.relay.subscribe([testItem.decoder], msgCollector2.callback);
    await waku3.relay.subscribe([testItem.decoder], msgCollector3.callback);

    // The nodes are setup in such a way that all messages send should be relayed to the other nodes in the network
    const relayResponse1 = await waku1.relay.send(testItem.encoder, {
      payload: utf8ToBytes("M1")
    });
    const relayResponse2 = await waku2.relay.send(testItem.encoder, {
      payload: utf8ToBytes("M2")
    });
    const relayResponse3 = await waku3.relay.send(testItem.encoder, {
      payload: utf8ToBytes("M3")
    });

    // waku1 and waku3 are connected only to waku2, so their sole direct
    // recipient is waku2; waku2's send reaches both edge nodes directly.
    expect(relayResponse1.recipients[0].toString()).to.eq(
      waku2.libp2p.peerId.toString()
    );
    expect(relayResponse3.recipients[0].toString()).to.eq(
      waku2.libp2p.peerId.toString()
    );
    expect(relayResponse2.recipients.map((r) => r.toString())).to.include(
      waku1.libp2p.peerId.toString()
    );
    expect(relayResponse2.recipients.map((r) => r.toString())).to.include(
      waku3.libp2p.peerId.toString()
    );

    // Each node must end up with exactly the two messages sent by the other
    // two nodes ({ exact: true } rejects extras).
    expect(await msgCollector1.waitForMessages(2, { exact: true })).to.eq(
      true
    );
    expect(await msgCollector2.waitForMessages(2, { exact: true })).to.eq(
      true
    );
    expect(await msgCollector3.waitForMessages(2, { exact: true })).to.eq(
      true
    );
    expect(msgCollector1.hasMessage(testItem.pubsub, "M2")).to.be.true;
    expect(msgCollector1.hasMessage(testItem.pubsub, "M3")).to.be.true;
    expect(msgCollector2.hasMessage(testItem.pubsub, "M1")).to.be.true;
    expect(msgCollector2.hasMessage(testItem.pubsub, "M3")).to.be.true;
    expect(msgCollector3.hasMessage(testItem.pubsub, "M1")).to.be.true;
    expect(msgCollector3.hasMessage(testItem.pubsub, "M2")).to.be.true;
  });
});
// Scenario: waku1 and waku2 are configured with BOTH the default and the
// custom pubsub topic, while waku3 only knows the default topic. Messages on
// the custom topic must reach waku1/waku2 but never waku3.
// Fixes vs original: removed a verbatim duplicate of the
// `msgCollector1.hasMessage(DefaultPubSubTopic, "M5")` assertion and corrected
// comment typos ("topis", "onlt").
it("Nodes with multiple pubsub topic", async function () {
  this.timeout(10000);
  const pubSubTopic = "/some/pubsub/topic";
  // One collector per node records the messages that node receives.
  const [msgCollector1, msgCollector2, msgCollector3] = Array(3)
    .fill(null)
    .map(() => new MessageCollector());

  // Waku1 and waku2 are using multiple pubsub topics; waku3 only the default.
  // NOTE(review): the singular `pubSubTopic` option below looks like a
  // leftover — `pubSubTopics` is what configures these nodes. Confirm whether
  // createRelayNode still reads the singular option and drop it if not.
  [waku1, waku2, waku3] = await Promise.all([
    createRelayNode({
      pubSubTopic: pubSubTopic,
      pubSubTopics: [DefaultPubSubTopic, CustomPubSubTopic],
      staticNoiseKey: NOISE_KEY_1
    }).then((waku) => waku.start().then(() => waku)),
    createRelayNode({
      pubSubTopic: pubSubTopic,
      pubSubTopics: [DefaultPubSubTopic, CustomPubSubTopic],
      staticNoiseKey: NOISE_KEY_2,
      libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
    }).then((waku) => waku.start().then(() => waku)),
    createRelayNode({
      pubSubTopics: [DefaultPubSubTopic],
      staticNoiseKey: NOISE_KEY_3
    }).then((waku) => waku.start().then(() => waku))
  ]);

  // Star topology around waku2 (the only node with a listen address).
  await waku1.libp2p.peerStore.merge(waku2.libp2p.peerId, {
    multiaddrs: waku2.libp2p.getMultiaddrs()
  });
  await waku3.libp2p.peerStore.merge(waku2.libp2p.peerId, {
    multiaddrs: waku2.libp2p.getMultiaddrs()
  });
  await Promise.all([
    waku1.dial(waku2.libp2p.peerId),
    waku3.dial(waku2.libp2p.peerId)
  ]);
  await Promise.all([
    waitForRemotePeer(waku1, [Protocols.Relay]),
    waitForRemotePeer(waku2, [Protocols.Relay]),
    waitForRemotePeer(waku3, [Protocols.Relay])
  ]);

  await waku1.relay.subscribe(
    [TestDecoder, CustomDecoder],
    msgCollector1.callback
  );
  await waku2.relay.subscribe(
    [TestDecoder, CustomDecoder],
    msgCollector2.callback
  );
  await waku3.relay.subscribe([TestDecoder], msgCollector3.callback);

  // The nodes are setup in such a way that all messages sent should be
  // relayed to the other nodes in the network. However only waku1 and waku2
  // are receiving messages on the CustomPubSubTopic.
  await waku1.relay.send(TestEncoder, { payload: utf8ToBytes("M1") });
  await waku1.relay.send(CustomEncoder, { payload: utf8ToBytes("M2") });
  await waku2.relay.send(TestEncoder, { payload: utf8ToBytes("M3") });
  await waku2.relay.send(CustomEncoder, { payload: utf8ToBytes("M4") });
  await waku3.relay.send(TestEncoder, { payload: utf8ToBytes("M5") });
  await waku3.relay.send(CustomEncoder, { payload: utf8ToBytes("M6") });

  expect(await msgCollector1.waitForMessages(3, { exact: true })).to.eq(
    true
  );
  expect(await msgCollector2.waitForMessages(3, { exact: true })).to.eq(
    true
  );
  expect(await msgCollector3.waitForMessages(2, { exact: true })).to.eq(
    true
  );
  expect(msgCollector1.hasMessage(DefaultPubSubTopic, "M3")).to.be.true;
  expect(msgCollector1.hasMessage(CustomPubSubTopic, "M4")).to.be.true;
  expect(msgCollector1.hasMessage(DefaultPubSubTopic, "M5")).to.be.true;
  // NOTE(review): M1/M2 below were sent by waku1 itself while the exact count
  // above is 3 — confirm MessageCollector's handling of own messages.
  expect(msgCollector1.hasMessage(DefaultPubSubTopic, "M1")).to.be.true;
  expect(msgCollector1.hasMessage(CustomPubSubTopic, "M2")).to.be.true;
  // NOTE(review): M1 was sent via TestEncoder (default topic) yet is asserted
  // here under CustomPubSubTopic — possible latent bug; verify hasMessage's
  // topic matching (the intended check may be CustomPubSubTopic/"M2").
  expect(msgCollector2.hasMessage(CustomPubSubTopic, "M1")).to.be.true;
  expect(msgCollector2.hasMessage(DefaultPubSubTopic, "M3")).to.be.true;
  expect(msgCollector3.hasMessage(DefaultPubSubTopic, "M1")).to.be.true;
});
it("n1 and n2 uses a custom pubsub, n3 uses the default pubsub", async function () {
this.timeout(10000);
[waku1, waku2, waku3] = await Promise.all([
createRelayNode({
pubSubTopics: [CustomPubSubTopic],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
pubSubTopics: [CustomPubSubTopic],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku)),
@ -310,7 +497,7 @@ describe("Waku Relay [node only]", () => {
const waku2ReceivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku2.relay.subscribe([TestDecoder], resolve);
void waku2.relay.subscribe([CustomDecoder], resolve);
}
);
@ -323,7 +510,7 @@ describe("Waku Relay [node only]", () => {
}
);
await waku1.relay.send(TestEncoder, {
await waku1.relay.send(CustomEncoder, {
payload: utf8ToBytes(messageText)
});
@ -331,23 +518,21 @@ describe("Waku Relay [node only]", () => {
await waku3NoMsgPromise;
expect(bytesToUtf8(waku2ReceivedMsg.payload!)).to.eq(messageText);
expect(waku2ReceivedMsg.pubSubTopic).to.eq(pubSubTopic);
expect(waku2ReceivedMsg.pubSubTopic).to.eq(CustomPubSubTopic);
});
it("Publishes <= 1 MB and rejects others", async function () {
this.timeout(10000);
const MB = 1024 ** 2;
const pubSubTopic = "/some/pubsub/topic";
// 1 and 2 uses a custom pubsub
[waku1, waku2] = await Promise.all([
createRelayNode({
pubSubTopic: pubSubTopic,
pubSubTopics: [CustomPubSubTopic],
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createRelayNode({
pubSubTopic: pubSubTopic,
pubSubTopics: [CustomPubSubTopic],
staticNoiseKey: NOISE_KEY_2,
libp2p: { addresses: { listen: ["/ip4/0.0.0.0/tcp/0/ws"] } }
}).then((waku) => waku.start().then(() => waku))
@ -365,7 +550,7 @@ describe("Waku Relay [node only]", () => {
const waku2ReceivedMsgPromise: Promise<DecodedMessage> = new Promise(
(resolve) => {
void waku2.relay.subscribe([TestDecoder], () =>
void waku2.relay.subscribe([CustomDecoder], () =>
resolve({
payload: new Uint8Array([])
} as DecodedMessage)
@ -373,18 +558,18 @@ describe("Waku Relay [node only]", () => {
}
);
let sendResult = await waku1.relay.send(TestEncoder, {
let sendResult = await waku1.relay.send(CustomEncoder, {
payload: generateRandomUint8Array(1 * MB)
});
expect(sendResult.recipients.length).to.eq(1);
sendResult = await waku1.relay.send(TestEncoder, {
sendResult = await waku1.relay.send(CustomEncoder, {
payload: generateRandomUint8Array(1 * MB + 65536)
});
expect(sendResult.recipients.length).to.eq(0);
expect(sendResult.errors).to.include(SendError.SIZE_TOO_BIG);
sendResult = await waku1.relay.send(TestEncoder, {
sendResult = await waku1.relay.send(CustomEncoder, {
payload: generateRandomUint8Array(2 * MB)
});
expect(sendResult.recipients.length).to.eq(0);

View File

@ -0,0 +1,79 @@
import { createLightNode, LightNode, utf8ToBytes } from "@waku/sdk";
import { createEncoder } from "@waku/sdk";
import chai, { expect } from "chai";
import chaiAsPromised from "chai-as-promised";
import { makeLogFileName } from "../src/log_file.js";
import { NimGoNode } from "../src/node/node.js";
// Two static-sharding pubsub topics: cluster 0, shards 2 and 3.
const PubSubTopic1 = "/waku/2/rs/0/2";
const PubSubTopic2 = "/waku/2/rs/0/3";
// Content topic shared by every message in this suite.
const ContentTopic = "/waku/2/content/test";
// Enable promise assertions (`fulfilled` / `rejectedWith`).
chai.use(chaiAsPromised);
describe("Static Sharding", () => {
  let waku: LightNode;
  let nwaku: NimGoNode;

  // Fresh nwaku node (store/lightpush/relay enabled) for every test.
  beforeEach(async function () {
    this.timeout(15_000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.start({ store: true, lightpush: true, relay: true });
  });

  // Best-effort teardown: stop failures are logged, never rethrown.
  afterEach(async function () {
    !!nwaku &&
      nwaku.stop().catch((e) => console.log("Nwaku failed to stop", e));
    !!waku && waku.stop().catch((e) => console.log("Waku failed to stop", e));
  });

  it("configure the node with multiple pubsub topics", async function () {
    this.timeout(15_000);
    // NOTE(review): the light node is never dialed to nwaku in this test —
    // confirm lightPush.send resolves (fulfilled) without a connected peer.
    waku = await createLightNode({
      pubSubTopics: [PubSubTopic1, PubSubTopic2]
    });
    // One encoder per configured shard; pushes on both should succeed.
    const encoder1 = createEncoder({
      contentTopic: ContentTopic,
      pubSubTopic: PubSubTopic1
    });
    const encoder2 = createEncoder({
      contentTopic: ContentTopic,
      pubSubTopic: PubSubTopic2
    });
    const request1 = waku.lightPush.send(encoder1, {
      payload: utf8ToBytes("Hello World")
    });
    const request2 = waku.lightPush.send(encoder2, {
      payload: utf8ToBytes("Hello World")
    });
    await expect(request1).to.be.fulfilled;
    await expect(request2).to.be.fulfilled;
  });

  it("using a protocol with unconfigured pubsub topic should fail", async function () {
    this.timeout(15_000);
    waku = await createLightNode({
      pubSubTopics: [PubSubTopic1]
    });
    // use a pubsub topic that is not configured
    const encoder = createEncoder({
      contentTopic: ContentTopic,
      pubSubTopic: PubSubTopic2
    });
    // the following request should throw an error
    const request = waku.lightPush.send(encoder, {
      payload: utf8ToBytes("Hello World")
    });
    await expect(request).to.be.rejectedWith(Error);
  });
});

View File

@ -1,630 +0,0 @@
import {
createCursor,
createDecoder,
createEncoder,
DecodedMessage,
PageDirection,
waitForRemotePeer
} from "@waku/core";
import type { IMessage, LightNode } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import {
createDecoder as createEciesDecoder,
createEncoder as createEciesEncoder,
generatePrivateKey,
getPublicKey
} from "@waku/message-encryption/ecies";
import {
createDecoder as createSymDecoder,
createEncoder as createSymEncoder,
generateSymmetricKey
} from "@waku/message-encryption/symmetric";
import { createLightNode } from "@waku/sdk";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import debug from "debug";
import {
delay,
makeLogFileName,
NOISE_KEY_1,
NOISE_KEY_2
} from "../src/index.js";
import { NimGoNode } from "../src/node/node.js";
// Namespaced debug logger for this suite.
const log = debug("waku:test:store");
// Default content topic plus encoder/decoder on the default pubsub topic.
const TestContentTopic = "/test/1/waku-store/utf8";
const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
const TestDecoder = createDecoder(TestContentTopic);
describe("Waku Store", () => {
let waku: LightNode;
let nwaku: NimGoNode;
beforeEach(async function () {
this.timeout(15_000);
nwaku = new NimGoNode(makeLogFileName(this));
await nwaku.start({ store: true, lightpush: true, relay: true });
});
afterEach(async function () {
!!nwaku &&
nwaku.stop().catch((e) => console.log("Nwaku failed to stop", e));
!!waku && waku.stop().catch((e) => console.log("Waku failed to stop", e));
});
it("Generator", async function () {
this.timeout(15_000);
const totalMsgs = 20;
for (let i = 0; i < totalMsgs; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic
})
)
).to.be.true;
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const messages: IMessage[] = [];
let promises: Promise<void>[] = [];
for await (const msgPromises of waku.store.queryGenerator([TestDecoder])) {
const _promises = msgPromises.map(async (promise) => {
const msg = await promise;
if (msg) {
messages.push(msg);
}
});
promises = promises.concat(_promises);
}
await Promise.all(promises);
expect(messages?.length).eq(totalMsgs);
const result = messages?.findIndex((msg) => {
return msg.payload[0]! === 0;
});
expect(result).to.not.eq(-1);
});
it("Generator, no message returned", async function () {
this.timeout(15_000);
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const messages: IMessage[] = [];
let promises: Promise<void>[] = [];
for await (const msgPromises of waku.store.queryGenerator([TestDecoder])) {
const _promises = msgPromises.map(async (promise) => {
const msg = await promise;
if (msg) {
messages.push(msg);
}
});
promises = promises.concat(_promises);
}
await Promise.all(promises);
expect(messages?.length).eq(0);
});
// Verifies that a cursor created from a previously received message makes a
// follow-up store query resume right after that message.
it("Passing a cursor", async function () {
  // Fix: was 4_000, inconsistent with the 15_000 used by every sibling test
  // in this suite and too tight for 20 sends + node startup + dial.
  this.timeout(15_000);
  const totalMsgs = 20;

  // Seed the nwaku store with 20 messages on the default pubsub topic.
  for (let i = 0; i < totalMsgs; i++) {
    expect(
      await nwaku.sendMessage(
        NimGoNode.toMessageRpcQuery({
          payload: utf8ToBytes(`Message ${i}`),
          contentTopic: TestContentTopic
        })
      )
    ).to.be.true;
  }

  waku = await createLightNode({
    staticNoiseKey: NOISE_KEY_1
  });
  await waku.start();
  await waku.dial(await nwaku.getMultiaddrWithId());
  await waitForRemotePeer(waku, [Protocols.Store]);

  const query = waku.store.queryGenerator([TestDecoder]);

  // messages in reversed order (first message at last index)
  const messages: DecodedMessage[] = [];
  for await (const page of query) {
    for await (const msg of page.reverse()) {
      messages.push(msg as DecodedMessage);
    }
  }

  // index 2 would mean the third last message sent
  const cursorIndex = 2;

  // create cursor to extract messages after the 3rd index
  const cursor = await createCursor(messages[cursorIndex]);

  const messagesAfterCursor: DecodedMessage[] = [];
  for await (const page of waku.store.queryGenerator([TestDecoder], {
    cursor
  })) {
    for await (const msg of page.reverse()) {
      messagesAfterCursor.push(msg as DecodedMessage);
    }
  }

  // The first message after the cursor must be the one directly following
  // the cursor message in the collected list.
  const testMessage = messagesAfterCursor[0];
  expect(messages.length).be.eq(totalMsgs);
  expect(bytesToUtf8(testMessage.payload)).to.be.eq(
    bytesToUtf8(messages[cursorIndex + 1].payload)
  );
});
it("Callback on promise", async function () {
this.timeout(15_000);
const totalMsgs = 15;
for (let i = 0; i < totalMsgs; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic
})
)
).to.be.true;
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const messages: IMessage[] = [];
await waku.store.queryWithPromiseCallback(
[TestDecoder],
async (msgPromise) => {
const msg = await msgPromise;
if (msg) {
messages.push(msg);
}
}
);
expect(messages?.length).eq(totalMsgs);
const result = messages?.findIndex((msg) => {
return msg.payload[0]! === 0;
});
expect(result).to.not.eq(-1);
});
it("Callback on promise, aborts when callback returns true", async function () {
this.timeout(15_000);
const totalMsgs = 20;
for (let i = 0; i < totalMsgs; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic
})
)
).to.be.true;
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const desiredMsgs = 14;
const messages: IMessage[] = [];
await waku.store.queryWithPromiseCallback(
[TestDecoder],
async (msgPromise) => {
const msg = await msgPromise;
if (msg) {
messages.push(msg);
}
return messages.length >= desiredMsgs;
},
{ pageSize: 7 }
);
expect(messages?.length).eq(desiredMsgs);
});
it("Ordered Callback - Forward", async function () {
this.timeout(15_000);
const totalMsgs = 18;
for (let i = 0; i < totalMsgs; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic
})
)
).to.be.true;
await delay(1); // to ensure each timestamp is unique.
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const messages: IMessage[] = [];
await waku.store.queryWithOrderedCallback(
[TestDecoder],
async (msg) => {
messages.push(msg);
},
{
pageDirection: PageDirection.FORWARD
}
);
expect(messages?.length).eq(totalMsgs);
const payloads = messages.map((msg) => msg.payload[0]!);
expect(payloads).to.deep.eq(Array.from(Array(totalMsgs).keys()));
});
it("Ordered Callback - Backward", async function () {
this.timeout(15_000);
const totalMsgs = 18;
for (let i = 0; i < totalMsgs; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic
})
)
).to.be.true;
await delay(1); // to ensure each timestamp is unique.
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
let messages: IMessage[] = [];
await waku.store.queryWithOrderedCallback(
[TestDecoder],
async (msg) => {
messages.push(msg);
},
{
pageDirection: PageDirection.BACKWARD
}
);
messages = messages.reverse();
expect(messages?.length).eq(totalMsgs);
const payloads = messages.map((msg) => msg.payload![0]!);
expect(payloads).to.deep.eq(Array.from(Array(totalMsgs).keys()));
});
it("Generator, with asymmetric & symmetric encrypted messages", async function () {
this.timeout(15_000);
const asymText = "This message is encrypted for me using asymmetric";
const asymTopic = "/test/1/asymmetric/proto";
const symText =
"This message is encrypted for me using symmetric encryption";
const symTopic = "/test/1/symmetric/proto";
const clearText = "This is a clear text message for everyone to read";
const otherText =
"This message is not for and I must not be able to read it";
const timestamp = new Date();
const asymMsg = { payload: utf8ToBytes(asymText), timestamp };
const symMsg = {
payload: utf8ToBytes(symText),
timestamp: new Date(timestamp.valueOf() + 1)
};
const clearMsg = {
payload: utf8ToBytes(clearText),
timestamp: new Date(timestamp.valueOf() + 2)
};
const otherMsg = {
payload: utf8ToBytes(otherText),
timestamp: new Date(timestamp.valueOf() + 3)
};
const privateKey = generatePrivateKey();
const symKey = generateSymmetricKey();
const publicKey = getPublicKey(privateKey);
const eciesEncoder = createEciesEncoder({
contentTopic: asymTopic,
publicKey
});
const symEncoder = createSymEncoder({
contentTopic: symTopic,
symKey
});
const otherEncoder = createEciesEncoder({
contentTopic: TestContentTopic,
publicKey: getPublicKey(generatePrivateKey())
});
const eciesDecoder = createEciesDecoder(asymTopic, privateKey);
const symDecoder = createSymDecoder(symTopic, symKey);
const [waku1, waku2, nimWakuMultiaddr] = await Promise.all([
createLightNode({
staticNoiseKey: NOISE_KEY_1
}).then((waku) => waku.start().then(() => waku)),
createLightNode({
staticNoiseKey: NOISE_KEY_2
}).then((waku) => waku.start().then(() => waku)),
nwaku.getMultiaddrWithId()
]);
log("Waku nodes created");
await Promise.all([
waku1.dial(nimWakuMultiaddr),
waku2.dial(nimWakuMultiaddr)
]);
log("Waku nodes connected to nwaku");
await waitForRemotePeer(waku1, [Protocols.LightPush]);
log("Sending messages using light push");
await Promise.all([
waku1.lightPush.send(eciesEncoder, asymMsg),
waku1.lightPush.send(symEncoder, symMsg),
waku1.lightPush.send(otherEncoder, otherMsg),
waku1.lightPush.send(TestEncoder, clearMsg)
]);
await waitForRemotePeer(waku2, [Protocols.Store]);
const messages: DecodedMessage[] = [];
log("Retrieve messages from store");
for await (const msgPromises of waku2.store.queryGenerator([
eciesDecoder,
symDecoder,
TestDecoder
])) {
for (const promise of msgPromises) {
const msg = await promise;
if (msg) {
messages.push(msg);
}
}
}
// Messages are ordered from oldest to latest within a page (1 page query)
expect(bytesToUtf8(messages[0].payload!)).to.eq(asymText);
expect(bytesToUtf8(messages[1].payload!)).to.eq(symText);
expect(bytesToUtf8(messages[2].payload!)).to.eq(clearText);
expect(messages?.length).eq(3);
!!waku1 && waku1.stop().catch((e) => console.log("Waku failed to stop", e));
!!waku2 && waku2.stop().catch((e) => console.log("Waku failed to stop", e));
});
it("Ordered callback, using start and end time", async function () {
this.timeout(20000);
const now = new Date();
const startTime = new Date();
// Set start time 15 seconds in the past
startTime.setTime(now.getTime() - 15 * 1000);
const message1Timestamp = new Date();
// Set first message was 10 seconds in the past
message1Timestamp.setTime(now.getTime() - 10 * 1000);
const message2Timestamp = new Date();
// Set second message 2 seconds in the past
message2Timestamp.setTime(now.getTime() - 2 * 1000);
const messageTimestamps = [message1Timestamp, message2Timestamp];
const endTime = new Date();
// Set end time 1 second in the past
endTime.setTime(now.getTime() - 1000);
for (let i = 0; i < 2; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic,
timestamp: messageTimestamps[i]
})
)
).to.be.true;
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const firstMessages: IMessage[] = [];
await waku.store.queryWithOrderedCallback(
[TestDecoder],
(msg) => {
if (msg) {
firstMessages.push(msg);
}
},
{
timeFilter: { startTime, endTime: message1Timestamp }
}
);
const bothMessages: IMessage[] = [];
await waku.store.queryWithOrderedCallback(
[TestDecoder],
async (msg) => {
bothMessages.push(msg);
},
{
timeFilter: {
startTime,
endTime
}
}
);
expect(firstMessages?.length).eq(1);
expect(firstMessages[0].payload![0]!).eq(0);
expect(bothMessages?.length).eq(2);
});
it("Ordered callback, aborts when callback returns true", async function () {
this.timeout(15_000);
const totalMsgs = 20;
for (let i = 0; i < totalMsgs; i++) {
expect(
await nwaku.sendMessage(
NimGoNode.toMessageRpcQuery({
payload: new Uint8Array([i]),
contentTopic: TestContentTopic
})
)
).to.be.true;
await delay(1); // to ensure each timestamp is unique.
}
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(await nwaku.getMultiaddrWithId());
await waitForRemotePeer(waku, [Protocols.Store]);
const desiredMsgs = 14;
const messages: IMessage[] = [];
await waku.store.queryWithOrderedCallback(
[TestDecoder],
async (msg) => {
messages.push(msg);
return messages.length >= desiredMsgs;
},
{ pageSize: 7 }
);
expect(messages?.length).eq(desiredMsgs);
});
});
// Same store-query flow as above, but with nwaku configured on a custom
// pubsub topic instead of the default one.
describe("Waku Store, custom pubsub topic", () => {
  const customPubSubTopic = "/waku/2/custom-dapp/proto";
  let waku: LightNode;
  let nwaku: NimGoNode;

  // Fresh nwaku node per test, relaying/storing on the custom topic.
  beforeEach(async function () {
    this.timeout(15_000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.start({
      store: true,
      topic: customPubSubTopic,
      relay: true
    });
  });

  // Best-effort teardown: stop failures are logged, never rethrown.
  afterEach(async function () {
    !!nwaku &&
      nwaku.stop().catch((e) => console.log("Nwaku failed to stop", e));
    !!waku && waku.stop().catch((e) => console.log("Waku failed to stop", e));
  });

  it("Generator, custom pubsub topic", async function () {
    this.timeout(15_000);
    const totalMsgs = 20;
    // Seed the store via nwaku's RPC, publishing on the custom topic.
    for (let i = 0; i < totalMsgs; i++) {
      expect(
        await nwaku.sendMessage(
          NimGoNode.toMessageRpcQuery({
            payload: new Uint8Array([i]),
            contentTopic: TestContentTopic
          }),
          customPubSubTopic
        )
      ).to.be.true;
    }

    waku = await createLightNode({
      staticNoiseKey: NOISE_KEY_1,
      pubSubTopic: customPubSubTopic
    });
    await waku.start();
    await waku.dial(await nwaku.getMultiaddrWithId());
    await waitForRemotePeer(waku, [Protocols.Store]);

    // Collect every stored message; each must carry the custom pubsub topic.
    const messages: IMessage[] = [];
    let promises: Promise<void>[] = [];
    for await (const msgPromises of waku.store.queryGenerator([TestDecoder])) {
      const _promises = msgPromises.map(async (promise) => {
        const msg = await promise;
        if (msg) {
          messages.push(msg);
          expect(msg.pubSubTopic).to.eq(customPubSubTopic);
        }
      });
      promises = promises.concat(_promises);
    }
    await Promise.all(promises);

    expect(messages?.length).eq(totalMsgs);
    // The first message sent (payload byte 0) must be present somewhere.
    const result = messages?.findIndex((msg) => {
      return msg.payload![0]! === 0;
    });
    expect(result).to.not.eq(-1);
  });
});

View File

@ -0,0 +1,203 @@
import { createCursor, DecodedMessage, DefaultPubSubTopic } from "@waku/core";
import type { LightNode } from "@waku/interfaces";
import { bytesToUtf8 } from "@waku/utils/bytes";
import { expect } from "chai";
import { makeLogFileName, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
customPubSubTopic,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
totalMsgs
} from "./utils.js";
describe("Waku Store, cursor", function () {
this.timeout(15000);
let waku: LightNode;
let waku2: LightNode;
let nwaku: NimGoNode;
beforeEach(async function () {
this.timeout(15000);
nwaku = new NimGoNode(makeLogFileName(this));
await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
await nwaku.ensureSubscriptions();
});
afterEach(async function () {
this.timeout(15000);
await tearDownNodes(nwaku, [waku, waku2]);
});
// Parameterised over [cursorIndex, messageCount]: create a cursor at a given
// position and verify the follow-up query returns exactly the messages that
// come after it.
[
  [2, 4],
  [0, 20],
  [10, 40],
  [19, 20],
  [19, 50],
  [110, 120]
].forEach(([cursorIndex, messageCount]) => {
  it(`Passing a valid cursor at ${cursorIndex} index when there are ${messageCount} messages`, async function () {
    await sendMessages(
      nwaku,
      messageCount,
      TestContentTopic,
      DefaultPubSubTopic
    );
    waku = await startAndConnectLightNode(nwaku);

    // messages in reversed order (first message at last index)
    const messages: DecodedMessage[] = [];
    for await (const page of waku.store.queryGenerator([TestDecoder])) {
      for await (const msg of page.reverse()) {
        messages.push(msg as DecodedMessage);
      }
    }

    // create cursor to extract messages after the cursorIndex
    const cursor = await createCursor(messages[cursorIndex]);

    const messagesAfterCursor: DecodedMessage[] = [];
    for await (const page of waku.store.queryGenerator([TestDecoder], {
      cursor
    })) {
      for await (const msg of page.reverse()) {
        if (msg) {
          messagesAfterCursor.push(msg as DecodedMessage);
        }
      }
    }

    // Everything before (and including) the cursor must be excluded.
    expect(messages.length).be.eql(messageCount);
    expect(messagesAfterCursor.length).be.eql(messageCount - cursorIndex - 1);
    if (cursorIndex == messages.length - 1) {
      // in this case the cursor will return nothing because it points at the end of the list
      expect(messagesAfterCursor).be.eql([]);
    } else {
      // First message after the cursor is the cursor message's successor;
      // the last one matches the final message in the full result set.
      expect(bytesToUtf8(messagesAfterCursor[0].payload)).to.be.eq(
        bytesToUtf8(messages[cursorIndex + 1].payload)
      );
      expect(
        bytesToUtf8(
          messagesAfterCursor[messagesAfterCursor.length - 1].payload
        )
      ).to.be.eq(bytesToUtf8(messages[messages.length - 1].payload));
    }
  });
});
it("Reusing cursor across nodes", async function () {
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
waku = await startAndConnectLightNode(nwaku);
waku2 = await startAndConnectLightNode(nwaku);
// messages in reversed order (first message at last index)
const messages: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
for await (const msg of page.reverse()) {
messages.push(msg as DecodedMessage);
}
}
// create cursor to extract messages after the cursorIndex
const cursor = await createCursor(messages[5]);
// query node2 with the cursor from node1
const messagesAfterCursor: DecodedMessage[] = [];
for await (const page of waku2.store.queryGenerator([TestDecoder], {
cursor
})) {
for await (const msg of page.reverse()) {
if (msg) {
messagesAfterCursor.push(msg as DecodedMessage);
}
}
}
expect(messages.length).be.eql(totalMsgs);
expect(messagesAfterCursor.length).be.eql(totalMsgs - 6);
expect(bytesToUtf8(messagesAfterCursor[0].payload)).to.be.eq(
bytesToUtf8(messages[6].payload)
);
expect(
bytesToUtf8(messagesAfterCursor[messagesAfterCursor.length - 1].payload)
).to.be.eq(bytesToUtf8(messages[messages.length - 1].payload));
});
it("Passing cursor with wrong message digest", async function () {
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
waku = await startAndConnectLightNode(nwaku);
const messages: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
for await (const msg of page.reverse()) {
messages.push(msg as DecodedMessage);
}
}
const cursor = await createCursor(messages[5]);
// setting a wrong digest
cursor.digest = new Uint8Array([]);
const messagesAfterCursor: DecodedMessage[] = [];
try {
for await (const page of waku.store.queryGenerator([TestDecoder], {
cursor
})) {
for await (const msg of page.reverse()) {
if (msg) {
messagesAfterCursor.push(msg as DecodedMessage);
}
}
}
// Should return same as go-waku. Raised bug: https://github.com/waku-org/nwaku/issues/2117
expect(messagesAfterCursor.length).to.eql(0);
} catch (error) {
if (
nwaku.type() === "go-waku" &&
typeof error === "string" &&
error.includes("History response contains an Error: INVALID_CURSOR")
) {
return;
}
throw error instanceof Error
? new Error(`Unexpected error: ${error.message}`)
: error;
}
});
it("Passing cursor with wrong pubSubTopic", async function () {
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
waku = await startAndConnectLightNode(nwaku);
const messages: DecodedMessage[] = [];
for await (const page of waku.store.queryGenerator([TestDecoder])) {
for await (const msg of page.reverse()) {
messages.push(msg as DecodedMessage);
}
}
messages[5].pubSubTopic = customPubSubTopic;
const cursor = await createCursor(messages[5]);
try {
for await (const page of waku.store.queryGenerator([TestDecoder], {
cursor
})) {
page;
}
throw new Error("Cursor with wrong pubsubtopic was accepted");
} catch (err) {
if (
!(err instanceof Error) ||
!err.message.includes(
`Cursor pubsub topic (${customPubSubTopic}) does not match decoder pubsub topic (${DefaultPubSubTopic})`
)
) {
throw err;
}
}
});
});

View File

@ -0,0 +1,224 @@
import { DefaultPubSubTopic } from "@waku/core";
import { IMessage, type LightNode } from "@waku/interfaces";
import { expect } from "chai";
import { makeLogFileName, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
customPubSubTopic,
customTestDecoder,
processQueriedMessages,
startAndConnectLightNode,
TestDecoder
} from "./utils.js";
// Negative-path tests for the Waku Store protocol: querying a pubsub
// topic that was not configured, querying multiple pubsub topics at
// once, querying with no decoders, and querying an empty store.
describe("Waku Store, error handling", function () {
  this.timeout(15000);
  let waku: LightNode;
  let nwaku: NimGoNode;
  // Runs `runQuery`, expecting it to throw an Error whose message
  // contains `expectedError`. Fails the test if the query succeeds or
  // throws something else. `apiName` makes the failure message name the
  // API actually under test (previously every variant claimed
  // "QueryGenerator" had failed, even for the callback APIs).
  async function expectQueryToFail(
    runQuery: () => Promise<void>,
    expectedError: string,
    apiName: string
  ): Promise<void> {
    try {
      await runQuery();
      throw new Error(`${apiName} was successful but was expected to fail`);
    } catch (err) {
      if (!(err instanceof Error) || !err.message.includes(expectedError)) {
        throw err;
      }
    }
  }
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
    await nwaku.ensureSubscriptions();
    waku = await startAndConnectLightNode(nwaku);
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes(nwaku, waku);
  });
  it("Query Generator, Wrong PubSubTopic", async function () {
    await expectQueryToFail(
      async () => {
        for await (const msgPromises of waku.store.queryGenerator([
          customTestDecoder
        ])) {
          void msgPromises;
        }
      },
      `PubSub topic ${customPubSubTopic} has not been configured on this instance. Configured topics are: ${DefaultPubSubTopic}`,
      "queryGenerator"
    );
  });
  it("Query Generator, Multiple PubSubTopics", async function () {
    await expectQueryToFail(
      async () => {
        for await (const msgPromises of waku.store.queryGenerator([
          TestDecoder,
          customTestDecoder
        ])) {
          void msgPromises;
        }
      },
      "API does not support querying multiple pubsub topics at once",
      "queryGenerator"
    );
  });
  it("Query Generator, No Decoder", async function () {
    await expectQueryToFail(
      async () => {
        for await (const msgPromises of waku.store.queryGenerator([])) {
          void msgPromises;
        }
      },
      "No decoders provided",
      "queryGenerator"
    );
  });
  it("Query Generator, No message returned", async function () {
    // An empty store is not an error: the query completes with no messages.
    const messages = await processQueriedMessages(
      waku,
      [TestDecoder],
      DefaultPubSubTopic
    );
    expect(messages?.length).eq(0);
  });
  it("Query with Ordered Callback, Wrong PubSubTopic", async function () {
    await expectQueryToFail(
      () =>
        waku.store.queryWithOrderedCallback([customTestDecoder], async () => {}),
      `PubSub topic ${customPubSubTopic} has not been configured on this instance. Configured topics are: ${DefaultPubSubTopic}`,
      "queryWithOrderedCallback"
    );
  });
  it("Query with Ordered Callback, Multiple PubSubTopics", async function () {
    await expectQueryToFail(
      () =>
        waku.store.queryWithOrderedCallback(
          [TestDecoder, customTestDecoder],
          async () => {}
        ),
      "API does not support querying multiple pubsub topics at once",
      "queryWithOrderedCallback"
    );
  });
  it("Query with Ordered Callback, No Decoder", async function () {
    await expectQueryToFail(
      () => waku.store.queryWithOrderedCallback([], async () => {}),
      "No decoders provided",
      "queryWithOrderedCallback"
    );
  });
  it("Query with Ordered Callback, No message returned", async function () {
    const messages: IMessage[] = [];
    await waku.store.queryWithOrderedCallback([TestDecoder], async (msg) => {
      messages.push(msg);
    });
    expect(messages?.length).eq(0);
  });
  it("Query with Promise Callback, Wrong PubSubTopic", async function () {
    await expectQueryToFail(
      () =>
        waku.store.queryWithPromiseCallback([customTestDecoder], async () => {}),
      `PubSub topic ${customPubSubTopic} has not been configured on this instance. Configured topics are: ${DefaultPubSubTopic}`,
      "queryWithPromiseCallback"
    );
  });
  it("Query with Promise Callback, Multiple PubSubTopics", async function () {
    await expectQueryToFail(
      () =>
        waku.store.queryWithPromiseCallback(
          [TestDecoder, customTestDecoder],
          async () => {}
        ),
      "API does not support querying multiple pubsub topics at once",
      "queryWithPromiseCallback"
    );
  });
  it("Query with Promise Callback, No Decoder", async function () {
    await expectQueryToFail(
      () => waku.store.queryWithPromiseCallback([], async () => {}),
      "No decoders provided",
      "queryWithPromiseCallback"
    );
  });
  it("Query with Promise Callback, No message returned", async function () {
    const messages: IMessage[] = [];
    await waku.store.queryWithPromiseCallback(
      [TestDecoder],
      async (msgPromise) => {
        const msg = await msgPromise;
        if (msg) {
          messages.push(msg);
        }
      }
    );
    expect(messages?.length).eq(0);
  });
});

View File

@ -0,0 +1,333 @@
import {
createDecoder,
DecodedMessage,
DefaultPubSubTopic,
waitForRemotePeer
} from "@waku/core";
import type { IMessage, LightNode } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import {
createDecoder as createEciesDecoder,
createEncoder as createEciesEncoder,
generatePrivateKey,
getPublicKey
} from "@waku/message-encryption/ecies";
import {
createDecoder as createSymDecoder,
createEncoder as createSymEncoder,
generateSymmetricKey
} from "@waku/message-encryption/symmetric";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
import isEqual from "lodash/isEqual";
import {
delay,
makeLogFileName,
MessageCollector,
NimGoNode,
tearDownNodes,
TEST_STRING
} from "../../src/index.js";
import {
customContentTopic,
log,
messageText,
processQueriedMessages,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
TestEncoder,
totalMsgs
} from "./utils.js";
// Decoder for the secondary content topic, bound to the default pubsub topic.
const secondDecoder = createDecoder(customContentTopic, DefaultPubSubTopic);
// End-to-end tests for the Waku Store protocol against a single
// nwaku/go-waku service node on the default pubsub topic.
describe("Waku Store, general", function () {
  this.timeout(15000);
  let waku: LightNode;
  let waku2: LightNode;
  let nwaku: NimGoNode;
  // Start a fresh service node (store + lightpush + relay) per test.
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
    await nwaku.ensureSubscriptions();
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes(nwaku, [waku, waku2]);
  });
  // All stored messages are retrievable via the query generator.
  it("Query generator for multiple messages", async function () {
    await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku);
    const messages = await processQueriedMessages(
      waku,
      [TestDecoder],
      DefaultPubSubTopic
    );
    expect(messages?.length).eq(totalMsgs);
    // checking that the message with text 0 exists
    const result = messages?.findIndex((msg) => {
      return msg.payload[0]! === 0;
    });
    expect(result).to.not.eq(-1);
  });
  // Payloads with unusual strings (unicode, control chars, …) survive a
  // store round trip intact.
  it("Query generator for multiple messages with different message text format", async function () {
    for (const testItem of TEST_STRING) {
      expect(
        await nwaku.sendMessage(
          NimGoNode.toMessageRpcQuery({
            payload: utf8ToBytes(testItem["value"]),
            contentTopic: TestContentTopic
          }),
          DefaultPubSubTopic
        )
      ).to.be.true;
      await delay(1); // to ensure each timestamp is unique.
    }
    waku = await startAndConnectLightNode(nwaku);
    const messageCollector = new MessageCollector();
    messageCollector.list = await processQueriedMessages(
      waku,
      [TestDecoder],
      DefaultPubSubTopic
    );
    // checking that all message sent were retrieved
    TEST_STRING.forEach((testItem) => {
      expect(
        messageCollector.hasMessage(TestContentTopic, testItem["value"])
      ).to.eq(true);
    });
  });
  // Passing several decoders retrieves messages from all their content topics.
  it("Query generator for multiple messages with multiple decoders", async function () {
    await nwaku.sendMessage(
      NimGoNode.toMessageRpcQuery({
        payload: utf8ToBytes("M1"),
        contentTopic: TestContentTopic
      }),
      DefaultPubSubTopic
    );
    await nwaku.sendMessage(
      NimGoNode.toMessageRpcQuery({
        payload: utf8ToBytes("M2"),
        contentTopic: customContentTopic
      }),
      DefaultPubSubTopic
    );
    waku = await startAndConnectLightNode(nwaku);
    const messageCollector = new MessageCollector();
    messageCollector.list = await processQueriedMessages(
      waku,
      [TestDecoder, secondDecoder],
      DefaultPubSubTopic
    );
    expect(messageCollector.hasMessage(TestContentTopic, "M1")).to.eq(true);
    expect(messageCollector.hasMessage(customContentTopic, "M2")).to.eq(true);
  });
  // Unusual content-topic strings can be stored and queried back.
  it("Query generator for multiple messages with different content topic format", async function () {
    for (const testItem of TEST_STRING) {
      expect(
        await nwaku.sendMessage(
          NimGoNode.toMessageRpcQuery({
            payload: utf8ToBytes(messageText),
            contentTopic: testItem["value"]
          }),
          DefaultPubSubTopic
        )
      ).to.be.true;
      await delay(1); // to ensure each timestamp is unique.
    }
    waku = await startAndConnectLightNode(nwaku);
    for (const testItem of TEST_STRING) {
      for await (const query of waku.store.queryGenerator([
        createDecoder(testItem["value"])
      ])) {
        for await (const msg of query) {
          expect(isEqual(msg!.payload, utf8ToBytes(messageText))).to.eq(true);
        }
      }
    }
  });
  // Per-message promise callback receives every stored message.
  it("Callback on promise", async function () {
    await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku);
    const messages: IMessage[] = [];
    await waku.store.queryWithPromiseCallback(
      [TestDecoder],
      async (msgPromise) => {
        const msg = await msgPromise;
        if (msg) {
          messages.push(msg);
        }
      }
    );
    expect(messages?.length).eq(totalMsgs);
    const result = messages?.findIndex((msg) => {
      return msg.payload[0]! === 0;
    });
    expect(result).to.not.eq(-1);
  });
  // Returning true from the callback aborts the query mid-page
  // (14 wanted, page size 7 → exactly 14 collected).
  it("Callback on promise, aborts when callback returns true", async function () {
    await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku);
    const desiredMsgs = 14;
    const messages: IMessage[] = [];
    await waku.store.queryWithPromiseCallback(
      [TestDecoder],
      async (msgPromise) => {
        const msg = await msgPromise;
        if (msg) {
          messages.push(msg);
        }
        return messages.length >= desiredMsgs;
      },
      { pageSize: 7 }
    );
    expect(messages?.length).eq(desiredMsgs);
  });
  // A second node can decrypt ECIES- and symmetric-encrypted messages
  // pulled from the store; a message encrypted for a foreign key is
  // silently dropped, leaving 3 of the 4 sent messages.
  it("Generator, with asymmetric & symmetric encrypted messages", async function () {
    const asymText = "This message is encrypted for me using asymmetric";
    const asymTopic = "/test/1/asymmetric/proto";
    const symText =
      "This message is encrypted for me using symmetric encryption";
    const symTopic = "/test/1/symmetric/proto";
    const clearText = "This is a clear text message for everyone to read";
    const otherText =
      "This message is not for and I must not be able to read it";
    // Strictly increasing timestamps fix the expected retrieval order.
    const timestamp = new Date();
    const asymMsg = { payload: utf8ToBytes(asymText), timestamp };
    const symMsg = {
      payload: utf8ToBytes(symText),
      timestamp: new Date(timestamp.valueOf() + 1)
    };
    const clearMsg = {
      payload: utf8ToBytes(clearText),
      timestamp: new Date(timestamp.valueOf() + 2)
    };
    const otherMsg = {
      payload: utf8ToBytes(otherText),
      timestamp: new Date(timestamp.valueOf() + 3)
    };
    const privateKey = generatePrivateKey();
    const symKey = generateSymmetricKey();
    const publicKey = getPublicKey(privateKey);
    const eciesEncoder = createEciesEncoder({
      contentTopic: asymTopic,
      publicKey
    });
    const symEncoder = createSymEncoder({
      contentTopic: symTopic,
      symKey
    });
    // Encrypted for a key we do not hold — must not be decodable below.
    const otherEncoder = createEciesEncoder({
      contentTopic: TestContentTopic,
      publicKey: getPublicKey(generatePrivateKey())
    });
    const eciesDecoder = createEciesDecoder(asymTopic, privateKey);
    const symDecoder = createSymDecoder(symTopic, symKey);
    waku = await startAndConnectLightNode(nwaku);
    waku2 = await startAndConnectLightNode(nwaku);
    // NOTE(review): startAndConnectLightNode already dials the service
    // node; the explicit dials below look redundant — confirm before
    // removing.
    const nimWakuMultiaddr = await nwaku.getMultiaddrWithId();
    await Promise.all([
      waku.dial(nimWakuMultiaddr),
      waku2.dial(nimWakuMultiaddr)
    ]);
    log("Waku nodes connected to nwaku");
    await waitForRemotePeer(waku, [Protocols.LightPush]);
    log("Sending messages using light push");
    await Promise.all([
      waku.lightPush.send(eciesEncoder, asymMsg),
      waku.lightPush.send(symEncoder, symMsg),
      waku.lightPush.send(otherEncoder, otherMsg),
      waku.lightPush.send(TestEncoder, clearMsg)
    ]);
    await waitForRemotePeer(waku2, [Protocols.Store]);
    const messages: DecodedMessage[] = [];
    log("Retrieve messages from store");
    for await (const query of waku2.store.queryGenerator([
      eciesDecoder,
      symDecoder,
      TestDecoder
    ])) {
      for await (const msg of query) {
        if (msg) {
          messages.push(msg as DecodedMessage);
        }
      }
    }
    // Messages are ordered from oldest to latest within a page (1 page query)
    expect(bytesToUtf8(messages[0].payload!)).to.eq(asymText);
    expect(bytesToUtf8(messages[1].payload!)).to.eq(symText);
    expect(bytesToUtf8(messages[2].payload!)).to.eq(clearText);
    expect(messages?.length).eq(3);
  });
  // Same abort-on-true contract, for the ordered-callback API.
  it("Ordered callback, aborts when callback returns true", async function () {
    await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku);
    const desiredMsgs = 14;
    const messages: IMessage[] = [];
    await waku.store.queryWithOrderedCallback(
      [TestDecoder],
      async (msg) => {
        messages.push(msg);
        return messages.length >= desiredMsgs;
      },
      { pageSize: 7 }
    );
    expect(messages?.length).eq(desiredMsgs);
  });
  // Volume test: paging works across many pages (2000 messages).
  it("Query generator for 2000 messages", async function () {
    this.timeout(40000);
    await sendMessages(nwaku, 2000, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku);
    const messages = await processQueriedMessages(
      waku,
      [TestDecoder],
      DefaultPubSubTopic
    );
    expect(messages?.length).eq(2000);
  });
});

View File

@ -0,0 +1,143 @@
import { DefaultPubSubTopic, waitForRemotePeer } from "@waku/core";
import type { IMessage, LightNode } from "@waku/interfaces";
import { createLightNode, Protocols } from "@waku/sdk";
import { expect } from "chai";
import {
makeLogFileName,
NimGoNode,
NOISE_KEY_1,
tearDownNodes
} from "../../src/index.js";
import {
customContentTopic,
customPubSubTopic,
customTestDecoder,
processQueriedMessages,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
totalMsgs
} from "./utils.js";
// Store tests exercising non-default pubsub topics, including two
// service nodes configured with different topics.
describe("Waku Store, custom pubsub topic", function () {
  this.timeout(15000);
  let waku: LightNode;
  let nwaku: NimGoNode;
  let nwaku2: NimGoNode;
  // The main service node subscribes to both the custom and the default
  // pubsub topic.
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.start({
      store: true,
      topic: [customPubSubTopic, DefaultPubSubTopic],
      relay: true
    });
    await nwaku.ensureSubscriptions([customPubSubTopic, DefaultPubSubTopic]);
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes([nwaku, nwaku2], waku);
  });
  // Messages published on the custom topic are queryable from it.
  it("Generator, custom pubsub topic", async function () {
    await sendMessages(nwaku, totalMsgs, customContentTopic, customPubSubTopic);
    waku = await startAndConnectLightNode(nwaku, [customPubSubTopic]);
    const messages = await processQueriedMessages(
      waku,
      [customTestDecoder],
      customPubSubTopic
    );
    expect(messages?.length).eq(totalMsgs);
    const result = messages?.findIndex((msg) => {
      return msg.payload![0]! === 0;
    });
    expect(result).to.not.eq(-1);
  });
  // A node configured with two pubsub topics can query each independently.
  it("Generator, 2 different pubsubtopics", async function () {
    this.timeout(10000);
    const totalMsgs = 10;
    await sendMessages(nwaku, totalMsgs, customContentTopic, customPubSubTopic);
    await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku, [
      customPubSubTopic,
      DefaultPubSubTopic
    ]);
    const customMessages = await processQueriedMessages(
      waku,
      [customTestDecoder],
      customPubSubTopic
    );
    expect(customMessages?.length).eq(totalMsgs);
    const result1 = customMessages?.findIndex((msg) => {
      return msg.payload![0]! === 0;
    });
    expect(result1).to.not.eq(-1);
    const testMessages = await processQueriedMessages(
      waku,
      [TestDecoder],
      DefaultPubSubTopic
    );
    expect(testMessages?.length).eq(totalMsgs);
    const result2 = testMessages?.findIndex((msg) => {
      return msg.payload![0]! === 0;
    });
    expect(result2).to.not.eq(-1);
  });
  // One light node dialing two service nodes, each serving a different
  // pubsub topic.
  it("Generator, 2 nwaku nodes each with different pubsubtopics", async function () {
    this.timeout(10000);
    // Set up and start a new nwaku node with Default PubSubtopic
    nwaku2 = new NimGoNode(makeLogFileName(this) + "2");
    await nwaku2.start({
      store: true,
      topic: [DefaultPubSubTopic],
      relay: true
    });
    await nwaku2.ensureSubscriptions([DefaultPubSubTopic]);
    const totalMsgs = 10;
    await sendMessages(nwaku, totalMsgs, customContentTopic, customPubSubTopic);
    await sendMessages(nwaku2, totalMsgs, TestContentTopic, DefaultPubSubTopic);
    waku = await createLightNode({
      staticNoiseKey: NOISE_KEY_1,
      pubSubTopics: [customPubSubTopic, DefaultPubSubTopic]
    });
    await waku.start();
    await waku.dial(await nwaku.getMultiaddrWithId());
    await waku.dial(await nwaku2.getMultiaddrWithId());
    await waitForRemotePeer(waku, [Protocols.Store]);
    let customMessages: IMessage[] = [];
    let testMessages: IMessage[] = [];
    // Re-query until both stores return all messages — store peer
    // selection is not deterministic with two connected store peers.
    // NOTE(review): this loop has no pause between iterations and is
    // bounded only by the mocha timeout; consider adding a small delay.
    while (
      customMessages.length != totalMsgs ||
      testMessages.length != totalMsgs
    ) {
      customMessages = await processQueriedMessages(
        waku,
        [customTestDecoder],
        customPubSubTopic
      );
      testMessages = await processQueriedMessages(
        waku,
        [TestDecoder],
        DefaultPubSubTopic
      );
    }
  });
});

View File

@ -0,0 +1,129 @@
import { DecodedMessage, DefaultPubSubTopic, PageDirection } from "@waku/core";
import type { IMessage, LightNode } from "@waku/interfaces";
import { expect } from "chai";
import { makeLogFileName, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
chunkAndReverseArray,
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
totalMsgs
} from "./utils.js";
// Verifies that messages arrive in the expected order for each paging
// direction across all three query APIs. Payloads are single bytes
// 0..totalMsgs-1, so the payload itself encodes send order.
describe("Waku Store, order", function () {
  this.timeout(15000);
  let waku: LightNode;
  let nwaku: NimGoNode;
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
    await nwaku.ensureSubscriptions();
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes(nwaku, waku);
  });
  [PageDirection.FORWARD, PageDirection.BACKWARD].forEach((pageDirection) => {
    it(`Query Generator - ${pageDirection}`, async function () {
      await sendMessages(
        nwaku,
        totalMsgs,
        TestContentTopic,
        DefaultPubSubTopic
      );
      waku = await startAndConnectLightNode(nwaku);
      const messages: IMessage[] = [];
      for await (const query of waku.store.queryGenerator([TestDecoder], {
        pageDirection: pageDirection
      })) {
        for await (const msg of query) {
          if (msg) {
            messages.push(msg as DecodedMessage);
          }
        }
      }
      // BACKWARD returns pages newest-first but keeps each page's
      // internal order ascending — chunkAndReverseArray models that
      // (default page size 10 assumed here).
      let expectedPayloads = Array.from(Array(totalMsgs).keys());
      if (pageDirection === PageDirection.BACKWARD) {
        expectedPayloads = chunkAndReverseArray(expectedPayloads, 10);
      }
      expect(messages?.length).eq(totalMsgs);
      const payloads = messages.map((msg) => msg.payload[0]!);
      expect(payloads).to.deep.eq(expectedPayloads);
    });
  });
  [PageDirection.FORWARD, PageDirection.BACKWARD].forEach((pageDirection) => {
    it(`Promise Callback - ${pageDirection}`, async function () {
      await sendMessages(
        nwaku,
        totalMsgs,
        TestContentTopic,
        DefaultPubSubTopic
      );
      waku = await startAndConnectLightNode(nwaku);
      const messages: IMessage[] = [];
      await waku.store.queryWithPromiseCallback(
        [TestDecoder],
        async (msgPromise) => {
          const msg = await msgPromise;
          if (msg) {
            messages.push(msg);
          }
        },
        {
          pageDirection: pageDirection
        }
      );
      // Same page-wise expectation as the Query Generator case above.
      let expectedPayloads = Array.from(Array(totalMsgs).keys());
      if (pageDirection === PageDirection.BACKWARD) {
        expectedPayloads = chunkAndReverseArray(expectedPayloads, 10);
      }
      expect(messages?.length).eq(totalMsgs);
      const payloads = messages.map((msg) => msg.payload[0]!);
      expect(payloads).to.deep.eq(expectedPayloads);
    });
  });
  [PageDirection.FORWARD, PageDirection.BACKWARD].forEach((pageDirection) => {
    it(`Ordered Callback - ${pageDirection}`, async function () {
      await sendMessages(
        nwaku,
        totalMsgs,
        TestContentTopic,
        DefaultPubSubTopic
      );
      waku = await startAndConnectLightNode(nwaku);
      const messages: IMessage[] = [];
      await waku.store.queryWithOrderedCallback(
        [TestDecoder],
        async (msg) => {
          messages.push(msg);
        },
        {
          pageDirection: pageDirection
        }
      );
      // The ordered callback delivers fully ordered results; for
      // BACKWARD that is newest-first, so reversing yields send order.
      if (pageDirection === PageDirection.BACKWARD) {
        messages.reverse();
      }
      expect(messages?.length).eq(totalMsgs);
      const payloads = messages.map((msg) => msg.payload[0]!);
      expect(payloads).to.deep.eq(Array.from(Array(totalMsgs).keys()));
    });
  });
});

View File

@ -0,0 +1,99 @@
import { DefaultPubSubTopic } from "@waku/core";
import type { LightNode } from "@waku/interfaces";
import { expect } from "chai";
import { makeLogFileName, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder
} from "./utils.js";
// Tests the pageSize query option, including implementation-specific
// clamping: 0 falls back to the node default, values above 100 clamp
// to 100.
describe("Waku Store, page size", function () {
  this.timeout(15000);
  let waku: LightNode;
  let nwaku: NimGoNode;
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
    await nwaku.ensureSubscriptions();
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes(nwaku, waku);
  });
  // [pageSize, messageCount] pairs covering default, tiny, exact-fit,
  // off-by-one, and above-maximum page sizes.
  [
    [0, 110],
    [1, 4],
    [3, 20],
    [10, 10],
    [11, 10],
    [19, 20],
    [110, 120]
  ].forEach(([pageSize, messageCount]) => {
    it(`Passing page size ${pageSize} when there are ${messageCount} messages`, async function () {
      await sendMessages(
        nwaku,
        messageCount,
        TestContentTopic,
        DefaultPubSubTopic
      );
      // Determine effectivePageSize for test expectations
      // (pageSize 0 → implementation default: 100 for go-waku, 20 for
      // nwaku; anything over 100 clamps to 100).
      let effectivePageSize = pageSize;
      if (pageSize === 0) {
        if (nwaku.type() == "go-waku") {
          effectivePageSize = 100;
        } else {
          effectivePageSize = 20;
        }
      } else if (pageSize > 100) {
        effectivePageSize = 100;
      }
      waku = await startAndConnectLightNode(nwaku);
      let messagesRetrieved = 0;
      for await (const query of waku.store.queryGenerator([TestDecoder], {
        pageSize: pageSize
      })) {
        // Calculate expected page size
        // (the last page may be smaller than effectivePageSize).
        const expectedPageSize = Math.min(
          effectivePageSize,
          messageCount - messagesRetrieved
        );
        expect(query.length).eq(expectedPageSize);
        for await (const msg of query) {
          if (msg) {
            messagesRetrieved++;
          }
        }
      }
      expect(messagesRetrieved).eq(messageCount);
    });
  });
  // Possible issue here because pageSize differs across implementations
  it("Default pageSize", async function () {
    await sendMessages(nwaku, 20, TestContentTopic, DefaultPubSubTopic);
    waku = await startAndConnectLightNode(nwaku);
    let messagesRetrieved = 0;
    for await (const query of waku.store.queryGenerator([TestDecoder])) {
      // assumes the js-waku default page size is 10 — TODO confirm
      expect(query.length).eq(10);
      for await (const msg of query) {
        if (msg) {
          messagesRetrieved++;
        }
      }
    }
    expect(messagesRetrieved).eq(20);
  });
});

View File

@ -0,0 +1,110 @@
import { DecodedMessage, DefaultPubSubTopic, PageDirection } from "@waku/core";
import type { IMessage, LightNode } from "@waku/interfaces";
import { makeLogFileName, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
sendMessages,
startAndConnectLightNode,
TestContentTopic,
TestDecoder,
totalMsgs
} from "./utils.js";
// Verifies that retrieved messages are sorted by timestamp for both
// page directions, via the query generator (per page) and the ordered
// callback (across the whole result set).
describe("Waku Store, sorting", function () {
  this.timeout(15000);
  let waku: LightNode;
  let nwaku: NimGoNode;
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
    await nwaku.ensureSubscriptions();
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes(nwaku, waku);
  });
  [PageDirection.FORWARD, PageDirection.BACKWARD].forEach((pageDirection) => {
    it(`Query Generator sorting by timestamp while page direction is ${pageDirection}`, async function () {
      await sendMessages(
        nwaku,
        totalMsgs,
        TestContentTopic,
        DefaultPubSubTopic
      );
      waku = await startAndConnectLightNode(nwaku);
      for await (const query of waku.store.queryGenerator([TestDecoder], {
        // Bug fix: this was hard-coded to PageDirection.FORWARD, so the
        // BACKWARD iteration of this test never exercised backward paging.
        pageDirection: pageDirection
      })) {
        const page: IMessage[] = [];
        for await (const msg of query) {
          if (msg) {
            page.push(msg as DecodedMessage);
          }
        }
        // Extract timestamps
        const timestamps = page.map(
          (msg) => msg.timestamp as unknown as bigint
        );
        // Within a single page messages are expected oldest-to-latest
        // regardless of page direction (direction only affects page order).
        for (let i = 1; i < timestamps.length; i++) {
          if (timestamps[i] < timestamps[i - 1]) {
            throw new Error(
              `Messages are not sorted by timestamp. Found out of order at index ${i}`
            );
          }
        }
      }
    });
  });
  [PageDirection.FORWARD, PageDirection.BACKWARD].forEach((pageDirection) => {
    it(`Ordered Callback sorting by timestamp while page direction is ${pageDirection}`, async function () {
      await sendMessages(
        nwaku,
        totalMsgs,
        TestContentTopic,
        DefaultPubSubTopic
      );
      waku = await startAndConnectLightNode(nwaku);
      const messages: IMessage[] = [];
      await waku.store.queryWithOrderedCallback(
        [TestDecoder],
        async (msg) => {
          messages.push(msg);
        },
        {
          pageDirection: pageDirection
        }
      );
      // Extract timestamps
      const timestamps = messages.map(
        (msg) => msg.timestamp as unknown as bigint
      );
      // FORWARD must be globally ascending, BACKWARD globally descending.
      for (let i = 1; i < timestamps.length; i++) {
        if (
          pageDirection === PageDirection.FORWARD &&
          timestamps[i] < timestamps[i - 1]
        ) {
          throw new Error(
            `Messages are not sorted by timestamp in FORWARD direction. Found out of order at index ${i}`
          );
        } else if (
          pageDirection === PageDirection.BACKWARD &&
          timestamps[i] > timestamps[i - 1]
        ) {
          throw new Error(
            `Messages are not sorted by timestamp in BACKWARD direction. Found out of order at index ${i}`
          );
        }
      }
    });
  });
});

View File

@ -0,0 +1,119 @@
import type { IMessage, LightNode } from "@waku/interfaces";
import { expect } from "chai";
import { makeLogFileName, NimGoNode, tearDownNodes } from "../../src/index.js";
import {
adjustDate,
startAndConnectLightNode,
TestContentTopic,
TestDecoder
} from "./utils.js";
// Tests the timeFilter query option: a message is returned only when
// its timestamp falls inside [startTime, endTime] and the node accepts
// the message's timestamp in the first place.
describe("Waku Store, time filter", function () {
  this.timeout(15000);
  let waku: LightNode;
  let nwaku: NimGoNode;
  beforeEach(async function () {
    this.timeout(15000);
    nwaku = new NimGoNode(makeLogFileName(this));
    await nwaku.startWithRetries({ store: true, lightpush: true, relay: true });
    await nwaku.ensureSubscriptions();
  });
  afterEach(async function () {
    this.timeout(15000);
    await tearDownNodes(nwaku, waku);
  });
  // [msgTime offset from now, startTime offset, endTime offset], all in
  // ms relative to the message timestamp.
  [
    [-19000, -10, 10],
    [-19000, 1, 4],
    [-19000, -2, -1],
    // [-19000, 0, 1000], // skipped for now because it fails on gowaku which returns messages > startTime
    [-19000, -1000, 0],
    [19000, -10, 10], // message in the future
    [-19000, 10, -10] // startTime is newer than endTime
  ].forEach(([msgTime, startTime, endTime]) => {
    it(`msgTime: ${msgTime} ms from now, startTime: ${
      msgTime + startTime
    }, endTime: ${msgTime + endTime}`, async function () {
      const msgTimestamp = adjustDate(new Date(), msgTime);
      expect(
        await nwaku.sendMessage(
          NimGoNode.toMessageRpcQuery({
            payload: new Uint8Array([0]),
            contentTopic: TestContentTopic,
            timestamp: msgTimestamp
          })
        )
      ).to.be.true;
      waku = await startAndConnectLightNode(nwaku);
      const messages: IMessage[] = [];
      await waku.store.queryWithOrderedCallback(
        [TestDecoder],
        (msg) => {
          if (msg) {
            messages.push(msg);
          }
        },
        {
          timeFilter: {
            startTime: adjustDate(msgTimestamp, startTime),
            endTime: adjustDate(msgTimestamp, endTime)
          }
        }
      );
      // in this context 0 is the messageTimestamp
      // The message is excluded when the window lies entirely before or
      // entirely after it, or when the window is inverted.
      if (
        (startTime > 0 && endTime > 0) ||
        (startTime < 0 && endTime < 0) ||
        startTime > endTime
      ) {
        expect(messages.length).eq(0);
      } else {
        expect(messages.length).eq(1);
        expect(messages[0].payload![0]!).eq(0);
      }
    });
  });
  // Messages whose timestamp deviates too far from node time are not
  // returned even when the time filter brackets them.
  [-20000, 40000].forEach((msgTime) => {
    it(`Timestamp too far from node time: ${msgTime} ms from now`, async function () {
      const msgTimestamp = adjustDate(new Date(), msgTime);
      expect(
        await nwaku.sendMessage(
          NimGoNode.toMessageRpcQuery({
            payload: new Uint8Array([0]),
            contentTopic: TestContentTopic,
            timestamp: msgTimestamp
          })
        )
      ).to.be.true;
      waku = await startAndConnectLightNode(nwaku);
      const messages: IMessage[] = [];
      await waku.store.queryWithOrderedCallback(
        [TestDecoder],
        (msg) => {
          if (msg) {
            messages.push(msg);
          }
        },
        {
          timeFilter: {
            startTime: adjustDate(msgTimestamp, -1000),
            endTime: adjustDate(msgTimestamp, 1000)
          }
        }
      );
      expect(messages.length).eq(0);
    });
  });
});

View File

@ -0,0 +1,97 @@
import {
createDecoder,
createEncoder,
DecodedMessage,
Decoder,
DefaultPubSubTopic,
waitForRemotePeer
} from "@waku/core";
import { LightNode, Protocols } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { expect } from "chai";
import debug from "debug";
import { delay, NimGoNode, NOISE_KEY_1 } from "../../src";
// Shared debug logger for the store test suite.
export const log = debug("waku:test:store");
// Primary content topic used by most store tests (default pubsub topic).
export const TestContentTopic = "/test/1/waku-store/utf8";
export const TestEncoder = createEncoder({ contentTopic: TestContentTopic });
export const TestDecoder = createDecoder(TestContentTopic);
// Secondary content topic and custom pubsub topic for multi-topic tests.
export const customContentTopic = "/test/2/waku-store/utf8";
export const customPubSubTopic = "/waku/2/custom-dapp/proto";
export const customTestDecoder = createDecoder(
  customContentTopic,
  customPubSubTopic
);
// Default number of messages sent by sendMessages-based tests.
export const totalMsgs = 20;
export const messageText = "Store Push works!";
// Publishes `numMessages` one-byte messages ([0], [1], …) to `instance`
// on `contentTopic` / `pubSubTopic`, asserting every send succeeds.
// Sends are sequential with a 1 ms pause so each message gets a unique
// timestamp.
export async function sendMessages(
  instance: NimGoNode,
  numMessages: number,
  contentTopic: string,
  pubSubTopic: string
): Promise<void> {
  for (let msgIndex = 0; msgIndex < numMessages; msgIndex++) {
    const rpcMessage = NimGoNode.toMessageRpcQuery({
      payload: new Uint8Array([msgIndex]),
      contentTopic: contentTopic
    });
    const sendSucceeded = await instance.sendMessage(rpcMessage, pubSubTopic);
    expect(sendSucceeded).to.be.true;
    await delay(1); // to ensure each timestamp is unique.
  }
}
// Drains every page produced by the store query generator into a flat
// array, asserting that each decoded message arrived on `expectedTopic`.
export async function processQueriedMessages(
  instance: LightNode,
  decoders: Array<Decoder>,
  expectedTopic?: string
): Promise<DecodedMessage[]> {
  const collected: DecodedMessage[] = [];
  for await (const page of instance.store.queryGenerator(decoders)) {
    for await (const msg of page) {
      if (!msg) {
        continue;
      }
      expect(msg.pubSubTopic).to.eq(expectedTopic);
      collected.push(msg as DecodedMessage);
    }
  }
  return collected;
}
// Creates and starts a light node configured for `pubSubTopics`, dials
// the given service node, and waits until the Store protocol is ready.
export async function startAndConnectLightNode(
  instance: NimGoNode,
  pubSubTopics: string[] = [DefaultPubSubTopic]
): Promise<LightNode> {
  const node = await createLightNode({
    pubSubTopics: pubSubTopics,
    staticNoiseKey: NOISE_KEY_1
  });
  await node.start();
  const serviceNodeAddr = await instance.getMultiaddrWithId();
  await node.dial(serviceNodeAddr);
  await waitForRemotePeer(node, [Protocols.Store]);
  log("Waku node created");
  return node;
}
// Models the order produced by a BACKWARD store query: pages (chunks)
// arrive newest-first, while each page keeps its internal ascending
// order. Reversing a concatenation of individually-reversed chunks is
// equivalent to emitting the chunks in reverse order, un-reversed.
export function chunkAndReverseArray(
  arr: number[],
  chunkSize: number
): number[] {
  const chunks: number[][] = [];
  for (let start = 0; start < arr.length; start += chunkSize) {
    chunks.push(arr.slice(start, start + chunkSize));
  }
  return chunks.reverse().flat();
}
// Returns a new Date offset from `baseDate` by `adjustMs` milliseconds
// (negative values shift into the past). `baseDate` is not mutated.
export const adjustDate = (baseDate: Date, adjustMs: number): Date =>
  new Date(baseDate.getTime() + adjustMs);

View File

@ -50,7 +50,7 @@ describe("Util: toAsyncIterator: Filter", () => {
afterEach(async function () {
this.timeout(10000);
tearDownNodes([nwaku], [waku]);
await tearDownNodes(nwaku, waku);
});
it("creates an iterator", async function () {

View File

@ -1,4 +1,4 @@
import { waitForRemotePeer } from "@waku/core";
import { DefaultPubSubTopic, waitForRemotePeer } from "@waku/core";
import type { LightNode, RelayNode } from "@waku/interfaces";
import { Protocols } from "@waku/interfaces";
import { createLightNode, createRelayNode } from "@waku/sdk";
@ -39,7 +39,7 @@ describe("Wait for remote peer", function () {
await waku1.dial(multiAddrWithId);
await delay(1000);
await waitForRemotePeer(waku1, [Protocols.Relay]);
const peers = waku1.relay.getMeshPeers();
const peers = waku1.relay.getMeshPeers(DefaultPubSubTopic);
const nimPeerId = multiAddrWithId.getPeerId();
expect(nimPeerId).to.not.be.undefined;
@ -262,7 +262,7 @@ describe("Wait for remote peer", function () {
await waku1.dial(multiAddrWithId);
await waitForRemotePeer(waku1);
const peers = waku1.relay.getMeshPeers();
const peers = waku1.relay.getMeshPeers(DefaultPubSubTopic);
const nimPeerId = multiAddrWithId.getPeerId();

View File

@ -12,7 +12,11 @@ import {
createEncoder,
generateSymmetricKey
} from "@waku/message-encryption/symmetric";
import { createLightNode, createRelayNode } from "@waku/sdk";
import {
createLightNode,
createEncoder as createPlainEncoder,
createRelayNode
} from "@waku/sdk";
import { bytesToUtf8, utf8ToBytes } from "@waku/utils/bytes";
import { expect } from "chai";
@ -26,15 +30,16 @@ import { NimGoNode } from "../src/node/node.js";
const TestContentTopic = "/test/1/waku/utf8";
const TestEncoder = createPlainEncoder({ contentTopic: TestContentTopic });
describe("Waku Dial [node only]", function () {
describe("Interop: NimGoNode", function () {
let waku: Waku;
let waku: LightNode;
let nwaku: NimGoNode;
afterEach(async function () {
!!nwaku &&
nwaku.stop().catch((e) => console.log("Nwaku failed to stop", e));
!!waku && waku.stop().catch((e) => console.log("Waku failed to stop", e));
this.timeout(15000);
await tearDownNodes(nwaku, waku);
});
it("connects to nwaku", async function () {
@ -61,6 +66,35 @@ describe("Waku Dial [node only]", function () {
const nimPeerId = await nwaku.getPeerId();
expect(await waku.libp2p.peerStore.has(nimPeerId)).to.be.true;
});
it("Does not throw an exception when node disconnects", async function () {
this.timeout(20_000);
process.on("unhandledRejection", (e) =>
expect.fail("unhandledRejection", e)
);
process.on("uncaughtException", (e) =>
expect.fail("uncaughtException", e)
);
nwaku = new NimGoNode(makeLogFileName(this));
await nwaku.start({
filter: true,
store: true,
lightpush: true
});
const multiAddrWithId = await nwaku.getMultiaddrWithId();
waku = await createLightNode({
staticNoiseKey: NOISE_KEY_1
});
await waku.start();
await waku.dial(multiAddrWithId);
await nwaku.stop();
await waku.lightPush?.send(TestEncoder, {
payload: utf8ToBytes("hello world")
});
});
});
describe("Bootstrap", function () {
@ -68,8 +102,8 @@ describe("Waku Dial [node only]", function () {
let nwaku: NimGoNode;
afterEach(async function () {
this.timeout(10000);
tearDownNodes([nwaku], [waku]);
this.timeout(15000);
await tearDownNodes(nwaku, waku);
});
it("Passing an array", async function () {

View File

@ -62,26 +62,22 @@
"reset-hard": "git clean -dfx -e .idea && git reset --hard && npm i && npm run build"
},
"engines": {
"node": ">=16"
"node": ">=18"
},
"dependencies": {
"chai": "^4.3.8",
"debug": "^4.3.4",
"uint8arrays": "^4.0.4",
"@waku/interfaces": "0.0.18"
"uint8arrays": "^4.0.4"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^25.0.4",
"@rollup/plugin-json": "^6.0.0",
"@rollup/plugin-node-resolve": "^15.1.0",
"@waku/build-utils": "*",
"@waku/interfaces": "0.0.17",
"@waku/interfaces": "0.0.18",
"cspell": "^7.3.2",
"npm-run-all": "^4.1.5",
"rollup": "^3.29.2",
"typescript": "^5.0.4"
},
"typedoc": {
"entryPoint": "./src/index.ts"
"rollup": "^3.29.2"
},
"files": [
"dist",

View File

@ -3,6 +3,8 @@ export * from "./random_subset.js";
export * from "./group_by.js";
export * from "./to_async_iterator.js";
export * from "./is_size_valid.js";
export * from "./sharding.js";
export * from "./push_or_init_map.js";
export function removeItemFromArray(arr: unknown[], value: unknown): unknown[] {
const index = arr.indexOf(value);

View File

@ -0,0 +1,12 @@
import type { PubSubTopic } from "@waku/interfaces";
// Guard used by protocol implementations: throws unless `pubsubTopic`
// is one of the topics the node was instantiated with.
export function ensurePubsubTopicIsConfigured(
  pubsubTopic: PubSubTopic,
  configuredTopics: PubSubTopic[]
): void {
  if (configuredTopics.includes(pubsubTopic)) {
    return;
  }
  throw new Error(
    `PubSub topic ${pubsubTopic} has not been configured on this instance. Configured topics are: ${configuredTopics}. Please update your configuration by passing in the topic during Waku node instantiation.`
  );
}

View File

@ -0,0 +1,4 @@
{
"extends": ["../../typedoc.base.json"],
"entryPoints": ["src/index.ts"]
}

View File

@ -1,8 +1,8 @@
{
"extends": "./tsconfig",
"compilerOptions": {
"module": "nodenext",
"module": "ESNext",
"moduleResolution": "Bundler",
"noEmit": true
},
"exclude": []
}
}

Some files were not shown because too many files have changed in this diff Show More