Mirror of https://github.com/logos-messaging/js-waku.git (synced 2026-01-03 06:13:08 +00:00)

feat: consolidate browser testing code into a single package
feat: simplify browser sim image to light push flow

This commit is contained in: parent 16253026c6, commit e901f24c9d

4  .github/workflows/playwright.yml (vendored)
@@ -11,6 +11,7 @@ env:
  EXAMPLE_TEMPLATE: "web-chat"
  EXAMPLE_NAME: "example"
  EXAMPLE_PORT: "8080"
  PORT: "3000"
  # Firefox in container fails due to $HOME not being owned by user running commands
  # more details https://github.com/microsoft/playwright/issues/6500
  HOME: "/root"
@@ -29,9 +30,6 @@ jobs:

      - uses: ./.github/actions/npm

      - name: Build browser container
        run: npm run build --workspace=@waku/headless-tests

      - name: Build browser test environment
        run: npm run build --workspace=@waku/browser-tests
45  Dockerfile

@@ -1,45 +0,0 @@
FROM node:20-slim

# Install Chrome dependencies
RUN apt-get update && apt-get install -y \
    procps \
    libglib2.0-0 \
    libnss3 \
    libnspr4 \
    libatk1.0-0 \
    libatk-bridge2.0-0 \
    libcups2 \
    libdrm2 \
    libxkbcommon0 \
    libxcomposite1 \
    libxdamage1 \
    libxfixes3 \
    libxrandr2 \
    libgbm1 \
    libasound2 \
    libpango-1.0-0 \
    libcairo2 \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy package files
COPY package*.json ./
COPY packages/browser-tests/package.json ./packages/browser-tests/
COPY packages/headless-tests/package.json ./packages/headless-tests/

# Install dependencies and serve
RUN npm install && npm install -g serve

# Copy source files
COPY tsconfig.json ./
COPY packages/ ./packages/

# Build packages
RUN npm run build -w packages/headless-tests && \
    npm run build:server -w packages/browser-tests && \
    npx playwright install chromium

EXPOSE 3000

CMD ["npm", "run", "start:server", "-w", "packages/browser-tests"]
27  PR.md (new file)

@@ -0,0 +1,27 @@
### Problem / Description
- Duplicate browser testing packages increased maintenance and broke CI (referenced the removed `@waku/headless-tests`).
- Dockerized tests failed in CI runners without Docker access.

### Solution
- Consolidated all browser/headless tests into a single package: `@waku/browser-tests` (removed `packages/headless-tests`).
- Introduced a lightweight bootstrap (`src/assets/bootstrap.js`) and a `shared/` module; simplified routes and server.
- Replaced the root-level Dockerfile with a package-local Dockerfile under `packages/browser-tests`.
  - Build image: `cd packages/browser-tests && npm run docker:build`
  - Run dockerized tests: `HEADLESS_USE_CDN_IN_DOCKER=0 npx playwright test tests/docker-server.spec.ts`
- Fixed Playwright CI to build/test `@waku/browser-tests`; Docker-based tests are skipped on CI via Playwright `testIgnore` (see the sketch after this section).

### Notes
- Docker tests require a Docker-enabled environment and a locally built image; they are intentionally skipped in CI.
- Resolves: CI failures from the removed workspace and duplicated setup.
- Related to: test infra consolidation and stability.

---

#### Checklist
- [ ] Code changes are **covered by unit tests**.
- [ ] Code changes are **covered by e2e tests**, if applicable.
- [ ] **Dogfooding has been performed**, if feasible.
- [ ] A **test version has been published**, if required.
- [ ] All **CI checks** pass successfully.
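The CI skip mentioned above comes down to a `testIgnore` entry in `packages/browser-tests/playwright.config.ts` (the full config, shown later in this diff, also wires up the `webServer` and browser projects); a minimal sketch of the pattern:

```typescript
// playwright.config.ts — minimal sketch of the CI skip described in the PR text.
// Only the testIgnore mechanics are shown; the real config adds webServer, projects, etc.
import { defineConfig } from "@playwright/test";

// On CI, skip any Docker-based specs; locally, run the full suite.
const TEST_IGNORE = process.env.CI ? ["tests/docker-*.spec.ts"] : [];

export default defineConfig({
  testDir: "./tests",
  testIgnore: TEST_IGNORE
});
```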
10  README.md

@@ -37,6 +37,16 @@ You can track progress on the [project board](https://github.com/orgs/waku-org/p

See [CONTRIBUTING.md](https://github.com/waku-org/js-waku/blob/master/CONTRIBUTING.md).

## Dockerized browser tests

Build and run the dockerized Playwright tests from `packages/browser-tests`:

```bash
cd packages/browser-tests
npm run docker:build
HEADLESS_USE_CDN_IN_DOCKER=0 npx playwright test tests/docker-server.spec.ts
```
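The spec driving these commands is not part of this hunk; purely as an illustration (this is not the actual `tests/docker-server.spec.ts`), a Testcontainers-based Playwright spec could start the locally built image roughly like this — the image tag and readiness log line follow the notes in `packages/browser-tests/README.md` below, the rest is an assumption:

```typescript
// Hypothetical sketch — not the real tests/docker-server.spec.ts.
import { expect, test } from "@playwright/test";
import { GenericContainer, Wait } from "testcontainers";

test("dockerized server responds on /info", async ({ request }) => {
  // Image tag and readiness log line are documented in the package README.
  const container = await new GenericContainer("waku-browser-tests:local")
    .withExposedPorts(8080)
    .withWaitStrategy(Wait.forLogMessage(/API server running on http:\/\/localhost:/))
    .start();

  const base = `http://${container.getHost()}:${container.getMappedPort(8080)}`;
  const res = await request.get(`${base}/info`);
  expect(res.ok()).toBeTruthy();

  await container.stop();
});
```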

## License

Licensed and distributed under either of
4605  package-lock.json (generated)

File diff suppressed because it is too large.

@@ -16,7 +16,6 @@
    "packages/rln",
    "packages/tests",
    "packages/reliability-tests",
    "packages/headless-tests",
    "packages/browser-tests",
    "packages/build-utils"
  ],
@@ -1,5 +1,6 @@
node_modules
dist
build
.DS_Store
*.log
# Don't ignore dist - we need the built files
# dist
64  packages/browser-tests/Dockerfile (new file)

@@ -0,0 +1,64 @@
# syntax=docker/dockerfile:1

FROM node:20-bullseye

# Install required system deps for Playwright Chromium
RUN apt-get update && apt-get install -y \
    wget \
    gnupg \
    ca-certificates \
    fonts-liberation \
    libatk-bridge2.0-0 \
    libatk1.0-0 \
    libatspi2.0-0 \
    libcups2 \
    libdbus-1-3 \
    libdrm2 \
    libgtk-3-0 \
    libnspr4 \
    libnss3 \
    libx11-xcb1 \
    libxcomposite1 \
    libxdamage1 \
    libxfixes3 \
    libxkbcommon0 \
    libxrandr2 \
    xdg-utils \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

COPY package.json package-lock.json* .npmrc* ./
RUN npm install --no-audit --no-fund

# Copy sources (excluding web directory to avoid TypeScript issues)
COPY src ./src
COPY types ./types
COPY tsconfig.json ./
COPY web/index.html ./web/index.html

# Copy pre-built files
COPY dist ./dist

# If dist doesn't exist, try to build (but skip web build due to monorepo issues)
RUN if [ ! -f "dist/src/server.js" ]; then npm run build:web || echo "Web build failed, using existing files"; fi

# Install Playwright browsers (Chromium only) at runtime layer
RUN npx playwright install --with-deps chromium

ENV PORT=8080 \
    NODE_ENV=production \
    CHROMIUM_NO_SANDBOX=1 \
    WAKU_CLUSTER_ID=${WAKU_CLUSTER_ID:-} \
    WAKU_SHARD=${WAKU_SHARD:-}

EXPOSE 8080

# Use a script to handle CLI arguments and environment variables
COPY scripts/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
RUN chmod +x /usr/local/bin/docker-entrypoint.sh

ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
CMD ["npm", "run", "start:server"]
@@ -1,182 +1,178 @@
# Waku Browser Tests

This project provides a system for testing the Waku SDK in a browser environment.
Browser-simulated js-waku node running inside headless Chromium, controlled by an Express server. Useful for long-running simulations and realistic verification in CI/Docker.

## Architecture

The system consists of:
- **Headless browser**: Playwright launches Chromium and navigates to a static site built from TypeScript and served by the same server, which exposes `window.wakuAPI` and `window.waku`.
- **Server**: Express app provides REST endpoints and proxies calls into the browser via `page.evaluate(...)`.
- **Bootstrap module**: Small browser-side module in the static site initializes the API and imports `@waku/sdk`.
- **Shared code**: `shared/` contains utilities used by tests and for typing.

1. **Headless Web App**: A simple web application (in the `@waku/headless-tests` package) that loads the Waku SDK and exposes shared API functions.
2. **Express Server**: A server that communicates with the headless app using Playwright.
3. **Shared API**: TypeScript functions shared between the server and web app.
## Prerequisites

## Setup
- Node.js 18+
- Playwright (installed via dev dependency)
- Docker (optional, for Testcontainers-based tests)

1. Install dependencies:
## Install & Build

```bash
# Install main dependencies
npm install

# Install headless app dependencies
cd ../headless-tests
npm install
cd ../browser-tests
```

2. Build the application:

```bash
npm run build
```

This will:
- Build the headless web app using webpack
- Compile the TypeScript server code
The build compiles the TypeScript server to `dist/` and bundles the static site to `dist/web/`.

## Running

Start the server with:
## Run

```bash
# Default configuration (cluster ID 1, auto-sharding)
npm run start:server

# Use cluster ID 2 (for 10k sim compatibility)
npm run start:cluster2

# Use specific cluster and shard
npm run start:cluster2-shard0

# Or with direct CLI arguments
npm run build && node dist/src/server.js --cluster-id=2 --shard=3
```

This will:
1. Serve the headless app on port 8080
2. Start a headless browser to load the app
3. Expose API endpoints to interact with Waku
This starts the API server and a headless browser.

### CLI Arguments

- `--cluster-id=N` - Set the Waku cluster ID (default: 1)
- `--shard=N` - Set a specific shard for static sharding (0-7, omit for auto-sharding)

## Environment variables

- `PORT`: API server port (default: 8080; Playwright sets this for tests)
- `WAKU_WS_MULTIADDR`: a single ws/wss multiaddr to dial in tests (overrides peers)
- `WAKU_WS_MULTIADDRS`: multiple peers as JSON array (e.g. `["/dns4/.../wss/p2p/16U..."]`) or comma-separated string; used when `WAKU_WS_MULTIADDR` is not set

## API Endpoints

- `GET /info`: Get information about the Waku node
- `GET /debug/v1/info`: Get debug information from the Waku node
- `POST /push`: Push a message to the Waku network (legacy)
- `POST /lightpush/v1/message`: Push a message to the Waku network (Waku REST API compatible)
- `POST /admin/v1/create-node`: Create a new Waku node (requires networkConfig)
- `POST /admin/v1/start-node`: Start the Waku node
- `POST /admin/v1/stop-node`: Stop the Waku node
- `POST /admin/v1/peers`: Dial to specified peers (Waku REST API compatible)
- `GET /filter/v2/messages/:contentTopic`: Subscribe to messages on a specific content topic using Server-Sent Events (Waku REST API compatible)
- `GET /filter/v1/messages/:contentTopic`: Retrieve stored messages from a content topic (Waku REST API compatible)
- `GET /` – health/status
- `GET /info` – peer info from the node
- `GET /debug/v1/info` – debug info/protocols
- `POST /lightpush/v1/message` – push a message (Waku REST-compatible shape)
- `POST /admin/v1/create-node` – create a node with `networkConfig`
- `POST /admin/v1/start-node` – start the node
- `POST /admin/v1/stop-node` – stop the node
- `POST /admin/v1/peers` – dial to peers
- `GET /filter/v2/messages/:contentTopic` – SSE subscription to messages
- `GET /filter/v1/messages/:contentTopic` – retrieve queued messages
- `POST /execute` – helper to execute functions in the browser context (testing/support)

### Example: Pushing a message with the legacy endpoint
### Examples

```bash
curl -X POST http://localhost:3000/push \
  -H "Content-Type: application/json" \
  -d '{"contentTopic": "/toy-chat/2/huilong/proto", "payload": [1, 2, 3]}'
```

### Example: Pushing a message with the Waku REST API compatible endpoint
Push (REST-compatible):

```bash
curl -X POST http://localhost:3000/lightpush/v1/message \
  -H "Content-Type: application/json" \
  -d '{
    "pubsubTopic": "/waku/2/rs/0/0",
    "pubsubTopic": "/waku/2/rs/1/0",
    "message": {
      "payload": "SGVsbG8sIFdha3Uh",
      "contentTopic": "/toy-chat/2/huilong/proto",
      "timestamp": 1712135330213797632
      "payload": [1,2,3],
      "contentTopic": "/test/1/message/proto"
    }
  }'
```

### Example: Executing a function

```bash
curl -X POST http://localhost:3000/execute \
  -H "Content-Type: application/json" \
  -d '{"functionName": "getPeerInfo", "params": []}'
```

### Example: Creating a Waku node
Create/Start/Stop:

```bash
curl -X POST http://localhost:3000/admin/v1/create-node \
  -H "Content-Type: application/json" \
  -d '{
    "defaultBootstrap": true,
    "networkConfig": {
      "clusterId": 1,
      "shards": [0, 1]
    }
    "networkConfig": { "clusterId": 42, "shards": [0] }
  }'
```

### Example: Starting and stopping a Waku node

```bash
# Start the node
curl -X POST http://localhost:3000/admin/v1/start-node

# Stop the node
curl -X POST http://localhost:3000/admin/v1/stop-node
```

### Example: Dialing to specific peers with the Waku REST API compatible endpoint
Dial peers:

```bash
curl -X POST http://localhost:3000/admin/v1/peers \
  -H "Content-Type: application/json" \
  -d '{
    "peerMultiaddrs": [
      "/ip4/127.0.0.1/tcp/8000/p2p/16Uiu2HAm4v8KuHUH6Cwz3upPeQbkyxQJsFGPdt7kHtkN8F79QiE6"]
    ]
    "peerMultiaddrs": ["/dns4/example/tcp/8000/wss/p2p/16U..."]
  }'
```

### Example: Dialing to specific peers with the execute endpoint
SSE subscribe:

```bash
curl -X POST http://localhost:3000/execute \
  -H "Content-Type: application/json" \
  -d '{
    "functionName": "dialPeers",
    "params": [
      ["/ip4/127.0.0.1/tcp/8000/p2p/16Uiu2HAm4v8KuHUH6Cwz3upPeQbkyxQJsFGPdt7kHtkN8F79QiE6"]
    ]
  }'
curl -N "http://localhost:3000/filter/v2/messages/test-topic?clusterId=1&shard=0"
```

### Example: Subscribing to a content topic with the filter endpoint
Query queued messages:

```bash
# Open a persistent connection to receive messages as Server-Sent Events
curl -N http://localhost:3000/filter/v2/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto

# You can also specify clustering options
curl -N "http://localhost:3000/filter/v2/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto?clusterId=0&shard=0"
curl "http://localhost:3000/filter/v1/messages/test-topic?pageSize=10&ascending=true"
```

### Example: Retrieving stored messages from a content topic
## Testing

```bash
# Get the most recent 20 messages
curl http://localhost:3000/filter/v1/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto

# Get messages with pagination and time filtering
curl "http://localhost:3000/filter/v1/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto?pageSize=10&startTime=1712000000000&endTime=1713000000000&ascending=true"
npm run build
npm test
```

Playwright will start the server (uses `npm run start:server`). Ensure the build artifacts exist before running tests.

## Docker Usage

Build and run with default configuration:

```bash
npm run docker:build
docker run -p 8080:8080 waku-browser-tests:local
```

Run with cluster ID 2 for 10k sim compatibility:

```bash
docker run -p 8080:8080 waku-browser-tests:local --cluster-id=2
```

Run with specific cluster and shard:

```bash
docker run -p 8080:8080 waku-browser-tests:local --cluster-id=2 --shard=0
```

Or using environment variables:

```bash
docker run -p 8080:8080 -e WAKU_CLUSTER_ID=2 -e WAKU_SHARD=0 waku-browser-tests:local
```

### Dockerized tests

`tests/docker-server.spec.ts` uses Testcontainers. Ensure Docker is running.

Build the image and run only the docker tests locally:

```bash
npm run docker:build
npx playwright test tests/docker-server.spec.ts
```

Notes:
- The Docker image runs the server with Playwright Chromium and `--no-sandbox` for container compatibility.
- Testcontainers will map the container port automatically; the tests probe readiness by waiting for `API server running on http://localhost:` in logs.

## Extending

To add new functionality:
- To add new REST endpoints: update `src/server.ts` and route handlers.
- To add new browser-executed functions: prefer updating `src/assets/bootstrap.js` (minimize inline JS in `src/server.ts`).
- For shared logic usable in tests, add helpers under `shared/`.

1. Add your function to `src/api/shared.ts`
2. Add your function to the `API` object in `src/api/shared.ts`
3. Use it via the server endpoints

### Example: Dialing to specific peers

```bash
curl -X POST http://localhost:3000/execute \
  -H "Content-Type: application/json" \
  -d '{
    "functionName": "dialPeers",
    "params": [
      ["/ip4/127.0.0.1/tcp/8000/p2p/16Uiu2HAm4v8KuHUH6Cwz3upPeQbkyxQJsFGPdt7kHtkN8F79QiE6"]
    ]
  }'
```
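Equivalent to the curl examples in the README above, the REST-compatible lightpush endpoint can also be driven from a small TypeScript/Node script; a sketch assuming the server listens on port 3000 as in those examples:

```typescript
// Sketch of calling the REST-compatible lightpush endpoint from Node 18+ (built-in fetch).
// Port and body shape follow the curl examples above; nothing else is implied.
const res = await fetch("http://localhost:3000/lightpush/v1/message", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    pubsubTopic: "/waku/2/rs/1/0",
    message: {
      payload: [1, 2, 3],
      contentTopic: "/test/1/message/proto"
    }
  })
});
console.log(res.status, await res.json());
```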
@@ -5,10 +5,15 @@
  "type": "module",
  "scripts": {
    "start": "npm run start:server",
    "start:server": "node ./dist/server.js",
    "test": "npx playwright test",
    "start:server": "PORT=8080 node ./dist/src/server.js",
    "start:cluster2": "PORT=8080 node ./dist/src/server.js --cluster-id=2",
    "start:cluster2-shard0": "PORT=8080 node ./dist/src/server.js --cluster-id=2 --shard=0",
    "test": "npx playwright test --reporter=line",
    "build:server": "tsc -p tsconfig.json",
    "build": "npm run build:server"
    "build:web": "esbuild web/index.ts --bundle --format=esm --platform=browser --outdir=dist/web && cp web/index.html dist/web/index.html",
    "build": "npm-run-all -s build:server build:web",
    "docker:build": "docker build -t waku-browser-tests:local .",
    "docker:test": "npx playwright test --reporter=line tests/docker-server.spec.ts"
  },
  "devDependencies": {
    "@types/cors": "^2.8.15",
@@ -16,16 +21,17 @@
    "@types/node": "^20.10.0",
    "axios": "^1.8.4",
    "dotenv-flow": "^0.4.0",
    "esbuild": "^0.21.5",
    "filter-obj": "^2.0.2",
    "it-first": "^3.0.9",
    "npm-run-all": "^4.1.5",
    "serve": "^14.2.3",
    "typescript": "5.8.3",
    "webpack-cli": "^6.0.1"
    "testcontainers": "^10.9.0",
    "typescript": "5.8.3"
  },
  "dependencies": {
    "@playwright/test": "^1.51.1",
    "@waku/sdk": "^0.0.30",
    "@waku/sdk": "^0.0.34",
    "cors": "^2.8.5",
    "express": "^4.21.2",
    "node-polyfill-webpack-plugin": "^4.1.0"
    "express": "^4.21.2"
  }
}
@@ -5,19 +5,26 @@ import { defineConfig, devices } from "@playwright/test";
if (!process.env.CI) {
  // Need to use .js extension for ES modules
  // eslint-disable-next-line import/extensions
  await import("dotenv-flow/config.js");
  try {
    await import("dotenv-flow/config.js");
  } catch (e) {
    console.warn("dotenv-flow not found; skipping env loading");
  }
}

const EXAMPLE_PORT = process.env.EXAMPLE_PORT || "8080";
// web-chat specific thingy
const EXAMPLE_TEMPLATE = process.env.EXAMPLE_TEMPLATE || "";
const BASE_URL = `http://127.0.0.1:${EXAMPLE_PORT}/${EXAMPLE_TEMPLATE}`;
// Ignore docker-based tests on CI
const TEST_IGNORE = process.env.CI ? ["tests/docker-*.spec.ts"] : [];

/**
 * See https://playwright.dev/docs/test-configuration.
 */
export default defineConfig({
  testDir: "./tests",
  testIgnore: TEST_IGNORE,
  /* Run tests in files in parallel */
  fullyParallel: true,
  /* Fail the build on CI if you accidentally left test.only in the source code. */
@@ -43,15 +50,6 @@ export default defineConfig({
      name: "chromium",
      use: { ...devices["Desktop Chrome"] }
    }
  ],
  ]

  /* Run your local dev server before starting the tests */
  webServer: {
    url: BASE_URL,
    stdout: "pipe",
    stderr: "pipe",
    command: "npm run start:server",
    reuseExistingServer: !process.env.CI,
    timeout: 5 * 60 * 1000 // five minutes for bootstrapping an example
  }
});
32  packages/browser-tests/scripts/docker-entrypoint.sh (new file)

@@ -0,0 +1,32 @@
#!/bin/bash

# Docker entrypoint script for waku-browser-tests
# Handles CLI arguments and converts them to environment variables

# Parse command line arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    --cluster-id=*)
      export WAKU_CLUSTER_ID="${1#*=}"
      echo "Setting WAKU_CLUSTER_ID=${WAKU_CLUSTER_ID}"
      shift
      ;;
    --shard=*)
      export WAKU_SHARD="${1#*=}"
      echo "Setting WAKU_SHARD=${WAKU_SHARD}"
      shift
      ;;
    *)
      # Unknown argument, keep it for the main command
      break
      ;;
  esac
done

# If no specific command is provided, use the default CMD
if [ $# -eq 0 ]; then
  set -- "npm" "run" "start:server"
fi

# Execute the main command
exec "$@"
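For context, the `WAKU_CLUSTER_ID` / `WAKU_SHARD` variables exported above are read back by the server when it injects the network configuration into the served page; paraphrasing the `src/server.ts` logic that appears later in this diff:

```typescript
// Paraphrase of the env handling in src/server.ts (shown further down in this diff).
const networkConfig: { clusterId?: number; shards?: number[] } = {};
if (process.env.WAKU_CLUSTER_ID) {
  networkConfig.clusterId = parseInt(process.env.WAKU_CLUSTER_ID, 10);
}
if (process.env.WAKU_SHARD) {
  networkConfig.shards = [parseInt(process.env.WAKU_SHARD, 10)];
}
// The config is then injected into web/index.html as window.__WAKU_NETWORK_CONFIG.
```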
22
packages/browser-tests/src/api/common.d.ts
vendored
22
packages/browser-tests/src/api/common.d.ts
vendored
@ -1,22 +0,0 @@
|
||||
/**
|
||||
* Shared utilities for working with Waku nodes
|
||||
* This file contains functions used by both browser tests and server
|
||||
*/
|
||||
|
||||
/**
|
||||
* Type definition for a minimal Waku node interface
|
||||
* This allows us to use the same code in different contexts
|
||||
*/
|
||||
export interface IWakuNode {
|
||||
libp2p: {
|
||||
peerId: { toString(): string };
|
||||
getMultiaddrs(): Array<{ toString(): string }>;
|
||||
getProtocols(): any;
|
||||
peerStore: {
|
||||
all(): Promise<Array<{ id: { toString(): string } }>>;
|
||||
};
|
||||
};
|
||||
lightPush: {
|
||||
send: (encoder: any, message: { payload: Uint8Array }) => Promise<{ successes: any[] }>;
|
||||
};
|
||||
}
|
||||
@ -1,36 +0,0 @@
|
||||
import { IWakuNode } from "./common.js";
|
||||
|
||||
/**
|
||||
* Gets peer information from a Waku node
|
||||
* Used in both server API endpoints and headless tests
|
||||
*/
|
||||
export async function getPeerInfo(waku: IWakuNode): Promise<{
|
||||
peerId: string;
|
||||
multiaddrs: string[];
|
||||
peers: string[];
|
||||
}> {
|
||||
const multiaddrs = waku.libp2p.getMultiaddrs();
|
||||
const peers = await waku.libp2p.peerStore.all();
|
||||
|
||||
return {
|
||||
peerId: waku.libp2p.peerId.toString(),
|
||||
multiaddrs: multiaddrs.map((addr) => addr.toString()),
|
||||
peers: peers.map((peer) => peer.id.toString())
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets debug information from a Waku node
|
||||
* Used in both server API endpoints and tests
|
||||
*/
|
||||
export async function getDebugInfo(waku: IWakuNode): Promise<{
|
||||
listenAddresses: string[];
|
||||
peerId: string;
|
||||
protocols: string[];
|
||||
}> {
|
||||
return {
|
||||
listenAddresses: waku.libp2p.getMultiaddrs().map((addr) => addr.toString()),
|
||||
peerId: waku.libp2p.peerId.toString(),
|
||||
protocols: Array.from(waku.libp2p.getProtocols())
|
||||
};
|
||||
}
|
||||
@ -1,16 +0,0 @@
|
||||
import { createEncoder, LightNode, SDKProtocolResult } from "@waku/sdk";
|
||||
|
||||
export async function pushMessage(
|
||||
waku: LightNode,
|
||||
contentTopic: string,
|
||||
payload?: Uint8Array
|
||||
): Promise<SDKProtocolResult> {
|
||||
const enc = createEncoder({
|
||||
contentTopic
|
||||
});
|
||||
|
||||
const result = await waku.lightPush.send(enc, {
|
||||
payload: payload ?? new Uint8Array()
|
||||
});
|
||||
return result;
|
||||
}
|
||||
@ -1,274 +0,0 @@
|
||||
import {
|
||||
createDecoder,
|
||||
createEncoder,
|
||||
createLightNode,
|
||||
CreateNodeOptions,
|
||||
DecodedMessage,
|
||||
LightNode,
|
||||
SDKProtocolResult,
|
||||
SubscribeResult
|
||||
} from "@waku/sdk";
|
||||
|
||||
import { IWakuNode } from "./common.js";
|
||||
|
||||
/**
|
||||
* Gets peer information from a Waku node
|
||||
*/
|
||||
export async function getPeerInfo(waku: IWakuNode): Promise<{
|
||||
peerId: string;
|
||||
multiaddrs: string[];
|
||||
peers: string[];
|
||||
}> {
|
||||
const multiaddrs = waku.libp2p.getMultiaddrs();
|
||||
const peers = await waku.libp2p.peerStore.all();
|
||||
|
||||
return {
|
||||
peerId: waku.libp2p.peerId.toString(),
|
||||
multiaddrs: multiaddrs.map((addr) => addr.toString()),
|
||||
peers: peers.map((peer) => peer.id.toString())
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets debug information from a Waku node
|
||||
*/
|
||||
export async function getDebugInfo(waku: IWakuNode): Promise<{
|
||||
listenAddresses: string[];
|
||||
peerId: string;
|
||||
protocols: string[];
|
||||
}> {
|
||||
return {
|
||||
listenAddresses: waku.libp2p.getMultiaddrs().map((addr) => addr.toString()),
|
||||
peerId: waku.libp2p.peerId.toString(),
|
||||
protocols: Array.from(waku.libp2p.getProtocols())
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Pushes a message to the network
|
||||
*/
|
||||
export async function pushMessage(
|
||||
waku: LightNode,
|
||||
contentTopic: string,
|
||||
payload?: Uint8Array,
|
||||
options?: {
|
||||
clusterId?: number;
|
||||
shard?: number;
|
||||
}
|
||||
): Promise<SDKProtocolResult> {
|
||||
if (!waku) {
|
||||
throw new Error("Waku node not found");
|
||||
}
|
||||
|
||||
const encoder = createEncoder({
|
||||
contentTopic,
|
||||
pubsubTopicShardInfo: {
|
||||
clusterId: options?.clusterId ?? 1,
|
||||
shard: options?.shard ?? 1
|
||||
}
|
||||
});
|
||||
|
||||
const result = await waku.lightPush.send(encoder, {
|
||||
payload: payload ?? new Uint8Array()
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and initializes a Waku node
|
||||
* Checks if a node is already running in window and stops it if it exists
|
||||
*/
|
||||
export async function createWakuNode(
|
||||
options: CreateNodeOptions
|
||||
): Promise<{ success: boolean; error?: string }> {
|
||||
// Check if we're in a browser environment and a node already exists
|
||||
if (typeof window === "undefined") {
|
||||
return { success: false, error: "No window found" };
|
||||
}
|
||||
|
||||
try {
|
||||
if ((window as any).waku) {
|
||||
await (window as any).waku.stop();
|
||||
}
|
||||
(window as any).waku = await createLightNode(options);
|
||||
return { success: true };
|
||||
} catch (error: any) {
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
}
|
||||
|
||||
export async function startNode(): Promise<{
|
||||
success: boolean;
|
||||
error?: string;
|
||||
}> {
|
||||
if (typeof window !== "undefined" && (window as any).waku) {
|
||||
try {
|
||||
await (window as any).waku.start();
|
||||
return { success: true };
|
||||
} catch (error: any) {
|
||||
// Silently continue if there's an error starting the node
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
}
|
||||
return { success: false, error: "Waku node not found in window" };
|
||||
}
|
||||
|
||||
export async function stopNode(): Promise<{
|
||||
success: boolean;
|
||||
error?: string;
|
||||
}> {
|
||||
if (typeof window !== "undefined" && (window as any).waku) {
|
||||
await (window as any).waku.stop();
|
||||
return { success: true };
|
||||
}
|
||||
return { success: false, error: "Waku node not found in window" };
|
||||
}
|
||||
|
||||
export async function dialPeers(
|
||||
waku: LightNode,
|
||||
peers: string[]
|
||||
): Promise<{
|
||||
total: number;
|
||||
errors: string[];
|
||||
}> {
|
||||
const total = peers.length;
|
||||
const errors: string[] = [];
|
||||
|
||||
await Promise.allSettled(
|
||||
peers.map((peer) =>
|
||||
waku.dial(peer).catch((error: any) => {
|
||||
errors.push(error.message);
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
return { total, errors };
|
||||
}
|
||||
|
||||
export async function subscribe(
|
||||
waku: LightNode,
|
||||
contentTopic: string,
|
||||
options?: {
|
||||
clusterId?: number;
|
||||
shard?: number;
|
||||
},
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
callback?: (message: DecodedMessage) => void
|
||||
): Promise<SubscribeResult> {
|
||||
const clusterId = options?.clusterId ?? 42;
|
||||
const shard = options?.shard ?? 0;
|
||||
|
||||
console.log(
|
||||
`Creating decoder for content topic ${contentTopic} with clusterId=${clusterId}, shard=${shard}`
|
||||
);
|
||||
|
||||
const pubsubTopic = `/waku/2/rs/${clusterId}/${shard}`;
|
||||
|
||||
let configuredTopics: string[] = [];
|
||||
|
||||
try {
|
||||
const protocols = waku.libp2p.getProtocols();
|
||||
console.log(`Available protocols: ${Array.from(protocols).join(", ")}`);
|
||||
|
||||
const metadataMethod = (waku.libp2p as any)._services?.metadata?.getInfo;
|
||||
if (metadataMethod) {
|
||||
const metadata = metadataMethod();
|
||||
console.log(`Node metadata: ${JSON.stringify(metadata)}`);
|
||||
|
||||
if (metadata?.pubsubTopics && Array.isArray(metadata.pubsubTopics)) {
|
||||
configuredTopics = metadata.pubsubTopics;
|
||||
console.log(
|
||||
`Found configured pubsub topics: ${configuredTopics.join(", ")}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
configuredTopics.length > 0 &&
|
||||
!configuredTopics.includes(pubsubTopic)
|
||||
) {
|
||||
console.warn(
|
||||
`Pubsub topic ${pubsubTopic} is not configured. Configured topics: ${configuredTopics.join(", ")}`
|
||||
);
|
||||
|
||||
for (const topic of configuredTopics) {
|
||||
const parts = topic.split("/");
|
||||
if (parts.length === 6 && parts[1] === "waku" && parts[3] === "rs") {
|
||||
console.log(`Found potential matching pubsub topic: ${topic}`);
|
||||
|
||||
// Use the first topic as a fallback if no exact match is found
|
||||
// This isn't ideal but allows tests to continue
|
||||
const topicClusterId = parseInt(parts[4]);
|
||||
const topicShard = parseInt(parts[5]);
|
||||
|
||||
if (!isNaN(topicClusterId) && !isNaN(topicShard)) {
|
||||
console.log(
|
||||
`Using pubsub topic with clusterId=${topicClusterId}, shard=${topicShard} instead`
|
||||
);
|
||||
|
||||
const decoder = createDecoder(contentTopic, {
|
||||
clusterId: topicClusterId,
|
||||
shard: topicShard
|
||||
});
|
||||
|
||||
try {
|
||||
const subscription = await waku.filter.subscribe(
|
||||
decoder,
|
||||
callback ??
|
||||
((_message) => {
|
||||
console.log(_message);
|
||||
})
|
||||
);
|
||||
return subscription;
|
||||
} catch (innerErr: any) {
|
||||
console.error(
|
||||
`Error with alternative pubsub topic: ${innerErr.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`Error checking node protocols: ${String(err)}`);
|
||||
}
|
||||
|
||||
const decoder = createDecoder(contentTopic, {
|
||||
clusterId,
|
||||
shard
|
||||
});
|
||||
|
||||
try {
|
||||
const subscription = await waku.filter.subscribe(
|
||||
decoder,
|
||||
callback ??
|
||||
((_message) => {
|
||||
console.log(_message);
|
||||
})
|
||||
);
|
||||
return subscription;
|
||||
} catch (err: any) {
|
||||
if (err.message && err.message.includes("Pubsub topic")) {
|
||||
console.error(`Pubsub topic error: ${err.message}`);
|
||||
console.log("Subscription failed, but continuing with empty result");
|
||||
|
||||
return {
|
||||
unsubscribe: async () => {
|
||||
console.log("No-op unsubscribe from failed subscription");
|
||||
}
|
||||
} as unknown as SubscribeResult;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
export const API = {
|
||||
getPeerInfo,
|
||||
getDebugInfo,
|
||||
pushMessage,
|
||||
createWakuNode,
|
||||
startNode,
|
||||
stopNode,
|
||||
dialPeers,
|
||||
subscribe
|
||||
};
|
||||
@ -5,20 +5,45 @@ let browser: Browser | undefined;
|
||||
let page: Page | undefined;
|
||||
|
||||
/**
|
||||
* Initialize browser and load headless page
|
||||
* Initialize browser and load the Waku web app
|
||||
*/
|
||||
export async function initBrowser(): Promise<void> {
|
||||
browser = await chromium.launch({
|
||||
headless: true
|
||||
});
|
||||
export async function initBrowser(appPort: number): Promise<void> {
|
||||
try {
|
||||
// Support sandbox-less mode for containers
|
||||
const launchArgs =
|
||||
process.env.CHROMIUM_NO_SANDBOX === "1"
|
||||
? ["--no-sandbox", "--disable-setuid-sandbox"]
|
||||
: [];
|
||||
|
||||
if (!browser) {
|
||||
throw new Error("Failed to initialize browser");
|
||||
browser = await chromium.launch({
|
||||
headless: true,
|
||||
args: launchArgs
|
||||
});
|
||||
|
||||
if (!browser) {
|
||||
throw new Error("Failed to initialize browser");
|
||||
}
|
||||
|
||||
page = await browser.newPage();
|
||||
|
||||
// Load the Waku web app
|
||||
await page.goto(`http://localhost:${appPort}/app/index.html`, {
|
||||
waitUntil: "networkidle",
|
||||
});
|
||||
|
||||
// Wait for wakuApi to be available
|
||||
await page.waitForFunction(
|
||||
() => {
|
||||
return window.wakuApi && typeof window.wakuApi.createWakuNode === "function";
|
||||
},
|
||||
{ timeout: 30000 }
|
||||
);
|
||||
|
||||
console.log("Browser initialized successfully with wakuApi");
|
||||
} catch (error) {
|
||||
console.error("Error initializing browser:", error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
page = await browser.newPage();
|
||||
|
||||
await page.goto("http://localhost:8080");
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@ -1,89 +0,0 @@
|
||||
// Message queue to store received messages by content topic
|
||||
export interface QueuedMessage {
|
||||
payload: number[] | undefined;
|
||||
contentTopic: string;
|
||||
timestamp: number;
|
||||
receivedAt: number;
|
||||
}
|
||||
|
||||
export interface MessageQueue {
|
||||
[contentTopic: string]: QueuedMessage[];
|
||||
}
|
||||
|
||||
// Global message queue storage
|
||||
const messageQueue: MessageQueue = {};
|
||||
|
||||
/**
|
||||
* Store a message in the queue
|
||||
*/
|
||||
export function storeMessage(message: QueuedMessage): void {
|
||||
const { contentTopic } = message;
|
||||
|
||||
if (!messageQueue[contentTopic]) {
|
||||
messageQueue[contentTopic] = [];
|
||||
}
|
||||
|
||||
messageQueue[contentTopic].push(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get messages for a specific content topic
|
||||
*/
|
||||
export function getMessages(
|
||||
contentTopic: string,
|
||||
options?: {
|
||||
startTime?: number;
|
||||
endTime?: number;
|
||||
pageSize?: number;
|
||||
ascending?: boolean;
|
||||
}
|
||||
): QueuedMessage[] {
|
||||
if (!messageQueue[contentTopic]) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let messages = [...messageQueue[contentTopic]];
|
||||
|
||||
// Filter by time if specified
|
||||
if (options?.startTime || options?.endTime) {
|
||||
messages = messages.filter((msg) => {
|
||||
const afterStart = options.startTime
|
||||
? msg.timestamp >= options.startTime
|
||||
: true;
|
||||
const beforeEnd = options.endTime
|
||||
? msg.timestamp <= options.endTime
|
||||
: true;
|
||||
return afterStart && beforeEnd;
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by timestamp
|
||||
messages.sort((a, b) => {
|
||||
return options?.ascending
|
||||
? a.timestamp - b.timestamp
|
||||
: b.timestamp - a.timestamp;
|
||||
});
|
||||
|
||||
// Limit result size
|
||||
if (options?.pageSize && options.pageSize > 0) {
|
||||
messages = messages.slice(0, options.pageSize);
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all messages from the queue
|
||||
*/
|
||||
export function clearQueue(): void {
|
||||
Object.keys(messageQueue).forEach((topic) => {
|
||||
delete messageQueue[topic];
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all content topics in the queue
|
||||
*/
|
||||
export function getContentTopics(): string[] {
|
||||
return Object.keys(messageQueue);
|
||||
}
|
||||
@ -1,223 +1,49 @@
|
||||
import express, { Request, Response, Router } from "express";
|
||||
|
||||
import { getPage } from "../browser/index.js";
|
||||
import { Router } from "express";
|
||||
import { createEndpointHandler, validators } from "../utils/endpoint-handler.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
router.head("/admin/v1/create-node", (_req: Request, res: Response) => {
|
||||
// HEAD endpoints for CORS preflight
|
||||
router.head("/admin/v1/create-node", (_req, res) => {
|
||||
res.status(200).end();
|
||||
});
|
||||
|
||||
router.head("/admin/v1/start-node", (_req: Request, res: Response) => {
|
||||
router.head("/admin/v1/start-node", (_req, res) => {
|
||||
res.status(200).end();
|
||||
});
|
||||
|
||||
router.head("/admin/v1/stop-node", (_req: Request, res: Response) => {
|
||||
res.status(200).end();
|
||||
});
|
||||
|
||||
router.post("/admin/v1/create-node", (async (req: Request, res: Response) => {
|
||||
try {
|
||||
const {
|
||||
defaultBootstrap = true,
|
||||
networkConfig
|
||||
} = req.body;
|
||||
|
||||
// Validate that networkConfig is provided
|
||||
if (!networkConfig) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "networkConfig is required"
|
||||
});
|
||||
// Create Waku node endpoint
|
||||
router.post("/admin/v1/create-node", createEndpointHandler({
|
||||
methodName: "createWakuNode",
|
||||
validateInput: (body) => {
|
||||
if (!body.networkConfig || body.networkConfig.clusterId === undefined) {
|
||||
throw new Error("networkConfig.clusterId is required");
|
||||
}
|
||||
|
||||
// Validate that networkConfig has required properties
|
||||
if (networkConfig.clusterId === undefined) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "networkConfig.clusterId is required"
|
||||
});
|
||||
}
|
||||
|
||||
const page = getPage();
|
||||
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(
|
||||
({ defaultBootstrap, networkConfig }) => {
|
||||
const nodeOptions: any = {
|
||||
defaultBootstrap,
|
||||
relay: {
|
||||
advertise: true,
|
||||
gossipsubOptions: {
|
||||
allowPublishToZeroPeers: true
|
||||
}
|
||||
},
|
||||
filter: true,
|
||||
peers: [],
|
||||
networkConfig: {
|
||||
clusterId: networkConfig.clusterId,
|
||||
shards: networkConfig.shards || [0]
|
||||
}
|
||||
};
|
||||
|
||||
return window.wakuAPI.createWakuNode(nodeOptions);
|
||||
},
|
||||
{ defaultBootstrap, networkConfig }
|
||||
);
|
||||
|
||||
if (result && result.success) {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
message: "Waku node created successfully"
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: "Failed to create Waku node",
|
||||
details: result?.error || "Unknown error"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not create Waku node: ${error.message}`
|
||||
});
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
return {
|
||||
defaultBootstrap: body.defaultBootstrap ?? true,
|
||||
networkConfig: body.networkConfig
|
||||
};
|
||||
},
|
||||
transformResult: (result) => ({
|
||||
success: result?.success || false,
|
||||
message: result?.success ? "Waku node created successfully" : "Failed to create Waku node"
|
||||
})
|
||||
}));
|
||||
|
||||
// Start Waku node endpoint
|
||||
router.post("/admin/v1/start-node", (async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const page = getPage();
|
||||
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(() => {
|
||||
return window.wakuAPI.startNode
|
||||
? window.wakuAPI.startNode()
|
||||
: { error: "startNode function not available" };
|
||||
});
|
||||
|
||||
if (result && !result.error) {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
message: "Waku node started successfully"
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: "Failed to start Waku node",
|
||||
details: result?.error || "Unknown error"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not start Waku node: ${error.message}`
|
||||
});
|
||||
router.post("/admin/v1/start-node", createEndpointHandler({
|
||||
methodName: "startNode",
|
||||
validateInput: validators.noInput,
|
||||
transformResult: (result) => {
|
||||
const success = result && (result.success === undefined || result.success);
|
||||
return {
|
||||
success,
|
||||
message: success ? "Waku node started successfully" : "Failed to start Waku node",
|
||||
...(result?.error && { details: result.error })
|
||||
};
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
}));
|
||||
|
||||
// Stop Waku node endpoint
|
||||
router.post("/admin/v1/stop-node", (async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const page = getPage();
|
||||
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(() => {
|
||||
return window.wakuAPI.stopNode
|
||||
? window.wakuAPI.stopNode()
|
||||
: { error: "stopNode function not available" };
|
||||
});
|
||||
|
||||
if (result && !result.error) {
|
||||
res.status(200).json({
|
||||
success: true,
|
||||
message: "Waku node stopped successfully"
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: "Failed to stop Waku node",
|
||||
details: result?.error || "Unknown error"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not stop Waku node: ${error.message}`
|
||||
});
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
// Dial to peers endpoint
|
||||
router.post("/admin/v1/peers", (async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { peerMultiaddrs } = req.body;
|
||||
|
||||
if (!peerMultiaddrs || !Array.isArray(peerMultiaddrs)) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "Invalid request. peerMultiaddrs array is required."
|
||||
});
|
||||
}
|
||||
|
||||
const page = getPage();
|
||||
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(
|
||||
({ peerAddrs }) => {
|
||||
return window.wakuAPI.dialPeers(window.waku, peerAddrs);
|
||||
},
|
||||
{ peerAddrs: peerMultiaddrs }
|
||||
);
|
||||
|
||||
if (result) {
|
||||
res.status(200).json({
|
||||
peersAdded: peerMultiaddrs.length - (result.errors?.length || 0),
|
||||
peerErrors:
|
||||
result.errors?.map((error: string, index: number) => {
|
||||
return {
|
||||
peerMultiaddr: peerMultiaddrs[index],
|
||||
error
|
||||
};
|
||||
}) || []
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: "Failed to dial peers"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not dial peers: ${error.message}`
|
||||
});
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
export default router;
|
||||
|
||||
@ -1,51 +0,0 @@
|
||||
import express, { Request, Response, Router } from "express";
|
||||
|
||||
import { getPage } from "../browser/index.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Get node info endpoint
|
||||
router.get("/info", (async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const page = getPage();
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(() => {
|
||||
return window.wakuAPI.getPeerInfo(window.waku);
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error: any) {
|
||||
console.error("Error getting info:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
// Get node debug info endpoint
|
||||
router.get("/debug/v1/info", (async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const page = getPage();
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(() => {
|
||||
return window.wakuAPI.getDebugInfo(window.waku);
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error: any) {
|
||||
console.error("Error getting debug info:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
export default router;
|
||||
@ -1,131 +0,0 @@
|
||||
import express, { Request, Response, Router } from "express";
|
||||
|
||||
import { getPage } from "../browser/index.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Legacy push message endpoint
|
||||
router.post("/push", (async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { contentTopic, payload } = req.body;
|
||||
|
||||
if (!contentTopic) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "Invalid request. contentTopic is required."
|
||||
});
|
||||
}
|
||||
|
||||
const page = getPage();
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(
|
||||
({ topic, data }) => {
|
||||
return window.wakuAPI.pushMessage(window.waku, topic, data);
|
||||
},
|
||||
{
|
||||
topic: contentTopic,
|
||||
data: payload
|
||||
}
|
||||
);
|
||||
|
||||
if (result) {
|
||||
res.status(200).json({
|
||||
messageId:
|
||||
"0x" +
|
||||
Buffer.from(contentTopic + Date.now().toString()).toString("hex")
|
||||
});
|
||||
} else {
|
||||
res.status(503).json({
|
||||
code: 503,
|
||||
message: "Could not publish message: no suitable peers"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (
|
||||
error.message.includes("size exceeds") ||
|
||||
error.message.includes("stream reset")
|
||||
) {
|
||||
res.status(503).json({
|
||||
code: 503,
|
||||
message:
|
||||
"Could not publish message: message size exceeds gossipsub max message size"
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not publish message: ${error.message}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
// Waku REST API compatible push endpoint
|
||||
router.post("/lightpush/v1/message", (async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { message } = req.body;
|
||||
|
||||
if (!message || !message.contentTopic) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "Invalid request. contentTopic is required."
|
||||
});
|
||||
}
|
||||
|
||||
const page = getPage();
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(
|
||||
({ contentTopic, payload }) => {
|
||||
return window.wakuAPI.pushMessage(window.waku, contentTopic, payload);
|
||||
},
|
||||
{
|
||||
contentTopic: message.contentTopic,
|
||||
payload: message.payload
|
||||
}
|
||||
);
|
||||
|
||||
if (result) {
|
||||
res.status(200).json({
|
||||
messageId:
|
||||
"0x" +
|
||||
Buffer.from(message.contentTopic + Date.now().toString()).toString(
|
||||
"hex"
|
||||
)
|
||||
});
|
||||
} else {
|
||||
res.status(503).json({
|
||||
code: 503,
|
||||
message: "Could not publish message: no suitable peers"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (
|
||||
error.message.includes("size exceeds") ||
|
||||
error.message.includes("stream reset")
|
||||
) {
|
||||
res.status(503).json({
|
||||
code: 503,
|
||||
message:
|
||||
"Could not publish message: message size exceeds gossipsub max message size"
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not publish message: ${error.message}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
export default router;
|
||||
182
packages/browser-tests/src/routes/waku.ts
Normal file
182
packages/browser-tests/src/routes/waku.ts
Normal file
@ -0,0 +1,182 @@
|
||||
import { Router } from "express";
|
||||
import { createEndpointHandler, validators, errorHandlers } from "../utils/endpoint-handler.js";
|
||||
import { getPage } from "../browser/index.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// CORS preflight handlers
|
||||
const corsEndpoints = [
|
||||
"/waku/v1/create-node",
|
||||
"/waku/v1/start-node",
|
||||
"/waku/v1/stop-node",
|
||||
"/waku/v1/wait-for-peers",
|
||||
"/waku/v1/dial-peers",
|
||||
"/waku/v1/peer-info",
|
||||
"/waku/v1/debug-info",
|
||||
"/waku/v1/peer-protocols",
|
||||
"/waku/v1/connection-status",
|
||||
"/waku/v1/execute",
|
||||
"/lightpush/v3/message"
|
||||
];
|
||||
|
||||
corsEndpoints.forEach(endpoint => {
|
||||
router.head(endpoint, (_req, res) => {
|
||||
res.status(200).end();
|
||||
});
|
||||
});
|
||||
|
||||
// Node lifecycle endpoints
|
||||
router.post("/waku/v1/create-node", createEndpointHandler({
|
||||
methodName: "createWakuNode",
|
||||
validateInput: validators.requireNetworkConfig,
|
||||
transformResult: (result) => ({
|
||||
success: result?.success || false,
|
||||
message: result?.success ? "Waku node created successfully" : "Failed to create Waku node"
|
||||
})
|
||||
}));
|
||||
|
||||
router.post("/waku/v1/start-node", createEndpointHandler({
|
||||
methodName: "startNode",
|
||||
validateInput: validators.noInput,
|
||||
transformResult: (result) => ({
|
||||
success: result?.success || false,
|
||||
message: result?.success ? "Waku node started successfully" : "Failed to start Waku node"
|
||||
})
|
||||
}));
|
||||
|
||||
router.post("/waku/v1/stop-node", createEndpointHandler({
|
||||
methodName: "stopNode",
|
||||
validateInput: validators.noInput,
|
||||
transformResult: (result) => ({
|
||||
success: result?.success || false,
|
||||
message: result?.success ? "Waku node stopped successfully" : "Failed to stop Waku node"
|
||||
})
|
||||
}));
|
||||
|
||||
// Messaging endpoints
|
||||
|
||||
// Peer management endpoints
|
||||
router.post("/waku/v1/wait-for-peers", createEndpointHandler({
|
||||
methodName: "waitForPeers",
|
||||
validateInput: (body) => [
|
||||
body.timeoutMs || 10000,
|
||||
body.protocols || ["lightpush", "filter"]
|
||||
],
|
||||
transformResult: () => ({
|
||||
success: true,
|
||||
message: "Successfully connected to peers"
|
||||
})
|
||||
}));
|
||||
|
||||
router.post("/waku/v1/dial-peers", createEndpointHandler({
|
||||
methodName: "dialPeers",
|
||||
validateInput: validators.requirePeerAddrs
|
||||
}));
|
||||
|
||||
// Information endpoints (GET)
|
||||
router.get("/waku/v1/peer-info", createEndpointHandler({
|
||||
methodName: "getPeerInfo",
|
||||
validateInput: validators.noInput
|
||||
}));
|
||||
|
||||
router.get("/waku/v1/debug-info", createEndpointHandler({
|
||||
methodName: "getDebugInfo",
|
||||
validateInput: validators.noInput
|
||||
}));
|
||||
|
||||
router.get("/waku/v1/peer-protocols", createEndpointHandler({
|
||||
methodName: "getAvailablePeerProtocols",
|
||||
validateInput: validators.noInput
|
||||
}));
|
||||
|
||||
router.get("/waku/v1/connection-status", createEndpointHandler({
|
||||
methodName: "getPeerConnectionStatus",
|
||||
validateInput: validators.noInput
|
||||
}));
|
||||
|
||||
// nwaku v3 lightpush endpoint
|
||||
router.post("/lightpush/v3/message", createEndpointHandler({
|
||||
methodName: "pushMessageV3",
|
||||
validateInput: (body: any): [string, string, string] => {
|
||||
const validatedRequest = validators.requireLightpushV3(body);
|
||||
|
||||
// For v3 API, we pass the base64 payload directly to the method
|
||||
// The WakuHeadless pushMessageV3 method will handle base64 decoding
|
||||
return [
|
||||
validatedRequest.message.contentTopic,
|
||||
validatedRequest.message.payload, // Keep as base64
|
||||
validatedRequest.pubsubTopic
|
||||
];
|
||||
},
|
||||
handleError: errorHandlers.lightpushError,
|
||||
preCheck: async () => {
|
||||
try {
|
||||
console.log("[Server] Waiting for Lightpush peers before sending message...");
|
||||
await getPage()?.evaluate(() => {
|
||||
return window.wakuApi.waitForPeers?.(10000, ["lightpush"] as any);
|
||||
});
|
||||
console.log("[Server] Found Lightpush peers");
|
||||
} catch (e) {
|
||||
console.warn("[Server] No Lightpush peers found:", e);
|
||||
}
|
||||
},
|
||||
transformResult: (result) => {
|
||||
if (result && result.successes && result.successes.length > 0) {
|
||||
console.log("[Server] Message successfully sent via v3 lightpush!");
|
||||
return {
|
||||
success: true,
|
||||
result
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
error: "Could not publish message: no suitable peers"
|
||||
};
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
|
||||
// Custom handler for the execute endpoint since it needs special logic
|
||||
router.post("/waku/v1/execute", async (req, res) => {
|
||||
try {
|
||||
const { functionName, params = [] } = req.body;
|
||||
|
||||
if (!functionName || typeof functionName !== "string") {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "functionName is required and must be a string"
|
||||
});
|
||||
}
|
||||
|
||||
const page = getPage();
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
const result = await page.evaluate(
|
||||
({ fnName, fnParams }) => {
|
||||
const api: any = (window as any).wakuApi;
|
||||
if (!api || typeof api[fnName] !== "function") {
|
||||
return { error: `Function ${fnName} not found` };
|
||||
}
|
||||
return api[fnName](...fnParams);
|
||||
},
|
||||
{ fnName: functionName, fnParams: params }
|
||||
);
|
||||
|
||||
console.log(`[execute:${functionName}] Result:`, JSON.stringify(result, null, 2));
|
||||
res.status(200).json(result);
|
||||
} catch (error: any) {
|
||||
console.error("Error executing function", error);
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not execute function: ${error.message}`
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
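`src/utils/endpoint-handler.ts` itself is not included in this excerpt; a rough, hypothetical sketch of what such a factory could look like, inferred only from how it is used in the routes above (methodName, validateInput, optional transformResult/preCheck/handleError, evaluation of `window.wakuApi[methodName]` in the headless page), is:

```typescript
// Hypothetical sketch of the endpoint-handler factory; the real implementation may differ.
import { Request, RequestHandler, Response } from "express";
import { getPage } from "../browser/index.js";

interface EndpointOptions {
  methodName: string;
  validateInput: (body: any) => any;
  transformResult?: (result: any) => any;
  preCheck?: () => Promise<void>;
  handleError?: (error: any, res: Response) => void;
}

export function createEndpointHandler(opts: EndpointOptions): RequestHandler {
  return async (req: Request, res: Response) => {
    try {
      const args = opts.validateInput(req.body ?? {});
      const page = getPage();
      if (!page) {
        return res.status(503).json({ code: 503, message: "Browser not initialized" });
      }
      if (opts.preCheck) {
        await opts.preCheck();
      }
      // Call window.wakuApi[methodName](...) inside the headless page.
      const result = await page.evaluate(
        ({ method, params }) => {
          const api: any = (window as any).wakuApi;
          const fnParams = Array.isArray(params) ? params : [params];
          return api[method](...fnParams);
        },
        { method: opts.methodName, params: args }
      );
      res.status(200).json(opts.transformResult ? opts.transformResult(result) : result);
    } catch (error: any) {
      if (opts.handleError) {
        return opts.handleError(error, res);
      }
      res.status(500).json({ code: 500, message: error.message });
    }
  };
}
```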
@ -1,507 +1,220 @@
|
||||
import { ChildProcess, exec } from "child_process";
|
||||
import * as net from "net";
|
||||
import { dirname, join } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import * as path from "path";
|
||||
|
||||
import { chromium } from "@playwright/test";
|
||||
import cors from "cors";
|
||||
import express, { Request, Response } from "express";
|
||||
|
||||
import adminRouter from "./routes/admin.js";
|
||||
import { setPage, getPage, closeBrowser } from "./browser/index.js";
|
||||
import wakuRouter from "./routes/waku.js";
|
||||
import { initBrowser, getPage, closeBrowser } from "./browser/index.js";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
const app = express();
|
||||
|
||||
app.use(cors());
|
||||
app.use(express.json());
|
||||
app.use(adminRouter);
|
||||
|
||||
let headlessServerProcess: ChildProcess | undefined;
|
||||
import * as fs from "fs";
|
||||
|
||||
interface MessageQueue {
|
||||
[contentTopic: string]: Array<{
|
||||
payload: number[] | undefined;
|
||||
contentTopic: string;
|
||||
timestamp: number;
|
||||
receivedAt: number;
|
||||
}>;
|
||||
}
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const distRoot = path.resolve(__dirname, ".."); // server.js is in dist/src/, so go up to dist/
|
||||
const webDir = path.resolve(distRoot, "web");
|
||||
console.log("Setting up static file serving:");
|
||||
console.log("__dirname:", __dirname);
|
||||
console.log("webDir:", webDir);
|
||||
console.log("Files in webDir:", fs.readdirSync(webDir));
|
||||
|
||||
const messageQueue: MessageQueue = {};
|
||||
|
||||
async function startHeadlessServer(): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
headlessServerProcess = exec(
|
||||
`serve ${join(__dirname, "../../headless-tests")} -p 8080 -s`,
|
||||
(error) => {
|
||||
if (error) {
|
||||
console.error(`Error starting serve: ${error}`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
setTimeout(resolve, 2000);
|
||||
} catch (error) {
|
||||
console.error("Failed to start headless server:", error);
|
||||
reject(error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function initBrowser(): Promise<void> {
|
||||
// Serve dynamic index.html with network configuration BEFORE static files
|
||||
app.get("/app/index.html", (_req: Request, res: Response) => {
|
||||
try {
|
||||
const browser = await chromium.launch({
|
||||
headless: true
|
||||
});
|
||||
const htmlPath = path.join(webDir, "index.html");
|
||||
let htmlContent = fs.readFileSync(htmlPath, "utf8");
|
||||
|
||||
if (!browser) {
|
||||
throw new Error("Failed to initialize browser");
|
||||
// Build network configuration from environment variables
|
||||
const networkConfig: any = {};
|
||||
if (process.env.WAKU_CLUSTER_ID) {
|
||||
networkConfig.clusterId = parseInt(process.env.WAKU_CLUSTER_ID, 10);
|
||||
}
|
||||
if (process.env.WAKU_SHARD) {
|
||||
networkConfig.shards = [parseInt(process.env.WAKU_SHARD, 10)];
|
||||
}
|
||||
|
||||
const page = await browser.newPage();
|
||||
// Inject network configuration as a global variable
|
||||
const configScript = ` <script>window.__WAKU_NETWORK_CONFIG = ${JSON.stringify(networkConfig)};</script>`;
|
||||
const originalPattern = ' <script type="module" src="./index.js"></script>';
|
||||
const replacement = `${configScript}\n <script type="module" src="./index.js"></script>`;
|
||||
|
||||
try {
|
||||
await checkServerAvailability("http://localhost:8080", 3);
|
||||
await page.goto("http://localhost:8080");
|
||||
} catch (error) {
|
||||
console.error(
|
||||
"Error loading headless app, continuing without it:",
|
||||
error
|
||||
);
|
||||
await page.setContent(`
|
||||
<html>
|
||||
<head><title>Waku Test Environment</title></head>
|
||||
<body>
|
||||
<h1>Waku Test Environment (No headless app available)</h1>
|
||||
<script>
|
||||
window.waku = {};
|
||||
window.wakuAPI = {
|
||||
getPeerInfo: () => ({ peerId: "mock-peer-id", multiaddrs: [], peers: [] }),
|
||||
getDebugInfo: () => ({ listenAddresses: [], peerId: "mock-peer-id", protocols: [] }),
|
||||
pushMessage: () => ({ successes: [], failures: [{ error: "No headless app available" }] }),
|
||||
dialPeers: () => ({ total: 0, errors: ["No headless app available"] }),
|
||||
createWakuNode: () => ({ success: true, message: "Mock node created" }),
|
||||
startNode: () => ({ success: true }),
|
||||
stopNode: () => ({ success: true }),
|
||||
subscribe: () => ({ unsubscribe: async () => {} })
|
||||
};
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
`);
|
||||
}
|
||||
htmlContent = htmlContent.replace(originalPattern, replacement);
|
||||
|
||||
setPage(page);
|
||||
res.setHeader("Content-Type", "text/html");
|
||||
res.send(htmlContent);
|
||||
} catch (error) {
|
||||
console.error("Error initializing browser:", error);
|
||||
throw error;
|
||||
console.error("Error serving dynamic index.html:", error);
|
||||
res.status(500).send("Error loading page");
|
||||
}
|
||||
}
|
||||
});
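// --- Illustrative sketch (not part of this change): how the in-page bootstrap
// could read the injected window.__WAKU_NETWORK_CONFIG before creating a node.
// The bootstrap file itself is not shown in this hunk, so the names and defaults
// here are assumptions.
export {};

declare global {
  interface Window {
    __WAKU_NETWORK_CONFIG?: { clusterId?: number; shards?: number[] };
  }
}

const injected = window.__WAKU_NETWORK_CONFIG ?? {};
const bootstrapNetworkConfig = {
  clusterId: injected.clusterId ?? 1,
  shards: injected.shards ?? [0]
};
// bootstrapNetworkConfig can then be passed to createWakuNode({ networkConfig: ... }).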
|
||||
|
||||
async function checkServerAvailability(
|
||||
url: string,
|
||||
retries = 3
|
||||
): Promise<boolean> {
|
||||
for (let i = 0; i < retries; i++) {
|
||||
try {
|
||||
const response = await fetch(url, { method: "HEAD" });
|
||||
if (response.ok) return true;
|
||||
} catch (e) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
}
|
||||
}
|
||||
throw new Error(`Server at ${url} not available after ${retries} retries`);
|
||||
}
|
||||
// Serve static files (excluding index.html which is handled above)
|
||||
app.use("/app", express.static(webDir, { index: false }));
|
||||
|
||||
async function findAvailablePort(
|
||||
startPort: number,
|
||||
maxAttempts = 10
|
||||
): Promise<number> {
|
||||
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
||||
const port = startPort + attempt;
|
||||
try {
|
||||
// Try to create a server on the port
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const server = net
|
||||
.createServer()
|
||||
.once("error", (err: any) => {
|
||||
reject(err);
|
||||
})
|
||||
.once("listening", () => {
|
||||
// If we can listen, the port is available
|
||||
server.close();
|
||||
resolve();
|
||||
})
|
||||
.listen(port);
|
||||
});
|
||||
app.use(adminRouter);
|
||||
app.use(wakuRouter);
|
||||
|
||||
// If we get here, the port is available
|
||||
return port;
|
||||
} catch (err) {
|
||||
// Port is not available, continue to next port
|
||||
}
|
||||
}
|
||||
|
||||
// If we tried all ports and none are available, throw an error
|
||||
throw new Error(
|
||||
`Unable to find an available port after ${maxAttempts} attempts`
|
||||
);
|
||||
}
|
||||
|
||||
async function startServer(port: number = 3000): Promise<void> {
|
||||
try {
|
||||
await startHeadlessServer();
|
||||
|
||||
await initBrowser();
|
||||
|
||||
await startAPI(port);
|
||||
} catch (error: any) {
|
||||
console.error("Error starting server:", error);
|
||||
}
|
||||
}
|
||||
|
||||
async function startAPI(requestedPort: number): Promise<void> {
|
||||
async function startAPI(requestedPort: number): Promise<number> {
|
||||
try {
|
||||
app.get("/", (_req: Request, res: Response) => {
|
||||
res.json({ status: "Waku simulation server is running" });
|
||||
});
|
||||
|
||||
app.get("/info", (async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await getPage()?.evaluate(() => {
|
||||
return window.wakuAPI.getPeerInfo(window.waku);
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error: any) {
|
||||
console.error("Error getting info:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
app.get("/debug/v1/info", (async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await getPage()?.evaluate(() => {
|
||||
return window.wakuAPI.getDebugInfo(window.waku);
|
||||
});
|
||||
|
||||
res.json(result);
|
||||
} catch (error: any) {
|
||||
console.error("Error getting debug info:", error);
|
||||
res.status(500).json({ error: error.message });
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
app.post("/lightpush/v1/message", (async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { message } = req.body;
|
||||
|
||||
if (!message || !message.contentTopic) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: "Invalid request. contentTopic is required."
|
||||
});
|
||||
}
|
||||
|
||||
const result = await getPage()?.evaluate(
|
||||
({ contentTopic, payload }) => {
|
||||
return window.wakuAPI.pushMessage(
|
||||
window.waku,
|
||||
contentTopic,
|
||||
payload
|
||||
);
|
||||
},
|
||||
{
|
||||
contentTopic: message.contentTopic,
|
||||
payload: message.payload
|
||||
}
|
||||
);
|
||||
|
||||
if (result) {
|
||||
res.status(200).json({
|
||||
messageId:
|
||||
"0x" +
|
||||
Buffer.from(
|
||||
message.contentTopic + Date.now().toString()
|
||||
).toString("hex")
|
||||
});
|
||||
} else {
|
||||
res.status(503).json({
|
||||
code: 503,
|
||||
message: "Could not publish message: no suitable peers"
|
||||
});
|
||||
}
|
||||
} catch (error: any) {
|
||||
|
||||
if (
|
||||
error.message.includes("size exceeds") ||
|
||||
error.message.includes("stream reset")
|
||||
) {
|
||||
res.status(503).json({
|
||||
code: 503,
|
||||
|
||||
message:
|
||||
"Could not publish message: message size exceeds gossipsub max message size"
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not publish message: ${error.message}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
app.get("/filter/v2/messages/:contentTopic", (async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const { contentTopic } = req.params;
|
||||
const { clusterId, shard } = req.query;
|
||||
|
||||
const options = {
|
||||
clusterId: clusterId ? parseInt(clusterId as string, 10) : 42, // Default to match node creation
|
||||
shard: shard ? parseInt(shard as string, 10) : 0 // Default to match node creation
|
||||
};
|
||||
|
||||
|
||||
// Set up SSE (Server-Sent Events)
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
res.setHeader("Cache-Control", "no-cache");
|
||||
res.setHeader("Connection", "keep-alive");
|
||||
|
||||
// Function to send SSE
|
||||
const sendSSE = (data: any): void => {
|
||||
res.write(`data: ${JSON.stringify(data)}\n\n`);
|
||||
};
|
||||
|
||||
// Subscribe to messages
|
||||
await getPage()?.evaluate(
|
||||
({ contentTopic, options }) => {
|
||||
// Message handler that will send messages back to the client
|
||||
const callback = (message: any): void => {
|
||||
// Post message to the browser context
|
||||
window.postMessage(
|
||||
{
|
||||
type: "WAKU_MESSAGE",
|
||||
payload: {
|
||||
payload: message.payload
|
||||
? Array.from(message.payload)
|
||||
: undefined,
|
||||
contentTopic: message.contentTopic,
|
||||
timestamp: message.timestamp
|
||||
}
|
||||
},
|
||||
"*"
|
||||
);
|
||||
};
|
||||
|
||||
return window.wakuAPI.subscribe(
|
||||
window.waku,
|
||||
contentTopic,
|
||||
options,
|
||||
callback
|
||||
);
|
||||
},
|
||||
{ contentTopic, options }
|
||||
);
|
||||
|
||||
// Set up event listener for messages from the page
|
||||
await getPage()?.exposeFunction("sendMessageToServer", (message: any) => {
|
||||
// Send the message as SSE
|
||||
sendSSE(message);
|
||||
|
||||
const topic = message.contentTopic;
|
||||
if (!messageQueue[topic]) {
|
||||
messageQueue[topic] = [];
|
||||
}
|
||||
|
||||
messageQueue[topic].push({
|
||||
...message,
|
||||
receivedAt: Date.now()
|
||||
});
|
||||
|
||||
if (messageQueue[topic].length > 1000) {
|
||||
messageQueue[topic].shift();
|
||||
}
|
||||
});
|
||||
|
||||
// Add event listener in the browser context to forward messages to the server
|
||||
await getPage()?.evaluate(() => {
|
||||
window.addEventListener("message", (event) => {
|
||||
if (event.data.type === "WAKU_MESSAGE") {
|
||||
(window as any).sendMessageToServer(event.data.payload);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
req.on("close", () => {
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error("Error in filter subscription:", error);
|
||||
res.write(`data: ${JSON.stringify({ error: error.message })}\n\n`);
|
||||
res.end();
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
app.get("/filter/v1/messages/:contentTopic", (async (
|
||||
req: Request,
|
||||
res: Response
|
||||
) => {
|
||||
try {
|
||||
const { contentTopic } = req.params;
|
||||
const {
|
||||
pageSize = "20",
|
||||
startTime,
|
||||
endTime,
|
||||
ascending = "false"
|
||||
} = req.query;
|
||||
|
||||
if (!messageQueue[contentTopic]) {
|
||||
return res.status(200).json({ messages: [] });
|
||||
}
|
||||
|
||||
const limit = parseInt(pageSize as string, 10);
|
||||
const isAscending = (ascending as string).toLowerCase() === "true";
|
||||
const timeStart = startTime ? parseInt(startTime as string, 10) : 0;
|
||||
const timeEnd = endTime ? parseInt(endTime as string, 10) : Date.now();
|
||||
|
||||
const filteredMessages = messageQueue[contentTopic]
|
||||
.filter((msg) => {
|
||||
const msgTime = msg.timestamp || msg.receivedAt;
|
||||
return msgTime >= timeStart && msgTime <= timeEnd;
|
||||
})
|
||||
.sort((a, b) => {
|
||||
const timeA = a.timestamp || a.receivedAt;
|
||||
const timeB = b.timestamp || b.receivedAt;
|
||||
return isAscending ? timeA - timeB : timeB - timeA;
|
||||
})
|
||||
.slice(0, limit);
|
||||
|
||||
|
||||
// Format response to match Waku REST API format
|
||||
const response = {
|
||||
messages: filteredMessages.map((msg) => ({
|
||||
payload: msg.payload
|
||||
? Buffer.from(msg.payload).toString("base64")
|
||||
: "",
|
||||
contentTopic: msg.contentTopic,
|
||||
timestamp: msg.timestamp,
|
||||
version: 0 // Default version
|
||||
}))
|
||||
};
|
||||
|
||||
res.status(200).json(response);
|
||||
} catch (error: any) {
|
||||
console.error("Error retrieving messages:", error);
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Failed to retrieve messages: ${error.message}`
|
||||
});
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
// Helper endpoint for executing functions (useful for testing)
|
||||
app.post("/execute", (async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { functionName, params = [] } = req.body;
|
||||
|
||||
if (functionName === "simulateMessages") {
|
||||
const [contentTopic, messages] = params;
|
||||
|
||||
if (!messageQueue[contentTopic]) {
|
||||
messageQueue[contentTopic] = [];
|
||||
}
|
||||
|
||||
// Add messages to the queue
|
||||
for (const msg of messages) {
|
||||
messageQueue[contentTopic].push({
|
||||
...msg,
|
||||
contentTopic,
|
||||
receivedAt: Date.now()
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).json({
|
||||
success: true,
|
||||
messagesAdded: messages.length
|
||||
});
|
||||
}
|
||||
|
||||
const result = await getPage()?.evaluate(
|
||||
({ fnName, fnParams }) => {
|
||||
if (!window.wakuAPI[fnName]) {
|
||||
return { error: `Function ${fnName} not found` };
|
||||
}
|
||||
return window.wakuAPI[fnName](...fnParams);
|
||||
},
|
||||
{ fnName: functionName, fnParams: params }
|
||||
);
|
||||
|
||||
res.status(200).json(result);
|
||||
} catch (error: any) {
|
||||
console.error(
|
||||
`Error executing function ${req.body.functionName}:`,
|
||||
error
|
||||
);
|
||||
res.status(500).json({
|
||||
error: error.message
|
||||
});
|
||||
}
|
||||
}) as express.RequestHandler);
|
||||
|
||||
|
||||
let actualPort: number;
|
||||
try {
|
||||
actualPort = await findAvailablePort(requestedPort);
|
||||
} catch (error) {
|
||||
console.error("Failed to find an available port:", error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
app
|
||||
.listen(actualPort, () => {
|
||||
.listen(requestedPort, () => {
|
||||
console.log(`API server running on http://localhost:${requestedPort}`);
|
||||
})
|
||||
.on("error", (error: any) => {
|
||||
if (error.code === "EADDRINUSE") {
|
||||
console.error(
|
||||
`Port ${actualPort} is already in use. Please close the application using this port and try again.`
|
||||
`Port ${requestedPort} is already in use. Please close the application using this port and try again.`,
|
||||
);
|
||||
} else {
|
||||
console.error("Error starting server:", error);
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
|
||||
return Promise.resolve();
|
||||
return requestedPort;
|
||||
} catch (error: any) {
|
||||
console.error("Error starting server:", error);
|
||||
return Promise.reject(error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
process.on("SIGINT", (async () => {
|
||||
await closeBrowser();
|
||||
async function startServer(port: number = 3000): Promise<void> {
|
||||
try {
|
||||
const actualPort = await startAPI(port);
|
||||
await initBrowser(actualPort);
|
||||
|
||||
if (headlessServerProcess && headlessServerProcess.pid) {
|
||||
try {
|
||||
process.kill(headlessServerProcess.pid);
|
||||
} catch (e) {
|
||||
// Process already stopped
|
||||
// Optional auto-create/start with consistent bootstrap approach
|
||||
const autoStart =
|
||||
process.env.AUTO_START === "1" || process.env.HEADLESS_AUTO_START === "1";
|
||||
if (autoStart) {
|
||||
try {
|
||||
console.log("Auto-starting node with CLI configuration...");
|
||||
|
||||
// Build network config from environment variables for auto-start
|
||||
const networkConfig: any = { defaultBootstrap: true };
|
||||
if (process.env.WAKU_CLUSTER_ID) {
|
||||
networkConfig.networkConfig = networkConfig.networkConfig || {};
|
||||
networkConfig.networkConfig.clusterId = parseInt(process.env.WAKU_CLUSTER_ID, 10);
|
||||
}
|
||||
if (process.env.WAKU_SHARD) {
|
||||
networkConfig.networkConfig = networkConfig.networkConfig || {};
|
||||
networkConfig.networkConfig.shards = [parseInt(process.env.WAKU_SHARD, 10)];
|
||||
}
|
||||
|
||||
await getPage()?.evaluate((config) => {
|
||||
return window.wakuApi.createWakuNode(config);
|
||||
}, networkConfig);
|
||||
await getPage()?.evaluate(() => window.wakuApi.startNode());
|
||||
|
||||
// Wait for bootstrap peers to connect
|
||||
await getPage()?.evaluate(() =>
|
||||
window.wakuApi.waitForPeers?.(5000, ["lightpush"] as any),
|
||||
);
|
||||
console.log("Auto-start completed with bootstrap peers");
|
||||
} catch (e) {
|
||||
console.warn("Auto-start failed:", e);
|
||||
}
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.error("Error starting server:", error);
|
||||
// Don't exit the process, just log the error
|
||||
// The server might still be partially functional
|
||||
}
|
||||
}
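// --- Illustrative sketch (not part of this change): the configuration the
// auto-start path above builds when AUTO_START=1, WAKU_CLUSTER_ID=1 and
// WAKU_SHARD=0 are set (environment values assumed for the example).
const autoStartConfig = {
  defaultBootstrap: true,
  networkConfig: {
    clusterId: 1,
    shards: [0]
  }
};
// It is passed to window.wakuApi.createWakuNode(autoStartConfig), followed by
// startNode() and waitForPeers(5000, ["lightpush"]).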
|
||||
|
||||
// Process error handlers to prevent container from crashing
|
||||
process.on("uncaughtException", (error) => {
|
||||
console.error("Uncaught Exception:", error);
|
||||
// Don't exit in production/container environment
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
process.on("unhandledRejection", (reason, promise) => {
|
||||
console.error("Unhandled Rejection at:", promise, "reason:", reason);
|
||||
// Don't exit in production/container environment
|
||||
if (process.env.NODE_ENV !== "production") {
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
process.on("SIGINT", (async () => {
|
||||
console.log("Received SIGINT, gracefully shutting down...");
|
||||
try {
|
||||
await closeBrowser();
|
||||
} catch (e) {
|
||||
console.warn("Error closing browser:", e);
|
||||
}
|
||||
process.exit(0);
|
||||
}) as any);
|
||||
|
||||
process.on("SIGTERM", (async () => {
|
||||
console.log("Received SIGTERM, gracefully shutting down...");
|
||||
try {
|
||||
await closeBrowser();
|
||||
} catch (e) {
|
||||
console.warn("Error closing browser:", e);
|
||||
}
|
||||
process.exit(0);
|
||||
}) as any);
|
||||
|
||||
/**
|
||||
* Parse CLI arguments for cluster and shard configuration
|
||||
*/
|
||||
function parseCliArgs() {
|
||||
const args = process.argv.slice(2);
|
||||
let clusterId: number | undefined;
|
||||
let shard: number | undefined;
|
||||
|
||||
for (const arg of args) {
|
||||
if (arg.startsWith('--cluster-id=')) {
|
||||
clusterId = parseInt(arg.split('=')[1], 10);
|
||||
if (isNaN(clusterId)) {
|
||||
console.error('Invalid cluster-id value. Must be a number.');
|
||||
process.exit(1);
|
||||
}
|
||||
} else if (arg.startsWith('--shard=')) {
|
||||
shard = parseInt(arg.split('=')[1], 10);
|
||||
if (isNaN(shard)) {
|
||||
console.error('Invalid shard value. Must be a number.');
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
}) as any);
|
||||
return { clusterId, shard };
|
||||
}
|
||||
|
||||
const isMainModule = process.argv[1] === fileURLToPath(import.meta.url);
|
||||
|
||||
if (isMainModule) {
|
||||
const port = process.env.PORT ? parseInt(process.env.PORT, 10) : 3000;
|
||||
const cliArgs = parseCliArgs();
|
||||
|
||||
// Set global configuration for CLI arguments
|
||||
if (cliArgs.clusterId !== undefined) {
|
||||
process.env.WAKU_CLUSTER_ID = cliArgs.clusterId.toString();
|
||||
console.log(`Using CLI cluster ID: ${cliArgs.clusterId}`);
|
||||
}
|
||||
if (cliArgs.shard !== undefined) {
|
||||
process.env.WAKU_SHARD = cliArgs.shard.toString();
|
||||
console.log(`Using CLI shard: ${cliArgs.shard}`);
|
||||
}
|
||||
|
||||
void startServer(port);
|
||||
}
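// --- Illustrative sketch (not part of this change): what parseCliArgs() yields
// for an invocation such as `node dist/src/server.js --cluster-id=1 --shard=0`
// (command line assumed for the example).
const parsedCliExample: { clusterId?: number; shard?: number } = {
  clusterId: 1,
  shard: 0
};
// The main-module block above mirrors these values into WAKU_CLUSTER_ID and
// WAKU_SHARD before calling startServer(port).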
|
||||
|
||||
@ -1,8 +0,0 @@
|
||||
import { readFileSync } from "fs";
|
||||
import { dirname } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
export const __dirname = dirname(__filename);
|
||||
|
||||
export const readJSON = (path) => JSON.parse(readFileSync(path, "utf-8"));
|
||||
214
packages/browser-tests/src/utils/endpoint-handler.ts
Normal file
@ -0,0 +1,214 @@
|
||||
import { Request, Response } from "express";
|
||||
import { getPage } from "../browser/index.js";
|
||||
|
||||
/**
|
||||
* nwaku v3 Lightpush API interfaces
|
||||
*/
|
||||
export interface LightpushV3Request {
|
||||
pubsubTopic: string;
|
||||
message: {
|
||||
payload: string; // base64 encoded
|
||||
contentTopic: string;
|
||||
version: number;
|
||||
};
|
||||
}
|
||||
|
||||
export interface LightpushV3Response {
|
||||
success?: boolean;
|
||||
error?: string;
|
||||
result?: {
|
||||
successes: string[]; // PeerIds converted to strings
|
||||
failures: Array<{
|
||||
error: string;
|
||||
peerId?: string;
|
||||
}>;
|
||||
[key: string]: any;
|
||||
};
|
||||
}
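// --- Illustrative sketch (not part of this change): example values for the
// request/response shapes defined above (topics and peer id are made up).
const exampleV3Request: LightpushV3Request = {
  pubsubTopic: "/waku/2/default-waku/proto",
  message: {
    payload: Buffer.from("hello").toString("base64"),
    contentTopic: "/test/1/message/proto",
    version: 1
  }
};

const exampleV3Response: LightpushV3Response = {
  success: true,
  result: { successes: ["<peer-id>"], failures: [] }
};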
|
||||
|
||||
/**
|
||||
* Configuration for an endpoint handler
|
||||
*/
|
||||
/* eslint-disable no-unused-vars */
|
||||
export interface EndpointConfig<TInput = any, TOutput = any> {
|
||||
/** Name of the method to call on window.wakuApi */
|
||||
methodName: string;
|
||||
/** Optional input validation function - takes request body, returns validated input */
|
||||
validateInput?: (requestBody: any) => TInput;
|
||||
/** Optional transformation of the result before sending response - takes SDK result, returns transformed result */
|
||||
transformResult?: (sdkResult: any) => TOutput;
|
||||
/** Optional custom error handling - takes error, returns response with code and message */
|
||||
handleError?: (caughtError: Error) => { code: number; message: string };
|
||||
/** Optional pre-execution checks */
|
||||
preCheck?: () => Promise<void> | void;
|
||||
/** Whether to log the result (default: true) */
|
||||
logResult?: boolean;
|
||||
}
|
||||
/* eslint-enable no-unused-vars */
|
||||
|
||||
/**
|
||||
* Generic endpoint handler that follows the pattern:
|
||||
* 1. Parse and validate inputs
|
||||
* 2. Call function on WakuHeadless instance via page.evaluate
|
||||
* 3. Wait for result
|
||||
* 4. Log result
|
||||
* 5. Return result or error
|
||||
*/
|
||||
export function createEndpointHandler<TInput = any, TOutput = any>(
|
||||
config: EndpointConfig<TInput, TOutput>
|
||||
) {
|
||||
return async (req: Request, res: Response) => {
|
||||
try {
|
||||
// Step 1: Parse and validate inputs
|
||||
let input: TInput;
|
||||
try {
|
||||
input = config.validateInput ? config.validateInput(req.body) : req.body;
|
||||
} catch (validationError: any) {
|
||||
return res.status(400).json({
|
||||
code: 400,
|
||||
message: `Invalid input: ${validationError.message}`
|
||||
});
|
||||
}
|
||||
|
||||
// Pre-execution checks
|
||||
if (config.preCheck) {
|
||||
try {
|
||||
await config.preCheck();
|
||||
} catch (checkError: any) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: checkError.message
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Check browser availability
|
||||
const page = getPage();
|
||||
if (!page) {
|
||||
return res.status(503).json({
|
||||
code: 503,
|
||||
message: "Browser not initialized"
|
||||
});
|
||||
}
|
||||
|
||||
// Step 2 & 3: Call function and wait for result
|
||||
const result = await page.evaluate(
|
||||
({ methodName, params }) => {
|
||||
if (!window.wakuApi) {
|
||||
throw new Error("window.wakuApi is not available");
|
||||
}
|
||||
|
||||
const method = (window.wakuApi as any)[methodName];
|
||||
if (typeof method !== "function") {
|
||||
throw new Error(`window.wakuApi.${methodName} is not a function`);
|
||||
}
|
||||
|
||||
// Handle both parameterized and parameterless methods
|
||||
if (params === null || params === undefined) {
|
||||
return method.call(window.wakuApi);
|
||||
} else if (Array.isArray(params)) {
|
||||
return method.apply(window.wakuApi, params);
|
||||
} else {
|
||||
return method.call(window.wakuApi, params);
|
||||
}
|
||||
},
|
||||
{ methodName: config.methodName, params: input }
|
||||
);
|
||||
|
||||
// Step 4: Log result
|
||||
if (config.logResult !== false) {
|
||||
console.log(`[${config.methodName}] Result:`, JSON.stringify(result, null, 2));
|
||||
}
|
||||
|
||||
// Step 5: Transform and return result
|
||||
const finalResult = config.transformResult ? config.transformResult(result) : result;
|
||||
|
||||
res.status(200).json(finalResult);
|
||||
} catch (error: any) {
|
||||
// Custom error handling
|
||||
if (config.handleError) {
|
||||
const errorResponse = config.handleError(error);
|
||||
return res.status(errorResponse.code).json({
|
||||
code: errorResponse.code,
|
||||
message: errorResponse.message
|
||||
});
|
||||
}
|
||||
|
||||
// Default error handling
|
||||
console.error(`[${config.methodName}] Error:`, error);
|
||||
res.status(500).json({
|
||||
code: 500,
|
||||
message: `Could not execute ${config.methodName}: ${error.message}`
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Common validation functions
|
||||
*/
|
||||
export const validators = {
|
||||
requireNetworkConfig: (body: any) => {
|
||||
if (!body.networkConfig || body.networkConfig.clusterId === undefined) {
|
||||
throw new Error("networkConfig.clusterId is required");
|
||||
}
|
||||
return body;
|
||||
},
|
||||
|
||||
|
||||
requireLightpushV3: (body: any): LightpushV3Request => {
|
||||
if (!body.pubsubTopic || typeof body.pubsubTopic !== "string") {
|
||||
throw new Error("pubsubTopic is required and must be a string");
|
||||
}
|
||||
if (!body.message || typeof body.message !== "object") {
|
||||
throw new Error("message is required and must be an object");
|
||||
}
|
||||
if (!body.message.contentTopic || typeof body.message.contentTopic !== "string") {
|
||||
throw new Error("message.contentTopic is required and must be a string");
|
||||
}
|
||||
if (!body.message.payload || typeof body.message.payload !== "string") {
|
||||
throw new Error("message.payload is required and must be a string (base64 encoded)");
|
||||
}
|
||||
if (body.message.version !== undefined && typeof body.message.version !== "number") {
|
||||
throw new Error("message.version must be a number if provided");
|
||||
}
|
||||
|
||||
return {
|
||||
pubsubTopic: body.pubsubTopic,
|
||||
message: {
|
||||
payload: body.message.payload,
|
||||
contentTopic: body.message.contentTopic,
|
||||
version: body.message.version || 1
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
requirePeerAddrs: (body: any) => {
|
||||
if (!Array.isArray(body.peerAddrs)) {
|
||||
throw new Error("peerAddrs must be an array");
|
||||
}
|
||||
return body.peerAddrs;
|
||||
},
|
||||
|
||||
noInput: () => null,
|
||||
|
||||
passThrough: (body: any) => body
|
||||
};
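// --- Illustrative sketch (not part of this change): registering a route with the
// generic handler above. The path and method name mirror /admin/v1/start-node and
// window.wakuApi.startNode() used elsewhere in this diff; the router wiring is an
// assumption for the example.
import { Router } from "express";

const exampleRouter = Router();

exampleRouter.post(
  "/admin/v1/start-node",
  createEndpointHandler({
    methodName: "startNode",           // resolves to window.wakuApi.startNode()
    validateInput: validators.noInput, // endpoint takes no request body
    logResult: false
  })
);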
|
||||
|
||||
/**
|
||||
* Common error handlers
|
||||
*/
|
||||
export const errorHandlers = {
|
||||
lightpushError: (error: Error) => {
|
||||
if (error.message.includes("size exceeds") || error.message.includes("stream reset")) {
|
||||
return {
|
||||
code: 503,
|
||||
message: "Could not publish message: message size exceeds gossipsub max message size"
|
||||
};
|
||||
}
|
||||
return {
|
||||
code: 500,
|
||||
message: `Could not publish message: ${error.message}`
|
||||
};
|
||||
}
|
||||
};
|
||||
364
packages/browser-tests/tests/docker-server.spec.ts
Normal file
@ -0,0 +1,364 @@
|
||||
import { test, expect } from "@playwright/test";
|
||||
import axios from "axios";
|
||||
import { GenericContainer, StartedTestContainer } from "testcontainers";
|
||||
import { createLightNode, waitForRemotePeer, LightNode, Protocols } from "@waku/sdk";
|
||||
|
||||
test.describe.configure({ mode: "serial" });
|
||||
|
||||
let container: StartedTestContainer;
|
||||
let baseUrl = "http://127.0.0.1:8080";
|
||||
let wakuNode: LightNode;
|
||||
let unsubscribe: () => void;
|
||||
|
||||
test.beforeAll(async () => {
|
||||
// Build and run the container once for the suite; reuse across tests
|
||||
const generic = new GenericContainer(
|
||||
"waku-browser-tests:local",
|
||||
).withExposedPorts(8080)
|
||||
.withEnvironment({
|
||||
"AUTO_START": "0", // Don't auto-start, let tests control initialization
|
||||
});
|
||||
|
||||
container = await generic.start();
|
||||
|
||||
console.log("Container started, waiting for initialization...");
|
||||
await new Promise((r) => setTimeout(r, 2000)); // Give container more time to start
|
||||
|
||||
// Get initial container logs for debugging
|
||||
const logs = await container.logs({ tail: 100 });
|
||||
logs.on("data", (b) => process.stdout.write("[container] " + b.toString()));
|
||||
logs.on("error", (err) => console.error("[container log error]", err));
|
||||
|
||||
const mappedPort = container.getMappedPort(8080);
|
||||
baseUrl = `http://127.0.0.1:${mappedPort}`;
|
||||
|
||||
// Probe readiness - wait for both server and browser
|
||||
let serverReady = false;
|
||||
// let browserReady = false;
|
||||
|
||||
// Wait for server to be ready with more debugging
|
||||
for (let i = 0; i < 60; i++) { // Increased attempts from 40 to 60
|
||||
try {
|
||||
const res = await axios.get(`${baseUrl}/`, { timeout: 2000 }); // Increased timeout
|
||||
if (res.status === 200) {
|
||||
console.log(`Server is ready after ${i + 1} attempts`);
|
||||
serverReady = true;
|
||||
break;
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (i % 10 === 0) { // Log every 10th attempt
|
||||
console.log(`Attempt ${i + 1}/60 failed:`, error.code || error.message);
|
||||
}
|
||||
}
|
||||
await new Promise((r) => setTimeout(r, 1000)); // Increased wait time from 500ms to 1000ms
|
||||
}
|
||||
|
||||
if (!serverReady) {
|
||||
// Get final container logs for debugging
|
||||
try {
|
||||
const finalLogs = await container.logs({ tail: 50 });
|
||||
console.log("=== Final Container Logs ===");
|
||||
finalLogs.on("data", (b) => console.log(b.toString()));
|
||||
await new Promise(r => setTimeout(r, 1000)); // Give logs time to print
|
||||
} catch (logError) {
|
||||
console.error("Failed to get container logs:", logError);
|
||||
}
|
||||
}
|
||||
|
||||
expect(serverReady).toBe(true);
|
||||
|
||||
await new Promise((r) => setTimeout(r, 500));
|
||||
});
|
||||
|
||||
test.afterAll(async () => {
|
||||
// Clean up subscription first
|
||||
try {
|
||||
if (typeof unsubscribe === 'function') {
|
||||
unsubscribe();
|
||||
console.log("Filter subscription cleaned up");
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn("Filter cleanup had issues:", (error as any).message);
|
||||
}
|
||||
|
||||
if (wakuNode) {
|
||||
console.log("Stopping Waku node...");
|
||||
try {
|
||||
await wakuNode.stop();
|
||||
console.log("Waku node stopped successfully");
|
||||
} catch (error) {
|
||||
console.warn("Waku node stop had issues:", (error as any).message);
|
||||
}
|
||||
}
|
||||
|
||||
if (container) {
|
||||
console.log("Stopping container gracefully...");
|
||||
try {
|
||||
// Give the container a chance to shut down gracefully
|
||||
await container.stop({ timeout: 10000 });
|
||||
console.log("Container stopped successfully");
|
||||
} catch (error) {
|
||||
console.warn("Container stop had issues (expected):", (error as any).message);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test("container: health endpoint", async () => {
|
||||
const res = await axios.get(`${baseUrl}/`);
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.data.status).toBe("Waku simulation server is running");
|
||||
});
|
||||
|
||||
// New focused test: validate create-node using window.wakuApi.start()
|
||||
test("container: create-node only (wakuApi.start)", async () => {
|
||||
const res = await axios.post(`${baseUrl}/admin/v1/create-node`, {
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 1,
|
||||
numShardsInCluster: 8 // Enable auto-sharding
|
||||
}
|
||||
});
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.data.success).toBe(true);
|
||||
});
|
||||
|
||||
test("container: create/start node and push", async () => {
|
||||
// Create node with required networkConfig
|
||||
await axios.post(`${baseUrl}/admin/v1/create-node`, {
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 1,
|
||||
numShardsInCluster: 8 // Enable auto-sharding
|
||||
}
|
||||
});
|
||||
await axios.post(`${baseUrl}/admin/v1/start-node`);
|
||||
|
||||
// Wait for Lightpush peers with longer timeout for real network connections
|
||||
console.log("⏳ Waiting for Lightpush peers to connect...");
|
||||
try {
|
||||
await axios.post(`${baseUrl}/waku/v1/wait-for-peers`, {
|
||||
timeoutMs: 30000,
|
||||
protocols: ["lightpush"] // 30 second timeout for real network
|
||||
});
|
||||
console.log("✅ Found Lightpush peers");
|
||||
} catch (e) {
|
||||
console.error("❌ Failed to find Lightpush peers:", e);
|
||||
throw new Error("Failed to connect to Lightpush peers - this should succeed in all environments");
|
||||
}
|
||||
|
||||
// Also wait for Filter peers
|
||||
console.log("⏳ Waiting for Filter peers to connect...");
|
||||
try {
|
||||
await axios.post(`${baseUrl}/waku/v1/wait-for-peers`, {
|
||||
timeoutMs: 30000,
|
||||
protocols: ["filter"] // 30 second timeout for real network
|
||||
});
|
||||
console.log("✅ Found Filter peers");
|
||||
} catch (e) {
|
||||
console.warn("⚠️ No Filter peers found (non-critical):", e);
|
||||
}
|
||||
|
||||
// Test lightpush endpoint - expect it to succeed with real peers
|
||||
console.log("📤 Attempting to push message to Waku network...");
|
||||
const testMessage = "Hello from Docker container test";
|
||||
const base64Payload = btoa(testMessage); // Convert to base64
|
||||
|
||||
const push = await axios.post(`${baseUrl}/lightpush/v3/message`, {
|
||||
pubsubTopic: "/waku/2/default-waku/proto",
|
||||
message: {
|
||||
contentTopic: "/test/1/message/proto",
|
||||
payload: base64Payload,
|
||||
version: 1
|
||||
},
|
||||
});
|
||||
|
||||
// Verify successful push (v3 API returns { success: boolean, result?: SDKProtocolResult })
|
||||
expect(push.status).toBe(200);
|
||||
expect(push.data).toBeDefined();
|
||||
expect(push.data.success).toBe(true);
|
||||
expect(push.data.result).toBeDefined();
|
||||
expect(push.data.result.successes).toBeDefined();
|
||||
expect(push.data.result.successes.length).toBeGreaterThan(0);
|
||||
console.log("✅ Message successfully pushed to Waku network!");
|
||||
|
||||
// Log a clean summary instead of raw JSON
|
||||
const successCount = push.data.result.successes?.length || 0;
|
||||
const failureCount = push.data.result.failures?.length || 0;
|
||||
console.log(`📊 Push Summary: ${successCount} success(es), ${failureCount} failure(s)`);
|
||||
|
||||
if (successCount > 0) {
|
||||
console.log("📤 Successfully sent to peers:");
|
||||
push.data.result.successes.forEach((peerIdString: string, index: number) => {
|
||||
console.log(` ${index + 1}. ${peerIdString}`);
|
||||
});
|
||||
}
|
||||
|
||||
if (failureCount > 0) {
|
||||
console.log("❌ Failed to send to peers:");
|
||||
push.data.result.failures.forEach((failure: { error: string; peerId?: string }, index: number) => {
|
||||
const peerInfo = failure.peerId || 'unknown peer';
|
||||
console.log(` ${index + 1}. ${peerInfo} - ${failure.error}`);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
test("cross-network message delivery: SDK light node receives server lightpush", async () => {
|
||||
const contentTopic = "/test/1/cross-network/proto";
|
||||
const pubsubTopic = "/waku/2/default-waku/proto";
|
||||
const testMessage = "Hello from SDK to Docker server test";
|
||||
|
||||
console.log("🚀 Creating SDK light node with same config as server...");
|
||||
|
||||
// Create light node with same configuration as the docker server
|
||||
wakuNode = await createLightNode({
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 1,
|
||||
numShardsInCluster: 8
|
||||
},
|
||||
libp2p: {
|
||||
filterMultiaddrs: false
|
||||
}
|
||||
});
|
||||
|
||||
await wakuNode.start();
|
||||
console.log("✅ SDK light node started");
|
||||
|
||||
// Wait for filter peer to connect
|
||||
console.log("⏳ Waiting for Filter peers to connect...");
|
||||
await waitForRemotePeer(wakuNode, [Protocols.Filter]);
|
||||
console.log("✅ Connected to Filter peers");
|
||||
|
||||
// Set up message subscription
|
||||
console.log("📡 Setting up message subscription...");
|
||||
const messages: any[] = [];
|
||||
|
||||
console.log(`🔍 Subscribing to contentTopic: "${contentTopic}" on pubsubTopic: "${pubsubTopic}"`);
|
||||
|
||||
// Create decoder that matches the server's encoder (using same pattern as server)
|
||||
const decoder = wakuNode.createDecoder({ contentTopic, pubsubTopic });
|
||||
console.log("🔧 Created decoder with pubsubTopic:", decoder.pubsubTopic);
|
||||
|
||||
// Set up message subscription and WAIT for it to be established
|
||||
try {
|
||||
unsubscribe = await wakuNode.filter.subscribe(
|
||||
[decoder],
|
||||
(message) => {
|
||||
console.log("📥 Received message via Filter!");
|
||||
console.log(`📝 Message details: topic=${message.contentTopic}, payload="${new TextDecoder().decode(message.payload)}"`);
|
||||
messages.push(message);
|
||||
}
|
||||
);
|
||||
console.log("✅ Filter subscription established successfully");
|
||||
} catch (error) {
|
||||
console.error("❌ Failed to subscribe to Filter:", error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Give extra time for subscription to propagate to network
|
||||
console.log("⏳ Waiting for subscription to propagate...");
|
||||
await new Promise(r => setTimeout(r, 2000));
|
||||
|
||||
const messagePromise = new Promise<void>((resolve) => {
|
||||
const originalLength = messages.length;
|
||||
const checkForMessage = () => {
|
||||
if (messages.length > originalLength) {
|
||||
resolve();
|
||||
} else {
|
||||
setTimeout(checkForMessage, 100);
|
||||
}
|
||||
};
|
||||
checkForMessage();
|
||||
});
|
||||
|
||||
// Create and start server node
|
||||
console.log("🔧 Creating server node...");
|
||||
await axios.post(`${baseUrl}/admin/v1/create-node`, {
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 1,
|
||||
numShardsInCluster: 8
|
||||
}
|
||||
});
|
||||
|
||||
await axios.post(`${baseUrl}/admin/v1/start-node`);
|
||||
console.log("✅ Server node created and started");
|
||||
|
||||
// CRITICAL: Wait for server node to find peers BEFORE attempting to send
|
||||
console.log("⏳ Waiting for server to connect to Lightpush peers...");
|
||||
await axios.post(`${baseUrl}/waku/v1/wait-for-peers`, {
|
||||
timeoutMs: 30000,
|
||||
protocols: ["lightpush"]
|
||||
});
|
||||
console.log("✅ Server connected to Lightpush peers");
|
||||
|
||||
console.log("⏳ Waiting for server to connect to Filter peers...");
|
||||
try {
|
||||
await axios.post(`${baseUrl}/waku/v1/wait-for-peers`, {
|
||||
timeoutMs: 30000,
|
||||
protocols: ["filter"]
|
||||
});
|
||||
console.log("✅ Server connected to Filter peers");
|
||||
} catch (e) {
|
||||
console.warn("⚠️ Server didn't connect to Filter peers:", e);
|
||||
}
|
||||
|
||||
// Give nodes extra time to discover each other and establish proper mesh connectivity
|
||||
console.log("⏳ Allowing time for nodes to discover each other...");
|
||||
await new Promise(r => setTimeout(r, 8000));
|
||||
|
||||
// Debug: Check peer information before sending
|
||||
console.log("🔍 Checking peer connections...");
|
||||
try {
|
||||
const peerInfo = await axios.get(`${baseUrl}/waku/v1/peer-info`);
|
||||
console.log(`📊 Server peer count: ${JSON.stringify(peerInfo.data)}`);
|
||||
} catch (e) {
|
||||
console.warn("⚠️ Could not get peer info:", e);
|
||||
}
|
||||
|
||||
// IMPORTANT: Verify filter is ready before sending
|
||||
console.log("🔍 Verifying filter subscription is active before sending...");
|
||||
|
||||
// Send message via server's lightpush
|
||||
console.log("📤 Sending message via server lightpush...");
|
||||
const base64Payload = btoa(testMessage);
|
||||
|
||||
const pushResponse = await axios.post(`${baseUrl}/lightpush/v3/message`, {
|
||||
pubsubTopic,
|
||||
message: {
|
||||
contentTopic,
|
||||
payload: base64Payload,
|
||||
version: 1
|
||||
}
|
||||
});
|
||||
|
||||
expect(pushResponse.status).toBe(200);
|
||||
expect(pushResponse.data.success).toBe(true);
|
||||
console.log("✅ Message sent via server lightpush");
|
||||
|
||||
// Wait for message to be received by SDK node (with longer timeout for network propagation)
|
||||
console.log("⏳ Waiting for message to be received by SDK node...");
|
||||
console.log("💡 Note: Filter messages may take time to propagate through the network...");
|
||||
|
||||
await Promise.race([
|
||||
messagePromise,
|
||||
new Promise((_, reject) =>
|
||||
setTimeout(() => {
|
||||
console.error(`❌ Timeout after 45 seconds. Messages received: ${messages.length}`);
|
||||
reject(new Error("Timeout waiting for message"));
|
||||
}, 45000)
|
||||
)
|
||||
]);
|
||||
|
||||
// Verify message was received
|
||||
expect(messages).toHaveLength(1);
|
||||
const receivedMessage = messages[0];
|
||||
expect(receivedMessage.contentTopic).toBe(contentTopic);
|
||||
|
||||
// Decode and verify payload
|
||||
const receivedPayload = new TextDecoder().decode(receivedMessage.payload);
|
||||
expect(receivedPayload).toBe(testMessage);
|
||||
|
||||
console.log("🎉 SUCCESS: Message successfully sent from server and received by SDK node!");
|
||||
console.log(`📝 Message content: "${receivedPayload}"`);
|
||||
});
|
||||
@ -1,136 +0,0 @@
|
||||
import { expect, test } from "@playwright/test";
|
||||
import { LightNode } from "@waku/sdk";
|
||||
|
||||
import { API } from "../src/api/shared.js";
|
||||
import { NETWORK_CONFIG, ACTIVE_PEERS } from "./test-config.js";
|
||||
|
||||
// Define the window interface for TypeScript
|
||||
declare global {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
interface Window {
|
||||
waku: LightNode;
|
||||
wakuAPI: typeof API;
|
||||
}
|
||||
}
|
||||
|
||||
test.describe("waku", () => {
|
||||
test.beforeEach(async ({ page }) => {
|
||||
await page.goto("");
|
||||
await page.waitForTimeout(5000);
|
||||
|
||||
// Create and initialize a fresh Waku node for each test
|
||||
const setupResult = await page.evaluate(async (config) => {
|
||||
try {
|
||||
await window.wakuAPI.createWakuNode({
|
||||
...config.defaultNodeConfig,
|
||||
networkConfig: config.networkConfig
|
||||
});
|
||||
await window.wakuAPI.startNode();
|
||||
return { success: true };
|
||||
} catch (error) {
|
||||
console.error("Failed to initialize Waku node:", error);
|
||||
return { success: false, error: String(error) };
|
||||
}
|
||||
}, NETWORK_CONFIG);
|
||||
|
||||
expect(setupResult.success).toBe(true);
|
||||
});
|
||||
|
||||
test("can get peer id", async ({ page }) => {
|
||||
const peerId = await page.evaluate(() => {
|
||||
return window.waku.libp2p.peerId.toString();
|
||||
});
|
||||
|
||||
expect(peerId).toBeDefined();
|
||||
console.log("Peer ID:", peerId);
|
||||
});
|
||||
|
||||
test("can get info", async ({ page }) => {
|
||||
const info = await page.evaluate(() => {
|
||||
return window.wakuAPI.getPeerInfo(window.waku);
|
||||
});
|
||||
|
||||
expect(info).toBeDefined();
|
||||
expect(info.peerId).toBeDefined();
|
||||
expect(info.multiaddrs).toBeDefined();
|
||||
expect(info.peers).toBeDefined();
|
||||
console.log("Info:", info);
|
||||
});
|
||||
|
||||
test("can get debug info", async ({ page }) => {
|
||||
const debug = await page.evaluate(() => {
|
||||
return window.wakuAPI.getDebugInfo(window.waku);
|
||||
});
|
||||
|
||||
expect(debug).toBeDefined();
|
||||
expect(debug.listenAddresses).toBeDefined();
|
||||
expect(debug.peerId).toBeDefined();
|
||||
expect(debug.protocols).toBeDefined();
|
||||
console.log("Debug:", debug);
|
||||
});
|
||||
|
||||
test("can dial peers", async ({ page }) => {
|
||||
const result = await page.evaluate((peerAddrs) => {
|
||||
return window.wakuAPI.dialPeers(window.waku, peerAddrs);
|
||||
}, ACTIVE_PEERS);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result.total).toBe(ACTIVE_PEERS.length);
|
||||
expect(result.errors.length >= result.total).toBe(false);
|
||||
console.log("Dial result:", result);
|
||||
});
|
||||
|
||||
test("can push a message", async ({ page }) => {
|
||||
// First dial to peers
|
||||
await page.evaluate((peersToDial) => {
|
||||
return window.wakuAPI.dialPeers(window.waku, peersToDial);
|
||||
}, ACTIVE_PEERS);
|
||||
|
||||
// Create a test message
|
||||
const contentTopic = NETWORK_CONFIG.testMessage.contentTopic;
|
||||
const payload = new TextEncoder().encode(NETWORK_CONFIG.testMessage.payload);
|
||||
const arrayPayload = Array.from(payload);
|
||||
|
||||
// Push the message
|
||||
const result = await page.evaluate(
|
||||
({ topic, data }) => {
|
||||
return window.wakuAPI.pushMessage(
|
||||
window.waku,
|
||||
topic,
|
||||
new Uint8Array(data)
|
||||
);
|
||||
},
|
||||
{ topic: contentTopic, data: arrayPayload }
|
||||
);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
console.log("Push result:", result);
|
||||
});
|
||||
|
||||
test("can recreate Waku node", async ({ page }) => {
|
||||
// Get the current node's peer ID
|
||||
const initialPeerId = await page.evaluate(() => {
|
||||
return window.waku.libp2p.peerId.toString();
|
||||
});
|
||||
|
||||
// Create a new node with different parameters
|
||||
const result = await page.evaluate(() => {
|
||||
return window.wakuAPI.createWakuNode({
|
||||
defaultBootstrap: true // Different from beforeEach
|
||||
});
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// Start the new node
|
||||
await page.evaluate(() => window.wakuAPI.startNode());
|
||||
|
||||
// Get the new peer ID
|
||||
const newPeerId = await page.evaluate(() => {
|
||||
return window.waku.libp2p.peerId.toString();
|
||||
});
|
||||
|
||||
expect(newPeerId).not.toBe(initialPeerId);
|
||||
console.log("Initial:", initialPeerId, "New:", newPeerId);
|
||||
});
|
||||
});
|
||||
@ -1,722 +1,116 @@
|
||||
import { ChildProcess, exec, spawn } from "child_process";
|
||||
import * as http from "http";
|
||||
import * as net from "net";
|
||||
import { join } from "path";
|
||||
|
||||
import { expect, test } from "@playwright/test";
|
||||
import { test, expect } from "@playwright/test";
|
||||
import axios from "axios";
|
||||
import { spawn } from "child_process";
|
||||
import { fileURLToPath } from "url";
|
||||
import { dirname, join } from "path";
|
||||
|
||||
// The default URL, but we'll update this if we detect a different port
|
||||
let API_URL = "http://localhost:3000";
|
||||
// Need this for basic node initialization that doesn't rely on /execute
|
||||
const PEERS = [
|
||||
"/dns4/waku-test.bloxy.one/tcp/8095/wss/p2p/16Uiu2HAmSZbDB7CusdRhgkD81VssRjQV5ZH13FbzCGcdnbbh6VwZ",
|
||||
"/dns4/waku.fryorcraken.xyz/tcp/8000/wss/p2p/16Uiu2HAmMRvhDHrtiHft1FTUYnn6cVA8AWVrTyLUayJJ3MWpUZDB"
|
||||
];
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
let serverProcess: ChildProcess;
|
||||
|
||||
// Force tests to run sequentially to avoid port conflicts
|
||||
// Run this entire file in serial mode to avoid port collisions
|
||||
test.describe.configure({ mode: "serial" });
|
||||
|
||||
// Helper function to check if a port is in use
|
||||
async function isPortInUse(port: number): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
const server = net
|
||||
.createServer()
|
||||
.once("error", () => {
|
||||
// Port is in use
|
||||
resolve(true);
|
||||
})
|
||||
.once("listening", () => {
|
||||
// Port is free, close server
|
||||
server.close();
|
||||
resolve(false);
|
||||
})
|
||||
.listen(port);
|
||||
});
|
||||
}
|
||||
test.describe("Server Tests", () => {
|
||||
let serverProcess: any;
|
||||
let baseUrl = "http://localhost:3000";
|
||||
|
||||
// Helper function to kill processes on port 3000
|
||||
async function killProcessOnPort(): Promise<void> {
|
||||
return new Promise<void>((resolve) => {
|
||||
// Different commands for different platforms
|
||||
const cmd =
|
||||
process.platform === "win32"
|
||||
? `netstat -ano | findstr :3000 | findstr LISTENING`
|
||||
: `lsof -i:3000 -t`;
|
||||
test.beforeAll(async () => {
|
||||
// Start the server
|
||||
const serverPath = join(__dirname, "..", "dist", "src", "server.js");
|
||||
console.log("Starting server from:", serverPath);
|
||||
|
||||
exec(cmd, (err, stdout) => {
|
||||
if (err || !stdout.trim()) {
|
||||
console.log("No process running on port 3000");
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Found processes on port 3000: ${stdout.trim()}`);
|
||||
|
||||
// Kill the process
|
||||
const killCmd =
|
||||
process.platform === "win32"
|
||||
? `FOR /F "tokens=5" %P IN ('netstat -ano ^| findstr :3000 ^| findstr LISTENING') DO taskkill /F /PID %P`
|
||||
: `kill -9 ${stdout.trim()}`;
|
||||
|
||||
exec(killCmd, (killErr) => {
|
||||
if (killErr) {
|
||||
console.error(`Error killing process: ${killErr.message}`);
|
||||
} else {
|
||||
console.log("Killed process on port 3000");
|
||||
}
|
||||
|
||||
// Wait a moment for OS to release the port
|
||||
setTimeout(resolve, 500);
|
||||
});
|
||||
serverProcess = spawn("node", [serverPath], {
|
||||
stdio: "pipe",
|
||||
env: { ...process.env, PORT: "3000" }
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to wait for the API server to be available
|
||||
async function waitForApiServer(
|
||||
maxRetries = 10,
|
||||
interval = 1000
|
||||
): Promise<boolean> {
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
try {
|
||||
const response = await axios.get(API_URL, { timeout: 2000 });
|
||||
if (response.status === 200) {
|
||||
console.log(`API server is available at ${API_URL}`);
|
||||
return true;
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(
|
||||
`API server not available at ${API_URL}, retrying (${i + 1}/${maxRetries})...`
|
||||
);
|
||||
await new Promise((resolve) => setTimeout(resolve, interval));
|
||||
}
|
||||
}
|
||||
console.warn(
|
||||
`API server at ${API_URL} not available after ${maxRetries} attempts`
|
||||
);
|
||||
return false;
|
||||
}
|
||||
// Log server output
|
||||
serverProcess.stdout?.on("data", (data: Buffer) => {
|
||||
console.log("[Server]", data.toString().trim());
|
||||
});
|
||||
|
||||
// Setup and teardown for the whole test suite
|
||||
test.beforeAll(async () => {
|
||||
// First check if port 3000 is already in use - if so, try to kill it
|
||||
const portInUse = await isPortInUse(3000);
|
||||
if (portInUse) {
|
||||
console.log(
|
||||
"Port 3000 is already in use. Attempting to kill the process..."
|
||||
);
|
||||
await killProcessOnPort();
|
||||
serverProcess.stderr?.on("data", (data: Buffer) => {
|
||||
console.error("[Server Error]", data.toString().trim());
|
||||
});
|
||||
|
||||
// Check again
|
||||
const stillInUse = await isPortInUse(3000);
|
||||
if (stillInUse) {
|
||||
console.log("Failed to free port 3000. Waiting for it to be released...");
|
||||
await new Promise((resolve) => setTimeout(resolve, 5000));
|
||||
}
|
||||
}
|
||||
// Wait for server to start
|
||||
console.log("Waiting for server to start...");
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
|
||||
// Start the server
|
||||
console.log("Starting server for tests...");
|
||||
serverProcess = spawn("node", [join(process.cwd(), "dist/server.js")], {
|
||||
stdio: "pipe",
|
||||
detached: true
|
||||
});
|
||||
|
||||
// Log server output for debugging and capture the actual port
|
||||
serverProcess.stdout?.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
console.log(`Server: ${output}`);
|
||||
|
||||
// Check if the output contains the port information
|
||||
const portMatch = output.match(
|
||||
/API server running on http:\/\/localhost:(\d+)/
|
||||
);
|
||||
if (portMatch && portMatch[1]) {
|
||||
const detectedPort = parseInt(portMatch[1], 10);
|
||||
if (detectedPort !== 3000) {
|
||||
console.log(
|
||||
`Server is running on port ${detectedPort} instead of 3000`
|
||||
);
|
||||
API_URL = `http://localhost:${detectedPort}`;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
serverProcess.stderr?.on("data", (data) => {
|
||||
console.error(`Server Error: ${data}`);
|
||||
});
|
||||
|
||||
// Wait for server to start and API to be available
|
||||
console.log("Waiting for server to start...");
|
||||
await new Promise((resolve) => setTimeout(resolve, 5000));
|
||||
|
||||
const apiAvailable = await waitForApiServer();
|
||||
if (!apiAvailable) {
|
||||
console.warn("API server is not available, tests may fail");
|
||||
}
|
||||
|
||||
if (apiAvailable) {
|
||||
// Create a node for the tests
|
||||
try {
|
||||
console.log("Creating node for tests...");
|
||||
const createNodeResponse = await axios.post(
|
||||
`${API_URL}/admin/v1/create-node`,
|
||||
{
|
||||
defaultBootstrap: false,
|
||||
networkConfig: {
|
||||
clusterId: 42,
|
||||
shards: [0]
|
||||
},
|
||||
pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
|
||||
},
|
||||
{ timeout: 10000 }
|
||||
);
|
||||
|
||||
if (createNodeResponse.status === 200) {
|
||||
console.log("Node creation response:", createNodeResponse.data);
|
||||
|
||||
// Start the node
|
||||
const startNodeResponse = await axios.post(
|
||||
`${API_URL}/admin/v1/start-node`,
|
||||
{},
|
||||
{ timeout: 5000 }
|
||||
);
|
||||
|
||||
if (startNodeResponse.status === 200) {
|
||||
console.log("Node started successfully");
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
"Failed to create/start node through API, some tests may fail:",
|
||||
error
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.warn(
|
||||
"Skipping node creation as server doesn't appear to be running"
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test.afterAll(async () => {
|
||||
// Stop the server
|
||||
console.log("Stopping server...");
|
||||
if (serverProcess && serverProcess.pid) {
|
||||
if (process.platform === "win32") {
|
||||
spawn("taskkill", ["/pid", serverProcess.pid.toString(), "/f", "/t"]);
|
||||
} else {
|
||||
// Ensure the process and all its children are terminated
|
||||
// Wait for server to be ready
|
||||
let serverReady = false;
|
||||
for (let i = 0; i < 30; i++) {
|
||||
try {
|
||||
process.kill(-serverProcess.pid, "SIGINT");
|
||||
} catch (e) {
|
||||
console.log("Server process already terminated");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Verify no processes running on port 3000
|
||||
await killProcessOnPort();
|
||||
|
||||
// Give time for all processes to terminate
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
});
|
||||
|
||||
test.describe("Waku Server API", () => {
|
||||
// Direct test of filter endpoint - this runs first
|
||||
test("can directly access filter/v1/messages endpoint", async () => {
|
||||
// Try with different content topic formats
|
||||
const testTopics = [
|
||||
"test-topic",
|
||||
"/test/topic",
|
||||
"%2Ftest%2Ftopic", // Pre-encoded
|
||||
"%2Ftest%2Ftopic" // Pre-encoded
|
||||
];
|
||||
|
||||
for (const topic of testTopics) {
|
||||
console.log(`Testing direct access with topic: ${topic}`);
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${API_URL}/filter/v1/messages/${topic}`,
|
||||
{
|
||||
timeout: 5000,
|
||||
validateStatus: () => true
|
||||
}
|
||||
);
|
||||
|
||||
console.log(` Status: ${response.status}`);
|
||||
console.log(` Content-Type: ${response.headers["content-type"]}`);
|
||||
console.log(` Data: ${JSON.stringify(response.data)}`);
|
||||
|
||||
// If this succeeds, we'll use this topic format for our tests
|
||||
if (response.status === 200) {
|
||||
console.log(` Found working topic format: ${topic}`);
|
||||
        }
      } catch (error: any) {
        console.error(`  Error with topic ${topic}:`, error.message);
        if (error.response) {
          console.error(`  Response status: ${error.response.status}`);
        }
      }
    }
  });
|
||||
|
||||
test.afterAll(async () => {
|
||||
if (serverProcess) {
|
||||
console.log("Stopping server...");
|
||||
serverProcess.kill("SIGTERM");
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
}
|
||||
});
|
||||
|
||||
// This test checks if the server is running and can serve the basic endpoints
|
||||
test("can get server status and verify endpoints", async () => {
|
||||
// Get initial server status with retry mechanism
|
||||
let initialResponse;
|
||||
for (let attempt = 0; attempt < 5; attempt++) {
|
||||
try {
|
||||
initialResponse = await axios.get(`${API_URL}/`, {
|
||||
timeout: 5000,
|
||||
validateStatus: () => true // Accept any status code
|
||||
});
|
||||
if (initialResponse.status === 200) {
|
||||
break;
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(
|
||||
`Server not responding on attempt ${attempt + 1}/5, retrying...`
|
||||
);
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
}
|
||||
}
|
||||
test("server health endpoint", async () => {
|
||||
const res = await axios.get(`${baseUrl}/`);
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.data.status).toBe("Waku simulation server is running");
|
||||
});
|
||||
|
||||
// If we still couldn't connect, skip this test
|
||||
if (!initialResponse || initialResponse.status !== 200) {
|
||||
console.warn("Server is not responding, skipping endpoint checks");
|
||||
test.skip();
|
||||
return;
|
||||
}
|
||||
test("static files are served", async () => {
|
||||
// Check if the main HTML file is accessible
|
||||
const htmlRes = await axios.get(`${baseUrl}/app/index.html`);
|
||||
expect(htmlRes.status).toBe(200);
|
||||
expect(htmlRes.data).toContain("Waku Test Environment");
|
||||
|
||||
expect(initialResponse.status).toBe(200);
|
||||
expect(initialResponse.data.status).toBe(
|
||||
"Waku simulation server is running"
|
||||
);
|
||||
|
||||
// Check if key endpoints are available
|
||||
console.log("Checking if server endpoints are properly registered...");
|
||||
// Check if the JavaScript file is accessible
|
||||
const jsRes = await axios.get(`${baseUrl}/app/index.js`);
|
||||
expect(jsRes.status).toBe(200);
|
||||
expect(jsRes.data).toContain("WakuHeadless");
|
||||
});
|
||||
|
||||
test("create and start Waku node", async () => {
|
||||
try {
|
||||
// Try to access the various endpoints with simple HEAD requests
|
||||
const endpoints = [
|
||||
"/info",
|
||||
"/debug/v1/info",
|
||||
"/admin/v1/create-node",
|
||||
"/admin/v1/start-node",
|
||||
"/admin/v1/stop-node",
|
||||
"/filter/v1/messages/test-topic",
|
||||
"/filter/v2/messages/test-topic"
|
||||
];
|
||||
|
||||
for (const endpoint of endpoints) {
|
||||
try {
|
||||
const response = await axios.head(`${API_URL}${endpoint}`, {
|
||||
validateStatus: () => true, // Accept any status code
|
||||
timeout: 3000 // Short timeout to avoid hanging
|
||||
});
|
||||
|
||||
// Some endpoints may return 404 or 405 if they only support specific methods,
|
||||
// but at least we should get a response if the route is registered
|
||||
console.log(`Endpoint ${endpoint}: Status ${response.status}`);
|
||||
|
||||
// If we get a 404, the route is not registered
|
||||
expect(response.status).not.toBe(404);
|
||||
} catch (error) {
|
||||
console.warn(`Error checking endpoint ${endpoint}:`, error.message);
|
||||
// Continue checking other endpoints even if one fails
|
||||
}
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.error("Error checking endpoints:", error.message);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
// Test node lifecycle operations using the dedicated endpoints
|
||||
test("can create, start, and stop a node", async () => {
|
||||
// 1. Create a new node
|
||||
const createResponse = await axios.post(`${API_URL}/admin/v1/create-node`, {
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 42,
|
||||
shards: [0]
|
||||
},
|
||||
pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
|
||||
});
|
||||
expect(createResponse.status).toBe(200);
|
||||
expect(createResponse.data.success).toBe(true);
|
||||
|
||||
// 2. Start the node
|
||||
const startResponse = await axios.post(`${API_URL}/admin/v1/start-node`);
|
||||
expect(startResponse.status).toBe(200);
|
||||
expect(startResponse.data.success).toBe(true);
|
||||
|
||||
// 3. Get info to verify it's running
|
||||
const infoResponse = await axios.get(`${API_URL}/info`);
|
||||
expect(infoResponse.status).toBe(200);
|
||||
expect(infoResponse.data.peerId).toBeDefined();
|
||||
console.log("Node peer ID:", infoResponse.data.peerId);
|
||||
|
||||
// 4. Stop the node
|
||||
const stopResponse = await axios.post(`${API_URL}/admin/v1/stop-node`);
|
||||
expect(stopResponse.status).toBe(200);
|
||||
expect(stopResponse.data.success).toBe(true);
|
||||
|
||||
// 5. Start it again
|
||||
const restartResponse = await axios.post(`${API_URL}/admin/v1/start-node`);
|
||||
expect(restartResponse.status).toBe(200);
|
||||
expect(restartResponse.data.success).toBe(true);
|
||||
|
||||
// 6. Verify it's running again
|
||||
const finalInfoResponse = await axios.get(`${API_URL}/info`);
|
||||
expect(finalInfoResponse.status).toBe(200);
|
||||
expect(finalInfoResponse.data.peerId).toBeDefined();
|
||||
});
|
||||
|
||||
// This test requires a running node, which we now can properly initialize with our new endpoints
|
||||
test("can connect to peers and get node info", async () => {
|
||||
// Create and start a fresh node
|
||||
await axios.post(`${API_URL}/admin/v1/create-node`, {
|
||||
defaultBootstrap: false,
|
||||
networkConfig: {
|
||||
clusterId: 42,
|
||||
shards: [0]
|
||||
},
|
||||
pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
|
||||
});
|
||||
await axios.post(`${API_URL}/admin/v1/start-node`);
|
||||
|
||||
// Connect to peers
|
||||
const dialResponse = await axios.post(`${API_URL}/admin/v1/peers`, {
|
||||
peerMultiaddrs: PEERS
|
||||
});
|
||||
|
||||
expect(dialResponse.status).toBe(200);
|
||||
console.log("Peer connection response:", dialResponse.data);
|
||||
|
||||
// Get debug info now that we have a properly initialized node
|
||||
const debugResponse = await axios.get(`${API_URL}/debug/v1/info`);
|
||||
expect(debugResponse.status).toBe(200);
|
||||
expect(debugResponse.data).toBeDefined();
|
||||
|
||||
// Log protocols available
|
||||
if (debugResponse.data.protocols) {
|
||||
const wakuProtocols = debugResponse.data.protocols.filter((p: string) =>
|
||||
p.includes("/waku/")
|
||||
);
|
||||
console.log("Waku protocols:", wakuProtocols);
|
||||
}
|
||||
});
|
||||
|
||||
test("can push messages", async () => {
|
||||
// Create and start a fresh node
|
||||
await axios.post(`${API_URL}/admin/v1/create-node`, {
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 42,
|
||||
shards: [0]
|
||||
},
|
||||
pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
|
||||
});
|
||||
await axios.post(`${API_URL}/admin/v1/start-node`);
|
||||
|
||||
// Connect to peers
|
||||
await axios.post(`${API_URL}/admin/v1/peers`, {
|
||||
peerMultiaddrs: PEERS
|
||||
});
|
||||
|
||||
// Test the REST API format push endpoint
|
||||
try {
|
||||
const restPushResponse = await axios.post(
|
||||
`${API_URL}/lightpush/v1/message`,
|
||||
{
|
||||
pubsubTopic: "/waku/2/default-waku/proto",
|
||||
message: {
|
||||
contentTopic: "/test/1/message/proto",
|
||||
payload: Array.from(
|
||||
new TextEncoder().encode("Test message via REST endpoint")
|
||||
)
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
expect(restPushResponse.status).toBe(200);
|
||||
expect(restPushResponse.data.messageId).toBeDefined();
|
||||
console.log("Message ID:", restPushResponse.data.messageId);
|
||||
} catch (error) {
|
||||
console.log("REST push might fail if no peers connected:", error);
|
||||
}
|
||||
});
|
||||
|
||||
test("can retrieve messages from the queue", async () => {
|
||||
// Create and start a fresh node
|
||||
await axios.post(`${API_URL}/admin/v1/create-node`, {
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 42,
|
||||
shards: [0]
|
||||
},
|
||||
pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
|
||||
});
|
||||
await axios.post(`${API_URL}/admin/v1/start-node`);
|
||||
|
||||
// Connect to peers
|
||||
await axios.post(`${API_URL}/admin/v1/peers`, {
|
||||
peerMultiaddrs: PEERS
|
||||
});
|
||||
|
||||
// Use a simple content topic to avoid encoding issues
|
||||
const contentTopic = "test-queue";
|
||||
|
||||
try {
|
||||
// Check endpoint existence by checking available routes
|
||||
console.log("Checking server routes and status...");
|
||||
const rootResponse = await axios.get(`${API_URL}/`);
|
||||
console.log(
|
||||
"Server root response:",
|
||||
rootResponse.status,
|
||||
rootResponse.data
|
||||
);
|
||||
|
||||
// First ensure the queue is empty
|
||||
console.log(`Attempting to get messages from ${contentTopic}...`);
|
||||
const emptyQueueResponse = await axios.get(
|
||||
`${API_URL}/filter/v1/messages/${contentTopic}`
|
||||
);
|
||||
expect(emptyQueueResponse.status).toBe(200);
|
||||
expect(emptyQueueResponse.data.messages).toEqual([]);
|
||||
} catch (error: any) {
|
||||
console.error("Error accessing filter endpoint:", error.message);
|
||||
if (error.response) {
|
||||
console.error("Response status:", error.response.status);
|
||||
console.error("Response data:", error.response.data);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Simulate adding messages to the queue
|
||||
const messages = [
|
||||
{
|
||||
payload: Array.from(new TextEncoder().encode("Message 1")),
|
||||
timestamp: Date.now() - 2000,
|
||||
contentTopic
|
||||
},
|
||||
{
|
||||
payload: Array.from(new TextEncoder().encode("Message 2")),
|
||||
timestamp: Date.now() - 1000,
|
||||
contentTopic
|
||||
},
|
||||
{
|
||||
payload: Array.from(new TextEncoder().encode("Message 3")),
|
||||
timestamp: Date.now(),
|
||||
contentTopic
|
||||
}
|
||||
];
|
||||
|
||||
const testMessages = await axios.post(`${API_URL}/execute`, {
|
||||
functionName: "simulateMessages",
|
||||
params: [contentTopic, messages]
|
||||
});
|
||||
expect(testMessages.status).toBe(200);
|
||||
|
||||
// Now check if we can retrieve messages
|
||||
const messagesResponse = await axios.get(
|
||||
`${API_URL}/filter/v1/messages/${contentTopic}`
|
||||
);
|
||||
expect(messagesResponse.status).toBe(200);
|
||||
expect(messagesResponse.data.messages.length).toBe(3);
|
||||
|
||||
// Verify message format
|
||||
const message = messagesResponse.data.messages[0];
|
||||
expect(message).toHaveProperty("payload");
|
||||
expect(message).toHaveProperty("contentTopic");
|
||||
expect(message).toHaveProperty("timestamp");
|
||||
expect(message).toHaveProperty("version");
|
||||
|
||||
// Test pagination
|
||||
const paginatedResponse = await axios.get(
|
||||
`${API_URL}/filter/v1/messages/${contentTopic}?pageSize=2`
|
||||
);
|
||||
expect(paginatedResponse.status).toBe(200);
|
||||
expect(paginatedResponse.data.messages.length).toBe(2);
|
||||
|
||||
// Test sorting order
|
||||
const ascendingResponse = await axios.get(
|
||||
`${API_URL}/filter/v1/messages/${contentTopic}?ascending=true`
|
||||
);
|
||||
expect(ascendingResponse.status).toBe(200);
|
||||
expect(ascendingResponse.data.messages.length).toBe(3);
|
||||
const timestamps = ascendingResponse.data.messages.map(
|
||||
(msg: any) => msg.timestamp
|
||||
);
|
||||
expect(timestamps[0]).toBeLessThan(timestamps[1]);
|
||||
expect(timestamps[1]).toBeLessThan(timestamps[2]);
|
||||
});
|
||||
|
||||
test("can access filter endpoint for SSE", async () => {
|
||||
// Create and start a fresh node - only if API is accessible
|
||||
try {
|
||||
// Quick check if server is running
|
||||
await axios.get(API_URL, { timeout: 2000 });
|
||||
|
||||
// Create node
|
||||
await axios.post(`${API_URL}/admin/v1/create-node`, {
|
||||
|
||||
defaultBootstrap: true,
|
||||
networkConfig: {
|
||||
clusterId: 42,
|
||||
shards: [0]
|
||||
},
|
||||
pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
|
||||
});
|
||||
|
||||
// Start node
|
||||
await axios.post(`${API_URL}/admin/v1/start-node`);
|
||||
|
||||
// Connect to peers
|
||||
await axios.post(`${API_URL}/admin/v1/peers`, {
|
||||
peerMultiaddrs: PEERS
|
||||
});
|
||||
} catch (error) {
|
||||
console.warn("Server appears to be unreachable, skipping test");
|
||||
test.skip();
|
||||
return;
|
||||
}
|
||||
|
||||
const contentTopic = "test-sse";
|
||||
|
||||
// Verify filter endpoint is accessible
|
||||
// Instead of implementing a full SSE client, we'll make sure the endpoint
|
||||
// returns the correct headers and status code which indicates SSE readiness
|
||||
try {
|
||||
const sseResponse = await axios
|
||||
.get(
|
||||
`${API_URL}/filter/v2/messages/${contentTopic}?clusterId=42&shard=0`,
|
||||
{
|
||||
// Set a timeout to avoid hanging the test
|
||||
timeout: 2000,
|
||||
// Expecting the request to timeout as SSE keeps connection open
|
||||
validateStatus: () => true,
|
||||
// We can't use responseType: 'stream' directly with axios,
|
||||
// but we can check the response headers
|
||||
headers: {
|
||||
Accept: "text/event-stream"
|
||||
}
|
||||
}
|
||||
)
|
||||
.catch((e) => {
|
||||
// We expect a timeout error since SSE keeps connection open
|
||||
if (e.code === "ECONNABORTED") {
|
||||
return e.response;
|
||||
}
|
||||
throw e;
|
||||
});
|
||||
|
||||
// If response exists and has expected SSE headers, the test passes
|
||||
if (sseResponse) {
|
||||
expect(sseResponse.headers["content-type"]).toBe("text/event-stream");
|
||||
expect(sseResponse.headers["cache-control"]).toBe("no-cache");
|
||||
expect(sseResponse.headers["connection"]).toBe("keep-alive");
|
||||
} else {
|
||||
// If no response, we manually make an HTTP request to check the headers
|
||||
const headers = await new Promise<Record<string, string>>((resolve) => {
|
||||
const requestUrl = new URL(
|
||||
`${API_URL}/filter/v2/messages/${contentTopic}?clusterId=42&shard=0`
|
||||
);
|
||||
const req = http.get(requestUrl, (res) => {
|
||||
// Only interested in headers
|
||||
req.destroy();
|
||||
if (res.headers) {
|
||||
resolve(res.headers as Record<string, string>);
|
||||
} else {
|
||||
resolve({});
|
||||
}
|
||||
});
|
||||
req.on("error", () => resolve({}));
|
||||
});
|
||||
|
||||
if (Object.keys(headers).length === 0) {
|
||||
console.warn(
|
||||
"No headers received, SSE endpoint may not be accessible"
|
||||
);
|
||||
test.skip();
|
||||
return;
|
||||
|
||||
}
|
||||
|
||||
expect(headers["content-type"]).toBe("text/event-stream");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error during SSE endpoint test:", error);
|
||||
test.fail();
|
||||
return;
|
||||
}
|
||||
|
||||
console.log("SSE endpoint is accessible with correct headers");
|
||||
});
|
||||
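  // Illustrative sketch (not part of the original spec): a minimal SSE consumer for the
  // filter/v2 endpoint, built on the `http` module this spec already imports. The URL and
  // query parameters mirror the header check above; something like this would only be
  // needed if a future test asserts on streamed events rather than on response headers.
  function collectSseEvents(
    url: string,
    onEvent: (data: string) => void
  ): () => void {
    const req = http.get(url, { headers: { Accept: "text/event-stream" } }, (res) => {
      res.setEncoding("utf8");
      res.on("data", (chunk: string) => {
        // SSE frames are newline-delimited; payload lines start with "data:"
        for (const line of chunk.split("\n")) {
          if (line.startsWith("data:")) onEvent(line.slice(5).trim());
        }
      });
    });
    // The caller invokes the returned function to stop listening
    return () => req.destroy();
  }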
|
||||
// Add a specific test just for the filter/v1/messages endpoint
|
||||
test("can access filter/v1/messages endpoint directly", async () => {
|
||||
// Check if server is available first
|
||||
try {
|
||||
await axios.get(API_URL, { timeout: 2000 });
|
||||
} catch (error) {
|
||||
console.warn("Server appears to be unreachable, skipping test");
|
||||
test.skip();
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a random content topic just for this test
|
||||
const contentTopic = `direct-filter-${Date.now()}`;
|
||||
|
||||
try {
|
||||
// Try different approaches to access the endpoint
|
||||
console.log(
|
||||
`Testing direct access to filter/v1/messages/${contentTopic}`
|
||||
);
|
||||
|
||||
// Method 1: GET request with encoded content topic
|
||||
const getResponse = await axios({
|
||||
method: "get",
|
||||
url: `${API_URL}/filter/v1/messages/${contentTopic}`,
|
||||
validateStatus: function () {
|
||||
// Allow any status code to check what's coming back
|
||||
return true;
|
||||
},
|
||||
timeout: 5000
|
||||
});
|
||||
      console.log("Response status:", getResponse.status);
      console.log("Response headers:", getResponse.headers);

      if (getResponse.status === 404) {
        throw new Error(
          `Endpoint not found (404): /filter/v1/messages/${contentTopic}`
        );
      }

      // If we got here, the endpoint exists even if it returns empty results
      expect(getResponse.status).toBe(200);
      expect(getResponse.data).toHaveProperty("messages");
      expect(Array.isArray(getResponse.data.messages)).toBe(true);
    } catch (error: any) {
      console.error("Error during filter/v1 endpoint test:", error.message);

      if (error.response) {
        console.error("Response status:", error.response.status);
        console.error("Response headers:", error.response.headers);
        console.error("Response data:", error.response.data);
      } else if (error.request) {
        console.error("No response received:", error.request);
        // If no response, we'll skip the test rather than fail it
        test.skip();
        return;
      }

      throw error;
    }
  });

  test("server health endpoint", async () => {
    const res = await axios.get(`${baseUrl}/`);
    expect(res.status).toBe(200);
    expect(res.data.status).toBe("Waku simulation server is running");
  });

  test("static files are served", async () => {
    // Check if the main HTML file is accessible
    const htmlRes = await axios.get(`${baseUrl}/app/index.html`);
    expect(htmlRes.status).toBe(200);
    expect(htmlRes.data).toContain("Waku Test Environment");

    // Check if the JavaScript file is accessible
    const jsRes = await axios.get(`${baseUrl}/app/index.js`);
    expect(jsRes.status).toBe(200);
    expect(jsRes.data).toContain("WakuHeadless");
  });

  test("create and start Waku node", async () => {
    try {
      // Create a Waku node
      const createRes = await axios.post(`${baseUrl}/admin/v1/create-node`, {
        defaultBootstrap: true,
        networkConfig: {
          clusterId: 42,
          shards: [0]
        }
      });
      expect(createRes.status).toBe(200);
      expect(createRes.data.success).toBe(true);

      // Start the node
      const startRes = await axios.post(`${baseUrl}/admin/v1/start-node`);
      expect(startRes.status).toBe(200);
      expect(startRes.data.success).toBe(true);

      // Now the peer info endpoint should work
      const infoRes = await axios.get(`${baseUrl}/waku/v1/peer-info`);
      expect(infoRes.status).toBe(200);
      expect(infoRes.data.peerId).toBeDefined();
      expect(infoRes.data.multiaddrs).toBeDefined();
    } catch (error: any) {
      // If browser initialization failed, this test will fail - that's expected
      console.log(
        "Waku node test failed (expected if browser not initialized):",
        error.response?.data?.error || error.message
      );
      // Validation error due to missing required networkConfig field results in 400
      expect(error.response?.status).toBe(400);
    }
  });
|
||||
});
|
||||
|
||||
@ -1,35 +0,0 @@
|
||||
export const NETWORK_CONFIG = {
|
||||
"waku.sandbox": {
|
||||
peers: [
|
||||
"/dns4/node-01.do-ams3.waku.sandbox.status.im/tcp/30303/p2p/16Uiu2HAmNaeL4p3WEYzC9mgXBmBWSgWjPHRvatZTXnp8Jgv3iKsb",
|
||||
"/dns4/node-01.gc-us-central1-a.waku.sandbox.status.im/tcp/30303/p2p/16Uiu2HAmRv1iQ3NoMMcjbtRmKxPuYBbF9nLYz2SDv9MTN8WhGuUU",
|
||||
"/dns4/node-01.ac-cn-hongkong-c.waku.sandbox.status.im/tcp/30303/p2p/16Uiu2HAmQYiojgZ8APsh9wqbWNyCstVhnp9gbeNrxSEQnLJchC92"
|
||||
]
|
||||
},
|
||||
|
||||
"waku.test": {
|
||||
peers: [
|
||||
"/dns4/node-01.do-ams3.waku.test.status.im/tcp/8000/wss/p2p/16Uiu2HAkykgaECHswi3YKJ5dMLbq2kPVCo89fcyTd38UcQD6ej5W",
|
||||
"/dns4/node-01.gc-us-central1-a.waku.test.status.im/tcp/8000/wss/p2p/16Uiu2HAmDCp8XJ9z1ev18zuv8NHekAsjNyezAvmMfFEJkiharitG",
|
||||
"/dns4/node-01.ac-cn-hongkong-c.waku.test.status.im/tcp/8000/wss/p2p/16Uiu2HAkzHaTP5JsUwfR9NR8Rj9HC24puS6ocaU8wze4QrXr9iXp"
|
||||
]
|
||||
},
|
||||
|
||||
networkConfig: {
|
||||
clusterId: 1,
|
||||
shards: [0]
|
||||
},
|
||||
|
||||
// Default node configuration
|
||||
defaultNodeConfig: {
|
||||
defaultBootstrap: false
|
||||
},
|
||||
|
||||
// Test message configuration
|
||||
testMessage: {
|
||||
contentTopic: "/test/1/message/proto",
|
||||
payload: "Hello, Waku!"
|
||||
}
|
||||
};
|
||||
|
||||
export const ACTIVE_PEERS = NETWORK_CONFIG["waku.test"].peers;
|
||||
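For reference, a minimal sketch of how the removed `ACTIVE_PEERS` list was typically fed to the admin peers endpoint exercised in the specs above; `API_URL` and the import path are assumptions for illustration, not taken from the removed file.

```typescript
import axios from "axios";
import { ACTIVE_PEERS } from "./constants.js"; // hypothetical path to the removed module

const API_URL = "http://localhost:3000"; // assumed server address, as in the specs

// Dial the static fleet peers through the same admin endpoint the specs above use.
await axios.post(`${API_URL}/admin/v1/peers`, {
  peerMultiaddrs: ACTIVE_PEERS
});
```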
@ -15,5 +15,5 @@
|
||||
"typeRoots": ["./node_modules/@types", "./types"]
|
||||
},
|
||||
"include": ["src/server.ts", "types/**/*.d.ts"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
"exclude": ["node_modules", "dist", "web"]
|
||||
}
|
||||
|
||||
23
packages/browser-tests/types/global.d.ts
vendored
@ -1,27 +1,8 @@
|
||||
import { LightNode } from "@waku/sdk";
|
||||
import { IWakuNode } from "../src/api/common.js";
|
||||
import {
|
||||
createWakuNode,
|
||||
dialPeers,
|
||||
getDebugInfo,
|
||||
getPeerInfo,
|
||||
pushMessage,
|
||||
subscribe
|
||||
} from "../src/api/shared.js";
|
||||
import type { WakuHeadless } from "../web/index.js";
|
||||
|
||||
// Define types for the Waku node and window
|
||||
declare global {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
interface Window {
|
||||
waku: IWakuNode & LightNode;
|
||||
wakuAPI: {
|
||||
getPeerInfo: typeof getPeerInfo;
|
||||
getDebugInfo: typeof getDebugInfo;
|
||||
pushMessage: typeof pushMessage;
|
||||
dialPeers: typeof dialPeers;
|
||||
createWakuNode: typeof createWakuNode;
|
||||
subscribe: typeof subscribe;
|
||||
[key: string]: any;
|
||||
};
|
||||
wakuApi: WakuHeadless;
|
||||
}
|
||||
}
|
||||
|
||||
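The `Window` typings above are what Playwright specs rely on when driving the page. The following is a minimal sketch of that flow, assuming the server serves the built page at `http://localhost:3000/app/index.html` (as the specs above do); the endpoint, payload, and assertions are illustrative rather than copied from the actual spec files.

```typescript
import { expect, test } from "@playwright/test";

test("drive the in-page Waku API", async ({ page }) => {
  await page.goto("http://localhost:3000/app/index.html"); // assumed server URL

  const result = await page.evaluate(async () => {
    // window.wakuApi is the WakuHeadless singleton declared in global.d.ts
    await window.wakuApi.createWakuNode({ defaultBootstrap: true });
    await window.wakuApi.startNode();
    // pushMessage expects a base64 payload and decodes it before sending
    return window.wakuApi.pushMessage("/test/1/message/proto", btoa("hello"));
  });

  expect(Array.isArray(result.successes)).toBe(true);
});
```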
14
packages/browser-tests/web/index.html
Normal file
@ -0,0 +1,14 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>Waku Test Environment</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Waku Test Environment</h1>
|
||||
<script type="module" src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
539
packages/browser-tests/web/index.ts
Normal file
@ -0,0 +1,539 @@
|
||||
// @ts-nocheck
|
||||
import {
|
||||
createLightNode,
|
||||
LightNode,
|
||||
Protocols,
|
||||
NetworkConfig,
|
||||
SDKProtocolResult,
|
||||
CreateNodeOptions,
|
||||
} from "@waku/sdk";
|
||||
import type { PeerId } from "@libp2p/interface";
|
||||
|
||||
/**
|
||||
* Enhanced SDKProtocolResult with serializable peer IDs for browser/Node.js communication
|
||||
*/
|
||||
export interface SerializableSDKProtocolResult {
|
||||
successes: string[]; // Converted PeerId objects to strings
|
||||
failures: Array<{
|
||||
error: string;
|
||||
peerId?: string; // Converted PeerId to string if available
|
||||
}>;
|
||||
[key: string]: any; // Allow for other SDK result properties
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert SDKProtocolResult to a serializable format for browser/Node.js communication
|
||||
*/
|
||||
function makeSerializable(result: SDKProtocolResult): SerializableSDKProtocolResult {
|
||||
return {
|
||||
...result,
|
||||
successes: result.successes.map((peerId: PeerId) => peerId.toString()),
|
||||
failures: result.failures.map((failure: any) => ({
|
||||
error: failure.error || failure.toString(),
|
||||
peerId: failure.peerId ? failure.peerId.toString() : undefined
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
export class WakuHeadless {
|
||||
waku: LightNode | null;
|
||||
networkConfig: NetworkConfig;
|
||||
constructor(networkConfig?: Partial<NetworkConfig>) {
|
||||
this.waku = null as unknown as LightNode;
|
||||
// Use provided config or defaults
|
||||
this.networkConfig = this.buildNetworkConfig(networkConfig);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build network configuration from provided config or defaults
|
||||
*/
|
||||
private buildNetworkConfig(providedConfig?: Partial<NetworkConfig>): NetworkConfig {
|
||||
// Default configuration
|
||||
let config: NetworkConfig = {
|
||||
clusterId: 1,
|
||||
numShardsInCluster: 8 // Enable auto-sharding by default
|
||||
};
|
||||
|
||||
// Apply provided configuration
|
||||
if (providedConfig) {
|
||||
config.clusterId = providedConfig.clusterId ?? config.clusterId;
|
||||
|
||||
// If specific shards are provided, use static sharding
|
||||
if (providedConfig.shards && providedConfig.shards.length > 0) {
|
||||
config.shards = providedConfig.shards;
|
||||
delete config.numShardsInCluster; // Remove auto-sharding when using static shards
|
||||
console.log(`Using static sharding with shard(s) ${providedConfig.shards.join(', ')} on cluster ${config.clusterId}`);
|
||||
} else if (providedConfig.numShardsInCluster) {
|
||||
config.numShardsInCluster = providedConfig.numShardsInCluster;
|
||||
console.log(`Using auto-sharding with ${config.numShardsInCluster} shards on cluster ${config.clusterId}`);
|
||||
} else {
|
||||
console.log(`Using auto-sharding with ${config.numShardsInCluster} shards on cluster ${config.clusterId}`);
|
||||
}
|
||||
} else {
|
||||
console.log(`Using default auto-sharding with ${config.numShardsInCluster} shards on cluster ${config.clusterId}`);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
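  // Illustrative note (not in the original source): given { clusterId: 42, shards: [0] }
  // this method returns the static-sharding shape { clusterId: 42, shards: [0] };
  // with no shards provided it keeps the auto-sharding default
  // { clusterId: 1, numShardsInCluster: 8 }.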
|
||||
/**
|
||||
* Create and start a Waku light node with default bootstrap
|
||||
* Optionally override the network config
|
||||
* @param networkConfig
|
||||
*/
|
||||
async start() {
|
||||
this.waku = await createLightNode({
|
||||
defaultBootstrap: true,
|
||||
networkConfig: this.networkConfig,
|
||||
});
|
||||
await this.waku?.start();
|
||||
}
|
||||
|
||||
async pushMessage(
|
||||
contentTopic: string,
|
||||
payload: string,
|
||||
): Promise<SerializableSDKProtocolResult> {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
// Ensure payload is properly formatted
|
||||
let processedPayload: Uint8Array;
|
||||
// If it's a string, try to decode as base64 first
|
||||
try {
|
||||
// Use atob to decode base64 (browser-compatible)
|
||||
const binaryString = atob(payload);
|
||||
const bytes = new Uint8Array(binaryString.length);
|
||||
for (let i = 0; i < binaryString.length; i++) {
|
||||
bytes[i] = binaryString.charCodeAt(i);
|
||||
}
|
||||
processedPayload = bytes;
|
||||
} catch (e) {
|
||||
// If base64 decoding fails, encode as UTF-8
|
||||
processedPayload = new TextEncoder().encode(payload);
|
||||
}
|
||||
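      // Example (illustrative, not in the original source): "aGVsbG8=" decodes via atob()
      // to the bytes of "hello"; a string with non-base64 characters such as "hello, waku!"
      // makes atob() throw, so the fallback above encodes it as UTF-8 instead.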
|
||||
try {
|
||||
const lightPush = this.waku.lightPush;
|
||||
if (!lightPush) {
|
||||
throw new Error("Lightpush service not available");
|
||||
}
|
||||
|
||||
console.log(`Preparing to send message with contentTopic: ${contentTopic}`);
|
||||
console.log(`Using network config:`, this.networkConfig);
|
||||
|
||||
// Use the WakuNode's createEncoder method which handles auto-sharding properly
|
||||
const encoder = this.waku.createEncoder({ contentTopic });
|
||||
|
||||
console.log("Encoder created with pubsubTopic:", encoder.pubsubTopic);
|
||||
// Send the message using lightpush
|
||||
const result = await lightPush.send(encoder, {
|
||||
payload: processedPayload,
|
||||
timestamp: new Date(),
|
||||
});
|
||||
|
||||
// Convert to serializable format for cross-context communication
|
||||
const serializableResult = makeSerializable(result);
|
||||
|
||||
// Log a cleaner representation of the lightpush result
|
||||
if (serializableResult.successes && serializableResult.successes.length > 0) {
|
||||
console.log(`✅ Message sent successfully to ${serializableResult.successes.length} peer(s):`);
|
||||
|
||||
// Get current connected peers for better identification
|
||||
const connectedPeers = this.waku.libp2p.getPeers();
|
||||
|
||||
serializableResult.successes.forEach((peerIdString: string, index: number) => {
|
||||
console.log(` ${index + 1}. ${peerIdString}`);
|
||||
});
|
||||
|
||||
// Show connected peer count for context
|
||||
if (connectedPeers.length > 0) {
|
||||
console.log(`📡 Connected to ${connectedPeers.length} total peer(s)`);
|
||||
}
|
||||
|
||||
if (serializableResult.failures && serializableResult.failures.length > 0) {
|
||||
console.log(`❌ Failed to send to ${serializableResult.failures.length} peer(s)`);
|
||||
}
|
||||
} else {
|
||||
console.log("Message send result:", serializableResult);
|
||||
}
|
||||
return serializableResult;
|
||||
} catch (error) {
|
||||
console.error("Error sending message via lightpush:", error);
|
||||
throw new Error(
|
||||
`Failed to send message: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async pushMessageV3(
|
||||
contentTopic: string,
|
||||
payload: string,
|
||||
pubsubTopic: string,
|
||||
): Promise<SerializableSDKProtocolResult> {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
// Ensure payload is properly formatted
|
||||
let processedPayload: Uint8Array;
|
||||
// If it's a string, try to decode as base64 first
|
||||
try {
|
||||
// Use atob to decode base64 (browser-compatible)
|
||||
const binaryString = atob(payload);
|
||||
const bytes = new Uint8Array(binaryString.length);
|
||||
for (let i = 0; i < binaryString.length; i++) {
|
||||
bytes[i] = binaryString.charCodeAt(i);
|
||||
}
|
||||
processedPayload = bytes;
|
||||
} catch (e) {
|
||||
// If base64 decoding fails, encode as UTF-8
|
||||
processedPayload = new TextEncoder().encode(payload);
|
||||
}
|
||||
|
||||
try {
|
||||
const lightPush = this.waku.lightPush;
|
||||
if (!lightPush) {
|
||||
throw new Error("Lightpush service not available");
|
||||
}
|
||||
|
||||
console.log(`Preparing to send message with contentTopic: ${contentTopic}, pubsubTopic: ${pubsubTopic}`);
|
||||
console.log(`Using network config:`, this.networkConfig);
|
||||
|
||||
// Create encoder with explicit pubsubTopic for v3 API compatibility
|
||||
const encoder = this.waku.createEncoder({ contentTopic, pubsubTopic });
|
||||
|
||||
console.log("Encoder created with pubsubTopic:", encoder.pubsubTopic);
|
||||
// Send the message using lightpush
|
||||
const result = await lightPush.send(encoder, {
|
||||
payload: processedPayload,
|
||||
timestamp: new Date(),
|
||||
});
|
||||
|
||||
// Convert to serializable format for cross-context communication
|
||||
const serializableResult = makeSerializable(result);
|
||||
|
||||
// Log a cleaner representation of the lightpush result
|
||||
if (serializableResult.successes && serializableResult.successes.length > 0) {
|
||||
console.log(`✅ v3 Message sent successfully to ${serializableResult.successes.length} peer(s):`);
|
||||
|
||||
// Get current connected peers for better identification
|
||||
const connectedPeers = this.waku.libp2p.getPeers();
|
||||
|
||||
serializableResult.successes.forEach((peerIdString: string, index: number) => {
|
||||
console.log(` ${index + 1}. ${peerIdString}`);
|
||||
});
|
||||
|
||||
// Show connected peer count for context
|
||||
if (connectedPeers.length > 0) {
|
||||
console.log(`📡 Connected to ${connectedPeers.length} total peer(s)`);
|
||||
}
|
||||
|
||||
if (serializableResult.failures && serializableResult.failures.length > 0) {
|
||||
console.log(`❌ Failed to send to ${serializableResult.failures.length} peer(s)`);
|
||||
}
|
||||
} else {
|
||||
console.log("v3 Message send result:", serializableResult);
|
||||
}
|
||||
return serializableResult;
|
||||
} catch (error) {
|
||||
console.error("Error sending message via v3 lightpush:", error);
|
||||
throw new Error(
|
||||
`Failed to send v3 message: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async waitForPeers(
|
||||
timeoutMs: number = 10000,
|
||||
protocols: Protocols[] = [Protocols.LightPush, Protocols.Filter],
|
||||
) {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
console.log(`Waiting for peers with protocols ${protocols} (timeout: ${timeoutMs}ms)...`);
|
||||
const startTime = Date.now();
|
||||
|
||||
try {
|
||||
await this.waku.waitForPeers(protocols, timeoutMs);
|
||||
const elapsed = Date.now() - startTime;
|
||||
console.log(`Found peers after ${elapsed}ms`);
|
||||
|
||||
// Log connected peers
|
||||
const peers = this.waku.libp2p.getPeers();
|
||||
console.log(`Connected to ${peers.length} peers:`, peers.map(p => p.toString()));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
peersFound: peers.length,
|
||||
protocolsRequested: protocols,
|
||||
timeElapsed: elapsed
|
||||
};
|
||||
} catch (error) {
|
||||
const elapsed = Date.now() - startTime;
|
||||
console.error(`Failed to find peers after ${elapsed}ms:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async createWakuNode(options: CreateNodeOptions) {
|
||||
try {
|
||||
if (this.waku) {
|
||||
await this.waku.stop();
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn("ignore previous waku stop error");
|
||||
}
|
||||
|
||||
// Store the network config from options if provided
|
||||
if (options.networkConfig) {
|
||||
this.networkConfig = options.networkConfig;
|
||||
}
|
||||
|
||||
console.log("Creating Waku node with options:", JSON.stringify(options, null, 2));
|
||||
console.log("Using network config:", JSON.stringify(this.networkConfig, null, 2));
|
||||
|
||||
// Configure for real network connectivity
|
||||
const createOptions = {
|
||||
...options,
|
||||
// Always use our stored network config
|
||||
networkConfig: this.networkConfig,
|
||||
libp2p: {
|
||||
...options.libp2p,
|
||||
filterMultiaddrs: false,
|
||||
connectionManager: {
|
||||
minConnections: 1,
|
||||
maxConnections: 50,
|
||||
connectionGater: {
|
||||
// Allow all connections
|
||||
denyDialPeer: () => false,
|
||||
denyDialMultiaddr: () => false,
|
||||
denyInboundConnection: () => false,
|
||||
denyOutboundConnection: () => false,
|
||||
denyInboundEncryptedConnection: () => false,
|
||||
denyOutboundEncryptedConnection: () => false,
|
||||
denyInboundUpgradedConnection: () => false,
|
||||
denyOutboundUpgradedConnection: () => false,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
this.waku = await createLightNode(createOptions);
|
||||
console.log("Waku node created successfully");
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
async startNode() {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not created");
|
||||
}
|
||||
console.log("Starting Waku node...");
|
||||
await this.waku.start();
|
||||
console.log("Waku node started, peer ID:", this.waku.libp2p.peerId.toString());
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
async stopNode() {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not created");
|
||||
}
|
||||
await this.waku.stop();
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
async dialPeers(peerAddrs: string[]) {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
const errors: string[] = [];
|
||||
await Promise.allSettled(
|
||||
(peerAddrs || []).map((addr) =>
|
||||
this.waku!.dial(addr).catch((err: any) =>
|
||||
errors.push(String(err?.message || err)),
|
||||
),
|
||||
),
|
||||
);
|
||||
return { total: (peerAddrs || []).length, errors };
|
||||
}
|
||||
|
||||
getPeerInfo() {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
const addrs = this.waku.libp2p.getMultiaddrs();
|
||||
return {
|
||||
peerId: this.waku.libp2p.peerId.toString(),
|
||||
multiaddrs: addrs.map((a: any) => a.toString()),
|
||||
peers: [],
|
||||
};
|
||||
}
|
||||
|
||||
getDebugInfo() {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
return {
|
||||
listenAddresses: this.waku.libp2p
|
||||
.getMultiaddrs()
|
||||
.map((a: any) => a.toString()),
|
||||
peerId: this.waku.libp2p.peerId.toString(),
|
||||
protocols: Array.from(this.waku.libp2p.getProtocols()),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available protocols from connected peers
|
||||
*/
|
||||
getAvailablePeerProtocols() {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
try {
|
||||
const libp2p = this.waku.libp2p;
|
||||
const availableProtocols = new Set<string>();
|
||||
|
||||
// Get protocols from our own node
|
||||
const ownProtocols = Array.from(libp2p.getProtocols());
|
||||
ownProtocols.forEach(p => availableProtocols.add(p));
|
||||
|
||||
// Try to get protocols from connected peers
|
||||
if (libp2p.components && libp2p.components.connectionManager) {
|
||||
const connections = libp2p.components.connectionManager.getConnections();
|
||||
connections.forEach((conn: any) => {
|
||||
// Note: Getting peer protocols might require additional libp2p methods
|
||||
// For now, we'll just log the connection info
|
||||
console.log(`Peer ${conn.remotePeer.toString()} connected via ${conn.remoteAddr.toString()}`);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
ownProtocols: ownProtocols,
|
||||
availableProtocols: Array.from(availableProtocols),
|
||||
totalConnections: libp2p.components?.connectionManager?.getConnections().length || 0
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
error: `Failed to get peer protocols: ${error instanceof Error ? error.message : String(error)}`,
|
||||
ownProtocols: this.waku.libp2p.getProtocols(),
|
||||
availableProtocols: [],
|
||||
totalConnections: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed peer connection status for debugging
|
||||
*/
|
||||
getPeerConnectionStatus() {
|
||||
if (!this.waku) {
|
||||
throw new Error("Waku node not started");
|
||||
}
|
||||
|
||||
try {
|
||||
const libp2p = this.waku.libp2p;
|
||||
|
||||
// Basic info that should always be available
|
||||
const basicInfo = {
|
||||
peerId: libp2p.peerId.toString(),
|
||||
listenAddresses: libp2p.getMultiaddrs().map((a: any) => a.toString()),
|
||||
protocols: Array.from(libp2p.getProtocols()),
|
||||
networkConfig: this.networkConfig,
|
||||
// Add debug info about libp2p
|
||||
libp2pKeys: Object.keys(libp2p),
|
||||
libp2pType: typeof libp2p,
|
||||
};
|
||||
|
||||
// Try to get connection info if available
|
||||
try {
|
||||
if (libp2p.components && libp2p.components.connectionManager) {
|
||||
const connectionManager = libp2p.components.connectionManager;
|
||||
const connections = connectionManager.getConnections().map((conn: any) => ({
|
||||
remotePeer: conn.remotePeer.toString(),
|
||||
remoteAddr: conn.remoteAddr.toString(),
|
||||
status: conn.status,
|
||||
}));
|
||||
basicInfo.connections = connections;
|
||||
} else {
|
||||
basicInfo.connections = [];
|
||||
basicInfo.connectionError = `No connection manager found in components`;
|
||||
}
|
||||
} catch (connError) {
|
||||
basicInfo.connections = [];
|
||||
basicInfo.connectionError = `Connection manager error: ${connError instanceof Error ? connError.message : String(connError)}`;
|
||||
}
|
||||
|
||||
// Try to get peer store info if available
|
||||
try {
|
||||
if (libp2p.peerStore) {
|
||||
const peerStore = libp2p.peerStore;
|
||||
if (typeof peerStore.getPeers === 'function') {
|
||||
const peers = Array.from(peerStore.getPeers()).map((peerId: any) => peerId.toString());
|
||||
basicInfo.peers = peers;
|
||||
} else {
|
||||
basicInfo.peers = [];
|
||||
basicInfo.peerError = `peerStore.getPeers is not a function`;
|
||||
}
|
||||
} else {
|
||||
basicInfo.peers = [];
|
||||
basicInfo.peerError = `No peerStore found`;
|
||||
}
|
||||
} catch (peerError) {
|
||||
basicInfo.peers = [];
|
||||
basicInfo.peerError = `Peer store error: ${peerError instanceof Error ? peerError.message : String(peerError)}`;
|
||||
}
|
||||
|
||||
// Try to check if started
|
||||
try {
|
||||
if (libp2p.status) {
|
||||
basicInfo.isStarted = libp2p.status;
|
||||
} else {
|
||||
basicInfo.isStarted = 'unknown';
|
||||
basicInfo.startError = `No status property found`;
|
||||
}
|
||||
} catch (startError) {
|
||||
basicInfo.isStarted = 'error';
|
||||
basicInfo.startError = `Start check error: ${startError instanceof Error ? startError.message : String(startError)}`;
|
||||
}
|
||||
|
||||
return basicInfo;
|
||||
} catch (error) {
|
||||
return {
|
||||
error: `Failed to get peer status: ${error instanceof Error ? error.message : String(error)}`,
|
||||
peerId: this.waku.libp2p.peerId.toString(),
|
||||
isStarted: 'unknown',
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Expose a singleton instance on window for Playwright to use
|
||||
(() => {
|
||||
try {
|
||||
console.log("Initializing WakuHeadless...");
|
||||
|
||||
// Check for global network configuration set by server
|
||||
const globalNetworkConfig = (window as any).__WAKU_NETWORK_CONFIG;
|
||||
const instance = new WakuHeadless(globalNetworkConfig);
|
||||
|
||||
// @ts-ignore - will add proper typings in global.d.ts
|
||||
(window as any).wakuApi = instance;
|
||||
console.log(
|
||||
"WakuHeadless initialized successfully:",
|
||||
!!(window as any).wakuApi,
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error initializing WakuHeadless:", error);
|
||||
// Set a fallback to help with debugging
|
||||
(window as any).wakuApi = {
|
||||
start: () =>
|
||||
Promise.reject(new Error("WakuHeadless failed to initialize")),
|
||||
error: error,
|
||||
};
|
||||
}
|
||||
})();
|
||||
35
packages/browser-tests/webpack.config.js
Normal file
@ -0,0 +1,35 @@
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
import NodePolyfillPlugin from "node-polyfill-webpack-plugin";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
export default {
|
||||
mode: "production",
|
||||
entry: "./web/index.js",
|
||||
output: {
|
||||
path: path.resolve(__dirname, "web"),
|
||||
filename: "bundle.js",
|
||||
clean: true
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.ts$/,
|
||||
use: "babel-loader",
|
||||
exclude: /node_modules/
|
||||
}
|
||||
]
|
||||
},
|
||||
resolve: {
|
||||
extensions: [".ts", ".js"],
|
||||
fallback: {
|
||||
fs: false,
|
||||
net: false,
|
||||
tls: false
|
||||
}
|
||||
},
|
||||
plugins: [new NodePolyfillPlugin()],
|
||||
target: "web"
|
||||
};
|
||||
@ -1,34 +0,0 @@
|
||||
module.exports = {
|
||||
root: true,
|
||||
env: {
|
||||
browser: true,
|
||||
node: true,
|
||||
es2021: true
|
||||
},
|
||||
plugins: ["import"],
|
||||
extends: ["eslint:recommended"],
|
||||
parserOptions: {
|
||||
ecmaVersion: 2022,
|
||||
sourceType: "module"
|
||||
},
|
||||
rules: {
|
||||
// Disable rules that might cause issues with this package
|
||||
"no-undef": "off"
|
||||
},
|
||||
ignorePatterns: [
|
||||
"node_modules",
|
||||
"build",
|
||||
"coverage"
|
||||
],
|
||||
overrides: [
|
||||
{
|
||||
files: ["*.spec.ts", "**/test_utils/*.ts", "*.js", "*.cjs"],
|
||||
rules: {
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"no-console": "off",
|
||||
"import/no-extraneous-dependencies": ["error", { "devDependencies": true }]
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
@ -1,23 +0,0 @@
|
||||
# Waku Headless Tests
|
||||
|
||||
This package contains a minimal browser application used for testing the Waku SDK in a browser environment. It is used by the browser-tests package to run end-to-end tests on the SDK.
|
||||
|
||||
## Usage
|
||||
|
||||
### Build the app
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
|
||||
### Start the app
|
||||
|
||||
```bash
|
||||
npm start
|
||||
```
|
||||
|
||||
This will start a server on port 8080 by default.
|
||||
|
||||
## Integration with browser-tests
|
||||
|
||||
This package is designed to be used with the browser-tests package to run end-to-end tests on the SDK. It exposes the Waku API via a global object in the browser.
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 4.2 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 14 KiB |
@ -1,50 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta content="width=device-width, initial-scale=1.0" name="viewport" />
|
||||
<title>Headless</title>
|
||||
<link rel="stylesheet" href="./style.css" />
|
||||
<link rel="apple-touch-icon" href="./favicon.png" />
|
||||
<link rel="manifest" href="./manifest.json" />
|
||||
<link rel="icon" href="./favicon.ico" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="state"></div>
|
||||
<div class="content">
|
||||
<div class="header">
|
||||
<h3>Status: <span id="status"></span></h3>
|
||||
|
||||
<details>
|
||||
<summary>Peer's information</summary>
|
||||
|
||||
<h4>Content topic</h4>
|
||||
<p id="contentTopic"></p>
|
||||
|
||||
<h4>Local Peer Id</h4>
|
||||
<p id="localPeerId"></p>
|
||||
|
||||
<h4>Remote Peer Id</h4>
|
||||
<p id="remotePeerId"></p>
|
||||
</details>
|
||||
</div>
|
||||
|
||||
<div id="messages"></div>
|
||||
|
||||
<div class="footer">
|
||||
<div class="inputArea">
|
||||
<input type="text" id="nickText" placeholder="Nickname" />
|
||||
<textarea id="messageText" placeholder="Message"></textarea>
|
||||
</div>
|
||||
|
||||
<div class="controls">
|
||||
<button id="send">Send</button>
|
||||
<button id="exit">Exit chat</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="./build/bundle.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
@ -1,14 +0,0 @@
|
||||
/* eslint-disable */
|
||||
import { API } from "../browser-tests/src/api/shared.ts";
|
||||
|
||||
runApp().catch((err) => {
|
||||
console.error(err);
|
||||
});
|
||||
|
||||
async function runApp() {
|
||||
if (typeof window !== "undefined") {
|
||||
// Expose shared API functions for browser communication
|
||||
window.wakuAPI = API;
|
||||
window.subscriptions = [];
|
||||
}
|
||||
}
|
||||
@ -1,19 +0,0 @@
|
||||
{
|
||||
"name": "Light Chat",
|
||||
"description": "Send messages between several users (or just one) using light client targeted protocols.",
|
||||
"icons": [
|
||||
{
|
||||
"src": "favicon.ico",
|
||||
"sizes": "64x64 32x32 24x24 16x16",
|
||||
"type": "image/x-icon"
|
||||
},
|
||||
{
|
||||
"src": "favicon.png",
|
||||
"type": "image/png",
|
||||
"sizes": "192x192"
|
||||
}
|
||||
],
|
||||
"display": "standalone",
|
||||
"theme_color": "#ffffff",
|
||||
"background_color": "#ffffff"
|
||||
}
|
||||
@ -1,27 +0,0 @@
|
||||
{
|
||||
"name": "@waku/headless-tests",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"homepage": "/headless",
|
||||
"type": "module",
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.24.0",
|
||||
"@babel/preset-env": "^7.24.0",
|
||||
"@babel/preset-typescript": "^7.23.3",
|
||||
"babel-loader": "^9.1.3",
|
||||
"filter-obj": "^2.0.2",
|
||||
"it-first": "^3.0.9",
|
||||
"node-polyfill-webpack-plugin": "^2.0.1",
|
||||
"serve": "^14.1.2",
|
||||
"webpack": "^5.99.5",
|
||||
"webpack-cli": "^5.1.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@waku/sdk": "^0.0.30"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "serve .",
|
||||
"build": "webpack",
|
||||
"format": "eslint --fix webpack.config.js"
|
||||
}
|
||||
}
|
||||
@ -1,153 +0,0 @@
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
word-wrap: break-word;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
html, body {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
max-width: 100%;
|
||||
max-height: 100%;
|
||||
}
|
||||
|
||||
html {
|
||||
font-size: 16px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
body {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 10px;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
details {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
details p {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
summary {
|
||||
cursor: pointer;
|
||||
max-width: 100%;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
span {
|
||||
font-weight: 300;
|
||||
}
|
||||
|
||||
input, textarea {
|
||||
line-height: 1rem;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
textarea {
|
||||
min-height: 3rem;
|
||||
}
|
||||
|
||||
h3 {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.content {
|
||||
width: 800px;
|
||||
min-width: 300px;
|
||||
max-width: 800px;
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-content: space-between;
|
||||
}
|
||||
|
||||
#messages {
|
||||
overflow-y: scroll;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
.message + .message {
|
||||
margin-top: 15px;
|
||||
}
|
||||
|
||||
.message :first-child {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.message p + p {
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.message span {
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.inputArea {
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
flex-direction: column;
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.controls {
|
||||
margin-top: 10px;
|
||||
display: flex;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.controls button {
|
||||
flex-grow: 1;
|
||||
cursor: pointer;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
#send {
|
||||
background-color: #32d1a0;
|
||||
border: none;
|
||||
color: white;
|
||||
}
|
||||
#send:hover {
|
||||
background-color: #3abd96;
|
||||
}
|
||||
#send:active {
|
||||
background-color: #3ba183;
|
||||
}
|
||||
|
||||
#exit {
|
||||
color: white;
|
||||
border: none;
|
||||
background-color: #ff3a31;
|
||||
}
|
||||
#exit:hover {
|
||||
background-color: #e4423a;
|
||||
}
|
||||
#exit:active {
|
||||
background-color: #c84740;
|
||||
}
|
||||
|
||||
.success {
|
||||
color: #3ba183;
|
||||
}
|
||||
|
||||
.progress {
|
||||
color: #9ea13b;
|
||||
}
|
||||
|
||||
.terminated {
|
||||
color: black;
|
||||
}
|
||||
|
||||
.error {
|
||||
color: #c84740;
|
||||
}
|
||||
|
||||
.footer {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
flex-direction: column;
|
||||
align-self: flex-end;
|
||||
}
|
||||
@ -1,14 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2020",
|
||||
"module": "commonjs",
|
||||
"allowJs": true,
|
||||
"checkJs": false,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"**/*.js"
|
||||
]
|
||||
}
|
||||
@ -1,47 +0,0 @@
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* This webpack configuration file uses ES Module syntax.
|
||||
*/
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import NodePolyfillPlugin from 'node-polyfill-webpack-plugin';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
export default {
|
||||
entry: "./index.js",
|
||||
output: {
|
||||
filename: "bundle.js",
|
||||
path: path.resolve(__dirname, "build")
|
||||
},
|
||||
mode: "production",
|
||||
target: "web",
|
||||
plugins: [new NodePolyfillPlugin()],
|
||||
resolve: {
|
||||
extensions: [".js", ".ts", ".tsx", ".jsx"],
|
||||
fallback: {
|
||||
fs: false,
|
||||
net: false,
|
||||
tls: false
|
||||
},
|
||||
alias: {
|
||||
// Create an alias to easily import from src
|
||||
"@src": path.resolve(__dirname, "../src")
|
||||
}
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.(js|ts|tsx)$/,
|
||||
exclude: /node_modules/,
|
||||
use: {
|
||||
loader: "babel-loader",
|
||||
options: {
|
||||
presets: ["@babel/preset-env", "@babel/preset-typescript"]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||