mirror of https://github.com/logos-messaging/js-waku.git
synced 2026-01-04 06:43:12 +00:00

Compare commits: master...enr-v0.0.31

No commits in common. "master" and "enr-v0.0.31" have entirely different histories.

.cspell.json (15 lines changed)

@@ -4,7 +4,6 @@
   "language": "en",
   "words": [
     "abortable",
-    "acks",
     "Addrs",
     "ahadns",
     "Alives",
@@ -24,11 +23,9 @@
     "cipherparams",
     "ciphertext",
     "circleci",
-    "circom",
     "codecov",
     "codegen",
     "commitlint",
-    "cooldown",
     "dependabot",
     "dialable",
     "dingpu",
@@ -43,7 +40,9 @@
     "Encrypters",
     "enr",
     "enrs",
+    "unsubscription",
     "enrtree",
+    "unhandle",
     "ephem",
     "esnext",
     "ethersproject",
@@ -55,7 +54,6 @@
     "fontsource",
     "globby",
     "gossipsub",
-    "hackathons",
     "huilong",
     "iasked",
     "ihave",
@@ -63,7 +61,7 @@
     "ineed",
     "IPAM",
     "ipfs",
-    "isready",
+    "cooldown",
     "iwant",
     "jdev",
     "jswaku",
@@ -104,7 +102,6 @@
     "reactjs",
     "recid",
     "rlnrelay",
-    "rlnv",
     "roadmap",
     "sandboxed",
     "scanf",
@@ -124,18 +121,14 @@
     "typedoc",
     "undialable",
     "unencrypted",
-    "unhandle",
     "unmarshal",
     "unmount",
     "unmounts",
-    "unsubscription",
     "untracked",
     "upgrader",
     "vacp",
     "varint",
-    "viem",
     "vkey",
-    "wagmi",
     "waku",
     "wakuconnect",
     "wakunode",
@@ -145,7 +138,6 @@
     "weboko",
     "websockets",
     "wifi",
-    "WTNS",
     "xsalsa20",
     "zerokit",
     "Привет",
@@ -170,7 +162,6 @@
     "gen",
     "proto",
     "*.spec.ts",
-    "*.log",
     "CHANGELOG.md"
   ],
   "patterns": [

.github/workflows/ci.yml (vendored; 87 lines changed)

@@ -15,7 +15,7 @@ on:
         type: string
 
 env:
-  NODE_JS: "24"
+  NODE_JS: "22"
 
 jobs:
   check:
@@ -57,7 +57,7 @@ jobs:
   browser:
     runs-on: ubuntu-latest
     container:
-      image: mcr.microsoft.com/playwright:v1.56.1-jammy
+      image: mcr.microsoft.com/playwright:v1.53.1-jammy
       env:
         HOME: "/root"
     steps:
@@ -71,18 +71,65 @@ jobs:
       - run: npm run build:esm
       - run: npm run test:browser
 
+  build_rln_tree:
+    if: false # This condition disables the job
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: waku-org/js-waku
+      - uses: actions/setup-node@v3
+        with:
+          node-version: ${{ env.NODE_JS }}
+      - name: Check for existing RLN tree artifact
+        id: check-artifact
+        uses: actions/github-script@v6
+        with:
+          script: |
+            const artifact = await github.rest.actions.listWorkflowRunArtifacts({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              run_id: context.runId
+            });
+            console.log(artifact);
+            const foundArtifact = artifact.data.artifacts.find(art => art.name === 'rln_tree.tar.gz');
+            if (foundArtifact) {
+              core.setOutput('artifact_id', foundArtifact.id);
+              core.setOutput('artifact_found', 'true');
+            } else {
+              core.setOutput('artifact_found', 'false');
+            }
+      - name: Download RLN tree artifact
+        if: steps.check-artifact.outputs.artifact_found == 'true'
+        uses: actions/download-artifact@v4
+        with:
+          name: rln_tree.tar.gz
+          path: /tmp
+      - uses: ./.github/actions/npm
+      - name: Sync rln tree and save artifact
+        run: |
+          mkdir -p /tmp/rln_tree.db
+          npm run build:esm
+          npm run sync-rln-tree
+          tar -czf rln_tree.tar.gz -C /tmp/rln_tree.db .
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: rln_tree.tar.gz
+          path: rln_tree.tar.gz
+
   node:
     uses: ./.github/workflows/test-node.yml
     secrets: inherit
     with:
-      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.36.0' }}
+      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.35.1' }}
       test_type: node
       allure_reports: true
 
   node_optional:
     uses: ./.github/workflows/test-node.yml
     with:
-      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.36.0' }}
+      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.35.1' }}
      test_type: node-optional
 
   node_with_nwaku_master:
@@ -113,44 +160,12 @@ jobs:
           node-version: ${{ env.NODE_JS }}
           registry-url: "https://registry.npmjs.org"
 
-      - uses: pnpm/action-setup@v4
-        if: ${{ steps.release.outputs.releases_created }}
-        with:
-          version: 9
-
       - run: npm install
         if: ${{ steps.release.outputs.releases_created }}
 
       - run: npm run build
         if: ${{ steps.release.outputs.releases_created }}
 
-      - name: Setup Foundry
-        if: ${{ steps.release.outputs.releases_created }}
-        uses: foundry-rs/foundry-toolchain@v1
-        with:
-          version: nightly
-
-      - name: Generate RLN contract ABIs
-        id: rln-abi
-        if: ${{ steps.release.outputs.releases_created }}
-        run: |
-          npm run setup:contract-abi -w @waku/rln || {
-            echo "::warning::Failed to generate contract ABIs, marking @waku/rln as private to skip publishing"
-            cd packages/rln
-            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
-            echo "failed=true" >> $GITHUB_OUTPUT
-          }
-
-      - name: Rebuild with new ABIs
-        if: ${{ steps.release.outputs.releases_created && steps.rln-abi.outputs.failed != 'true' }}
-        run: |
-          npm install -w packages/rln
-          npm run build -w @waku/rln || {
-            echo "::warning::Failed to build @waku/rln, marking as private to skip publishing"
-            cd packages/rln
-            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
-          }
-
       - run: npm run publish
         if: ${{ steps.release.outputs.releases_created }}
         env:
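
The `build_rln_tree` job added on the enr-v0.0.31 side locates a previously uploaded `rln_tree.tar.gz` artifact through `actions/github-script`, where `github` and `context` are injected by the runner. A minimal standalone sketch of the same lookup with `@octokit/rest` (the owner/repo literals and `GITHUB_TOKEN` handling here are illustrative assumptions, not part of the workflow):

```ts
import { Octokit } from "@octokit/rest";

// Sketch only: the workflow does this inline via actions/github-script.
async function findRlnTreeArtifact(runId: number): Promise<number | undefined> {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const { data } = await octokit.rest.actions.listWorkflowRunArtifacts({
    owner: "waku-org", // assumed values for illustration
    repo: "js-waku",
    run_id: runId
  });
  const found = data.artifacts.find((art) => art.name === "rln_tree.tar.gz");
  return found?.id; // undefined corresponds to artifact_found == 'false'
}
```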

.github/workflows/playwright.yml (vendored; 12 lines changed)

@@ -8,6 +8,9 @@ on:
 
 env:
   NODE_JS: "22"
+  EXAMPLE_TEMPLATE: "web-chat"
+  EXAMPLE_NAME: "example"
+  EXAMPLE_PORT: "8080"
   # Firefox in container fails due to $HOME not being owned by user running commands
   # more details https://github.com/microsoft/playwright/issues/6500
   HOME: "/root"
@@ -17,7 +20,7 @@ jobs:
     timeout-minutes: 60
     runs-on: ubuntu-latest
     container:
-      image: mcr.microsoft.com/playwright:v1.56.1-jammy
+      image: mcr.microsoft.com/playwright:v1.53.1-jammy
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-node@v3
@@ -26,8 +29,11 @@ jobs:
 
       - uses: ./.github/actions/npm
 
-      - name: Build entire monorepo
-        run: npm run build
+      - name: Build browser container
+        run: npm run build --workspace=@waku/headless-tests
 
+      - name: Build browser test environment
+        run: npm run build --workspace=@waku/browser-tests
+
       - name: Run Playwright tests
         run: npm run test --workspace=@waku/browser-tests

.github/workflows/pre-release.yml (vendored; 50 lines changed)

@@ -2,11 +2,7 @@ on:
   workflow_dispatch:
 
 env:
-  NODE_JS: "24"
+  NODE_JS: "22"
 
-permissions:
-  id-token: write
-  contents: read
-
 jobs:
   pre-release:
@@ -14,49 +10,19 @@ jobs:
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: waku-org/js-waku
-          ref: ${{ github.ref }}
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@v3
        with:
          node-version: ${{ env.NODE_JS }}
          registry-url: "https://registry.npmjs.org"
 
-      - uses: pnpm/action-setup@v4
-        with:
-          version: 9
-
      - run: npm install
 
      - run: npm run build
 
-      - name: Setup Foundry
-        uses: foundry-rs/foundry-toolchain@v1
-        with:
-          version: nightly
-
-      - name: Generate RLN contract ABIs
-        id: rln-abi
-        run: |
-          npm run setup:contract-abi -w @waku/rln || {
-            echo "::warning::Failed to generate contract ABIs, marking @waku/rln as private to skip publishing"
-            cd packages/rln
-            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
-            echo "failed=true" >> $GITHUB_OUTPUT
-          }
-
-      - name: Rebuild with new ABIs
-        if: steps.rln-abi.outputs.failed != 'true'
-        run: |
-          npm install -w packages/rln
-          npm run build -w @waku/rln || {
-            echo "::warning::Failed to build @waku/rln, marking as private to skip publishing"
-            cd packages/rln
-            node -e "const fs = require('fs'); const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8')); pkg.private = true; fs.writeFileSync('package.json', JSON.stringify(pkg, null, 2));"
-          }
-
      - run: npm run publish -- --tag next
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_JS_WAKU_PUBLISH }}
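
Both release workflows above fall back to an inline `node -e` one-liner that marks `@waku/rln` as private when ABI generation or the build fails, so the publish step skips the package. The same one-liner, expanded into a readable TypeScript script with unchanged behaviour:

```ts
import { readFileSync, writeFileSync } from "node:fs";

// Expanded form of the workflows' inline `node -e`: flag the package as
// private so `npm run publish` skips @waku/rln when its ABIs are unavailable.
const pkg = JSON.parse(readFileSync("package.json", "utf8"));
pkg.private = true;
writeFileSync("package.json", JSON.stringify(pkg, null, 2));
```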

.github/workflows/test-node.yml (vendored; 8 lines changed)

@@ -24,7 +24,7 @@ on:
       default: false
 
 env:
-  NODE_JS: "24"
+  NODE_JS: "22"
   # Ensure test type conditions remain consistent.
   WAKU_SERVICE_NODE_PARAMS: ${{ (inputs.test_type == 'go-waku-master') && '--min-relay-peers-to-publish=0' || '' }}
   DEBUG: ${{ inputs.debug }}
@@ -42,7 +42,7 @@ jobs:
       checks: write
     steps:
      - uses: actions/checkout@v3
        with:
          repository: waku-org/js-waku
 
      - name: Remove unwanted software
@@ -62,7 +62,7 @@ jobs:
 
      - name: Merge allure reports
        if: always() && env.ALLURE_REPORTS == 'true'
        run: node ci/mergeAllureResults.cjs
 
      - name: Get allure history
        if: always() && env.ALLURE_REPORTS == 'true'
@@ -125,4 +125,4 @@ jobs:
          echo "## Run Information" >> $GITHUB_STEP_SUMMARY
          echo "- **NWAKU**: ${{ env.WAKUNODE_IMAGE }}" >> $GITHUB_STEP_SUMMARY
          echo "## Test Results" >> $GITHUB_STEP_SUMMARY
          echo "Allure report will be available at: https://waku-org.github.io/allure-jswaku/${{ github.run_number }}" >> $GITHUB_STEP_SUMMARY

.github/workflows/test-reliability.yml (vendored; 23 lines changed)

@@ -12,13 +12,10 @@ on:
         - longevity
         - high-throughput
         - throughput-sizes
-        - network-latency
-        - low-bandwidth
-        - packet-loss
         - all
 
 env:
-  NODE_JS: "24"
+  NODE_JS: "22"
 
 jobs:
   test:
@@ -29,12 +26,12 @@ jobs:
       checks: write
     strategy:
       matrix:
-        test_type: [longevity, high-throughput, throughput-sizes, network-latency, low-bandwidth, packet-loss]
+        test_type: [longevity, high-throughput, throughput-sizes]
       fail-fast: false
     if: ${{ github.event.inputs.test_type == 'all' }}
     steps:
       - uses: actions/checkout@v3
         with:
           repository: waku-org/js-waku
 
       - name: Remove unwanted software
@@ -55,12 +52,6 @@ jobs:
            npm run test:high-throughput
          elif [ "${{ matrix.test_type }}" = "throughput-sizes" ]; then
            npm run test:throughput-sizes
-          elif [ "${{ matrix.test_type }}" = "network-latency" ]; then
-            npm run test:network-latency
-          elif [ "${{ matrix.test_type }}" = "low-bandwidth" ]; then
-            npm run test:low-bandwidth
-          elif [ "${{ matrix.test_type }}" = "packet-loss" ]; then
-            npm run test:packet-loss
          else
            npm run test:longevity
          fi
@@ -74,7 +65,7 @@ jobs:
     if: ${{ github.event.inputs.test_type != 'all' }}
     steps:
       - uses: actions/checkout@v3
         with:
           repository: waku-org/js-waku
 
       - name: Remove unwanted software
@@ -95,12 +86,6 @@ jobs:
            npm run test:high-throughput
          elif [ "${{ github.event.inputs.test_type }}" = "throughput-sizes" ]; then
            npm run test:throughput-sizes
-          elif [ "${{ github.event.inputs.test_type }}" = "network-latency" ]; then
-            npm run test:network-latency
-          elif [ "${{ github.event.inputs.test_type }}" = "low-bandwidth" ]; then
-            npm run test:low-bandwidth
-          elif [ "${{ github.event.inputs.test_type }}" = "packet-loss" ]; then
-            npm run test:packet-loss
          else
            npm run test:longevity
          fi
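
Both jobs dispatch the selected `test_type` to an npm script through a chain of shell `elif` branches. The same dispatch expressed as a small TypeScript helper (a sketch; the `TEST_TYPE` variable is an assumption, the workflow actually reads the matrix value or the dispatch input):

```ts
import { execSync } from "node:child_process";

// Sketch of the workflow's elif chain: map a test type to its npm script.
const scripts: Record<string, string> = {
  longevity: "test:longevity",
  "high-throughput": "test:high-throughput",
  "throughput-sizes": "test:throughput-sizes"
};

const testType = process.env.TEST_TYPE ?? "longevity";
const script = scripts[testType] ?? "test:longevity"; // unknown types fall back, like the shell `else`
execSync(`npm run ${script}`, { stdio: "inherit" });
```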

.gitignore (vendored; 5 lines changed)

@@ -17,7 +17,4 @@ packages/discovery/mock_local_storage
 .giga
 .cursor
 .DS_Store
 CLAUDE.md
-.env
-postgres-data/
-packages/rln/waku-rlnv2-contract/

@@ -1,15 +1,13 @@
 {
-  "packages/utils": "0.0.27",
-  "packages/proto": "0.0.15",
-  "packages/interfaces": "0.0.34",
-  "packages/enr": "0.0.33",
-  "packages/core": "0.0.40",
-  "packages/message-encryption": "0.0.38",
-  "packages/relay": "0.0.23",
-  "packages/sdk": "0.0.36",
-  "packages/discovery": "0.0.13",
-  "packages/sds": "0.0.8",
-  "packages/rln": "0.1.10",
-  "packages/react": "0.0.8",
-  "packages/run": "0.0.2"
+  "packages/utils": "0.0.25",
+  "packages/proto": "0.0.12",
+  "packages/interfaces": "0.0.32",
+  "packages/enr": "0.0.31",
+  "packages/core": "0.0.37",
+  "packages/message-encryption": "0.0.35",
+  "packages/relay": "0.0.20",
+  "packages/sdk": "0.0.33",
+  "packages/discovery": "0.0.10",
+  "packages/sds": "0.0.5",
+  "packages/rln": "0.1.7"
 }

@@ -33,9 +33,9 @@ module.exports = [
     import: "{ wakuPeerExchangeDiscovery }",
   },
   {
-    name: "Peer Cache Discovery",
+    name: "Local Peer Cache Discovery",
     path: "packages/discovery/bundle/index.js",
-    import: "{ wakuPeerCacheDiscovery }",
+    import: "{ wakuLocalPeerCacheDiscovery }",
   },
   {
     name: "Privacy preserving protocols",

Dockerfile (new file, 45 lines)

@@ -0,0 +1,45 @@
+FROM node:20-slim
+
+# Install Chrome dependencies
+RUN apt-get update && apt-get install -y \
+    procps \
+    libglib2.0-0 \
+    libnss3 \
+    libnspr4 \
+    libatk1.0-0 \
+    libatk-bridge2.0-0 \
+    libcups2 \
+    libdrm2 \
+    libxkbcommon0 \
+    libxcomposite1 \
+    libxdamage1 \
+    libxfixes3 \
+    libxrandr2 \
+    libgbm1 \
+    libasound2 \
+    libpango-1.0-0 \
+    libcairo2 \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+# Copy package files
+COPY package*.json ./
+COPY packages/browser-tests/package.json ./packages/browser-tests/
+COPY packages/headless-tests/package.json ./packages/headless-tests/
+
+# Install dependencies and serve
+RUN npm install && npm install -g serve
+
+# Copy source files
+COPY tsconfig.json ./
+COPY packages/ ./packages/
+
+# Build packages
+RUN npm run build -w packages/headless-tests && \
+    npm run build:server -w packages/browser-tests && \
+    npx playwright install chromium
+
+EXPOSE 3000
+
+CMD ["npm", "run", "start:server", "-w", "packages/browser-tests"]

@@ -23,15 +23,6 @@ npm install
 npm run doc
 ```
 
-# Using Nix shell
-```shell
-git clone https://github.com/waku-org/js-waku.git
-cd js-waku
-nix develop
-npm install
-npm run doc
-```
-
 ## Bugs, Questions & Features
 
 If you encounter any bug or would like to propose new features, feel free to [open an issue](https://github.com/waku-org/js-waku/issues/new/).

ci/Jenkinsfile (vendored; 29 lines changed)

@@ -1,16 +1,5 @@
-#!/usr/bin/env groovy
-library 'status-jenkins-lib@v1.9.27'
-
 pipeline {
-  agent {
-    docker {
-      label 'linuxcontainer'
-      image 'harbor.status.im/infra/ci-build-containers:linux-base-1.0.0'
-      args '--volume=/nix:/nix ' +
-           '--volume=/etc/nix:/etc/nix ' +
-           '--user jenkins'
-    }
-  }
+  agent { label 'linux' }
 
   options {
     disableConcurrentBuilds()
@@ -32,25 +21,19 @@ pipeline {
   stages {
     stage('Deps') {
       steps {
-        script {
-          nix.develop('npm install', pure: true)
-        }
+        sh 'npm install'
       }
     }
 
     stage('Packages') {
       steps {
-        script {
-          nix.develop('npm run build', pure: true)
-        }
+        sh 'npm run build'
      }
    }
 
    stage('Build') {
      steps {
-        script {
-          nix.develop('npm run doc', pure: true)
-        }
+        sh 'npm run doc'
      }
    }
 
@@ -58,9 +41,7 @@ pipeline {
       when { expression { GIT_BRANCH.endsWith('master') } }
       steps {
         sshagent(credentials: ['status-im-auto-ssh']) {
-          script {
-            nix.develop('npm run deploy', pure: false)
-          }
+          sh 'npm run deploy'
         }
       }
     }

flake.lock (generated; 26 lines deleted)

@@ -1,26 +0,0 @@
-{
-  "nodes": {
-    "nixpkgs": {
-      "locked": {
-        "lastModified": 1761016216,
-        "narHash": "sha256-G/iC4t/9j/52i/nm+0/4ybBmAF4hzR8CNHC75qEhjHo=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "481cf557888e05d3128a76f14c76397b7d7cc869",
-        "type": "github"
-      },
-      "original": {
-        "id": "nixpkgs",
-        "ref": "nixos-25.05",
-        "type": "indirect"
-      }
-    },
-    "root": {
-      "inputs": {
-        "nixpkgs": "nixpkgs"
-      }
-    }
-  },
-  "root": "root",
-  "version": 7
-}

flake.nix (33 lines deleted)

@@ -1,33 +0,0 @@
-{
-  description = "Nix flake development shell.";
-
-  inputs = {
-    nixpkgs.url = "nixpkgs/nixos-25.05";
-  };
-
-  outputs =
-    { self, nixpkgs }:
-    let
-      supportedSystems = [
-        "x86_64-linux"
-        "aarch64-linux"
-        "x86_64-darwin"
-        "aarch64-darwin"
-      ];
-      forEachSystem = nixpkgs.lib.genAttrs supportedSystems;
-      pkgsFor = forEachSystem (system: import nixpkgs { inherit system; });
-    in
-    rec {
-      formatter = forEachSystem (system: pkgsFor.${system}.nixpkgs-fmt);
-
-      devShells = forEachSystem (system: {
-        default = pkgsFor.${system}.mkShellNoCC {
-          packages = with pkgsFor.${system}.buildPackages; [
-            git # 2.44.1
-            openssh # 9.7p1
-            nodejs_20 # v20.15.1
-          ];
-        };
-      });
-    };
-}

@@ -6,10 +6,7 @@ if (!process.env.CHROME_BIN) {
   process.env.CHROME_BIN = playwright.chromium.executablePath();
 }
 console.log("Using CHROME_BIN:", process.env.CHROME_BIN);
-if (!process.env.FIREFOX_BIN) {
-  process.env.FIREFOX_BIN = playwright.firefox.executablePath();
-}
-console.log("Using FIREFOX_BIN:", process.env.FIREFOX_BIN);
+process.env.FIREFOX_BIN = playwright.firefox.executablePath();
 
 module.exports = function (config) {
   const configuration = {

package-lock.json (generated; 8228 lines changed)

Diff suppressed because it is too large.

package.json (18 lines changed)

@@ -10,16 +10,15 @@
     "packages/core",
     "packages/discovery",
     "packages/message-encryption",
-    "packages/sds",
-    "packages/rln",
     "packages/sdk",
     "packages/relay",
-    "packages/run",
+    "packages/sds",
+    "packages/rln",
     "packages/tests",
     "packages/reliability-tests",
+    "packages/headless-tests",
     "packages/browser-tests",
-    "packages/build-utils",
-    "packages/react"
+    "packages/build-utils"
   ],
   "scripts": {
     "prepare": "husky",
@@ -37,15 +36,13 @@
     "test:longevity": "npm --prefix packages/reliability-tests run test:longevity",
     "test:high-throughput": "npm --prefix packages/reliability-tests run test:high-throughput",
     "test:throughput-sizes": "npm --prefix packages/reliability-tests run test:throughput-sizes",
-    "test:network-latency": "npm --prefix packages/reliability-tests run test:network-latency",
-    "test:low-bandwidth": "npm --prefix packages/reliability-tests run test:low-bandwidth",
-    "test:packet-loss": "npm --prefix packages/reliability-tests run test:packet-loss",
     "proto": "npm run proto --workspaces --if-present",
     "deploy": "node ci/deploy.js",
     "doc": "run-s doc:*",
     "doc:html": "typedoc --options typedoc.cjs",
     "doc:cname": "echo 'js.waku.org' > docs/CNAME",
-    "publish": "node ./ci/publish.js"
+    "publish": "node ./ci/publish.js",
+    "sync-rln-tree": "node ./packages/tests/src/sync-rln-tree.js"
   },
   "devDependencies": {
     "@size-limit/preset-big-lib": "^11.0.2",
@@ -78,6 +75,5 @@
     "*.{ts,js}": [
       "eslint --fix"
     ]
-  },
-  "version": ""
+  }
 }

@@ -1,4 +1,5 @@
 node_modules
+dist
 build
 .DS_Store
 *.log

@@ -12,7 +12,7 @@ module.exports = {
   plugins: ["import"],
   extends: ["eslint:recommended"],
   rules: {
-    "no-unused-vars": ["error", { "argsIgnorePattern": "^_", "ignoreRestSiblings": true }]
+    "no-console": "off"
   },
   globals: {
     process: true

@@ -1,72 +0,0 @@
-# syntax=docker/dockerfile:1
-
-# Build stage - install all dependencies and build
-FROM node:22-bullseye AS builder
-
-WORKDIR /app
-
-# Copy package.json and temporarily remove workspace dependencies that can't be resolved
-COPY package.json package.json.orig
-RUN sed '/"@waku\/tests": "\*",/d' package.json.orig > package.json
-RUN npm install --no-audit --no-fund
-
-COPY src ./src
-COPY types ./types
-COPY tsconfig.json ./
-COPY web ./web
-
-RUN npm run build
-
-# Production stage - only runtime dependencies
-FROM node:22-bullseye
-
-# Install required system deps for Playwright Chromium
-RUN apt-get update && apt-get install -y \
-    wget \
-    gnupg \
-    ca-certificates \
-    fonts-liberation \
-    libatk-bridge2.0-0 \
-    libatk1.0-0 \
-    libatspi2.0-0 \
-    libcups2 \
-    libdbus-1-3 \
-    libdrm2 \
-    libgtk-3-0 \
-    libnspr4 \
-    libnss3 \
-    libx11-xcb1 \
-    libxcomposite1 \
-    libxdamage1 \
-    libxfixes3 \
-    libxkbcommon0 \
-    libxrandr2 \
-    xdg-utils \
-    && rm -rf /var/lib/apt/lists/*
-
-WORKDIR /app
-
-# Copy package files and install only production dependencies
-COPY package.json package.json.orig
-RUN sed '/"@waku\/tests": "\*",/d' package.json.orig > package.json
-RUN npm install --only=production --no-audit --no-fund
-
-# Copy built application from builder stage
-COPY --from=builder /app/dist ./dist
-
-# Install Playwright browsers (Chromium only) at runtime layer
-RUN npx playwright install --with-deps chromium
-
-ENV PORT=8080 \
-    NODE_ENV=production
-
-EXPOSE 8080
-
-# Use a script to handle CLI arguments and environment variables
-COPY scripts/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
-RUN chmod +x /usr/local/bin/docker-entrypoint.sh
-
-ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
-CMD ["npm", "run", "start:server"]

@@ -1,174 +1,182 @@
 # Waku Browser Tests
 
-This package provides a containerized Waku light node simulation server for testing and development. The server runs a headless browser using Playwright and exposes a REST API similar to the nwaku REST API. A Dockerfile is provided to allow programmatic simulation and "deployment" of js-waku nodes in any Waku orchestration environment that uses Docker (e.g. [10ksim](https://github.com/vacp2p/10ksim)).
+This project provides a system for testing the Waku SDK in a browser environment.
 
-## Quick Start
+## Architecture
 
-### Build and Run
+The system consists of:
 
+1. **Headless Web App**: A simple web application (in the `@waku/headless-tests` package) that loads the Waku SDK and exposes shared API functions.
+2. **Express Server**: A server that communicates with the headless app using Playwright.
+3. **Shared API**: TypeScript functions shared between the server and web app.
+
+## Setup
+
+1. Install dependencies:
+
+```bash
+# Install main dependencies
+npm install
+
+# Install headless app dependencies
+cd ../headless-tests
+npm install
+cd ../browser-tests
+```
+
+2. Build the application:
+
 ```bash
-# Build the application
 npm run build
-
-# Start the server (port 8080)
-npm run start:server
-
-# Build and run Docker container
-npm run docker:build
-docker run -p 8080:8080 waku-browser-tests:local
 ```
 
-## Configuration
+This will:
+- Build the headless web app using webpack
+- Compile the TypeScript server code
 
-Configure the Waku node using environment variables:
+## Running
 
-### Network Configuration
-- `WAKU_CLUSTER_ID`: Cluster ID (default: 1)
-- `WAKU_SHARD`: Specific shard number - enables static sharding mode (optional)
-
-**Sharding Behavior:**
-- **Auto-sharding** (default): Uses `numShardsInCluster: 8` across cluster 1
-- **Static sharding**: When `WAKU_SHARD` is set, uses only that specific shard
-
-### Bootstrap Configuration
-- `WAKU_ENR_BOOTSTRAP`: Enable ENR bootstrap mode with custom bootstrap peers (comma-separated)
-- `WAKU_LIGHTPUSH_NODE`: Preferred lightpush node multiaddr (Docker only)
-
-### ENR Bootstrap Mode
-
-When `WAKU_ENR_BOOTSTRAP` is set:
-- Disables default bootstrap (`defaultBootstrap: false`)
-- Enables DNS discovery using production ENR trees
-- Enables peer exchange and peer cache
-- Uses the specified ENR for additional bootstrap peers
+Start the server with:
 
 ```bash
-# Example: ENR bootstrap mode
-WAKU_ENR_BOOTSTRAP="enr:-QEnuEBEAyErHEfhiQxAVQoWowGTCuEF9fKZtXSd7H_PymHFhGJA3rGAYDVSHKCyJDGRLBGsloNbS8AZF33IVuefjOO6BIJpZIJ2NIJpcIQS39tkim11bHRpYWRkcnO4lgAvNihub2RlLTAxLmRvLWFtczMud2FrdXYyLnRlc3Quc3RhdHVzaW0ubmV0BgG73gMAODcxbm9kZS0wMS5hYy1jbi1ob25na29uZy1jLndha3V2Mi50ZXN0LnN0YXR1c2ltLm5ldAYBu94DACm9A62t7AQL4Ef5ZYZosRpQTzFVAB8jGjf1TER2wH-0zBOe1-MDBNLeA4lzZWNwMjU2azGhAzfsxbxyCkgCqq8WwYsVWH7YkpMLnU2Bw5xJSimxKav-g3VkcIIjKA" npm run start:server
+npm run start:server
 ```
 
+This will:
+1. Serve the headless app on port 8080
+2. Start a headless browser to load the app
+3. Expose API endpoints to interact with Waku
+
 ## API Endpoints
 
-The server exposes the following HTTP endpoints:
+- `GET /info`: Get information about the Waku node
+- `GET /debug/v1/info`: Get debug information from the Waku node
+- `POST /push`: Push a message to the Waku network (legacy)
+- `POST /lightpush/v1/message`: Push a message to the Waku network (Waku REST API compatible)
+- `POST /admin/v1/create-node`: Create a new Waku node (requires networkConfig)
+- `POST /admin/v1/start-node`: Start the Waku node
+- `POST /admin/v1/stop-node`: Stop the Waku node
+- `POST /admin/v1/peers`: Dial to specified peers (Waku REST API compatible)
+- `GET /filter/v2/messages/:contentTopic`: Subscribe to messages on a specific content topic using Server-Sent Events (Waku REST API compatible)
+- `GET /filter/v1/messages/:contentTopic`: Retrieve stored messages from a content topic (Waku REST API compatible)
 
-### Node Management
-- `GET /`: Health check - returns server status
-- `GET /waku/v1/peer-info`: Get node peer information
-- `POST /waku/v1/wait-for-peers`: Wait for peers with specific protocols
-
-### Messaging
-- `POST /lightpush/v3/message`: Send message via lightpush
-
-### Static Files
-- `GET /app/index.html`: Web application entry point
-- `GET /app/*`: Static web application files
-
-### Examples
-
-#### Send a Message (Auto-sharding)
+### Example: Pushing a message with the legacy endpoint
 
 ```bash
-curl -X POST http://localhost:8080/lightpush/v3/message \
+curl -X POST http://localhost:3000/push \
   -H "Content-Type: application/json" \
+  -d '{"contentTopic": "/toy-chat/2/huilong/proto", "payload": [1, 2, 3]}'
+```
+
+### Example: Pushing a message with the Waku REST API compatible endpoint
+
+```bash
+curl -X POST http://localhost:3000/lightpush/v1/message \
+  -H "Content-Type: application/json" \
   -d '{
-    "pubsubTopic": "",
+    "pubsubTopic": "/waku/2/rs/0/0",
     "message": {
-      "contentTopic": "/test/1/example/proto",
-      "payload": "SGVsbG8gV2FrdQ==",
-      "version": 1
+      "payload": "SGVsbG8sIFdha3Uh",
+      "contentTopic": "/toy-chat/2/huilong/proto",
+      "timestamp": 1712135330213797632
     }
   }'
 ```
 
-#### Send a Message (Explicit pubsub topic)
+### Example: Executing a function
 
 ```bash
-curl -X POST http://localhost:8080/lightpush/v3/message \
+curl -X POST http://localhost:3000/execute \
+  -H "Content-Type: application/json" \
+  -d '{"functionName": "getPeerInfo", "params": []}'
+```
+
+### Example: Creating a Waku node
+
+```bash
+curl -X POST http://localhost:3000/admin/v1/create-node \
   -H "Content-Type: application/json" \
   -d '{
-    "pubsubTopic": "/waku/2/rs/1/4",
-    "message": {
-      "contentTopic": "/test/1/example/proto",
-      "payload": "SGVsbG8gV2FrdQ==",
-      "version": 1
+    "defaultBootstrap": true,
+    "networkConfig": {
+      "clusterId": 1,
+      "shards": [0, 1]
     }
   }'
 ```
 
-#### Wait for Peers
+### Example: Starting and stopping a Waku node
 
 ```bash
-curl -X POST http://localhost:8080/waku/v1/wait-for-peers \
+# Start the node
+curl -X POST http://localhost:3000/admin/v1/start-node
+
+# Stop the node
+curl -X POST http://localhost:3000/admin/v1/stop-node
+```
+
+### Example: Dialing to specific peers with the Waku REST API compatible endpoint
+
+```bash
+curl -X POST http://localhost:3000/admin/v1/peers \
   -H "Content-Type: application/json" \
   -d '{
-    "timeoutMs": 30000,
-    "protocols": ["lightpush", "filter"]
+    "peerMultiaddrs": [
+      "/ip4/127.0.0.1/tcp/8000/p2p/16Uiu2HAm4v8KuHUH6Cwz3upPeQbkyxQJsFGPdt7kHtkN8F79QiE6"
+    ]
   }'
 ```
 
-#### Get Peer Info
-```bash
-curl -X GET http://localhost:8080/waku/v1/peer-info
-```
-
-## CLI Usage
-
-Run with CLI arguments:
+### Example: Dialing to specific peers with the execute endpoint
 
 ```bash
-# Custom cluster and shard
-node dist/src/server.js --cluster-id=2 --shard=0
+curl -X POST http://localhost:3000/execute \
+  -H "Content-Type: application/json" \
+  -d '{
+    "functionName": "dialPeers",
+    "params": [
+      ["/ip4/127.0.0.1/tcp/8000/p2p/16Uiu2HAm4v8KuHUH6Cwz3upPeQbkyxQJsFGPdt7kHtkN8F79QiE6"]
+    ]
+  }'
 ```
 
-## Testing
+### Example: Subscribing to a content topic with the filter endpoint
 
-The package includes several test suites:
-
 ```bash
-# Basic server functionality tests (default)
-npm test
-
-# Docker testing workflow
-npm run docker:build
-npm run test:integration
-
-# All tests
-npm run test:all
-
-# Individual test suites:
-npm run test:server # Server-only tests
-npm run test:e2e # End-to-end tests
+# Open a persistent connection to receive messages as Server-Sent Events
+curl -N http://localhost:3000/filter/v2/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto
+
+# You can also specify clustering options
+curl -N "http://localhost:3000/filter/v2/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto?clusterId=0&shard=0"
 ```
 
-**Test Types:**
-- `server.spec.ts` - Tests basic server functionality and static file serving
-- `integration.spec.ts` - Tests Docker container integration with external services
-- `e2e.spec.ts` - Full end-to-end tests using nwaku nodes
-
-## Docker Usage
-
-The package includes Docker support for containerized testing:
+### Example: Retrieving stored messages from a content topic
 
 ```bash
-# Build image
-docker build -t waku-browser-tests:local .
-
-# Run with ENR bootstrap
-docker run -p 8080:8080 \
-  -e WAKU_ENR_BOOTSTRAP="enr:-QEnuE..." \
-  -e WAKU_CLUSTER_ID="1" \
-  waku-browser-tests:local
-
-# Run with specific configuration
-docker run -p 8080:8080 \
-  -e WAKU_CLUSTER_ID="2" \
-  -e WAKU_SHARD="0" \
-  waku-browser-tests:local
+# Get the most recent 20 messages
+curl http://localhost:3000/filter/v1/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto
+
+# Get messages with pagination and time filtering
+curl "http://localhost:3000/filter/v1/messages/%2Ftoy-chat%2F2%2Fhuilong%2Fproto?pageSize=10&startTime=1712000000000&endTime=1713000000000&ascending=true"
 ```
 
-## Development
+## Extending
 
-The server automatically:
-- Creates a Waku light node on startup
-- Configures network settings from environment variables
-- Enables appropriate protocols (lightpush, filter)
-- Handles peer discovery and connection management
-
-All endpoints are CORS-enabled for cross-origin requests.
+To add new functionality:
+
+1. Add your function to `src/api/shared.ts`
+2. Add your function to the `API` object in `src/api/shared.ts`
+3. Use it via the server endpoints
+
+### Example: Dialing to specific peers
+
+```bash
+curl -X POST http://localhost:3000/execute \
+  -H "Content-Type: application/json" \
+  -d '{
+    "functionName": "dialPeers",
+    "params": [
+      ["/ip4/127.0.0.1/tcp/8000/p2p/16Uiu2HAm4v8KuHUH6Cwz3upPeQbkyxQJsFGPdt7kHtkN8F79QiE6"]
+    ]
+  }'
+```
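
The master side of this README documents `POST /lightpush/v3/message`, where an empty `pubsubTopic` lets the server auto-shard. A hypothetical TypeScript client mirroring the curl example above (the base URL default and error handling are assumptions):

```ts
// Hypothetical client for the master-side lightpush endpoint described above.
async function sendViaLightpush(baseUrl = "http://localhost:8080"): Promise<void> {
  const res = await fetch(`${baseUrl}/lightpush/v3/message`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      pubsubTopic: "", // empty string: let the server auto-shard
      message: {
        contentTopic: "/test/1/example/proto",
        payload: "SGVsbG8gV2FrdQ==", // base64 payload, as in the curl example
        version: 1
      }
    })
  });
  if (!res.ok) {
    throw new Error(`lightpush request failed: ${res.status}`);
  }
}
```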

@@ -5,38 +5,27 @@
   "type": "module",
   "scripts": {
     "start": "npm run start:server",
-    "start:server": "PORT=8080 node ./dist/src/server.js",
-    "test": "npx playwright test tests/server.spec.ts --reporter=line",
-    "test:all": "npx playwright test --reporter=line",
-    "test:server": "npx playwright test tests/server.spec.ts --reporter=line",
-    "test:integration": "npx playwright test tests/integration.spec.ts --reporter=line",
-    "test:e2e": "npx playwright test tests/e2e.spec.ts --reporter=line",
+    "start:server": "node ./dist/server.js",
+    "test": "npx playwright test",
     "build:server": "tsc -p tsconfig.json",
-    "build:web": "esbuild web/index.ts --bundle --format=esm --platform=browser --outdir=dist/web && cp web/index.html dist/web/index.html",
-    "build": "npm-run-all -s build:server build:web",
-    "docker:build": "docker build -t waku-browser-tests:local . && docker tag waku-browser-tests:local waku-browser-tests:latest"
-  },
-  "dependencies": {
-    "@playwright/test": "^1.51.1",
-    "@waku/discovery": "^0.0.11",
-    "@waku/interfaces": "^0.0.33",
-    "@waku/sdk": "^0.0.34",
-    "@waku/utils": "0.0.27",
-    "cors": "^2.8.5",
-    "dotenv-flow": "^0.4.0",
-    "express": "^4.21.2",
-    "filter-obj": "^2.0.2",
-    "it-first": "^3.0.9"
+    "build": "npm run build:server"
   },
   "devDependencies": {
     "@types/cors": "^2.8.15",
     "@types/express": "^4.17.21",
     "@types/node": "^20.10.0",
-    "@waku/tests": "*",
     "axios": "^1.8.4",
-    "esbuild": "^0.21.5",
+    "dotenv-flow": "^0.4.0",
     "npm-run-all": "^4.1.5",
-    "testcontainers": "^10.9.0",
-    "typescript": "5.8.3"
+    "serve": "^14.2.3",
+    "typescript": "5.8.3",
+    "webpack-cli": "^6.0.1"
+  },
+  "dependencies": {
+    "@playwright/test": "^1.51.1",
+    "@waku/sdk": "^0.0.30",
+    "cors": "^2.8.5",
+    "express": "^4.21.2",
+    "node-polyfill-webpack-plugin": "^4.1.0"
   }
 }

@@ -1,39 +1,57 @@
+// For dynamic import of dotenv-flow
 import { defineConfig, devices } from "@playwright/test";
-import { Logger } from "@waku/utils";
-
-const log = new Logger("playwright-config");
-
+// Only load dotenv-flow in non-CI environments
 if (!process.env.CI) {
-  try {
-    await import("dotenv-flow/config.js");
-  } catch (e) {
-    log.warn("dotenv-flow not found; skipping env loading");
-  }
+  // Need to use .js extension for ES modules
+  // eslint-disable-next-line import/extensions
+  await import("dotenv-flow/config.js");
 }
 
 const EXAMPLE_PORT = process.env.EXAMPLE_PORT || "8080";
-const BASE_URL = `http://127.0.0.1:${EXAMPLE_PORT}`;
-const TEST_IGNORE = process.env.CI ? ["tests/e2e.spec.ts"] : [];
+// web-chat specific thingy
+const EXAMPLE_TEMPLATE = process.env.EXAMPLE_TEMPLATE || "";
+const BASE_URL = `http://127.0.0.1:${EXAMPLE_PORT}/${EXAMPLE_TEMPLATE}`;
+
+/**
+ * See https://playwright.dev/docs/test-configuration.
+ */
 export default defineConfig({
   testDir: "./tests",
-  testIgnore: TEST_IGNORE,
+  /* Run tests in files in parallel */
   fullyParallel: true,
+  /* Fail the build on CI if you accidentally left test.only in the source code. */
   forbidOnly: !!process.env.CI,
+  /* Retry on CI only */
   retries: process.env.CI ? 2 : 0,
+  /* Opt out of parallel tests on CI. */
   workers: process.env.CI ? 2 : undefined,
+  /* Reporter to use. See https://playwright.dev/docs/test-reporters */
   reporter: "html",
+  /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
   use: {
+    /* Base URL to use in actions like `await page.goto('/')`. */
     baseURL: BASE_URL,
+
+    /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
     trace: "on-first-retry"
   },
+
+  /* Configure projects for major browsers */
   projects: [
     {
       name: "chromium",
       use: { ...devices["Desktop Chrome"] }
     }
-  ]
+  ],
+
+  /* Run your local dev server before starting the tests */
+  webServer: {
+    url: BASE_URL,
+    stdout: "pipe",
+    stderr: "pipe",
+    command: "npm run start:server",
+    reuseExistingServer: !process.env.CI,
+    timeout: 5 * 60 * 1000 // five minutes for bootstrapping an example
+  }
 });

@@ -1,54 +0,0 @@
-#!/bin/bash
-
-# Docker entrypoint script for waku-browser-tests
-# Handles CLI arguments and converts them to environment variables
-# Supports reading discovered addresses from /etc/addrs/addrs.env (10k sim pattern)
-echo "docker-entrypoint.sh"
-echo "Using address: $addrs1"
-# Only set WAKU_LIGHTPUSH_NODE if it's not already set and addrs1 is available
-if [ -z "$WAKU_LIGHTPUSH_NODE" ] && [ -n "$addrs1" ]; then
-  export WAKU_LIGHTPUSH_NODE="$addrs1"
-fi
-echo "Num Args: $#"
-echo "Args: $@"
-
-echo "WAKU_LIGHTPUSH_NODE=$WAKU_LIGHTPUSH_NODE"
-
-# Parse command line arguments
-while [[ $# -gt 0 ]]; do
-  case $1 in
-    --cluster-id=*)
-      export WAKU_CLUSTER_ID="${1#*=}"
-      echo "Setting WAKU_CLUSTER_ID=${WAKU_CLUSTER_ID}"
-      shift
-      ;;
-    --shard=*)
-      export WAKU_SHARD="${1#*=}"
-      echo "Setting WAKU_SHARD=${WAKU_SHARD}"
-      shift
-      ;;
-    --lightpushnode=*)
-      export WAKU_LIGHTPUSH_NODE="${1#*=}"
-      echo "Setting WAKU_LIGHTPUSH_NODE=${WAKU_LIGHTPUSH_NODE}"
-      shift
-      ;;
-    --enr-bootstrap=*)
-      export WAKU_ENR_BOOTSTRAP="${1#*=}"
-      echo "Setting WAKU_ENR_BOOTSTRAP=${WAKU_ENR_BOOTSTRAP}"
-      shift
-      ;;
-    *)
-      # Unknown argument, notify user and keep it for the main command
-      echo "Warning: Unknown argument '$1' will be passed to the main command"
-      break
-      ;;
-  esac
-done
-
-# If no specific command is provided, use the default CMD
-if [ $# -eq 0 ]; then
-  set -- "npm" "run" "start:server"
-fi
-
-# Execute the main command
-exec "$@"
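
The entrypoint deleted above translates CLI flags such as `--cluster-id=2` into `WAKU_*` environment variables before handing off to the server. A TypeScript equivalent of that flag-to-env mapping (a sketch, not part of the package):

```ts
// Sketch of the entrypoint's flag parsing: --cluster-id=2 -> WAKU_CLUSTER_ID=2, etc.
const flagToEnv: Record<string, string> = {
  "--cluster-id": "WAKU_CLUSTER_ID",
  "--shard": "WAKU_SHARD",
  "--lightpushnode": "WAKU_LIGHTPUSH_NODE",
  "--enr-bootstrap": "WAKU_ENR_BOOTSTRAP"
};

for (const arg of process.argv.slice(2)) {
  const eq = arg.indexOf("=");
  if (eq === -1) break;
  const flag = arg.slice(0, eq);
  const value = arg.slice(eq + 1); // everything after the first '=', like bash ${1#*=}
  const envName = flagToEnv[flag];
  if (!envName) break; // unknown argument: leave it for the main command
  process.env[envName] = value;
}
```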

packages/browser-tests/src/api/common.d.ts (vendored; new file, 22 lines)

@@ -0,0 +1,22 @@
+/**
+ * Shared utilities for working with Waku nodes
+ * This file contains functions used by both browser tests and server
+ */
+
+/**
+ * Type definition for a minimal Waku node interface
+ * This allows us to use the same code in different contexts
+ */
+export interface IWakuNode {
+  libp2p: {
+    peerId: { toString(): string };
+    getMultiaddrs(): Array<{ toString(): string }>;
+    getProtocols(): any;
+    peerStore: {
+      all(): Promise<Array<{ id: { toString(): string } }>>;
+    };
+  };
+  lightPush: {
+    send: (encoder: any, message: { payload: Uint8Array }) => Promise<{ successes: any[] }>;
+  };
+}
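
The `IWakuNode` interface above deliberately narrows the node to the few members the helpers touch, so callers can pass a stub instead of a live node. A hypothetical stub satisfying the interface, for instance in a unit test:

```ts
import type { IWakuNode } from "./common.js";

// Hypothetical stub satisfying IWakuNode, usable where a real node is overkill.
const stubNode: IWakuNode = {
  libp2p: {
    peerId: { toString: () => "16Uiu2HAmStubPeerId" },
    getMultiaddrs: () => [{ toString: () => "/ip4/127.0.0.1/tcp/0" }],
    getProtocols: () => [],
    peerStore: {
      all: async () => [] // no known peers in the stub
    }
  },
  lightPush: {
    send: async (_encoder, _message) => ({ successes: [] })
  }
};
```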

packages/browser-tests/src/api/debug.ts (new file, 36 lines)

@@ -0,0 +1,36 @@
+import { IWakuNode } from "./common.js";
+
+/**
+ * Gets peer information from a Waku node
+ * Used in both server API endpoints and headless tests
+ */
+export async function getPeerInfo(waku: IWakuNode): Promise<{
+  peerId: string;
+  multiaddrs: string[];
+  peers: string[];
+}> {
+  const multiaddrs = waku.libp2p.getMultiaddrs();
+  const peers = await waku.libp2p.peerStore.all();
+
+  return {
+    peerId: waku.libp2p.peerId.toString(),
+    multiaddrs: multiaddrs.map((addr) => addr.toString()),
+    peers: peers.map((peer) => peer.id.toString())
+  };
+}
+
+/**
+ * Gets debug information from a Waku node
+ * Used in both server API endpoints and tests
+ */
+export async function getDebugInfo(waku: IWakuNode): Promise<{
+  listenAddresses: string[];
+  peerId: string;
+  protocols: string[];
+}> {
+  return {
+    listenAddresses: waku.libp2p.getMultiaddrs().map((addr) => addr.toString()),
+    peerId: waku.libp2p.peerId.toString(),
+    protocols: Array.from(waku.libp2p.getProtocols())
+  };
+}

packages/browser-tests/src/api/push.ts (new file, 16 lines)

@@ -0,0 +1,16 @@
+import { createEncoder, LightNode, SDKProtocolResult } from "@waku/sdk";
+
+export async function pushMessage(
+  waku: LightNode,
+  contentTopic: string,
+  payload?: Uint8Array
+): Promise<SDKProtocolResult> {
+  const enc = createEncoder({
+    contentTopic
+  });
+
+  const result = await waku.lightPush.send(enc, {
+    payload: payload ?? new Uint8Array()
+  });
+  return result;
+}
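
`pushMessage` in `push.ts` wraps `createEncoder` plus `lightPush.send` for a given content topic. A hypothetical call site, assuming a started light node from `@waku/sdk`:

```ts
import { createLightNode } from "@waku/sdk";
import { pushMessage } from "./push.js";

// Hypothetical usage of pushMessage above: create a node, send one message.
const waku = await createLightNode({ defaultBootstrap: true });
await waku.start();

const result = await pushMessage(
  waku,
  "/toy-chat/2/huilong/proto",
  new TextEncoder().encode("hello from browser-tests")
);
console.log("lightpush successes:", result.successes.length);
```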

packages/browser-tests/src/api/shared.ts (new file, 274 lines)

@@ -0,0 +1,274 @@
+import {
+  createDecoder,
+  createEncoder,
+  createLightNode,
+  CreateNodeOptions,
+  DecodedMessage,
+  LightNode,
+  SDKProtocolResult,
+  SubscribeResult
+} from "@waku/sdk";
+
+import { IWakuNode } from "./common.js";
+
+/**
+ * Gets peer information from a Waku node
+ */
+export async function getPeerInfo(waku: IWakuNode): Promise<{
+  peerId: string;
+  multiaddrs: string[];
+  peers: string[];
+}> {
+  const multiaddrs = waku.libp2p.getMultiaddrs();
+  const peers = await waku.libp2p.peerStore.all();
+
+  return {
+    peerId: waku.libp2p.peerId.toString(),
+    multiaddrs: multiaddrs.map((addr) => addr.toString()),
+    peers: peers.map((peer) => peer.id.toString())
+  };
+}
+
+/**
+ * Gets debug information from a Waku node
+ */
+export async function getDebugInfo(waku: IWakuNode): Promise<{
+  listenAddresses: string[];
+  peerId: string;
+  protocols: string[];
+}> {
+  return {
+    listenAddresses: waku.libp2p.getMultiaddrs().map((addr) => addr.toString()),
+    peerId: waku.libp2p.peerId.toString(),
+    protocols: Array.from(waku.libp2p.getProtocols())
+  };
+}
+
+/**
+ * Pushes a message to the network
+ */
+export async function pushMessage(
+  waku: LightNode,
+  contentTopic: string,
+  payload?: Uint8Array,
+  options?: {
+    clusterId?: number;
+    shard?: number;
+  }
+): Promise<SDKProtocolResult> {
+  if (!waku) {
+    throw new Error("Waku node not found");
+  }
+
+  const encoder = createEncoder({
+    contentTopic,
+    pubsubTopicShardInfo: {
+      clusterId: options?.clusterId ?? 1,
+      shard: options?.shard ?? 1
+    }
+  });
+
+  const result = await waku.lightPush.send(encoder, {
+    payload: payload ?? new Uint8Array()
+  });
+  return result;
+}
+
+/**
+ * Creates and initializes a Waku node
+ * Checks if a node is already running in window and stops it if it exists
+ */
+export async function createWakuNode(
+  options: CreateNodeOptions
+): Promise<{ success: boolean; error?: string }> {
+  // Check if we're in a browser environment and a node already exists
+  if (typeof window === "undefined") {
+    return { success: false, error: "No window found" };
+  }
+
+  try {
+    if ((window as any).waku) {
+      await (window as any).waku.stop();
+    }
+    (window as any).waku = await createLightNode(options);
+    return { success: true };
+  } catch (error: any) {
+    return { success: false, error: error.message };
+  }
+}
+
+export async function startNode(): Promise<{
+  success: boolean;
+  error?: string;
+}> {
+  if (typeof window !== "undefined" && (window as any).waku) {
+    try {
+      await (window as any).waku.start();
+      return { success: true };
+    } catch (error: any) {
+      // Silently continue if there's an error starting the node
+      return { success: false, error: error.message };
+    }
+  }
+  return { success: false, error: "Waku node not found in window" };
+}
+
+export async function stopNode(): Promise<{
+  success: boolean;
+  error?: string;
+}> {
+  if (typeof window !== "undefined" && (window as any).waku) {
+    await (window as any).waku.stop();
+    return { success: true };
+  }
+  return { success: false, error: "Waku node not found in window" };
+}
+
+export async function dialPeers(
+  waku: LightNode,
+  peers: string[]
|
||||||
|
): Promise<{
|
||||||
|
total: number;
|
||||||
|
errors: string[];
|
||||||
|
}> {
|
||||||
|
const total = peers.length;
|
||||||
|
const errors: string[] = [];
|
||||||
|
|
||||||
|
await Promise.allSettled(
|
||||||
|
peers.map((peer) =>
|
||||||
|
waku.dial(peer).catch((error: any) => {
|
||||||
|
errors.push(error.message);
|
||||||
|
})
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
return { total, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function subscribe(
|
||||||
|
waku: LightNode,
|
||||||
|
contentTopic: string,
|
||||||
|
options?: {
|
||||||
|
clusterId?: number;
|
||||||
|
shard?: number;
|
||||||
|
},
|
||||||
|
// eslint-disable-next-line no-unused-vars
|
||||||
|
callback?: (message: DecodedMessage) => void
|
||||||
|
): Promise<SubscribeResult> {
|
||||||
|
const clusterId = options?.clusterId ?? 42;
|
||||||
|
const shard = options?.shard ?? 0;
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`Creating decoder for content topic ${contentTopic} with clusterId=${clusterId}, shard=${shard}`
|
||||||
|
);
|
||||||
|
|
||||||
|
const pubsubTopic = `/waku/2/rs/${clusterId}/${shard}`;
|
||||||
|
|
||||||
|
let configuredTopics: string[] = [];
|
||||||
|
|
||||||
|
try {
|
||||||
|
const protocols = waku.libp2p.getProtocols();
|
||||||
|
console.log(`Available protocols: ${Array.from(protocols).join(", ")}`);
|
||||||
|
|
||||||
|
const metadataMethod = (waku.libp2p as any)._services?.metadata?.getInfo;
|
||||||
|
if (metadataMethod) {
|
||||||
|
const metadata = metadataMethod();
|
||||||
|
console.log(`Node metadata: ${JSON.stringify(metadata)}`);
|
||||||
|
|
||||||
|
if (metadata?.pubsubTopics && Array.isArray(metadata.pubsubTopics)) {
|
||||||
|
configuredTopics = metadata.pubsubTopics;
|
||||||
|
console.log(
|
||||||
|
`Found configured pubsub topics: ${configuredTopics.join(", ")}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
configuredTopics.length > 0 &&
|
||||||
|
!configuredTopics.includes(pubsubTopic)
|
||||||
|
) {
|
||||||
|
console.warn(
|
||||||
|
`Pubsub topic ${pubsubTopic} is not configured. Configured topics: ${configuredTopics.join(", ")}`
|
||||||
|
);
|
||||||
|
|
||||||
|
for (const topic of configuredTopics) {
|
||||||
|
const parts = topic.split("/");
|
||||||
|
if (parts.length === 6 && parts[1] === "waku" && parts[3] === "rs") {
|
||||||
|
console.log(`Found potential matching pubsub topic: ${topic}`);
|
||||||
|
|
||||||
|
// Use the first topic as a fallback if no exact match is found
|
||||||
|
// This isn't ideal but allows tests to continue
|
||||||
|
const topicClusterId = parseInt(parts[4]);
|
||||||
|
const topicShard = parseInt(parts[5]);
|
||||||
|
|
||||||
|
if (!isNaN(topicClusterId) && !isNaN(topicShard)) {
|
||||||
|
console.log(
|
||||||
|
`Using pubsub topic with clusterId=${topicClusterId}, shard=${topicShard} instead`
|
||||||
|
);
|
||||||
|
|
||||||
|
const decoder = createDecoder(contentTopic, {
|
||||||
|
clusterId: topicClusterId,
|
||||||
|
shard: topicShard
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
const subscription = await waku.filter.subscribe(
|
||||||
|
decoder,
|
||||||
|
callback ??
|
||||||
|
((_message) => {
|
||||||
|
console.log(_message);
|
||||||
|
})
|
||||||
|
);
|
||||||
|
return subscription;
|
||||||
|
} catch (innerErr: any) {
|
||||||
|
console.error(
|
||||||
|
`Error with alternative pubsub topic: ${innerErr.message}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Error checking node protocols: ${String(err)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const decoder = createDecoder(contentTopic, {
|
||||||
|
clusterId,
|
||||||
|
shard
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
const subscription = await waku.filter.subscribe(
|
||||||
|
decoder,
|
||||||
|
callback ??
|
||||||
|
((_message) => {
|
||||||
|
console.log(_message);
|
||||||
|
})
|
||||||
|
);
|
||||||
|
return subscription;
|
||||||
|
} catch (err: any) {
|
||||||
|
if (err.message && err.message.includes("Pubsub topic")) {
|
||||||
|
console.error(`Pubsub topic error: ${err.message}`);
|
||||||
|
console.log("Subscription failed, but continuing with empty result");
|
||||||
|
|
||||||
|
return {
|
||||||
|
unsubscribe: async () => {
|
||||||
|
console.log("No-op unsubscribe from failed subscription");
|
||||||
|
}
|
||||||
|
} as unknown as SubscribeResult;
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const API = {
|
||||||
|
getPeerInfo,
|
||||||
|
getDebugInfo,
|
||||||
|
pushMessage,
|
||||||
|
createWakuNode,
|
||||||
|
startNode,
|
||||||
|
stopNode,
|
||||||
|
dialPeers,
|
||||||
|
subscribe
|
||||||
|
};
|
||||||
@ -1,63 +1,43 @@
|
|||||||
import { Browser, chromium, Page } from "@playwright/test";
|
import { Browser, chromium, Page } from "@playwright/test";
|
||||||
import { Logger } from "@waku/utils";
|
|
||||||
|
|
||||||
const log = new Logger("browser-test");
|
|
||||||
|
|
||||||
|
// Global variable to store the browser and page
|
||||||
let browser: Browser | undefined;
|
let browser: Browser | undefined;
|
||||||
let page: Page | undefined;
|
let page: Page | undefined;
|
||||||
|
|
||||||
export async function initBrowser(appPort: number): Promise<void> {
|
/**
|
||||||
try {
|
* Initialize browser and load headless page
|
||||||
const launchArgs = ["--no-sandbox", "--disable-setuid-sandbox"];
|
*/
|
||||||
|
export async function initBrowser(): Promise<void> {
|
||||||
|
browser = await chromium.launch({
|
||||||
|
headless: true
|
||||||
|
});
|
||||||
|
|
||||||
browser = await chromium.launch({
|
if (!browser) {
|
||||||
headless: true,
|
throw new Error("Failed to initialize browser");
|
||||||
args: launchArgs
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!browser) {
|
|
||||||
throw new Error("Failed to initialize browser");
|
|
||||||
}
|
|
||||||
|
|
||||||
page = await browser.newPage();
|
|
||||||
|
|
||||||
// Forward browser console to server logs
|
|
||||||
page.on('console', msg => {
|
|
||||||
const type = msg.type();
|
|
||||||
const text = msg.text();
|
|
||||||
log.info(`[Browser Console ${type.toUpperCase()}] ${text}`);
|
|
||||||
});
|
|
||||||
|
|
||||||
page.on('pageerror', error => {
|
|
||||||
log.error('[Browser Page Error]', error.message);
|
|
||||||
});
|
|
||||||
|
|
||||||
await page.goto(`http://localhost:${appPort}/app/index.html`, {
|
|
||||||
waitUntil: "networkidle",
|
|
||||||
});
|
|
||||||
|
|
||||||
await page.waitForFunction(
|
|
||||||
() => {
|
|
||||||
return window.wakuApi && typeof window.wakuApi.createWakuNode === "function";
|
|
||||||
},
|
|
||||||
{ timeout: 30000 }
|
|
||||||
);
|
|
||||||
|
|
||||||
log.info("Browser initialized successfully with wakuApi");
|
|
||||||
} catch (error) {
|
|
||||||
log.error("Error initializing browser:", error);
|
|
||||||
throw error;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
page = await browser.newPage();
|
||||||
|
|
||||||
|
await page.goto("http://localhost:8080");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current page instance
|
||||||
|
*/
|
||||||
export function getPage(): Page | undefined {
|
export function getPage(): Page | undefined {
|
||||||
return page;
|
return page;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the page instance (for use by server.ts)
|
||||||
|
*/
|
||||||
export function setPage(pageInstance: Page | undefined): void {
|
export function setPage(pageInstance: Page | undefined): void {
|
||||||
page = pageInstance;
|
page = pageInstance;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Closes the browser instance
|
||||||
|
*/
|
||||||
export async function closeBrowser(): Promise<void> {
|
export async function closeBrowser(): Promise<void> {
|
||||||
if (browser) {
|
if (browser) {
|
||||||
await browser.close();
|
await browser.close();
|
||||||
|
|||||||
89
packages/browser-tests/src/queue/index.ts
Normal file
89
packages/browser-tests/src/queue/index.ts
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
// Message queue to store received messages by content topic
|
||||||
|
export interface QueuedMessage {
|
||||||
|
payload: number[] | undefined;
|
||||||
|
contentTopic: string;
|
||||||
|
timestamp: number;
|
||||||
|
receivedAt: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MessageQueue {
|
||||||
|
[contentTopic: string]: QueuedMessage[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Global message queue storage
|
||||||
|
const messageQueue: MessageQueue = {};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Store a message in the queue
|
||||||
|
*/
|
||||||
|
export function storeMessage(message: QueuedMessage): void {
|
||||||
|
const { contentTopic } = message;
|
||||||
|
|
||||||
|
if (!messageQueue[contentTopic]) {
|
||||||
|
messageQueue[contentTopic] = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
messageQueue[contentTopic].push(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get messages for a specific content topic
|
||||||
|
*/
|
||||||
|
export function getMessages(
|
||||||
|
contentTopic: string,
|
||||||
|
options?: {
|
||||||
|
startTime?: number;
|
||||||
|
endTime?: number;
|
||||||
|
pageSize?: number;
|
||||||
|
ascending?: boolean;
|
||||||
|
}
|
||||||
|
): QueuedMessage[] {
|
||||||
|
if (!messageQueue[contentTopic]) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
let messages = [...messageQueue[contentTopic]];
|
||||||
|
|
||||||
|
// Filter by time if specified
|
||||||
|
if (options?.startTime || options?.endTime) {
|
||||||
|
messages = messages.filter((msg) => {
|
||||||
|
const afterStart = options.startTime
|
||||||
|
? msg.timestamp >= options.startTime
|
||||||
|
: true;
|
||||||
|
const beforeEnd = options.endTime
|
||||||
|
? msg.timestamp <= options.endTime
|
||||||
|
: true;
|
||||||
|
return afterStart && beforeEnd;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by timestamp
|
||||||
|
messages.sort((a, b) => {
|
||||||
|
return options?.ascending
|
||||||
|
? a.timestamp - b.timestamp
|
||||||
|
: b.timestamp - a.timestamp;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Limit result size
|
||||||
|
if (options?.pageSize && options.pageSize > 0) {
|
||||||
|
messages = messages.slice(0, options.pageSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
return messages;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all messages from the queue
|
||||||
|
*/
|
||||||
|
export function clearQueue(): void {
|
||||||
|
Object.keys(messageQueue).forEach((topic) => {
|
||||||
|
delete messageQueue[topic];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all content topics in the queue
|
||||||
|
*/
|
||||||
|
export function getContentTopics(): string[] {
|
||||||
|
return Object.keys(messageQueue);
|
||||||
|
}
|
||||||
223
packages/browser-tests/src/routes/admin.ts
Normal file
223
packages/browser-tests/src/routes/admin.ts
Normal file
@ -0,0 +1,223 @@
|
|||||||
|
import express, { Request, Response, Router } from "express";
|
||||||
|
|
||||||
|
import { getPage } from "../browser/index.js";
|
||||||
|
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
router.head("/admin/v1/create-node", (_req: Request, res: Response) => {
|
||||||
|
res.status(200).end();
|
||||||
|
});
|
||||||
|
|
||||||
|
router.head("/admin/v1/start-node", (_req: Request, res: Response) => {
|
||||||
|
res.status(200).end();
|
||||||
|
});
|
||||||
|
|
||||||
|
router.head("/admin/v1/stop-node", (_req: Request, res: Response) => {
|
||||||
|
res.status(200).end();
|
||||||
|
});
|
||||||
|
|
||||||
|
router.post("/admin/v1/create-node", (async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const {
|
||||||
|
defaultBootstrap = true,
|
||||||
|
networkConfig
|
||||||
|
} = req.body;
|
||||||
|
|
||||||
|
// Validate that networkConfig is provided
|
||||||
|
if (!networkConfig) {
|
||||||
|
return res.status(400).json({
|
||||||
|
code: 400,
|
||||||
|
message: "networkConfig is required"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate that networkConfig has required properties
|
||||||
|
if (networkConfig.clusterId === undefined) {
|
||||||
|
return res.status(400).json({
|
||||||
|
code: 400,
|
||||||
|
message: "networkConfig.clusterId is required"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const page = getPage();
|
||||||
|
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(
|
||||||
|
({ defaultBootstrap, networkConfig }) => {
|
||||||
|
const nodeOptions: any = {
|
||||||
|
defaultBootstrap,
|
||||||
|
relay: {
|
||||||
|
advertise: true,
|
||||||
|
gossipsubOptions: {
|
||||||
|
allowPublishToZeroPeers: true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
filter: true,
|
||||||
|
peers: [],
|
||||||
|
networkConfig: {
|
||||||
|
clusterId: networkConfig.clusterId,
|
||||||
|
shards: networkConfig.shards || [0]
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return window.wakuAPI.createWakuNode(nodeOptions);
|
||||||
|
},
|
||||||
|
{ defaultBootstrap, networkConfig }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result && result.success) {
|
||||||
|
res.status(200).json({
|
||||||
|
success: true,
|
||||||
|
message: "Waku node created successfully"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: "Failed to create Waku node",
|
||||||
|
details: result?.error || "Unknown error"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not create Waku node: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
// Start Waku node endpoint
|
||||||
|
router.post("/admin/v1/start-node", (async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const page = getPage();
|
||||||
|
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(() => {
|
||||||
|
return window.wakuAPI.startNode
|
||||||
|
? window.wakuAPI.startNode()
|
||||||
|
: { error: "startNode function not available" };
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result && !result.error) {
|
||||||
|
res.status(200).json({
|
||||||
|
success: true,
|
||||||
|
message: "Waku node started successfully"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: "Failed to start Waku node",
|
||||||
|
details: result?.error || "Unknown error"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not start Waku node: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
// Stop Waku node endpoint
|
||||||
|
router.post("/admin/v1/stop-node", (async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const page = getPage();
|
||||||
|
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(() => {
|
||||||
|
return window.wakuAPI.stopNode
|
||||||
|
? window.wakuAPI.stopNode()
|
||||||
|
: { error: "stopNode function not available" };
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result && !result.error) {
|
||||||
|
res.status(200).json({
|
||||||
|
success: true,
|
||||||
|
message: "Waku node stopped successfully"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: "Failed to stop Waku node",
|
||||||
|
details: result?.error || "Unknown error"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not stop Waku node: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
// Dial to peers endpoint
|
||||||
|
router.post("/admin/v1/peers", (async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const { peerMultiaddrs } = req.body;
|
||||||
|
|
||||||
|
if (!peerMultiaddrs || !Array.isArray(peerMultiaddrs)) {
|
||||||
|
return res.status(400).json({
|
||||||
|
code: 400,
|
||||||
|
message: "Invalid request. peerMultiaddrs array is required."
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const page = getPage();
|
||||||
|
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(
|
||||||
|
({ peerAddrs }) => {
|
||||||
|
return window.wakuAPI.dialPeers(window.waku, peerAddrs);
|
||||||
|
},
|
||||||
|
{ peerAddrs: peerMultiaddrs }
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result) {
|
||||||
|
res.status(200).json({
|
||||||
|
peersAdded: peerMultiaddrs.length - (result.errors?.length || 0),
|
||||||
|
peerErrors:
|
||||||
|
result.errors?.map((error: string, index: number) => {
|
||||||
|
return {
|
||||||
|
peerMultiaddr: peerMultiaddrs[index],
|
||||||
|
error
|
||||||
|
};
|
||||||
|
}) || []
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: "Failed to dial peers"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not dial peers: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
export default router;
|
||||||
51
packages/browser-tests/src/routes/info.ts
Normal file
51
packages/browser-tests/src/routes/info.ts
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
import express, { Request, Response, Router } from "express";
|
||||||
|
|
||||||
|
import { getPage } from "../browser/index.js";
|
||||||
|
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
// Get node info endpoint
|
||||||
|
router.get("/info", (async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const page = getPage();
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(() => {
|
||||||
|
return window.wakuAPI.getPeerInfo(window.waku);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(result);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error getting info:", error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
// Get node debug info endpoint
|
||||||
|
router.get("/debug/v1/info", (async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const page = getPage();
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(() => {
|
||||||
|
return window.wakuAPI.getDebugInfo(window.waku);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(result);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error getting debug info:", error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
export default router;
|
||||||
131
packages/browser-tests/src/routes/push.ts
Normal file
131
packages/browser-tests/src/routes/push.ts
Normal file
@ -0,0 +1,131 @@
|
|||||||
|
import express, { Request, Response, Router } from "express";
|
||||||
|
|
||||||
|
import { getPage } from "../browser/index.js";
|
||||||
|
|
||||||
|
const router = Router();
|
||||||
|
|
||||||
|
// Legacy push message endpoint
|
||||||
|
router.post("/push", (async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const { contentTopic, payload } = req.body;
|
||||||
|
|
||||||
|
if (!contentTopic) {
|
||||||
|
return res.status(400).json({
|
||||||
|
code: 400,
|
||||||
|
message: "Invalid request. contentTopic is required."
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const page = getPage();
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(
|
||||||
|
({ topic, data }) => {
|
||||||
|
return window.wakuAPI.pushMessage(window.waku, topic, data);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
topic: contentTopic,
|
||||||
|
data: payload
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result) {
|
||||||
|
res.status(200).json({
|
||||||
|
messageId:
|
||||||
|
"0x" +
|
||||||
|
Buffer.from(contentTopic + Date.now().toString()).toString("hex")
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Could not publish message: no suitable peers"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
if (
|
||||||
|
error.message.includes("size exceeds") ||
|
||||||
|
error.message.includes("stream reset")
|
||||||
|
) {
|
||||||
|
res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message:
|
||||||
|
"Could not publish message: message size exceeds gossipsub max message size"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not publish message: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
// Waku REST API compatible push endpoint
|
||||||
|
router.post("/lightpush/v1/message", (async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const { message } = req.body;
|
||||||
|
|
||||||
|
if (!message || !message.contentTopic) {
|
||||||
|
return res.status(400).json({
|
||||||
|
code: 400,
|
||||||
|
message: "Invalid request. contentTopic is required."
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const page = getPage();
|
||||||
|
if (!page) {
|
||||||
|
return res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Browser not initialized"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await page.evaluate(
|
||||||
|
({ contentTopic, payload }) => {
|
||||||
|
return window.wakuAPI.pushMessage(window.waku, contentTopic, payload);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
contentTopic: message.contentTopic,
|
||||||
|
payload: message.payload
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result) {
|
||||||
|
res.status(200).json({
|
||||||
|
messageId:
|
||||||
|
"0x" +
|
||||||
|
Buffer.from(message.contentTopic + Date.now().toString()).toString(
|
||||||
|
"hex"
|
||||||
|
)
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Could not publish message: no suitable peers"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
if (
|
||||||
|
error.message.includes("size exceeds") ||
|
||||||
|
error.message.includes("stream reset")
|
||||||
|
) {
|
||||||
|
res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message:
|
||||||
|
"Could not publish message: message size exceeds gossipsub max message size"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not publish message: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
export default router;
|
||||||
@ -1,87 +0,0 @@
|
|||||||
import { Router } from "express";
|
|
||||||
import { Logger } from "@waku/utils";
|
|
||||||
import {
|
|
||||||
createEndpointHandler,
|
|
||||||
validators,
|
|
||||||
errorHandlers,
|
|
||||||
} from "../utils/endpoint-handler.js";
|
|
||||||
|
|
||||||
interface LightPushResult {
|
|
||||||
successes: string[];
|
|
||||||
failures: Array<{ error: string; peerId?: string }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const log = new Logger("routes:waku");
|
|
||||||
const router = Router();
|
|
||||||
|
|
||||||
const corsEndpoints = [
|
|
||||||
"/waku/v1/wait-for-peers",
|
|
||||||
"/waku/v1/peer-info",
|
|
||||||
"/lightpush/v3/message",
|
|
||||||
];
|
|
||||||
|
|
||||||
corsEndpoints.forEach((endpoint) => {
|
|
||||||
router.head(endpoint, (_req, res) => {
|
|
||||||
res.status(200).end();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
router.post(
|
|
||||||
"/waku/v1/wait-for-peers",
|
|
||||||
createEndpointHandler({
|
|
||||||
methodName: "waitForPeers",
|
|
||||||
validateInput: (body: unknown) => {
|
|
||||||
const bodyObj = body as { timeoutMs?: number; protocols?: string[] };
|
|
||||||
return [
|
|
||||||
bodyObj.timeoutMs || 10000,
|
|
||||||
bodyObj.protocols || ["lightpush", "filter"],
|
|
||||||
];
|
|
||||||
},
|
|
||||||
transformResult: () => ({
|
|
||||||
success: true,
|
|
||||||
message: "Successfully connected to peers",
|
|
||||||
}),
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
router.get(
|
|
||||||
"/waku/v1/peer-info",
|
|
||||||
createEndpointHandler({
|
|
||||||
methodName: "getPeerInfo",
|
|
||||||
validateInput: validators.noInput,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
router.post(
|
|
||||||
"/lightpush/v3/message",
|
|
||||||
createEndpointHandler({
|
|
||||||
methodName: "pushMessageV3",
|
|
||||||
validateInput: (body: unknown): [string, string, string] => {
|
|
||||||
const validatedRequest = validators.requireLightpushV3(body);
|
|
||||||
|
|
||||||
return [
|
|
||||||
validatedRequest.message.contentTopic,
|
|
||||||
validatedRequest.message.payload,
|
|
||||||
validatedRequest.pubsubTopic,
|
|
||||||
];
|
|
||||||
},
|
|
||||||
handleError: errorHandlers.lightpushError,
|
|
||||||
transformResult: (result: unknown) => {
|
|
||||||
const lightPushResult = result as LightPushResult;
|
|
||||||
if (lightPushResult && lightPushResult.successes && lightPushResult.successes.length > 0) {
|
|
||||||
log.info("[Server] Message successfully sent via v3 lightpush!");
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
result: lightPushResult,
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: "Could not publish message: no suitable peers",
|
|
||||||
};
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
export default router;
|
|
||||||
@ -1,244 +1,507 @@
|
|||||||
|
import { ChildProcess, exec } from "child_process";
|
||||||
|
import * as net from "net";
|
||||||
|
import { dirname, join } from "path";
|
||||||
import { fileURLToPath } from "url";
|
import { fileURLToPath } from "url";
|
||||||
import * as path from "path";
|
|
||||||
|
|
||||||
|
import { chromium } from "@playwright/test";
|
||||||
import cors from "cors";
|
import cors from "cors";
|
||||||
import express, { Request, Response } from "express";
|
import express, { Request, Response } from "express";
|
||||||
import { Logger } from "@waku/utils";
|
|
||||||
|
|
||||||
import wakuRouter from "./routes/waku.js";
|
import adminRouter from "./routes/admin.js";
|
||||||
import { initBrowser, getPage, closeBrowser } from "./browser/index.js";
|
import { setPage, getPage, closeBrowser } from "./browser/index.js";
|
||||||
import {
|
|
||||||
DEFAULT_CLUSTER_ID,
|
|
||||||
DEFAULT_NUM_SHARDS,
|
|
||||||
Protocols,
|
|
||||||
AutoSharding,
|
|
||||||
StaticSharding,
|
|
||||||
} from "@waku/interfaces";
|
|
||||||
import { CreateNodeOptions } from "@waku/sdk";
|
|
||||||
import type { WindowNetworkConfig } from "../types/global.js";
|
|
||||||
|
|
||||||
interface NodeError extends Error {
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
code?: string;
|
const __dirname = dirname(__filename);
|
||||||
}
|
|
||||||
|
|
||||||
const log = new Logger("server");
|
|
||||||
const app = express();
|
const app = express();
|
||||||
|
|
||||||
app.use(cors());
|
app.use(cors());
|
||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
|
app.use(adminRouter);
|
||||||
|
|
||||||
import * as fs from "fs";
|
let headlessServerProcess: ChildProcess | undefined;
|
||||||
|
|
||||||
const __filename = fileURLToPath(import.meta.url);
|
interface MessageQueue {
|
||||||
const __dirname = path.dirname(__filename);
|
[contentTopic: string]: Array<{
|
||||||
const distRoot = path.resolve(__dirname, "..");
|
payload: number[] | undefined;
|
||||||
const webDir = path.resolve(distRoot, "web");
|
contentTopic: string;
|
||||||
|
timestamp: number;
|
||||||
|
receivedAt: number;
|
||||||
|
}>;
|
||||||
|
}
|
||||||
|
|
||||||
app.get("/app/index.html", (_req: Request, res: Response) => {
|
const messageQueue: MessageQueue = {};
|
||||||
|
|
||||||
|
async function startHeadlessServer(): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
try {
|
||||||
|
headlessServerProcess = exec(
|
||||||
|
`serve ${join(__dirname, "../../headless-tests")} -p 8080 -s`,
|
||||||
|
(error) => {
|
||||||
|
if (error) {
|
||||||
|
console.error(`Error starting serve: ${error}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
setTimeout(resolve, 2000);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Failed to start headless server:", error);
|
||||||
|
reject(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function initBrowser(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const htmlPath = path.join(webDir, "index.html");
|
const browser = await chromium.launch({
|
||||||
let htmlContent = fs.readFileSync(htmlPath, "utf8");
|
headless: true
|
||||||
|
});
|
||||||
|
|
||||||
const networkConfig: WindowNetworkConfig = {};
|
if (!browser) {
|
||||||
if (process.env.WAKU_CLUSTER_ID) {
|
throw new Error("Failed to initialize browser");
|
||||||
networkConfig.clusterId = parseInt(process.env.WAKU_CLUSTER_ID, 10);
|
|
||||||
}
|
|
||||||
if (process.env.WAKU_SHARD) {
|
|
||||||
networkConfig.shards = [parseInt(process.env.WAKU_SHARD, 10)];
|
|
||||||
log.info("Using static shard:", networkConfig.shards);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const lightpushNode = process.env.WAKU_LIGHTPUSH_NODE || null;
|
const page = await browser.newPage();
|
||||||
const enrBootstrap = process.env.WAKU_ENR_BOOTSTRAP || null;
|
|
||||||
|
|
||||||
log.info("Network config on server start, pre headless:", networkConfig);
|
try {
|
||||||
|
await checkServerAvailability("http://localhost:8080", 3);
|
||||||
|
await page.goto("http://localhost:8080");
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
"Error loading headless app, continuing without it:",
|
||||||
|
error
|
||||||
|
);
|
||||||
|
await page.setContent(`
|
||||||
|
<html>
|
||||||
|
<head><title>Waku Test Environment</title></head>
|
||||||
|
<body>
|
||||||
|
<h1>Waku Test Environment (No headless app available)</h1>
|
||||||
|
<script>
|
||||||
|
window.waku = {};
|
||||||
|
window.wakuAPI = {
|
||||||
|
getPeerInfo: () => ({ peerId: "mock-peer-id", multiaddrs: [], peers: [] }),
|
||||||
|
getDebugInfo: () => ({ listenAddresses: [], peerId: "mock-peer-id", protocols: [] }),
|
||||||
|
pushMessage: () => ({ successes: [], failures: [{ error: "No headless app available" }] }),
|
||||||
|
dialPeers: () => ({ total: 0, errors: ["No headless app available"] }),
|
||||||
|
createWakuNode: () => ({ success: true, message: "Mock node created" }),
|
||||||
|
startNode: () => ({ success: true }),
|
||||||
|
stopNode: () => ({ success: true }),
|
||||||
|
subscribe: () => ({ unsubscribe: async () => {} })
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
const configScript = ` <script>
|
setPage(page);
|
||||||
window.__WAKU_NETWORK_CONFIG = ${JSON.stringify(networkConfig)};
|
|
||||||
window.__WAKU_LIGHTPUSH_NODE = ${JSON.stringify(lightpushNode)};
|
|
||||||
window.__WAKU_ENR_BOOTSTRAP = ${JSON.stringify(enrBootstrap)};
|
|
||||||
</script>`;
|
|
||||||
const originalPattern =
|
|
||||||
' <script type="module" src="./index.js"></script>';
|
|
||||||
const replacement = `${configScript}\n <script type="module" src="./index.js"></script>`;
|
|
||||||
|
|
||||||
htmlContent = htmlContent.replace(originalPattern, replacement);
|
|
||||||
|
|
||||||
res.setHeader("Content-Type", "text/html");
|
|
||||||
res.send(htmlContent);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
log.error("Error serving dynamic index.html:", error);
|
console.error("Error initializing browser:", error);
|
||||||
res.status(500).send("Error loading page");
|
throw error;
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
|
||||||
app.use("/app", express.static(webDir, { index: false }));
|
async function checkServerAvailability(
|
||||||
|
url: string,
|
||||||
|
retries = 3
|
||||||
|
): Promise<boolean> {
|
||||||
|
for (let i = 0; i < retries; i++) {
|
||||||
|
try {
|
||||||
|
const response = await fetch(url, { method: "HEAD" });
|
||||||
|
if (response.ok) return true;
|
||||||
|
} catch (e) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw new Error(`Server at ${url} not available after ${retries} retries`);
|
||||||
|
}
|
||||||
|
|
||||||
app.use(wakuRouter);
|
async function findAvailablePort(
|
||||||
|
startPort: number,
|
||||||
|
maxAttempts = 10
|
||||||
|
): Promise<number> {
|
||||||
|
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
||||||
|
const port = startPort + attempt;
|
||||||
|
try {
|
||||||
|
// Try to create a server on the port
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const server = net
|
||||||
|
.createServer()
|
||||||
|
.once("error", (err: any) => {
|
||||||
|
reject(err);
|
||||||
|
})
|
||||||
|
.once("listening", () => {
|
||||||
|
// If we can listen, the port is available
|
||||||
|
server.close();
|
||||||
|
resolve();
|
||||||
|
})
|
||||||
|
.listen(port);
|
||||||
|
});
|
||||||
|
|
||||||
async function startAPI(requestedPort: number): Promise<number> {
|
// If we get here, the port is available
|
||||||
|
return port;
|
||||||
|
} catch (err) {
|
||||||
|
// Port is not available, continue to next port
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we tried all ports and none are available, throw an error
|
||||||
|
throw new Error(
|
||||||
|
`Unable to find an available port after ${maxAttempts} attempts`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function startServer(port: number = 3000): Promise<void> {
|
||||||
|
try {
|
||||||
|
await startHeadlessServer();
|
||||||
|
|
||||||
|
await initBrowser();
|
||||||
|
|
||||||
|
await startAPI(port);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error starting server:", error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function startAPI(requestedPort: number): Promise<void> {
|
||||||
try {
|
try {
|
||||||
app.get("/", (_req: Request, res: Response) => {
|
app.get("/", (_req: Request, res: Response) => {
|
||||||
res.json({ status: "Waku simulation server is running" });
|
res.json({ status: "Waku simulation server is running" });
|
||||||
});
|
});
|
||||||
|
|
||||||
|
app.get("/info", (async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const result = await getPage()?.evaluate(() => {
|
||||||
|
return window.wakuAPI.getPeerInfo(window.waku);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(result);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error getting info:", error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
app.get("/debug/v1/info", (async (_req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const result = await getPage()?.evaluate(() => {
|
||||||
|
return window.wakuAPI.getDebugInfo(window.waku);
|
||||||
|
});
|
||||||
|
|
||||||
|
res.json(result);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error getting debug info:", error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
app.post("/lightpush/v1/message", (async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const { message } = req.body;
|
||||||
|
|
||||||
|
if (!message || !message.contentTopic) {
|
||||||
|
return res.status(400).json({
|
||||||
|
code: 400,
|
||||||
|
message: "Invalid request. contentTopic is required."
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await getPage()?.evaluate(
|
||||||
|
({ contentTopic, payload }) => {
|
||||||
|
return window.wakuAPI.pushMessage(
|
||||||
|
window.waku,
|
||||||
|
contentTopic,
|
||||||
|
payload
|
||||||
|
);
|
||||||
|
},
|
||||||
|
{
|
||||||
|
contentTopic: message.contentTopic,
|
||||||
|
payload: message.payload
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result) {
|
||||||
|
res.status(200).json({
|
||||||
|
messageId:
|
||||||
|
"0x" +
|
||||||
|
Buffer.from(
|
||||||
|
message.contentTopic + Date.now().toString()
|
||||||
|
).toString("hex")
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
message: "Could not publish message: no suitable peers"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
|
||||||
|
if (
|
||||||
|
error.message.includes("size exceeds") ||
|
||||||
|
error.message.includes("stream reset")
|
||||||
|
) {
|
||||||
|
res.status(503).json({
|
||||||
|
code: 503,
|
||||||
|
|
||||||
|
message:
|
||||||
|
"Could not publish message: message size exceeds gossipsub max message size"
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Could not publish message: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
app.get("/filter/v2/messages/:contentTopic", (async (
|
||||||
|
req: Request,
|
||||||
|
res: Response
|
||||||
|
) => {
|
||||||
|
try {
|
||||||
|
const { contentTopic } = req.params;
|
||||||
|
const { clusterId, shard } = req.query;
|
||||||
|
|
||||||
|
const options = {
|
||||||
|
clusterId: clusterId ? parseInt(clusterId as string, 10) : 42, // Default to match node creation
|
||||||
|
shard: shard ? parseInt(shard as string, 10) : 0 // Default to match node creation
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
// Set up SSE (Server-Sent Events)
|
||||||
|
res.setHeader("Content-Type", "text/event-stream");
|
||||||
|
res.setHeader("Cache-Control", "no-cache");
|
||||||
|
res.setHeader("Connection", "keep-alive");
|
||||||
|
|
||||||
|
// Function to send SSE
|
||||||
|
const sendSSE = (data: any): void => {
|
||||||
|
res.write(`data: ${JSON.stringify(data)}\n\n`);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Subscribe to messages
|
||||||
|
await getPage()?.evaluate(
|
||||||
|
({ contentTopic, options }) => {
|
||||||
|
// Message handler that will send messages back to the client
|
||||||
|
const callback = (message: any): void => {
|
||||||
|
// Post message to the browser context
|
||||||
|
window.postMessage(
|
||||||
|
{
|
||||||
|
type: "WAKU_MESSAGE",
|
||||||
|
payload: {
|
||||||
|
payload: message.payload
|
||||||
|
? Array.from(message.payload)
|
||||||
|
: undefined,
|
||||||
|
contentTopic: message.contentTopic,
|
||||||
|
timestamp: message.timestamp
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"*"
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return window.wakuAPI.subscribe(
|
||||||
|
window.waku,
|
||||||
|
contentTopic,
|
||||||
|
options,
|
||||||
|
callback
|
||||||
|
);
|
||||||
|
},
|
||||||
|
{ contentTopic, options }
|
||||||
|
);
|
||||||
|
|
||||||
|
// Set up event listener for messages from the page
|
||||||
|
await getPage()?.exposeFunction("sendMessageToServer", (message: any) => {
|
||||||
|
// Send the message as SSE
|
||||||
|
sendSSE(message);
|
||||||
|
|
||||||
|
const topic = message.contentTopic;
|
||||||
|
if (!messageQueue[topic]) {
|
||||||
|
messageQueue[topic] = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
messageQueue[topic].push({
|
||||||
|
...message,
|
||||||
|
receivedAt: Date.now()
|
||||||
|
});
|
||||||
|
|
||||||
|
if (messageQueue[topic].length > 1000) {
|
||||||
|
messageQueue[topic].shift();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add event listener in the browser context to forward messages to the server
|
||||||
|
await getPage()?.evaluate(() => {
|
||||||
|
window.addEventListener("message", (event) => {
|
||||||
|
if (event.data.type === "WAKU_MESSAGE") {
|
||||||
|
(window as any).sendMessageToServer(event.data.payload);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
req.on("close", () => {
|
||||||
|
});
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error in filter subscription:", error);
|
||||||
|
res.write(`data: ${JSON.stringify({ error: error.message })}\n\n`);
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
app.get("/filter/v1/messages/:contentTopic", (async (
|
||||||
|
req: Request,
|
||||||
|
res: Response
|
||||||
|
) => {
|
||||||
|
try {
|
||||||
|
const { contentTopic } = req.params;
|
||||||
|
const {
|
||||||
|
pageSize = "20",
|
||||||
|
startTime,
|
||||||
|
endTime,
|
||||||
|
ascending = "false"
|
||||||
|
} = req.query;
|
||||||
|
|
||||||
|
if (!messageQueue[contentTopic]) {
|
||||||
|
return res.status(200).json({ messages: [] });
|
||||||
|
}
|
||||||
|
|
||||||
|
const limit = parseInt(pageSize as string, 10);
|
||||||
|
const isAscending = (ascending as string).toLowerCase() === "true";
|
||||||
|
const timeStart = startTime ? parseInt(startTime as string, 10) : 0;
|
||||||
|
const timeEnd = endTime ? parseInt(endTime as string, 10) : Date.now();
|
||||||
|
|
||||||
|
const filteredMessages = messageQueue[contentTopic]
|
||||||
|
.filter((msg) => {
|
||||||
|
const msgTime = msg.timestamp || msg.receivedAt;
|
||||||
|
return msgTime >= timeStart && msgTime <= timeEnd;
|
||||||
|
})
|
||||||
|
.sort((a, b) => {
|
||||||
|
const timeA = a.timestamp || a.receivedAt;
|
||||||
|
const timeB = b.timestamp || b.receivedAt;
|
||||||
|
return isAscending ? timeA - timeB : timeB - timeA;
|
||||||
|
})
|
||||||
|
.slice(0, limit);
|
||||||
|
|
||||||
|
|
||||||
|
// Format response to match Waku REST API format
|
||||||
|
const response = {
|
||||||
|
messages: filteredMessages.map((msg) => ({
|
||||||
|
payload: msg.payload
|
||||||
|
? Buffer.from(msg.payload).toString("base64")
|
||||||
|
: "",
|
||||||
|
contentTopic: msg.contentTopic,
|
||||||
|
timestamp: msg.timestamp,
|
||||||
|
version: 0 // Default version
|
||||||
|
}))
|
||||||
|
};
|
||||||
|
|
||||||
|
res.status(200).json(response);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error("Error retrieving messages:", error);
|
||||||
|
res.status(500).json({
|
||||||
|
code: 500,
|
||||||
|
message: `Failed to retrieve messages: ${error.message}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
// Helper endpoint for executing functions (useful for testing)
|
||||||
|
app.post("/execute", (async (req: Request, res: Response) => {
|
||||||
|
try {
|
||||||
|
const { functionName, params = [] } = req.body;
|
||||||
|
|
||||||
|
if (functionName === "simulateMessages") {
|
||||||
|
const [contentTopic, messages] = params;
|
||||||
|
|
||||||
|
if (!messageQueue[contentTopic]) {
|
||||||
|
messageQueue[contentTopic] = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add messages to the queue
|
||||||
|
for (const msg of messages) {
|
||||||
|
messageQueue[contentTopic].push({
|
||||||
|
...msg,
|
||||||
|
contentTopic,
|
||||||
|
receivedAt: Date.now()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return res.status(200).json({
|
||||||
|
success: true,
|
||||||
|
messagesAdded: messages.length
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await getPage()?.evaluate(
|
||||||
|
({ fnName, fnParams }) => {
|
||||||
|
if (!window.wakuAPI[fnName]) {
|
||||||
|
return { error: `Function ${fnName} not found` };
|
||||||
|
}
|
||||||
|
return window.wakuAPI[fnName](...fnParams);
|
||||||
|
},
|
||||||
|
{ fnName: functionName, fnParams: params }
|
||||||
|
);
|
||||||
|
|
||||||
|
res.status(200).json(result);
|
||||||
|
} catch (error: any) {
|
||||||
|
console.error(
|
||||||
|
`Error executing function ${req.body.functionName}:`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
res.status(500).json({
|
||||||
|
error: error.message
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}) as express.RequestHandler);
|
||||||
|
|
||||||
|
|
||||||
|
let actualPort: number;
|
||||||
|
try {
|
||||||
|
actualPort = await findAvailablePort(requestedPort);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Failed to find an available port:", error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
app
|
app
|
||||||
.listen(requestedPort, () => {
|
.listen(actualPort, () => {
|
||||||
log.info(`API server running on http://localhost:${requestedPort}`);
|
|
||||||
})
|
})
|
||||||
.on("error", (error: NodeError) => {
|
.on("error", (error: any) => {
|
||||||
if (error.code === "EADDRINUSE") {
|
if (error.code === "EADDRINUSE") {
|
||||||
log.error(
|
console.error(
|
||||||
`Port ${requestedPort} is already in use. Please close the application using this port and try again.`,
|
`Port ${actualPort} is already in use. Please close the application using this port and try again.`
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
log.error("Error starting server:", error);
|
console.error("Error starting server:", error);
|
||||||
}
|
}
|
||||||
throw error;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
return requestedPort;
|
return Promise.resolve();
|
||||||
} catch (error) {
|
} catch (error: any) {
|
||||||
log.error("Error starting server:", error);
|
console.error("Error starting server:", error);
|
||||||
throw error;
|
return Promise.reject(error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function startServer(port: number = 3000): Promise<void> {
|
process.on("SIGINT", (async () => {
|
||||||
try {
|
await closeBrowser();
|
||||||
const actualPort = await startAPI(port);
|
|
||||||
await initBrowser(actualPort);
|
|
||||||
|
|
||||||
|
if (headlessServerProcess && headlessServerProcess.pid) {
|
||||||
try {
|
try {
|
||||||
log.info("Auto-starting node with CLI configuration...");
|
process.kill(headlessServerProcess.pid);
|
||||||
|
|
||||||
const hasEnrBootstrap = Boolean(process.env.WAKU_ENR_BOOTSTRAP);
|
|
||||||
|
|
||||||
const networkConfig: AutoSharding | StaticSharding = process.env.WAKU_SHARD
|
|
||||||
? ({
|
|
||||||
clusterId: process.env.WAKU_CLUSTER_ID
|
|
||||||
? parseInt(process.env.WAKU_CLUSTER_ID, 10)
|
|
||||||
: DEFAULT_CLUSTER_ID,
|
|
||||||
shards: [parseInt(process.env.WAKU_SHARD, 10)],
|
|
||||||
} as StaticSharding)
|
|
||||||
: ({
|
|
||||||
clusterId: process.env.WAKU_CLUSTER_ID
|
|
||||||
? parseInt(process.env.WAKU_CLUSTER_ID, 10)
|
|
||||||
: DEFAULT_CLUSTER_ID,
|
|
||||||
numShardsInCluster: DEFAULT_NUM_SHARDS,
|
|
||||||
} as AutoSharding);
|
|
||||||
|
|
||||||
const createOptions: CreateNodeOptions = {
|
|
||||||
defaultBootstrap: false,
|
|
||||||
...(hasEnrBootstrap && {
|
|
||||||
discovery: {
|
|
||||||
dns: true,
|
|
||||||
peerExchange: true,
|
|
||||||
peerCache: true,
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
networkConfig,
|
|
||||||
};
|
|
||||||
|
|
||||||
log.info(
|
|
||||||
`Bootstrap mode: ${hasEnrBootstrap ? "ENR-only (defaultBootstrap=false)" : "default bootstrap (defaultBootstrap=true)"}`,
|
|
||||||
);
|
|
||||||
if (hasEnrBootstrap) {
|
|
||||||
log.info(`ENR bootstrap peers: ${process.env.WAKU_ENR_BOOTSTRAP}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info(
|
|
||||||
`Network config: ${JSON.stringify(networkConfig)}`,
|
|
||||||
);
|
|
||||||
|
|
||||||
await getPage()?.evaluate((config) => {
|
|
||||||
return window.wakuApi.createWakuNode(config);
|
|
||||||
}, createOptions);
|
|
||||||
await getPage()?.evaluate(() => window.wakuApi.startNode());
|
|
||||||
|
|
||||||
try {
|
|
||||||
await getPage()?.evaluate(() =>
|
|
||||||
window.wakuApi.waitForPeers?.(5000, [Protocols.LightPush]),
|
|
||||||
);
|
|
||||||
log.info("Auto-start completed with bootstrap peers");
|
|
||||||
} catch (peerError) {
|
|
||||||
log.info(
|
|
||||||
"Auto-start completed (no bootstrap peers found - may be expected with test ENRs)",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
log.warn("Auto-start failed:", e);
|
// Process already stopped
|
||||||
}
|
}
|
||||||
} catch (error) {
|
|
||||||
log.error("Error starting server:", error);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
process.on("uncaughtException", (error) => {
|
|
||||||
log.error("Uncaught Exception:", error);
|
|
||||||
if (process.env.NODE_ENV !== "production") {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
process.on("unhandledRejection", (reason, promise) => {
|
|
||||||
log.error("Unhandled Rejection at:", promise, "reason:", reason);
|
|
||||||
if (process.env.NODE_ENV !== "production") {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const gracefulShutdown = async (signal: string) => {
|
|
||||||
log.info(`Received ${signal}, gracefully shutting down...`);
|
|
||||||
try {
|
|
||||||
await closeBrowser();
|
|
||||||
} catch (e) {
|
|
||||||
log.warn("Error closing browser:", e);
|
|
||||||
}
|
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
};
|
}) as any);
|
||||||
|
|
||||||
process.on("SIGINT", () => gracefulShutdown("SIGINT"));
|
|
||||||
process.on("SIGTERM", () => gracefulShutdown("SIGTERM"));
|
|
||||||
|
|
||||||
function parseCliArgs() {
|
|
||||||
const args = process.argv.slice(2);
|
|
||||||
let clusterId: number | undefined;
|
|
||||||
let shard: number | undefined;
|
|
||||||
|
|
||||||
for (const arg of args) {
|
|
||||||
if (arg.startsWith("--cluster-id=")) {
|
|
||||||
clusterId = parseInt(arg.split("=")[1], 10);
|
|
||||||
if (isNaN(clusterId)) {
|
|
||||||
log.error("Invalid cluster-id value. Must be a number.");
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
} else if (arg.startsWith("--shard=")) {
|
|
||||||
shard = parseInt(arg.split("=")[1], 10);
|
|
||||||
if (isNaN(shard)) {
|
|
||||||
log.error("Invalid shard value. Must be a number.");
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { clusterId, shard };
|
|
||||||
}
|
|
||||||
|
|
||||||
const isMainModule = process.argv[1] === fileURLToPath(import.meta.url);
|
const isMainModule = process.argv[1] === fileURLToPath(import.meta.url);
|
||||||
|
|
||||||
if (isMainModule) {
|
if (isMainModule) {
|
||||||
const port = process.env.PORT ? parseInt(process.env.PORT, 10) : 3000;
|
const port = process.env.PORT ? parseInt(process.env.PORT, 10) : 3000;
|
||||||
const cliArgs = parseCliArgs();
|
|
||||||
|
|
||||||
if (cliArgs.clusterId !== undefined) {
|
|
||||||
process.env.WAKU_CLUSTER_ID = cliArgs.clusterId.toString();
|
|
||||||
log.info(`Using CLI cluster ID: ${cliArgs.clusterId}`);
|
|
||||||
}
|
|
||||||
if (cliArgs.shard !== undefined) {
|
|
||||||
process.env.WAKU_SHARD = cliArgs.shard.toString();
|
|
||||||
log.info(`Using CLI shard: ${cliArgs.shard}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
void startServer(port);
|
void startServer(port);
|
||||||
}
|
}
|
||||||
|
|||||||
8
packages/browser-tests/src/utils.js
Normal file
8
packages/browser-tests/src/utils.js
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
import { readFileSync } from "fs";
|
||||||
|
import { dirname } from "path";
|
||||||
|
import { fileURLToPath } from "url";
|
||||||
|
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
export const __dirname = dirname(__filename);
|
||||||
|
|
||||||
|
export const readJSON = (path) => JSON.parse(readFileSync(path, "utf-8"));
|
||||||
@ -1,197 +0,0 @@
import { Request, Response } from "express";

import { Logger } from "@waku/utils";

import { getPage } from "../browser/index.js";
import type { ITestBrowser } from "../../types/global.js";

const log = new Logger("endpoint-handler");

export interface LightpushV3Request {
  pubsubTopic: string;
  message: {
    payload: string;
    contentTopic: string;
    version: number;
  };
}

export interface LightpushV3Response {
  success?: boolean;
  error?: string;
  result?: {
    successes: string[];
    failures: Array<{
      error: string;
      peerId?: string;
    }>;
  };
}

export interface EndpointConfig<TInput = unknown, TOutput = unknown> {
  methodName: string;
  validateInput?: (_requestBody: unknown) => TInput;
  transformResult?: (_sdkResult: unknown) => TOutput;
  handleError?: (_caughtError: Error) => { code: number; message: string };
  preCheck?: () => Promise<void> | void;
  logResult?: boolean;
}

export function createEndpointHandler<TInput = unknown, TOutput = unknown>(
  config: EndpointConfig<TInput, TOutput>,
) {
  return async (req: Request, res: Response) => {
    try {
      let input: TInput;
      try {
        input = config.validateInput
          ? config.validateInput(req.body)
          : req.body;
      } catch (validationError) {
        return res.status(400).json({
          code: 400,
          message: `Invalid input: ${validationError instanceof Error ? validationError.message : String(validationError)}`,
        });
      }

      if (config.preCheck) {
        try {
          await config.preCheck();
        } catch (checkError) {
          return res.status(503).json({
            code: 503,
            message: checkError instanceof Error ? checkError.message : String(checkError),
          });
        }
      }

      const page = getPage();
      if (!page) {
        return res.status(503).json({
          code: 503,
          message: "Browser not initialized",
        });
      }

      const result = await page.evaluate(
        ({ methodName, params }) => {
          const testWindow = window as ITestBrowser;
          if (!testWindow.wakuApi) {
            throw new Error("window.wakuApi is not available");
          }

          const wakuApi = testWindow.wakuApi as unknown as Record<string, unknown>;
          const method = wakuApi[methodName];
          if (typeof method !== "function") {
            throw new Error(`window.wakuApi.${methodName} is not a function`);
          }

          if (params === null || params === undefined) {
            return method.call(testWindow.wakuApi);
          } else if (Array.isArray(params)) {
            return method.apply(testWindow.wakuApi, params);
          } else {
            return method.call(testWindow.wakuApi, params);
          }
        },
        { methodName: config.methodName, params: input },
      );

      if (config.logResult !== false) {
        log.info(
          `[${config.methodName}] Result:`,
          JSON.stringify(result, null, 2),
        );
      }

      const finalResult = config.transformResult
        ? config.transformResult(result)
        : result;

      res.status(200).json(finalResult);
    } catch (error) {
      if (config.handleError) {
        const errorResponse = config.handleError(error as Error);
        return res.status(errorResponse.code).json({
          code: errorResponse.code,
          message: errorResponse.message,
        });
      }

      log.error(`[${config.methodName}] Error:`, error);
      res.status(500).json({
        code: 500,
        message: `Could not execute ${config.methodName}: ${error instanceof Error ? error.message : String(error)}`,
      });
    }
  };
}

export const validators = {
  requireLightpushV3: (body: unknown): LightpushV3Request => {
    // Type guard to check if body is an object
    if (!body || typeof body !== "object") {
      throw new Error("Request body must be an object");
    }

    const bodyObj = body as Record<string, unknown>;

    if (
      bodyObj.pubsubTopic !== undefined &&
      typeof bodyObj.pubsubTopic !== "string"
    ) {
      throw new Error("pubsubTopic must be a string if provided");
    }
    if (!bodyObj.message || typeof bodyObj.message !== "object") {
      throw new Error("message is required and must be an object");
    }

    const message = bodyObj.message as Record<string, unknown>;

    if (
      !message.contentTopic ||
      typeof message.contentTopic !== "string"
    ) {
      throw new Error("message.contentTopic is required and must be a string");
    }
    if (!message.payload || typeof message.payload !== "string") {
      throw new Error(
        "message.payload is required and must be a string (base64 encoded)",
      );
    }
    if (
      message.version !== undefined &&
      typeof message.version !== "number"
    ) {
      throw new Error("message.version must be a number if provided");
    }

    return {
      pubsubTopic: (bodyObj.pubsubTopic as string) || "",
      message: {
        payload: message.payload as string,
        contentTopic: message.contentTopic as string,
        version: (message.version as number) || 1,
      },
    };
  },

  noInput: () => null,
};

export const errorHandlers = {
  lightpushError: (error: Error) => {
    if (
      error.message.includes("size exceeds") ||
      error.message.includes("stream reset")
    ) {
      return {
        code: 503,
        message:
          "Could not publish message: message size exceeds gossipsub max message size",
      };
    }
    return {
      code: 500,
      message: `Could not publish message: ${error.message}`,
    };
  },
};
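
A minimal wiring sketch for the handler factory above; the route path matches the tests elsewhere in this diff, while the wakuApi method name "lightpushV3" is an assumption:

import express from "express";
import {
  createEndpointHandler,
  validators,
  errorHandlers,
  LightpushV3Request
} from "./endpoint-handler.js";

const app = express();
app.use(express.json());

app.post(
  "/lightpush/v3/message",
  createEndpointHandler<LightpushV3Request>({
    methodName: "lightpushV3", // assumed window.wakuApi method name
    validateInput: validators.requireLightpushV3,
    handleError: errorHandlers.lightpushError
  })
);
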
@ -1,117 +0,0 @@
import { test, expect } from "@playwright/test";
import axios from "axios";
import { StartedTestContainer } from "testcontainers";
import { DefaultTestRoutingInfo } from "@waku/tests";
import {
  startBrowserTestsContainer,
  stopContainer
} from "./utils/container-helpers.js";
import {
  createTwoNodeNetwork,
  getDockerAccessibleMultiaddr,
  stopNwakuNodes,
  TwoNodeNetwork
} from "./utils/nwaku-helpers.js";
import {
  ENV_BUILDERS,
  TEST_CONFIG,
  ASSERTIONS
} from "./utils/test-config.js";

test.describe.configure({ mode: "serial" });

let container: StartedTestContainer;
let nwakuNodes: TwoNodeNetwork;
let baseUrl: string;

test.beforeAll(async () => {
  nwakuNodes = await createTwoNodeNetwork();

  const lightPushPeerAddr = await getDockerAccessibleMultiaddr(nwakuNodes.nodes[0]);

  const result = await startBrowserTestsContainer({
    environment: {
      ...ENV_BUILDERS.withLocalLightPush(lightPushPeerAddr),
      DEBUG: "waku:*",
      WAKU_LIGHTPUSH_NODE: lightPushPeerAddr,
    },
    networkMode: "waku",
  });

  container = result.container;
  baseUrl = result.baseUrl;
});

test.afterAll(async () => {
  await Promise.all([
    stopContainer(container),
    stopNwakuNodes(nwakuNodes?.nodes || []),
  ]);
});

test("WakuHeadless can discover nwaku peer and use it for light push", async () => {
  test.setTimeout(TEST_CONFIG.DEFAULT_TEST_TIMEOUT);

  const contentTopic = TEST_CONFIG.DEFAULT_CONTENT_TOPIC;
  const testMessage = TEST_CONFIG.DEFAULT_TEST_MESSAGE;

  await new Promise((r) => setTimeout(r, TEST_CONFIG.WAKU_INIT_DELAY));

  const healthResponse = await axios.get(`${baseUrl}/`, { timeout: 5000 });
  ASSERTIONS.serverHealth(healthResponse);

  try {
    await axios.post(`${baseUrl}/waku/v1/wait-for-peers`, {
      timeoutMs: 10000,
      protocols: ["lightpush"],
    }, { timeout: 15000 });
  } catch {
    // Ignore errors
  }

  const peerInfoResponse = await axios.get(`${baseUrl}/waku/v1/peer-info`);
  ASSERTIONS.peerInfo(peerInfoResponse);

  const routingInfo = DefaultTestRoutingInfo;

  const subscriptionResults = await Promise.all([
    nwakuNodes.nodes[0].ensureSubscriptions([routingInfo.pubsubTopic]),
    nwakuNodes.nodes[1].ensureSubscriptions([routingInfo.pubsubTopic])
  ]);

  expect(subscriptionResults[0]).toBe(true);
  expect(subscriptionResults[1]).toBe(true);

  await new Promise((r) => setTimeout(r, TEST_CONFIG.SUBSCRIPTION_DELAY));

  const base64Payload = btoa(testMessage);

  const pushResponse = await axios.post(`${baseUrl}/lightpush/v3/message`, {
    pubsubTopic: routingInfo.pubsubTopic,
    message: {
      contentTopic,
      payload: base64Payload,
      version: 1,
    },
  });

  ASSERTIONS.lightPushV3Success(pushResponse);

  await new Promise((r) => setTimeout(r, TEST_CONFIG.MESSAGE_PROPAGATION_DELAY));

  const [node1Messages, node2Messages] = await Promise.all([
    nwakuNodes.nodes[0].messages(contentTopic),
    nwakuNodes.nodes[1].messages(contentTopic)
  ]);

  const totalMessages = node1Messages.length + node2Messages.length;
  expect(totalMessages).toBeGreaterThanOrEqual(1);

  const receivedMessages = [...node1Messages, ...node2Messages];
  expect(receivedMessages.length).toBeGreaterThan(0);

  const receivedMessage = receivedMessages[0];
  ASSERTIONS.messageContent(receivedMessage, testMessage, contentTopic);
});
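
For reference, a request body that satisfies validators.requireLightpushV3 from the endpoint handler above (topic and payload values illustrative):

const body = {
  pubsubTopic: "/waku/2/rs/0/0", // illustrative pubsub topic
  message: {
    contentTopic: "/test/1/message/proto",
    payload: btoa("Hello, Waku!"), // base64-encoded payload
    version: 1
  }
};
await axios.post(`${baseUrl}/lightpush/v3/message`, body);
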
136  packages/browser-tests/tests/headless.spec.ts  Normal file
@ -0,0 +1,136 @@
import { expect, test } from "@playwright/test";
import { LightNode } from "@waku/sdk";

import { API } from "../src/api/shared.js";
import { NETWORK_CONFIG, ACTIVE_PEERS } from "./test-config.js";

// Define the window interface for TypeScript
declare global {
  // eslint-disable-next-line no-unused-vars
  interface Window {
    waku: LightNode;
    wakuAPI: typeof API;
  }
}

test.describe("waku", () => {
  test.beforeEach(async ({ page }) => {
    await page.goto("");
    await page.waitForTimeout(5000);

    // Create and initialize a fresh Waku node for each test
    const setupResult = await page.evaluate(async (config) => {
      try {
        await window.wakuAPI.createWakuNode({
          ...config.defaultNodeConfig,
          networkConfig: config.cluster42.networkConfig
        });
        await window.wakuAPI.startNode();
        return { success: true };
      } catch (error) {
        console.error("Failed to initialize Waku node:", error);
        return { success: false, error: String(error) };
      }
    }, NETWORK_CONFIG);

    expect(setupResult.success).toBe(true);
  });

  test("can get peer id", async ({ page }) => {
    const peerId = await page.evaluate(() => {
      return window.waku.libp2p.peerId.toString();
    });

    expect(peerId).toBeDefined();
    console.log("Peer ID:", peerId);
  });

  test("can get info", async ({ page }) => {
    const info = await page.evaluate(() => {
      return window.wakuAPI.getPeerInfo(window.waku);
    });

    expect(info).toBeDefined();
    expect(info.peerId).toBeDefined();
    expect(info.multiaddrs).toBeDefined();
    expect(info.peers).toBeDefined();
    console.log("Info:", info);
  });

  test("can get debug info", async ({ page }) => {
    const debug = await page.evaluate(() => {
      return window.wakuAPI.getDebugInfo(window.waku);
    });

    expect(debug).toBeDefined();
    expect(debug.listenAddresses).toBeDefined();
    expect(debug.peerId).toBeDefined();
    expect(debug.protocols).toBeDefined();
    console.log("Debug:", debug);
  });

  test("can dial peers", async ({ page }) => {
    const result = await page.evaluate((peerAddrs) => {
      return window.wakuAPI.dialPeers(window.waku, peerAddrs);
    }, ACTIVE_PEERS);

    expect(result).toBeDefined();
    expect(result.total).toBe(ACTIVE_PEERS.length);
    expect(result.errors.length >= result.total).toBe(false);
    console.log("Dial result:", result);
  });

  test("can push a message", async ({ page }) => {
    // First dial to peers
    await page.evaluate((peersToDial) => {
      return window.wakuAPI.dialPeers(window.waku, peersToDial);
    }, ACTIVE_PEERS);

    // Create a test message
    const contentTopic = NETWORK_CONFIG.testMessage.contentTopic;
    const payload = new TextEncoder().encode(NETWORK_CONFIG.testMessage.payload);
    const arrayPayload = Array.from(payload);

    // Push the message
    const result = await page.evaluate(
      ({ topic, data }) => {
        return window.wakuAPI.pushMessage(
          window.waku,
          topic,
          new Uint8Array(data)
        );
      },
      { topic: contentTopic, data: arrayPayload }
    );

    expect(result).toBeDefined();
    console.log("Push result:", result);
  });

  test("can recreate Waku node", async ({ page }) => {
    // Get the current node's peer ID
    const initialPeerId = await page.evaluate(() => {
      return window.waku.libp2p.peerId.toString();
    });

    // Create a new node with different parameters
    const result = await page.evaluate(() => {
      return window.wakuAPI.createWakuNode({
        defaultBootstrap: true // Different from beforeEach
      });
    });

    expect(result.success).toBe(true);

    // Start the new node
    await page.evaluate(() => window.wakuAPI.startNode());

    // Get the new peer ID
    const newPeerId = await page.evaluate(() => {
      return window.waku.libp2p.peerId.toString();
    });

    expect(newPeerId).not.toBe(initialPeerId);
    console.log("Initial:", initialPeerId, "New:", newPeerId);
  });
});
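
The "can push a message" test round-trips binary payloads through page.evaluate, whose arguments must be JSON-serializable; the pattern in isolation:

const payload = new TextEncoder().encode("Hello, Waku!");
const arrayPayload = Array.from(payload); // Node side: Uint8Array -> number[]
// ...and inside page.evaluate:
const restored = new Uint8Array(arrayPayload); // browser side: number[] -> Uint8Array
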
@ -1,134 +0,0 @@
import { test, expect } from "@playwright/test";
import axios from "axios";
import { StartedTestContainer } from "testcontainers";
import {
  createLightNode,
  LightNode,
  Protocols,
  IDecodedMessage,
} from "@waku/sdk";
import { DEFAULT_CLUSTER_ID, DEFAULT_NUM_SHARDS } from "@waku/interfaces";
import { startBrowserTestsContainer, stopContainer } from "./utils/container-helpers.js";
import { ENV_BUILDERS, TEST_CONFIG } from "./utils/test-config.js";

test.describe.configure({ mode: "serial" });

let container: StartedTestContainer;
let baseUrl: string;
let wakuNode: LightNode;

test.beforeAll(async () => {
  const result = await startBrowserTestsContainer({
    environment: {
      ...ENV_BUILDERS.withProductionEnr(),
      DEBUG: "waku:*",
    },
  });

  container = result.container;
  baseUrl = result.baseUrl;
});

test.afterAll(async () => {
  if (wakuNode) {
    try {
      await wakuNode.stop();
    } catch {
      // Ignore errors
    }
  }

  await stopContainer(container);
});

test("cross-network message delivery: SDK light node receives server lightpush", async () => {
  test.setTimeout(TEST_CONFIG.DEFAULT_TEST_TIMEOUT);

  const contentTopic = TEST_CONFIG.DEFAULT_CONTENT_TOPIC;
  const testMessage = TEST_CONFIG.DEFAULT_TEST_MESSAGE;

  wakuNode = await createLightNode({
    defaultBootstrap: true,
    discovery: {
      dns: true,
      peerExchange: true,
      peerCache: true,
    },
    networkConfig: {
      clusterId: DEFAULT_CLUSTER_ID,
      numShardsInCluster: DEFAULT_NUM_SHARDS,
    },
    libp2p: {
      filterMultiaddrs: false,
    },
  });

  await wakuNode.start();

  await wakuNode.waitForPeers(
    [Protocols.Filter, Protocols.LightPush],
    30000,
  );

  const messages: IDecodedMessage[] = [];
  const decoder = wakuNode.createDecoder({ contentTopic });

  if (
    !(await wakuNode.filter.subscribe([decoder], (message) => {
      messages.push(message);
    }))
  ) {
    throw new Error("Failed to subscribe to Filter");
  }

  await new Promise((r) => setTimeout(r, 2000));

  const messagePromise = new Promise<void>((resolve) => {
    const originalLength = messages.length;
    const checkForMessage = () => {
      if (messages.length > originalLength) {
        resolve();
      } else {
        setTimeout(checkForMessage, 100);
      }
    };
    checkForMessage();
  });

  await axios.post(`${baseUrl}/waku/v1/wait-for-peers`, {
    timeoutMs: 30000, // Increased timeout
    protocols: ["lightpush", "filter"],
  });

  await new Promise((r) => setTimeout(r, 10000));

  const base64Payload = btoa(testMessage);

  const pushResponse = await axios.post(`${baseUrl}/lightpush/v3/message`, {
    pubsubTopic: decoder.pubsubTopic,
    message: {
      contentTopic,
      payload: base64Payload,
      version: 1,
    },
  });

  expect(pushResponse.status).toBe(200);
  expect(pushResponse.data.success).toBe(true);

  await Promise.race([
    messagePromise,
    new Promise((_, reject) =>
      setTimeout(() => {
        reject(new Error("Timeout waiting for message"));
      }, 45000),
    ),
  ]);

  expect(messages).toHaveLength(1);
  const receivedMessage = messages[0];
  expect(receivedMessage.contentTopic).toBe(contentTopic);

  const receivedPayload = new TextDecoder().decode(receivedMessage.payload);
  expect(receivedPayload).toBe(testMessage);
});
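
The checkForMessage loop above is a generic poll-until-true pattern; a reusable sketch (the helper name and defaults are illustrative, not from this diff):

async function waitForCondition(
  predicate: () => boolean,
  timeoutMs = 45000,
  intervalMs = 100
): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (!predicate()) {
    if (Date.now() > deadline) {
      throw new Error("Timeout waiting for condition");
    }
    await new Promise((r) => setTimeout(r, intervalMs));
  }
}

// e.g. await waitForCondition(() => messages.length > 0);
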
@ -1,82 +0,0 @@
import { test, expect } from "@playwright/test";
import axios from "axios";
import { spawn, ChildProcess } from "child_process";
import { fileURLToPath } from "url";
import { dirname, join } from "path";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

test.describe.configure({ mode: "serial" });

test.describe("Server Tests", () => {
  let serverProcess: ChildProcess;
  let baseUrl = "http://localhost:3000";

  test.beforeAll(async () => {
    const serverPath = join(__dirname, "..", "dist", "src", "server.js");

    serverProcess = spawn("node", [serverPath], {
      stdio: "pipe",
      env: { ...process.env, PORT: "3000" }
    });

    serverProcess.stdout?.on("data", (_data: Buffer) => {
    });

    serverProcess.stderr?.on("data", (_data: Buffer) => {
    });

    await new Promise((resolve) => setTimeout(resolve, 3000));

    let serverReady = false;
    for (let i = 0; i < 30; i++) {
      try {
        const res = await axios.get(`${baseUrl}/`, { timeout: 2000 });
        if (res.status === 200) {
          serverReady = true;
          break;
        }
      } catch {
        // Ignore errors
      }
      await new Promise((r) => setTimeout(r, 1000));
    }

    expect(serverReady).toBe(true);
  });

  test.afterAll(async () => {
    if (serverProcess) {
      serverProcess.kill("SIGTERM");
      await new Promise((resolve) => setTimeout(resolve, 1000));
    }
  });

  test("server health endpoint", async () => {
    const res = await axios.get(`${baseUrl}/`);
    expect(res.status).toBe(200);
    expect(res.data.status).toBe("Waku simulation server is running");
  });

  test("static files are served", async () => {
    const htmlRes = await axios.get(`${baseUrl}/app/index.html`);
    expect(htmlRes.status).toBe(200);
    expect(htmlRes.data).toContain("Waku Test Environment");

    const jsRes = await axios.get(`${baseUrl}/app/index.js`);
    expect(jsRes.status).toBe(200);
    expect(jsRes.data).toContain("WakuHeadless");
  });

  test("Waku node auto-started", async () => {
    try {
      const infoRes = await axios.get(`${baseUrl}/waku/v1/peer-info`);
      expect(infoRes.status).toBe(200);
      expect(infoRes.data.peerId).toBeDefined();
      expect(infoRes.data.multiaddrs).toBeDefined();
    } catch (error) {
      expect(error.response?.status).toBe(400);
    }
  });
});
@ -0,0 +1,722 @@
import { ChildProcess, exec, spawn } from "child_process";
import * as http from "http";
import * as net from "net";
import { join } from "path";

import { expect, test } from "@playwright/test";
import axios from "axios";

// The default URL, but we'll update this if we detect a different port
let API_URL = "http://localhost:3000";
// Need this for basic node initialization that doesn't rely on /execute
const PEERS = [
  "/dns4/waku-test.bloxy.one/tcp/8095/wss/p2p/16Uiu2HAmSZbDB7CusdRhgkD81VssRjQV5ZH13FbzCGcdnbbh6VwZ",
  "/dns4/waku.fryorcraken.xyz/tcp/8000/wss/p2p/16Uiu2HAmMRvhDHrtiHft1FTUYnn6cVA8AWVrTyLUayJJ3MWpUZDB"
];

let serverProcess: ChildProcess;

// Force tests to run sequentially to avoid port conflicts
test.describe.configure({ mode: "serial" });

// Helper function to check if a port is in use
async function isPortInUse(port: number): Promise<boolean> {
  return new Promise((resolve) => {
    const server = net
      .createServer()
      .once("error", () => {
        // Port is in use
        resolve(true);
      })
      .once("listening", () => {
        // Port is free, close server
        server.close();
        resolve(false);
      })
      .listen(port);
  });
}

// Helper function to kill processes on port 3000
async function killProcessOnPort(): Promise<void> {
  return new Promise<void>((resolve) => {
    // Different commands for different platforms
    const cmd =
      process.platform === "win32"
        ? `netstat -ano | findstr :3000 | findstr LISTENING`
        : `lsof -i:3000 -t`;

    exec(cmd, (err, stdout) => {
      if (err || !stdout.trim()) {
        console.log("No process running on port 3000");
        resolve();
        return;
      }

      console.log(`Found processes on port 3000: ${stdout.trim()}`);

      // Kill the process
      const killCmd =
        process.platform === "win32"
          ? `FOR /F "tokens=5" %P IN ('netstat -ano ^| findstr :3000 ^| findstr LISTENING') DO taskkill /F /PID %P`
          : `kill -9 ${stdout.trim()}`;

      exec(killCmd, (killErr) => {
        if (killErr) {
          console.error(`Error killing process: ${killErr.message}`);
        } else {
          console.log("Killed process on port 3000");
        }

        // Wait a moment for OS to release the port
        setTimeout(resolve, 500);
      });
    });
  });
}

// Helper function to wait for the API server to be available
async function waitForApiServer(
  maxRetries = 10,
  interval = 1000
): Promise<boolean> {
  for (let i = 0; i < maxRetries; i++) {
    try {
      const response = await axios.get(API_URL, { timeout: 2000 });
      if (response.status === 200) {
        console.log(`API server is available at ${API_URL}`);
        return true;
      }
    } catch (e) {
      console.log(
        `API server not available at ${API_URL}, retrying (${i + 1}/${maxRetries})...`
      );
      await new Promise((resolve) => setTimeout(resolve, interval));
    }
  }
  console.warn(
    `API server at ${API_URL} not available after ${maxRetries} attempts`
  );
  return false;
}

// Setup and teardown for the whole test suite
test.beforeAll(async () => {
  // First check if port 3000 is already in use - if so, try to kill it
  const portInUse = await isPortInUse(3000);
  if (portInUse) {
    console.log(
      "Port 3000 is already in use. Attempting to kill the process..."
    );
    await killProcessOnPort();
    // Check again
    const stillInUse = await isPortInUse(3000);
    if (stillInUse) {
      console.log("Failed to free port 3000. Waiting for it to be released...");
      await new Promise((resolve) => setTimeout(resolve, 5000));
    }
  }

  // Start the server
  console.log("Starting server for tests...");
  serverProcess = spawn("node", [join(process.cwd(), "dist/server.js")], {
    stdio: "pipe",
    detached: true
  });

  // Log server output for debugging and capture the actual port
  serverProcess.stdout?.on("data", (data) => {
    const output = data.toString();
    console.log(`Server: ${output}`);

    // Check if the output contains the port information
    const portMatch = output.match(
      /API server running on http:\/\/localhost:(\d+)/
    );
    if (portMatch && portMatch[1]) {
      const detectedPort = parseInt(portMatch[1], 10);
      if (detectedPort !== 3000) {
        console.log(
          `Server is running on port ${detectedPort} instead of 3000`
        );
        API_URL = `http://localhost:${detectedPort}`;
      }
    }
  });

  serverProcess.stderr?.on("data", (data) => {
    console.error(`Server Error: ${data}`);
  });

  // Wait for server to start and API to be available
  console.log("Waiting for server to start...");
  await new Promise((resolve) => setTimeout(resolve, 5000));

  const apiAvailable = await waitForApiServer();
  if (!apiAvailable) {
    console.warn("API server is not available, tests may fail");
  }

  if (apiAvailable) {
    // Create a node for the tests
    try {
      console.log("Creating node for tests...");
      const createNodeResponse = await axios.post(
        `${API_URL}/admin/v1/create-node`,
        {
          defaultBootstrap: false,
          networkConfig: {
            clusterId: 42,
            shards: [0]
          },
          pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
        },
        { timeout: 10000 }
      );

      if (createNodeResponse.status === 200) {
        console.log("Node creation response:", createNodeResponse.data);

        // Start the node
        const startNodeResponse = await axios.post(
          `${API_URL}/admin/v1/start-node`,
          {},
          { timeout: 5000 }
        );

        if (startNodeResponse.status === 200) {
          console.log("Node started successfully");
        }
      }
    } catch (error) {
      console.warn(
        "Failed to create/start node through API, some tests may fail:",
        error
      );
    }
  } else {
    console.warn(
      "Skipping node creation as server doesn't appear to be running"
    );
  }
});

test.afterAll(async () => {
  // Stop the server
  console.log("Stopping server...");
  if (serverProcess && serverProcess.pid) {
    if (process.platform === "win32") {
      spawn("taskkill", ["/pid", serverProcess.pid.toString(), "/f", "/t"]);
    } else {
      // Ensure the process and all its children are terminated
      try {
        process.kill(-serverProcess.pid, "SIGINT");
      } catch (e) {
        console.log("Server process already terminated");
      }
    }
  }

  // Verify no processes running on port 3000
  await killProcessOnPort();

  // Give time for all processes to terminate
  await new Promise((resolve) => setTimeout(resolve, 1000));
});

test.describe("Waku Server API", () => {
  // Direct test of filter endpoint - this runs first
  test("can directly access filter/v1/messages endpoint", async () => {
    // Try with different content topic formats
    const testTopics = [
      "test-topic",
      "/test/topic",
      "%2Ftest%2Ftopic", // Pre-encoded
      "%2Ftest%2Ftopic" // Pre-encoded
    ];

    for (const topic of testTopics) {
      console.log(`Testing direct access with topic: ${topic}`);
      try {
        const response = await axios.get(
          `${API_URL}/filter/v1/messages/${topic}`,
          {
            timeout: 5000,
            validateStatus: () => true
          }
        );

        console.log(`  Status: ${response.status}`);
        console.log(`  Content-Type: ${response.headers["content-type"]}`);
        console.log(`  Data: ${JSON.stringify(response.data)}`);

        // If this succeeds, we'll use this topic format for our tests
        if (response.status === 200) {
          console.log(`  Found working topic format: ${topic}`);
          break;
        }
      } catch (error: any) {
        console.error(`  Error with topic ${topic}:`, error.message);
        if (error.response) {
          console.error(`  Response status: ${error.response.status}`);
        }
      }
    }
  });

  // This test checks if the server is running and can serve the basic endpoints
  test("can get server status and verify endpoints", async () => {
    // Get initial server status with retry mechanism
    let initialResponse;
    for (let attempt = 0; attempt < 5; attempt++) {
      try {
        initialResponse = await axios.get(`${API_URL}/`, {
          timeout: 5000,
          validateStatus: () => true // Accept any status code
        });
        if (initialResponse.status === 200) {
          break;
        }
      } catch (e) {
        console.log(
          `Server not responding on attempt ${attempt + 1}/5, retrying...`
        );
        await new Promise((resolve) => setTimeout(resolve, 1000));
      }
    }

    // If we still couldn't connect, skip this test
    if (!initialResponse || initialResponse.status !== 200) {
      console.warn("Server is not responding, skipping endpoint checks");
      test.skip();
      return;
    }

    expect(initialResponse.status).toBe(200);
    expect(initialResponse.data.status).toBe(
      "Waku simulation server is running"
    );

    // Check if key endpoints are available
    console.log("Checking if server endpoints are properly registered...");

    try {
      // Try to access the various endpoints with simple HEAD requests
      const endpoints = [
        "/info",
        "/debug/v1/info",
        "/admin/v1/create-node",
        "/admin/v1/start-node",
        "/admin/v1/stop-node",
        "/filter/v1/messages/test-topic",
        "/filter/v2/messages/test-topic"
      ];

      for (const endpoint of endpoints) {
        try {
          const response = await axios.head(`${API_URL}${endpoint}`, {
            validateStatus: () => true, // Accept any status code
            timeout: 3000 // Short timeout to avoid hanging
          });

          // Some endpoints may return 404 or 405 if they only support specific methods,
          // but at least we should get a response if the route is registered
          console.log(`Endpoint ${endpoint}: Status ${response.status}`);

          // If we get a 404, the route is not registered
          expect(response.status).not.toBe(404);
        } catch (error) {
          console.warn(`Error checking endpoint ${endpoint}:`, error.message);
          // Continue checking other endpoints even if one fails
        }
      }
    } catch (error: any) {
      console.error("Error checking endpoints:", error.message);
      throw error;
    }
  });

  // Test node lifecycle operations using the dedicated endpoints
  test("can create, start, and stop a node", async () => {
    // 1. Create a new node
    const createResponse = await axios.post(`${API_URL}/admin/v1/create-node`, {
      defaultBootstrap: true,
      networkConfig: {
        clusterId: 42,
        shards: [0]
      },
      pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
    });
    expect(createResponse.status).toBe(200);
    expect(createResponse.data.success).toBe(true);

    // 2. Start the node
    const startResponse = await axios.post(`${API_URL}/admin/v1/start-node`);
    expect(startResponse.status).toBe(200);
    expect(startResponse.data.success).toBe(true);

    // 3. Get info to verify it's running
    const infoResponse = await axios.get(`${API_URL}/info`);
    expect(infoResponse.status).toBe(200);
    expect(infoResponse.data.peerId).toBeDefined();
    console.log("Node peer ID:", infoResponse.data.peerId);

    // 4. Stop the node
    const stopResponse = await axios.post(`${API_URL}/admin/v1/stop-node`);
    expect(stopResponse.status).toBe(200);
    expect(stopResponse.data.success).toBe(true);

    // 5. Start it again
    const restartResponse = await axios.post(`${API_URL}/admin/v1/start-node`);
    expect(restartResponse.status).toBe(200);
    expect(restartResponse.data.success).toBe(true);

    // 6. Verify it's running again
    const finalInfoResponse = await axios.get(`${API_URL}/info`);
    expect(finalInfoResponse.status).toBe(200);
    expect(finalInfoResponse.data.peerId).toBeDefined();
  });

  // This test requires a running node, which we now can properly initialize with our new endpoints
  test("can connect to peers and get node info", async () => {
    // Create and start a fresh node
    await axios.post(`${API_URL}/admin/v1/create-node`, {
      defaultBootstrap: false,
      networkConfig: {
        clusterId: 42,
        shards: [0]
      },
      pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
    });
    await axios.post(`${API_URL}/admin/v1/start-node`);

    // Connect to peers
    const dialResponse = await axios.post(`${API_URL}/admin/v1/peers`, {
      peerMultiaddrs: PEERS
    });

    expect(dialResponse.status).toBe(200);
    console.log("Peer connection response:", dialResponse.data);

    // Get debug info now that we have a properly initialized node
    const debugResponse = await axios.get(`${API_URL}/debug/v1/info`);
    expect(debugResponse.status).toBe(200);
    expect(debugResponse.data).toBeDefined();

    // Log protocols available
    if (debugResponse.data.protocols) {
      const wakuProtocols = debugResponse.data.protocols.filter((p: string) =>
        p.includes("/waku/")
      );
      console.log("Waku protocols:", wakuProtocols);
    }
  });

  test("can push messages", async () => {
    // Create and start a fresh node
    await axios.post(`${API_URL}/admin/v1/create-node`, {
      defaultBootstrap: true,
      networkConfig: {
        clusterId: 42,
        shards: [0]
      },
      pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
    });
    await axios.post(`${API_URL}/admin/v1/start-node`);

    // Connect to peers
    await axios.post(`${API_URL}/admin/v1/peers`, {
      peerMultiaddrs: PEERS
    });

    // Test the REST API format push endpoint
    try {
      const restPushResponse = await axios.post(
        `${API_URL}/lightpush/v1/message`,
        {
          pubsubTopic: "/waku/2/default-waku/proto",
          message: {
            contentTopic: "/test/1/message/proto",
            payload: Array.from(
              new TextEncoder().encode("Test message via REST endpoint")
            )
          }
        }
      );

      expect(restPushResponse.status).toBe(200);
      expect(restPushResponse.data.messageId).toBeDefined();
      console.log("Message ID:", restPushResponse.data.messageId);
    } catch (error) {
      console.log("REST push might fail if no peers connected:", error);
    }
  });

  test("can retrieve messages from the queue", async () => {
    // Create and start a fresh node
    await axios.post(`${API_URL}/admin/v1/create-node`, {
      defaultBootstrap: true,
      networkConfig: {
        clusterId: 42,
        shards: [0]
      },
      pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
    });
    await axios.post(`${API_URL}/admin/v1/start-node`);

    // Connect to peers
    await axios.post(`${API_URL}/admin/v1/peers`, {
      peerMultiaddrs: PEERS
    });

    // Use a simple content topic to avoid encoding issues
    const contentTopic = "test-queue";

    try {
      // Check endpoint existence by checking available routes
      console.log("Checking server routes and status...");
      const rootResponse = await axios.get(`${API_URL}/`);
      console.log(
        "Server root response:",
        rootResponse.status,
        rootResponse.data
      );

      // First ensure the queue is empty
      console.log(`Attempting to get messages from ${contentTopic}...`);
      const emptyQueueResponse = await axios.get(
        `${API_URL}/filter/v1/messages/${contentTopic}`
      );
      expect(emptyQueueResponse.status).toBe(200);
      expect(emptyQueueResponse.data.messages).toEqual([]);
    } catch (error: any) {
      console.error("Error accessing filter endpoint:", error.message);
      if (error.response) {
        console.error("Response status:", error.response.status);
        console.error("Response data:", error.response.data);
      }
      throw error;
    }

    // Simulate adding messages to the queue
    const messages = [
      {
        payload: Array.from(new TextEncoder().encode("Message 1")),
        timestamp: Date.now() - 2000,
        contentTopic
      },
      {
        payload: Array.from(new TextEncoder().encode("Message 2")),
        timestamp: Date.now() - 1000,
        contentTopic
      },
      {
        payload: Array.from(new TextEncoder().encode("Message 3")),
        timestamp: Date.now(),
        contentTopic
      }
    ];

    const testMessages = await axios.post(`${API_URL}/execute`, {
      functionName: "simulateMessages",
      params: [contentTopic, messages]
    });
    expect(testMessages.status).toBe(200);

    // Now check if we can retrieve messages
    const messagesResponse = await axios.get(
      `${API_URL}/filter/v1/messages/${contentTopic}`
    );
    expect(messagesResponse.status).toBe(200);
    expect(messagesResponse.data.messages.length).toBe(3);

    // Verify message format
    const message = messagesResponse.data.messages[0];
    expect(message).toHaveProperty("payload");
    expect(message).toHaveProperty("contentTopic");
    expect(message).toHaveProperty("timestamp");
    expect(message).toHaveProperty("version");

    // Test pagination
    const paginatedResponse = await axios.get(
      `${API_URL}/filter/v1/messages/${contentTopic}?pageSize=2`
    );
    expect(paginatedResponse.status).toBe(200);
    expect(paginatedResponse.data.messages.length).toBe(2);

    // Test sorting order
    const ascendingResponse = await axios.get(
      `${API_URL}/filter/v1/messages/${contentTopic}?ascending=true`
    );
    expect(ascendingResponse.status).toBe(200);
    expect(ascendingResponse.data.messages.length).toBe(3);
    const timestamps = ascendingResponse.data.messages.map(
      (msg: any) => msg.timestamp
    );
    expect(timestamps[0]).toBeLessThan(timestamps[1]);
    expect(timestamps[1]).toBeLessThan(timestamps[2]);
  });

  test("can access filter endpoint for SSE", async () => {
    // Create and start a fresh node - only if API is accessible
    try {
      // Quick check if server is running
      await axios.get(API_URL, { timeout: 2000 });

      // Create node
      await axios.post(`${API_URL}/admin/v1/create-node`, {
        defaultBootstrap: true,
        networkConfig: {
          clusterId: 42,
          shards: [0]
        },
        pubsubTopics: ["/waku/2/rs/42/0"] // Explicitly configure the pubsub topic
      });

      // Start node
      await axios.post(`${API_URL}/admin/v1/start-node`);

      // Connect to peers
      await axios.post(`${API_URL}/admin/v1/peers`, {
        peerMultiaddrs: PEERS
      });
    } catch (error) {
      console.warn("Server appears to be unreachable, skipping test");
      test.skip();
      return;
    }

    const contentTopic = "test-sse";

    // Verify filter endpoint is accessible
    // Instead of implementing a full SSE client, we'll make sure the endpoint
    // returns the correct headers and status code which indicates SSE readiness
    try {
      const sseResponse = await axios
        .get(
          `${API_URL}/filter/v2/messages/${contentTopic}?clusterId=42&shard=0`,
          {
            // Set a timeout to avoid hanging the test
            timeout: 2000,
            // Expecting the request to timeout as SSE keeps connection open
            validateStatus: () => true,
            // We can't use responseType: 'stream' directly with axios,
            // but we can check the response headers
            headers: {
              Accept: "text/event-stream"
            }
          }
        )
        .catch((e) => {
          // We expect a timeout error since SSE keeps connection open
          if (e.code === "ECONNABORTED") {
            return e.response;
          }
          throw e;
        });

      // If response exists and has expected SSE headers, the test passes
      if (sseResponse) {
        expect(sseResponse.headers["content-type"]).toBe("text/event-stream");
        expect(sseResponse.headers["cache-control"]).toBe("no-cache");
        expect(sseResponse.headers["connection"]).toBe("keep-alive");
      } else {
        // If no response, we manually make an HTTP request to check the headers
        const headers = await new Promise<Record<string, string>>((resolve) => {
          const requestUrl = new URL(
            `${API_URL}/filter/v2/messages/${contentTopic}?clusterId=42&shard=0`
          );
          const req = http.get(requestUrl, (res) => {
            // Only interested in headers
            req.destroy();
            if (res.headers) {
              resolve(res.headers as Record<string, string>);
            } else {
              resolve({});
            }
          });
          req.on("error", () => resolve({}));
        });

        if (Object.keys(headers).length === 0) {
          console.warn(
            "No headers received, SSE endpoint may not be accessible"
          );
          test.skip();
          return;
        }

        expect(headers["content-type"]).toBe("text/event-stream");
      }
    } catch (error) {
      console.error("Error during SSE endpoint test:", error);
      test.fail();
      return;
    }

    console.log("SSE endpoint is accessible with correct headers");
  });

  // Add a specific test just for the filter/v1/messages endpoint
  test("can access filter/v1/messages endpoint directly", async () => {
    // Check if server is available first
    try {
      await axios.get(API_URL, { timeout: 2000 });
    } catch (error) {
      console.warn("Server appears to be unreachable, skipping test");
      test.skip();
      return;
    }

    // Create a random content topic just for this test
    const contentTopic = `direct-filter-${Date.now()}`;

    try {
      // Try different approaches to access the endpoint
      console.log(
        `Testing direct access to filter/v1/messages/${contentTopic}`
      );

      // Method 1: GET request with encoded content topic
      const getResponse = await axios({
        method: "get",
        url: `${API_URL}/filter/v1/messages/${contentTopic}`,
        validateStatus: function () {
          // Allow any status code to check what's coming back
          return true;
        },
        timeout: 5000
      });

      console.log("Response status:", getResponse.status);
      console.log("Response headers:", getResponse.headers);

      if (getResponse.status === 404) {
        throw new Error(
          `Endpoint not found (404): /filter/v1/messages/${contentTopic}`
        );
      }

      // If we got here, the endpoint exists even if it returns empty results
      expect(getResponse.status).toBe(200);
      expect(getResponse.data).toHaveProperty("messages");
      expect(Array.isArray(getResponse.data.messages)).toBe(true);
    } catch (error: any) {
      console.error("Error during filter/v1 endpoint test:", error.message);

      if (error.response) {
        console.error("Response status:", error.response.status);
        console.error("Response headers:", error.response.headers);
        console.error("Response data:", error.response.data);
      } else if (error.request) {
        console.error("No response received:", error.request);
        // If no response, we'll skip the test rather than fail it
        test.skip();
        return;
      }

      throw error;
    }
  });
});
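
The port helpers above compose into the usual free-then-bind sequence; a condensed sketch:

const busy = await isPortInUse(3000);
if (busy) {
  await killProcessOnPort(); // best-effort; relies on lsof (or netstat on Windows)
}
// ...then spawn the server as in test.beforeAll above.
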
40  packages/browser-tests/tests/test-config.ts  Normal file
@ -0,0 +1,40 @@
export const NETWORK_CONFIG = {
  cluster42: {
    networkConfig: {
      clusterId: 42,
      shards: [0]
    },
    peers: [
      "/dns4/waku-test.bloxy.one/tcp/8095/wss/p2p/16Uiu2HAmSZbDB7CusdRhgkD81VssRjQV5ZH13FbzCGcdnbbh6VwZ",
      "/dns4/waku.fryorcraken.xyz/tcp/8000/wss/p2p/16Uiu2HAmMRvhDHrtiHft1FTUYnn6cVA8AWVrTyLUayJJ3MWpUZDB",
      "/dns4/ivansete.xyz/tcp/8000/wss/p2p/16Uiu2HAmDAHuJ8w9zgxVnhtFe8otWNJdCewPAerJJPbXJcn8tu4r"
    ]
  },

  sandbox: {
    networkConfig: {
      clusterId: 1,
      shards: [0]
    },
    peers: [
      "/dns4/node-01.do-ams3.waku.sandbox.status.im/tcp/30303/p2p/16Uiu2HAmNaeL4p3WEYzC9mgXBmBWSgWjPHRvatZTXnp8Jgv3iKsb",
      "/dns4/node-01.gc-us-central1-a.waku.sandbox.status.im/tcp/30303/p2p/16Uiu2HAmRv1iQ3NoMMcjbtRmKxPuYBbF9nLYz2SDv9MTN8WhGuUU",
      "/dns4/node-01.ac-cn-hongkong-c.waku.sandbox.status.im/tcp/30303/p2p/16Uiu2HAmQYiojgZ8APsh9wqbWNyCstVhnp9gbeNrxSEQnLJchC92"
    ]
  },

  // Default node configuration
  defaultNodeConfig: {
    defaultBootstrap: false
  },

  // Test message configuration
  testMessage: {
    contentTopic: "/test/1/message/proto",
    payload: "Hello, Waku!"
  }
};

// Active environment - change this to switch between cluster42 and sandbox
export const ACTIVE_ENV = 'cluster42';
export const ACTIVE_PEERS = NETWORK_CONFIG[ACTIVE_ENV].peers;
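
Consumers import ACTIVE_PEERS rather than referencing an environment directly, so switching networks is a one-line change to ACTIVE_ENV:

import { ACTIVE_PEERS, NETWORK_CONFIG } from "./test-config.js";

console.log(NETWORK_CONFIG.cluster42.networkConfig.clusterId); // 42
console.log(ACTIVE_PEERS.length); // 3 peers for cluster42
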
@ -1,128 +0,0 @@
import axios from "axios";
import { GenericContainer, StartedTestContainer } from "testcontainers";
import { Logger } from "@waku/utils";

const log = new Logger("container-helpers");

export interface ContainerSetupOptions {
  environment?: Record<string, string>;
  networkMode?: string;
  timeout?: number;
  maxAttempts?: number;
}

export interface ContainerSetupResult {
  container: StartedTestContainer;
  baseUrl: string;
}

/**
 * Starts a waku-browser-tests Docker container with proper health checking.
 * Follows patterns from @waku/tests package for retry logic and cleanup.
 */
export async function startBrowserTestsContainer(
  options: ContainerSetupOptions = {}
): Promise<ContainerSetupResult> {
  const {
    environment = {},
    networkMode = "bridge",
    timeout = 2000,
    maxAttempts = 60
  } = options;

  log.info("Starting waku-browser-tests container...");

  let generic = new GenericContainer("waku-browser-tests:local")
    .withExposedPorts(8080)
    .withNetworkMode(networkMode);

  // Apply environment variables
  for (const [key, value] of Object.entries(environment)) {
    generic = generic.withEnvironment({ [key]: value });
  }

  const container = await generic.start();

  // Set up container logging - stream all output from the start
  const logs = await container.logs();
  logs.on("data", (b) => process.stdout.write("[container] " + b.toString()));
  logs.on("error", (err) => log.error("[container log error]", err));

  // Give container time to initialize
  await new Promise((r) => setTimeout(r, 5000));

  const mappedPort = container.getMappedPort(8080);
  const baseUrl = `http://127.0.0.1:${mappedPort}`;

  // Wait for server readiness with retry logic (following waku/tests patterns)
  const serverReady = await waitForServerReady(baseUrl, maxAttempts, timeout);

  if (!serverReady) {
    await logFinalContainerState(container);
    throw new Error("Container failed to become ready");
  }

  log.info("✅ Browser tests container ready");
  await new Promise((r) => setTimeout(r, 500)); // Final settling time

  return { container, baseUrl };
}

/**
 * Waits for server to become ready with exponential backoff and detailed logging.
 * Follows retry patterns from @waku/tests ServiceNode.
 */
async function waitForServerReady(
  baseUrl: string,
  maxAttempts: number,
  timeout: number
): Promise<boolean> {
  for (let i = 0; i < maxAttempts; i++) {
    try {
      const res = await axios.get(`${baseUrl}/`, { timeout });
      if (res.status === 200) {
        log.info(`Server is ready after ${i + 1} attempts`);
        return true;
      }
    } catch (error) {
      if (i % 10 === 0) {
        log.info(`Attempt ${i + 1}/${maxAttempts} failed:`, error.code || error.message);
      }
    }
    await new Promise((r) => setTimeout(r, 1000));
  }
  return false;
}

/**
 * Logs final container state for debugging, following waku/tests error handling patterns.
 */
async function logFinalContainerState(container: StartedTestContainer): Promise<void> {
  try {
    const finalLogs = await container.logs({ tail: 50 });
    log.info("=== Final Container Logs ===");
    finalLogs.on("data", (b) => log.info(b.toString()));
    await new Promise((r) => setTimeout(r, 1000));
  } catch (logError) {
    log.error("Failed to get container logs:", logError);
  }
}

/**
 * Gracefully stops containers with retry logic, following teardown patterns from waku/tests.
 */
export async function stopContainer(container: StartedTestContainer): Promise<void> {
  if (!container) return;

  log.info("Stopping container gracefully...");
  try {
    await container.stop({ timeout: 10000 });
    log.info("Container stopped successfully");
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    log.warn(
      "Container stop had issues (expected):",
      message
    );
  }
}
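
A usage sketch for the helpers above (the health-check endpoint and env values are illustrative):

import axios from "axios";
import { startBrowserTestsContainer, stopContainer } from "./container-helpers.js";

const { container, baseUrl } = await startBrowserTestsContainer({
  environment: { DEBUG: "waku:*" }
});
try {
  await axios.get(`${baseUrl}/`); // health check against the mapped port 8080
} finally {
  await stopContainer(container);
}
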
@ -1,8 +0,0 @@
/**
 * Shared test utilities for browser-tests package.
 * Follows patterns established in @waku/tests package.
 */

export * from "./container-helpers.js";
export * from "./nwaku-helpers.js";
export * from "./test-config.js";
@ -1,141 +0,0 @@
import { ServiceNode } from "@waku/tests";
import { DefaultTestRoutingInfo } from "@waku/tests";
import { Logger } from "@waku/utils";

const log = new Logger("nwaku-helpers");

export interface TwoNodeNetwork {
  nodes: ServiceNode[];
}

/**
 * Creates a two-node nwaku network following waku/tests patterns.
 * Node 1: Relay + Light Push (service provider)
 * Node 2: Relay only (network peer)
 */
export async function createTwoNodeNetwork(): Promise<TwoNodeNetwork> {
  log.info("Creating nwaku node 1 (Relay + Light Push)...");
  const lightPushNode = new ServiceNode(
    "lightpush-node-" + Math.random().toString(36).substring(7),
  );

  const lightPushArgs = {
    relay: true,
    lightpush: true,
    filter: false,
    store: false,
    clusterId: DefaultTestRoutingInfo.clusterId,
    numShardsInNetwork: DefaultTestRoutingInfo.networkConfig.numShardsInCluster,
    contentTopic: [DefaultTestRoutingInfo.contentTopic],
  };

  await lightPushNode.start(lightPushArgs, { retries: 3 });

  log.info("Creating nwaku node 2 (Relay only)...");
  const relayNode = new ServiceNode(
    "relay-node-" + Math.random().toString(36).substring(7),
  );

  // Connect second node to first node (following ServiceNodesFleet pattern)
  const firstNodeAddr = await lightPushNode.getExternalMultiaddr();
  const relayArgs = {
    relay: true,
    lightpush: false,
    filter: false,
    store: false,
    staticnode: firstNodeAddr,
    clusterId: DefaultTestRoutingInfo.clusterId,
    numShardsInNetwork: DefaultTestRoutingInfo.networkConfig.numShardsInCluster,
    contentTopic: [DefaultTestRoutingInfo.contentTopic],
  };

  await relayNode.start(relayArgs, { retries: 3 });

  // Wait for network formation (following waku/tests timing patterns)
  log.info("Waiting for nwaku network formation...");
  await new Promise((r) => setTimeout(r, 5000));

  // Verify connectivity (optional, for debugging)
  await verifyNetworkFormation([lightPushNode, relayNode]);

  return {
    nodes: [lightPushNode, relayNode],
  };
}

/**
 * Verifies that nwaku nodes have formed connections.
 * Follows error handling patterns from waku/tests.
 */
async function verifyNetworkFormation(nodes: ServiceNode[]): Promise<void> {
  try {
    const peerCounts = await Promise.all(
      nodes.map(async (node, index) => {
        const peers = await node.peers();
        log.info(`Node ${index + 1} has ${peers.length} peer(s)`);
        return peers.length;
      }),
    );

    if (peerCounts.every((count) => count === 0)) {
      log.warn("⚠️ Nodes may not be properly connected yet");
    }
  } catch (error) {
    log.warn("Could not verify peer connections:", error);
  }
}

/**
 * Extracts a Docker-accessible multiaddr from an nwaku node.
 * Returns a multiaddr using the container's internal IP for Docker network communication.
 */
export async function getDockerAccessibleMultiaddr(
  node: ServiceNode,
): Promise<string> {
  // Get multiaddr with localhost and extract components
  const localhostMultiaddr = await node.getMultiaddrWithId();
  const peerId = await node.getPeerId();

  // Extract port from multiaddr string
  const multiaddrStr = localhostMultiaddr.toString();
  const portMatch = multiaddrStr.match(/\/tcp\/(\d+)/);
  const port = portMatch ? portMatch[1] : null;

  if (!port) {
    throw new Error("Could not extract port from multiaddr: " + multiaddrStr);
  }

  // Get Docker container IP (accessing internal field)
  // Note: This accesses an internal implementation detail of ServiceNode
  const nodeWithDocker = node as ServiceNode & {
    docker?: { containerIp?: string };
  };
  const containerIp = nodeWithDocker.docker?.containerIp;
  if (!containerIp) {
    throw new Error("Could not get container IP from node");
  }

  // Build Docker network accessible multiaddr
  const dockerMultiaddr = `/ip4/${containerIp}/tcp/${port}/ws/p2p/${peerId}`;

  log.info("Original multiaddr:", multiaddrStr);
  log.info("Docker accessible multiaddr:", dockerMultiaddr);

  return dockerMultiaddr;
}

/**
 * Stops nwaku nodes, tolerating teardown errors (following teardown patterns from waku/tests).
 */
export async function stopNwakuNodes(nodes: ServiceNode[]): Promise<void> {
  if (!nodes || nodes.length === 0) return;

  log.info("Stopping nwaku nodes...");
  try {
    await Promise.all(nodes.map((node) => node.stop()));
    log.info("Nwaku nodes stopped successfully");
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    log.warn("Nwaku nodes stop had issues:", message);
  }
}
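A sketch of how these helpers compose in a test scenario; every name comes from the file above, only the wiring is illustrative:

import {
  createTwoNodeNetwork,
  getDockerAccessibleMultiaddr,
  stopNwakuNodes,
} from "./nwaku-helpers.js";

async function runScenario(): Promise<void> {
  const { nodes } = await createTwoNodeNetwork();
  try {
    // nodes[0] is the Relay + Light Push node created first.
    const lightpushAddr = await getDockerAccessibleMultiaddr(nodes[0]);
    // Hand `lightpushAddr` to the browser container, e.g. via
    // ENV_BUILDERS.withLocalLightPush(lightpushAddr) from test-config below.
    void lightpushAddr;
  } finally {
    await stopNwakuNodes(nodes);
  }
}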
@ -1,127 +0,0 @@
import { expect } from "@playwright/test";
import { DefaultTestRoutingInfo } from "@waku/tests";
import { AxiosResponse } from "axios";

/**
 * Response type definitions for API endpoints
 */
interface ServerHealthResponse {
  status: string;
}

interface PeerInfoResponse {
  peerId: string;
  multiaddrs: string[];
  peers: string[];
}

interface LightPushV3Result {
  successes: string[];
  failures: Array<{ error: string; peerId?: string }>;
}

interface LightPushV3Response {
  success: boolean;
  result: LightPushV3Result;
  error?: string;
}

interface MessageResponse {
  contentTopic: string;
  payload: string;
  version: number;
  timestamp?: bigint | number;
}

/**
 * Common test configuration constants following waku/tests patterns.
 */
export const TEST_CONFIG = {
  // Test timeouts (following waku/tests timeout patterns)
  DEFAULT_TEST_TIMEOUT: 120000, // 2 minutes
  CONTAINER_READY_TIMEOUT: 60000, // 1 minute
  NETWORK_FORMATION_DELAY: 5000, // 5 seconds
  SUBSCRIPTION_DELAY: 3000, // 3 seconds
  MESSAGE_PROPAGATION_DELAY: 5000, // 5 seconds
  WAKU_INIT_DELAY: 8000, // 8 seconds

  // Network configuration
  DEFAULT_CLUSTER_ID: DefaultTestRoutingInfo.clusterId.toString(),
  DEFAULT_CONTENT_TOPIC: "/test/1/browser-tests/proto",

  // Test messages
  DEFAULT_TEST_MESSAGE: "Hello from browser tests",
} as const;

/**
 * Environment variable builders for different test scenarios.
 */
export const ENV_BUILDERS = {
  /**
   * Environment for production ENR bootstrap (integration test pattern).
   */
  withProductionEnr: () => ({
    WAKU_ENR_BOOTSTRAP: "enr:-QEnuEBEAyErHEfhiQxAVQoWowGTCuEF9fKZtXSd7H_PymHFhGJA3rGAYDVSHKCyJDGRLBGsloNbS8AZF33IVuefjOO6BIJpZIJ2NIJpcIQS39tkim11bHRpYWRkcnO4lgAvNihub2RlLTAxLmRvLWFtczMud2FrdXYyLnRlc3Quc3RhdHVzaW0ubmV0BgG73gMAODcxbm9kZS0wMS5hYy1jbi1ob25na29uZy1jLndha3V2Mi50ZXN0LnN0YXR1c2ltLm5ldAYBu94DACm9A62t7AQL4Ef5ZYZosRpQTzFVAB8jGjf1TER2wH-0zBOe1-MDBNLeA4lzZWNwMjU2azGhAzfsxbxyCkgCqq8WwYsVWH7YkpMLnU2Bw5xJSimxKav-g3VkcIIjKA",
    WAKU_CLUSTER_ID: "1",
  }),

  /**
   * Environment for local nwaku node connection (e2e test pattern).
   */
  withLocalLightPush: (lightpushMultiaddr: string) => ({
    WAKU_LIGHTPUSH_NODE: lightpushMultiaddr,
    WAKU_CLUSTER_ID: TEST_CONFIG.DEFAULT_CLUSTER_ID,
  }),
};

/**
 * Test assertion helpers following waku/tests verification patterns.
 */
export const ASSERTIONS = {
  /**
   * Verifies server health response structure.
   */
  serverHealth: (response: AxiosResponse<ServerHealthResponse>) => {
    expect(response.status).toBe(200);
    expect(response.data.status).toBe("Waku simulation server is running");
  },

  /**
   * Verifies peer info response structure.
   */
  peerInfo: (response: AxiosResponse<PeerInfoResponse>) => {
    expect(response.status).toBe(200);
    expect(response.data.peerId).toBeDefined();
    expect(typeof response.data.peerId).toBe("string");
  },

  /**
   * Verifies lightpush response structure (v3 format).
   */
  lightPushV3Success: (response: AxiosResponse<LightPushV3Response>) => {
    expect(response.status).toBe(200);
    expect(response.data).toHaveProperty('success', true);
    expect(response.data).toHaveProperty('result');
    expect(response.data.result).toHaveProperty('successes');
    expect(Array.isArray(response.data.result.successes)).toBe(true);
    expect(response.data.result.successes.length).toBeGreaterThan(0);
  },

  /**
   * Verifies message content and structure.
   */
  messageContent: (message: MessageResponse, expectedContent: string, expectedTopic: string) => {
    expect(message).toHaveProperty('contentTopic', expectedTopic);
    expect(message).toHaveProperty('payload');
    expect(typeof message.payload).toBe('string');

    const receivedPayload = Buffer.from(message.payload, 'base64').toString();
    expect(receivedPayload).toBe(expectedContent);

    // Optional fields
    expect(message).toHaveProperty('version');
    if (message.timestamp) {
      expect(['bigint', 'number']).toContain(typeof message.timestamp);
    }
  },
};
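A sketch of how these constants and helpers could fit together in a Playwright spec. The `/push` endpoint path, the server port, and the multiaddr are illustrative assumptions, not taken from the files above:

import { test } from "@playwright/test";
import axios from "axios";
import { ASSERTIONS, ENV_BUILDERS, TEST_CONFIG } from "./test-config.js";

test("pushes the default message via a local lightpush node", async () => {
  test.setTimeout(TEST_CONFIG.DEFAULT_TEST_TIMEOUT);

  // Env vars handed to the headless browser container (the multiaddr is a placeholder).
  const env = ENV_BUILDERS.withLocalLightPush("/ip4/172.17.0.2/tcp/8000/ws/p2p/<peer-id>");
  void env;

  // "/push" is an assumed endpoint name for the simulation server.
  const response = await axios.post("http://127.0.0.1:8080/push", {
    contentTopic: TEST_CONFIG.DEFAULT_CONTENT_TOPIC,
    payload: Buffer.from(TEST_CONFIG.DEFAULT_TEST_MESSAGE).toString("base64"),
  });

  ASSERTIONS.lightPushV3Success(response);
});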
@ -15,5 +15,5 @@
     "typeRoots": ["./node_modules/@types", "./types"]
   },
   "include": ["src/server.ts", "types/**/*.d.ts"],
-  "exclude": ["node_modules", "dist", "web"]
+  "exclude": ["node_modules", "dist"]
 }
36 packages/browser-tests/types/global.d.ts vendored
@ -1,19 +1,27 @@
-import type { WakuHeadless } from "../web/index.js";
-
-export interface WindowNetworkConfig {
-  clusterId?: number;
-  shards?: number[];
-}
-
-export interface ITestBrowser extends Window {
-  wakuApi: WakuHeadless;
-  __WAKU_NETWORK_CONFIG?: WindowNetworkConfig;
-  __WAKU_LIGHTPUSH_NODE?: string | null;
-  __WAKU_ENR_BOOTSTRAP?: string | null;
-}
-
+import { LightNode } from "@waku/sdk";
+import { IWakuNode } from "../src/api/common.js";
+import {
+  createWakuNode,
+  dialPeers,
+  getDebugInfo,
+  getPeerInfo,
+  pushMessage,
+  subscribe
+} from "../src/api/shared.js";
+
+// Define types for the Waku node and window
 declare global {
+  // eslint-disable-next-line no-unused-vars
   interface Window {
-    wakuApi: WakuHeadless;
+    waku: IWakuNode & LightNode;
+    wakuAPI: {
+      getPeerInfo: typeof getPeerInfo;
+      getDebugInfo: typeof getDebugInfo;
+      pushMessage: typeof pushMessage;
+      dialPeers: typeof dialPeers;
+      createWakuNode: typeof createWakuNode;
+      subscribe: typeof subscribe;
+      [key: string]: any;
+    };
   }
 }
6 packages/browser-tests/types/serve.d.ts vendored
@ -1,9 +1,7 @@
 declare module "serve" {
-  import type { Server } from "http";
-
   function serve(
     folder: string,
-    options: { port: number; single: boolean; listen: boolean },
-  ): Promise<Server>;
+    options: { port: number; single: boolean; listen: boolean }
+  ): any;
   export default serve;
 }
@ -1,14 +0,0 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1" />
    <title>Waku Test Environment</title>
  </head>
  <body>
    <h1>Waku Test Environment</h1>
    <script type="module" src="./index.js"></script>
  </body>
</html>
@ -1,431 +0,0 @@
import {
  createLightNode,
  LightNode,
  Protocols,
  NetworkConfig,
  CreateNodeOptions,
} from "@waku/sdk";
import {
  AutoSharding,
  DEFAULT_CLUSTER_ID,
  DEFAULT_NUM_SHARDS,
  ShardId,
  StaticSharding,
  ShardInfo,
  CreateLibp2pOptions,
  IEncoder,
  ILightPush,
  SDKProtocolResult,
  Failure,
} from "@waku/interfaces";
import { bootstrap } from "@libp2p/bootstrap";
import { EnrDecoder, TransportProtocol } from "@waku/enr";
import type { Multiaddr } from "@multiformats/multiaddr";
import type { ITestBrowser } from "../types/global.js";
import { Logger, StaticShardingRoutingInfo } from "@waku/utils";
import type { PeerId } from "@libp2p/interface";

const log = new Logger("waku-headless");

export interface SerializableSDKProtocolResult {
  successes: string[];
  failures: Array<{
    error: string;
    peerId?: string;
  }>;
  myPeerId?: string;
}

function makeSerializable(result: SDKProtocolResult): SerializableSDKProtocolResult {
  return {
    ...result,
    successes: result.successes.map((peerId: PeerId) => peerId.toString()),
    failures: result.failures.map((failure: Failure) => ({
      error: failure.error || failure.toString(),
      peerId: failure.peerId ? failure.peerId.toString() : undefined,
    })),
  };
}

async function convertEnrToMultiaddrs(enrString: string): Promise<string[]> {
  try {
    const enr = await EnrDecoder.fromString(enrString);
    const allMultiaddrs = enr.getAllLocationMultiaddrs();
    const multiaddrs: string[] = [];

    for (const multiaddr of allMultiaddrs) {
      const maStr = multiaddr.toString();
      multiaddrs.push(maStr);
    }
    if (multiaddrs.length === 0) {
      const tcpMultiaddr = enr.getFullMultiaddr(TransportProtocol.TCP);
      if (tcpMultiaddr) {
        const tcpStr = tcpMultiaddr.toString();
        multiaddrs.push(tcpStr);
      }
      const udpMultiaddr = enr.getFullMultiaddr(TransportProtocol.UDP);
      if (udpMultiaddr) {
        const udpStr = udpMultiaddr.toString();
        multiaddrs.push(udpStr);
      }
    }

    return multiaddrs;
  } catch (error) {
    return [];
  }
}

export class WakuHeadless {
  waku: LightNode | null;
  networkConfig: NetworkConfig;
  lightpushNode: string | null;
  enrBootstrap: string | null;

  constructor(
    networkConfig?: Partial<NetworkConfig>,
    lightpushNode?: string | null,
    enrBootstrap?: string | null,
  ) {
    this.waku = null;
    this.networkConfig = this.buildNetworkConfig(networkConfig);
    log.info("Network config on construction:", this.networkConfig);
    this.lightpushNode = lightpushNode || null;
    this.enrBootstrap = enrBootstrap || null;

    if (this.lightpushNode) {
      log.info(`Configured preferred lightpush node: ${this.lightpushNode}`);
    }
    if (this.enrBootstrap) {
      log.info(`Configured ENR bootstrap: ${this.enrBootstrap}`);
    }
  }

  private shouldUseCustomBootstrap(options: CreateNodeOptions): boolean {
    const hasEnr = Boolean(this.enrBootstrap);
    const isDefaultBootstrap = Boolean(options.defaultBootstrap);

    return hasEnr && !isDefaultBootstrap;
  }

  private async getBootstrapMultiaddrs(): Promise<string[]> {
    if (!this.enrBootstrap) {
      return [];
    }

    const enrList = this.enrBootstrap.split(",").map((enr) => enr.trim());
    const allMultiaddrs: string[] = [];

    for (const enr of enrList) {
      const multiaddrs = await convertEnrToMultiaddrs(enr);
      if (multiaddrs.length > 0) {
        allMultiaddrs.push(...multiaddrs);
      }
    }

    return allMultiaddrs;
  }

  private buildNetworkConfig(
    providedConfig?: Partial<NetworkConfig> | Partial<ShardInfo>,
  ): NetworkConfig {
    const clusterId = providedConfig?.clusterId ?? DEFAULT_CLUSTER_ID;

    const staticShards = (providedConfig as Partial<ShardInfo>)?.shards;
    if (
      staticShards &&
      Array.isArray(staticShards) &&
      staticShards.length > 0
    ) {
      log.info("Using static sharding with shards:", staticShards);
      return {
        clusterId,
      } as StaticSharding;
    }

    const numShardsInCluster =
      (providedConfig as Partial<AutoSharding>)?.numShardsInCluster ?? DEFAULT_NUM_SHARDS;
    log.info(
      "Using auto sharding with num shards in cluster:",
      numShardsInCluster,
    );
    return {
      clusterId,
      numShardsInCluster,
    } as AutoSharding;
  }

  private async send(
    lightPush: ILightPush,
    encoder: IEncoder,
    payload: Uint8Array,
  ): Promise<SDKProtocolResult> {
    return lightPush.send(encoder, {
      payload,
      timestamp: new Date(),
    });
  }

  async pushMessageV3(
    contentTopic: string,
    payload: string,
    pubsubTopic: string,
  ): Promise<SerializableSDKProtocolResult> {
    if (!this.waku) {
      throw new Error("Waku node not started");
    }
    log.info(
      "Pushing message via v3 lightpush:",
      contentTopic,
      payload,
      pubsubTopic,
    );
    log.info("Waku node:", this.waku);
    log.info("Network config:", this.networkConfig);

    let processedPayload: Uint8Array;
    try {
      const binaryString = atob(payload);
      const bytes = new Uint8Array(binaryString.length);
      for (let i = 0; i < binaryString.length; i++) {
        bytes[i] = binaryString.charCodeAt(i);
      }
      processedPayload = bytes;
    } catch (e) {
      processedPayload = new TextEncoder().encode(payload);
    }

    try {
      const lightPush = this.waku.lightPush;
      if (!lightPush) {
        throw new Error("Lightpush service not available");
      }

      let shardId: ShardId | undefined;
      if (pubsubTopic) {
        const staticShardingRoutingInfo =
          StaticShardingRoutingInfo.fromPubsubTopic(
            pubsubTopic,
            this.networkConfig as StaticSharding,
          );
        shardId = staticShardingRoutingInfo?.shardId;
      }

      const encoder = this.waku.createEncoder({
        contentTopic,
        shardId,
      });
      log.info("Encoder:", encoder);
      log.info("Pubsub topic:", pubsubTopic);
      log.info("Encoder pubsub topic:", encoder.pubsubTopic);

      if (pubsubTopic && pubsubTopic !== encoder.pubsubTopic) {
        log.warn(
          `Explicit pubsubTopic ${pubsubTopic} provided, but auto-sharding determined ${encoder.pubsubTopic}. Using auto-sharding.`,
        );
      }

      let result;
      if (this.lightpushNode) {
        try {
          const preferredPeerId = this.getPeerIdFromMultiaddr(
            this.lightpushNode,
          );
          if (preferredPeerId) {
            result = await this.send(lightPush, encoder, processedPayload);
            log.info("✅ Message sent via preferred lightpush node");
          } else {
            throw new Error(
              "Could not extract peer ID from preferred node address",
            );
          }
        } catch (error) {
          log.error(
            "Couldn't send message via preferred lightpush node:",
            error,
          );
          result = await this.send(lightPush, encoder, processedPayload);
        }
      } else {
        result = await this.send(lightPush, encoder, processedPayload);
      }

      const serializableResult = makeSerializable(result);

      return serializableResult;
    } catch (error) {
      log.error("Error sending message via v3 lightpush:", error);
      throw new Error(
        `Failed to send v3 message: ${error instanceof Error ? error.message : String(error)}`,
      );
    }
  }

  async waitForPeers(
    timeoutMs: number = 10000,
    protocols: Protocols[] = [Protocols.LightPush, Protocols.Filter],
  ) {
    if (!this.waku) {
      throw new Error("Waku node not started");
    }

    const startTime = Date.now();

    try {
      await this.waku.waitForPeers(protocols, timeoutMs);
      const elapsed = Date.now() - startTime;

      const peers = this.waku.libp2p.getPeers();

      return {
        success: true,
        peersFound: peers.length,
        protocolsRequested: protocols,
        timeElapsed: elapsed,
      };
    } catch (error) {
      const elapsed = Date.now() - startTime;
      log.error(`Failed to find peers after ${elapsed}ms:`, error);
      throw error;
    }
  }

  async createWakuNode(options: CreateNodeOptions) {
    try {
      if (this.waku) {
        await this.waku.stop();
      }
    } catch (e) {
      log.warn("ignore previous waku stop error");
    }

    const libp2pConfig: CreateLibp2pOptions = {
      ...options.libp2p,
      filterMultiaddrs: false,
    };

    if (this.enrBootstrap) {
      const multiaddrs = await this.getBootstrapMultiaddrs();

      if (multiaddrs.length > 0) {
        libp2pConfig.peerDiscovery = [
          bootstrap({ list: multiaddrs }),
          ...(options.libp2p?.peerDiscovery || []),
        ];
      }
    }

    const createOptions = {
      ...options,
      networkConfig: this.networkConfig,
      libp2p: libp2pConfig,
    };

    this.waku = await createLightNode(createOptions);
    return { success: true };
  }

  async startNode() {
    if (!this.waku) {
      throw new Error("Waku node not created");
    }
    await this.waku.start();

    if (this.lightpushNode) {
      await this.dialPreferredLightpushNode();
    }

    return { success: true };
  }

  private async dialPreferredLightpushNode() {
    if (!this.waku || !this.lightpushNode) {
      log.info("Skipping dial: waku or lightpushNode not set");
      return;
    }

    try {
      log.info("Attempting to dial preferred lightpush node:", this.lightpushNode);
      await this.waku.dial(this.lightpushNode);
      log.info("Successfully dialed preferred lightpush node:", this.lightpushNode);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      log.error(
        "Failed to dial preferred lightpush node:",
        this.lightpushNode,
        message
      );
    }
  }

  private getPeerIdFromMultiaddr(multiaddr: string): string | null {
    const parts = multiaddr.split("/");
    const p2pIndex = parts.indexOf("p2p");
    return p2pIndex !== -1 && p2pIndex + 1 < parts.length
      ? parts[p2pIndex + 1]
      : null;
  }

  async stopNode() {
    if (!this.waku) {
      throw new Error("Waku node not created");
    }
    await this.waku.stop();
    return { success: true };
  }

  getPeerInfo() {
    if (!this.waku) {
      throw new Error("Waku node not started");
    }

    const addrs = this.waku.libp2p.getMultiaddrs();
    return {
      peerId: this.waku.libp2p.peerId.toString(),
      multiaddrs: addrs.map((a: Multiaddr) => a.toString()),
      peers: [],
    };
  }
}

(() => {
  try {
    log.info("Initializing WakuHeadless...");

    const testWindow = window as ITestBrowser;
    const globalNetworkConfig = testWindow.__WAKU_NETWORK_CONFIG;
    const globalLightpushNode = testWindow.__WAKU_LIGHTPUSH_NODE;
    const globalEnrBootstrap = testWindow.__WAKU_ENR_BOOTSTRAP;

    log.info("Global config from window:", {
      networkConfig: globalNetworkConfig,
      lightpushNode: globalLightpushNode,
      enrBootstrap: globalEnrBootstrap
    });

    const instance = new WakuHeadless(
      globalNetworkConfig,
      globalLightpushNode,
      globalEnrBootstrap,
    );

    testWindow.wakuApi = instance;
    log.info("WakuHeadless initialized successfully:", !!testWindow.wakuApi);
  } catch (error) {
    log.error("Error initializing WakuHeadless:", error);
    const testWindow = window as ITestBrowser;
    // Create a stub wakuApi that will reject all method calls
    testWindow.wakuApi = {
      waku: null,
      networkConfig: { clusterId: 0, numShardsInCluster: 0 },
      lightpushNode: null,
      enrBootstrap: null,
      error,
      createWakuNode: () => Promise.reject(new Error("WakuHeadless failed to initialize")),
      startNode: () => Promise.reject(new Error("WakuHeadless failed to initialize")),
      stopNode: () => Promise.reject(new Error("WakuHeadless failed to initialize")),
      pushMessageV3: () => Promise.reject(new Error("WakuHeadless failed to initialize")),
      waitForPeers: () => Promise.reject(new Error("WakuHeadless failed to initialize")),
      getPeerInfo: () => { throw new Error("WakuHeadless failed to initialize"); },
    } as unknown as WakuHeadless;
  }
})();
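For orientation, a sketch of how a Playwright spec might drive this bundle from outside the browser, assuming the page at the server URL has loaded the script above (the URL and option values are illustrative):

import { test, expect } from "@playwright/test";

test("send a message through the headless Waku node", async ({ page }) => {
  await page.goto("http://127.0.0.1:8080/");

  // All calls below run in the browser context against the wakuApi set up by the IIFE.
  const result = await page.evaluate(async () => {
    await window.wakuApi.createWakuNode({ defaultBootstrap: true });
    await window.wakuApi.startNode();
    await window.wakuApi.waitForPeers(10000);
    return window.wakuApi.pushMessageV3(
      "/test/1/browser-tests/proto",
      btoa("Hello from browser tests"),
      ""
    );
  });

  expect(result.successes.length).toBeGreaterThan(0);
});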
@ -5,65 +5,6 @@ All notable changes to this project will be documented in this file.
 The file is maintained by [Release Please](https://github.com/googleapis/release-please) based on [Conventional Commits](https://www.conventionalcommits.org) specification,
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

-## [0.0.40](https://github.com/waku-org/js-waku/compare/core-v0.0.39...core-v0.0.40) (2025-10-31)
-
-
-### Dependencies
-
-* The following workspace dependencies were updated
-  * dependencies
-    * @waku/proto bumped from 0.0.14 to 0.0.15
-
-## [0.0.39](https://github.com/waku-org/js-waku/compare/core-v0.0.38...core-v0.0.39) (2025-09-20)
-
-
-### Features
-
-* Add start/stop to filter ([#2592](https://github.com/waku-org/js-waku/issues/2592)) ([2fba052](https://github.com/waku-org/js-waku/commit/2fba052b8b98cb64f6383de95d01b33beb771448))
-* Expose message hash from IDecodedMessage ([#2578](https://github.com/waku-org/js-waku/issues/2578)) ([836d6b8](https://github.com/waku-org/js-waku/commit/836d6b8793a5124747684f6ea76b6dd47c73048b))
-* Implement lp-v3 error codes with backwards compatibility ([#2501](https://github.com/waku-org/js-waku/issues/2501)) ([1625302](https://github.com/waku-org/js-waku/commit/16253026c6e30052d87d9975b58480951de469d8))
-* Implement peer-store re-bootstrapping ([#2641](https://github.com/waku-org/js-waku/issues/2641)) ([11d84ad](https://github.com/waku-org/js-waku/commit/11d84ad342fe45158ef0734f9ca070f14704503f))
-* StoreConnect events ([#2601](https://github.com/waku-org/js-waku/issues/2601)) ([0dfbcf6](https://github.com/waku-org/js-waku/commit/0dfbcf6b6bd9225dcb0dec540aeb1eb2703c8397))
-
-
-### Dependencies
-
-* The following workspace dependencies were updated
-  * dependencies
-    * @waku/enr bumped from ^0.0.32 to ^0.0.33
-    * @waku/interfaces bumped from 0.0.33 to 0.0.34
-    * @waku/proto bumped from 0.0.13 to 0.0.14
-    * @waku/utils bumped from 0.0.26 to 0.0.27
-
-## [0.0.38](https://github.com/waku-org/js-waku/compare/core-v0.0.37...core-v0.0.38) (2025-08-14)
-
-
-### ⚠ BREAKING CHANGES
-
-* local peer discovery improvements ([#2557](https://github.com/waku-org/js-waku/issues/2557))
-* Introduce routing info concept
-
-### Features
-
-* Introduce routing info concept ([3842d84](https://github.com/waku-org/js-waku/commit/3842d84b55eb96728f6b05b9307ff823fac58a54))
-* Local peer discovery improvements ([#2557](https://github.com/waku-org/js-waku/issues/2557)) ([eab8ce8](https://github.com/waku-org/js-waku/commit/eab8ce81b431b11d79dcbec31aea759319853336))
-* Peer exchange discovery improvements ([#2537](https://github.com/waku-org/js-waku/issues/2537)) ([95da57a](https://github.com/waku-org/js-waku/commit/95da57a8705fa195529ef52a6c908642da5e120c))
-
-
-### Bug Fixes
-
-* Improve error handling for stream manager ([#2546](https://github.com/waku-org/js-waku/issues/2546)) ([ada2657](https://github.com/waku-org/js-waku/commit/ada265731acfeddc2bfe2e8e963bc2be37f13900))
-
-
-### Dependencies
-
-* The following workspace dependencies were updated
-  * dependencies
-    * @waku/enr bumped from ^0.0.31 to ^0.0.32
-    * @waku/interfaces bumped from 0.0.32 to 0.0.33
-    * @waku/proto bumped from 0.0.12 to 0.0.13
-    * @waku/utils bumped from 0.0.25 to 0.0.26
-
 ## [0.0.37](https://github.com/waku-org/js-waku/compare/core-v0.0.36...core-v0.0.37) (2025-07-18)
@ -1,6 +1,6 @@
 {
   "name": "@waku/core",
-  "version": "0.0.40",
+  "version": "0.0.37",
   "description": "TypeScript implementation of the Waku v2 protocol",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@ -28,7 +28,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/core#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
@ -64,11 +64,11 @@
     "node": ">=22"
   },
   "dependencies": {
-    "@waku/enr": "^0.0.33",
-    "@waku/interfaces": "0.0.34",
+    "@waku/enr": "^0.0.31",
+    "@waku/interfaces": "0.0.32",
     "@libp2p/ping": "2.0.35",
-    "@waku/proto": "0.0.15",
-    "@waku/utils": "0.0.27",
+    "@waku/proto": "0.0.12",
+    "@waku/utils": "0.0.25",
     "debug": "^4.3.4",
     "@noble/hashes": "^1.3.2",
     "it-all": "^3.0.4",
@ -10,11 +10,7 @@ export * as waku_filter from "./lib/filter/index.js";
 export { FilterCore, FilterCodecs } from "./lib/filter/index.js";

 export * as waku_light_push from "./lib/light_push/index.js";
-export {
-  LightPushCore,
-  LightPushCodec,
-  LightPushCodecV2
-} from "./lib/light_push/index.js";
+export { LightPushCodec, LightPushCore } from "./lib/light_push/index.js";

 export * as waku_store from "./lib/store/index.js";
 export { StoreCore, StoreCodec } from "./lib/store/index.js";
@ -3,8 +3,7 @@ import { multiaddr } from "@multiformats/multiaddr";
 import {
   CONNECTION_LOCKED_TAG,
   IWakuEventEmitter,
-  Tags,
-  WakuEvent
+  Tags
 } from "@waku/interfaces";
 import { expect } from "chai";
 import sinon from "sinon";
@ -87,12 +86,6 @@ describe("ConnectionLimiter", () => {
     mockPeer2 = createMockPeer("12D3KooWTest2", [Tags.BOOTSTRAP]); // Ensure mockPeer2 is prioritized and dialed
     mockConnection = createMockConnection(mockPeerId, [Tags.BOOTSTRAP]);
-
-    dialer = {
-      start: sinon.stub(),
-      stop: sinon.stub(),
-      dial: sinon.stub().resolves()
-    } as unknown as sinon.SinonStubbedInstance<Dialer>;

     libp2p = {
       addEventListener: sinon.stub(),
       removeEventListener: sinon.stub(),
@ -101,11 +94,7 @@ describe("ConnectionLimiter", () => {
       getConnections: sinon.stub().returns([]),
       peerStore: {
         all: sinon.stub().resolves([]),
-        get: sinon.stub().resolves(mockPeer),
-        merge: sinon.stub().resolves()
-      },
-      components: {
-        components: {}
+        get: sinon.stub().resolves(mockPeer)
       }
     };

@ -122,20 +111,6 @@ describe("ConnectionLimiter", () => {
       isConnected: sinon.stub().returns(true),
       isP2PConnected: sinon.stub().returns(true)
     } as unknown as sinon.SinonStubbedInstance<NetworkMonitor>;
-
-    // Mock the libp2p components needed by isAddressesSupported
-    libp2p.components = {
-      components: {},
-      transportManager: {
-        getTransports: sinon.stub().returns([
-          {
-            dialFilter: sinon
-              .stub()
-              .returns([multiaddr("/dns4/test/tcp/443/wss")])
-          }
-        ])
-      }
-    };
   });

   afterEach(() => {
@ -168,7 +143,7 @@ describe("ConnectionLimiter", () => {
       .true;
     expect(
       (events.addEventListener as sinon.SinonStub).calledWith(
-        WakuEvent.Connection,
+        "waku:connection",
         sinon.match.func
       )
     ).to.be.true;
@ -203,7 +178,7 @@ describe("ConnectionLimiter", () => {
       .true;
     expect(
       (events.removeEventListener as sinon.SinonStub).calledWith(
-        WakuEvent.Connection,
+        "waku:connection",
        sinon.match.func
      )
    ).to.be.true;
@ -298,6 +273,11 @@ describe("ConnectionLimiter", () => {

   describe("dialPeersFromStore", () => {
     beforeEach(() => {
+      dialer = {
+        start: sinon.stub(),
+        stop: sinon.stub(),
+        dial: sinon.stub().resolves()
+      } as unknown as sinon.SinonStubbedInstance<Dialer>;
       libp2p.hangUp = sinon.stub().resolves();
       connectionLimiter = createLimiter();
       mockPeer.addresses = [
@ -423,6 +403,11 @@ describe("ConnectionLimiter", () => {

   describe("maintainConnectionsCount", () => {
     beforeEach(() => {
+      dialer = {
+        start: sinon.stub(),
+        stop: sinon.stub(),
+        dial: sinon.stub().resolves()
+      } as unknown as sinon.SinonStubbedInstance<Dialer>;
       libp2p.hangUp = sinon.stub().resolves();
       connectionLimiter = createLimiter({ maxConnections: 2 });
       mockPeer.addresses = [
@ -523,13 +508,12 @@ describe("ConnectionLimiter", () => {
     pxPeer.addresses = [
       { multiaddr: multiaddr("/dns4/px/tcp/443/wss"), isCertified: false }
     ];
-    const localPeer = createMockPeer("l", [Tags.PEER_CACHE]);
+    const localPeer = createMockPeer("l", [Tags.LOCAL]);
     localPeer.addresses = [
       { multiaddr: multiaddr("/dns4/l/tcp/443/wss"), isCertified: false }
     ];
     libp2p.peerStore.all.resolves([bootstrapPeer, pxPeer, localPeer]);
     libp2p.getConnections.returns([]);
-    connectionLimiter = createLimiter();
     const peers = await (connectionLimiter as any).getPrioritizedPeers();
     expect(peers[0].id.toString()).to.equal("b");
     expect(peers[1].id.toString()).to.equal("px");
@ -5,14 +5,12 @@ import {
   IWakuEventEmitter,
   Libp2p,
   Libp2pEventHandler,
-  Tags,
-  WakuEvent
+  Tags
 } from "@waku/interfaces";
 import { Logger } from "@waku/utils";

 import { Dialer } from "./dialer.js";
 import { NetworkMonitor } from "./network_monitor.js";
-import { isAddressesSupported } from "./utils.js";

 const log = new Logger("connection-limiter");

@ -71,10 +69,7 @@ export class ConnectionLimiter implements IConnectionLimiter {
       );
     }

-    this.events.addEventListener(
-      WakuEvent.Connection,
-      this.onWakuConnectionEvent
-    );
+    this.events.addEventListener("waku:connection", this.onWakuConnectionEvent);

     /**
      * NOTE: Event is not being emitted on closing nor losing a connection.
@ -95,7 +90,7 @@ export class ConnectionLimiter implements IConnectionLimiter {

   public stop(): void {
     this.events.removeEventListener(
-      WakuEvent.Connection,
+      "waku:connection",
       this.onWakuConnectionEvent
     );

@ -146,15 +141,13 @@ export class ConnectionLimiter implements IConnectionLimiter {
     const peers = await this.getPrioritizedPeers();

     if (peers.length === 0) {
-      log.info(`No peers to dial, skipping`);
-      await this.triggerBootstrap();
+      log.info(`No peers to dial, node is utilizing all known peers`);
       return;
     }

     const promises = peers
       .slice(0, this.options.maxConnections - connections.length)
       .map((p) => this.dialer.dial(p.id));

     await Promise.all(promises);

     return;
@ -221,7 +214,6 @@ export class ConnectionLimiter implements IConnectionLimiter {

     if (peers.length === 0) {
       log.info(`No peers to dial, skipping`);
-      await this.triggerBootstrap();
       return;
     }

@ -239,14 +231,11 @@ export class ConnectionLimiter implements IConnectionLimiter {
    * Returns a list of peers ordered by priority:
    * - bootstrap peers
    * - peers from peer exchange
-   * - peers from peer cache (last because we are not sure that locally stored information is up to date)
+   * - peers from local store (last because we are not sure that locally stored information is up to date)
    */
   private async getPrioritizedPeers(): Promise<Peer[]> {
     const allPeers = await this.libp2p.peerStore.all();
     const allConnections = this.libp2p.getConnections();
-    const allConnectionsSet = new Set(
-      allConnections.map((c) => c.remotePeer.toString())
-    );

     log.info(
       `Found ${allPeers.length} peers in store, and found ${allConnections.length} connections`
@ -254,10 +243,11 @@ export class ConnectionLimiter implements IConnectionLimiter {

     const notConnectedPeers = allPeers.filter(
       (p) =>
-        !allConnectionsSet.has(p.id.toString()) &&
-        isAddressesSupported(
-          this.libp2p,
-          p.addresses.map((a) => a.multiaddr)
+        !allConnections.some((c) => c.remotePeer.equals(p.id)) &&
+        p.addresses.some(
+          (a) =>
+            a.multiaddr.toString().includes("wss") ||
+            a.multiaddr.toString().includes("ws")
         )
     );

@ -270,22 +260,10 @@ export class ConnectionLimiter implements IConnectionLimiter {
     );

     const localStorePeers = notConnectedPeers.filter((p) =>
-      p.tags.has(Tags.PEER_CACHE)
+      p.tags.has(Tags.LOCAL)
     );

-    const restPeers = notConnectedPeers.filter(
-      (p) =>
-        !p.tags.has(Tags.BOOTSTRAP) &&
-        !p.tags.has(Tags.PEER_EXCHANGE) &&
-        !p.tags.has(Tags.PEER_CACHE)
-    );
-
-    return [
-      ...bootstrapPeers,
-      ...peerExchangePeers,
-      ...localStorePeers,
-      ...restPeers
-    ];
+    return [...bootstrapPeers, ...peerExchangePeers, ...localStorePeers];
   }

   private async getBootstrapPeers(): Promise<Peer[]> {
@ -296,9 +274,11 @@ export class ConnectionLimiter implements IConnectionLimiter {
         .map((id) => this.getPeer(id))
     );

-    return peers.filter(
+    const bootstrapPeers = peers.filter(
       (peer) => peer && peer.tags.has(Tags.BOOTSTRAP)
     ) as Peer[];
+
+    return bootstrapPeers;
   }

   private async getPeer(peerId: PeerId): Promise<Peer | null> {
@ -309,41 +289,4 @@ export class ConnectionLimiter implements IConnectionLimiter {
       return null;
     }
   }
-
-  /**
-   * Triggers the bootstrap or peer cache discovery if they are mounted.
-   * @returns void
-   */
-  private async triggerBootstrap(): Promise<void> {
-    log.info("Triggering bootstrap discovery");
-
-    const bootstrapComponents = Object.values(this.libp2p.components.components)
-      .filter((c) => !!c)
-      .filter((c: unknown) =>
-        [`@waku/${Tags.BOOTSTRAP}`, `@waku/${Tags.PEER_CACHE}`].includes(
-          (c as { [Symbol.toStringTag]: string })?.[Symbol.toStringTag]
-        )
-      );
-
-    if (bootstrapComponents.length === 0) {
-      log.warn("No bootstrap components found to trigger");
-      return;
-    }
-
-    log.info(
-      `Found ${bootstrapComponents.length} bootstrap components, starting them`
-    );
-
-    const promises = bootstrapComponents.map(async (component) => {
-      try {
-        await (component as { stop: () => Promise<void> })?.stop?.();
-        await (component as { start: () => Promise<void> })?.start?.();
-        log.info("Successfully started bootstrap component");
-      } catch (error) {
-        log.error("Failed to start bootstrap component", error);
-      }
-    });
-
-    await Promise.all(promises);
-  }
 }
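Worth noting: the master side of getPrioritizedPeers precomputes a Set of connected peer IDs, while the older branch scans the connection list once per peer with `some(...)`. A minimal self-contained sketch of the Set-based pattern, using types from @libp2p/interface:

import type { Connection, Peer } from "@libp2p/interface";

// Build the Set once, then filter: O(peers + connections) overall,
// versus O(peers × connections) for a per-peer linear scan.
function filterNotConnected(allPeers: Peer[], allConnections: Connection[]): Peer[] {
  const connectedIds = new Set(allConnections.map((c) => c.remotePeer.toString()));
  return allPeers.filter((p) => !connectedIds.has(p.id.toString()));
}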
@ -15,7 +15,7 @@ import { ConnectionManager } from "./connection_manager.js";
 import { DiscoveryDialer } from "./discovery_dialer.js";
 import { KeepAliveManager } from "./keep_alive_manager.js";
 import { NetworkMonitor } from "./network_monitor.js";
-import { IShardReader, ShardReader } from "./shard_reader.js";
+import { ShardReader } from "./shard_reader.js";

 describe("ConnectionManager", () => {
   let libp2p: Libp2p;
@ -30,7 +30,7 @@ describe("ConnectionManager", () => {
   // Mock internal components
   let mockKeepAliveManager: sinon.SinonStubbedInstance<KeepAliveManager>;
   let mockDiscoveryDialer: sinon.SinonStubbedInstance<DiscoveryDialer>;
-  let mockShardReader: sinon.SinonStubbedInstance<IShardReader>;
+  let mockShardReader: sinon.SinonStubbedInstance<ShardReader>;
   let mockNetworkMonitor: sinon.SinonStubbedInstance<NetworkMonitor>;
   let mockConnectionLimiter: sinon.SinonStubbedInstance<ConnectionLimiter>;

@ -52,12 +52,6 @@ describe("ConnectionManager", () => {
       dialProtocol: sinon.stub().resolves({} as Stream),
       hangUp: sinon.stub().resolves(),
       getPeers: sinon.stub().returns([]),
-      getConnections: sinon.stub().returns([]),
-      addEventListener: sinon.stub(),
-      removeEventListener: sinon.stub(),
-      components: {
-        components: {}
-      },
       peerStore: {
         get: sinon.stub().resolves(null),
         merge: sinon.stub().resolves()
@ -69,7 +63,7 @@ describe("ConnectionManager", () => {
     } as unknown as IWakuEventEmitter;

     networkConfig = {
-      clusterId: 2,
+      clusterId: 1,
       shards: [0, 1]
     } as NetworkConfig;

@ -93,7 +87,7 @@ describe("ConnectionManager", () => {

     mockShardReader = {
       isPeerOnTopic: sinon.stub().resolves(true)
-    } as unknown as sinon.SinonStubbedInstance<IShardReader>;
+    } as unknown as sinon.SinonStubbedInstance<ShardReader>;

     mockNetworkMonitor = {
       start: sinon.stub(),
@ -5,8 +5,7 @@ import {
   IConnectionManager,
   IRelay,
   IWakuEventEmitter,
-  NetworkConfig,
-  ShardId
+  NetworkConfig
 } from "@waku/interfaces";
 import { Libp2p } from "@waku/interfaces";
 import { Logger } from "@waku/utils";
@ -46,7 +45,7 @@ export class ConnectionManager implements IConnectionManager {
   private readonly networkMonitor: NetworkMonitor;
   private readonly connectionLimiter: ConnectionLimiter;

-  private readonly options: ConnectionManagerOptions;
+  private options: ConnectionManagerOptions;
   private libp2p: Libp2p;

   public constructor(options: ConnectionManagerConstructorOptions) {
@ -67,7 +66,6 @@ export class ConnectionManager implements IConnectionManager {
     this.keepAliveManager = new KeepAliveManager({
       relay: options.relay,
       libp2p: options.libp2p,
-      networkConfig: options.networkConfig,
       options: {
         pingKeepAlive: this.options.pingKeepAlive,
         relayKeepAlive: this.options.relayKeepAlive
@ -196,11 +194,4 @@ export class ConnectionManager implements IConnectionManager {
   ): Promise<boolean> {
     return this.shardReader.isPeerOnTopic(peerId, pubsubTopic);
   }
-
-  public async isPeerOnShard(
-    peerId: PeerId,
-    shardId: ShardId
-  ): Promise<boolean> {
-    return this.shardReader.isPeerOnShard(peerId, shardId);
-  }
 }
@ -29,7 +29,7 @@ describe("Dialer", () => {

     mockShardReader = {
       hasShardInfo: sinon.stub().resolves(false),
-      isPeerOnCluster: sinon.stub().resolves(true)
+      isPeerOnNetwork: sinon.stub().resolves(true)
     } as unknown as sinon.SinonStubbedInstance<ShardReader>;

     mockOptions = {
@ -280,9 +280,9 @@ describe("Dialer", () => {
     expect(dialStub.calledTwice).to.be.true;
   });

-  it("should skip peer when not on same cluster", async () => {
+  it("should skip peer when not on same shard", async () => {
     mockShardReader.hasShardInfo.resolves(true);
-    mockShardReader.isPeerOnCluster.resolves(false);
+    mockShardReader.isPeerOnNetwork.resolves(false);

     const dialStub = libp2p.dial as sinon.SinonStub;

@ -290,12 +290,12 @@ describe("Dialer", () => {

     expect(dialStub.called).to.be.false;
     expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-    expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
+    expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
   });

   it("should dial peer when on same shard", async () => {
     mockShardReader.hasShardInfo.resolves(true);
-    mockShardReader.isPeerOnCluster.resolves(true);
+    mockShardReader.isPeerOnNetwork.resolves(true);

     const dialStub = libp2p.dial as sinon.SinonStub;
     dialStub.resolves();
@ -305,7 +305,7 @@ describe("Dialer", () => {
     expect(dialStub.calledOnce).to.be.true;
     expect(dialStub.calledWith(mockPeerId)).to.be.true;
     expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-    expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
+    expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
   });

   it("should dial peer when no shard info available", async () => {
@ -319,7 +319,7 @@ describe("Dialer", () => {
     expect(dialStub.calledOnce).to.be.true;
     expect(dialStub.calledWith(mockPeerId)).to.be.true;
     expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-    expect(mockShardReader.isPeerOnCluster.called).to.be.false;
+    expect(mockShardReader.isPeerOnNetwork.called).to.be.false;
   });

   it("should handle dial errors gracefully", async () => {
@ -468,7 +468,7 @@ describe("Dialer", () => {

   it("should handle network check errors gracefully", async () => {
     mockShardReader.hasShardInfo.resolves(true);
-    mockShardReader.isPeerOnCluster.rejects(new Error("Network check error"));
+    mockShardReader.isPeerOnNetwork.rejects(new Error("Network check error"));

     const dialStub = libp2p.dial as sinon.SinonStub;

@ -476,7 +476,7 @@ describe("Dialer", () => {

     expect(dialStub.called).to.be.false;
     expect(mockShardReader.hasShardInfo.calledWith(mockPeerId)).to.be.true;
-    expect(mockShardReader.isPeerOnCluster.calledWith(mockPeerId)).to.be.true;
+    expect(mockShardReader.isPeerOnNetwork.calledWith(mockPeerId)).to.be.true;
   });
 });

@ -512,7 +512,7 @@ describe("Dialer", () => {
     dialStub.resolves();

     mockShardReader.hasShardInfo.withArgs(mockPeerId).resolves(true);
-    mockShardReader.isPeerOnCluster.withArgs(mockPeerId).resolves(true);
+    mockShardReader.isPeerOnNetwork.withArgs(mockPeerId).resolves(true);
|
||||||
|
|
||||||
mockShardReader.hasShardInfo.withArgs(mockPeerId2).resolves(false);
|
mockShardReader.hasShardInfo.withArgs(mockPeerId2).resolves(false);
|
||||||
|
|
||||||
|
|||||||
@ -153,9 +153,9 @@ export class Dialer implements IDialer {
       return false;
     }

-    const isOnSameCluster = await this.shardReader.isPeerOnCluster(peerId);
-    if (!isOnSameCluster) {
-      log.info(`Skipping peer ${peerId} - not on same cluster`);
+    const isOnSameShard = await this.shardReader.isPeerOnNetwork(peerId);
+    if (!isOnSameShard) {
+      log.info(`Skipping peer ${peerId} - not on same shard`);
       return true;
     }

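
The Dialer hunk above is the behavioural core of the rename: master gates dials on isPeerOnCluster, the enr-v0.0.3 branch on isPeerOnNetwork. A distilled sketch of the skip logic that the specs exercise, written against the master names; the try/catch mirrors the "network check errors" test above, which expects a failed check to skip the dial:

import type { PeerId } from "@libp2p/interface";

type ShardReaderLike = {
  hasShardInfo(id: PeerId): Promise<boolean>;
  isPeerOnCluster(id: PeerId): Promise<boolean>;
};

// Returns true when the peer should NOT be dialed.
async function shouldSkipPeer(
  reader: ShardReaderLike,
  peerId: PeerId
): Promise<boolean> {
  try {
    // Peers that advertise no shard info are dialed optimistically.
    if (!(await reader.hasShardInfo(peerId))) {
      return false;
    }
    // Peers on a different cluster are skipped.
    return !(await reader.isPeerOnCluster(peerId));
  } catch {
    // Per the spec above, a failing membership check also skips the dial.
    return true;
  }
}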
@ -1,5 +1,4 @@
 import type { PeerId } from "@libp2p/interface";
-import { AutoSharding } from "@waku/interfaces";
 import { expect } from "chai";
 import sinon from "sinon";

@ -24,11 +23,6 @@ describe("KeepAliveManager", () => {
     relayKeepAlive: 60
   };

-  const defaultNetworkConfig: AutoSharding = {
-    clusterId: 0,
-    numShardsInCluster: 1
-  };
-
   beforeEach(() => {
     clock = sinon.useFakeTimers();

@ -67,7 +61,6 @@ describe("KeepAliveManager", () => {
     it("should create KeepAliveManager with required options", () => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p
       });

@ -77,7 +70,6 @@ describe("KeepAliveManager", () => {
     it("should create KeepAliveManager with relay", () => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -90,7 +82,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p
       });
     });
@ -119,7 +110,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -168,7 +158,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -205,7 +194,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -237,7 +225,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p
       });
       keepAliveManager.start();
@ -257,7 +244,6 @@ describe("KeepAliveManager", () => {
       keepAliveManager.stop();
       keepAliveManager = new KeepAliveManager({
         options: { pingKeepAlive: 0, relayKeepAlive: 0 },
-        networkConfig: defaultNetworkConfig,
         libp2p
       });
       keepAliveManager.start();
@ -331,7 +317,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -352,7 +337,6 @@ describe("KeepAliveManager", () => {
       keepAliveManager.stop();
       keepAliveManager = new KeepAliveManager({
         options: { pingKeepAlive: 30, relayKeepAlive: 0 },
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -371,7 +355,6 @@ describe("KeepAliveManager", () => {
       keepAliveManager.stop();
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p
       });
       keepAliveManager.start();
@ -440,7 +423,6 @@ describe("KeepAliveManager", () => {
     beforeEach(() => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -507,7 +489,6 @@ describe("KeepAliveManager", () => {

       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay: emptyRelay
       });
@ -525,7 +506,6 @@ describe("KeepAliveManager", () => {
     it("should handle all zero keep alive options", () => {
       keepAliveManager = new KeepAliveManager({
         options: { pingKeepAlive: 0, relayKeepAlive: 0 },
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -545,7 +525,6 @@ describe("KeepAliveManager", () => {

       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -565,7 +544,6 @@ describe("KeepAliveManager", () => {
     it("should handle complete peer lifecycle", async () => {
       keepAliveManager = new KeepAliveManager({
         options: defaultOptions,
-        networkConfig: defaultNetworkConfig,
         libp2p,
         relay
       });
@ -1,6 +1,6 @@
 import type { PeerId } from "@libp2p/interface";
-import type { IEncoder, IRelay, Libp2p, NetworkConfig } from "@waku/interfaces";
-import { createRoutingInfo, Logger } from "@waku/utils";
+import type { IEncoder, IRelay, Libp2p } from "@waku/interfaces";
+import { Logger, pubsubTopicToSingleShardInfo } from "@waku/utils";
 import { utf8ToBytes } from "@waku/utils/bytes";

 import { createEncoder } from "../message/version_0.js";
@ -15,7 +15,6 @@ type KeepAliveOptions = {

 type CreateKeepAliveManagerOptions = {
   options: KeepAliveOptions;
-  networkConfig: NetworkConfig;
   libp2p: Libp2p;
   relay?: IRelay;
 };
@ -27,7 +26,6 @@ interface IKeepAliveManager {

 export class KeepAliveManager implements IKeepAliveManager {
   private readonly relay?: IRelay;
-  private readonly networkConfig: NetworkConfig;
   private readonly libp2p: Libp2p;

   private readonly options: KeepAliveOptions;
@ -40,12 +38,10 @@ export class KeepAliveManager implements IKeepAliveManager {
   public constructor({
     options,
     relay,
-    networkConfig,
     libp2p
   }: CreateKeepAliveManagerOptions) {
     this.options = options;
     this.relay = relay;
-    this.networkConfig = networkConfig;
     this.libp2p = libp2p;

     this.onPeerConnect = this.onPeerConnect.bind(this);
@ -167,13 +163,8 @@ export class KeepAliveManager implements IKeepAliveManager {
         continue;
       }

-      const routingInfo = createRoutingInfo(this.networkConfig, {
-        contentTopic: RelayPingContentTopic,
-        pubsubTopic: topic
-      });
-
       const encoder = createEncoder({
-        routingInfo: routingInfo,
+        pubsubTopicShardInfo: pubsubTopicToSingleShardInfo(topic),
         contentTopic: RelayPingContentTopic,
         ephemeral: true
       });
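
The last hunk above is the functional difference in KeepAliveManager: master derives routing info for the relay ping from the node's networkConfig, while enr-v0.0.3 derives it from the pubsub topic string alone. A sketch of the master-side construction, using the createRoutingInfo and createEncoder imports shown in this diff; the RelayPingContentTopic value is an assumption here, since the constant's definition sits outside these hunks:

import type { NetworkConfig, PubsubTopic } from "@waku/interfaces";
import { createRoutingInfo } from "@waku/utils";

import { createEncoder } from "../message/version_0.js";

// Assumed value; the real constant lives elsewhere in this file.
const RelayPingContentTopic = "/relay-ping/1/ping/null";

function makePingEncoder(networkConfig: NetworkConfig, topic: PubsubTopic) {
  // Routing info ties the ping to the node's own cluster/shard layout.
  const routingInfo = createRoutingInfo(networkConfig, {
    contentTopic: RelayPingContentTopic,
    pubsubTopic: topic
  });

  return createEncoder({
    routingInfo,
    contentTopic: RelayPingContentTopic,
    ephemeral: true // keep-alive pings should not be persisted by store nodes
  });
}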
@ -1,4 +1,4 @@
-import { IWakuEventEmitter, Libp2p, WakuEvent } from "@waku/interfaces";
+import { IWakuEventEmitter, Libp2p } from "@waku/interfaces";
 import { expect } from "chai";
 import sinon from "sinon";

@ -341,7 +341,7 @@ describe("NetworkMonitor", () => {
       const dispatchedEvent = dispatchEventStub.getCall(0)
         .args[0] as CustomEvent<boolean>;
       expect(dispatchedEvent).to.be.instanceOf(CustomEvent);
-      expect(dispatchedEvent.type).to.equal(WakuEvent.Connection);
+      expect(dispatchedEvent.type).to.equal("waku:connection");
       expect(dispatchedEvent.detail).to.be.true;
     });
   });
@ -1,4 +1,4 @@
-import { IWakuEventEmitter, Libp2p, WakuEvent } from "@waku/interfaces";
+import { IWakuEventEmitter, Libp2p } from "@waku/interfaces";

 type NetworkMonitorConstructorOptions = {
   libp2p: Libp2p;
@ -104,7 +104,7 @@ export class NetworkMonitor implements INetworkMonitor {

   private dispatchNetworkEvent(): void {
     this.events.dispatchEvent(
-      new CustomEvent<boolean>(WakuEvent.Connection, {
+      new CustomEvent<boolean>("waku:connection", {
         detail: this.isConnected()
       })
     );
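
Master replaces the raw "waku:connection" string with the WakuEvent.Connection enum member, so the spec and the implementation can no longer drift apart on the event name. A small listener sketch against the master-side API; it assumes IWakuEventEmitter exposes the usual addEventListener surface:

import { WakuEvent, type IWakuEventEmitter } from "@waku/interfaces";

function watchConnection(events: IWakuEventEmitter): void {
  events.addEventListener(WakuEvent.Connection, (event) => {
    // detail carries the boolean result of NetworkMonitor.isConnected()
    const isConnected = (event as CustomEvent<boolean>).detail;
    console.log("waku connection state:", isConnected);
  });
}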
@ -1,10 +1,9 @@
 import { PeerId } from "@libp2p/interface";
 import {
-  AutoSharding,
-  DEFAULT_NUM_SHARDS,
   NetworkConfig,
   PubsubTopic,
-  ShardInfo
+  ShardInfo,
+  SingleShardInfo
 } from "@waku/interfaces";
 import { contentTopicToShardIndex, encodeRelayShard } from "@waku/utils";
 import { expect } from "chai";
@ -29,14 +28,11 @@ describe("ShardReader", function () {

   const testContentTopic = "/test/1/waku-light-push/utf8";
   const testClusterId = 3;
-  const testShardIndex = contentTopicToShardIndex(
-    testContentTopic,
-    DEFAULT_NUM_SHARDS
-  );
+  const testShardIndex = contentTopicToShardIndex(testContentTopic);

-  const testNetworkConfig: AutoSharding = {
-    clusterId: testClusterId,
-    numShardsInCluster: DEFAULT_NUM_SHARDS
+  const testNetworkConfig: NetworkConfig = {
+    contentTopics: [testContentTopic],
+    clusterId: testClusterId
   };

   const testShardInfo: ShardInfo = {
@ -68,10 +64,10 @@ describe("ShardReader", function () {
   });

   describe("constructor", function () {
-    it("should create ShardReader with auto sharding network config", function () {
-      const config: AutoSharding = {
-        clusterId: 3,
-        numShardsInCluster: 10
+    it("should create ShardReader with contentTopics network config", function () {
+      const config: NetworkConfig = {
+        contentTopics: ["/test/1/waku-light-push/utf8"],
+        clusterId: 3
       };

       const reader = new ShardReader({
@ -82,9 +78,10 @@ describe("ShardReader", function () {
       expect(reader).to.be.instanceOf(ShardReader);
     });

-    it("should create ShardReader with static shards network config", function () {
+    it("should create ShardReader with shards network config", function () {
       const config: NetworkConfig = {
-        clusterId: 3
+        clusterId: 3,
+        shards: [1, 2, 3]
       };

       const reader = new ShardReader({
@ -97,7 +94,7 @@ describe("ShardReader", function () {
   });

   describe("isPeerOnNetwork", function () {
-    it("should return true when peer is on the same cluster", async function () {
+    it("should return true when peer is on the same network", async function () {
       const shardInfoBytes = encodeRelayShard(testShardInfo);
       const mockPeer = {
         metadata: new Map([["shardInfo", shardInfoBytes]])
@ -105,7 +102,7 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

       expect(result).to.be.true;
       sinon.assert.calledWith(mockPeerStore.get, testPeerId);
@ -123,12 +120,12 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

       expect(result).to.be.false;
     });

-    it("should return true even if peer has no overlapping shards", async function () {
+    it("should return false when peer has no overlapping shards", async function () {
       const noOverlapShardInfo: ShardInfo = {
         clusterId: testClusterId,
         shards: [testShardIndex + 100, testShardIndex + 200] // Use different shards
@ -140,9 +137,9 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

-      expect(result).to.be.true;
+      expect(result).to.be.false;
     });

     it("should return false when peer has no shard info", async function () {
@ -152,7 +149,7 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

       expect(result).to.be.false;
     });
@ -160,7 +157,7 @@ describe("ShardReader", function () {
     it("should return false when peer is not found", async function () {
       mockPeerStore.get.rejects(new Error("Peer not found"));

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

       expect(result).to.be.false;
     });
@ -175,10 +172,12 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnShard(
-        testPeerId,
-        testShardIndex
-      );
+      const shard: SingleShardInfo = {
+        clusterId: testClusterId,
+        shard: testShardIndex
+      };

+      const result = await shardReader.isPeerOnShard(testPeerId, shard);
+
       expect(result).to.be.true;
     });
@ -191,15 +190,12 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const shardReaderCluster5 = new ShardReader({
-        libp2p: mockLibp2p as any,
-        networkConfig: { clusterId: 5 }
-      });
+      const shard: SingleShardInfo = {
+        clusterId: 5,
+        shard: testShardIndex
+      };

-      const result = await shardReaderCluster5.isPeerOnShard(
-        testPeerId,
-        testShardIndex
-      );
+      const result = await shardReader.isPeerOnShard(testPeerId, shard);

       expect(result).to.be.false;
     });
@ -212,10 +208,23 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnShard(
-        testPeerId,
-        testShardIndex + 100
-      );
+      const shard: SingleShardInfo = {
+        clusterId: testClusterId,
+        shard: testShardIndex + 100
+      };

+      const result = await shardReader.isPeerOnShard(testPeerId, shard);
+
+      expect(result).to.be.false;
+    });
+
+    it("should return false when shard info is undefined", async function () {
+      const shard: SingleShardInfo = {
+        clusterId: testClusterId,
+        shard: undefined
+      };
+
+      const result = await shardReader.isPeerOnShard(testPeerId, shard);
+
       expect(result).to.be.false;
     });
@ -223,10 +232,12 @@ describe("ShardReader", function () {
     it("should return false when peer shard info is not found", async function () {
       mockPeerStore.get.rejects(new Error("Peer not found"));

-      const result = await shardReader.isPeerOnShard(
-        testPeerId,
-        testShardIndex
-      );
+      const shard: SingleShardInfo = {
+        clusterId: testClusterId,
+        shard: testShardIndex
+      };

+      const result = await shardReader.isPeerOnShard(testPeerId, shard);
+
       expect(result).to.be.false;
     });
@ -296,7 +307,7 @@ describe("ShardReader", function () {
     it("should handle errors gracefully when getting peer info", async function () {
       mockPeerStore.get.rejects(new Error("Network error"));

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

       expect(result).to.be.false;
     });
@ -308,7 +319,7 @@ describe("ShardReader", function () {

       mockPeerStore.get.resolves(mockPeer);

-      const result = await shardReader.isPeerOnCluster(testPeerId);
+      const result = await shardReader.isPeerOnNetwork(testPeerId);

       expect(result).to.be.false;
     });
@ -1,12 +1,13 @@
 import type { PeerId } from "@libp2p/interface";
 import type {
-  ClusterId,
   NetworkConfig,
   PubsubTopic,
-  ShardId,
-  ShardInfo
+  ShardInfo,
+  SingleShardInfo,
+  StaticSharding
 } from "@waku/interfaces";
 import {
+  contentTopicToShardIndex,
   decodeRelayShard,
   Logger,
   pubsubTopicToSingleShardInfo
@ -20,14 +21,10 @@ type ShardReaderConstructorOptions = {
   networkConfig: NetworkConfig;
 };

-export interface IShardReader {
+interface IShardReader {
   hasShardInfo(id: PeerId): Promise<boolean>;
-  isPeerOnCluster(id: PeerId): Promise<boolean>;
-  isPeerOnShard(
-    id: PeerId,
-    clusterId: ClusterId,
-    shard: ShardId
-  ): Promise<boolean>;
+  isPeerOnNetwork(id: PeerId): Promise<boolean>;
+  isPeerOnShard(id: PeerId, shard: SingleShardInfo): Promise<boolean>;
   isPeerOnTopic(id: PeerId, pubsubTopic: PubsubTopic): Promise<boolean>;
 }

@ -37,26 +34,33 @@ export interface IShardReader {
 export class ShardReader implements IShardReader {
   private readonly libp2p: Libp2p;

-  private readonly clusterId: ClusterId;
+  private readonly staticShard: StaticSharding;

   public constructor(options: ShardReaderConstructorOptions) {
     this.libp2p = options.libp2p;

-    this.clusterId = options.networkConfig.clusterId;
+    this.staticShard = this.getStaticShardFromNetworkConfig(
+      options.networkConfig
+    );
   }

-  public async isPeerOnCluster(id: PeerId): Promise<boolean> {
-    const peerRelayShards = await this.getRelayShards(id);
+  public async isPeerOnNetwork(id: PeerId): Promise<boolean> {
+    const shardInfo = await this.getShardInfo(id);

-    if (!peerRelayShards) {
+    if (!shardInfo) {
       return false;
     }

-    return peerRelayShards.clusterId === this.clusterId;
+    const clusterMatch = shardInfo.clusterId === this.staticShard.clusterId;
+    const shardOverlap = this.staticShard.shards.some((s) =>
+      shardInfo.shards.includes(s)
+    );
+
+    return clusterMatch && shardOverlap;
   }

   public async hasShardInfo(id: PeerId): Promise<boolean> {
-    const shardInfo = await this.getRelayShards(id);
+    const shardInfo = await this.getShardInfo(id);
     return !!shardInfo;
   }

@ -65,9 +69,8 @@ export class ShardReader implements IShardReader {
     pubsubTopic: PubsubTopic
   ): Promise<boolean> {
     try {
-      const { clusterId, shard } = pubsubTopicToSingleShardInfo(pubsubTopic);
-      if (clusterId !== this.clusterId) return false;
-      return await this.isPeerOnShard(id, shard);
+      const shardInfo = pubsubTopicToSingleShardInfo(pubsubTopic);
+      return await this.isPeerOnShard(id, shardInfo);
     } catch (error) {
       log.error(
         `Error comparing pubsub topic ${pubsubTopic} with shard info for ${id}`,
@ -77,23 +80,23 @@ export class ShardReader implements IShardReader {
     }
   }

-  public async isPeerOnShard(id: PeerId, shard: ShardId): Promise<boolean> {
-    const peerShardInfo = await this.getRelayShards(id);
-    log.info(
-      `Checking if peer on same shard: this { clusterId: ${this.clusterId}, shardId: ${shard} },` +
-        `${id} { clusterId: ${peerShardInfo?.clusterId}, shards: ${peerShardInfo?.shards} }`
-    );
-    if (!peerShardInfo) {
+  public async isPeerOnShard(
+    id: PeerId,
+    shard: SingleShardInfo
+  ): Promise<boolean> {
+    const peerShardInfo = await this.getShardInfo(id);
+
+    if (!peerShardInfo || shard.shard === undefined) {
       return false;
     }

     return (
-      peerShardInfo.clusterId === this.clusterId &&
-      peerShardInfo.shards.includes(shard)
+      peerShardInfo.clusterId === shard.clusterId &&
+      peerShardInfo.shards.includes(shard.shard)
     );
   }

-  private async getRelayShards(id: PeerId): Promise<ShardInfo | undefined> {
+  private async getShardInfo(id: PeerId): Promise<ShardInfo | undefined> {
     try {
       const peer = await this.libp2p.peerStore.get(id);

@ -103,10 +106,29 @@ export class ShardReader implements IShardReader {
         return undefined;
       }

-      return decodeRelayShard(shardInfoBytes);
+      const decodedShardInfo = decodeRelayShard(shardInfoBytes);
+
+      return decodedShardInfo;
     } catch (error) {
       log.error(`Error getting shard info for ${id}`, error);
       return undefined;
     }
   }
+
+  private getStaticShardFromNetworkConfig(
+    networkConfig: NetworkConfig
+  ): StaticSharding {
+    if ("shards" in networkConfig) {
+      return networkConfig;
+    }
+
+    const shards = networkConfig.contentTopics.map((topic) =>
+      contentTopicToShardIndex(topic)
+    );
+
+    return {
+      clusterId: networkConfig.clusterId!,
+      shards
+    };
+  }
 }
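
The ShardReader rewrite above changes what peer membership means. Both predicates, distilled into plain functions over the ShardInfo shape ({ clusterId, shards }) used throughout this diff; a numeric clusterId is assumed:

import type { ShardInfo } from "@waku/interfaces";

// master (isPeerOnCluster): cluster equality is sufficient.
function isPeerOnCluster(peer: ShardInfo, ourClusterId: number): boolean {
  return peer.clusterId === ourClusterId;
}

// enr-v0.0.3 (isPeerOnNetwork): the cluster must match AND at least one
// advertised shard must overlap with our static shard set.
function isPeerOnNetwork(peer: ShardInfo, ours: ShardInfo): boolean {
  const clusterMatch = peer.clusterId === ours.clusterId;
  const shardOverlap = ours.shards.some((s) => peer.shards.includes(s));
  return clusterMatch && shardOverlap;
}

The spec change above ("should return true even if peer has no overlapping shards" versus "should return false when peer has no overlapping shards") pins down exactly this difference.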
@ -1,7 +1,6 @@
 import { isPeerId, type Peer, type PeerId } from "@libp2p/interface";
 import { peerIdFromString } from "@libp2p/peer-id";
 import { Multiaddr, multiaddr, MultiaddrInput } from "@multiformats/multiaddr";
-import { Libp2p } from "@waku/interfaces";
 import { bytesToUtf8 } from "@waku/utils/bytes";

 /**
@ -50,25 +49,3 @@ export const mapToPeerId = (input: PeerId | MultiaddrInput): PeerId => {
     ? input
     : peerIdFromString(multiaddr(input).getPeerId()!);
 };
-
-/**
- * Checks if the address is supported by the libp2p instance.
- * @param libp2p - The libp2p instance.
- * @param addresses - The addresses to check.
- * @returns True if the addresses are supported, false otherwise.
- */
-export const isAddressesSupported = (
-  libp2p: Libp2p,
-  addresses: Multiaddr[]
-): boolean => {
-  const transports =
-    libp2p?.components?.transportManager?.getTransports() || [];
-
-  if (transports.length === 0) {
-    return false;
-  }
-
-  return transports
-    .map((transport) => transport.dialFilter(addresses))
-    .some((supportedAddresses) => supportedAddresses.length > 0);
-};
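
master keeps the isAddressesSupported helper that the enr-v0.0.3 side removes above. Its core is a transport-level dial filter; distilled here with a structural Transport stand-in so the sketch is self-contained:

import type { Multiaddr } from "@multiformats/multiaddr";

// Structural stand-in for libp2p's Transport: dialFilter keeps only the
// addresses the transport can actually dial.
type TransportLike = {
  dialFilter(addresses: Multiaddr[]): Multiaddr[];
};

function anyAddressSupported(
  transports: TransportLike[],
  addresses: Multiaddr[]
): boolean {
  if (transports.length === 0) {
    return false;
  }
  return transports
    .map((transport) => transport.dialFilter(addresses))
    .some((supported) => supported.length > 0);
}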
@ -1,10 +1,10 @@
-import type { PeerId } from "@libp2p/interface";
+import type { PeerId, Stream } from "@libp2p/interface";
 import type { IncomingStreamData } from "@libp2p/interface-internal";
 import {
   type ContentTopic,
-  type FilterCoreResult,
-  FilterError,
+  type CoreProtocolResult,
   type Libp2p,
+  ProtocolError,
   type PubsubTopic
 } from "@waku/interfaces";
 import { WakuMessage } from "@waku/proto";
@ -42,50 +42,29 @@ export class FilterCore {

   public constructor(
     private handleIncomingMessage: IncomingMessageHandler,
-    private libp2p: Libp2p
+    libp2p: Libp2p
   ) {
     this.streamManager = new StreamManager(
       FilterCodecs.SUBSCRIBE,
       libp2p.components
     );
-  }

-  public async start(): Promise<void> {
-    try {
-      await this.libp2p.handle(FilterCodecs.PUSH, this.onRequest.bind(this), {
+    libp2p
+      .handle(FilterCodecs.PUSH, this.onRequest.bind(this), {
         maxInboundStreams: 100
+      })
+      .catch((e) => {
+        log.error("Failed to register ", FilterCodecs.PUSH, e);
       });
-    } catch (e) {
-      log.error("Failed to register ", FilterCodecs.PUSH, e);
-    }
-  }
-
-  public async stop(): Promise<void> {
-    this.streamManager.stop();
-    try {
-      await this.libp2p.unhandle(FilterCodecs.PUSH);
-    } catch (e) {
-      log.error("Failed to unregister ", FilterCodecs.PUSH, e);
-    }
   }

   public async subscribe(
     pubsubTopic: PubsubTopic,
     peerId: PeerId,
     contentTopics: ContentTopic[]
-  ): Promise<FilterCoreResult> {
+  ): Promise<CoreProtocolResult> {
     const stream = await this.streamManager.getStream(peerId);

-    if (!stream) {
-      return {
-        success: null,
-        failure: {
-          error: FilterError.NO_STREAM_AVAILABLE,
-          peerId: peerId
-        }
-      };
-    }
-
     const request = FilterSubscribeRpc.createSubscribeRequest(
       pubsubTopic,
       contentTopics
@ -109,7 +88,7 @@ export class FilterCore {
       return {
         success: null,
         failure: {
-          error: FilterError.GENERIC_FAIL,
+          error: ProtocolError.GENERIC_FAIL,
           peerId: peerId
         }
       };
@ -124,7 +103,7 @@ export class FilterCore {
       );
       return {
         failure: {
-          error: FilterError.REMOTE_PEER_REJECTED,
+          error: ProtocolError.REMOTE_PEER_REJECTED,
           peerId: peerId
         },
         success: null
@ -141,15 +120,19 @@ export class FilterCore {
     pubsubTopic: PubsubTopic,
     peerId: PeerId,
     contentTopics: ContentTopic[]
-  ): Promise<FilterCoreResult> {
-    const stream = await this.streamManager.getStream(peerId);
-
-    if (!stream) {
-      log.error(`Failed to get a stream for remote peer:${peerId.toString()}`);
+  ): Promise<CoreProtocolResult> {
+    let stream: Stream | undefined;
+    try {
+      stream = await this.streamManager.getStream(peerId);
+    } catch (error) {
+      log.error(
+        `Failed to get a stream for remote peer${peerId.toString()}`,
+        error
+      );
       return {
         success: null,
         failure: {
-          error: FilterError.NO_STREAM_AVAILABLE,
+          error: ProtocolError.NO_STREAM_AVAILABLE,
           peerId: peerId
         }
       };
@ -167,7 +150,7 @@ export class FilterCore {
       return {
         success: null,
         failure: {
-          error: FilterError.GENERIC_FAIL,
+          error: ProtocolError.GENERIC_FAIL,
           peerId: peerId
         }
       };
@ -182,20 +165,9 @@ export class FilterCore {
   public async unsubscribeAll(
     pubsubTopic: PubsubTopic,
     peerId: PeerId
-  ): Promise<FilterCoreResult> {
+  ): Promise<CoreProtocolResult> {
     const stream = await this.streamManager.getStream(peerId);

-    if (!stream) {
-      log.error(`Failed to get a stream for remote peer:${peerId.toString()}`);
-      return {
-        success: null,
-        failure: {
-          error: FilterError.NO_STREAM_AVAILABLE,
-          peerId: peerId
-        }
-      };
-    }
-
     const request = FilterSubscribeRpc.createUnsubscribeAllRequest(pubsubTopic);

     const res = await pipe(
@ -209,7 +181,7 @@ export class FilterCore {
     if (!res || !res.length) {
       return {
         failure: {
-          error: FilterError.NO_RESPONSE,
+          error: ProtocolError.NO_RESPONSE,
           peerId: peerId
         },
         success: null
@ -225,7 +197,7 @@ export class FilterCore {
       );
       return {
         failure: {
-          error: FilterError.REMOTE_PEER_REJECTED,
+          error: ProtocolError.REMOTE_PEER_REJECTED,
           peerId: peerId
         },
         success: null
@ -238,15 +210,19 @@ export class FilterCore {
     };
   }

-  public async ping(peerId: PeerId): Promise<FilterCoreResult> {
-    const stream = await this.streamManager.getStream(peerId);
-
-    if (!stream) {
-      log.error(`Failed to get a stream for remote peer:${peerId.toString()}`);
+  public async ping(peerId: PeerId): Promise<CoreProtocolResult> {
+    let stream: Stream | undefined;
+    try {
+      stream = await this.streamManager.getStream(peerId);
+    } catch (error) {
+      log.error(
+        `Failed to get a stream for remote peer${peerId.toString()}`,
+        error
+      );
       return {
         success: null,
         failure: {
-          error: FilterError.NO_STREAM_AVAILABLE,
+          error: ProtocolError.NO_STREAM_AVAILABLE,
           peerId: peerId
         }
       };
@ -268,7 +244,7 @@ export class FilterCore {
       return {
         success: null,
         failure: {
-          error: FilterError.GENERIC_FAIL,
+          error: ProtocolError.GENERIC_FAIL,
           peerId: peerId
         }
       };
@ -278,7 +254,7 @@ export class FilterCore {
       return {
         success: null,
         failure: {
-          error: FilterError.NO_RESPONSE,
+          error: ProtocolError.NO_RESPONSE,
           peerId: peerId
         }
       };
@ -294,7 +270,7 @@ export class FilterCore {
       return {
         success: null,
         failure: {
-          error: FilterError.REMOTE_PEER_REJECTED,
+          error: ProtocolError.REMOTE_PEER_REJECTED,
           peerId: peerId
         }
       };
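
The FilterCore diff above is mostly an error-enum rename (FilterError versus ProtocolError), but the lifecycle change matters: master registers the PUSH handler in an explicit start() and tears it down in stop(), while enr-v0.0.3 registers it in the constructor with no teardown. A distilled sketch of the master lifecycle; Libp2pLike is a structural stand-in, and the codec string is an assumption (the real value lives in FilterCodecs.PUSH):

// Assumed codec value; master reads it from FilterCodecs.PUSH.
const FILTER_PUSH_CODEC = "/vac/waku/filter-push/2.0.0-beta1";

type Libp2pLike = {
  handle(
    codec: string,
    handler: (data: unknown) => void,
    opts?: { maxInboundStreams?: number }
  ): Promise<void>;
  unhandle(codec: string): Promise<void>;
};

async function startFilter(
  libp2p: Libp2pLike,
  onRequest: (data: unknown) => void
): Promise<void> {
  try {
    await libp2p.handle(FILTER_PUSH_CODEC, onRequest, {
      maxInboundStreams: 100
    });
  } catch (e) {
    console.error("Failed to register", FILTER_PUSH_CODEC, e);
  }
}

async function stopFilter(libp2p: Libp2pLike): Promise<void> {
  try {
    // Unregistering lets a stopped node release the protocol handler cleanly.
    await libp2p.unhandle(FILTER_PUSH_CODEC);
  } catch (e) {
    console.error("Failed to unregister", FILTER_PUSH_CODEC, e);
  }
}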
@ -1,7 +0,0 @@
-export const CODECS = {
-  v2: "/vac/waku/lightpush/2.0.0-beta1",
-  v3: "/vac/waku/lightpush/3.0.0"
-} as const;
-
-export const LightPushCodecV2 = CODECS.v2;
-export const LightPushCodec = CODECS.v3;
@ -1,2 +1 @@
-export { LightPushCore } from "./light_push.js";
-export { LightPushCodec, LightPushCodecV2 } from "./constants.js";
+export { LightPushCore, LightPushCodec, PushResponse } from "./light_push.js";
@ -1,11 +1,14 @@
 import type { PeerId, Stream } from "@libp2p/interface";
 import {
+  type CoreProtocolResult,
   type IEncoder,
   type IMessage,
   type Libp2p,
-  type LightPushCoreResult,
-  LightPushError
+  ProtocolError,
+  type ThisOrThat
 } from "@waku/interfaces";
+import { PushResponse } from "@waku/proto";
+import { isMessageSizeUnderCap } from "@waku/utils";
 import { Logger } from "@waku/utils";
 import all from "it-all";
 import * as lp from "it-length-prefixed";
@ -14,76 +17,93 @@ import { Uint8ArrayList } from "uint8arraylist";

 import { StreamManager } from "../stream_manager/index.js";

-import { CODECS } from "./constants.js";
-import { ProtocolHandler } from "./protocol_handler.js";
+import { PushRpc } from "./push_rpc.js";
+import { isRLNResponseError } from "./utils.js";

 const log = new Logger("light-push");

+export const LightPushCodec = "/vac/waku/lightpush/2.0.0-beta1";
+export { PushResponse };
+
+type PreparePushMessageResult = ThisOrThat<"query", PushRpc>;
+
 /**
  * Implements the [Waku v2 Light Push protocol](https://rfc.vac.dev/spec/19/).
  */
 export class LightPushCore {
   private readonly streamManager: StreamManager;
-  private readonly streamManagerV2: StreamManager;

-  public readonly multicodec = [CODECS.v3, CODECS.v2];
+  public readonly multicodec = LightPushCodec;

-  public constructor(private libp2p: Libp2p) {
-    this.streamManagerV2 = new StreamManager(CODECS.v2, libp2p.components);
-    this.streamManager = new StreamManager(CODECS.v3, libp2p.components);
+  public constructor(libp2p: Libp2p) {
+    this.streamManager = new StreamManager(LightPushCodec, libp2p.components);
   }

-  public stop(): void {
-    this.streamManager.stop();
-    this.streamManagerV2.stop();
+  private async preparePushMessage(
+    encoder: IEncoder,
+    message: IMessage
+  ): Promise<PreparePushMessageResult> {
+    try {
+      if (!message.payload || message.payload.length === 0) {
+        log.error("Failed to send waku light push: payload is empty");
+        return { query: null, error: ProtocolError.EMPTY_PAYLOAD };
+      }
+
+      if (!(await isMessageSizeUnderCap(encoder, message))) {
+        log.error("Failed to send waku light push: message is bigger than 1MB");
+        return { query: null, error: ProtocolError.SIZE_TOO_BIG };
+      }
+
+      const protoMessage = await encoder.toProtoObj(message);
+      if (!protoMessage) {
+        log.error("Failed to encode to protoMessage, aborting push");
+        return {
+          query: null,
+          error: ProtocolError.ENCODE_FAILED
+        };
+      }
+
+      const query = PushRpc.createRequest(protoMessage, encoder.pubsubTopic);
+      return { query, error: null };
+    } catch (error) {
+      log.error("Failed to prepare push message", error);
+
+      return {
+        query: null,
+        error: ProtocolError.GENERIC_FAIL
+      };
+    }
   }

   public async send(
     encoder: IEncoder,
     message: IMessage,
-    peerId: PeerId,
-    useLegacy: boolean = false
-  ): Promise<LightPushCoreResult> {
-    const protocol = await this.getProtocol(peerId, useLegacy);
-
-    log.info(
-      `Sending light push request to peer:${peerId.toString()}, protocol:${protocol}`
-    );
-
-    if (!protocol) {
-      return {
-        success: null,
-        failure: {
-          error: LightPushError.GENERIC_FAIL,
-          peerId
-        }
-      };
-    }
-
-    const { rpc, error: prepError } = await ProtocolHandler.preparePushMessage(
+    peerId: PeerId
+  ): Promise<CoreProtocolResult> {
+    const { query, error: preparationError } = await this.preparePushMessage(
       encoder,
-      message,
-      protocol
+      message
     );

-    if (prepError) {
+    if (preparationError || !query) {
       return {
         success: null,
         failure: {
-          error: prepError,
+          error: preparationError,
           peerId
         }
       };
     }

-    const stream = await this.getStream(peerId, protocol);
-    if (!stream) {
-      log.error(`Failed to get a stream for remote peer:${peerId.toString()}`);
+    let stream: Stream;
+    try {
+      stream = await this.streamManager.getStream(peerId);
+    } catch (error) {
+      log.error("Failed to get stream", error);
       return {
         success: null,
         failure: {
-          error: LightPushError.NO_STREAM_AVAILABLE,
+          error: ProtocolError.NO_STREAM_AVAILABLE,
           peerId: peerId
         }
       };
@ -92,74 +112,76 @@ export class LightPushCore {
     let res: Uint8ArrayList[] | undefined;
     try {
       res = await pipe(
-        [rpc.encode()],
+        [query.encode()],
         lp.encode,
         stream,
         lp.decode,
         async (source) => await all(source)
       );
     } catch (err) {
+      // can fail only because of `stream` abortion
       log.error("Failed to send waku light push request", err);
       return {
         success: null,
         failure: {
-          error: LightPushError.STREAM_ABORTED,
+          error: ProtocolError.STREAM_ABORTED,
           peerId: peerId
         }
       };
     }

     const bytes = new Uint8ArrayList();
-    res.forEach((chunk) => bytes.append(chunk));
+    res.forEach((chunk) => {
+      bytes.append(chunk);
+    });

-    if (bytes.length === 0) {
+    let response: PushResponse | undefined;
+    try {
+      response = PushRpc.decode(bytes).response;
+    } catch (err) {
+      log.error("Failed to decode push reply", err);
       return {
         success: null,
         failure: {
-          error: LightPushError.NO_RESPONSE,
+          error: ProtocolError.DECODE_FAILED,
           peerId: peerId
         }
       };
     }

-    return ProtocolHandler.handleResponse(bytes, protocol, peerId);
-  }
-
-  private async getProtocol(
-    peerId: PeerId,
-    useLegacy: boolean
-  ): Promise<string | undefined> {
-    try {
-      const peer = await this.libp2p.peerStore.get(peerId);
-
-      if (
-        useLegacy ||
-        (!peer.protocols.includes(CODECS.v3) &&
-          peer.protocols.includes(CODECS.v2))
-      ) {
-        return CODECS.v2;
-      } else if (peer.protocols.includes(CODECS.v3)) {
-        return CODECS.v3;
-      } else {
-        throw new Error("No supported protocol found");
-      }
-    } catch (error) {
-      log.error("Failed to get protocol", error);
-      return undefined;
+    if (!response) {
+      log.error("Remote peer fault: No response in PushRPC");
+      return {
+        success: null,
+        failure: {
+          error: ProtocolError.NO_RESPONSE,
+          peerId: peerId
+        }
+      };
     }
-  }

-  private async getStream(
-    peerId: PeerId,
-    protocol: string
-  ): Promise<Stream | undefined> {
-    switch (protocol) {
-      case CODECS.v2:
-        return this.streamManagerV2.getStream(peerId);
-      case CODECS.v3:
-        return this.streamManager.getStream(peerId);
-      default:
-        return undefined;
+    if (isRLNResponseError(response.info)) {
+      log.error("Remote peer fault: RLN generation");
+      return {
+        success: null,
+        failure: {
+          error: ProtocolError.RLN_PROOF_GENERATION,
+          peerId: peerId
+        }
+      };
     }
+
+    if (!response.isSuccess) {
+      log.error("Remote peer rejected the message: ", response.info);
+      return {
+        success: null,
+        failure: {
+          error: ProtocolError.REMOTE_PEER_REJECTED,
+          peerId: peerId
+        }
+      };
+    }
+
+    return { success: peerId, failure: null };
   }
 }
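
The light_push diff above shows master supporting both the legacy v2 codec and v3, selecting per peer, while enr-v0.0.3 only speaks v2. The selection rule from the removed getProtocol, distilled; codec strings are copied from the deleted constants.ts shown earlier in this compare:

const LIGHT_PUSH_V2 = "/vac/waku/lightpush/2.0.0-beta1";
const LIGHT_PUSH_V3 = "/vac/waku/lightpush/3.0.0";

function pickLightPushCodec(
  peerProtocols: string[],
  useLegacy: boolean = false
): string {
  // Caller forced legacy, or the peer only advertises v2.
  if (
    useLegacy ||
    (!peerProtocols.includes(LIGHT_PUSH_V3) &&
      peerProtocols.includes(LIGHT_PUSH_V2))
  ) {
    return LIGHT_PUSH_V2;
  }
  if (peerProtocols.includes(LIGHT_PUSH_V3)) {
    return LIGHT_PUSH_V3;
  }
  throw new Error("No supported protocol found");
}

The deleted ProtocolHandler below carries the matching response logic: v3 replies are judged by status code, v2 replies by the isSuccess flag plus the RLN error probe.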
@@ -1,191 +0,0 @@
-import type { PeerId } from "@libp2p/interface";
-import type { IEncoder, IMessage, LightPushCoreResult } from "@waku/interfaces";
-import { LightPushError, LightPushStatusCode } from "@waku/interfaces";
-import { PushResponse, WakuMessage } from "@waku/proto";
-import { isMessageSizeUnderCap, Logger } from "@waku/utils";
-import { Uint8ArrayList } from "uint8arraylist";
-
-import { CODECS } from "./constants.js";
-import { PushRpcV2 } from "./push_rpc.js";
-import { PushRpc } from "./push_rpc_v3.js";
-import { isRLNResponseError } from "./utils.js";
-
-type VersionedPushRpc =
-  | ({ version: "v2" } & PushRpcV2)
-  | ({ version: "v3" } & PushRpc);
-
-type PreparePushMessageResult =
-  | { rpc: VersionedPushRpc; error: null }
-  | { rpc: null; error: LightPushError };
-
-const log = new Logger("light-push:protocol-handler");
-
-export class ProtocolHandler {
-  public static async preparePushMessage(
-    encoder: IEncoder,
-    message: IMessage,
-    protocol: string
-  ): Promise<PreparePushMessageResult> {
-    try {
-      if (!message.payload || message.payload.length === 0) {
-        log.error("Failed to send waku light push: payload is empty");
-        return { rpc: null, error: LightPushError.EMPTY_PAYLOAD };
-      }
-
-      if (!(await isMessageSizeUnderCap(encoder, message))) {
-        log.error("Failed to send waku light push: message is bigger than 1MB");
-        return { rpc: null, error: LightPushError.SIZE_TOO_BIG };
-      }
-
-      const protoMessage = await encoder.toProtoObj(message);
-      if (!protoMessage) {
-        log.error("Failed to encode to protoMessage, aborting push");
-        return { rpc: null, error: LightPushError.ENCODE_FAILED };
-      }
-
-      if (protocol === CODECS.v3) {
-        log.info("Creating v3 RPC message");
-        return {
-          rpc: ProtocolHandler.createV3Rpc(protoMessage, encoder.pubsubTopic),
-          error: null
-        };
-      }
-
-      log.info("Creating v2 RPC message");
-      return {
-        rpc: ProtocolHandler.createV2Rpc(protoMessage, encoder.pubsubTopic),
-        error: null
-      };
-    } catch (err) {
-      log.error("Failed to prepare push message", err);
-      return { rpc: null, error: LightPushError.GENERIC_FAIL };
-    }
-  }
-
-  /**
-   * Decode and evaluate a LightPush response according to the protocol version
-   */
-  public static handleResponse(
-    bytes: Uint8ArrayList,
-    protocol: string,
-    peerId: PeerId
-  ): LightPushCoreResult {
-    if (protocol === CODECS.v3) {
-      return ProtocolHandler.handleV3Response(bytes, peerId);
-    }
-
-    return ProtocolHandler.handleV2Response(bytes, peerId);
-  }
-
-  private static handleV3Response(
-    bytes: Uint8ArrayList,
-    peerId: PeerId
-  ): LightPushCoreResult {
-    try {
-      const decodedRpcV3 = PushRpc.decodeResponse(bytes);
-      const statusCode = decodedRpcV3.statusCode;
-      const statusDesc = decodedRpcV3.statusDesc;
-
-      if (statusCode !== LightPushStatusCode.SUCCESS) {
-        const error = LightPushError.REMOTE_PEER_REJECTED;
-        log.error(
-          `Remote peer rejected with v3 status code ${statusCode}: ${statusDesc}`
-        );
-        return {
-          success: null,
-          failure: {
-            error,
-            peerId: peerId
-          }
-        };
-      }
-
-      if (decodedRpcV3.relayPeerCount !== undefined) {
-        log.info(`Message relayed to ${decodedRpcV3.relayPeerCount} peers`);
-      }
-
-      return { success: peerId, failure: null };
-    } catch (err) {
-      return {
-        success: null,
-        failure: {
-          error: LightPushError.DECODE_FAILED,
-          peerId: peerId
-        }
-      };
-    }
-  }
-
-  private static handleV2Response(
-    bytes: Uint8ArrayList,
-    peerId: PeerId
-  ): LightPushCoreResult {
-    let response: PushResponse | undefined;
-    try {
-      const decodedRpc = PushRpcV2.decode(bytes);
-      response = decodedRpc.response;
-    } catch (err) {
-      return {
-        success: null,
-        failure: {
-          error: LightPushError.DECODE_FAILED,
-          peerId: peerId
-        }
-      };
-    }
-
-    if (!response) {
-      return {
-        success: null,
-        failure: {
-          error: LightPushError.NO_RESPONSE,
-          peerId: peerId
-        }
-      };
-    }
-
-    if (isRLNResponseError(response.info)) {
-      log.error("Remote peer fault: RLN generation");
-      return {
-        success: null,
-        failure: {
-          error: LightPushError.RLN_PROOF_GENERATION,
-          peerId: peerId
-        }
-      };
-    }
-
-    if (!response.isSuccess) {
-      log.error("Remote peer rejected the message: ", response.info);
-      return {
-        success: null,
-        failure: {
-          error: LightPushError.REMOTE_PEER_REJECTED,
-          peerId: peerId
-        }
-      };
-    }
-
-    return { success: peerId, failure: null };
-  }
-
-  private static createV2Rpc(
-    message: WakuMessage,
-    pubsubTopic: string
-  ): VersionedPushRpc {
-    const v2Rpc = PushRpcV2.createRequest(message, pubsubTopic);
-    return Object.assign(v2Rpc, { version: "v2" as const });
-  }
-
-  private static createV3Rpc(
-    message: WakuMessage,
-    pubsubTopic: string
-  ): VersionedPushRpc {
-    if (!message.timestamp) {
-      message.timestamp = BigInt(Date.now()) * BigInt(1_000_000);
-    }
-
-    const v3Rpc = PushRpc.createRequest(message, pubsubTopic);
-    return Object.assign(v3Rpc, { version: "v3" as const });
-  }
-}
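For orientation, here is a minimal sketch of how the handler deleted above is driven from a send path. It is not code from the diff: the stream wiring is omitted, and sendViaLightPush, encoder, message, peerId and responseBytes are hypothetical stand-ins. Only the ProtocolHandler and CODECS symbols come from the file itself.

import type { PeerId } from "@libp2p/interface";
import type { IEncoder, IMessage, LightPushCoreResult } from "@waku/interfaces";
import { Uint8ArrayList } from "uint8arraylist";

import { CODECS } from "./constants.js";
import { ProtocolHandler } from "./protocol_handler.js"; // module path assumed

// Prepare a versioned RPC, write rpc.encode() to a stream (not shown here),
// then classify whatever bytes the peer sent back.
async function sendViaLightPush(
  encoder: IEncoder,
  message: IMessage,
  peerId: PeerId,
  responseBytes: Uint8ArrayList
): Promise<LightPushCoreResult> {
  const protocol = CODECS.v3; // or CODECS.v2, negotiated elsewhere

  const { rpc, error } = await ProtocolHandler.preparePushMessage(
    encoder,
    message,
    protocol
  );
  if (error !== null || rpc === null) {
    return { success: null, failure: { error: error!, peerId } };
  }

  const requestBytes = rpc.encode(); // bytes to write on the stream
  void requestBytes;

  // Decode and evaluate the reply for the same protocol version.
  return ProtocolHandler.handleResponse(responseBytes, protocol, peerId);
}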
@@ -2,14 +2,14 @@ import { proto_lightpush as proto } from "@waku/proto";
 import type { Uint8ArrayList } from "uint8arraylist";
 import { v4 as uuid } from "uuid";
 
-export class PushRpcV2 {
+export class PushRpc {
   public constructor(public proto: proto.PushRpc) {}
 
   public static createRequest(
     message: proto.WakuMessage,
     pubsubTopic: string
-  ): PushRpcV2 {
-    return new PushRpcV2({
+  ): PushRpc {
+    return new PushRpc({
       requestId: uuid(),
       request: {
         message: message,
@@ -19,9 +19,9 @@ export class PushRpcV2 {
     });
   }
 
-  public static decode(bytes: Uint8ArrayList): PushRpcV2 {
+  public static decode(bytes: Uint8ArrayList): PushRpc {
     const res = proto.PushRpc.decode(bytes);
-    return new PushRpcV2(res);
+    return new PushRpc(res);
   }
 
   public encode(): Uint8Array {
@@ -1,162 +0,0 @@
-import { proto_lightpush as proto } from "@waku/proto";
-import type { Uint8ArrayList } from "uint8arraylist";
-import { v4 as uuid } from "uuid";
-
-/**
- * LightPush v3 protocol RPC handler.
- * Implements the v3 message format with correct field numbers:
- * - requestId: 1
- * - pubsubTopic: 20
- * - message: 21
- */
-export class PushRpc {
-  public constructor(
-    public proto: proto.LightPushRequestV3 | proto.LightPushResponseV3
-  ) {}
-
-  /**
-   * Create a v3 request message with proper field numbering
-   */
-  public static createRequest(
-    message: proto.WakuMessage,
-    pubsubTopic: string
-  ): PushRpc {
-    return new PushRpc({
-      requestId: uuid(),
-      pubsubTopic: pubsubTopic,
-      message: message
-    });
-  }
-
-  /**
-   * Create a v3 response message with status code handling
-   */
-  public static createResponse(
-    requestId: string,
-    statusCode: number,
-    statusDesc?: string,
-    relayPeerCount?: number
-  ): PushRpc {
-    return new PushRpc({
-      requestId,
-      statusCode,
-      statusDesc,
-      relayPeerCount
-    });
-  }
-
-  /**
-   * Decode v3 request message
-   */
-  public static decodeRequest(bytes: Uint8ArrayList): PushRpc {
-    const res = proto.LightPushRequestV3.decode(bytes);
-    return new PushRpc(res);
-  }
-
-  /**
-   * Decode v3 response message
-   */
-  public static decodeResponse(bytes: Uint8ArrayList): PushRpc {
-    const res = proto.LightPushResponseV3.decode(bytes);
-    return new PushRpc(res);
-  }
-
-  /**
-   * Encode message to bytes
-   */
-  public encode(): Uint8Array {
-    if (this.isRequest()) {
-      return proto.LightPushRequestV3.encode(
-        this.proto as proto.LightPushRequestV3
-      );
-    } else {
-      return proto.LightPushResponseV3.encode(
-        this.proto as proto.LightPushResponseV3
-      );
-    }
-  }
-
-  /**
-   * Get request data (if this is a request message)
-   */
-  public get request(): proto.LightPushRequestV3 | undefined {
-    return this.isRequest()
-      ? (this.proto as proto.LightPushRequestV3)
-      : undefined;
-  }
-
-  /**
-   * Get response data (if this is a response message)
-   */
-  public get response(): proto.LightPushResponseV3 | undefined {
-    return this.isResponse()
-      ? (this.proto as proto.LightPushResponseV3)
-      : undefined;
-  }
-
-  /**
-   * Get the request ID
-   */
-  public get requestId(): string {
-    return this.proto.requestId;
-  }
-
-  /**
-   * Get the pubsub topic (only available in requests)
-   */
-  public get pubsubTopic(): string | undefined {
-    return this.isRequest()
-      ? (this.proto as proto.LightPushRequestV3).pubsubTopic
-      : undefined;
-  }
-
-  /**
-   * Get the message (only available in requests)
-   */
-  public get message(): proto.WakuMessage | undefined {
-    return this.isRequest()
-      ? (this.proto as proto.LightPushRequestV3).message
-      : undefined;
-  }
-
-  /**
-   * Get the status code (only available in responses)
-   */
-  public get statusCode(): number | undefined {
-    return this.isResponse()
-      ? (this.proto as proto.LightPushResponseV3).statusCode
-      : undefined;
-  }
-
-  /**
-   * Get the status description (only available in responses)
-   */
-  public get statusDesc(): string | undefined {
-    return this.isResponse()
-      ? (this.proto as proto.LightPushResponseV3).statusDesc
-      : undefined;
-  }
-
-  /**
-   * Get the relay peer count (only available in responses)
-   */
-  public get relayPeerCount(): number | undefined {
-    return this.isResponse()
-      ? (this.proto as proto.LightPushResponseV3).relayPeerCount
-      : undefined;
-  }
-
-  /**
-   * Check if this is a request message
-   */
-  private isRequest(): boolean {
-    return "pubsubTopic" in this.proto && "message" in this.proto;
-  }
-
-  /**
-   * Check if this is a response message
-   */
-  private isResponse(): boolean {
-    return "statusCode" in this.proto;
-  }
-}
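A short round-trip sketch of the class deleted above, using only its own API; the two stand-alone function names are invented for illustration, and the module path is assumed from the imports in the protocol handler.

import { proto_lightpush as proto } from "@waku/proto";
import { LightPushStatusCode } from "@waku/interfaces";
import { Uint8ArrayList } from "uint8arraylist";

import { PushRpc } from "./push_rpc_v3.js"; // module path assumed

// Request side: encode() selects LightPushRequestV3 because both
// pubsubTopic and message are set on the underlying proto object.
function buildV3Request(
  message: proto.WakuMessage,
  pubsubTopic: string
): Uint8Array {
  return PushRpc.createRequest(message, pubsubTopic).encode();
}

// Response side: statusCode is only defined for responses, so a
// missing or non-SUCCESS code is treated as a failure here.
function isV3Success(replyBytes: Uint8ArrayList): boolean {
  const reply = PushRpc.decodeResponse(replyBytes);
  return reply.statusCode === LightPushStatusCode.SUCCESS;
}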
@@ -1,46 +1,30 @@
-import type { AutoSharding, IProtoMessage } from "@waku/interfaces";
-import { createRoutingInfo } from "@waku/utils";
-import { bytesToHex } from "@waku/utils/bytes";
+import type { IProtoMessage } from "@waku/interfaces";
+import { contentTopicToPubsubTopic } from "@waku/utils";
 import { expect } from "chai";
 import fc from "fast-check";
 
-import { messageHash } from "../message_hash/index.js";
-
-import {
-  createDecoder,
-  createEncoder,
-  DecodedMessage,
-  proto
-} from "./version_0.js";
-
-const testContentTopic = "/js-waku/1/tests/bytes";
-
-const testNetworkConfig: AutoSharding = {
-  clusterId: 0,
-  numShardsInCluster: 8
-};
-const testRoutingInfo = createRoutingInfo(testNetworkConfig, {
-  contentTopic: testContentTopic
-});
+import { createDecoder, createEncoder, DecodedMessage } from "./version_0.js";
+
+const contentTopic = "/js-waku/1/tests/bytes";
+const pubsubTopic = contentTopicToPubsubTopic(contentTopic);
 
 describe("Waku Message version 0", function () {
   it("Round trip binary serialization", async function () {
     await fc.assert(
       fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
         const encoder = createEncoder({
-          contentTopic: testContentTopic,
-          routingInfo: testRoutingInfo
+          contentTopic
         });
         const bytes = await encoder.toWire({ payload });
-        const decoder = createDecoder(testContentTopic, testRoutingInfo);
+        const decoder = createDecoder(contentTopic);
         const protoResult = await decoder.fromWireToProtoObj(bytes);
         const result = (await decoder.fromProtoObj(
-          testRoutingInfo.pubsubTopic,
+          pubsubTopic,
           protoResult!
         )) as DecodedMessage;
 
-        expect(result.contentTopic).to.eq(testContentTopic);
-        expect(result.pubsubTopic).to.eq(testRoutingInfo.pubsubTopic);
+        expect(result.contentTopic).to.eq(contentTopic);
+        expect(result.pubsubTopic).to.eq(pubsubTopic);
         expect(result.version).to.eq(0);
         expect(result.ephemeral).to.be.false;
         expect(result.payload).to.deep.eq(payload);
@@ -53,15 +37,14 @@ describe("Waku Message version 0", function () {
     await fc.assert(
       fc.asyncProperty(fc.uint8Array({ minLength: 1 }), async (payload) => {
         const encoder = createEncoder({
-          contentTopic: testContentTopic,
-          routingInfo: testRoutingInfo,
+          contentTopic,
           ephemeral: true
         });
         const bytes = await encoder.toWire({ payload });
-        const decoder = createDecoder(testContentTopic, testRoutingInfo);
+        const decoder = createDecoder(contentTopic);
         const protoResult = await decoder.fromWireToProtoObj(bytes);
         const result = (await decoder.fromProtoObj(
-          testRoutingInfo.pubsubTopic,
+          pubsubTopic,
           protoResult!
         )) as DecodedMessage;
 
@@ -85,16 +68,15 @@ describe("Waku Message version 0", function () {
       };
 
       const encoder = createEncoder({
-        contentTopic: testContentTopic,
-        routingInfo: testRoutingInfo,
+        contentTopic,
         ephemeral: true,
         metaSetter
       });
       const bytes = await encoder.toWire({ payload });
-      const decoder = createDecoder(testContentTopic, testRoutingInfo);
+      const decoder = createDecoder(contentTopic);
       const protoResult = await decoder.fromWireToProtoObj(bytes);
       const result = (await decoder.fromProtoObj(
-        testRoutingInfo.pubsubTopic,
+        pubsubTopic,
         protoResult!
       )) as DecodedMessage;
 
@@ -117,34 +99,28 @@ describe("Waku Message version 0", function () {
 describe("Ensures content topic is defined", () => {
   it("Encoder throws on undefined content topic", () => {
     const wrapper = function (): void {
-      createEncoder({
-        contentTopic: undefined as unknown as string,
-        routingInfo: testRoutingInfo
-      });
+      createEncoder({ contentTopic: undefined as unknown as string });
     };
 
     expect(wrapper).to.throw("Content topic must be specified");
   });
   it("Encoder throws on empty string content topic", () => {
     const wrapper = function (): void {
-      createEncoder({
-        contentTopic: "",
-        routingInfo: testRoutingInfo
-      });
+      createEncoder({ contentTopic: "" });
     };
 
     expect(wrapper).to.throw("Content topic must be specified");
   });
   it("Decoder throws on undefined content topic", () => {
     const wrapper = function (): void {
-      createDecoder(undefined as unknown as string, testRoutingInfo);
+      createDecoder(undefined as unknown as string);
     };
 
     expect(wrapper).to.throw("Content topic must be specified");
   });
   it("Decoder throws on empty string content topic", () => {
     const wrapper = function (): void {
-      createDecoder("", testRoutingInfo);
+      createDecoder("");
     };
 
     expect(wrapper).to.throw("Content topic must be specified");
@@ -154,73 +130,23 @@ describe("Ensures content topic is defined", () => {
 describe("Sets sharding configuration correctly", () => {
   it("uses static shard pubsub topic instead of autosharding when set", async () => {
     // Create an encoder setup to use autosharding
-    const contentTopic = "/myapp/1/test/proto";
+    const ContentTopic = "/waku/2/content/test.js";
     const autoshardingEncoder = createEncoder({
-      contentTopic: contentTopic,
-      routingInfo: createRoutingInfo(testNetworkConfig, { contentTopic })
+      pubsubTopicShardInfo: { clusterId: 0 },
+      contentTopic: ContentTopic
     });
 
     // When autosharding is enabled, we expect the shard index to be 1
-    expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0");
+    expect(autoshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/1");
 
     // Create an encoder setup to use static sharding with the same content topic
+    const singleShardInfo = { clusterId: 0, shard: 0 };
     const staticshardingEncoder = createEncoder({
-      contentTopic: contentTopic,
-      routingInfo: createRoutingInfo({ clusterId: 0 }, { shardId: 3 })
+      contentTopic: ContentTopic,
+      pubsubTopicShardInfo: singleShardInfo
     });
 
     // When static sharding is enabled, we expect the shard index to be 0
-    expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/3");
-  });
-});
-
-describe("DecodedMessage lazy hash initialization", () => {
-  it("should compute hash only when first accessed", () => {
-    const pubsubTopic = "/waku/2/default-waku/proto";
-    const protoMessage: proto.WakuMessage = {
-      payload: new Uint8Array([1, 2, 3]),
-      contentTopic: "/test/1/test-proto/proto",
-      timestamp: BigInt(1234567890000000),
-      ephemeral: false
-    };
-
-    const message = new DecodedMessage(pubsubTopic, protoMessage);
-
-    expect((message as any)._hash).to.be.undefined;
-    expect((message as any)._hashStr).to.be.undefined;
-
-    const hash = message.hash;
-    expect((message as any)._hash).to.not.be.undefined;
-    expect((message as any)._hashStr).to.be.undefined;
-
-    const hashStr = message.hashStr;
-    expect((message as any)._hashStr).to.not.be.undefined;
-
-    const expectedHash = messageHash(
-      pubsubTopic,
-      protoMessage as IProtoMessage
-    );
-    expect(hash).to.deep.equal(expectedHash);
-    expect(hashStr).to.equal(bytesToHex(expectedHash));
-  });
-
-  it("should return cached hash on subsequent access", () => {
-    const pubsubTopic = "/waku/2/default-waku/proto";
-    const protoMessage: proto.WakuMessage = {
-      payload: new Uint8Array([1, 2, 3]),
-      contentTopic: "/test/1/test-proto/proto",
-      timestamp: BigInt(1234567890000000),
-      ephemeral: false
-    };
-
-    const message = new DecodedMessage(pubsubTopic, protoMessage);
-
-    const hash1 = message.hash;
-    const hash2 = message.hash;
-    expect(hash1).to.equal(hash2);
-
-    const hashStr1 = message.hashStr;
-    const hashStr2 = message.hashStr;
-    expect(hashStr1).to.equal(hashStr2);
+    expect(staticshardingEncoder.pubsubTopic).to.be.eq("/waku/2/rs/0/0");
   });
 });
@@ -7,14 +7,11 @@ import type {
   IMetaSetter,
   IProtoMessage,
   IRateLimitProof,
-  IRoutingInfo,
-  PubsubTopic
+  PubsubTopic,
+  SingleShardInfo
 } from "@waku/interfaces";
 import { proto_message as proto } from "@waku/proto";
-import { Logger } from "@waku/utils";
-import { bytesToHex } from "@waku/utils/bytes";
+import { determinePubsubTopic, Logger } from "@waku/utils";
 
-import { messageHash } from "../message_hash/index.js";
 
 const log = new Logger("message:version-0");
 const OneMillion = BigInt(1_000_000);
@@ -23,9 +20,6 @@ export const Version = 0;
 export { proto };
 
 export class DecodedMessage implements IDecodedMessage {
-  private _hash: Uint8Array | undefined;
-  private _hashStr: string | undefined;
-
   public constructor(
     public pubsubTopic: string,
     private proto: proto.WakuMessage
@@ -43,20 +37,6 @@ export class DecodedMessage implements IDecodedMessage {
     return this.proto.contentTopic;
   }
 
-  public get hash(): Uint8Array {
-    if (this._hash === undefined) {
-      this._hash = messageHash(this.pubsubTopic, this.proto as IProtoMessage);
-    }
-    return this._hash;
-  }
-
-  public get hashStr(): string {
-    if (this._hashStr === undefined) {
-      this._hashStr = bytesToHex(this.hash);
-    }
-    return this._hashStr;
-  }
-
   public get timestamp(): Date | undefined {
     // In the case we receive a value that is bigger than JS's max number,
     // we catch the error and return undefined.
@@ -91,7 +71,7 @@ export class Encoder implements IEncoder {
   public constructor(
     public contentTopic: string,
     public ephemeral: boolean = false,
-    public routingInfo: IRoutingInfo,
+    public pubsubTopic: PubsubTopic,
     public metaSetter?: IMetaSetter
   ) {
     if (!contentTopic || contentTopic === "") {
@@ -99,10 +79,6 @@ export class Encoder implements IEncoder {
     }
   }
 
-  public get pubsubTopic(): PubsubTopic {
-    return this.routingInfo.pubsubTopic;
-  }
-
   public async toWire(message: IMessage): Promise<Uint8Array> {
     return proto.WakuMessage.encode(await this.toProtoObj(message));
   }
@@ -136,32 +112,32 @@ export class Encoder implements IEncoder {
  * format to be sent over the Waku network. The resulting encoder can then be
  * pass to { @link @waku/interfaces!ISender.send } to automatically encode outgoing
  * messages.
- *
- * Note that a routing info may be tied to a given content topic, this is not checked by the encoder.
  */
 export function createEncoder({
+  pubsubTopic,
+  pubsubTopicShardInfo,
   contentTopic,
-  routingInfo,
   ephemeral,
   metaSetter
 }: EncoderOptions): Encoder {
-  return new Encoder(contentTopic, ephemeral, routingInfo, metaSetter);
+  return new Encoder(
+    contentTopic,
+    ephemeral,
+    determinePubsubTopic(contentTopic, pubsubTopic ?? pubsubTopicShardInfo),
+    metaSetter
+  );
 }
 
 export class Decoder implements IDecoder<IDecodedMessage> {
   public constructor(
-    public contentTopic: string,
-    public routingInfo: IRoutingInfo
+    public pubsubTopic: PubsubTopic,
+    public contentTopic: string
   ) {
     if (!contentTopic || contentTopic === "") {
       throw new Error("Content topic must be specified");
     }
   }
 
-  public get pubsubTopic(): PubsubTopic {
-    return this.routingInfo.pubsubTopic;
-  }
-
   public fromWireToProtoObj(
     bytes: Uint8Array
   ): Promise<IProtoMessage | undefined> {
@@ -206,13 +182,13 @@ export class Decoder implements IDecoder<IDecodedMessage> {
  * messages.
  *
  * @param contentTopic The resulting decoder will only decode messages with this content topic.
- * @param routingInfo Routing information such as cluster id and shard id on which the message is expected to be received.
- *
- * Note that a routing info may be tied to a given content topic, this is not checked by the encoder.
  */
 export function createDecoder(
   contentTopic: string,
-  routingInfo: IRoutingInfo
+  pubsubTopicShardInfo?: SingleShardInfo | PubsubTopic
 ): Decoder {
-  return new Decoder(contentTopic, routingInfo);
+  return new Decoder(
    determinePubsubTopic(contentTopic, pubsubTopicShardInfo),
+    contentTopic
+  );
 }
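The hunks above amount to an API swap at every call site: master derives the pubsub topic from an explicit routing info object, while enr-v0.0.31 derives it from an optional shard hint. A sketch of the master-side call pattern, using only symbols that appear in the diff; the cluster configuration values are taken from the spec file above.

import { createRoutingInfo } from "@waku/utils"; // master-side helper
import { createDecoder, createEncoder } from "./version_0.js";

const contentTopic = "/js-waku/1/tests/bytes";

// master: routing info is built once and threaded through both sides,
// so encoder.pubsubTopic === routingInfo.pubsubTopic (e.g. "/waku/2/rs/0/0").
const routingInfo = createRoutingInfo(
  { clusterId: 0, numShardsInCluster: 8 }, // AutoSharding config from the spec
  { contentTopic }
);
const encoder = createEncoder({ contentTopic, routingInfo });
const decoder = createDecoder(contentTopic, routingInfo);

// enr-v0.0.31 equivalents, per the right-hand side of the diff:
//   createEncoder({ contentTopic, pubsubTopicShardInfo: { clusterId: 0, shard: 0 } })
//   createDecoder(contentTopic)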
@@ -57,10 +57,11 @@ class Metadata implements IMetadata {
       };
     }
 
-    const stream = await this.streamManager.getStream(peerId);
-    if (!stream) {
-      log.error(`Failed to get a stream for remote peer:${peerId.toString()}`);
+    let stream;
+    try {
+      stream = await this.streamManager.getStream(peerId);
+    } catch (error) {
+      log.error("Failed to get stream", error);
       return {
         shardInfo: null,
         error: ProtocolError.NO_STREAM_AVAILABLE
@@ -155,7 +155,7 @@ describe("StoreCore", () => {
   });
 
   it("ends if stream creation fails", async () => {
-    mockStreamManager.getStream.resolves(undefined as any);
+    mockStreamManager.getStream.rejects(new Error("Stream creation failed"));
     const generator = storeCore.queryPerPage(queryOpts, decoders, mockPeerId);
     const result = await generator.next();
     expect(result.done).to.be.true;
@@ -35,10 +35,6 @@ export class StoreCore {
     this.streamManager = new StreamManager(StoreCodec, libp2p.components);
   }
 
-  public stop(): void {
-    this.streamManager.stop();
-  }
-
   public get maxTimeLimit(): number {
     return MAX_TIME_RANGE;
   }
@@ -72,11 +68,6 @@ export class StoreCore {
 
     let currentCursor = queryOpts.paginationCursor;
     while (true) {
-      if (queryOpts.abortSignal?.aborted) {
-        log.info("Store query aborted by signal");
-        break;
-      }
-
       const storeQueryRequest = StoreQueryRequest.create({
         ...queryOpts,
         paginationCursor: currentCursor
@@ -89,31 +80,21 @@ export class StoreCore {
         contentTopics: queryOpts.contentTopics
       });
 
-      const stream = await this.streamManager.getStream(peerId);
-      if (!stream) {
-        log.error(
-          `Failed to get a stream for remote peer:${peerId.toString()}`
-        );
+      let stream;
+      try {
+        stream = await this.streamManager.getStream(peerId);
+      } catch (e) {
+        log.error("Failed to get stream", e);
         break;
       }
 
-      let res;
-      try {
-        res = await pipe(
-          [storeQueryRequest.encode()],
-          lp.encode,
-          stream,
-          lp.decode,
-          async (source) => await all(source)
-        );
-      } catch (error) {
-        if (error instanceof Error && error.name === "AbortError") {
-          log.info(`Store query aborted for peer ${peerId.toString()}`);
-          break;
-        }
-        throw error;
-      }
+      const res = await pipe(
+        [storeQueryRequest.encode()],
+        lp.encode,
+        stream,
+        lp.decode,
+        async (source) => await all(source)
+      );
 
       const bytes = new Uint8ArrayList();
       res.forEach((chunk) => {
@@ -140,11 +121,6 @@ export class StoreCore {
         `${storeQueryResponse.messages.length} messages retrieved from store`
       );
 
-      if (queryOpts.abortSignal?.aborted) {
-        log.info("Store query aborted by signal before processing messages");
-        break;
-      }
-
      const decodedMessages = storeQueryResponse.messages.map((protoMsg) => {
         if (!protoMsg.message) {
           return Promise.resolve(undefined);
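Master's side of the loop above cooperates with an AbortSignal at two points: a check at the top of each iteration and an AbortError catch around pipe(). A caller-side sketch under those assumptions; the actual queryPerPage invocation is omitted, and only the option names come from the diff.

// Cancel a long-running paged store query from the outside.
const controller = new AbortController();

const queryOpts = {
  contentTopics: ["/js-waku/1/tests/bytes"],
  abortSignal: controller.signal
  // other StoreQueryRequest fields omitted
};

// Abort after 5s: the while (true) loop then exits either at the
// top-of-loop aborted check or when pipe() rejects with an AbortError.
setTimeout(() => controller.abort(), 5_000);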
@@ -27,10 +27,6 @@ describe("StreamManager", () => {
     } as any as Libp2pComponents);
   });
 
-  afterEach(() => {
-    sinon.restore();
-  });
-
   it("should return usable stream attached to connection", async () => {
     for (const writeStatus of ["ready", "writing"]) {
       const con1 = createMockConnection();
@@ -49,13 +45,21 @@ describe("StreamManager", () => {
     }
   });
 
-  it("should return undefined if no connection provided", async () => {
+  it("should throw if no connection provided", async () => {
     streamManager["libp2p"]["connectionManager"]["getConnections"] = (
       _peerId: PeerId | undefined
     ) => [];
 
-    const stream = await streamManager.getStream(mockPeer.id);
-    expect(stream).to.be.undefined;
+    let error: Error | undefined;
+    try {
+      await streamManager.getStream(mockPeer.id);
+    } catch (e) {
+      error = e as Error;
+    }
+
+    expect(error).not.to.be.undefined;
+    expect(error?.message).to.include(mockPeer.id.toString());
+    expect(error?.message).to.include(MULTICODEC);
   });
 
   it("should create a new stream if no existing for protocol found", async () => {
@@ -110,11 +114,8 @@ describe("StreamManager", () => {
       streamManager.getStream(mockPeer.id)
     ]);
 
-    expect(stream1).to.not.be.undefined;
-    expect(stream2).to.not.be.undefined;
-
     const expected = ["1", "2"].toString();
-    const actual = [stream1?.id, stream2?.id].sort().toString();
+    const actual = [stream1.id, stream2.id].sort().toString();
 
     expect(actual).to.be.eq(expected);
   });
@@ -123,9 +124,7 @@ describe("StreamManager", () => {
     const scheduleNewStreamSpy = sinon.spy();
     streamManager["scheduleNewStream"] = scheduleNewStreamSpy;
     eventTarget.dispatchEvent(
-      new CustomEvent("peer:update", {
-        detail: { peer: { id: mockPeer.id, protocols: [] } }
-      })
+      new CustomEvent("peer:update", { detail: { peer: { protocols: [] } } })
     );
 
     expect(scheduleNewStreamSpy.calledOnce).to.be.false;
@@ -136,7 +135,7 @@ describe("StreamManager", () => {
     streamManager["scheduleNewStream"] = scheduleNewStreamSpy;
     eventTarget.dispatchEvent(
       new CustomEvent("peer:update", {
-        detail: { peer: { id: mockPeer.id, protocols: [MULTICODEC] } }
+        detail: { peer: { protocols: [MULTICODEC] } }
       })
     );
 
@@ -161,7 +160,7 @@ describe("StreamManager", () => {
 
     eventTarget.dispatchEvent(
       new CustomEvent("peer:update", {
-        detail: { peer: { id: mockPeer.id, protocols: [MULTICODEC] } }
+        detail: { peer: { protocols: [MULTICODEC] } }
      })
     );
 
@@ -13,7 +13,7 @@ export class StreamManager {
   private streamPool: Map<string, Promise<void>> = new Map();
 
   public constructor(
-    private readonly multicodec: string,
+    private multicodec: string,
     private readonly libp2p: Libp2pComponents
   ) {
     this.log = new Logger(`stream-manager:${multicodec}`);
@@ -23,56 +23,39 @@ export class StreamManager {
     );
   }
 
-  public stop(): void {
-    this.libp2p.events.removeEventListener(
-      "peer:update",
-      this.handlePeerUpdateStreamPool
-    );
-    this.streamPool.clear();
-    this.ongoingCreation.clear();
-  }
-
-  public async getStream(peerId: PeerId): Promise<Stream | undefined> {
-    try {
-      const peerIdStr = peerId.toString();
-      const scheduledStream = this.streamPool.get(peerIdStr);
+  public async getStream(peerId: PeerId): Promise<Stream> {
+    const peerIdStr = peerId.toString();
+    const scheduledStream = this.streamPool.get(peerIdStr);
+
+    if (scheduledStream) {
+      this.streamPool.delete(peerIdStr);
+      await scheduledStream;
+    }
 
-      if (scheduledStream) {
-        this.streamPool.delete(peerIdStr);
-        await scheduledStream;
-      }
-
-      const stream =
-        this.getOpenStreamForCodec(peerId) || (await this.createStream(peerId));
-
-      if (!stream) {
-        return;
-      }
-
+    let stream = this.getOpenStreamForCodec(peerId);
+
+    if (stream) {
       this.log.info(
-        `Using stream for peerId=${peerIdStr} multicodec=${this.multicodec}`
+        `Found existing stream peerId=${peerIdStr} multicodec=${this.multicodec}`
       );
       this.lockStream(peerIdStr, stream);
       return stream;
-    } catch (error) {
-      this.log.error(`Failed to getStream:`, error);
-      return;
     }
+
+    stream = await this.createStream(peerId);
+    this.lockStream(peerIdStr, stream);
+
+    return stream;
   }
 
-  private async createStream(
-    peerId: PeerId,
-    retries = 0
-  ): Promise<Stream | undefined> {
+  private async createStream(peerId: PeerId, retries = 0): Promise<Stream> {
     const connections = this.libp2p.connectionManager.getConnections(peerId);
     const connection = selectOpenConnection(connections);
 
     if (!connection) {
-      this.log.error(
+      throw new Error(
         `Failed to get a connection to the peer peerId=${peerId.toString()} multicodec=${this.multicodec}`
       );
-      return;
     }
 
     let lastError: unknown;
@@ -94,10 +77,9 @@ export class StreamManager {
     }
 
     if (!stream) {
-      this.log.error(
+      throw new Error(
        `Failed to create a new stream for ${peerId.toString()} -- ` + lastError
       );
-      return;
     }
 
     return stream;
@@ -159,9 +141,6 @@ export class StreamManager {
     const connection = selectOpenConnection(connections);
 
     if (!connection) {
-      this.log.info(
-        `No open connection found for peerId=${peerId.toString()} multicodec=${this.multicodec}`
-      );
       return;
     }
 
@@ -170,27 +149,16 @@ export class StreamManager {
     );
 
     if (!stream) {
-      this.log.info(
-        `No open stream found for peerId=${peerId.toString()} multicodec=${this.multicodec}`
-      );
       return;
     }
 
     const isStreamUnusable = ["done", "closed", "closing"].includes(
       stream.writeStatus || ""
     );
 
     if (isStreamUnusable || this.isStreamLocked(stream)) {
-      this.log.info(
-        `Stream for peerId=${peerId.toString()} multicodec=${this.multicodec} is unusable`
-      );
       return;
     }
 
-    this.log.info(
-      `Found open stream for peerId=${peerId.toString()} multicodec=${this.multicodec}`
-    );
-
     return stream;
   }
 
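The behavioural change above is easiest to see at a call site: on the enr-v0.0.31 side getStream resolves to a Stream or rejects, while on the master side it resolves to Stream | undefined and never throws. A sketch under those two contracts; the helper names are invented, and the module path is assumed.

import type { PeerId, Stream } from "@libp2p/interface";

import { StreamManager } from "./stream_manager.js"; // path assumed

// enr-v0.0.31 semantics: failures surface as rejections whose message
// carries the peer id and multicodec, per the spec changes above.
async function getStreamOrNull(
  streamManager: StreamManager,
  peerId: PeerId
): Promise<Stream | null> {
  try {
    return await streamManager.getStream(peerId);
  } catch {
    return null;
  }
}

// master semantics: the same guard is a plain undefined check.
// (Here getStream is assumed to return Stream | undefined, as on master.)
async function getStreamOrNullMaster(
  streamManager: StreamManager,
  peerId: PeerId
): Promise<Stream | null> {
  return (await streamManager.getStream(peerId)) ?? null;
}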
@@ -1,61 +1,5 @@
 # Changelog
 
-## [0.0.13](https://github.com/waku-org/js-waku/compare/discovery-v0.0.12...discovery-v0.0.13) (2025-10-31)
-
-
-### Dependencies
-
-* The following workspace dependencies were updated
-  * dependencies
-    * @waku/core bumped from 0.0.39 to 0.0.40
-    * @waku/proto bumped from ^0.0.14 to ^0.0.15
-
-## [0.0.12](https://github.com/waku-org/js-waku/compare/discovery-v0.0.11...discovery-v0.0.12) (2025-09-20)
-
-
-### Dependencies
-
-* The following workspace dependencies were updated
-  * dependencies
-    * @waku/core bumped from 0.0.38 to 0.0.39
-    * @waku/enr bumped from 0.0.32 to 0.0.33
-    * @waku/interfaces bumped from 0.0.33 to 0.0.34
-    * @waku/proto bumped from ^0.0.13 to ^0.0.14
-    * @waku/utils bumped from 0.0.26 to 0.0.27
-
-## [0.0.11](https://github.com/waku-org/js-waku/compare/discovery-v0.0.10...discovery-v0.0.11) (2025-08-14)
-
-
-### ⚠ BREAKING CHANGES
-
-* local peer discovery improvements ([#2557](https://github.com/waku-org/js-waku/issues/2557))
-* Introduce routing info concept
-
-### Features
-
-* Introduce routing info concept ([3842d84](https://github.com/waku-org/js-waku/commit/3842d84b55eb96728f6b05b9307ff823fac58a54))
-* Local peer discovery improvements ([#2557](https://github.com/waku-org/js-waku/issues/2557)) ([eab8ce8](https://github.com/waku-org/js-waku/commit/eab8ce81b431b11d79dcbec31aea759319853336))
-* Peer exchange discovery improvements ([#2537](https://github.com/waku-org/js-waku/issues/2537)) ([95da57a](https://github.com/waku-org/js-waku/commit/95da57a8705fa195529ef52a6c908642da5e120c))
-* Retrieve peers from all passed enrtree URLs ([25f884e](https://github.com/waku-org/js-waku/commit/25f884e05b430cebe3b6650c16026d771d1b7626))
-
-
-### Bug Fixes
-
-* Do not limit DNS Peer Discovery on capability ([0dfe352](https://github.com/waku-org/js-waku/commit/0dfe35281c677e91c064557a83a50e6a1ca6d0ac))
-* Improve error handling for stream manager ([#2546](https://github.com/waku-org/js-waku/issues/2546)) ([ada2657](https://github.com/waku-org/js-waku/commit/ada265731acfeddc2bfe2e8e963bc2be37f13900))
-* Prevent setting shard info from PX if it exists ([#2561](https://github.com/waku-org/js-waku/issues/2561)) ([dfb2baf](https://github.com/waku-org/js-waku/commit/dfb2baf004a58c29f7afd0144c82a8d2e6710d5a))
-
-
-### Dependencies
-
-* The following workspace dependencies were updated
-  * dependencies
-    * @waku/core bumped from 0.0.37 to 0.0.38
-    * @waku/enr bumped from 0.0.31 to 0.0.32
-    * @waku/interfaces bumped from 0.0.32 to 0.0.33
-    * @waku/proto bumped from ^0.0.12 to ^0.0.13
-    * @waku/utils bumped from 0.0.25 to 0.0.26
-
 ## [0.0.10](https://github.com/waku-org/js-waku/compare/discovery-v0.0.9...discovery-v0.0.10) (2025-07-18)
 
 
@@ -1,6 +1,6 @@
 {
   "name": "@waku/discovery",
-  "version": "0.0.13",
+  "version": "0.0.10",
   "description": "Contains various discovery mechanisms: DNS Discovery (EIP-1459, Peer Exchange, Local Peer Cache Discovery.",
   "types": "./dist/index.d.ts",
   "module": "./dist/index.js",
@@ -15,7 +15,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/discovery#readme",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/waku-org/js-waku.git"
+    "url": "https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
@@ -51,11 +51,11 @@
     "node": ">=22"
   },
   "dependencies": {
-    "@waku/core": "0.0.40",
-    "@waku/enr": "0.0.33",
-    "@waku/interfaces": "0.0.34",
-    "@waku/proto": "^0.0.15",
-    "@waku/utils": "0.0.27",
+    "@waku/core": "0.0.37",
+    "@waku/enr": "0.0.31",
+    "@waku/interfaces": "0.0.32",
+    "@waku/proto": "^0.0.12",
+    "@waku/utils": "0.0.25",
     "debug": "^4.3.4",
     "dns-over-http-resolver": "^3.0.8",
     "hi-base32": "^0.5.1",
@@ -1,4 +1,4 @@
-import { Tags } from "@waku/interfaces";
+import { type NodeCapabilityCount, Tags } from "@waku/interfaces";
 
 /**
  * The ENR tree for the different fleets.
@@ -13,3 +13,9 @@ export const enrTree = {
 export const DEFAULT_BOOTSTRAP_TAG_NAME = Tags.BOOTSTRAP;
 export const DEFAULT_BOOTSTRAP_TAG_VALUE = 50;
 export const DEFAULT_BOOTSTRAP_TAG_TTL = 100_000_000;
+
+export const DEFAULT_NODE_REQUIREMENTS: Partial<NodeCapabilityCount> = {
+  store: 1,
+  filter: 2,
+  lightPush: 2
+};
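On the enr-v0.0.31 side these defaults feed the capability-count filter of DNS discovery, whose getPeers(urls, capabilities) form appears throughout the spec below. A sketch under that assumption; the constants module path and the helper name are hypothetical.

import { DnsNodeDiscovery } from "./dns.js";
import { DEFAULT_NODE_REQUIREMENTS, enrTree } from "./constants.js"; // path assumed

// Resolve bootstrap peers until one store node and two filter/light-push
// nodes have been found (or the trees are exhausted), mirroring the
// getPeers(capabilities) calls in the spec below.
async function findBootstrapPeers(dns: DnsNodeDiscovery) {
  return dns.getPeers(Object.values(enrTree), DEFAULT_NODE_REQUIREMENTS);
}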
@ -1,6 +1,5 @@
|
|||||||
import type { DnsClient } from "@waku/interfaces";
|
import type { DnsClient } from "@waku/interfaces";
|
||||||
import { expect } from "chai";
|
import { expect } from "chai";
|
||||||
import sinon from "sinon";
|
|
||||||
|
|
||||||
import { DnsNodeDiscovery } from "./dns.js";
|
import { DnsNodeDiscovery } from "./dns.js";
|
||||||
import testData from "./testdata.json" with { type: "json" };
|
import testData from "./testdata.json" with { type: "json" };
|
||||||
@ -18,6 +17,7 @@ const branchDomainD = "D5SNLTAGWNQ34NTQTPHNZDECFU";
|
|||||||
const partialBranchA = "AAAA";
|
const partialBranchA = "AAAA";
|
||||||
const partialBranchB = "BBBB";
|
const partialBranchB = "BBBB";
|
||||||
const singleBranch = `enrtree-branch:${branchDomainA}`;
|
const singleBranch = `enrtree-branch:${branchDomainA}`;
|
||||||
|
const doubleBranch = `enrtree-branch:${branchDomainA},${branchDomainB}`;
|
||||||
const multiComponentBranch = [
|
const multiComponentBranch = [
|
||||||
`enrtree-branch:${branchDomainA},${partialBranchA}`,
|
`enrtree-branch:${branchDomainA},${partialBranchA}`,
|
||||||
`${partialBranchB},${branchDomainB}`
|
`${partialBranchB},${branchDomainB}`
|
||||||
@ -34,12 +34,10 @@ const errorBranchB = `enrtree-branch:${branchDomainD}`;
|
|||||||
class MockDNS implements DnsClient {
|
class MockDNS implements DnsClient {
|
||||||
private fqdnRes: Map<string, string[]>;
|
private fqdnRes: Map<string, string[]>;
|
||||||
private fqdnThrows: string[];
|
private fqdnThrows: string[];
|
||||||
public hasThrown: boolean;
|
|
||||||
|
|
||||||
public constructor() {
|
public constructor() {
|
||||||
this.fqdnRes = new Map();
|
this.fqdnRes = new Map();
|
||||||
this.fqdnThrows = [];
|
this.fqdnThrows = [];
|
||||||
this.hasThrown = false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public addRes(fqdn: string, res: string[]): void {
|
public addRes(fqdn: string, res: string[]): void {
|
||||||
@ -51,17 +49,11 @@ class MockDNS implements DnsClient {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public resolveTXT(fqdn: string): Promise<string[]> {
|
public resolveTXT(fqdn: string): Promise<string[]> {
|
||||||
if (this.fqdnThrows.includes(fqdn)) {
|
if (this.fqdnThrows.includes(fqdn)) throw "Mock DNS throws.";
|
||||||
this.hasThrown = true;
|
|
||||||
throw "Mock DNS throws.";
|
|
||||||
}
|
|
||||||
|
|
||||||
const res = this.fqdnRes.get(fqdn);
|
const res = this.fqdnRes.get(fqdn);
|
||||||
|
|
||||||
if (!res) {
|
if (!res) throw `Mock DNS could not resolve ${fqdn}`;
|
||||||
this.hasThrown = true;
|
|
||||||
throw `Mock DNS could not resolve ${fqdn}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
return Promise.resolve(res);
|
return Promise.resolve(res);
|
||||||
}
|
}
|
||||||
@ -80,10 +72,9 @@ describe("DNS Node Discovery", () => {
|
|||||||
mockDns.addRes(`${branchDomainA}.${host}`, [mockData.enrWithWaku2Relay]);
|
mockDns.addRes(`${branchDomainA}.${host}`, [mockData.enrWithWaku2Relay]);
|
||||||
|
|
||||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||||
const peers = [];
|
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peers.push(peer);
|
});
|
||||||
}
|
|
||||||
|
|
||||||
expect(peers.length).to.eq(1);
|
expect(peers.length).to.eq(1);
|
||||||
expect(peers[0].ip).to.eq("192.168.178.251");
|
expect(peers[0].ip).to.eq("192.168.178.251");
|
||||||
@ -97,10 +88,9 @@ describe("DNS Node Discovery", () => {
|
|||||||
mockDns.addRes(`${branchDomainA}.${host}`, [singleBranch]);
|
mockDns.addRes(`${branchDomainA}.${host}`, [singleBranch]);
|
||||||
|
|
||||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||||
const peers = [];
|
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peers.push(peer);
|
});
|
||||||
}
|
|
||||||
|
|
||||||
expect(peers.length).to.eq(0);
|
expect(peers.length).to.eq(0);
|
||||||
});
|
});
|
||||||
@ -112,21 +102,17 @@ describe("DNS Node Discovery", () => {
|
|||||||
mockDns.addRes(`${branchDomainA}.${host}`, []);
|
mockDns.addRes(`${branchDomainA}.${host}`, []);
|
||||||
|
|
||||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||||
const peersA = [];
|
let peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peersA.push(peer);
|
});
|
||||||
}
|
|
||||||
|
|
||||||
expect(peersA.length).to.eq(0);
|
expect(peers.length).to.eq(0);
|
||||||
|
|
||||||
// No TXT records case
|
// No TXT records case
|
||||||
mockDns.addRes(`${branchDomainA}.${host}`, []);
|
mockDns.addRes(`${branchDomainA}.${host}`, []);
|
||||||
|
|
||||||
const peersB = [];
|
peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], { relay: 1 });
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
expect(peers.length).to.eq(0);
|
||||||
peersB.push(peer);
|
|
||||||
}
|
|
||||||
expect(peersB.length).to.eq(0);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("ignores domain fetching errors", async function () {
|
it("ignores domain fetching errors", async function () {
|
||||||
@ -134,20 +120,18 @@ describe("DNS Node Discovery", () => {
|
|||||||
mockDns.addThrow(`${branchDomainC}.${host}`);
|
mockDns.addThrow(`${branchDomainC}.${host}`);
|
||||||
|
|
||||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||||
const peers = [];
|
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peers.push(peer);
|
});
|
||||||
}
|
|
||||||
expect(peers.length).to.eq(0);
|
expect(peers.length).to.eq(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("ignores unrecognized TXT record formats", async function () {
|
it("ignores unrecognized TXT record formats", async function () {
|
||||||
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrBranchBadPrefix]);
|
mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrBranchBadPrefix]);
|
||||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||||
const peers = [];
|
const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peers.push(peer);
|
});
|
||||||
}
|
|
||||||
expect(peers.length).to.eq(0);
|
expect(peers.length).to.eq(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -156,23 +140,20 @@ describe("DNS Node Discovery", () => {
|
|||||||
mockDns.addRes(`${branchDomainD}.${host}`, [mockData.enrWithWaku2Relay]);
|
mockDns.addRes(`${branchDomainD}.${host}`, [mockData.enrWithWaku2Relay]);
|
||||||
|
|
||||||
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
|
||||||
const peersA = [];
|
const peersA = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peersA.push(peer);
|
});
|
||||||
}
|
|
||||||
expect(peersA.length).to.eq(1);
|
expect(peersA.length).to.eq(1);
|
||||||
|
|
||||||
// Specify that a subsequent network call retrieving the same peer should throw.
|
// Specify that a subsequent network call retrieving the same peer should throw.
|
||||||
// This test passes only if the peer is fetched from cache
|
// This test passes only if the peer is fetched from cache
|
||||||
mockDns.addThrow(`${branchDomainD}.${host}`);
|
mockDns.addThrow(`${branchDomainD}.${host}`);
|
||||||
|
|
||||||
const peersB = [];
|
const peersB = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
|
||||||
for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
|
relay: 1
|
||||||
peersB.push(peer);
|
});
|
||||||
}
|
|
||||||
expect(peersB.length).to.eq(1);
|
expect(peersB.length).to.eq(1);
|
||||||
expect(peersA[0].ip).to.eq(peersB[0].ip);
|
expect(peersA[0].ip).to.eq(peersB[0].ip);
|
||||||
expect(mockDns.hasThrown).to.be.false;
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -188,10 +169,9 @@ describe("DNS Node Discovery w/ capabilities", () => {
     mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrWithWaku2Relay]);

     const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-    const peers = [];
-    for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
-      peers.push(peer);
-    }
+    const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+      relay: 1
+    });

     expect(peers.length).to.eq(1);
     expect(peers[0].peerId?.toString()).to.eq(
@@ -203,10 +183,10 @@ describe("DNS Node Discovery w/ capabilities", () => {
     mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrWithWaku2RelayStore]);

     const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-    const peers = [];
-    for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
-      peers.push(peer);
-    }
+    const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+      store: 1,
+      relay: 1
+    });

     expect(peers.length).to.eq(1);
     expect(peers[0].peerId?.toString()).to.eq(
@@ -214,29 +194,42 @@ describe("DNS Node Discovery w/ capabilities", () => {
     );
   });

-  it("return first retrieved peers without further DNS queries", async function () {
-    mockDns.addRes(`${rootDomain}.${host}`, multiComponentBranch);
+  it("should only return 1 node with store capability", async () => {
+    mockDns.addRes(`${rootDomain}.${host}`, [mockData.enrWithWaku2Store]);

+    const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
+    const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+      store: 1
+    });
+
+    expect(peers.length).to.eq(1);
+    expect(peers[0].peerId?.toString()).to.eq(
+      "16Uiu2HAkv3La3ECgQpdYeEJfrX36EWdhkUDv4C9wvXM8TFZ9dNgd"
+    );
+  });
+
+  it("retrieves all peers (2) when cannot fulfill all requirements", async () => {
+    mockDns.addRes(`${rootDomain}.${host}`, [doubleBranch]);
     mockDns.addRes(`${branchDomainA}.${host}`, [
       mockData.enrWithWaku2RelayStore
     ]);
-    // The ENR Tree is such as there are more branches to be explored.
-    // But they should not be explored if it isn't asked
-    mockDns.addThrow(`${branchDomainB}.${host}`);
+    mockDns.addRes(`${branchDomainB}.${host}`, [mockData.enrWithWaku2Relay]);

     const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
+    const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+      store: 1,
+      relay: 2,
+      filter: 1
+    });

-    const randomStub = sinon.stub(Math, "random").returns(0);
-    try {
-      const iterator = dnsNodeDiscovery.getNextPeer([mockData.enrTree]);
-      const { value: peer } = await iterator.next();
-
-      expect(peer.peerId?.toString()).to.eq(
-        "16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
-      );
-      expect(mockDns.hasThrown).to.be.false;
-    } finally {
-      randomStub.restore();
-    }
+    expect(peers.length).to.eq(2);
+    const peerIds = peers.map((p) => p.peerId?.toString());
+    expect(peerIds).to.contain(
+      "16Uiu2HAm2HyS6brcCspSbszG9i36re2bWBVjMe3tMdnFp1Hua34F"
+    );
+    expect(peerIds).to.contain(
+      "16Uiu2HAmPsYLvfKafxgRsb6tioYyGnSvGXS2iuMigptHrqHPNPzx"
+    );
   });

   it("retrieves all peers (3) when branch entries are composed of multiple strings", async function () {
@@ -250,10 +243,10 @@ describe("DNS Node Discovery w/ capabilities", () => {
     ]);

     const dnsNodeDiscovery = new DnsNodeDiscovery(mockDns);
-    const peers = [];
-    for await (const peer of dnsNodeDiscovery.getNextPeer([mockData.enrTree])) {
-      peers.push(peer);
-    }
+    const peers = await dnsNodeDiscovery.getPeers([mockData.enrTree], {
+      store: 2,
+      relay: 2
+    });

     expect(peers.length).to.eq(3);
     const peerIds = peers.map((p) => p.peerId?.toString());
@@ -282,10 +275,12 @@ describe("DNS Node Discovery [live data]", function () {
     this.timeout(10000);
     // Google's dns server address. Needs to be set explicitly to run in CI
     const dnsNodeDiscovery = await DnsNodeDiscovery.dnsOverHttp();
-    const peers = [];
-    for await (const peer of dnsNodeDiscovery.getNextPeer([enrTree.TEST])) {
-      peers.push(peer);
-    }
+    const peers = await dnsNodeDiscovery.getPeers([enrTree.TEST], {
+      relay: maxQuantity,
+      store: maxQuantity,
+      filter: maxQuantity,
+      lightPush: maxQuantity
+    });

     expect(peers.length).to.eq(maxQuantity);

@@ -303,10 +298,12 @@ describe("DNS Node Discovery [live data]", function () {
     this.timeout(10000);
     // Google's dns server address. Needs to be set explicitly to run in CI
     const dnsNodeDiscovery = await DnsNodeDiscovery.dnsOverHttp();
-    const peers = [];
-    for await (const peer of dnsNodeDiscovery.getNextPeer([enrTree.SANDBOX])) {
-      peers.push(peer);
-    }
+    const peers = await dnsNodeDiscovery.getPeers([enrTree.SANDBOX], {
+      relay: maxQuantity,
+      store: maxQuantity,
+      filter: maxQuantity,
+      lightPush: maxQuantity
+    });

     expect(peers.length).to.eq(maxQuantity);

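The live-data tests above follow the same pattern against the public trees; a minimal standalone sketch of the capability-count variant (the `@waku/discovery` package name is an assumption based on the `packages/discovery` directory, and `maxQuantity` is an illustrative value — the tests define their own):

    import { DnsNodeDiscovery, enrTree } from "@waku/discovery";

    const maxQuantity = 3; // assumed value for illustration
    const dnsNodeDiscovery = await DnsNodeDiscovery.dnsOverHttp();
    const peers = await dnsNodeDiscovery.getPeers([enrTree.TEST], {
      relay: maxQuantity,
      store: maxQuantity,
      filter: maxQuantity,
      lightPush: maxQuantity
    });
    console.log(`found ${peers.length} peers`);
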
@@ -1,16 +1,25 @@
 import { ENR, EnrDecoder } from "@waku/enr";
-import type { DnsClient, IEnr, SearchContext } from "@waku/interfaces";
-import { Logger, shuffle } from "@waku/utils";
+import type {
+  DnsClient,
+  IEnr,
+  NodeCapabilityCount,
+  SearchContext
+} from "@waku/interfaces";
+import { Logger } from "@waku/utils";

 import { DnsOverHttps } from "./dns_over_https.js";
 import { ENRTree } from "./enrtree.js";
-import { fetchNodes } from "./fetch_nodes.js";
+import {
+  fetchNodesUntilCapabilitiesFulfilled,
+  yieldNodesUntilCapabilitiesFulfilled
+} from "./fetch_nodes.js";

 const log = new Logger("discovery:dns");

 export class DnsNodeDiscovery {
   private readonly dns: DnsClient;
   private readonly _DNSTreeCache: { [key: string]: string };
+  private readonly _errorTolerance: number = 10;

   public static async dnsOverHttp(
     dnsClient?: DnsClient
@@ -21,29 +30,68 @@ export class DnsNodeDiscovery {
     return new DnsNodeDiscovery(dnsClient);
   }

+  /**
+   * Returns a list of verified peers listed in an EIP-1459 DNS tree. Method may
+   * return fewer peers than requested if @link wantedNodeCapabilityCount requires
+   * larger quantity of peers than available or the number of errors/duplicate
+   * peers encountered by randomized search exceeds the sum of the fields of
+   * @link wantedNodeCapabilityCount plus the @link _errorTolerance factor.
+   */
+  public async getPeers(
+    enrTreeUrls: string[],
+    wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
+  ): Promise<IEnr[]> {
+    const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
+    const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
+    const context: SearchContext = {
+      domain,
+      publicKey,
+      visits: {}
+    };
+
+    const peers = await fetchNodesUntilCapabilitiesFulfilled(
+      wantedNodeCapabilityCount,
+      this._errorTolerance,
+      () => this._search(domain, context)
+    );
+    log.info(
+      "retrieved peers: ",
+      peers.map((peer) => {
+        return {
+          id: peer.peerId?.toString(),
+          multiaddrs: peer.multiaddrs?.map((ma) => ma.toString())
+        };
+      })
+    );
+    return peers;
+  }
+
   public constructor(dns: DnsClient) {
     this._DNSTreeCache = {};
     this.dns = dns;
   }

   /**
-   * Retrieve the next peers from the passed [[enrTreeUrls]],
+   * {@inheritDoc getPeers}
    */
-  public async *getNextPeer(enrTreeUrls: string[]): AsyncGenerator<IEnr> {
-    // Shuffle the ENR Trees so that not all clients connect to same nodes first.
-    for (const enrTreeUrl of shuffle(enrTreeUrls)) {
-      const { publicKey, domain } = ENRTree.parseTree(enrTreeUrl);
-      const context: SearchContext = {
-        domain,
-        publicKey,
-        visits: {}
-      };
-
-      for await (const peer of fetchNodes(() =>
-        this._search(domain, context)
-      )) {
-        yield peer;
-      }
+  public async *getNextPeer(
+    enrTreeUrls: string[],
+    wantedNodeCapabilityCount: Partial<NodeCapabilityCount>
+  ): AsyncGenerator<IEnr> {
+    const networkIndex = Math.floor(Math.random() * enrTreeUrls.length);
+    const { publicKey, domain } = ENRTree.parseTree(enrTreeUrls[networkIndex]);
+    const context: SearchContext = {
+      domain,
+      publicKey,
+      visits: {}
+    };
+
+    for await (const peer of yieldNodesUntilCapabilitiesFulfilled(
+      wantedNodeCapabilityCount,
+      this._errorTolerance,
+      () => this._search(domain, context)
+    )) {
+      yield peer;
     }
   }

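A worked example of the search budget implied by `_errorTolerance = 10`: a request for `{ relay: 2, store: 1 }` sums to 3 wanted capabilities, so the randomized search performs at most 3 + 10 = 13 `_search` calls before returning whatever it has found, which is why `getPeers` may resolve with fewer peers than requested.
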
@@ -117,7 +165,7 @@ export class DnsNodeDiscovery {
       throw new Error("Received empty result array while fetching TXT record");
     if (!response[0].length) throw new Error("Received empty TXT record");

-    // Branch entries can be an array of strings of comma-delimited subdomains, with
+    // Branch entries can be an array of strings of comma delimited subdomains, with
     // some subdomain strings split across the array elements
     const result = response.join("");

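As a worked example of the joining behavior that comment describes (the subdomain values below are illustrative, not taken from a real tree):

    // A branch TXT record may arrive split across array elements:
    const response = [
      "enrtree-branch:D2SNLTAGWNQ34NTQTPHNZDECFU,67BLTJEU5R2D5S3B4QKJSBRF",
      "CY"
    ];
    // Joining restores the full comma-delimited subdomain list:
    const result = response.join("");
    // "enrtree-branch:D2SNLTAGWNQ34NTQTPHNZDECFU,67BLTJEU5R2D5S3B4QKJSBRFCY"
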
@@ -9,7 +9,8 @@ import type {
   DiscoveryTrigger,
   DnsDiscOptions,
   DnsDiscoveryComponents,
-  IEnr
+  IEnr,
+  NodeCapabilityCount
 } from "@waku/interfaces";
 import { DNS_DISCOVERY_TAG } from "@waku/interfaces";
 import { encodeRelayShard, Logger } from "@waku/utils";
@@ -17,7 +18,8 @@ import { encodeRelayShard, Logger } from "@waku/utils";
 import {
   DEFAULT_BOOTSTRAP_TAG_NAME,
   DEFAULT_BOOTSTRAP_TAG_TTL,
-  DEFAULT_BOOTSTRAP_TAG_VALUE
+  DEFAULT_BOOTSTRAP_TAG_VALUE,
+  DEFAULT_NODE_REQUIREMENTS
 } from "./constants.js";
 import { DnsNodeDiscovery } from "./dns.js";
@@ -33,7 +35,7 @@ export class PeerDiscoveryDns
   private nextPeer: (() => AsyncGenerator<IEnr>) | undefined;
   private _started: boolean;
   private _components: DnsDiscoveryComponents;
-  private readonly _options: DnsDiscOptions;
+  private _options: DnsDiscOptions;

   public constructor(
     components: DnsDiscoveryComponents,
@@ -63,9 +65,14 @@ export class PeerDiscoveryDns
     let { enrUrls } = this._options;
     if (!Array.isArray(enrUrls)) enrUrls = [enrUrls];

+    const { wantedNodeCapabilityCount } = this._options;
     const dns = await DnsNodeDiscovery.dnsOverHttp();

-    this.nextPeer = dns.getNextPeer.bind(dns, enrUrls);
+    this.nextPeer = dns.getNextPeer.bind(
+      dns,
+      enrUrls,
+      wantedNodeCapabilityCount
+    );
   }

   for await (const peerEnr of this.nextPeer()) {
@@ -87,11 +94,9 @@ export class PeerDiscoveryDns
     };

     let isPeerChanged = false;
-    const isPeerAlreadyInPeerStore = await this._components.peerStore.has(
-      peerInfo.id
-    );
+    const isPeerExists = await this._components.peerStore.has(peerInfo.id);

-    if (isPeerAlreadyInPeerStore) {
+    if (isPeerExists) {
       const peer = await this._components.peerStore.get(peerInfo.id);
       const hasBootstrapTag = peer.tags.has(DEFAULT_BOOTSTRAP_TAG_NAME);

@@ -138,8 +143,9 @@ export class PeerDiscoveryDns
 }

 export function wakuDnsDiscovery(
-  enrUrls: string[]
+  enrUrls: string[],
+  wantedNodeCapabilityCount: Partial<NodeCapabilityCount> = DEFAULT_NODE_REQUIREMENTS
 ): (components: DnsDiscoveryComponents) => PeerDiscoveryDns {
   return (components: DnsDiscoveryComponents) =>
-    new PeerDiscoveryDns(components, { enrUrls });
+    new PeerDiscoveryDns(components, { enrUrls, wantedNodeCapabilityCount });
 }
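A minimal wiring sketch for the factory above; the `@waku/discovery` package name and the libp2p `peerDiscovery` slot are assumptions based on the surrounding code, not shown in this diff:

    import { wakuDnsDiscovery, enrTree } from "@waku/discovery";

    const libp2pOptions = {
      peerDiscovery: [
        // Falls back to DEFAULT_NODE_REQUIREMENTS when no counts are given.
        wakuDnsDiscovery([enrTree.SANDBOX], { relay: 2, store: 1 })
      ]
    };
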
@@ -3,11 +3,12 @@ import { peerIdFromPrivateKey } from "@libp2p/peer-id";
 import { multiaddr } from "@multiformats/multiaddr";
 import { ENR } from "@waku/enr";
 import { EnrCreator } from "@waku/enr";
+import type { Waku2 } from "@waku/interfaces";
 import { expect } from "chai";

-import { fetchNodes } from "./fetch_nodes.js";
+import { fetchNodesUntilCapabilitiesFulfilled } from "./fetch_nodes.js";

-async function createEnr(): Promise<ENR> {
+async function createEnr(waku2: Waku2): Promise<ENR> {
   const peerId = await generateKeyPair("secp256k1").then(peerIdFromPrivateKey);
   const enr = await EnrCreator.fromPeerId(peerId);
   enr.setLocationMultiaddr(multiaddr("/ip4/18.223.219.100/udp/9000"));
@@ -19,67 +20,99 @@ async function createEnr(): Promise<ENR> {
     )
   ];

-  enr.waku2 = { lightPush: true, filter: true, relay: false, store: false };
+  enr.waku2 = waku2;
   return enr;
 }

-describe("Fetch nodes", function () {
-  it("Get Nodes", async function () {
-    const retrievedNodes = [await createEnr(), await createEnr()];
-
-    let fetchCount = 0;
-    const getNode = (): Promise<ENR> => {
-      const node = retrievedNodes[fetchCount];
-      fetchCount++;
-      return Promise.resolve(node);
-    };
-
-    const res = [];
-    for await (const node of fetchNodes(getNode, 5)) {
-      res.push(node);
-    }
-
-    expect(res.length).to.eq(2);
-    expect(res[0].peerId!.toString()).to.not.eq(res[1].peerId!.toString());
-  });
-
-  it("Stops search when maxGet is reached", async function () {
-    const retrievedNodes = [
-      await createEnr(),
-      await createEnr(),
-      await createEnr()
-    ];
-
-    let fetchCount = 0;
-    const getNode = (): Promise<ENR> => {
-      const node = retrievedNodes[fetchCount];
-      fetchCount++;
-      return Promise.resolve(node);
-    };
-
-    const res = [];
-    for await (const node of fetchNodes(getNode, 2)) {
-      res.push(node);
-    }
-
-    expect(res.length).to.eq(2);
-  });
-
-  it("Stops search when 2 null results are returned", async function () {
-    const retrievedNodes = [await createEnr(), null, null, await createEnr()];
-
-    let fetchCount = 0;
-    const getNode = (): Promise<ENR | null> => {
-      const node = retrievedNodes[fetchCount];
-      fetchCount++;
-      return Promise.resolve(node);
-    };
-
-    const res = [];
-    for await (const node of fetchNodes(getNode, 10, 2)) {
-      res.push(node);
-    }
-
+const Waku2None = {
+  relay: false,
+  store: false,
+  filter: false,
+  lightPush: false
+};
+
+describe("Fetch nodes until capabilities are fulfilled", function () {
+  it("1 Relay, 1 fetch", async function () {
+    const relayNode = await createEnr({ ...Waku2None, relay: true });
+
+    const getNode = (): Promise<ENR> => Promise.resolve(relayNode);
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { relay: 1 },
+      0,
+      getNode
+    );
+
     expect(res.length).to.eq(1);
+    expect(res[0].peerId!.toString()).to.eq(relayNode.peerId?.toString());
+  });
+
+  it("1 Store, 2 fetches", async function () {
+    const relayNode = await createEnr({ ...Waku2None, relay: true });
+    const storeNode = await createEnr({ ...Waku2None, store: true });
+
+    const retrievedNodes = [relayNode, storeNode];
+
+    let fetchCount = 0;
+    const getNode = (): Promise<ENR> => {
+      const node = retrievedNodes[fetchCount];
+      fetchCount++;
+      return Promise.resolve(node);
+    };
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { store: 1 },
+      1,
+      getNode
+    );
+
+    expect(res.length).to.eq(1);
+    expect(res[0].peerId!.toString()).to.eq(storeNode.peerId?.toString());
+  });
+
+  it("1 Store, 2 relays, 2 fetches", async function () {
+    const relayNode1 = await createEnr({ ...Waku2None, relay: true });
+    const relayNode2 = await createEnr({ ...Waku2None, relay: true });
+    const relayNode3 = await createEnr({ ...Waku2None, relay: true });
+    const relayStoreNode = await createEnr({
+      ...Waku2None,
+      relay: true,
+      store: true
+    });
+
+    const retrievedNodes = [relayNode1, relayNode2, relayNode3, relayStoreNode];
+
+    let fetchCount = 0;
+    const getNode = (): Promise<ENR> => {
+      const node = retrievedNodes[fetchCount];
+      fetchCount++;
+      return Promise.resolve(node);
+    };
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { store: 1, relay: 2 },
+      1,
+      getNode
+    );
+
+    expect(res.length).to.eq(3);
+    expect(res[0].peerId!.toString()).to.eq(relayNode1.peerId?.toString());
+    expect(res[1].peerId!.toString()).to.eq(relayNode2.peerId?.toString());
+    expect(res[2].peerId!.toString()).to.eq(relayStoreNode.peerId?.toString());
+  });
+
+  it("1 Relay, 1 Filter, gives up", async function () {
+    const relayNode = await createEnr({ ...Waku2None, relay: true });
+
+    const getNode = (): Promise<ENR> => Promise.resolve(relayNode);
+
+    const res = await fetchNodesUntilCapabilitiesFulfilled(
+      { filter: 1, relay: 1 },
+      5,
+      getNode
+    );
+
+    expect(res.length).to.eq(1);
+    expect(res[0].peerId!.toString()).to.eq(relayNode.peerId?.toString());
   });
 });
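The "gives up" case above is driven purely by the search budget: wanted = { filter: 1, relay: 1 } sums to 2 requested capabilities, so with errorTolerance = 5 the loop calls getNode at most 2 + 5 = 7 times. Since the stub only ever returns a relay node, the filter slot is never filled and the function returns the single relay peer it found.
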
@@ -1,44 +1,181 @@
-import type { IEnr } from "@waku/interfaces";
+import type { IEnr, NodeCapabilityCount, Waku2 } from "@waku/interfaces";
 import { Logger } from "@waku/utils";

 const log = new Logger("discovery:fetch_nodes");

 /**
- * Fetch nodes using passed [[getNode]] until it has been called [[maxGet]]
- * times, or it has returned empty or duplicate results more than [[maxErrors]]
- * times.
+ * Fetch nodes using passed [[getNode]] until all wanted capabilities are
+ * fulfilled or the number of [[getNode]] call exceeds the sum of
+ * [[wantedNodeCapabilityCount]] plus [[errorTolerance]].
  */
-export async function* fetchNodes(
-  getNode: () => Promise<IEnr | null>,
-  maxGet: number = 10,
-  maxErrors: number = 3
-): AsyncGenerator<IEnr> {
-  const peerNodeIds = new Set();
+export async function fetchNodesUntilCapabilitiesFulfilled(
+  wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
+  errorTolerance: number,
+  getNode: () => Promise<IEnr | null>
+): Promise<IEnr[]> {
+  const wanted = {
+    relay: wantedNodeCapabilityCount.relay ?? 0,
+    store: wantedNodeCapabilityCount.store ?? 0,
+    filter: wantedNodeCapabilityCount.filter ?? 0,
+    lightPush: wantedNodeCapabilityCount.lightPush ?? 0
+  };
+
+  const maxSearches =
+    wanted.relay + wanted.store + wanted.filter + wanted.lightPush;
+
+  const actual = {
+    relay: 0,
+    store: 0,
+    filter: 0,
+    lightPush: 0
+  };
+
   let totalSearches = 0;
-  let erroneousSearches = 0;
+  const peers: IEnr[] = [];

   while (
-    totalSearches < maxGet &&
-    erroneousSearches < maxErrors // Allows a couple of empty results before calling it quit
+    !isSatisfied(wanted, actual) &&
+    totalSearches < maxSearches + errorTolerance
   ) {
-    totalSearches++;
     const peer = await getNode();
-    if (!peer || !peer.nodeId) {
-      erroneousSearches++;
-      continue;
-    }
-
-    if (!peerNodeIds.has(peer.nodeId)) {
-      peerNodeIds.add(peer.nodeId);
+    if (peer && isNewPeer(peer, peers)) {
       // ENRs without a waku2 key are ignored.
       if (peer.waku2) {
-        yield peer;
+        if (helpsSatisfyCapabilities(peer.waku2, wanted, actual)) {
+          addCapabilities(peer.waku2, actual);
+          peers.push(peer);
+        }
       }
       log.info(
         `got new peer candidate from DNS address=${peer.nodeId}@${peer.ip}`
       );
     }
+
+    totalSearches++;
+  }
+  return peers;
+}
+
+/**
+ * Fetch nodes using passed [[getNode]] until all wanted capabilities are
+ * fulfilled or the number of [[getNode]] call exceeds the sum of
+ * [[wantedNodeCapabilityCount]] plus [[errorTolerance]].
+ */
+export async function* yieldNodesUntilCapabilitiesFulfilled(
+  wantedNodeCapabilityCount: Partial<NodeCapabilityCount>,
+  errorTolerance: number,
+  getNode: () => Promise<IEnr | null>
+): AsyncGenerator<IEnr> {
+  const wanted = {
+    relay: wantedNodeCapabilityCount.relay ?? 0,
+    store: wantedNodeCapabilityCount.store ?? 0,
+    filter: wantedNodeCapabilityCount.filter ?? 0,
+    lightPush: wantedNodeCapabilityCount.lightPush ?? 0
+  };
+
+  const maxSearches =
+    wanted.relay + wanted.store + wanted.filter + wanted.lightPush;
+
+  const actual = {
+    relay: 0,
+    store: 0,
+    filter: 0,
+    lightPush: 0
+  };
+
+  let totalSearches = 0;
+  const peerNodeIds = new Set();
+
+  while (
+    !isSatisfied(wanted, actual) &&
+    totalSearches < maxSearches + errorTolerance
+  ) {
+    const peer = await getNode();
+    if (peer && peer.nodeId && !peerNodeIds.has(peer.nodeId)) {
+      peerNodeIds.add(peer.nodeId);
+      // ENRs without a waku2 key are ignored.
+      if (peer.waku2) {
+        if (helpsSatisfyCapabilities(peer.waku2, wanted, actual)) {
+          addCapabilities(peer.waku2, actual);
+          yield peer;
+        }
+      }
+      log.info(
+        `got new peer candidate from DNS address=${peer.nodeId}@${peer.ip}`
+      );
+    }
+    totalSearches++;
   }
 }
+
+function isSatisfied(
+  wanted: NodeCapabilityCount,
+  actual: NodeCapabilityCount
+): boolean {
+  return (
+    actual.relay >= wanted.relay &&
+    actual.store >= wanted.store &&
+    actual.filter >= wanted.filter &&
+    actual.lightPush >= wanted.lightPush
+  );
+}
+
+function isNewPeer(peer: IEnr, peers: IEnr[]): boolean {
+  if (!peer.nodeId) return false;
+
+  for (const existingPeer of peers) {
+    if (peer.nodeId === existingPeer.nodeId) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+function addCapabilities(node: Waku2, total: NodeCapabilityCount): void {
+  if (node.relay) total.relay += 1;
+  if (node.store) total.store += 1;
+  if (node.filter) total.filter += 1;
+  if (node.lightPush) total.lightPush += 1;
+}
+
+/**
+ * Checks if the proposed ENR [[node]] helps satisfy the [[wanted]] capabilities,
+ * considering the [[actual]] capabilities of nodes retrieved so far..
+ *
+ * @throws If the function is called when the wanted capabilities are already fulfilled.
+ */
+function helpsSatisfyCapabilities(
+  node: Waku2,
+  wanted: NodeCapabilityCount,
+  actual: NodeCapabilityCount
+): boolean {
+  if (isSatisfied(wanted, actual)) {
+    throw "Internal Error: Waku2 wanted capabilities are already fulfilled";
+  }
+
+  const missing = missingCapabilities(wanted, actual);
+
+  return (
+    (missing.relay && node.relay) ||
+    (missing.store && node.store) ||
+    (missing.filter && node.filter) ||
+    (missing.lightPush && node.lightPush)
+  );
+}
+
+/**
+ * Return a [[Waku2]] Object for which capabilities are set to true if they are
+ * [[wanted]] yet missing from [[actual]].
+ */
+function missingCapabilities(
+  wanted: NodeCapabilityCount,
+  actual: NodeCapabilityCount
+): Waku2 {
+  return {
+    relay: actual.relay < wanted.relay,
+    store: actual.store < wanted.store,
+    filter: actual.filter < wanted.filter,
+    lightPush: actual.lightPush < wanted.lightPush
+  };
+}
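A minimal consumption sketch for the generator variant above (assuming an `IEnr`-producing `getNode` callback such as the stubs in the spec file, and the `IEnr` type imported from "@waku/interfaces"):

    const seen: IEnr[] = [];
    for await (const peer of yieldNodesUntilCapabilitiesFulfilled(
      { relay: 2 },
      10,
      getNode
    )) {
      seen.push(peer);
      // The caller may also break early; the generator then stops searching.
      if (seen.length === 1) break;
    }
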
@@ -1,14 +1,21 @@
+// DNS Discovery
 export { PeerDiscoveryDns, wakuDnsDiscovery } from "./dns/dns_discovery.js";
 export { enrTree } from "./dns/constants.js";
 export { DnsNodeDiscovery } from "./dns/dns.js";

+// Peer Exchange Discovery
+export {
+  wakuPeerExchange,
+  PeerExchangeCodec,
+  WakuPeerExchange
+} from "./peer-exchange/waku_peer_exchange.js";
 export {
   wakuPeerExchangeDiscovery,
-  PeerExchangeDiscovery,
-  PeerExchangeCodec
-} from "./peer-exchange/index.js";
+  PeerExchangeDiscovery
+} from "./peer-exchange/waku_peer_exchange_discovery.js";

+// Local Peer Cache Discovery
 export {
-  PeerCacheDiscovery,
-  wakuPeerCacheDiscovery
-} from "./peer-cache/index.js";
+  LocalPeerCacheDiscovery,
+  wakuLocalPeerCacheDiscovery
+} from "./local-peer-cache/index.js";
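Assuming the package is published as `@waku/discovery` (the directory is `packages/discovery`; the published name is not shown in this diff), consumers on the older branch would import the renamed symbols as:

    import {
      wakuDnsDiscovery,
      wakuLocalPeerCacheDiscovery,
      LocalPeerCacheDiscovery
    } from "@waku/discovery";
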
@@ -6,68 +6,70 @@ import { prefixLogger } from "@libp2p/logger";
 import { peerIdFromPrivateKey, peerIdFromString } from "@libp2p/peer-id";
 import { persistentPeerStore } from "@libp2p/peer-store";
 import { multiaddr } from "@multiformats/multiaddr";
-import { Libp2pComponents, PartialPeerInfo, PeerCache } from "@waku/interfaces";
+import { Libp2pComponents } from "@waku/interfaces";
+import { LocalStoragePeerInfo } from "@waku/interfaces";
 import chai, { expect } from "chai";
 import chaiAsPromised from "chai-as-promised";
 import { MemoryDatastore } from "datastore-core/memory";
 import sinon from "sinon";

-import { PeerCacheDiscovery } from "./index.js";
+import { LocalPeerCacheDiscovery } from "./index.js";

 chai.use(chaiAsPromised);

-const mockPeers: PartialPeerInfo[] = [
+if (typeof window === "undefined") {
+  try {
+    global.localStorage = {
+      store: {} as Record<string, string>,
+      getItem(key: string) {
+        return this.store[key] || null;
+      },
+      setItem(key: string, value: string) {
+        this.store[key] = value;
+      },
+      removeItem(key: string) {
+        delete this.store[key];
+      },
+      clear() {
+        this.store = {};
+      }
+    } as any;
+  } catch (error) {
+    console.error("Failed to load localStorage polyfill:", error);
+  }
+}
+
+const mockPeers = [
   {
     id: "16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrD",
-    multiaddrs: [
-      "/ip4/127.0.0.1/tcp/8000/wss/p2p/16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrD"
-    ]
+    address:
+      "/ip4/127.0.0.1/tcp/8000/ws/p2p/16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrD"
   },
   {
     id: "16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrE",
-    multiaddrs: [
-      "/ip4/127.0.0.1/tcp/8001/wss/p2p/16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrE"
-    ]
+    address:
+      "/ip4/127.0.0.1/tcp/8001/ws/p2p/16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrE"
   }
 ];

-class MockPeerCache implements PeerCache {
-  public data: PartialPeerInfo[] = [];
-  public throwOnGet = false;
-  public get(): PartialPeerInfo[] {
-    if (this.throwOnGet) {
-      throw new Error("cache get error");
-    }
-    return this.data;
-  }
-  public set(value: PartialPeerInfo[]): void {
-    this.data = value;
-  }
-  public remove(): void {
-    this.data = [];
-  }
-}
-
-async function setPeersInCache(
-  cache: MockPeerCache,
-  peers: PartialPeerInfo[]
+async function setPeersInLocalStorage(
+  peers: LocalStoragePeerInfo[]
 ): Promise<void> {
-  cache.set(peers);
+  localStorage.setItem("waku:peers", JSON.stringify(peers));
 }

-describe("Peer Cache Discovery", function () {
+describe("Local Storage Discovery", function () {
   this.timeout(25_000);
   let components: Libp2pComponents;
-  let mockCache: MockPeerCache;

   beforeEach(async function () {
-    mockCache = new MockPeerCache();
+    localStorage.clear();
     components = {
       peerStore: persistentPeerStore({
         events: new TypedEventEmitter(),
         peerId: await generateKeyPair("secp256k1").then(peerIdFromPrivateKey),
         datastore: new MemoryDatastore(),
-        logger: prefixLogger("peer_cache_discovery.spec.ts")
+        logger: prefixLogger("local_discovery.spec.ts")
       }),
       events: new TypedEventEmitter()
     } as unknown as Libp2pComponents;
@@ -75,24 +77,23 @@ describe("Peer Cache Discovery", function () {

   describe("Compliance Tests", function () {
     beforeEach(async function () {
-      mockCache = new MockPeerCache();
-      await setPeersInCache(mockCache, [mockPeers[0]]);
+      await setPeersInLocalStorage([mockPeers[0]]);
     });

     tests({
       async setup() {
-        return new PeerCacheDiscovery(components, { cache: mockCache });
+        return new LocalPeerCacheDiscovery(components);
       },
       async teardown() {}
     });
   });

   describe("Unit Tests", function () {
-    let discovery: PeerCacheDiscovery;
+    let discovery: LocalPeerCacheDiscovery;

     beforeEach(async function () {
-      discovery = new PeerCacheDiscovery(components, { cache: mockCache });
-      await setPeersInCache(mockCache, mockPeers);
+      discovery = new LocalPeerCacheDiscovery(components);
+      await setPeersInLocalStorage(mockPeers);
     });

     it("should load peers from local storage and dispatch events", async () => {
@@ -102,46 +103,43 @@ describe("Peer Cache Discovery", function () {

       expect(dispatchEventSpy.calledWith(sinon.match.has("type", "peer"))).to.be
         .true;
-
-      const dispatchedIds = dispatchEventSpy
-        .getCalls()
-        .map((c) => (c.args[0] as CustomEvent<any>).detail?.id?.toString?.())
-        .filter(Boolean);
-
       mockPeers.forEach((mockPeer) => {
-        expect(dispatchedIds).to.include(mockPeer.id);
+        expect(
+          dispatchEventSpy.calledWith(
+            sinon.match.hasNested("detail.id", mockPeer.id)
+          )
+        ).to.be.true;
       });
     });

-    it("should update peers in cache on 'peer:identify' event", async () => {
-      await discovery.start();
-
-      const newPeerIdentifyEvent = new CustomEvent<IdentifyResult>(
-        "peer:identify",
-        {
-          detail: {
-            peerId: peerIdFromString(mockPeers[1].id.toString()),
-            listenAddrs: [multiaddr(mockPeers[1].multiaddrs[0])]
-          } as IdentifyResult
-        }
-      );
-
-      components.events.dispatchEvent(newPeerIdentifyEvent);
-
-      expect(mockCache.get()).to.deep.include({
-        id: mockPeers[1].id,
-        multiaddrs: [mockPeers[1].multiaddrs[0]]
-      });
-    });
-
-    it("should handle cache.get errors gracefully", async () => {
-      mockCache.throwOnGet = true;
+    it("should update peers in local storage on 'peer:identify' event", async () => {
+      const newPeerIdentifyEvent = {
+        detail: {
+          peerId: peerIdFromString(mockPeers[1].id.toString()),
+          listenAddrs: [multiaddr(mockPeers[1].address)]
+        }
+      } as CustomEvent<IdentifyResult>;
+
+      // Directly invoke handleNewPeers to simulate receiving an 'identify' event
+      discovery.handleNewPeers(newPeerIdentifyEvent);
+
+      const updatedPeers = JSON.parse(
+        localStorage.getItem("waku:peers") || "[]"
+      );
+      expect(updatedPeers).to.deep.include({
+        id: newPeerIdentifyEvent.detail.peerId.toString(),
+        address: newPeerIdentifyEvent.detail.listenAddrs[0].toString()
+      });
+    });
+
+    it("should handle corrupted local storage data gracefully", async () => {
+      localStorage.setItem("waku:peers", "not-a-valid-json");

       try {
         await discovery.start();
       } catch (error) {
         expect.fail(
-          "start() should not have thrown an error when cache.get throws"
+          "start() should not have thrown an error for corrupted local storage data"
         );
       }
     });
packages/discovery/src/local-peer-cache/index.ts (new file, 163 lines)
@@ -0,0 +1,163 @@
+import { TypedEventEmitter } from "@libp2p/interface";
+import {
+  IdentifyResult,
+  PeerDiscovery,
+  PeerDiscoveryEvents,
+  PeerInfo,
+  Startable
+} from "@libp2p/interface";
+import { peerIdFromString } from "@libp2p/peer-id";
+import { multiaddr } from "@multiformats/multiaddr";
+import {
+  type Libp2pComponents,
+  type LocalStoragePeerInfo,
+  Tags
+} from "@waku/interfaces";
+import { getWsMultiaddrFromMultiaddrs, Logger } from "@waku/utils";
+
+const log = new Logger("local-cache-discovery");
+
+type LocalPeerCacheDiscoveryOptions = {
+  tagName?: string;
+  tagValue?: number;
+  tagTTL?: number;
+};
+
+const DEFAULT_LOCAL_TAG_NAME = Tags.LOCAL;
+const DEFAULT_LOCAL_TAG_VALUE = 50;
+const DEFAULT_LOCAL_TAG_TTL = 100_000_000;
+
+export class LocalPeerCacheDiscovery
+  extends TypedEventEmitter<PeerDiscoveryEvents>
+  implements PeerDiscovery, Startable
+{
+  private isStarted: boolean;
+  private peers: LocalStoragePeerInfo[] = [];
+
+  public constructor(
+    private readonly components: Libp2pComponents,
+    private readonly options?: LocalPeerCacheDiscoveryOptions
+  ) {
+    super();
+    this.isStarted = false;
+    this.peers = this.getPeersFromLocalStorage();
+  }
+
+  public get [Symbol.toStringTag](): string {
+    return "@waku/local-peer-cache-discovery";
+  }
+
+  public async start(): Promise<void> {
+    if (this.isStarted) return;
+
+    log.info("Starting Local Storage Discovery");
+    this.components.events.addEventListener(
+      "peer:identify",
+      this.handleNewPeers
+    );
+
+    for (const { id: idStr, address } of this.peers) {
+      const peerId = peerIdFromString(idStr);
+      if (await this.components.peerStore.has(peerId)) continue;
+
+      await this.components.peerStore.save(peerId, {
+        multiaddrs: [multiaddr(address)],
+        tags: {
+          [this.options?.tagName ?? DEFAULT_LOCAL_TAG_NAME]: {
+            value: this.options?.tagValue ?? DEFAULT_LOCAL_TAG_VALUE,
+            ttl: this.options?.tagTTL ?? DEFAULT_LOCAL_TAG_TTL
+          }
+        }
+      });
+
+      this.dispatchEvent(
+        new CustomEvent<PeerInfo>("peer", {
+          detail: {
+            id: peerId,
+            multiaddrs: [multiaddr(address)]
+          }
+        })
+      );
+    }
+
+    log.info(`Discovered ${this.peers.length} peers`);
+
+    this.isStarted = true;
+  }
+
+  public stop(): void | Promise<void> {
+    if (!this.isStarted) return;
+    log.info("Stopping Local Storage Discovery");
+    this.components.events.removeEventListener(
+      "peer:identify",
+      this.handleNewPeers
+    );
+    this.isStarted = false;
+
+    this.savePeersToLocalStorage();
+  }
+
+  public handleNewPeers = (event: CustomEvent<IdentifyResult>): void => {
+    const { peerId, listenAddrs } = event.detail;
+
+    const websocketMultiaddr = getWsMultiaddrFromMultiaddrs(listenAddrs);
+
+    const localStoragePeers = this.getPeersFromLocalStorage();
+
+    const existingPeerIndex = localStoragePeers.findIndex(
+      (_peer) => _peer.id === peerId.toString()
+    );
+
+    if (existingPeerIndex >= 0) {
+      localStoragePeers[existingPeerIndex].address =
+        websocketMultiaddr.toString();
+    } else {
+      localStoragePeers.push({
+        id: peerId.toString(),
+        address: websocketMultiaddr.toString()
+      });
+    }
+
+    this.peers = localStoragePeers;
+    this.savePeersToLocalStorage();
+  };
+
+  private getPeersFromLocalStorage(): LocalStoragePeerInfo[] {
+    try {
+      const storedPeersData = localStorage.getItem("waku:peers");
+      if (!storedPeersData) return [];
+      const peers = JSON.parse(storedPeersData);
+      return peers.filter(isValidStoredPeer);
+    } catch (error) {
+      log.error("Error parsing peers from local storage:", error);
+      return [];
+    }
+  }
+
+  private savePeersToLocalStorage(): void {
+    try {
+      localStorage.setItem("waku:peers", JSON.stringify(this.peers));
+    } catch (error) {
+      log.error("Error saving peers to local storage:", error);
+    }
+  }
+}
+
+function isValidStoredPeer(peer: any): peer is LocalStoragePeerInfo {
+  return (
+    peer &&
+    typeof peer === "object" &&
+    typeof peer.id === "string" &&
+    typeof peer.address === "string"
+  );
+}
+
+export function wakuLocalPeerCacheDiscovery(): (
+  components: Libp2pComponents,
+  options?: LocalPeerCacheDiscoveryOptions
+) => LocalPeerCacheDiscovery {
+  return (
+    components: Libp2pComponents,
+    options?: LocalPeerCacheDiscoveryOptions
+  ) => new LocalPeerCacheDiscovery(components, options);
+}
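A minimal usage sketch for the factory at the end of the file (the tag options are the ones defined above; obtaining `components` from a running libp2p node is an assumption):

    const factory = wakuLocalPeerCacheDiscovery();
    // libp2p calls the factory with its components; options may override the tag defaults:
    const discovery = factory(components, { tagTTL: 60_000 });
    await discovery.start();
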
@@ -1,4 +0,0 @@
-import { Tags } from "@waku/interfaces";
-
-export const DEFAULT_PEER_CACHE_TAG_NAME = Tags.PEER_CACHE;
-export const DEFAULT_PEER_CACHE_TAG_VALUE = 50;
@@ -1 +0,0 @@
-export { wakuPeerCacheDiscovery, PeerCacheDiscovery } from "./peer_cache.js";
@@ -1,152 +0,0 @@
-import { TypedEventEmitter } from "@libp2p/interface";
-import {
-  IdentifyResult,
-  PeerDiscovery,
-  PeerDiscoveryEvents,
-  PeerInfo,
-  Startable
-} from "@libp2p/interface";
-import { peerIdFromString } from "@libp2p/peer-id";
-import { multiaddr } from "@multiformats/multiaddr";
-import type {
-  Libp2pComponents,
-  PartialPeerInfo,
-  PeerCache,
-  PeerCacheDiscoveryOptions
-} from "@waku/interfaces";
-import { Logger } from "@waku/utils";
-
-import {
-  DEFAULT_PEER_CACHE_TAG_NAME,
-  DEFAULT_PEER_CACHE_TAG_VALUE
-} from "./constants.js";
-import { defaultCache } from "./utils.js";
-
-const log = new Logger("peer-cache");
-
-export class PeerCacheDiscovery
-  extends TypedEventEmitter<PeerDiscoveryEvents>
-  implements PeerDiscovery, Startable
-{
-  private isStarted: boolean = false;
-  private readonly cache: PeerCache;
-
-  public constructor(
-    private readonly components: Libp2pComponents,
-    options?: Partial<PeerCacheDiscoveryOptions>
-  ) {
-    super();
-    this.cache = options?.cache ?? defaultCache();
-  }
-
-  public get [Symbol.toStringTag](): string {
-    return `@waku/${DEFAULT_PEER_CACHE_TAG_NAME}`;
-  }
-
-  public async start(): Promise<void> {
-    if (this.isStarted) {
-      return;
-    }
-
-    log.info("Starting Peer Cache Discovery");
-
-    this.components.events.addEventListener(
-      "peer:identify",
-      this.handleDiscoveredPeer
-    );
-
-    await this.discoverPeers();
-
-    this.isStarted = true;
-  }
-
-  public stop(): void | Promise<void> {
-    if (!this.isStarted) {
-      return;
-    }
-
-    log.info("Stopping Peer Cache Discovery");
-
-    this.components.events.removeEventListener(
-      "peer:identify",
-      this.handleDiscoveredPeer
-    );
-
-    this.isStarted = false;
-  }
-
-  private handleDiscoveredPeer = (event: CustomEvent<IdentifyResult>): void => {
-    const { peerId, listenAddrs } = event.detail;
-    const multiaddrs = listenAddrs.map((addr) => addr.toString());
-
-    const peerIdStr = peerId.toString();
-    const knownPeers = this.readPeerInfoFromCache();
-    const peerIndex = knownPeers.findIndex((p) => p.id === peerIdStr);
-
-    if (peerIndex !== -1) {
-      knownPeers[peerIndex].multiaddrs = multiaddrs;
-    } else {
-      knownPeers.push({
-        id: peerIdStr,
-        multiaddrs
-      });
-    }
-
-    this.writePeerInfoToCache(knownPeers);
-  };
-
-  private async discoverPeers(): Promise<void> {
-    const knownPeers = this.readPeerInfoFromCache();
-
-    for (const peer of knownPeers) {
-      const peerId = peerIdFromString(peer.id);
-      const multiaddrs = peer.multiaddrs.map((addr) => multiaddr(addr));
-
-      if (await this.components.peerStore.has(peerId)) {
-        continue;
-      }
-
-      await this.components.peerStore.save(peerId, {
-        multiaddrs,
-        tags: {
-          [DEFAULT_PEER_CACHE_TAG_NAME]: {
-            value: DEFAULT_PEER_CACHE_TAG_VALUE
-          }
-        }
-      });
-
-      this.dispatchEvent(
-        new CustomEvent<PeerInfo>("peer", {
-          detail: {
-            id: peerId,
-            multiaddrs
-          }
-        })
-      );
-    }
-  }
-
-  private readPeerInfoFromCache(): PartialPeerInfo[] {
-    try {
-      return this.cache.get();
-    } catch (error) {
-      log.error("Error parsing peers from cache:", error);
-      return [];
-    }
-  }
-
-  private writePeerInfoToCache(peers: PartialPeerInfo[]): void {
-    try {
-      this.cache.set(peers);
-    } catch (error) {
-      log.error("Error saving peers to cache:", error);
-    }
-  }
-}
-
-export function wakuPeerCacheDiscovery(
-  options: Partial<PeerCacheDiscoveryOptions> = {}
-): (components: Libp2pComponents) => PeerCacheDiscovery {
-  return (components: Libp2pComponents) =>
-    new PeerCacheDiscovery(components, options);
-}
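On the master side, the `cache` option makes the storage backend pluggable. A minimal custom backend sketch; the get/set/remove shape of `PeerCache` is taken from the removed code above and the spec's MockPeerCache, while the `@waku/discovery` package name is an assumption:

    import type { PartialPeerInfo, PeerCache } from "@waku/interfaces";
    import { wakuPeerCacheDiscovery } from "@waku/discovery";

    // An in-memory backend with the same shape as LocalStorageCache.
    class MemoryPeerCache implements PeerCache {
      private data: PartialPeerInfo[] = [];
      public get(): PartialPeerInfo[] {
        return this.data;
      }
      public set(value: PartialPeerInfo[]): void {
        this.data = value;
      }
      public remove(): void {
        this.data = [];
      }
    }

    // Passed via the options bag, it replaces defaultCache():
    const createDiscovery = wakuPeerCacheDiscovery({ cache: new MemoryPeerCache() });
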
@@ -1,73 +0,0 @@
-import type { PartialPeerInfo, PeerCache } from "@waku/interfaces";
-
-const isValidStoredPeer = (peer: unknown): boolean => {
-  return (
-    !!peer &&
-    typeof peer === "object" &&
-    "id" in peer &&
-    typeof peer.id === "string" &&
-    "multiaddrs" in peer &&
-    Array.isArray(peer.multiaddrs)
-  );
-};
-
-/**
- * A noop cache that will be used in environments where localStorage is not available.
- */
-class NoopCache implements PeerCache {
-  public get(): PartialPeerInfo[] {
-    return [];
-  }
-
-  public set(_value: PartialPeerInfo[]): void {
-    return;
-  }
-
-  public remove(): void {
-    return;
-  }
-}
-
-/**
- * A cache that uses localStorage to store peer information.
- */
-class LocalStorageCache implements PeerCache {
-  public get(): PartialPeerInfo[] {
-    try {
-      const cachedPeers = localStorage.getItem("waku:peers");
-      const peers = cachedPeers ? JSON.parse(cachedPeers) : [];
-
-      return peers.filter(isValidStoredPeer);
-    } catch (e) {
-      return [];
-    }
-  }
-
-  public set(_value: PartialPeerInfo[]): void {
-    try {
-      localStorage.setItem("waku:peers", JSON.stringify(_value));
-    } catch (e) {
-      // ignore
-    }
-  }
-
-  public remove(): void {
-    try {
-      localStorage.removeItem("waku:peers");
-    } catch (e) {
-      // ignore
-    }
-  }
-}
-
-export const defaultCache = (): PeerCache => {
-  try {
-    if (typeof localStorage !== "undefined") {
-      return new LocalStorageCache();
-    }
-  } catch (_e) {
-    // ignore
-  }
-
-  return new NoopCache();
-};
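The selection logic above degrades gracefully across environments; a short illustrative sketch of the two behaviors:

    const cache = defaultCache();
    // Browser: a LocalStorageCache persisting under the "waku:peers" key.
    // Node.js without a localStorage global: a NoopCache, so get() is always [].
    cache.set([
      { id: "16Uiu2HAm4v86W3bmT1BiH6oSPzcsSr24iDQpSN5Qa992BCjjwgrD", multiaddrs: [] }
    ]);
    const peers = cache.get(); // [] under NoopCache, the stored entry otherwise
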
@@ -1,10 +0,0 @@
-import { Tags } from "@waku/interfaces";
-
-// amount of peers available per specification
-export const DEFAULT_PEER_EXCHANGE_REQUEST_NODES = 60;
-
-export const DEFAULT_PEER_EXCHANGE_TAG_NAME = Tags.PEER_EXCHANGE;
-export const DEFAULT_PEER_EXCHANGE_TAG_VALUE = 50;
-export const DEFAULT_PEER_EXCHANGE_TAG_TTL = 30_000;
-
-export const PeerExchangeCodec = "/vac/waku/peer-exchange/2.0.0-alpha1";
@@ -1,5 +1,10 @@
+export {
+  wakuPeerExchange,
+  PeerExchangeCodec,
+  WakuPeerExchange
+} from "./waku_peer_exchange.js";
 export {
   wakuPeerExchangeDiscovery,
-  PeerExchangeDiscovery
-} from "./peer_exchange_discovery.js";
-export { PeerExchangeCodec } from "./constants.js";
+  PeerExchangeDiscovery,
+  Options
+} from "./waku_peer_exchange_discovery.js";
@@ -1,321 +0,0 @@
import { EnrDecoder } from "@waku/enr";
import { ProtocolError } from "@waku/interfaces";
import { expect } from "chai";
import sinon from "sinon";

import { PeerExchange } from "./peer_exchange.js";
import { PeerExchangeRPC } from "./rpc.js";

describe("PeerExchange", () => {
  let peerExchange: PeerExchange;
  let mockComponents: any;
  let mockStreamManager: any;
  let mockPeerStore: any;
  let mockStream: any;
  let mockPeerId: any;

  beforeEach(() => {
    mockPeerId = {
      toString: () => "test-peer-id",
      equals: (other: any) => other && other.toString() === "test-peer-id"
    };

    mockStream = {
      sink: sinon.stub(),
      source: (async function* () {
        const data = new Uint8Array([0, 0, 0, 4, 1, 2, 3, 4]);
        yield data;
      })()
    };

    mockStreamManager = {
      getStream: sinon.stub().resolves(mockStream)
    };

    mockPeerStore = {
      has: sinon.stub().resolves(true)
    };

    mockComponents = {
      peerStore: mockPeerStore,
      events: {
        addEventListener: sinon.stub(),
        removeEventListener: sinon.stub()
      }
    };

    peerExchange = new PeerExchange(mockComponents as any);

    (peerExchange as any).streamManager = mockStreamManager;
  });

  afterEach(() => {
    sinon.restore();
  });

  describe("constructor", () => {
    it("should initialize with libp2p components", () => {
      const components = {
        peerStore: {},
        events: {
          addEventListener: sinon.stub(),
          removeEventListener: sinon.stub()
        }
      } as any;
      const instance = new PeerExchange(components);
      expect(instance).to.be.instanceOf(PeerExchange);
    });
  });

  describe("query", () => {
    let queryParams: any;

    beforeEach(() => {
      queryParams = {
        numPeers: 5,
        peerId: mockPeerId
      };
    });

    it("should successfully query peers and return peer infos", async () => {
      const mockResponse = {
        peerInfos: [
          { enr: new Uint8Array([1, 2, 3]) },
          { enr: new Uint8Array([4, 5, 6]) }
        ]
      };

      const mockRpcResponse = {
        response: mockResponse
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);

      const mockEnr = { toString: () => "mock-enr" };
      sinon.stub(EnrDecoder, "fromRLP").resolves(mockEnr as any);

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.be.null;
      expect(result.peerInfos).to.have.length(2);
      expect(result.peerInfos![0]).to.have.property("ENR");
      expect(result.peerInfos![1]).to.have.property("ENR");
    });

    it("should handle empty peer infos gracefully", async () => {
      const mockResponse = {
        peerInfos: []
      };

      const mockRpcResponse = {
        response: mockResponse
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.be.null;
      expect(result.peerInfos).to.have.length(0);
    });

    it("should filter out undefined ENRs", async () => {
      const mockResponse = {
        peerInfos: [
          { enr: new Uint8Array([1, 2, 3]) },
          { enr: undefined },
          { enr: new Uint8Array([4, 5, 6]) }
        ]
      };

      const mockRpcResponse = {
        response: mockResponse
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);

      const mockEnr = { toString: () => "mock-enr" };
      sinon.stub(EnrDecoder, "fromRLP").resolves(mockEnr as any);

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.be.null;
      expect(result.peerInfos).to.have.length(2);
    });

    it("should return NO_PEER_AVAILABLE when peer is not in peer store", async () => {
      mockPeerStore.has.resolves(false);

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.equal(ProtocolError.NO_PEER_AVAILABLE);
      expect(result.peerInfos).to.be.null;
    });

    it("should return NO_STREAM_AVAILABLE when stream creation fails", async () => {
      mockStreamManager.getStream.returns(undefined);

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.equal(ProtocolError.NO_STREAM_AVAILABLE);
      expect(result.peerInfos).to.be.null;
    });

    it("should return EMPTY_PAYLOAD when response field is missing", async () => {
      const mockRpcResponse = {
        response: undefined
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.equal(ProtocolError.EMPTY_PAYLOAD);
      expect(result.peerInfos).to.be.null;
    });

    it("should return DECODE_FAILED when RPC decode fails", async () => {
      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").throws(new Error("Decode failed"));

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.equal(ProtocolError.DECODE_FAILED);
      expect(result.peerInfos).to.be.null;
    });

    it("should return DECODE_FAILED when ENR decoding fails", async () => {
      const mockResponse = {
        peerInfos: [{ enr: new Uint8Array([1, 2, 3]) }]
      };

      const mockRpcResponse = {
        response: mockResponse
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);
      sinon.stub(EnrDecoder, "fromRLP").rejects(new Error("ENR decode failed"));

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.equal(ProtocolError.DECODE_FAILED);
      expect(result.peerInfos).to.be.null;
    });

    it("should handle malformed response data", async () => {
      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);

      sinon.stub(PeerExchangeRPC, "decode").throws(new Error("Malformed data"));

      const result = await peerExchange.query(queryParams);

      expect(result.error).to.equal(ProtocolError.DECODE_FAILED);
      expect(result.peerInfos).to.be.null;
    });

    it("should handle large number of peers request", async () => {
      const largeQueryParams = {
        numPeers: 1000,
        peerId: mockPeerId
      };

      const mockResponse = {
        peerInfos: Array(1000).fill({ enr: new Uint8Array([1, 2, 3]) })
      };

      const mockRpcResponse = {
        response: mockResponse
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);

      const mockEnr = { toString: () => "mock-enr" };
      sinon.stub(EnrDecoder, "fromRLP").resolves(mockEnr as any);

      const result = await peerExchange.query(largeQueryParams);

      expect(result.error).to.be.null;
      expect(result.peerInfos).to.have.length(1000);
    });

    it("should handle zero peers request", async () => {
      const zeroQueryParams = {
        numPeers: 0,
        peerId: mockPeerId
      };

      const mockResponse = {
        peerInfos: []
      };

      const mockRpcResponse = {
        response: mockResponse
      };

      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      sinon.stub(PeerExchangeRPC, "createRequest").returns(mockRpcQuery as any);
      sinon.stub(PeerExchangeRPC, "decode").returns(mockRpcResponse as any);

      const result = await peerExchange.query(zeroQueryParams);

      expect(result.error).to.be.null;
      expect(result.peerInfos).to.have.length(0);
    });

    it("should create RPC request with correct parameters", async () => {
      const mockRpcQuery = {
        encode: sinon.stub().returns(new Uint8Array([1, 2, 3]))
      };
      const createRequestStub = sinon
        .stub(PeerExchangeRPC, "createRequest")
        .returns(mockRpcQuery as any);
      sinon
        .stub(PeerExchangeRPC, "decode")
        .returns({ response: { peerInfos: [] } } as any);

      await peerExchange.query(queryParams);

      expect(createRequestStub.calledOnce).to.be.true;
      expect(createRequestStub.firstCall.args[0]).to.deep.equal({
        numPeers: BigInt(queryParams.numPeers)
      });
    });

    it("should create PeerExchange instance with components", () => {
      const instance = new PeerExchange(mockComponents as any);
      expect(instance).to.be.instanceOf(PeerExchange);
    });
  });
});
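The suite above pins down the `query()` contract: it always resolves with a result object `{ error, peerInfos }` rather than throwing, and exactly one of the two fields is non-null. A caller sketch under that contract; `peerExchange` and `remotePeerId` are placeholders assumed to be in scope, not names from this diff:

import { ProtocolError } from "@waku/interfaces";

// Placeholder inputs: an instantiated PeerExchange and a known peer id.
const { error, peerInfos } = await peerExchange.query({
  numPeers: 5,
  peerId: remotePeerId
});

if (error !== null) {
  // One of the ProtocolError values exercised above: NO_PEER_AVAILABLE,
  // NO_STREAM_AVAILABLE, EMPTY_PAYLOAD or DECODE_FAILED.
  if (error === ProtocolError.NO_PEER_AVAILABLE) {
    console.error("no connected peer supports peer exchange");
  } else {
    console.error("peer exchange query failed:", error);
  }
} else {
  for (const { ENR } of peerInfos!) {
    console.log("peer ENR:", ENR?.toString());
  }
}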
@@ -1,386 +0,0 @@
import { TypedEventEmitter } from "@libp2p/interface";
import { peerDiscoverySymbol as symbol } from "@libp2p/interface";
import type {
  IdentifyResult,
  PeerDiscoveryEvents,
  PeerId
} from "@libp2p/interface";
import {
  type IPeerExchange,
  type Libp2pComponents,
  ProtocolError
} from "@waku/interfaces";
import { expect } from "chai";
import sinon from "sinon";

import { PeerExchangeCodec } from "./constants.js";
import {
  PeerExchangeDiscovery,
  wakuPeerExchangeDiscovery
} from "./peer_exchange_discovery.js";

describe("PeerExchangeDiscovery", () => {
  let peerExchangeDiscovery: PeerExchangeDiscovery;
  let mockComponents: Libp2pComponents;
  let mockEvents: TypedEventEmitter<PeerDiscoveryEvents>;
  let mockConnectionManager: any;
  let mockPeerStore: any;
  let mockPeerId: PeerId;

  let clock: sinon.SinonFakeTimers;

  beforeEach(() => {
    clock = sinon.useFakeTimers();

    mockPeerId = {
      toString: sinon.stub().returns("peer-id-1"),
      toBytes: sinon.stub().returns(new Uint8Array([1, 2, 3]))
    } as unknown as PeerId;

    mockEvents = new TypedEventEmitter<PeerDiscoveryEvents>();
    mockConnectionManager = {
      getConnections: sinon.stub().returns([{ remotePeer: mockPeerId }])
    };
    mockPeerStore = {
      get: sinon.stub().resolves({
        id: mockPeerId,
        protocols: [PeerExchangeCodec]
      }),
      merge: sinon.stub().resolves(undefined),
      has: sinon.stub().resolves(true)
    };

    mockComponents = {
      events: mockEvents,
      connectionManager: mockConnectionManager,
      peerStore: mockPeerStore
    } as unknown as Libp2pComponents;

    peerExchangeDiscovery = new PeerExchangeDiscovery(mockComponents, {});
  });

  afterEach(() => {
    clock.restore();
    sinon.restore();
  });

  describe("constructor", () => {
    it("should initialize with default options", () => {
      const discovery = new PeerExchangeDiscovery(mockComponents);
      expect(discovery).to.be.instanceOf(PeerExchangeDiscovery);
      expect(discovery[symbol]).to.be.true;
      expect(discovery[Symbol.toStringTag]).to.equal("@waku/peer-exchange");
    });

    it("should initialize with custom TTL", () => {
      const customTTL = 60000;
      const discovery = new PeerExchangeDiscovery(mockComponents, {
        TTL: customTTL
      });
      expect(discovery).to.be.instanceOf(PeerExchangeDiscovery);
    });
  });

  describe("start", () => {
    it("should start peer exchange discovery", () => {
      const addEventListenerSpy = sinon.spy(mockEvents, "addEventListener");

      peerExchangeDiscovery.start();

      expect(addEventListenerSpy.called).to.be.true;
    });

    it("should not start if already started", () => {
      const addEventListenerSpy = sinon.spy(mockEvents, "addEventListener");

      peerExchangeDiscovery.start();
      peerExchangeDiscovery.start();

      expect(addEventListenerSpy.calledOnce).to.be.true;
    });
  });

  describe("stop", () => {
    it("should stop peer exchange discovery", () => {
      const removeEventListenerSpy = sinon.spy(
        mockEvents,
        "removeEventListener"
      );

      peerExchangeDiscovery.start();
      peerExchangeDiscovery.stop();

      expect(removeEventListenerSpy.called).to.be.true;
    });

    it("should not stop if not started", () => {
      const removeEventListenerSpy = sinon.spy(
        mockEvents,
        "removeEventListener"
      );

      peerExchangeDiscovery.stop();

      expect(removeEventListenerSpy.called).to.be.false;
    });
  });

  describe("handleDiscoveredPeer", () => {
    beforeEach(() => {
      peerExchangeDiscovery.start();
    });

    it("should handle peer identify event", async () => {
      const runQuerySpy = sinon.spy(peerExchangeDiscovery as any, "runQuery");
      const mockIdentifyResult: IdentifyResult = {
        peerId: mockPeerId,
        protocols: [PeerExchangeCodec],
        listenAddrs: [],
        connection: {} as any
      };

      const event = new CustomEvent<IdentifyResult>("peer:identify", {
        detail: mockIdentifyResult
      });

      await peerExchangeDiscovery["handleDiscoveredPeer"](event);

      expect(runQuerySpy.called).to.be.true;
    });

    it("should skip peers without peer exchange protocol", async () => {
      const mockIdentifyResult: IdentifyResult = {
        peerId: mockPeerId,
        protocols: ["other-protocol"],
        listenAddrs: [],
        connection: {} as any
      };

      const event = new CustomEvent<IdentifyResult>("peer:identify", {
        detail: mockIdentifyResult
      });

      await peerExchangeDiscovery["handleDiscoveredPeer"](event);

      expect(mockPeerStore.get.called).to.be.false;
    });
  });

  describe("handlePeriodicDiscovery", () => {
    beforeEach(() => {
      peerExchangeDiscovery.start();
    });

    it("should query peers that support peer exchange", async () => {
      await peerExchangeDiscovery["handlePeriodicDiscovery"]();

      expect(mockConnectionManager.getConnections.called).to.be.true;
      expect(mockPeerStore.get.called).to.be.true;
    });

    it("should skip peers that don't support peer exchange", async () => {
      mockPeerStore.get.resolves({
        id: mockPeerId,
        protocols: ["other-protocol"]
      });

      await peerExchangeDiscovery["handlePeriodicDiscovery"]();

      expect(mockConnectionManager.getConnections.called).to.be.true;
      expect(mockPeerStore.get.called).to.be.true;
    });

    it("should handle peer store errors gracefully", async () => {
      mockPeerStore.get.rejects(new Error("Peer store error"));

      await peerExchangeDiscovery["handlePeriodicDiscovery"]();

      expect(mockConnectionManager.getConnections.called).to.be.true;
    });

    it("should skip peers that were recently queried", async () => {
      const peerIdStr = mockPeerId.toString();
      peerExchangeDiscovery["peerExpirationRecords"].set(
        peerIdStr,
        Date.now() + 10000
      );

      await peerExchangeDiscovery["handlePeriodicDiscovery"]();

      expect(mockPeerStore.get.called).to.be.false;
    });
  });

  describe("runQuery", () => {
    beforeEach(() => {
      peerExchangeDiscovery.start();
    });

    it("should query peer with peer exchange protocol", async () => {
      const querySpy = sinon.spy(peerExchangeDiscovery as any, "query");
      await peerExchangeDiscovery["runQuery"](mockPeerId, [PeerExchangeCodec]);

      expect(querySpy.called).to.be.true;
    });

    it("should skip peers without peer exchange protocol", async () => {
      const querySpy = sinon.spy(peerExchangeDiscovery as any, "query");
      await peerExchangeDiscovery["runQuery"](mockPeerId, ["other-protocol"]);

      expect(querySpy.called).to.be.false;
    });

    it("should skip already querying peers", async () => {
      peerExchangeDiscovery["queryingPeers"].add(mockPeerId.toString());
      const querySpy = sinon.spy(peerExchangeDiscovery as any, "query");

      await peerExchangeDiscovery["runQuery"](mockPeerId, [PeerExchangeCodec]);

      expect(querySpy.called).to.be.false;
    });

    it("should handle query errors gracefully", async () => {
      const queryStub = sinon
        .stub(peerExchangeDiscovery as any, "query")
        .rejects(new Error("Query failed"));

      await peerExchangeDiscovery["runQuery"](mockPeerId, [PeerExchangeCodec]);

      expect(queryStub.called).to.be.true;
      expect(peerExchangeDiscovery["queryingPeers"].has(mockPeerId.toString()))
        .to.be.false;
    });
  });

  describe("query", () => {
    beforeEach(() => {
      peerExchangeDiscovery.start();
    });

    it("should process successful peer exchange query", async () => {
      const mockENR = {
        peerInfo: {
          id: mockPeerId,
          multiaddrs: []
        },
        shardInfo: { clusterId: 1, shards: [1] }
      };

      const internalPeerExchange = (peerExchangeDiscovery as any)[
        "peerExchange"
      ] as IPeerExchange;
      sinon.stub(internalPeerExchange, "query").resolves({
        peerInfos: [{ ENR: mockENR as any }],
        error: null
      });

      const dispatchEventSpy = sinon.spy(
        peerExchangeDiscovery,
        "dispatchEvent"
      );

      await peerExchangeDiscovery["query"](mockPeerId);

      expect(mockPeerStore.merge.called).to.be.true;
      expect(dispatchEventSpy.called).to.be.true;
    });

    it("should handle query errors", async () => {
      const internalPeerExchange = (peerExchangeDiscovery as any)[
        "peerExchange"
      ] as IPeerExchange;
      sinon.stub(internalPeerExchange, "query").resolves({
        peerInfos: null,
        error: ProtocolError.NO_PEER_AVAILABLE
      });

      await peerExchangeDiscovery["query"](mockPeerId);

      expect(mockPeerStore.merge.called).to.be.false;
    });

    it("should skip peers without ENR", async () => {
      const internalPeerExchange = (peerExchangeDiscovery as any)[
        "peerExchange"
      ] as IPeerExchange;
      sinon.stub(internalPeerExchange, "query").resolves({
        peerInfos: [{ ENR: undefined }],
        error: null
      });

      await peerExchangeDiscovery["query"](mockPeerId);

      expect(mockPeerStore.merge.called).to.be.false;
    });

    it("should skip peers without peerInfo in ENR", async () => {
      const internalPeerExchange = (peerExchangeDiscovery as any)[
        "peerExchange"
      ] as IPeerExchange;
      sinon.stub(internalPeerExchange, "query").resolves({
        peerInfos: [{ ENR: { peerInfo: undefined } as any }],
        error: null
      });

      await peerExchangeDiscovery["query"](mockPeerId);

      expect(mockPeerStore.merge.called).to.be.false;
    });

    it("should handle ENR without shardInfo", async () => {
      const mockENRWithoutShard = {
        peerInfo: {
          id: mockPeerId,
          multiaddrs: []
        }
      };

      const internalPeerExchange = (peerExchangeDiscovery as any)[
        "peerExchange"
      ] as IPeerExchange;
      sinon.stub(internalPeerExchange, "query").resolves({
        peerInfos: [{ ENR: mockENRWithoutShard as any }],
        error: null
      });

      await peerExchangeDiscovery["query"](mockPeerId);

      expect(mockPeerStore.merge.called).to.be.true;
    });
  });

  describe("continuous discovery interval", () => {
    it("should start periodic discovery on start", () => {
      const setIntervalSpy = sinon.spy(global, "setInterval");

      peerExchangeDiscovery.start();

      expect(setIntervalSpy.called).to.be.true;
    });

    it("should clear interval on stop", () => {
      const clearIntervalSpy = sinon.spy(global, "clearInterval");

      peerExchangeDiscovery.start();
      peerExchangeDiscovery.stop();

      expect(clearIntervalSpy.called).to.be.true;
    });
  });

  describe("wakuPeerExchangeDiscovery factory", () => {
    it("should create PeerExchangeDiscovery instance", () => {
      const factory = wakuPeerExchangeDiscovery({ TTL: 60000 });
      const discovery = factory(mockComponents);

      expect(discovery).to.be.instanceOf(PeerExchangeDiscovery);
    });

    it("should create PeerExchangeDiscovery with default options", () => {
      const factory = wakuPeerExchangeDiscovery();
      const discovery = factory(mockComponents);

      expect(discovery).to.be.instanceOf(PeerExchangeDiscovery);
    });
  });
});
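Taken together, the lifecycle tests above encode an idempotent start/stop pattern: `start()` registers the `peer:identify` listener and a periodic timer exactly once, and `stop()` tears both down only if discovery was started. A condensed sketch of that discipline; the class and member names are illustrative, not the actual PeerExchangeDiscovery internals:

// Illustrative distillation of the start/stop behaviour under test.
class PeriodicDiscovery {
  private interval?: ReturnType<typeof setInterval>;
  private started = false;

  public start(): void {
    if (this.started) return; // "should not start if already started"
    this.started = true;
    this.interval = setInterval(() => this.discover(), 30_000);
  }

  public stop(): void {
    if (!this.started) return; // "should not stop if not started"
    this.started = false;
    clearInterval(this.interval);
  }

  private discover(): void {
    // Query connected peers that advertise the peer-exchange codec.
  }
}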
Some files were not shown because too many files have changed in this diff.